code stringlengths 281 23.7M |
|---|
class Solution(object):
    """LeetCode 453: minimum moves to make all array elements equal,
    where a move increments n-1 of the n elements by 1."""

    def minMoves(self, nums):
        """Return the minimum number of moves needed to equalize *nums*.

        Incrementing n-1 elements by 1 is equivalent to decrementing one
        element by 1, so the answer is the total excess above the minimum:
        ``sum(nums) - len(nums) * min(nums)``.  (The original grouping /
        telescoping-sum implementation computed the same quantity in
        O(n log n) and mutated the input via ``sort()``; this closed form
        is O(n) and leaves *nums* untouched.)

        :param nums: list of integers; may be empty.
        :returns: non-negative integer move count (0 for an empty list).
        """
        if not nums:
            return 0
        return sum(nums) - len(nums) * min(nums)
class LevelModel(proteus.Transport.OneLevelTransport):
    """One-level spatial discretization of the kappa (turbulent kinetic
    energy) transport model, driving the compiled cKappa/cKappa2D kernels.

    The constructor mirrors proteus' ``OneLevelTransport`` setup sequence:
    store arguments, build quadrature rules, allocate quadrature-point
    storage (``q``, ``ebq``, ``ebqe``, ...), wire up numerical flux and
    boundary-condition bookkeeping, and finally instantiate the C++ kernel.
    """
    # Class-level call counter; not incremented anywhere in this chunk.
    nCalls = 0

    def __init__(self, uDict, phiDict, testSpaceDict, matType, dofBoundaryConditionsDict, dofBoundaryConditionsSetterDict, coefficients, elementQuadrature, elementBoundaryQuadrature, fluxBoundaryConditionsDict=None, advectiveFluxBoundaryConditionsSetterDict=None, diffusiveFluxBoundaryConditionsSetterDictDict=None, stressTraceBoundaryConditionsSetterDict=None, stabilization=None, shockCapturing=None, conservativeFluxDict=None, numericalFluxType=None, TimeIntegrationClass=None, massLumping=False, reactionLumping=False, options=None, name='defaultName', reuse_trial_and_test_quadrature=True, sd=True, movingDomain=False, bdyNullSpace=False):
        """Store arguments, build quadrature and storage, create the kernel.

        The argument list matches ``proteus.Transport.OneLevelTransport`` so
        this model can be constructed by the standard proteus drivers.
        """
        # --- flags and primary object references ------------------------
        self.bdyNullSpace = bdyNullSpace
        self.movingDomain = movingDomain
        self.tLast_mesh = None
        self.name = name
        self.sd = sd
        self.Hess = False
        self.lowmem = True
        self.timeTerm = True
        self.testIsTrial = True
        self.phiTrialIsTrial = True
        self.u = uDict
        self.ua = {}
        self.phi = phiDict
        self.dphi = {}
        self.matType = matType
        # Reusing quadrature across trial/test spaces requires identical FEM
        # space types for every component.
        self.reuse_test_trial_quadrature = reuse_trial_and_test_quadrature
        if self.reuse_test_trial_quadrature:
            for ci in range(1, coefficients.nc):
                assert (self.u[ci].femSpace.__class__.__name__ == self.u[0].femSpace.__class__.__name__), 'to reuse_test_trial_quad all femSpaces must be the same!'
        self.mesh = self.u[0].femSpace.mesh
        self.testSpace = testSpaceDict
        self.dirichletConditions = dofBoundaryConditionsDict
        self.dirichletNodeSetList = None
        # --- coefficients and optional numerics helpers -----------------
        self.coefficients = coefficients
        self.coefficients.initializeMesh(self.mesh)
        self.nc = self.coefficients.nc
        self.stabilization = stabilization
        self.shockCapturing = shockCapturing
        self.conservativeFlux = conservativeFluxDict
        self.fluxBoundaryConditions = fluxBoundaryConditionsDict
        self.advectiveFluxBoundaryConditionsSetterDict = advectiveFluxBoundaryConditionsSetterDict
        self.diffusiveFluxBoundaryConditionsSetterDictDict = diffusiveFluxBoundaryConditionsSetterDictDict
        # Scan every coefficient flag to decide whether the stabilization
        # depends nonlinearly on the solution.
        self.stabilizationIsNonlinear = False
        if (self.stabilization is not None):
            for ci in range(self.nc):
                if (ci in coefficients.mass):
                    for flag in list(coefficients.mass[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.advection):
                    for flag in list(coefficients.advection[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.diffusion):
                    for diffusionDict in list(coefficients.diffusion[ci].values()):
                        for flag in list(diffusionDict.values()):
                            if (flag != 'constant'):
                                self.stabilizationIsNonlinear = True
                if (ci in coefficients.potential):
                    for flag in list(coefficients.potential[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.reaction):
                    for flag in list(coefficients.reaction[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.hamiltonian):
                    for flag in list(coefficients.hamiltonian[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
        # Element-boundary integrals are needed when conservative fluxes, a
        # numerical flux, or flow-type flux boundary conditions are in play.
        self.elementBoundaryIntegrals = {}
        for ci in range(self.nc):
            self.elementBoundaryIntegrals[ci] = ((self.conservativeFlux is not None) or (numericalFluxType is not None) or (self.fluxBoundaryConditions[ci] == 'outFlow') or (self.fluxBoundaryConditions[ci] == 'mixedFlow') or (self.fluxBoundaryConditions[ci] == 'setFlow'))
        # --- global degree-of-freedom bookkeeping -----------------------
        self.nSpace_global = self.u[0].femSpace.nSpace_global
        self.nDOF_trial_element = [u_j.femSpace.max_nDOF_element for u_j in list(self.u.values())]
        self.nDOF_phi_trial_element = [phi_k.femSpace.max_nDOF_element for phi_k in list(self.phi.values())]
        self.n_phi_ip_element = [phi_k.femSpace.referenceFiniteElement.interpolationConditions.nQuadraturePoints for phi_k in list(self.phi.values())]
        self.nDOF_test_element = [femSpace.max_nDOF_element for femSpace in list(self.testSpace.values())]
        self.nFreeDOF_global = [dc.nFreeDOF_global for dc in list(self.dirichletConditions.values())]
        self.nVDOF_element = sum(self.nDOF_trial_element)
        self.nFreeVDOF_global = sum(self.nFreeDOF_global)
        proteus.NonlinearSolvers.NonlinearEquation.__init__(self, self.nFreeVDOF_global)
        # --- element quadrature selection -------------------------------
        elementQuadratureDict = {}
        elemQuadIsDict = isinstance(elementQuadrature, dict)
        if elemQuadIsDict:
            # per-integral rules with a 'default' fallback
            for I in self.coefficients.elementIntegralKeys:
                if (I in elementQuadrature):
                    elementQuadratureDict[I] = elementQuadrature[I]
                else:
                    elementQuadratureDict[I] = elementQuadrature['default']
        else:
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[I] = elementQuadrature
        if (self.stabilization is not None):
            # stabilization integrals reuse the base rules under a 'stab' key
            for I in self.coefficients.elementIntegralKeys:
                if elemQuadIsDict:
                    if (I in elementQuadrature):
                        elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature[I]
                    else:
                        elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature['default']
                else:
                    elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature
        if (self.shockCapturing is not None):
            for ci in self.shockCapturing.components:
                if elemQuadIsDict:
                    if (('numDiff', ci, ci) in elementQuadrature):
                        elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature[('numDiff', ci, ci)]
                    else:
                        elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature['default']
                else:
                    elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature
        if massLumping:
            # lumping replaces mass (and stab) rules with vertex quadrature
            for ci in list(self.coefficients.mass.keys()):
                elementQuadratureDict[('m', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[(('stab',) + I[1:])] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        if reactionLumping:
            for ci in list(self.coefficients.mass.keys()):
                elementQuadratureDict[('r', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[(('stab',) + I[1:])] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        # --- element-boundary quadrature selection ----------------------
        elementBoundaryQuadratureDict = {}
        if isinstance(elementBoundaryQuadrature, dict):
            for I in self.coefficients.elementBoundaryIntegralKeys:
                if (I in elementBoundaryQuadrature):
                    elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature[I]
                else:
                    elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature['default']
        else:
            for I in self.coefficients.elementBoundaryIntegralKeys:
                elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature
        # Merge the per-integral rules into unioned point/weight arrays.
        (self.elementQuadraturePoints, self.elementQuadratureWeights, self.elementQuadratureRuleIndeces) = proteus.Quadrature.buildUnion(elementQuadratureDict)
        self.nQuadraturePoints_element = self.elementQuadraturePoints.shape[0]
        self.nQuadraturePoints_global = (self.nQuadraturePoints_element * self.mesh.nElements_global)
        (self.elementBoundaryQuadraturePoints, self.elementBoundaryQuadratureWeights, self.elementBoundaryQuadratureRuleIndeces) = proteus.Quadrature.buildUnion(elementBoundaryQuadratureDict)
        self.nElementBoundaryQuadraturePoints_elementBoundary = self.elementBoundaryQuadraturePoints.shape[0]
        self.nElementBoundaryQuadraturePoints_global = ((self.mesh.nElements_global * self.mesh.nElementBoundaries_element) * self.nElementBoundaryQuadraturePoints_elementBoundary)
        # --- allocate quadrature-point storage --------------------------
        self.scalars_element = set()
        self.q = {}          # element quadrature values
        self.ebq = {}        # element-boundary quadrature values
        self.ebq_global = {}
        self.ebqe = {}       # exterior element-boundary quadrature values
        self.phi_ip = {}     # interpolation-point values
        self.ebqe['x'] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, 3), 'd')
        self.ebqe['n'] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
        self.q[('u', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('grad(u)', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
        self.q[('a', 0, 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
        self.q[('da', 0, 0, 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
        # Several entries deliberately alias the same array (no copies).
        self.q[('phi', 0)] = self.q[('u', 0)]
        self.q[('grad(phi)', 0)] = self.q[('grad(u)', 0)]
        self.q[('dphi', 0, 0)] = np.ones((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('m', 0)] = self.q[('u', 0)]
        self.q[('m_last', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('m_tmp', 0)] = self.q[('u', 0)]
        self.q[('cfl', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('numDiff', 0, 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.ebqe[('u', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('grad(u)', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
        self.ebqe[('advectiveFlux_bc_flag', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
        self.ebqe[('advectiveFlux_bc', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('advectiveFlux', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('diffusiveFlux_bc_flag', 0, 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
        self.ebqe[('diffusiveFlux_bc', 0, 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe['penalty'] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.points_elementBoundaryQuadrature = set()
        self.scalars_elementBoundaryQuadrature = set([('u', ci) for ci in range(self.nc)])
        self.vectors_elementBoundaryQuadrature = set()
        self.tensors_elementBoundaryQuadrature = set()
        # --- inflow boundary-condition storage --------------------------
        self.inflowBoundaryBC = {}
        self.inflowBoundaryBC_values = {}
        self.inflowFlux = {}
        for cj in range(self.nc):
            self.inflowBoundaryBC[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global,), 'i')
            self.inflowBoundaryBC_values[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nDOF_trial_element[cj]), 'd')
            self.inflowFlux[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        # --- identify interior (non-boundary) mesh nodes ----------------
        self.internalNodes = set(range(self.mesh.nNodes_global))
        for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
            ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
            eN_global = self.mesh.elementBoundaryElementsArray[(ebN, 0)]
            ebN_element = self.mesh.elementBoundaryLocalElementBoundariesArray[(ebN, 0)]
            for i in range(self.mesh.nNodes_element):
                if (i != ebN_element):
                    I = self.mesh.elementNodesArray[(eN_global, i)]
                    self.internalNodes -= set([I])
        self.nNodes_internal = len(self.internalNodes)
        self.internalNodesArray = np.zeros((self.nNodes_internal,), 'i')
        for (nI, n) in enumerate(self.internalNodes):
            self.internalNodesArray[nI] = n
        del self.internalNodes
        self.internalNodes = None
        # --- DOF maps, time integration, quadrature evaluation ----------
        prof.logEvent('Updating local to global mappings', 2)
        self.updateLocal2Global()
        prof.logEvent('Building time integration object', 2)
        prof.logEvent(prof.memory('inflowBC, internalNodes,updateLocal2Global', 'OneLevelTransport'), level=4)
        if (self.stabilization and self.stabilization.usesGradientStabilization):
            self.timeIntegration = TimeIntegrationClass(self, integrateInterpolationPoints=True)
        else:
            self.timeIntegration = TimeIntegrationClass(self)
        if (options is not None):
            self.timeIntegration.setFromOptions(options)
        prof.logEvent(prof.memory('TimeIntegration', 'OneLevelTransport'), level=4)
        prof.logEvent('Calculating numerical quadrature formulas', 2)
        self.calculateQuadrature()
        self.setupFieldStrides()
        comm = proteus.Comm.get()
        self.comm = comm
        if (comm.size() > 1):
            # weak BCs are required for parallel partitioned assembly
            assert ((numericalFluxType is not None) and numericalFluxType.useWeakDirichletConditions), 'You must use a numerical flux to apply weak boundary conditions for parallel runs'
        prof.logEvent(prof.memory('stride+offset', 'OneLevelTransport'), level=4)
        # --- numerical flux and penalty coefficients --------------------
        if (numericalFluxType is not None):
            if ((options is None) or (options.periodicDirichletConditions is None)):
                self.numericalFlux = numericalFluxType(self, dofBoundaryConditionsSetterDict, advectiveFluxBoundaryConditionsSetterDict, diffusiveFluxBoundaryConditionsSetterDictDict)
            else:
                self.numericalFlux = numericalFluxType(self, dofBoundaryConditionsSetterDict, advectiveFluxBoundaryConditionsSetterDict, diffusiveFluxBoundaryConditionsSetterDictDict, options.periodicDirichletConditions)
        else:
            self.numericalFlux = None
        if ('penalty' in self.ebq_global):
            for ebN in range(self.mesh.nElementBoundaries_global):
                for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                    self.ebq_global['penalty'][(ebN, k)] = old_div(self.numericalFlux.penalty_constant, (self.mesh.elementBoundaryDiametersArray[ebN] ** self.numericalFlux.penalty_power))
        if ('penalty' in self.ebqe):
            for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
                ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
                for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                    self.ebqe['penalty'][(ebNE, k)] = old_div(self.numericalFlux.penalty_constant, (self.mesh.elementBoundaryDiametersArray[ebN] ** self.numericalFlux.penalty_power))
        prof.logEvent(prof.memory('numericalFlux', 'OneLevelTransport'), level=4)
        self.elementEffectiveDiametersArray = self.mesh.elementInnerDiametersArray
        # --- post-processing / output helpers ---------------------------
        from proteus import PostProcessingTools
        self.velocityPostProcessor = PostProcessingTools.VelocityPostProcessingChooser(self)
        prof.logEvent(prof.memory('velocity postprocessor', 'OneLevelTransport'), level=4)
        from proteus import Archiver
        self.elementQuadratureDictionaryWriter = Archiver.XdmfWriter()
        self.elementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
        self.exteriorElementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
        # Evaluate flux BC values at t; fluxBoundaryConditionsObjectsDict is
        # populated during calculateQuadrature() above.
        for (ci, fbcObject) in list(self.fluxBoundaryConditionsObjectsDict.items()):
            self.ebqe[('advectiveFlux_bc_flag', ci)] = np.zeros(self.ebqe[('advectiveFlux_bc', ci)].shape, 'i')
            for (t, g) in list(fbcObject.advectiveFluxBoundaryConditionsDict.items()):
                if (ci in self.coefficients.advection):
                    self.ebqe[('advectiveFlux_bc', ci)][(t[0], t[1])] = g(self.ebqe['x'][(t[0], t[1])], self.timeIntegration.t)
                    self.ebqe[('advectiveFlux_bc_flag', ci)][(t[0], t[1])] = 1
            for (ck, diffusiveFluxBoundaryConditionsDict) in list(fbcObject.diffusiveFluxBoundaryConditionsDictDict.items()):
                self.ebqe[('diffusiveFlux_bc_flag', ck, ci)] = np.zeros(self.ebqe[('diffusiveFlux_bc', ck, ci)].shape, 'i')
                for (t, g) in list(diffusiveFluxBoundaryConditionsDict.items()):
                    self.ebqe[('diffusiveFlux_bc', ck, ci)][(t[0], t[1])] = g(self.ebqe['x'][(t[0], t[1])], self.timeIntegration.t)
                    self.ebqe[('diffusiveFlux_bc_flag', ck, ci)][(t[0], t[1])] = 1
        if hasattr(self.numericalFlux, 'setDirichletValues'):
            self.numericalFlux.setDirichletValues(self.ebqe)
        # Provide harmless defaults when the flux object lacks these members.
        if (not hasattr(self.numericalFlux, 'isDOFBoundary')):
            self.numericalFlux.isDOFBoundary = {0: np.zeros(self.ebqe[('u', 0)].shape, 'i')}
        if (not hasattr(self.numericalFlux, 'ebqe')):
            self.numericalFlux.ebqe = {('u', 0): np.zeros(self.ebqe[('u', 0)].shape, 'd')}
        self.globalResidualDummy = None
        # --- compiled kernel (2D or 3D variant) -------------------------
        compKernelFlag = 0
        if (self.nSpace_global == 2):
            self.kappa = cKappa2D_base(self.nSpace_global, self.nQuadraturePoints_element, self.u[0].femSpace.elementMaps.localFunctionSpace.dim, self.u[0].femSpace.referenceFiniteElement.localFunctionSpace.dim, self.testSpace[0].referenceFiniteElement.localFunctionSpace.dim, self.nElementBoundaryQuadraturePoints_elementBoundary, compKernelFlag, self.coefficients.aDarcy, self.coefficients.betaForch, self.coefficients.grain, self.coefficients.packFraction, self.coefficients.packMargin, self.coefficients.maxFraction, self.coefficients.frFraction, self.coefficients.sigmaC, self.coefficients.C3e, self.coefficients.C4e, self.coefficients.eR, self.coefficients.fContact, self.coefficients.mContact, self.coefficients.nContact, self.coefficients.angFriction, self.coefficients.vos_limiter, self.coefficients.mu_fr_limiter)
        else:
            self.kappa = cKappa_base(self.nSpace_global, self.nQuadraturePoints_element, self.u[0].femSpace.elementMaps.localFunctionSpace.dim, self.u[0].femSpace.referenceFiniteElement.localFunctionSpace.dim, self.testSpace[0].referenceFiniteElement.localFunctionSpace.dim, self.nElementBoundaryQuadraturePoints_elementBoundary, compKernelFlag, self.coefficients.aDarcy, self.coefficients.betaForch, self.coefficients.grain, self.coefficients.packFraction, self.coefficients.packMargin, self.coefficients.maxFraction, self.coefficients.frFraction, self.coefficients.sigmaC, self.coefficients.C3e, self.coefficients.C4e, self.coefficients.eR, self.coefficients.fContact, self.coefficients.mContact, self.coefficients.nContact, self.coefficients.angFriction, self.coefficients.vos_limiter, self.coefficients.mu_fr_limiter)
        self.forceStrongConditions = False
        if self.forceStrongConditions:
            # dead branch while forceStrongConditions is hard-coded False
            self.dirichletConditionsForceDOF = DOFBoundaryConditions(self.u[0].femSpace, dofBoundaryConditionsSetterDict[0], weakDirichletConditions=False)
        if self.movingDomain:
            self.MOVING_DOMAIN = 1.0
        else:
            self.MOVING_DOMAIN = 0.0
        # NOTE(review): moving-domain support is unconditionally disabled
        # here, clobbering the branch just above — presumably intentional
        # (not supported by this kernel); confirm before removing.
        self.movingDomain = False
        self.MOVING_DOMAIN = 0.0
        if (self.mesh.nodeVelocityArray is None):
            self.mesh.nodeVelocityArray = np.zeros(self.mesh.nodeArray.shape, 'd')
def calculateCoefficients(self):
pass
def calculateElementResidual(self):
if (self.globalResidualDummy is not None):
self.getResidual(self.u[0].dof, self.globalResidualDummy)
    def getResidual(self, u, r):
        """Evaluate the global nonlinear residual ``r(u)`` via the C++ kernel.

        :param u: global free-DOF vector for the current iterate.
        :param r: global residual vector; zeroed and filled in place.
        """
        import pdb   # kept for interactive debugging; unused in normal runs
        import copy  # NOTE(review): unused in this method
        r.fill(0.0)
        # Load the current iterate into the FEM solution objects.
        self.timeIntegration.calculateCoefs()
        self.timeIntegration.calculateU(u)
        self.setUnknowns(self.timeIntegration.u)
        # Refresh time-dependent boundary-condition values for component 0.
        self.numericalFlux.setDirichletValues(self.ebqe)
        for (t, g) in list(self.fluxBoundaryConditionsObjectsDict[0].advectiveFluxBoundaryConditionsDict.items()):
            self.ebqe[('advectiveFlux_bc', 0)][(t[0], t[1])] = g(self.ebqe['x'][(t[0], t[1])], self.timeIntegration.t)
            self.ebqe[('advectiveFlux_bc_flag', 0)][(t[0], t[1])] = 1
        for (ck, diffusiveFluxBoundaryConditionsDict) in list(self.fluxBoundaryConditionsObjectsDict[0].diffusiveFluxBoundaryConditionsDictDict.items()):
            for (t, g) in list(diffusiveFluxBoundaryConditionsDict.items()):
                self.ebqe[('diffusiveFlux_bc', ck, 0)][(t[0], t[1])] = g(self.ebqe['x'][(t[0], t[1])], self.timeIntegration.t)
                self.ebqe[('diffusiveFlux_bc_flag', ck, 0)][(t[0], t[1])] = 1
        if self.forceStrongConditions:
            # impose strong Dirichlet values directly on the DOF vector
            for (dofN, g) in list(self.dirichletConditionsForceDOF.DOFBoundaryConditionsDict.items()):
                self.u[0].dof[dofN] = g(self.dirichletConditionsForceDOF.DOFBoundaryPointDict[dofN], self.timeIntegration.t)
        # Pack the kernel arguments; the key names are fixed by the cKappa
        # C++ interface, so do not rename them.
        argsDict = cArgumentsDict.ArgumentsDict()
        argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
        argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
        argsDict['mesh_dof'] = self.mesh.nodeArray
        argsDict['mesh_velocity_dof'] = self.mesh.nodeVelocityArray
        argsDict['MOVING_DOMAIN'] = self.MOVING_DOMAIN
        argsDict['mesh_l2g'] = self.mesh.elementNodesArray
        argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
        argsDict['u_trial_ref'] = self.u[0].femSpace.psi
        argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
        argsDict['u_test_ref'] = self.u[0].femSpace.psi
        argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
        argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
        argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
        argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
        argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
        argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
        argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
        argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
        argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
        argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
        argsDict['nElements_global'] = self.mesh.nElements_global
        # physical parameters
        argsDict['nu_0'] = self.coefficients.nu_0
        argsDict['nu_1'] = self.coefficients.nu_1
        argsDict['sigma_k'] = self.coefficients.sigma_k
        argsDict['c_mu'] = self.coefficients.c_mu
        argsDict['rho_0'] = self.coefficients.rho_0
        argsDict['rho_1'] = self.coefficients.rho_1
        argsDict['sedFlag'] = self.coefficients.sedFlag
        argsDict['q_vos'] = self.coefficients.q_vos
        argsDict['q_vos_gradc'] = self.coefficients.grad_vos
        argsDict['ebqe_q_vos'] = self.coefficients.ebqe_vos
        argsDict['ebqe_q_vos_gradc'] = self.coefficients.ebqe_grad_vos
        argsDict['rho_f'] = self.coefficients.rho_0
        argsDict['rho_s'] = self.coefficients.rho_s
        argsDict['vs'] = self.coefficients.vs
        argsDict['ebqe_vs'] = self.coefficients.ebqe_vs
        argsDict['g'] = self.coefficients.g
        argsDict['dissipation_model_flag'] = self.coefficients.dissipation_model_flag
        argsDict['useMetrics'] = self.coefficients.useMetrics
        argsDict['alphaBDF'] = self.timeIntegration.alpha_bdf
        argsDict['lag_shockCapturing'] = self.shockCapturing.lag
        argsDict['shockCapturingDiffusion'] = self.shockCapturing.shockCapturingFactor
        argsDict['sc_uref'] = self.coefficients.sc_uref
        argsDict['sc_alpha'] = self.coefficients.sc_beta
        # solution state and coupled-field values
        argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
        argsDict['elementDiameter'] = self.mesh.elementDiametersArray
        argsDict['u_dof'] = self.u[0].dof
        argsDict['u_dof_old'] = self.coefficients.u_old_dof
        argsDict['velocity'] = self.coefficients.q_v
        argsDict['phi_ls'] = self.coefficients.q_phi
        argsDict['q_dissipation'] = self.coefficients.q_dissipation
        argsDict['q_grad_dissipation'] = self.coefficients.q_grad_dissipation
        argsDict['q_porosity'] = self.coefficients.q_porosity
        argsDict['velocity_dof_u'] = self.coefficients.velocity_dof_u
        argsDict['velocity_dof_v'] = self.coefficients.velocity_dof_v
        argsDict['velocity_dof_w'] = self.coefficients.velocity_dof_w
        argsDict['q_m'] = self.timeIntegration.m_tmp[0]
        argsDict['q_u'] = self.q[('u', 0)]
        argsDict['q_grad_u'] = self.q[('grad(u)', 0)]
        argsDict['q_m_betaBDF'] = self.timeIntegration.beta_bdf[0]
        argsDict['cfl'] = self.q[('cfl', 0)]
        argsDict['q_numDiff_u'] = self.shockCapturing.numDiff[0]
        argsDict['q_numDiff_u_last'] = self.shockCapturing.numDiff_last[0]
        argsDict['ebqe_penalty_ext'] = self.ebqe['penalty']
        argsDict['offset_u'] = self.offset[0]
        argsDict['stride_u'] = self.stride[0]
        argsDict['globalResidual'] = r
        # exterior boundary data
        argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
        argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
        argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
        argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
        argsDict['ebqe_velocity_ext'] = self.coefficients.ebqe_v
        argsDict['isDOFBoundary_u'] = self.numericalFlux.isDOFBoundary[0]
        argsDict['ebqe_bc_u_ext'] = self.numericalFlux.ebqe[('u', 0)]
        argsDict['isAdvectiveFluxBoundary_u'] = self.ebqe[('advectiveFlux_bc_flag', 0)]
        argsDict['ebqe_bc_advectiveFlux_u_ext'] = self.ebqe[('advectiveFlux_bc', 0)]
        argsDict['isDiffusiveFluxBoundary_u'] = self.ebqe[('diffusiveFlux_bc_flag', 0, 0)]
        argsDict['ebqe_bc_diffusiveFlux_u_ext'] = self.ebqe[('diffusiveFlux_bc', 0, 0)]
        argsDict['ebqe_phi'] = self.coefficients.ebqe_phi
        argsDict['epsFact'] = self.coefficients.epsFact
        argsDict['ebqe_dissipation'] = self.coefficients.ebqe_dissipation
        argsDict['ebqe_porosity'] = self.coefficients.ebqe_porosity
        argsDict['ebqe_u'] = self.ebqe[('u', 0)]
        argsDict['ebqe_flux'] = self.ebqe[('advectiveFlux', 0)]
        self.kappa.calculateResidual(argsDict)
        if self.forceStrongConditions:
            # zero residual rows of strongly-enforced Dirichlet DOFs
            for (dofN, g) in list(self.dirichletConditionsForceDOF.DOFBoundaryConditionsDict.items()):
                r[dofN] = 0
        if self.stabilization:
            self.stabilization.accumulateSubgridMassHistory(self.q)
        prof.logEvent('Global residual', level=9, data=r)
        self.nonlinear_function_evaluations += 1
        if (self.globalResidualDummy is None):
            self.globalResidualDummy = np.zeros(r.shape, 'd')
    def getJacobian(self, jacobian):
        """Assemble the global Jacobian (CSR) via the C++ kernel.

        :param jacobian: sparse CSR matrix object; zeroed and filled in place.
        :returns: the same *jacobian* object.
        """
        cfemIntegrals.zeroJacobian_CSR(self.nNonzerosInJacobian, jacobian)
        # Pack kernel arguments; key names are fixed by the cKappa C++
        # interface (largely parallel to getResidual's argument list).
        argsDict = cArgumentsDict.ArgumentsDict()
        argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
        argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
        argsDict['mesh_dof'] = self.mesh.nodeArray
        argsDict['mesh_velocity_dof'] = self.mesh.nodeVelocityArray
        argsDict['MOVING_DOMAIN'] = self.MOVING_DOMAIN
        argsDict['mesh_l2g'] = self.mesh.elementNodesArray
        argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
        argsDict['u_trial_ref'] = self.u[0].femSpace.psi
        argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
        argsDict['u_test_ref'] = self.u[0].femSpace.psi
        argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
        argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
        argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
        argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
        argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
        argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
        argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
        argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
        argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
        argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
        argsDict['nElements_global'] = self.mesh.nElements_global
        # physical parameters
        argsDict['nu_0'] = self.coefficients.nu_0
        argsDict['nu_1'] = self.coefficients.nu_1
        argsDict['sigma_k'] = self.coefficients.sigma_k
        argsDict['c_mu'] = self.coefficients.c_mu
        argsDict['rho_0'] = self.coefficients.rho_0
        argsDict['rho_1'] = self.coefficients.rho_1
        argsDict['dissipation_model_flag'] = self.coefficients.dissipation_model_flag
        argsDict['useMetrics'] = self.coefficients.useMetrics
        argsDict['alphaBDF'] = self.timeIntegration.alpha_bdf
        argsDict['lag_shockCapturing'] = self.shockCapturing.lag
        argsDict['shockCapturingDiffusion'] = self.shockCapturing.shockCapturingFactor
        # solution state and coupled-field values
        argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
        argsDict['elementDiameter'] = self.mesh.elementDiametersArray
        argsDict['u_dof'] = self.u[0].dof
        argsDict['u_dof_old'] = self.coefficients.u_old_dof
        argsDict['velocity'] = self.coefficients.q_v
        argsDict['phi_ls'] = self.coefficients.q_phi
        argsDict['q_dissipation'] = self.coefficients.q_dissipation
        argsDict['q_grad_dissipation'] = self.coefficients.q_grad_dissipation
        argsDict['q_porosity'] = self.coefficients.q_porosity
        argsDict['sedFlag'] = self.coefficients.sedFlag
        argsDict['q_vos'] = self.coefficients.q_vos
        argsDict['q_vos_gradc'] = self.coefficients.grad_vos
        argsDict['ebqe_q_vos'] = self.coefficients.ebqe_vos
        argsDict['ebqe_q_vos_gradc'] = self.coefficients.ebqe_grad_vos
        argsDict['rho_f'] = self.coefficients.rho_0
        argsDict['rho_s'] = self.coefficients.rho_s
        argsDict['vs'] = self.coefficients.vs
        argsDict['ebqe_vs'] = self.coefficients.ebqe_vs
        argsDict['g'] = self.coefficients.g
        argsDict['velocity_dof_u'] = self.coefficients.velocity_dof_u
        argsDict['velocity_dof_v'] = self.coefficients.velocity_dof_v
        argsDict['velocity_dof_w'] = self.coefficients.velocity_dof_w
        argsDict['q_m_betaBDF'] = self.timeIntegration.beta_bdf[0]
        argsDict['cfl'] = self.q[('cfl', 0)]
        argsDict['q_numDiff_u_last'] = self.shockCapturing.numDiff_last[0]
        argsDict['ebqe_penalty_ext'] = self.ebqe['penalty']
        # CSR structure and output matrix
        argsDict['csrRowIndeces_u_u'] = self.csrRowIndeces[(0, 0)]
        argsDict['csrColumnOffsets_u_u'] = self.csrColumnOffsets[(0, 0)]
        argsDict['globalJacobian'] = jacobian.getCSRrepresentation()[2]
        # exterior boundary data
        argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
        argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
        argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
        argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
        argsDict['ebqe_velocity_ext'] = self.coefficients.ebqe_v
        argsDict['isDOFBoundary_u'] = self.numericalFlux.isDOFBoundary[0]
        argsDict['ebqe_bc_u_ext'] = self.numericalFlux.ebqe[('u', 0)]
        argsDict['isAdvectiveFluxBoundary_u'] = self.ebqe[('advectiveFlux_bc_flag', 0)]
        argsDict['ebqe_bc_advectiveFlux_u_ext'] = self.ebqe[('advectiveFlux_bc', 0)]
        argsDict['isDiffusiveFluxBoundary_u'] = self.ebqe[('diffusiveFlux_bc_flag', 0, 0)]
        argsDict['ebqe_bc_diffusiveFlux_u_ext'] = self.ebqe[('diffusiveFlux_bc', 0, 0)]
        argsDict['csrColumnOffsets_eb_u_u'] = self.csrColumnOffsets_eb[(0, 0)]
        argsDict['ebqe_phi'] = self.coefficients.ebqe_phi
        argsDict['epsFact'] = self.coefficients.epsFact
        argsDict['ebqe_dissipation'] = self.coefficients.ebqe_dissipation
        argsDict['ebqe_porosity'] = self.coefficients.ebqe_porosity
        self.kappa.calculateJacobian(argsDict)
        if self.forceStrongConditions:
            # Replace each strongly-enforced Dirichlet row with an identity
            # row (diagonal = scaling, off-diagonals = 0).
            scaling = 1.0
            for dofN in list(self.dirichletConditionsForceDOF.DOFBoundaryConditionsDict.keys()):
                global_dofN = dofN
                for i in range(self.rowptr[global_dofN], self.rowptr[(global_dofN + 1)]):
                    if (self.colind[i] == global_dofN):
                        self.nzval[i] = scaling
                    else:
                        self.nzval[i] = 0.0
        prof.logEvent('Jacobian ', level=10, data=jacobian)
        self.nonlinear_function_jacobian_evaluations += 1
        return jacobian
    def calculateElementQuadrature(self):
        """Evaluate element maps and basis values/gradients at the reference
        element quadrature points, then let the coefficients and the optional
        stabilization / shock-capturing objects initialize their storage."""
        self.u[0].femSpace.elementMaps.getBasisValuesRef(self.elementQuadraturePoints)
        self.u[0].femSpace.elementMaps.getBasisGradientValuesRef(self.elementQuadraturePoints)
        self.u[0].femSpace.getBasisValuesRef(self.elementQuadraturePoints)
        self.u[0].femSpace.getBasisGradientValuesRef(self.elementQuadraturePoints)
        self.coefficients.initializeElementQuadrature(self.timeIntegration.t, self.q)
        if (self.stabilization is not None):
            self.stabilization.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
            self.stabilization.initializeTimeIntegration(self.timeIntegration)
        if (self.shockCapturing is not None):
            self.shockCapturing.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
def calculateElementBoundaryQuadrature(self):
pass
    def calculateExteriorElementBoundaryQuadrature(self):
        """Evaluate maps/basis traces at the exterior-boundary quadrature
        points, compute their physical coordinates into ``ebqe['x']``, and
        build the per-component flux boundary-condition objects."""
        self.u[0].femSpace.elementMaps.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
        self.u[0].femSpace.elementMaps.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
        self.u[0].femSpace.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
        self.u[0].femSpace.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
        self.u[0].femSpace.elementMaps.getValuesGlobalExteriorTrace(self.elementBoundaryQuadraturePoints, self.ebqe['x'])
        # One FluxBoundaryConditions object per component that has an
        # advective-flux BC setter registered.
        self.fluxBoundaryConditionsObjectsDict = dict([(cj, proteus.FemTools.FluxBoundaryConditions(self.mesh, self.nElementBoundaryQuadraturePoints_elementBoundary, self.ebqe['x'], getAdvectiveFluxBoundaryConditions=self.advectiveFluxBoundaryConditionsSetterDict[cj], getDiffusiveFluxBoundaryConditions=self.diffusiveFluxBoundaryConditionsSetterDictDict[cj])) for cj in list(self.advectiveFluxBoundaryConditionsSetterDict.keys())])
        self.coefficients.initializeGlobalExteriorElementBoundaryQuadrature(self.timeIntegration.t, self.ebqe)
def estimate_mt(self):
pass
def calculateSolutionAtQuadrature(self):
pass
def calculateAuxiliaryQuantitiesAfterStep(self):
pass |
def _test_correct_response_for_set_extent_competed_type_codes(client):
    """Integration test: /api/v2/search/spending_over_time grouped by fiscal
    year and filtered to three extent-competed type codes must return the
    expected per-year aggregated amounts.

    Fixes: removed a stray trailing '|' (markdown-table residue) that made
    the final assert a syntax error, and rebuilt the unreadable
    expected-result literal programmatically (same value).

    NOTE(review): relies on the module's fixtures having loaded the awards
    whose amounts appear below (16028.0 in FY2014, 8026.0 in FY2016).
    """
    payload = {
        'group': 'fiscal_year',
        'filters': {
            'extent_competed_type_codes': ['extent_competed_4', 'extent_competed_24', 'extent_competed_26'],
            'time_period': [{'start_date': '2007-10-01', 'end_date': '2020-09-30'}],
        },
    }
    resp = client.post('/api/v2/search/spending_over_time', content_type='application/json', data=json.dumps(payload))
    # Every fiscal year 2008-2020 appears; only 2014 and 2016 have spending.
    nonzero_amounts = {2014: 16028.0, 2016: 8026.0}
    expected_result = [
        {'aggregated_amount': nonzero_amounts.get(fy, 0), 'time_period': {'fiscal_year': str(fy)}}
        for fy in range(2008, 2021)
    ]
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json().get('results') == expected_result, 'Extent Competed Type Codes filter does not match expected result'
class ZenpyCacheManager:
    """Owns the per-object-type ZenpyCache instances and all operations on them.

    Caching can be switched off globally via ``disabled``; unknown object types
    are silently ignored everywhere.
    """

    def __init__(self, disabled=False):
        self.disabled = disabled
        # Plain LRU caches for long-lived objects; TTL caches for objects that
        # go stale quickly (tickets) or periodically (sharing agreements).
        lru_types = ('user', 'organization', 'group', 'brand', 'request', 'ticket_field', 'identity')
        self.mapping = {object_type: ZenpyCache('LRUCache', maxsize=10000) for object_type in lru_types}
        self.mapping['ticket'] = ZenpyCache('TTLCache', maxsize=10000, ttl=30)
        self.mapping['sharing_agreement'] = ZenpyCache('TTLCache', maxsize=10000, ttl=6000)

    def add(self, zenpy_object):
        """Store *zenpy_object* in its type's cache, keyed by its key attribute."""
        object_type = get_object_type(zenpy_object)
        if self.disabled or object_type not in self.mapping:
            return
        attr_name = self._cache_key_attribute(object_type)
        cache_key = getattr(zenpy_object, attr_name)
        log.debug('Caching: [{}({}={})]'.format(zenpy_object.__class__.__name__, attr_name, cache_key))
        self.mapping[object_type][cache_key] = zenpy_object

    def delete(self, to_delete):
        """Evict a single object or a list of objects from their caches."""
        if not isinstance(to_delete, list):
            to_delete = [to_delete]
        for zenpy_object in to_delete:
            object_type = get_object_type(zenpy_object)
            object_cache = self.mapping.get(object_type, None)
            if not object_cache:
                continue
            removed_object = object_cache.pop(zenpy_object.id, None)
            if removed_object:
                log.debug('Cache RM: [%s %s]' % (object_type.capitalize(), zenpy_object.id))

    def get(self, object_type, cache_key):
        """Return the cached object, or None on miss, unknown type, or disabled cache."""
        if self.disabled or object_type not in self.mapping:
            return None
        cache = self.mapping[object_type]
        if cache_key not in cache:
            log.debug('Cache MISS: [%s %s]' % (object_type.capitalize(), cache_key))
            return None
        log.debug('Cache HIT: [%s %s]' % (object_type.capitalize(), cache_key))
        return cache[cache_key]

    def query_cache_by_object(self, zenpy_object):
        """Look up the cached copy of *zenpy_object* via its key attribute."""
        object_type = get_object_type(zenpy_object)
        key_attr = self._cache_key_attribute(object_type)
        return self.get(object_type, getattr(zenpy_object, key_attr))

    def purge_cache(self, object_type):
        """Empty the cache for *object_type*, if one exists."""
        cache = self.mapping.get(object_type)
        if cache is not None:
            log.debug('Purging [{}] cache of {} values.'.format(object_type, len(cache)))
            cache.purge()

    def in_cache(self, zenpy_object):
        """True if a cached copy of *zenpy_object* exists."""
        object_type = get_object_type(zenpy_object)
        cache_key_attr = self._cache_key_attribute(object_type)
        return self.get(object_type, getattr(zenpy_object, cache_key_attr)) is not None

    def should_cache(self, zenpy_object):
        """True if this manager maintains a cache for the object's type."""
        return get_object_type(zenpy_object) in self.mapping

    def disable(self):
        self.disabled = True

    def enable(self):
        self.disabled = False

    def status(self):
        """Human-readable enabled/disabled state."""
        return 'Cache disabled' if self.disabled else 'Cache enabled'

    def get_cache_engines(self):
        """The cache implementations ZenpyCache supports."""
        return ZenpyCache.AVAILABLE_CACHES

    def _cache_key_attribute(self, object_type):
        # Every cached object type is currently keyed on its 'id' attribute.
        return 'id'
def isAlive():
    """Check the pet's vitals; run the game-over flow on death, otherwise
    re-schedule this check on the Tk event loop.

    Returns False when the pet has died; returns None while it is alive.
    """
    global hunger, fun, health, alive
    if min(hunger, fun, health) <= 0:
        # Death: show the dead picture, announce it, and offer a restart.
        catPic.config(image=deadphoto)
        _thread.start_new_thread(updateLabel, (2, TAMA_NAME + ' has died!'))
        startLabel.config(text='Game Over!')
        endGame()
        if tkinter.messagebox.askyesno('Play again?', 'Do you want to play again?'):
            start()
        return False
    if alive == True:  # kept as a loose comparison to match the original semantics
        # Still alive: poll again in 100 ms.
        hungerBar.after(100, isAlive)
# NOTE(review): the line above this def was the bare string ('PyTorchWrapper.v1')
# — a decorator whose callable was stripped. Restored as @registry.layers(...),
# the registration Thinc uses for this wrapper; confirm against upstream.
@registry.layers('PyTorchWrapper.v1')
def PyTorchWrapper(pytorch_model: Any, convert_inputs: Optional[Callable] = None, convert_outputs: Optional[Callable] = None) -> Model[(Any, Any)]:
    """Wrap a PyTorch model so it can be used as a Thinc ``Model``.

    convert_inputs / convert_outputs translate between Thinc arrays and the
    PyTorch model's expected tensors; sensible defaults are used when omitted.
    """
    if convert_inputs is None:
        convert_inputs = convert_pytorch_default_inputs
    if convert_outputs is None:
        convert_outputs = convert_pytorch_default_outputs
    return Model(
        'pytorch',
        forward,
        attrs={'convert_inputs': convert_inputs, 'convert_outputs': convert_outputs},
        shims=[PyTorchShim(pytorch_model)],
        dims={'nI': None, 'nO': None},
    )
def get_repo(repo_dir: 'Path | None' = None) -> 'tuple[str,str,str,Path]':
    """Resolve the foundry repository at (or above) *repo_dir*.

    Returns (repository RID, git ref, git revision hash, git top-level dir).
    Raises with a specific message for each failure mode: no git repo, no
    gradle.properties, unreadable file, or no transformsRepoRid entry.
    """
    git_dir = git_toplevel_dir(repo_dir, use_git=True)
    if not git_dir:
        raise Exception("If you don't provide a repository RID you need to be in a repository directory to detect what you want to build.")
    gradle_props = git_dir.joinpath('gradle.properties')
    if not gradle_props.is_file():
        raise Exception("There is no gradle.properties file at the top of the git repository, can't get repository RID.")
    try:
        with gradle_props.open('r') as gpf:
            for line in gpf.readlines():
                if line.startswith('transformsRepoRid'):
                    # 'transformsRepoRid = <rid>' — take everything after '='.
                    return (line.split('=')[1].strip(), get_git_ref(git_dir), get_git_revision_hash(git_dir), git_dir)
    except Exception as e:
        raise Exception("Can't get repository RID from the gradle.properties file. Malformed file?") from e
    raise Exception("Can't get repository RID from the gradle.properties file. Is this really a foundry repository?")
def get_current_tag(existing: bool = False, repo=None, all_tags: Optional[List] = None) -> Optional[str]:
    """Return the current git tag.

    With ``existing=True``, return the name of a tag pointing at HEAD (or None);
    otherwise fetch tags and return ``git describe --tags --dirty --always``.

    ``repo`` defaults to the repository at the current directory; ``all_tags``
    lets callers supply a pre-fetched tag list.

    Fix: the default for ``all_tags`` was the mutable literal ``[]``; changed to
    ``None`` (behaviorally identical here, since an empty list also triggered
    the ``get_all_tags`` fallback) to avoid the shared-mutable-default pitfall.
    """
    if repo is None:
        from git.repo import Repo
        repo = Repo()
    if existing:
        tags = all_tags or get_all_tags(repo)
        return next((tag.name for tag in tags if tag.commit == repo.head.commit), None)
    git_session = repo.git()
    # Refresh tags from the remote so describe sees the latest ones.
    git_session.fetch('--force', '--tags')
    return git_session.describe('--tags', '--dirty', '--always')
def print_help(available_plugins=None):
    """Print the top-level usage text: built-in commands, then (optionally)
    commands contributed by plugin modules.

    available_plugins -- mapping of command name -> {'summary': ...} or None.
    All user-visible strings go through gettext (``_``).
    """
    print((_('usage: ') + _('fdroid [<command>] [-h|--help|--version|<args>]')))
    print('')
    print(_('Valid commands are:'))
    for (cmd, summary) in COMMANDS.items():
        # Pad each command name to a 15-character column before its summary.
        print(((('   ' + cmd) + (' ' * (15 - len(cmd)))) + summary))
    if available_plugins:
        print(_('commands from plugin modules:'))
        for command in sorted(available_plugins.keys()):
            print('   {:15}{}'.format(command, available_plugins[command]['summary']))
    print('')
class TestClientRequestFunctionality:
    """Tests for ``Client.request`` over the mocked HTTP layer.

    NOTE(review): the fixture below and the ``base_url`` literal were corrupted
    in the source (lost decorator name, broken ``def`` line, truncated string).
    They are reconstructed minimally — an autouse fixture that pulls in the
    HTTP-mocking fixture for every test, and a placeholder base URL. Confirm
    both against version control before relying on them.
    """

    @pytest.fixture(autouse=True)
    def patch_client(self, patch_http_client):
        pass

    def setup_method(self):
        self.base_url = 'https://test.fintoc.com/v1'  # placeholder — original literal truncated
        self.api_key = 'super_secret_api_key'
        self.user_agent = 'fintoc-python/test'
        self.params = {'first_param': 'first_value', 'second_param': 'second_value'}
        self.client = Client(self.base_url, self.api_key, self.user_agent, params=self.params)

    def test_paginated_request(self):
        # Paginated requests are returned lazily as a generator.
        data = self.client.request('/movements', paginated=True)
        assert isinstance(data, GeneratorType)

    def test_get_request(self):
        data = self.client.request('/movements/3', method='get')
        assert isinstance(data, dict)
        assert len(data.keys()) > 0

    def test_delete_request(self):
        # DELETE responses deserialize to an empty dict.
        data = self.client.request('/movements/3', method='delete')
        assert isinstance(data, dict)
        assert len(data.keys()) == 0
def test_rulesDocument(tmpdir):
    """Round-trip a designspace document containing a rule with one condition
    set, checking normalization and that an unwrapped <condition> element read
    back becomes a second condition set.
    """
    tmpdir = str(tmpdir)
    testDocPath = os.path.join(tmpdir, 'testRules.designspace')
    testDocPath2 = os.path.join(tmpdir, 'testRules_roundtrip.designspace')
    doc = DesignSpaceDocument()
    doc.rulesProcessingLast = True
    # Two axes with different ranges so normalization maps them differently.
    a1 = AxisDescriptor()
    a1.minimum = 0
    a1.maximum = 1000
    a1.default = 0
    a1.name = 'axisName_a'
    a1.tag = 'TAGA'
    b1 = AxisDescriptor()
    b1.minimum = 2000
    b1.maximum = 3000
    b1.default = 2000
    b1.name = 'axisName_b'
    b1.tag = 'TAGB'
    doc.addAxis(a1)
    doc.addAxis(b1)
    # One rule, one condition set spanning both axes, one substitution.
    r1 = RuleDescriptor()
    r1.name = 'named.rule.1'
    r1.conditionSets.append([dict(name='axisName_a', minimum=0, maximum=1000), dict(name='axisName_b', minimum=0, maximum=3000)])
    r1.subs.append(('a', 'a.alt'))
    doc.addRule(r1)
    assert (len(doc.rules) == 1)
    assert (len(doc.rules[0].conditionSets) == 1)
    assert (len(doc.rules[0].conditionSets[0]) == 2)
    assert (_axesAsDict(doc.axes) == {'axisName_a': {'map': [], 'name': 'axisName_a', 'default': 0, 'minimum': 0, 'maximum': 1000, 'tag': 'TAGA'}, 'axisName_b': {'map': [], 'name': 'axisName_b', 'default': 2000, 'minimum': 2000, 'maximum': 3000, 'tag': 'TAGB'}})
    assert (doc.rules[0].conditionSets == [[{'minimum': 0, 'maximum': 1000, 'name': 'axisName_a'}, {'minimum': 0, 'maximum': 3000, 'name': 'axisName_b'}]])
    assert (doc.rules[0].subs == [('a', 'a.alt')])
    # Normalization rescales every condition range into [0, 1].
    doc.normalize()
    assert (doc.rules[0].name == 'named.rule.1')
    assert (doc.rules[0].conditionSets == [[{'minimum': 0.0, 'maximum': 1.0, 'name': 'axisName_a'}, {'minimum': 0.0, 'maximum': 1.0, 'name': 'axisName_b'}]])
    assert (len(doc.rules[0].conditionSets) == 1)
    doc.write(testDocPath)
    # Inject a bare <condition> (not wrapped in <conditionset>) into the XML;
    # the reader must wrap it into its own condition set.
    _addUnwrappedCondition(testDocPath)
    doc2 = DesignSpaceDocument()
    doc2.read(testDocPath)
    assert doc2.rulesProcessingLast
    assert (len(doc2.axes) == 2)
    assert (len(doc2.rules) == 1)
    assert (len(doc2.rules[0].conditionSets) == 2)
    # Write/read once more: the extra condition set must survive a round-trip.
    doc2.write(testDocPath2)
    doc3 = DesignSpaceDocument()
    doc3.read(testDocPath2)
    assert (len(doc3.rules) == 1)
    assert (len(doc3.rules[0].conditionSets) == 2)
def main() -> int:
    """Build the whole cross-compiled Python distribution: toolchain setup,
    native dependencies, CPython itself, then Python packages.

    NOTE(review): annotated ``-> int`` but falls off the end (returns None);
    confirm how the caller treats the exit code.
    """
    # ``configure`` is rebound below for cross builds; ``cmake`` and ``options``
    # are shared with the build_* helpers defined elsewhere in this file.
    global configure, cmake, options
    parser = argparse.ArgumentParser()
    parser.add_argument('--arch', default='linux-x86_64', choices=['linux-x86_64', 'x86_64', 'aarch64'])
    parser.add_argument('--python', default='3.11')
    parser.add_argument('--android-api-level', type=int, default=21)
    options = parser.parse_args()
    (python, pip) = setup_python(options.python)
    # Position-independent code everywhere; parallel make across all cores.
    os.environ['CFLAGS'] = '-fPIC'
    os.environ['CXXFLAGS'] = '-fPIC'
    os.environ['MAKEFLAGS'] = f'-j {os.cpu_count()}'
    if (options.arch != 'linux-x86_64'):
        # Non-host arch: set up the Android NDK and point autoconf at it.
        env = setup_android_ndk(options.arch, api_level=options.android_api_level)
        configure = configure.bake(*env.autoconf_cross_args)
    # Native dependencies, pinned to specific upstream tags.
    build_sqlite('version-3.41.2', options.arch)
    build_libffi('v3.4.4', options.arch)
    build_openssl('OpenSSL_1_1_1t', options.arch)
    build_libgit2('v1.6.4', options.arch)
    build_libev('master', options.arch)
    build_libcares('cares-1_19_0', options.arch)
    build_cpython(python, 'v3.11.3', options.arch)
    if (options.arch == 'linux-x86_64'):
        host_pip_install = pip
    else:
        # Cross builds install packages through a crossenv-wrapped pip.
        host_pip_install = setup_crossenv(python, pip, options.arch)
    build_trivial_packages(host_pip_install, options.arch)
    build_gevent(host_pip_install, '22.10.2', options.arch)
    strip_binaries(options.arch)
    print('** Build complete')
    print(f" python is at {ProjectPaths('cpython', options.arch).install}")
    print(f" libgit2 is at {ProjectPaths('libgit2', options.arch).install}")
def win_common_types(maxsize):
    """Return the table mapping common Windows typedef names to C type strings
    (or ``model`` type objects) that cffi pre-registers when parsing.

    maxsize -- the target platform's ``sys.maxsize``; used to decide whether
    pointer-sized integer aliases (``INT_PTR`` etc.) are 32- or 64-bit.
    """
    result = {}
    if (maxsize < (1 << 32)):
        # 32-bit target: pointer-sized integers are (unsigned) int/long.
        result.update({'HALF_PTR': 'short', 'INT_PTR': 'int', 'LONG_PTR': 'long', 'UHALF_PTR': 'unsigned short', 'UINT_PTR': 'unsigned int', 'ULONG_PTR': 'unsigned long'})
    else:
        # 64-bit target: pointer-sized integers are (unsigned) long long.
        result.update({'HALF_PTR': 'int', 'INT_PTR': 'long long', 'LONG_PTR': 'long long', 'UHALF_PTR': 'unsigned int', 'UINT_PTR': 'unsigned long long', 'ULONG_PTR': 'unsigned long long'})
    # Fixed-size scalars, HANDLE-family aliases, pointer typedefs, and the
    # UNICODE_STRING struct. Entries valued 'set-unicode-needed' mark the
    # TCHAR family, which only resolves once UNICODE/ANSI mode is chosen.
    result.update({'BYTE': 'unsigned char', 'BOOL': 'int', 'CCHAR': 'char', 'CHAR': 'char', 'DWORD': 'unsigned long', 'DWORD32': 'unsigned int', 'DWORD64': 'unsigned long long', 'FLOAT': 'float', 'INT': 'int', 'INT8': 'signed char', 'INT16': 'short', 'INT32': 'int', 'INT64': 'long long', 'LONG': 'long', 'LONGLONG': 'long long', 'LONG32': 'int', 'LONG64': 'long long', 'WORD': 'unsigned short', 'PVOID': model.voidp_type, 'ULONGLONG': 'unsigned long long', 'WCHAR': 'wchar_t', 'SHORT': 'short', 'UCHAR': 'unsigned char', 'UINT': 'unsigned int', 'UINT8': 'unsigned char', 'UINT16': 'unsigned short', 'UINT32': 'unsigned int', 'UINT64': 'unsigned long long', 'ULONG': 'unsigned long', 'ULONG32': 'unsigned int', 'ULONG64': 'unsigned long long', 'USHORT': 'unsigned short', 'SIZE_T': 'ULONG_PTR', 'SSIZE_T': 'LONG_PTR', 'ATOM': 'WORD', 'BOOLEAN': 'BYTE', 'COLORREF': 'DWORD', 'HANDLE': 'PVOID', 'DWORDLONG': 'ULONGLONG', 'DWORD_PTR': 'ULONG_PTR', 'HACCEL': 'HANDLE', 'HBITMAP': 'HANDLE', 'HBRUSH': 'HANDLE', 'HCOLORSPACE': 'HANDLE', 'HCONV': 'HANDLE', 'HCONVLIST': 'HANDLE', 'HDC': 'HANDLE', 'HDDEDATA': 'HANDLE', 'HDESK': 'HANDLE', 'HDROP': 'HANDLE', 'HDWP': 'HANDLE', 'HENHMETAFILE': 'HANDLE', 'HFILE': 'int', 'HFONT': 'HANDLE', 'HGDIOBJ': 'HANDLE', 'HGLOBAL': 'HANDLE', 'HHOOK': 'HANDLE', 'HICON': 'HANDLE', 'HCURSOR': 'HICON', 'HINSTANCE': 'HANDLE', 'HKEY': 'HANDLE', 'HKL': 'HANDLE', 'HLOCAL': 'HANDLE', 'HMENU': 'HANDLE', 'HMETAFILE': 'HANDLE', 'HMODULE': 'HINSTANCE', 'HMONITOR': 'HANDLE', 'HPALETTE': 'HANDLE', 'HPEN': 'HANDLE', 'HRESULT': 'LONG', 'HRGN': 'HANDLE', 'HRSRC': 'HANDLE', 'HSZ': 'HANDLE', 'WINSTA': 'HANDLE', 'HWND': 'HANDLE', 'LANGID': 'WORD', 'LCID': 'DWORD', 'LCTYPE': 'DWORD', 'LGRPID': 'DWORD', 'LPARAM': 'LONG_PTR', 'LPBOOL': 'BOOL *', 'LPBYTE': 'BYTE *', 'LPCOLORREF': 'DWORD *', 'LPCSTR': 'const char *', 'LPCVOID': model.const_voidp_type, 'LPCWSTR': 'const WCHAR *', 'LPDWORD': 'DWORD *', 'LPHANDLE': 'HANDLE *', 'LPINT': 'int *', 'LPLONG': 'long *', 'LPSTR': 'CHAR *', 'LPWSTR': 'WCHAR *', 'LPVOID': model.voidp_type, 'LPWORD': 'WORD *', 'LRESULT': 'LONG_PTR', 'PBOOL': 'BOOL *', 'PBOOLEAN': 'BOOLEAN *', 'PBYTE': 'BYTE *', 'PCHAR': 'CHAR *', 'PCSTR': 'const CHAR *', 'PCWSTR': 'const WCHAR *', 'PDWORD': 'DWORD *', 'PDWORDLONG': 'DWORDLONG *', 'PDWORD_PTR': 'DWORD_PTR *', 'PDWORD32': 'DWORD32 *', 'PDWORD64': 'DWORD64 *', 'PFLOAT': 'FLOAT *', 'PHALF_PTR': 'HALF_PTR *', 'PHANDLE': 'HANDLE *', 'PHKEY': 'HKEY *', 'PINT': 'int *', 'PINT_PTR': 'INT_PTR *', 'PINT8': 'INT8 *', 'PINT16': 'INT16 *', 'PINT32': 'INT32 *', 'PINT64': 'INT64 *', 'PLCID': 'PDWORD', 'PLONG': 'LONG *', 'PLONGLONG': 'LONGLONG *', 'PLONG_PTR': 'LONG_PTR *', 'PLONG32': 'LONG32 *', 'PLONG64': 'LONG64 *', 'PSHORT': 'SHORT *', 'PSIZE_T': 'SIZE_T *', 'PSSIZE_T': 'SSIZE_T *', 'PSTR': 'CHAR *', 'PUCHAR': 'UCHAR *', 'PUHALF_PTR': 'UHALF_PTR *', 'PUINT': 'UINT *', 'PUINT_PTR': 'UINT_PTR *', 'PUINT8': 'UINT8 *', 'PUINT16': 'UINT16 *', 'PUINT32': 'UINT32 *', 'PUINT64': 'UINT64 *', 'PULONG': 'ULONG *', 'PULONGLONG': 'ULONGLONG *', 'PULONG_PTR': 'ULONG_PTR *', 'PULONG32': 'ULONG32 *', 'PULONG64': 'ULONG64 *', 'PUSHORT': 'USHORT *', 'PWCHAR': 'WCHAR *', 'PWORD': 'WORD *', 'PWSTR': 'WCHAR *', 'QWORD': 'unsigned long long', 'SC_HANDLE': 'HANDLE', 'SC_LOCK': 'LPVOID', 'SERVICE_STATUS_HANDLE': 'HANDLE', 'UNICODE_STRING': model.StructType('_UNICODE_STRING', ['Length', 'MaximumLength', 'Buffer'], [model.PrimitiveType('unsigned short'), model.PrimitiveType('unsigned short'), model.PointerType(model.PrimitiveType('wchar_t'))], [(- 1), (- 1), (- 1)]), 'PUNICODE_STRING': 'UNICODE_STRING *', 'PCUNICODE_STRING': 'const UNICODE_STRING *', 'USN': 'LONGLONG', 'VOID': model.void_type, 'WPARAM': 'UINT_PTR', 'TBYTE': 'set-unicode-needed', 'TCHAR': 'set-unicode-needed', 'LPCTSTR': 'set-unicode-needed', 'PCTSTR': 'set-unicode-needed', 'LPTSTR': 'set-unicode-needed', 'PTSTR': 'set-unicode-needed', 'PTBYTE': 'set-unicode-needed', 'PTCHAR': 'set-unicode-needed'})
    return result
class Quantization(object):
    """Weight-quantization schemes for model compression.

    The quantization functions below come from the literature:
      Q6: XNOR-Net (Imagenet classification using binary convolutional neural
          networks): b * quantize_k((W_i - a) / b) + a, where `a` can be the
          mean/median of W and `b` an L1/L2/Linf norm or the variance of W.
      Q2: DoReFa-Net (training low-bitwidth CNNs with low-bitwidth gradients):
          1-bit: (1/n)*||W||_1 * sign(W_i);
          k-bit: 2 * quantize_k(tanh(W_i) / (2 max|tanh(W_i)|) + 0.5) - 1.
      Q5: Training and inference with integers in deep neural networks:
          clip(sigma(k) * round(W_i / sigma(k)), -1 + sigma(k), 1 - sigma(k)),
          with sigma(k) = 2^(1-k).
      Q4: Incremental network quantization (lossless CNNs with low-precision
          weights): power-of-two weight quantization.
    """

    def __init__(self, conf, quantize_model_mode='Q6', normalize_weight_mode='filterwise_mean', standardize_weight_mode='filterwise_std', quantize_model_use_scaling=True, quantize_model_scaling_mode='itself', quantize_model_add_mean_back=True, quantize_model_clip=True):
        """Select the quantization scheme (by method name) and its options."""
        assert quantize_model_mode is not None
        self.conf = conf
        # Dispatch by name to one of the QX methods defined on this class.
        self.quantize_fn = getattr(self, quantize_model_mode)
        self.normalize_weight_mode = normalize_weight_mode
        self.standardize_weight_mode = standardize_weight_mode
        self.quantize_model_use_scaling = quantize_model_use_scaling
        self.quantize_model_scaling_mode = quantize_model_scaling_mode
        self.quantize_model_add_mean_back = quantize_model_add_mean_back
        self.quantize_model_clip = quantize_model_clip

    def _filterwise_mean(self, param_data):
        # Negated per-filter mean of a 4-D weight (reduces dims 3, 2, 1).
        return param_data.mean(dim=3, keepdim=True).mean(dim=2, keepdim=True).mean(dim=1, keepdim=True).mul(-1)

    def _layerwise_mean(self, param_data):
        # Negated mean over the whole tensor.
        return param_data.mean().mul(-1)

    def _filterwise_std(self, param_data, size):
        # Per-filter RMS; callers subtract the mean first, making this a std.
        return torch.sqrt(torch.mean(param_data ** 2, dim=3, keepdim=True).mean(dim=2, keepdim=True).mean(dim=1, keepdim=True))

    def _layerwise_std(self, param_data, size):
        return torch.sqrt(torch.mean(param_data ** 2))

    def _filterwise_norm(self, param_data, norm_type=1):
        """Per-filter p-norm (or max-abs for the infinity norm)."""
        if norm_type == float('inf'):
            maxed, _ = param_data.abs().max(dim=3, keepdim=True)
            maxed, _ = maxed.max(dim=2, keepdim=True)
            maxed, _ = maxed.max(dim=1, keepdim=True)
            return maxed
        return param_data.norm(p=norm_type, dim=3, keepdim=True).sum(dim=2, keepdim=True).sum(dim=1, keepdim=True)

    def _layerwise_norm(self, param_data, norm_type=1):
        return param_data.norm(p=norm_type, keepdim=True)

    def _normalize_weight(self, param_data, size, normalize_weight_mode):
        """Return the (negated) mean to be added for centering, or zeros."""
        if normalize_weight_mode is None:
            return torch.zeros_like(param_data)
        elif normalize_weight_mode == 'filterwise_mean':
            # Fall back to a layerwise mean for non-conv (non-4D) weights.
            if len(size) == 4:
                return self._filterwise_mean(param_data)
            return self._layerwise_mean(param_data)
        elif normalize_weight_mode == 'layerwise_mean':
            return self._layerwise_mean(param_data)
        else:
            raise NotImplementedError

    def _standardize_weight(self, param_data, size, standardize_weight_mode, epsilon=1e-08):
        """Return the denominator used to rescale weights (epsilon-stabilized)."""
        if standardize_weight_mode is None:
            denon = torch.ones_like(param_data)
        elif standardize_weight_mode == 'filterwise_std':
            if len(size) == 4:
                denon = self._filterwise_std(param_data, size)
            else:
                denon = self._layerwise_std(param_data, size)
            denon = denon.add_(epsilon)
        elif 'linf_norm' in standardize_weight_mode:
            norm_type = float('inf')
            if (len(size) == 4) and ('filterwise' in standardize_weight_mode):
                denon = self._filterwise_norm(param_data, norm_type=norm_type)
            else:
                denon = self._layerwise_norm(param_data, norm_type=norm_type)
            denon = denon.add_(epsilon)
        else:
            raise NotImplementedError
        return denon

    def _quantize(self, x, n_bits):
        """Uniform symmetric quantizer on [-1, 1]; sign() for 1 bit."""
        if n_bits == 1:
            return torch.sign(x)
        _sign = torch.sign(x)
        _abs = x.abs()
        n = 1.0 * ((2 ** (n_bits - 1)) - 1)
        return (_sign * torch.round(n * _abs)) / n

    def Q6(self, param_name, param, n_bits, epsilon=1e-08):
        """XNOR-Net-style quantization: center, rescale, quantize, then
        optionally scale back and re-add the mean."""
        param_data = param.data.clone()
        size = param_data.size()
        neg_mean = self._normalize_weight(param_data, size, normalize_weight_mode=self.normalize_weight_mode)
        param_data.add_(neg_mean.expand(size))
        denon = self._standardize_weight(param_data, size, standardize_weight_mode=self.standardize_weight_mode)
        param_data.div_(denon.expand(size))
        if self.quantize_model_clip:
            param_data.clamp_(-1.0, 1.0)
        if self.quantize_model_use_scaling:
            if 'l1' in self.quantize_model_scaling_mode:
                nelement = param_data.nelement()
                # NOTE(review): assumes normalize_weight_mode is not None here.
                if (len(size) == 4) and ('filterwise' in self.normalize_weight_mode):
                    scaling = self._filterwise_norm(param_data, norm_type=1)
                else:
                    scaling = self._layerwise_norm(param_data, norm_type=1)
                scaling = scaling.div(nelement).expand(size)
                if 'both' in self.quantize_model_scaling_mode:
                    scaling.mul_(denon)
            elif self.quantize_model_scaling_mode == 'itself':
                scaling = denon.expand(size)
        else:
            scaling = 1
        if self.quantize_model_add_mean_back:
            mean = -neg_mean.expand(size)
        else:
            mean = 0
        return self._quantize(param_data, n_bits).mul(scaling) + mean

    def Q2(self, param_name, param, n_bits, epsilon=1e-08):
        """DoReFa-Net-style quantization."""
        param_data = param.data.clone()
        size = param_data.size()
        if self.quantize_model_use_scaling:
            # 1-bit only: scale sign(W) by the mean absolute weight.
            assert n_bits == 1
            alpha = param_data.abs().mean() + epsilon
            alpha = alpha.expand(size)
            return torch.sign(param_data.div(alpha)).mul(alpha)
        else:
            _d = torch.tanh(param_data)
            _d_abs_max = _d.abs().max()
            # Guard against division by zero when all weights are zero.
            _epsilon = epsilon if _d_abs_max == 0 else 0
            if n_bits == 1:
                return torch.sign(param_data)
            return (2 * self._quantize(((0.5 * _d) / (_d_abs_max + _epsilon)) + 0.5, n_bits)) - 1

    def Q5(self, param_name, param, n_bits):
        """WAGE-style integer quantization: clip then round on a 2^(1-k) grid."""
        def _sigma(bits):
            return 2.0 ** (bits - 1)

        def _shift_factor(x):
            return 2 ** torch.round(torch.log2(x))

        def _c(x, bits):
            # No clipping margin for full precision (>15 bits) or binary.
            delta = 0.0 if ((bits > 15) or (bits == 1)) else (1.0 / _sigma(bits))
            return x.clamp((-1) + delta, (+1) - delta)

        def _q(x, bits):
            if bits > 15:
                return x
            elif bits == 1:
                return torch.sign(x)
            _scale = _sigma(bits)
            return torch.round(x * _scale) / _scale

        param_data = param.data.clone()
        return _q(_c(param_data, n_bits), n_bits)

    def Q4(self, param_name, param, n_bits):
        """INQ-style quantization: snap each weight to a signed power of two
        (or zero) within the range determined by the largest weight."""
        param_data = param.data.clone()
        s = param_data.abs().max().item()
        if s == 0:
            return param_data
        n_1 = math.floor(math.log((4 * s) / 3, 2))
        n_2 = int((n_1 + 1) - ((2 ** (n_bits - 1)) / 2))

        def _quantize_weight(weight):
            alpha = 0
            beta = 2 ** n_2
            abs_weight = math.fabs(weight)
            for i in range(int(n_2), int(n_1) + 1):
                if (abs_weight >= ((alpha + beta) / 2)) and (abs_weight < ((3 * beta) / 2)):
                    return math.copysign(beta, weight)
                alpha = 2 ** i
                beta = 2 ** (i + 1)
            return 0

        # BUG FIX: the original discarded the result of
        # ``param_data.cpu().apply_(...).to(device)`` — apply_() mutates the
        # *CPU copy*, so for GPU tensors the unquantized clone was returned.
        # Assign the round-tripped result back instead.
        device = param_data.device
        param_data = param_data.cpu().apply_(_quantize_weight).to(device)
        return param_data

    def quantize(self, param_name, param, n_bits):
        """Quantize *param* with the scheme selected at construction time."""
        return self.quantize_fn(param_name, param, n_bits)
def get_compiled_workflow_closure():
    """Load the canned CompiledWorkflowClosure protobuf fixture from the
    ``responses`` directory next to this module and wrap it in the IDL model."""
    proto = _compiler_pb2.CompiledWorkflowClosure()
    response_path = os.path.join(os.path.dirname(__file__), 'responses', 'CompiledWorkflowClosure.pb')
    with open(os.path.abspath(response_path), 'rb') as handle:
        proto.ParseFromString(handle.read())
    return CompiledWorkflowClosure.from_flyte_idl(proto)
# NOTE(review): the two decorator lines below had their callables stripped in
# the source (only the argument lists remained, which is a syntax error).
# Restored as hypothesis's @settings/@given — the standard pairing for these
# arguments; confirm against version control.
@settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
@given(xtgeo_compatible_egrids(head=xtgeo_compatible_egridheads(mpaxes=st.just(None))))
def test_egrid_from_xtgeo(tmp_path, egrid):
    """Round-trip a generated EGrid through xtgeo and back, checking that
    zcorn, coord and actnum survive (treating all-ones actnum as None)."""
    tmp_file = tmp_path / 'grid.EGRID'
    egrid.to_file(tmp_file)
    with pytest.warns(UserWarning):
        xtgeo_grid = xtg.grid_from_file(tmp_file, relative_to=xtge.GridRelative.ORIGIN)
    roundtrip_grid = xtge.EGrid.from_xtgeo_grid(xtgeo_grid)
    assert roundtrip_grid.zcorn.tolist() == egrid.zcorn.tolist()
    assert roundtrip_grid.coord.tolist() == egrid.coord.tolist()
    # actnum of None and actnum of all ones are equivalent representations.
    if roundtrip_grid.actnum is None:
        assert (egrid.actnum is None) or all(egrid.actnum == 1)
    elif egrid.actnum is None:
        assert (roundtrip_grid.actnum is None) or all(roundtrip_grid.actnum == 1)
    else:
        assert roundtrip_grid.actnum.tolist() == egrid.actnum.tolist()
def _get_todays_backup_path() -> str:
    """Return the path of today's SIAC notes-database backup file.

    Uses the folder configured for the add-on, falling back to the add-on's
    user-files folder when unset or empty.
    """
    folder = mw.addonManager.getConfig(__name__)['addonNoteDBFolderPath']
    if not folder:
        folder = utility.misc.get_user_files_folder_path()
    # NOTE(review): assumes the configured folder ends with a path separator — confirm.
    backup_path = folder + f'siac_backups/siac-notes.backup.{utility.date.date_only_stamp()}.db'
    return backup_path.strip()
def dq_a(m0, m1, m2, o0, o1, o2, n0, n1, n2):
    """Auto-generated (common-subexpression-eliminated) arbitrary-precision
    arithmetic over the points M=(m0,m1,m2), O=(o0,o1,o2), N=(n0,n1,n2).

    NOTE(review): from the structure (x6=|M-O|^2, x10=|N-O|^2, x11=(M-O)·(N-O),
    x12=1/sqrt(1-cos^2)) this is presumably the gradient of the angle M-O-N
    with respect to the nine input coordinates — confirm against the generator.
    Returns a 9-element float64 numpy array.
    """
    # Promote every input to an mpmath arbitrary-precision float.
    m0 = mpmath.mpf(m0)
    m1 = mpmath.mpf(m1)
    m2 = mpmath.mpf(m2)
    o0 = mpmath.mpf(o0)
    o1 = mpmath.mpf(o1)
    o2 = mpmath.mpf(o2)
    n0 = mpmath.mpf(n0)
    n1 = mpmath.mpf(n1)
    n2 = mpmath.mpf(n2)
    # Difference vectors M-O and N-O (x0/x2/x4 are the negated O components).
    x0 = (- o0)
    x1 = (m0 + x0)
    x2 = (- o1)
    x3 = (m1 + x2)
    x4 = (- o2)
    x5 = (m2 + x4)
    x6 = (((x1 ** 2) + (x3 ** 2)) + (x5 ** 2))
    x7 = (n0 + x0)
    x8 = (n1 + x2)
    x9 = (n2 + x4)
    x10 = (((x7 ** 2) + (x8 ** 2)) + (x9 ** 2))
    # Dot product and derivative prefactors.
    x11 = (((x1 * x7) + (x3 * x8)) + (x5 * x9))
    x12 = (1 / mpmath.sqrt((1 - ((x11 ** 2) / (x10 * x6)))))
    x13 = (1 / mpmath.sqrt(x6))
    x14 = (1 / mpmath.sqrt(x10))
    x15 = (x13 * x14)
    x16 = ((x11 * x14) / (x6 ** (mpmath.mpf(3) / mpmath.mpf(2))))
    x17 = ((x11 * x13) / (x10 ** (mpmath.mpf(3) / mpmath.mpf(2))))
    # Assemble the nine partial derivatives, converting back to float64.
    return np.array([((- x12) * (((- x1) * x16) + (x15 * x7))), ((- x12) * ((x15 * x8) - (x16 * x3))), ((- x12) * ((x15 * x9) - (x16 * x5))), ((- x12) * (((x1 * x16) + (x15 * (((- m0) - n0) + (2 * o0)))) + (x17 * x7))), ((- x12) * (((x15 * (((- m1) - n1) + (2 * o1))) + (x16 * x3)) + (x17 * x8))), ((- x12) * (((x15 * (((- m2) - n2) + (2 * o2))) + (x16 * x5)) + (x17 * x9))), ((- x12) * ((x1 * x15) - (x17 * x7))), ((- x12) * ((x15 * x3) - (x17 * x8))), ((- x12) * ((x15 * x5) - (x17 * x9)))], dtype=np.float64)
def run_efc(get_image, dark_zone, num_modes, jacobian, rcond=0.01, num_iterations=50, loop_gain=None):
    """Run an electric-field-conjugation (EFC) control loop.

    get_image      -- callable mapping actuator vector -> object with
                      ``electric_field`` and ``intensity`` attributes.
    dark_zone      -- boolean mask selecting the dark-zone pixels.
    num_modes      -- number of actuator modes (length of the command vector).
    jacobian       -- response matrix inverted via Tikhonov regularization.
    rcond          -- Tikhonov regularization parameter.
    num_iterations -- number of control iterations (was hard-coded to 50).
    loop_gain      -- control gain; defaults to the module-level
                      ``efc_loop_gain`` for backward compatibility.

    Returns (actuators, electric_fields, images) histories, one entry per iteration.
    """
    efc_matrix = inverse_tikhonov(jacobian, rcond)
    current_actuators = np.zeros(num_modes)
    actuators = []
    electric_fields = []
    images = []
    for _ in range(num_iterations):
        img = get_image(current_actuators)
        electric_field = img.electric_field
        actuators.append(current_actuators.copy())
        electric_fields.append(electric_field)
        images.append(img.intensity)
        # Stack real and imaginary dark-zone field values into one state vector.
        x = np.concatenate((electric_field[dark_zone].real, electric_field[dark_zone].imag))
        y = efc_matrix.dot(x)
        gain = efc_loop_gain if loop_gain is None else loop_gain
        current_actuators -= gain * y
    return (actuators, electric_fields, images)
def get_fake_table_resource(fake_table_data_input):
    """Build the fake table resource described by *fake_table_data_input*,
    applying its expiration time when one is set."""
    creater = FakeTableDataCreater(fake_table_data_input.table_id, fake_table_data_input.dataset)
    expiration = fake_table_data_input.expiration_time
    if expiration is not None:
        creater.SetExpirationTime(expiration)
    return creater.get_resource()
class TimeStampField(IntegerField):
    """IntegerField validating Unix timestamps, defaulting the accepted range
    to [one hour ago, now].

    NOTE(review): the two kwargs assignments in ``__init__`` were truncated in
    the source (``kwargs['min_value'] =`` with no right-hand side). They are
    reconstructed here to use the helper callables defined on this class,
    which is the only plausible reading of the surrounding code — confirm
    against version control.
    """
    type_name = 'TimeStampField'
    default_error_messages = {'invalid': _('Enter a valid timestamp.'), 'max_value': _('Ensure this value is less than or equal to %(limit_value)s.'), 'min_value': _('Ensure this value is greater than or equal to %(limit_value)s.')}

    def __init__(self, *args, **kwargs):
        if 'min_value' not in kwargs:
            kwargs['min_value'] = TimeStampField.one_hour_ago()
        if 'max_value' not in kwargs:
            kwargs['max_value'] = TimeStampField.now()
        super(TimeStampField, self).__init__(*args, **kwargs)

    # The originals were plain defs (no self); declared static so they are
    # callable both via the class and via instances.
    @staticmethod
    def one_hour_ago():
        # Unix timestamp for one hour before the current time.
        return int((timezone.now() - timezone.timedelta(hours=1)).strftime('%s'))

    @staticmethod
    def now():
        # Unix timestamp for the current time.
        return int(timezone.now().strftime('%s'))
def read_groups_ltsp(filename):
    """Parse an LTSP-style config file into epoptes client/group structures.

    The file is scanned line by line for ``# EPOPTES_GROUP = <name>`` markers,
    ``[<mac>]`` section headers and ``HOSTNAME = <name>`` entries; only MAC
    sections seen while a group is active are collected.

    Returns (clients, groups): a view of offline Client structs and a list of
    Group structs whose members reference those clients.
    """
    data = read_plain_file(filename)
    if (not data):
        return ([], [])
    groups = {}
    group = None
    mac = None
    hostname = None
    for line in data:
        # Group marker comment: switches the active group ('None' clears it).
        s = re.search('^#\\s*EPOPTES_GROUP\\s*=\\s*(.*)$', line)
        if s:
            group = s.groups()[0]
            if (group == 'None'):
                group = None
            mac = None
            continue
        # MAC-address section header, e.g. [aa:bb:cc:dd:ee:ff] — only tracked
        # while a group is active.
        s = re.search('^\\[([0-9a-fA-F:]{17})\\]', line)
        if s:
            if group:
                mac = s.groups()[0]
            continue
        # Any other [section] header ends the current MAC section.
        s = re.search('^\\[(.*)\\]', line)
        if s:
            mac = None
            continue
        # HOSTNAME entry: only meaningful inside an active group + MAC section.
        s = re.search('^\\s*HOSTNAME\\s*=\\s*(.*)$', line)
        if ((not s) or (not group) or (not mac)):
            continue
        hostname = s.groups()[0]
        if (group not in groups):
            groups[group] = {}
        groups[group][mac] = hostname
        # One HOSTNAME per MAC section.
        mac = None
    # Build one offline Client struct per (group, mac) pair...
    clients_struct = {}
    for group in groups:
        for mac in groups[group]:
            alias = groups[group][mac]
            new = structs.Client('offline', mac, '', alias)
            clients_struct[(group + mac)] = new
    # ...then one Group struct per group, referencing those clients.
    groups_struct = []
    for group in groups:
        members = {}
        for mac in groups[group]:
            members[clients_struct[(group + mac)]] = {}
        groups_struct.append(structs.Group(group, members))
    return (clients_struct.values(), groups_struct)
class InlineQueryResultBase(ABC, Dictionaryable, JsonSerializable):
    """Common base for Telegram inline query results: stores the shared fields
    and serializes them to a dict / JSON, omitting unset optional fields."""

    def __init__(self, type, id, title=None, caption=None, input_message_content=None, reply_markup=None, caption_entities=None, parse_mode=None):
        self.type = type
        self.id = id
        self.title = title
        self.caption = caption
        self.input_message_content = input_message_content
        self.reply_markup = reply_markup
        self.caption_entities = caption_entities
        self.parse_mode = parse_mode

    def to_json(self):
        """JSON string form of :meth:`to_dict`."""
        return json.dumps(self.to_dict())

    def to_dict(self):
        """Dict with 'type' and 'id' plus every truthy optional field, each
        converted with its serializer where one is needed."""
        optional_fields = (
            ('title', self.title, None),
            ('caption', self.caption, None),
            ('input_message_content', self.input_message_content, lambda v: v.to_dict()),
            ('reply_markup', self.reply_markup, lambda v: v.to_dict()),
            ('caption_entities', self.caption_entities, MessageEntity.to_list_of_dicts),
            ('parse_mode', self.parse_mode, None),
        )
        payload = {'type': self.type, 'id': self.id}
        for key, value, convert in optional_fields:
            if value:
                payload[key] = convert(value) if convert else value
        return payload
def test_flaskbb_config(default_settings):
    """Settings are readable, writable and iterable through FlaskBBConfig."""
    config = FlaskBBConfig()
    assert len(config) > 0
    assert config['PROJECT_TITLE'] == 'FlaskBB'
    # Writing through the mapping persists and reads back.
    config['PROJECT_TITLE'] = 'FlaskBBTest'
    assert config['PROJECT_TITLE'] == 'FlaskBBTest'
    # Iteration yields the setting keys.
    assert 'PROJECT_TITLE' in list(iter(config))
class OptionPlotoptionsSankeySonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Generated options accessors for the sonification playDelay mapping.

    NOTE(review): in the source each getter/setter pair was declared as two
    plain methods with the same name, so every getter was silently overwritten
    by its setter. Restored the @property/@<name>.setter pattern this kind of
    generated options class uses; confirm against the generator output.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesAreasplinerangeData(Options):
    """Generated per-point options accessors for areasplinerange series data.

    NOTE(review): in the source each getter/setter pair was declared as two
    plain methods with the same name, so every getter was silently overwritten
    by its setter. Restored the @property/@<name>.setter pattern this kind of
    generated options class uses (sub-option accessors are read-only
    properties); confirm against the generator output.
    """

    @property
    def accessibility(self) -> 'OptionSeriesAreasplinerangeDataAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesAreasplinerangeDataAccessibility)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorIndex(self):
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def custom(self):
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dataLabels(self) -> 'OptionSeriesAreasplinerangeDataDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesAreasplinerangeDataDatalabels)

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def dragDrop(self) -> 'OptionSeriesAreasplinerangeDataDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesAreasplinerangeDataDragdrop)

    @property
    def drilldown(self):
        return self._config_get(None)

    @drilldown.setter
    def drilldown(self, text: str):
        self._config(text, js_type=False)

    @property
    def events(self) -> 'OptionSeriesAreasplinerangeDataEvents':
        return self._config_sub_data('events', OptionSeriesAreasplinerangeDataEvents)

    @property
    def high(self):
        return self._config_get(None)

    @high.setter
    def high(self, num: float):
        self._config(num, js_type=False)

    @property
    def id(self):
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def labelrank(self):
        return self._config_get(None)

    @labelrank.setter
    def labelrank(self, num: float):
        self._config(num, js_type=False)

    @property
    def low(self):
        return self._config_get(None)

    @low.setter
    def low(self, num: float):
        self._config(num, js_type=False)

    @property
    def name(self):
        return self._config_get(None)

    @name.setter
    def name(self, text: str):
        self._config(text, js_type=False)

    @property
    def selected(self):
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)
class CompoundEditor(Editor):
    """Editor whose control is a vertical stack of sub-editor controls, one per
    factory in ``self.factory.editors``."""

    # Which editor flavor to build from each sub-factory — presumably
    # 'simple_editor'/'custom_editor' style attribute names; confirm.
    kind = Str()

    def init(self, parent):
        """Create the container widget and populate it with the sub-editors."""
        self.control = QtGui.QWidget()
        layout = QtGui.QVBoxLayout(self.control)
        layout.setContentsMargins(0, 0, 0, 0)
        self._editors = []
        for sub_factory in self.factory.editors:
            sub_editor = getattr(sub_factory, self.kind)(self.ui, self.object, self.name, self.description, None)
            sub_editor.prepare(self.control)
            layout.addWidget(sub_editor.control)
            self._editors.append(sub_editor)

    def update_editor(self):
        """Nothing to do; the sub-editors keep themselves up to date."""
        pass

    def dispose(self):
        """Dispose every sub-editor, then this editor itself."""
        for sub_editor in self._editors:
            sub_editor.dispose()
        super().dispose()
class MDockPane(HasTraits):
    """Mixin holding the common state shared by dock pane implementations."""

    # Whether the pane shows a close button.
    closable = Bool(True)
    # Side of the window the pane docks to.
    dock_area = Enum('left', 'right', 'top', 'bottom')
    # Whether the pane may be detached into a floating window.
    floatable = Bool(True)
    # Whether the pane is currently floating.
    floating = Bool(False)
    # Whether the user may drag the pane to another dock area.
    movable = Bool(True)
    # Requested (width, height); empty tuple means unconstrained.
    size = Tuple()
    visible = Bool(False)
    # Whether the pane's caption/title bar is shown.
    caption_visible = Bool(True)
    # NOTE(review): Bool(0) with an integer default looks suspicious — a dock
    # "layer" is normally an integer index, so Int(0) may have been intended.
    # Confirm before changing the trait type.
    dock_layer = Bool(0)

    def hide(self):
        """Hide the pane (sets ``visible`` to False)."""
        self.visible = False

    def show(self):
        """Show the pane (sets ``visible`` to True)."""
        self.visible = True
def prove_low_degree(values, root_of_unity, maxdeg_plus_1, modulus, exclude_multiples_of=0):
    """Produce an FRI proof that ``values`` are the evaluations of a polynomial
    of degree < ``maxdeg_plus_1`` over the cycle generated by ``root_of_unity``
    modulo ``modulus``.

    Each recursion folds the domain 4-to-1 (quartic interpolation at a
    Merkle-root-derived point) and quarters the degree bound until it is small
    enough to send the values directly. Returns a list of proof components.
    """
    f = PrimeField(modulus)
    print(('Proving %d values are degree <= %d' % (len(values), maxdeg_plus_1)))
    # Base case: degree bound small enough to just ship the raw values.
    if (maxdeg_plus_1 <= 16):
        print('Produced FRI proof')
        return [[x.to_bytes(32, 'big') for x in values]]
    # Evaluation domain: successive powers of the root of unity.
    xs = get_power_cycle(root_of_unity, modulus)
    assert (len(values) == len(xs))
    # Commit to the evaluations; the Merkle root seeds the column point.
    m = merkelize(values)
    special_x = (int.from_bytes(m[1], 'big') % modulus)
    # Interpolate a quartic through each coset of 4 domain points, then
    # evaluate all quartics at special_x to form the folded column.
    quarter_len = (len(xs) // 4)
    x_polys = f.multi_interp_4([[xs[(i + (quarter_len * j))] for j in range(4)] for i in range(quarter_len)], [[values[(i + (quarter_len * j))] for j in range(4)] for i in range(quarter_len)])
    column = [f.eval_quartic(p, special_x) for p in x_polys]
    m2 = merkelize(column)
    # Pseudo-randomly choose 40 column indices to spot-check consistency
    # between the column and the original evaluations.
    ys = get_pseudorandom_indices(m2[1], len(column), 40, exclude_multiples_of=exclude_multiples_of)
    poly_positions = sum([[(y + ((len(xs) // 4) * j)) for j in range(4)] for y in ys], [])
    # Proof component: column root plus Merkle branches into both trees.
    o = [m2[1], mk_multi_branch(m2, ys), mk_multi_branch(m, poly_positions)]
    # Recurse on the folded column with the 4th power of the root of unity.
    return ([o] + prove_low_degree(column, f.exp(root_of_unity, 4), (maxdeg_plus_1 // 4), modulus, exclude_multiples_of=exclude_multiples_of))
class SearchExportResultGenerator(CursorResultsGenerator):
    """Cursor generator that maps raw search-export JSON into typed API objects."""

    def process_page(self):
        """Turn every raw result on the current page into an API object.

        Each raw entry carries its own 'result_type' discriminator, which is
        popped off and used to pick the target type.
        """
        mapping = self.response_handler.api._object_mapping
        objects = []
        for raw in self._response_json['results']:
            kind = raw.pop('result_type')
            objects.append(mapping.object_from_json(kind, raw))
        return objects
def main(model_base_path, image, warm_up_itr, study_itr):
    """Benchmark a frozen TF1 object-detection graph on a single image.

    Loads `frozen_inference_graph.pb` from `model_base_path`, runs
    `warm_up_itr` warm-up inferences, then times `study_itr` inferences.
    Returns (total_time, avg_time) in seconds.
    """
    print('Loading frozen graph...')
    frozen_model_path = os.path.join(model_base_path, 'frozen_inference_graph.pb')
    st_time = time()
    # Add a batch dimension: (H, W, C) -> (1, H, W, C).
    image_np_expanded = np.expand_dims(image, axis=0)
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        # TF1-style graph import from a serialized GraphDef.
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(frozen_model_path, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')
    print('time taken for loading graph is {}'.format((time() - st_time)))
    with detection_graph.as_default():
        with tf.Session(graph=detection_graph) as sess:
            # Standard TF object-detection API tensor names.
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')
            # Warm-up runs are excluded from timing.
            print('warming up the device for {} iters'.format(warm_up_itr))
            for _ in range(warm_up_itr):
                (boxes, scores, class_ids, num) = sess.run([detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_np_expanded})
            print('Running time study for model {} for {} iterations'.format(os.path.basename(model_base_path), study_itr))
            start_time = time()
            for _ in tqdm(range(study_itr)):
                (boxes, scores, class_ids, num) = sess.run([detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_np_expanded})
            end_time = time()
    total_time = (end_time - start_time)
    avg_time = (total_time / study_itr)
    print('Total time for {} iters is {} secs'.format(study_itr, total_time))
    print('Average time per iter is {}'.format(avg_time))
    # Release the graph so repeated calls do not accumulate state.
    tf.reset_default_graph()
    del detection_graph
    print('Graph cleared!')
    return (total_time, avg_time)
class OptionSeriesArearangeStatesSelectHalo(Options):
    """Config proxy for the Highcharts arearange series' select-state halo.

    NOTE(review): each option appears twice (getter-style then setter-style
    def with the same name); the '@property' / '@<name>.setter' decorators
    look stripped during extraction, so as written each second def shadows
    the first -- confirm upstream before relying on the getters.
    """

    def attributes(self):
        # Default: no extra SVG attributes.
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def opacity(self):
        # Default halo opacity.
        return self._config_get(0.25)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def size(self):
        # Default halo size in pixels.
        return self._config_get(10)
    def size(self, num: float):
        self._config(num, js_type=False)
class BasicObject():
    """Base class for parsed logical-file objects.

    Subclasses declare `attributes` (name -> parse strategy) and `linkage`
    (name -> reference type) to control how raw attic values are converted.

    NOTE(review): `fingerprint` and `stash` are used as plain attributes at
    several call sites (e.g. `self.fingerprint`, `len(self.stash)`) -- their
    '@property' decorators look stripped during extraction; confirm upstream.
    """

    # name -> parse strategy for known attributes (overridden by subclasses).
    attributes = {}
    # name -> reference type for attributes that hold object references.
    linkage = {}

    def __init__(self, attic, lf):
        self.type = attic.type
        self.name = attic.name.id
        self.origin = attic.name.origin
        self.copynumber = attic.name.copynumber
        # Raw, unparsed object record.
        self.attic = attic
        # Owning logical file; used for lookups and error reporting.
        self.logicalfile = lf

    def __repr__(self):
        return '{}({})'.format(self.type.capitalize(), self.name)

    def __getitem__(self, key):
        """Return the parsed value of attribute `key`.

        Unknown keys raise KeyError; known-but-absent/empty values yield the
        parse strategy's default. Any parse errors recorded on the attic are
        forwarded to the logical file's error handler.
        """
        if ((key not in self.attributes) and (key not in self.attic.keys())):
            raise KeyError("'{}'".format(key))
        try:
            parse_as = self.attributes[key]
        except KeyError:
            # Attribute present in the attic but not declared: parse as vector.
            parse_as = utils.vector
        # Forward object-level parse errors before reading the attribute.
        if (len(self.attic.log) > 0):
            context = '{}'.format(self.fingerprint)
            for error in self.attic.log:
                self.logicalfile.error_handler.log(error.severity, context, error.problem, error.specification, error.action, '')
        try:
            attribute = self.attic[key]
        except KeyError:
            return utils.defaultvalue(parse_as)
        rp66value = attribute.value
        # Forward attribute-level parse errors.
        if (len(attribute.log) > 0):
            context = '{}-A.{}'.format(self.fingerprint, key)
            for error in attribute.log:
                self.logicalfile.error_handler.log(error.severity, context, error.problem, error.specification, error.action, '')
        if (rp66value is None):
            return utils.defaultvalue(parse_as)
        if (rp66value == []):
            return utils.defaultvalue(parse_as)
        if ((key in self.linkage) and utils.isreference(rp66value[0])):
            # Resolve object references through the logical file.
            reftype = self.linkage[key]
            value = [utils.lookup(self.logicalfile, reftype, v) for v in rp66value]
        else:
            # Plain values; strip padding from strings.
            value = [(v.strip() if isinstance(v, str) else v) for v in rp66value]
        return utils.parsevalue(value, parse_as)

    def __eq__(self, rhs):
        # Two objects are equal iff their raw attic records are equal.
        try:
            return (self.attic == rhs.attic)
        except AttributeError:
            return False

    def fingerprint(self):
        """Unique identifier derived from type, name, origin and copynumber."""
        return core.fingerprint(self.attic.type, self.attic.name.id, self.attic.name.origin, self.attic.name.copynumber)

    def stash(self):
        """Return attic attributes that are not declared in `attributes`."""
        stash = {key: self.attic[key].value for key in self.attic.keys() if (key not in self.attributes)}
        for (key, value) in stash.items():
            value = [(v.strip() if isinstance(v, str) else v) for v in value]
            stash[key] = value
        return stash

    def describe(self, width=80, indent='', exclude='er'):
        """Return a human-readable Summary of the object.

        `exclude` is an option string parsed by utils.parseoptions; it can
        suppress the header, declared attributes, and/or unknown attributes.
        """
        from io import StringIO
        buf = StringIO()
        exclude = utils.parseoptions(exclude)
        if (not exclude['head']):
            utils.describe_header(buf, self.type.capitalize(), width, indent)
            utils.describe_dict(buf, utils.headerinfo(self), width, indent, exclude)
        if (not exclude['attr']):
            self.describe_attr(buf, indent=indent, width=width, exclude=exclude)
        if (not exclude['stash']):
            if (len(self.stash) > 0):
                utils.describe_header(buf, 'Unknown attributes', width, indent, lvl=2)
                d = {k: k for k in self.stash.keys()}
                utils.describe_attributes(buf, d, self, width, indent, exclude)
        return utils.Summary(info=buf.getvalue())

    def describe_attr(self, buf, width, indent, exclude):
        # Hook for subclasses to describe their declared attributes.
        pass
# NOTE(review): this line appears to be a '@pytest.mark.parametrize' decorator
# whose leading '@pytest.mark' prefix was stripped during extraction; as
# written it is not valid module-level syntax -- confirm upstream.
.parametrize('abi_type,should_match', (('uint', False), ('uint32', True), ('uint255', False), ('uint256', True), ('uint0', False), ('int', False), ('int16', False), ('suint', False), ('uints', False)))
def test_is_uint_type(abi_type, should_match):
    """is_uint_type() should accept only properly sized uintN ABI types."""
    is_match = is_uint_type(abi_type)
    assert (is_match is should_match)
class BuildDocs(Command):
    """setup.py command that builds the Sphinx documentation.

    Runs ``make html`` inside the ``docs`` directory. Windows builds are not
    implemented; a message is printed instead.
    """

    description = 'This command generates the documentation by running Sphinx. It then zips the docs into an html.zip file.'
    user_options = [('None', None, 'this command has no options')]

    def make_docs(self):
        """Invoke the Sphinx Makefile (POSIX only)."""
        if (os.name == 'nt'):
            # Bug fix: corrected the typo "impelemnt" in the user-facing message.
            print('Please implement sphinx building on windows here.')
        else:
            subprocess.call(['make', 'html'], cwd='docs')

    def run(self):
        """Entry point called by distutils/setuptools."""
        self.make_docs()

    def initialize_options(self):
        # No options to initialize.
        pass

    def finalize_options(self):
        # No options to finalize.
        pass
def generic_create(evm: Evm, endowment: U256, contract_address: Address, memory_start_position: U256, memory_size: U256) -> None:
    """Core of the CREATE/CREATE2 opcodes: spawn a child contract.

    Forwards all-but-one-64th of the remaining gas to the child, performs the
    balance/nonce/depth preconditions, and pushes the new contract address
    (or 0 on failure) onto the stack.
    """
    # Local import to avoid a circular dependency with the interpreter.
    from ...vm.interpreter import STACK_DEPTH_LIMIT, process_create_message
    # Forward the maximum message-call gas to the child frame.
    create_message_gas = max_message_call_gas(Uint(evm.gas_left))
    evm.gas_left -= create_message_gas
    # CREATE is a state-modifying operation: forbidden in static context.
    ensure((not evm.message.is_static), WriteInStaticContext)
    evm.return_data = b''
    sender_address = evm.message.current_target
    sender = get_account(evm.env.state, sender_address)
    # Fail (push 0, refund the child gas) if the sender cannot pay the
    # endowment, its nonce is at the maximum value, or call depth is exceeded.
    if ((sender.balance < endowment) or (sender.nonce == Uint(((2 ** 64) - 1))) or ((evm.message.depth + 1) > STACK_DEPTH_LIMIT)):
        evm.gas_left += create_message_gas
        push(evm.stack, U256(0))
        return
    # Address collision: nonce is still burned, but no child is created and
    # the forwarded gas is consumed.
    if account_has_code_or_nonce(evm.env.state, contract_address):
        increment_nonce(evm.env.state, evm.message.current_target)
        push(evm.stack, U256(0))
        return
    # The init code is read from memory.
    call_data = memory_read_bytes(evm.memory, memory_start_position, memory_size)
    increment_nonce(evm.env.state, evm.message.current_target)
    child_message = Message(caller=evm.message.current_target, target=Bytes0(), gas=create_message_gas, value=endowment, data=b'', code=call_data, current_target=contract_address, depth=(evm.message.depth + 1), code_address=None, should_transfer_value=True, is_static=False, parent_evm=evm)
    child_evm = process_create_message(child_message, evm.env)
    if child_evm.error:
        # On failure the child's output becomes the caller's return data.
        incorporate_child_on_error(evm, child_evm)
        evm.return_data = child_evm.output
        push(evm.stack, U256(0))
    else:
        incorporate_child_on_success(evm, child_evm)
        evm.return_data = b''
        push(evm.stack, U256.from_be_bytes(child_evm.message.current_target))
# NOTE(review): this line appears to be a '@pytest.mark.parametrize' decorator
# whose '@pytest.mark' prefix was stripped during extraction. The literal
# strings also look like they lost non-ASCII characters (the encoding-specific
# payloads are mostly empty) -- confirm against upstream.
.parametrize(('code_cell_content', 'encoding'), [(['import foo', "print('')"], 'utf-8'), (['import foo', "print('\tC')"], 'iso-8859-15'), (['import foo', "print('')"], 'utf-16'), (['import foo', "my_string = ''"], None)])
def test_import_parser_file_encodings_ipynb(code_cell_content: list[str], encoding: (str | None), tmp_path: Path) -> None:
    """Imports must be found in .ipynb code cells regardless of file encoding."""
    random_file = Path(f'file_{uuid.uuid4()}.ipynb')
    with run_within_dir(tmp_path):
        # Write a minimal single-cell notebook in the requested encoding.
        with random_file.open('w', encoding=encoding) as f:
            file_content = {'cells': [{'cell_type': 'code', 'metadata': {}, 'source': code_cell_content}]}
            f.write(json.dumps(file_content))
        assert (get_imported_modules_from_file(random_file) == {'foo': [Location(random_file, 1, 0)]})
class SessionPoolManager(object):
    """Borg-style registry of viewer sessions keyed by session cookie.

    All instances share state via `_shared_state`, so any instance sees the
    same session pool. Sessions are created lazily on first access and pruned
    by age or when the pool exceeds `max_sessions`.
    """

    # Borg pattern: every instance's __dict__ aliases this dict.
    _shared_state = {}
    # Hard cap on concurrently tracked sessions.
    max_sessions = 20
    lastSession = 0
    # key -> ViewerSession; class-level, shared by design.
    sessions = {}
    log = logging.getLogger('Main.SessionMgr')

    def __init__(self):
        self.__dict__ = self._shared_state

    def __getitem__(self, key):
        """Return the session for `key`, creating one if absent."""
        if (key not in self.sessions):
            self.sessions[key] = ViewerSession()
        return self.sessions[key]

    def __contains__(self, key):
        return (key in self.sessions)

    def prune(self):
        """Drop stale sessions, then enforce the max_sessions limit by
        evicting the least recently accessed sessions."""
        self.log.info('Checking if any of %s session cookies need to be pruned due to age', len(self.sessions))
        # Iterate over a snapshot of the keys since we mutate the dict.
        for key in list(self.sessions.keys()):
            if self.sessions[key].shouldPrune():
                self.log.info('Pruning stale session with ID %s', key)
                try:
                    self.sessions.pop(key)
                except KeyError:
                    # Should be unreachable; dump the pool state for debugging.
                    self.log.error('Failed to prune session?')
                    self.log.error('Current sessions:')
                    for (key, session) in self.sessions.items():
                        self.log.error("\t'{key}' -> '{sess}'".format(key=key, sess=session))
        if (len(self.sessions) > self.max_sessions):
            self.log.info('Need to prune sessions due to session limits')
            # Sort by last access time so the oldest sessions go first.
            sessionList = [(val.lastAccess, key) for (key, val) in self.sessions.items()]
            sessionList.sort()
            while (len(sessionList) > self.max_sessions):
                (dummy_acces, delSession) = sessionList.pop(0)
                self.log.info('Pruning oldest session with ID %s', delSession)
                self.sessions.pop(delSession)
class ParseError(Exception):
    """Parsing failure with an optional line/column location.

    A line and column of 0 means the location is unknown, in which case only
    the message is rendered.
    """

    def __init__(self, msg, line=0, column=0):
        self._msg = msg
        self._line = line
        self._column = column

    def __str__(self):
        if self._line or self._column:
            return 'Line: {}, Column: {}\n {}'.format(self._line, self._column, self._msg)
        return self._msg
class Builder():
    """Test harness that builds IR/EnvoyConfig pairs from YAML resources and
    checks incremental-reconfiguration (cache) behavior."""

    def __init__(self, logger: logging.Logger, tmpdir: Path, yaml_file: str, enable_cache=True) -> None:
        self.logger = logger
        # YAML fixtures live next to this file, under testdata/.
        self.test_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'testdata')
        self.cache: Optional[Cache] = None
        if enable_cache:
            self.cache = Cache(logger)
        # key -> raw resource dict; key -> pending delta for the next build.
        self.resources: Dict[(str, Any)] = {}
        self.deltas: Dict[(str, Any)] = {}
        self.apply_yaml(yaml_file, allow_updates=False)
        self.secret_handler = NullSecretHandler(logger, str(((tmpdir / 'secrets') / 'src')), str(((tmpdir / 'secrets') / 'cache')), '0')
        # History of (IR, EnvoyConfig) pairs produced by build().
        self.builds: List[Tuple[(IR, EnvoyConfig)]] = []

    def current_yaml(self) -> str:
        """Serialize the current resource set back to YAML."""
        return yaml.safe_dump_all(list(self.resources.values()))

    def apply_yaml(self, yaml_file: str, allow_updates=True) -> None:
        """Apply resources from a testdata YAML file."""
        # Bug fix: use a context manager so the file handle is not leaked.
        with open(os.path.join(self.test_dir, yaml_file), 'r') as f:
            yaml_data = f.read()
        self.apply_yaml_string(yaml_data, allow_updates)

    def apply_yaml_string(self, yaml_data: str, allow_updates=True) -> None:
        """Add/update resources from a YAML string and record deltas."""
        for rsrc in yaml.safe_load_all(yaml_data):
            kind = rsrc['kind']
            metadata = rsrc['metadata']
            name = metadata['name']
            namespace = metadata['namespace']
            key = f'{kind}-v2-{name}-{namespace}'
            dtype = 'add'
            if (key in self.resources):
                if (not allow_updates):
                    raise RuntimeError(f'Cannot update {key}')
                dtype = 'update'
            self.resources[key] = rsrc
            self.deltas[key] = {'kind': kind, 'apiVersion': rsrc['apiVersion'], 'metadata': {'name': name, 'namespace': namespace, 'creationTimestamp': metadata.get('creationTimestamp', '2021-11-19T15:11:45Z')}, 'deltaType': dtype}

    def delete_yaml(self, yaml_file: str) -> None:
        """Delete resources listed in a testdata YAML file."""
        # Bug fix: use a context manager so the file handle is not leaked.
        with open(os.path.join(self.test_dir, yaml_file), 'r') as f:
            yaml_data = f.read()
        self.delete_yaml_string(yaml_data)

    def delete_yaml_string(self, yaml_data: str) -> None:
        """Remove resources named in a YAML string and record delete deltas."""
        for rsrc in yaml.safe_load_all(yaml_data):
            kind = rsrc['kind']
            metadata = rsrc['metadata']
            name = metadata['name']
            namespace = metadata['namespace']
            key = f'{kind}-v2-{name}-{namespace}'
            if (key in self.resources):
                del self.resources[key]
                self.deltas[key] = {'kind': kind, 'apiVersion': rsrc['apiVersion'], 'metadata': {'name': name, 'namespace': namespace, 'creationTimestamp': metadata.get('creationTimestamp', '2021-11-19T15:11:45Z')}, 'deltaType': 'delete'}

    def build(self) -> Tuple[(IR, EnvoyConfig)]:
        """Build an (IR, EnvoyConfig) pair from the current resources.

        Consumes the pending deltas, asserts the expected complete/incremental
        reconfiguration mode for the cache setting, and records the build.
        """
        # Assemble a watt snapshot; deltas are consumed by this build.
        watt: Dict[(str, Any)] = {'Kubernetes': {}, 'Deltas': list(self.deltas.values())}
        self.deltas = {}
        for rsrc in self.resources.values():
            kind = rsrc['kind']
            if (kind not in watt['Kubernetes']):
                watt['Kubernetes'][kind] = []
            watt['Kubernetes'][kind].append(rsrc)
        watt_json = json.dumps(watt, sort_keys=True, indent=4)
        self.logger.debug(f'''Watt JSON:
{watt_json}''')
        aconf = Config()
        fetcher = ResourceFetcher(self.logger, aconf)
        fetcher.parse_watt(watt_json)
        aconf.load_all(fetcher.sorted())
        (config_type, reset_cache, invalidate_groups_for) = IR.check_deltas(self.logger, fetcher, self.cache)
        # Without a cache every build must be a full reconfiguration; with a
        # cache it must be incremental and must not reset the cache.
        if (self.cache is None):
            assert (config_type == 'complete'), "check_deltas wants an incremental reconfiguration with no cache, which it shouldn't"
            assert reset_cache, 'check_deltas with no cache does not want to reset the cache, but it should'
        else:
            assert (config_type == 'incremental'), "check_deltas with a cache wants a complete reconfiguration, which it shouldn't"
            assert (not reset_cache), "check_deltas with a cache wants to reset the cache, which it shouldn't"
        ir = IR(aconf, logger=self.logger, cache=self.cache, invalidate_groups_for=invalidate_groups_for, file_checker=(lambda path: True), secret_handler=self.secret_handler)
        assert ir, 'could not create an IR'
        econf = EnvoyConfig.generate(ir, cache=self.cache)
        assert econf, 'could not create an econf'
        self.builds.append((ir, econf))
        return (ir, econf)

    def invalidate(self, key) -> None:
        """Invalidate a cache entry; the key must currently be cached."""
        if (self.cache is not None):
            assert (self.cache[key] is not None), f'key {key} is not cached'
            self.cache.invalidate(key)

    def check(self, what: str, b1: Tuple[(IR, EnvoyConfig)], b2: Tuple[(IR, EnvoyConfig)], strip_cache_keys=False) -> bool:
        """Assert two builds serialize identically (IR and econf); on mismatch
        the assertion message carries a context diff."""
        for (kind, idx) in [('IR', 0), ('econf', 1)]:
            if (strip_cache_keys and (idx == 0)):
                # Cache keys are build-specific noise; drop them before diffing.
                x1 = self.strip_cache_keys(b1[idx].as_dict())
                j1 = json.dumps(x1, sort_keys=True, indent=4)
                x2 = self.strip_cache_keys(b2[idx].as_dict())
                j2 = json.dumps(x2, sort_keys=True, indent=4)
            else:
                j1 = b1[idx].as_json()
                j2 = b2[idx].as_json()
            match = (j1 == j2)
            output = ''
            if (not match):
                l1 = j1.split('\n')
                l2 = j2.split('\n')
                n1 = f'{what} {kind} 1'
                n2 = f'{what} {kind} 2'
                output += '\n\n'
                for line in difflib.context_diff(l1, l2, fromfile=n1, tofile=n2):
                    line = line.rstrip()
                    output += line
                    output += '\n'
            assert match, output
        return match

    def check_last(self, what: str) -> None:
        """Check the two most recent builds against each other."""
        build_count = len(self.builds)
        b1 = self.builds[(build_count - 2)]
        b2 = self.builds[(build_count - 1)]
        self.check(what, b1, b2)

    def strip_cache_keys(self, node: Any) -> Any:
        """Recursively drop '_cache_key' entries from a dict/list tree."""
        if isinstance(node, dict):
            output = {}
            for (k, v) in node.items():
                if (k == '_cache_key'):
                    continue
                output[k] = self.strip_cache_keys(v)
            return output
        elif isinstance(node, list):
            return [self.strip_cache_keys(x) for x in node]
        return node
def int2c2e3d_30(ax, da, A, bx, db, B):
    """Auto-generated two-center integral kernel (Boys-function based).

    NOTE(review): the constants `17.` (in x10) and the two `0.` factors (x11,
    x14) look like truncated machine-generated coefficients -- as written,
    x11 and x14 are exactly zero, which zeroes out most result entries.
    Confirm against the code generator's output.

    :param ax, bx: Gaussian exponents.
    :param da, db: contraction coefficients.
    :param A, B: center coordinate arrays.
    :return: (10, 1) array of summed integral components.
    """
    result = numpy.zeros((10, 1), dtype=float)
    x0 = (ax + bx)
    x1 = (x0 ** (- 1.0))
    x2 = (((- x1) * ((ax * A[0]) + (bx * B[0]))) + A[0])
    x3 = (ax ** (- 1.0))
    # Boys-function argument: reduced exponent times squared center distance.
    x4 = (((ax * bx) * x1) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))
    x5 = (bx ** (- 1.0))
    x6 = (x3 * ((x1 * boys(2, x4)) - (x5 * boys(1, x4))))
    x7 = (- x6)
    x8 = ((2.0 * x5) * boys(3, x4))
    x9 = ((x2 ** 2) * x8)
    x10 = ((((17. * da) * db) * (x0 ** (- 0.5))) * x3)
    x11 = (0. * x10)
    x12 = (((- x1) * ((ax * A[1]) + (bx * B[1]))) + A[1])
    x13 = ((- x12) * x6)
    x14 = (0. * x10)
    x15 = (((- x1) * ((ax * A[2]) + (bx * B[2]))) + A[2])
    x16 = ((- x15) * x6)
    x17 = ((x12 ** 2) * x8)
    x18 = (x17 + x7)
    x19 = (x14 * x2)
    x20 = (((x15 ** 2) * x8) + x7)
    x21 = (- x20)
    result[(0, 0)] = numpy.sum(((x11 * x2) * (((2.0 * x6) - x7) - x9)))
    result[(1, 0)] = numpy.sum(((- x14) * ((x12 * x9) + x13)))
    result[(2, 0)] = numpy.sum(((- x14) * ((x15 * x9) + x16)))
    result[(3, 0)] = numpy.sum(((- x18) * x19))
    result[(4, 0)] = numpy.sum((((((- x10) * x12) * x15) * x2) * x8))
    result[(5, 0)] = numpy.sum((x19 * x21))
    result[(6, 0)] = numpy.sum(((- x11) * ((x12 * x18) + (2.0 * x13))))
    result[(7, 0)] = numpy.sum(((- x14) * ((x15 * x17) + x16)))
    result[(8, 0)] = numpy.sum(((x12 * x14) * x21))
    result[(9, 0)] = numpy.sum(((- x11) * ((x15 * x20) + (2.0 * x16))))
    return result
class TrackedTestModule(TestModule):
    """TestModule that counts epochs, batches, and checkpoint restores."""

    def __init__(self) -> None:
        super().__init__()
        # Counters incremented by the hooks below.
        self.num_epochs_seen = 0
        self.num_batches_seen = 0
        self.num_on_load_checkpoint_called = 0

    def on_train_epoch_end(self) -> None:
        """Record one completed training epoch."""
        self.num_epochs_seen = self.num_epochs_seen + 1

    def on_train_batch_start(self, batch: torch.Tensor, batch_idx: int, unused: Optional[int]=None) -> None:
        """Record one started training batch."""
        self.num_batches_seen = self.num_batches_seen + 1

    def on_load_checkpoint(self, checkpoint: Dict[(str, Any)]) -> None:
        """Record one checkpoint restore."""
        self.num_on_load_checkpoint_called = self.num_on_load_checkpoint_called + 1
def create_cosmwasm_instantiate_msg(code_id: int, args: Any, label: str, sender_address: Address, funds: Optional[str]=None, admin_address: Optional[Address]=None) -> MsgInstantiateContract:
    """Build a MsgInstantiateContract for previously stored contract code.

    :param code_id: id of the stored contract code to instantiate.
    :param args: JSON-serializable init message for the contract.
    :param label: human-readable label for the contract instance.
    :param sender_address: address submitting the message.
    :param funds: optional coin string to transfer on instantiation.
    :param admin_address: optional address allowed to administer the contract.
    :return: the populated protobuf message.
    """
    message = MsgInstantiateContract(
        sender=str(sender_address),
        code_id=code_id,
        msg=json_encode(args).encode('UTF8'),
        label=label,
    )
    if funds is not None:
        message.funds.extend(parse_coins(funds))
    if admin_address is not None:
        message.admin = str(admin_address)
    return message
# NOTE(review): this call appears to be a decorator
# ('@_HOOK_REGISTRY.register()') whose leading '@' was stripped during
# extraction; as written it runs at import time without decorating the class.
# Confirm upstream.
_HOOK_REGISTRY.register()
class TimesTwoHook(mh.ModelingHook):
    """Modeling hook that wraps a model in TimesTwoWrapper (defined elsewhere;
    presumably doubles the model's output -- confirm)."""

    def __init__(self, cfg):
        super().__init__(cfg)

    def apply(self, model: torch.nn.Module) -> torch.nn.Module:
        """Wrap *model* in TimesTwoWrapper."""
        return TimesTwoWrapper(model)

    def unapply(self, model: torch.nn.Module) -> torch.nn.Module:
        """Undo apply(): unwrap and return the original model."""
        assert isinstance(model, TimesTwoWrapper)
        return model.model
def extractTenseitranslatorBlogspotCom(item):
    """Map a feed item from tenseitranslator.blogspot.com to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (feed tag, series name, translation type)
    tagmap = [
        ('tenseikizoku', 'Aristocrat Reborn in Another World', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def gen_fuzz_states(nvals):
    """Yield `nvals` 0/1 state bits, chosen either from a predefined fuzz
    choice (when the spec index is in range) or at random.

    `gen_fuzz_choices` and `specn` are defined elsewhere -- presumably the
    curated choice list and a 1-based spec selector; confirm.
    """
    choices = gen_fuzz_choices(nvals)
    # specn() is 1-based; convert to a 0-based index into the choices.
    spec_idx = (specn() - 1)
    if (spec_idx < len(choices)):
        bits = choices[spec_idx]
    else:
        # Fall back to a random bit pattern.
        # NOTE(review): randint's upper bound is inclusive and 2**next_p2_states
        # has more bits than needed; harmless because only the low `nvals`
        # bits are read below, but possibly not what was intended -- confirm.
        next_p2_states = (2 ** math.ceil(math.log(nvals, 2)))
        bits = random.randint(0, (2 ** next_p2_states))
    # Emit bit i of the chosen pattern as state i.
    for i in range(nvals):
        mask = (1 << i)
        (yield int(bool((bits & mask))))
class EmulateEfuseController(EmulateEfuseControllerBase):
    """In-memory emulation of the ESP32-C5(beta3) eFuse controller.

    Mirrors the register-level behavior the esptool eFuse code expects:
    program/read commands, block coding schemes, and read protection.
    """

    CHIP_NAME = 'ESP32-C5(beta3)'
    # Backing storage for the emulated eFuse memory.
    mem = None
    debug = False

    def __init__(self, efuse_file=None, debug=False):
        self.Blocks = EfuseDefineBlocks
        self.Fields = EfuseDefineFields()
        self.REGS = EfuseDefineRegisters
        super(EmulateEfuseController, self).__init__(efuse_file, debug)
        # Start with the command register idle.
        self.write_reg(self.REGS.EFUSE_CMD_REG, 0)
    # NOTE(review): the bare string below looks like a section marker comment
    # that lost its '#' during extraction; it is a no-op statement as written.
    ' esptool method start >>'
    def get_major_chip_version(self):
        return 0
    def get_minor_chip_version(self):
        return 0
    def get_crystal_freq(self):
        # Crystal frequency in MHz reported to esptool.
        return 40
    def get_security_info(self):
        return {'flags': 0, 'flash_crypt_cnt': 0, 'key_purposes': 0, 'chip_id': 0, 'api_version': 0}
    ' << esptool method end '
    def handle_writing_event(self, addr, value):
        """React to register writes: emulate the program and read commands."""
        if (addr == self.REGS.EFUSE_CMD_REG):
            if (value & self.REGS.EFUSE_PGM_CMD):
                # Program command: copy write regs into read regs for the
                # block encoded in bits [5:2] of the command value.
                self.copy_blocks_wr_regs_to_rd_regs(updated_block=((value >> 2) & 15))
                self.clean_blocks_wr_regs()
                self.check_rd_protection_area()
                self.write_reg(addr, 0)
                self.write_reg(self.REGS.EFUSE_CMD_REG, 0)
            elif (value == self.REGS.EFUSE_READ_CMD):
                self.write_reg(addr, 0)
                self.write_reg(self.REGS.EFUSE_CMD_REG, 0)
                self.save_to_file()
    def get_bitlen_of_block(self, blk, wr=False):
        """Bit length of a block; write buffers for blocks > 0 include the
        Reed-Solomon check bytes."""
        if (blk.id == 0):
            if wr:
                return (32 * 8)
            else:
                return (32 * blk.len)
        elif wr:
            # 12 RS check bytes occupy 3 extra words in the write buffer.
            rs_coding = (32 * 3)
            return ((32 * 8) + rs_coding)
        else:
            return (32 * blk.len)
    def handle_coding_scheme(self, blk, data):
        """Verify and strip the Reed-Solomon coding for blocks > 0.

        Raises FatalError when the stored check bytes do not match the data.
        """
        if (blk.id != 0):
            coded_bytes = 12
            data.pos = (coded_bytes * 8)
            # Data words are stored little-endian relative to the bit stream.
            plain_data = data.readlist('32*uint:8')[::(- 1)]
            rs = reedsolo.RSCodec(coded_bytes)
            calc_encoded_data = list(rs.encode([x for x in plain_data]))
            data.pos = 0
            if (calc_encoded_data != data.readlist('44*uint:8')[::(- 1)]):
                raise FatalError('Error in coding scheme data')
            # Drop the RS check bytes, then any words beyond the block length.
            data = data[(coded_bytes * 8):]
        if (blk.len < 8):
            data = data[((8 - blk.len) * 32):]
        return data
class RyuTestResult(result.TextTestResult):
    """TextTestResult variant with colorized OK/FAIL/ERROR output and
    per-test-case grouping in verbose mode."""

    def __init__(self, *args, **kw):
        result.TextTestResult.__init__(self, *args, **kw)
        self._last_case = None
        self.colorizer = None
        # Bug fix: save the CURRENT stdout (which may be a capture proxy),
        # not sys.__stdout__ -- previously the original stream was lost on
        # restore. The colorizers probe the real terminal via sys.__stdout__.
        stdout = sys.stdout
        sys.stdout = sys.__stdout__
        for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
            if colorizer.supported():
                self.colorizer = colorizer(self.stream)
                break
        sys.stdout = stdout

    def getDescription(self, test):
        return str(test)

    def addSuccess(self, test):
        unittest.TestResult.addSuccess(self, test)
        if self.showAll:
            self.colorizer.write('OK', 'green')
            self.stream.writeln()
        elif self.dots:
            self.stream.write('.')
            self.stream.flush()

    def addFailure(self, test, err):
        unittest.TestResult.addFailure(self, test, err)
        if self.showAll:
            self.colorizer.write('FAIL', 'red')
            self.stream.writeln()
        elif self.dots:
            self.stream.write('F')
            self.stream.flush()

    def addError(self, test, err):
        """Route known error classes to their storage; everything else is a
        real error and is colorized accordingly."""
        stream = getattr(self, 'stream', None)
        (ec, ev, tb) = err
        try:
            exc_info = self._exc_info_to_string(err, test)
        except TypeError:
            # Older API without the `test` argument.
            exc_info = self._exc_info_to_string(err)
        for (cls, (storage, label, isfail)) in self.errorClasses.items():
            if (result.isclass(ec) and issubclass(ec, cls)):
                if isfail:
                    # Bug fix: was `test.passwd = False` (typo); the attribute
                    # used everywhere else is `passed`.
                    test.passed = False
                storage.append((test, exc_info))
                if (stream is not None):
                    if self.showAll:
                        message = [label]
                        detail = result._exception_detail(err[1])
                        if detail:
                            message.append(detail)
                        stream.writeln(': '.join(message))
                    elif self.dots:
                        stream.write(label[:1])
                return
        self.errors.append((test, exc_info))
        test.passed = False
        if (stream is not None):
            if self.showAll:
                self.colorizer.write('ERROR', 'red')
                self.stream.writeln()
            elif self.dots:
                stream.write('E')

    def startTest(self, test):
        unittest.TestResult.startTest(self, test)
        current_case = test.test.__class__.__name__
        if self.showAll:
            # Print the test-case class name once per group of tests.
            if (current_case != self._last_case):
                self.stream.writeln(current_case)
                self._last_case = current_case
            self.stream.write(('    %s' % str(test.test).ljust(60)))
            self.stream.flush()
# NOTE(review): this line appears to be a '@given(...)' hypothesis decorator
# whose leading '@given' was stripped during extraction.
(st.floats(allow_nan=False, allow_infinity=False), st.floats(allow_nan=False, allow_infinity=False), valid_params())
def test_that_truncated_normal_is_monotonic(x1, x2, arg):
    """trans_truncated_normal must be (weakly) monotonic in its input."""
    # NOTE(review): the generated x1/x2/arg are immediately overwritten with
    # fixed values, which defeats the property-based search -- this looks like
    # leftover debugging of a specific example; confirm and remove if so.
    arg = (0.0, 2.0, (- 1.0), 1.0)
    x1 = 0.0
    x2 = 7.e-09
    result1 = TransferFunction.trans_truncated_normal(x1, arg)
    result2 = TransferFunction.trans_truncated_normal(x2, arg)
    if np.isclose(x1, x2):
        assert np.isclose(result1, result2, atol=1e-07)
    elif (x1 < x2):
        # Equality at the truncation bounds (arg[2]/arg[3]) is allowed.
        assert ((result1 < result2) or ((result1 == arg[2]) and (result2 == arg[2])) or ((result1 == arg[3]) and (result2 == arg[3])))
class IsAdminFilter(SimpleCustomFilter):
    """Custom filter that passes when the sender administers the chat."""

    key = 'is_chat_admin'

    def __init__(self, bot):
        self._bot = bot

    def check(self, message):
        """Return True if the message/callback author is a chat creator
        or administrator."""
        if isinstance(message, types.CallbackQuery):
            chat_id = message.message.chat.id
        else:
            chat_id = message.chat.id
        status = self._bot.get_chat_member(chat_id, message.from_user.id).status
        return status in ['creator', 'administrator']
def prepare_data(prompt: str, model_path: str):
    """Tokenize *prompt* with a BERT tokenizer and attach CUDA position ids.

    :param prompt: input text to tokenize.
    :param model_path: path or name passed to BertTokenizer.from_pretrained.
    :raises ValueError: when the tokenized sequence exceeds 512 tokens.
    :return: the tokenizer output dict with an added 'position_ids' tensor
        (shaped like 'input_ids', on the GPU).
    """
    tokenizer = BertTokenizer.from_pretrained(model_path)
    encoded = tokenizer(prompt, return_attention_mask=False, return_tensors='pt')
    input_shape = encoded['input_ids'].size()
    if input_shape[1] > 512:
        raise ValueError('Sequence length > 512 is not supported')
    position_ids = torch.arange(input_shape[1], dtype=torch.int64)
    encoded['position_ids'] = position_ids.reshape(encoded['input_ids'].size()).contiguous().cuda()
    return encoded
class TestSample(CommandLineTestMixIn, unittest.TestCase):
    """Round-trip test of `convert --sample`: sample 2 records with a fixed seed
    and compare against the expected FASTA output."""

    # Fixture wiring consumed by CommandLineTestMixIn.
    in_suffix = '.fasta'
    out_suffix = '.fasta'
    input_path = p('input5.fasta')
    expected_path = p('output5.fasta')
    command = 'convert --sample 2 --sample-seed 0 {input} {output}'

    def setUp(self):
        """Silence logging and fix the global RNG for reproducibility."""
        super(TestSample, self).setUp()
        # Remember the root logger level so tearDown can restore it.
        self.orig_level = logging.getLogger(None).level
        logging.getLogger(None).setLevel(logging.FATAL)
        random.seed(1)

    def tearDown(self):
        """Restore the root logger level changed in setUp."""
        super(TestSample, self).tearDown()
        logging.getLogger(None).setLevel(self.orig_level)
def extractLainconnueEbisutranslationsCom(item):
    """Map a feed item from lainconnue/ebisutranslations to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (feed tag, series name, translation type)
    tagmap = [
        ('Unlimited Anime Works', 'Unlimited Anime Works', 'translated'),
        ('Holistic Fantasy', 'Holistic Fantasy', 'translated'),
        ('Shoujo Grand Summoning', 'Shoujo Grand Summoning', 'translated'),
    ]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _mock_gcp_resource_iter(_, resource_type):
    """Test double for the GCP resource iterator.

    Builds dataset_policy Resource tuples (with synthetic project/dataset
    parents) from the BIGQUERY_DATA fixture. Only 'dataset_policy' is
    supported; any other resource_type raises ValueError.
    """
    resources = []
    if (resource_type != 'dataset_policy'):
        # Bug fix: the format argument was previously passed as an extra
        # ValueError argument, so the message was never interpolated.
        raise ValueError('unexpected resource type: got %s, want dataset_policy' % resource_type)
    Resource = collections.namedtuple('Resource', ['full_name', 'type', 'name', 'parent_type_name', 'parent', 'data'])
    for resource in fbsd.BIGQUERY_DATA:
        policy_name = resource['full_name']
        # Derive the parent dataset and project names by trimming path segments.
        dataset_name = ('/'.join(policy_name.split('/')[:(- 3)]) + '/')
        proj_name = ('/'.join(dataset_name.split('/')[:(- 3)]) + '/')
        proj = Resource(full_name=proj_name, type='project', name=('projects/' + resource['project_id']), parent_type_name='', parent=None, data='')
        dataset = Resource(full_name=dataset_name, type='dataset', name=('dataset/' + resource['dataset_id']), parent_type_name='project', parent=proj, data='')
        policy = Resource(full_name=policy_name, type='dataset_policy', parent_type_name='dataset', name=('dataset_policies/' + resource['dataset_id']), parent=dataset, data=json.dumps([{}]))
        resources.append(policy)
    return resources
def t8n_arguments(subparsers: argparse._SubParsersAction) -> None:
    """Register the `t8n` sub-command and its command-line options.

    Adds input/output file options, state configuration, and trace switches
    to the given subparsers collection.
    """
    parser = subparsers.add_parser('t8n', help='This is the t8n tool.')

    # Input files.
    parser.add_argument('--input.alloc', dest='input_alloc', type=str, default='alloc.json')
    parser.add_argument('--input.env', dest='input_env', type=str, default='env.json')
    parser.add_argument('--input.txs', dest='input_txs', type=str, default='txs.json')

    # Output files and directories.
    parser.add_argument('--output.alloc', dest='output_alloc', type=str, default='alloc.json')
    parser.add_argument('--output.basedir', dest='output_basedir', type=str, default='.')
    parser.add_argument('--output.body', dest='output_body', type=str)
    parser.add_argument('--output.result', dest='output_result', type=str, default='result.json')

    # Chain/state configuration.
    parser.add_argument('--state.chainid', dest='state_chainid', type=int, default=1)
    parser.add_argument('--state.fork', dest='state_fork', type=str, default='Frontier')
    parser.add_argument('--state.reward', dest='state_reward', type=int, default=0)

    # Tracing switches (all default to False).
    for trace_flag in ('--trace', '--trace.memory', '--trace.nomemory', '--trace.noreturndata', '--trace.nostack', '--trace.returndata'):
        parser.add_argument(trace_flag, action='store_true')
def downgrade():
    """Alembic downgrade: restore the denormalized `title` column on `updates`."""
    op.add_column('updates', sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True))
    # Rebuild each title as the space-joined, NVR-sorted list of its builds.
    op.execute("UPDATE updates SET title=(SELECT string_agg(nvr, ' ') as title FROM (SELECT builds.nvr FROM builds WHERE update_id=updates.id ORDER BY nvr) as nvr)")
    op.create_index('ix_updates_title', 'updates', ['title'], unique=True)
    # NOTE(review): existing_type=sa.BOOLEAN() for `alias` looks suspect (the
    # name reads like a text column) -- confirm against the upgrade migration.
    op.alter_column('updates', 'alias', existing_type=sa.BOOLEAN(), nullable=True)
# NOTE(review): the two lines below appear to be '@pytest.mark.django_db' and
# '@pytest.mark.skip(...)' decorators whose '@pytest.mark' prefixes were
# stripped during extraction -- confirm upstream.
.django_db
.skip(reason='Test based on pre-databricks loader code. Remove when fully cut over.')
def test_load_source_procurement_by_ids():
    """Load three FPDS transactions by id and verify the transactions, the
    single rolled-up award, and its earliest/latest transaction links."""
    source_procurement_id_list = [101, 201, 301]
    _assemble_source_procurement_records(source_procurement_id_list)
    call_command('load_fpds_transactions', '--ids', *source_procurement_id_list)
    # All three source records must become FPDS transactions.
    usaspending_transactions = TransactionFPDS.objects.all()
    assert (len(usaspending_transactions) == 3)
    tx_fpds_broker_ref_ids = [_.detached_award_procurement_id for _ in usaspending_transactions]
    assert (101 in tx_fpds_broker_ref_ids)
    assert (201 in tx_fpds_broker_ref_ids)
    assert (301 in tx_fpds_broker_ref_ids)
    tx_fpds_broker_ref_id_strings = [_.detached_award_proc_unique for _ in usaspending_transactions]
    assert ('101' in tx_fpds_broker_ref_id_strings)
    assert ('201' in tx_fpds_broker_ref_id_strings)
    assert ('301' in tx_fpds_broker_ref_id_strings)
    tx_norm_broker_ref_id_strings = [_.transaction.transaction_unique_id for _ in usaspending_transactions]
    assert ('101' in tx_norm_broker_ref_id_strings)
    assert ('201' in tx_norm_broker_ref_id_strings)
    assert ('301' in tx_norm_broker_ref_id_strings)
    # The three transactions must roll up into exactly one award.
    usaspending_awards = Award.objects.all()
    assert (len(usaspending_awards) == 1)
    new_award = usaspending_awards[0]
    tx_norm_awd = [_.transaction.award for _ in usaspending_transactions]
    assert (usaspending_awards[0] == tx_norm_awd[0] == tx_norm_awd[1] == tx_norm_awd[2])
    tx_norm_awd_ids = [_.transaction.award_id for _ in usaspending_transactions]
    assert (new_award.id == tx_norm_awd_ids[0] == tx_norm_awd_ids[1] == tx_norm_awd_ids[2])
    # Earliest/latest transaction links on the award.
    assert (new_award.transaction_unique_id == '101')
    assert (new_award.latest_transaction.transaction_unique_id == '301')
    assert (new_award.earliest_transaction.transaction_unique_id == '101')
    # Fiscal years derived from each source transaction.
    transactions_by_id = {transaction.detached_award_procurement_id: transaction.transaction for transaction in usaspending_transactions}
    assert (transactions_by_id[101].fiscal_year == 2010)
    assert (transactions_by_id[201].fiscal_year == 2010)
    assert (transactions_by_id[301].fiscal_year == 2011)
def _get_args(args):
    """Parse command-line arguments for the EC2 price listing tool.

    :param args: argument list (e.g. sys.argv[1:]); None lets argparse read
        sys.argv itself.
    :return: the parsed argparse.Namespace.
    """
    try:
        import argparse
    except ImportError:
        print('ERROR: You are running Python < 2.7. Please use pip to install argparse: pip install argparse')
        # Bug fix: previously execution fell through after the message and
        # crashed with a NameError on `argparse`; exit explicitly instead.
        raise SystemExit(1)
    parser = argparse.ArgumentParser(add_help=True, description='Print out the current prices of EC2 instances')
    # Which price list(s) to show.
    parser.add_argument('--type', '-t', help='Show elb, ondemand, reserved, spot , spotordemand or all instances prices', choices=OUTPUT_PRICE_TYPES, default='all')
    # Result filters.
    parser.add_argument('--filter-region', '-fr', help='Filter results to a specific region', choices=EC2_REGIONS, default=None)
    parser.add_argument('--filter-type', '-ft', help='Filter results to a specific instance type', choices=EC2_INSTANCE_TYPES, default=None)
    parser.add_argument('--filter-type-pattern', '-fp', help='Filter results to a specific instance type pattern', choices=EC2_INSTANCE_TYPES_PATTERN, default=None)
    parser.add_argument('--filter-os-type', '-fo', help='Filter results to a specific os type', choices=EC2_OS_TYPES, default='linux')
    # Output rendering.
    parser.add_argument('--format', '-f', choices=OUTPUT_FORMATS, help='Output format', default='table')
    parser.add_argument('--statsd-prefix', '-sp', help='Pass the prefix of the metric you want to have (Only for statsd output format)', default='statsd.ec2instancespricing.hourly')
    args = parser.parse_args(args=args)
    return args
def grpc_msg(rosmsg):
    """Convert a ROS DiagnosticArray message into its gRPC (mmsg) counterpart.

    Copies the header timestamp plus every DiagnosticStatus and its key/value
    pairs into a new mmsg.DiagnosticArray.
    """
    try:
        result = mmsg.DiagnosticArray()
        # Only the seconds part of the ROS header stamp is carried over.
        result.timestamp = rosmsg.header.stamp.secs
        stats = []
        for stat in rosmsg.status:
            ds = mmsg.DiagnosticStatus()
            ds.level = stat.level
            ds.name = stat.name
            ds.message = stat.message
            ds.hardware_id = stat.hardware_id
            values = []
            for val in stat.values:
                dv = mmsg.KeyValue()
                dv.key = val.key
                # utf8() is defined elsewhere; presumably normalizes the value
                # to a utf-8 string -- confirm.
                dv.value = utf8(val.value)
                values.append(dv)
            ds.values.extend(values)
            stats.append(ds)
        result.status.extend(stats)
        return result
    except Exception as _err:
        import traceback
        # NOTE(review): re-raising as a plain Exception discards the original
        # exception type and keeps only the formatted traceback text. A bare
        # `raise` would preserve it -- confirm this wrapping is required by
        # the RPC boundary before changing.
        raise Exception(traceback.format_exc())
class OptionSeriesPyramid3dDatalabels(Options):
    """Data-label configuration for the pyramid3d series.

    Fixes the generated code in which every getter ``def`` was immediately
    shadowed by the same-named setter ``def`` (so only the setter survived
    and the getters were unreachable).  Each option is now a real
    read/write property: the getter reports the configured value, falling
    back to the Highcharts default, and the setter stores the value via
    ``self._config``.  Sub-option groups (``animation``, ``filter``,
    ``textPath``) remain getter-only properties returning their dedicated
    option objects.
    """

    @property
    def align(self):
        """Label alignment relative to the point. Default: 'right'."""
        return self._config_get('right')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        """Whether labels may overlap. Default: False."""
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionSeriesPyramid3dDatalabelsAnimation':
        """Label animation sub-options."""
        return self._config_sub_data('animation', OptionSeriesPyramid3dDatalabelsAnimation)

    @property
    def backgroundColor(self):
        """Label background color. Default: None (undefined)."""
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        """Label border color. Default: None (undefined)."""
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        """Border corner radius in px. Default: 0."""
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        """Border width in px. Default: 0."""
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        """Extra CSS class for the label. Default: None."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Label text color. Default: None (undefined)."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        """Whether to hide labels outside the plot area. Default: False."""
        return self._config_get(False)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        """Whether to defer label display until after animation. Default: True."""
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        """Whether data labels are enabled. Default: False."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionSeriesPyramid3dDatalabelsFilter':
        """Declarative label filter sub-options."""
        return self._config_sub_data('filter', OptionSeriesPyramid3dDatalabelsFilter)

    @property
    def format(self):
        """Format string for the label. Default: 'point.value'."""
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        """Callback formatter for the label. Default: None."""
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        """Whether to place the label inside the point. Default: False."""
        return self._config_get(False)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        """Format applied to null points. Default: None."""
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        """Callback formatter for null points. Default: None."""
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        """How to handle labels flowing outside the plot area. Default: 'allow'."""
        return self._config_get('allow')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        """Padding inside the label box in px. Default: 5."""
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        """Label position relative to the point. Default: 'center'."""
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        """Text rotation in degrees. Default: 0."""
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        """Whether the label box casts a shadow. Default: False."""
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        """Symbol name for the label border/background. Default: 'square'."""
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        """CSS style object for the label text. Default: None."""
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionSeriesPyramid3dDatalabelsTextpath':
        """Text-path rendering sub-options."""
        return self._config_sub_data('textPath', OptionSeriesPyramid3dDatalabelsTextpath)

    @property
    def useHTML(self):
        """Whether to render the label with HTML. Default: False."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        """Vertical alignment of the label. Default: 'top'."""
        return self._config_get('top')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """Horizontal pixel offset. Default: 0."""
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Vertical pixel offset. Default: 'undefined'."""
        return self._config_get('undefined')

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """Z index of the label group. Default: 6."""
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def test_vector_trend_fails(simple_2d_model):
    """A Vector trend rejects mismatched data containers and weights."""
    coords, _, data = simple_2d_model
    vector_trend = Vector([Trend(degree=1), Trend(degree=1)])
    # Handing the data over as a plain list (wrong container) must fail.
    with pytest.raises(ValueError):
        vector_trend.fit(coords, list(data))
    # Weights with the wrong component layout must fail as well.
    with pytest.raises(ValueError):
        vector_trend.fit(coords, data, weights=[np.ones_like(data)] * 2)
class Constant(Term):
    """A leaf term wrapping a literal numeric value.

    Fixes the original class, in which the accessor methods ``value`` and
    ``type`` were dead code: ``__init__`` assigned instance attributes of
    the same names, permanently shadowing the methods (and the methods'
    ``getattr(self, 'value')`` bodies would only ever have seen those
    attributes anyway).  The value and its Python type are now stored
    privately and exposed as read-only properties, so reads such as
    ``const.value`` behave exactly as before while the dead defs are gone.
    """
    value: Union[(int, float)]
    type: Union[(int.__class__, float.__class__)]

    def __init__(self, value) -> None:
        super().__init__()
        # Private storage; the public names are the properties below.
        self._value = value
        self._type = type(value)

    @property
    def value(self):
        """The wrapped literal value."""
        return self._value

    @property
    def type(self):
        """The Python type of the wrapped value (e.g. int or float)."""
        return self._type

    def string(self):
        """Render the constant as source text."""
        return str(self.value)

    def subtree_depth(self):
        """Depth of the subtree rooted here (leaf: the inherited depth)."""
        # NOTE(review): self.depth is presumably set by the Term base
        # class — confirm against its definition.
        return self.depth
class Table(Base):
    """ORM row describing a single table captured by a snapshot."""
    __tablename__ = 'table'
    id = sa.Column(sa.Integer, sa.Sequence('table_id_seq'), primary_key=True)
    table_name = sa.Column(sa.String(255), nullable=False)
    snapshot_id = sa.Column(sa.Integer, sa.ForeignKey(Snapshot.id), nullable=False)
    snapshot = sa.orm.relationship(Snapshot, backref='tables')

    def get_table_name(self, postfix, old=False):
        """Return the physical table name for this snapshot entry.

        The legacy scheme (old=True) embeds name, snapshot hash and
        postfix verbatim; the current scheme hashes that triple into a
        short, stable md5-derived identifier.
        """
        if not self.snapshot:
            raise Exception('Table name requires snapshot')
        if not self.snapshot.hash:
            raise Exception('Snapshot hash is empty.')
        if old:
            return 'stellar_%s_%s_%s' % (self.table_name, self.snapshot.hash, postfix)
        key = '%s|%s|%s' % (self.table_name, self.snapshot.hash, postfix)
        digest = hashlib.md5(key.encode('utf-8')).hexdigest()
        # Only the first 16 hex chars are kept to stay well under name limits.
        return 'stellar_%s' % digest[:16]

    def __repr__(self):
        return '<Table(table_name=%r)>' % (self.table_name,)
def petsc4py_sparse_2_dense(sparse_matrix, output=False):
    """Convert a petsc4py CSR (AIJ) sparse matrix to a dense representation.

    :param sparse_matrix: petsc4py matrix exposing getValuesCSR()/getSize().
    :param output: verbosity/output flag forwarded to _pythonCSR_2_dense.
    :return: the dense result produced by _pythonCSR_2_dense.
    """
    # Extract the CSR arrays and the dimensions once, instead of calling
    # getValuesCSR()/getSize() repeatedly for each component (each call
    # rebuilds the arrays).
    rowptr, colptr, data = sparse_matrix.getValuesCSR()
    nr, nc = sparse_matrix.getSize()
    return _pythonCSR_2_dense(rowptr, colptr, data, nr, nc, output)
def assert_tx_failed(base_tester):
    """Build a helper asserting that a call reverts, restoring chain state.

    The returned callable snapshots the backend, verifies that invoking
    the given function raises the expected exception, then rolls the
    backend back to the snapshot so later tests start clean.
    """
    def _assert_reverts(function_to_test, exception=eth_tester.exceptions.TransactionFailed):
        checkpoint = base_tester.take_snapshot()
        with pytest.raises(exception):
            function_to_test()
        base_tester.revert_to_snapshot(checkpoint)
    return _assert_reverts
class IdSpineCombinerStageService(PrivateComputationStageService):
    """Stage service that combines PID spine and data files into MPC input.

    Launches the id-spine-combiner OneDocker binary over the PID stage
    outputs and records the resulting containers as a StageStateInstance
    on the private-computation instance.
    """

    def __init__(self, storage_svc: StorageService, onedocker_svc: OneDockerService, onedocker_binary_config_map: DefaultDict[(str, OneDockerBinaryConfig)], log_cost_to_s3: bool=DEFAULT_LOG_COST_TO_S3, padding_size: Optional[int]=None, protocol_type: str=Protocol.PID_PROTOCOL.value) -> None:
        # storage_svc: reads PID match-metric files; onedocker_svc: launches
        # the combiner containers; the config map resolves per-binary settings.
        self._storage_svc = storage_svc
        self._onedocker_svc = onedocker_svc
        self._onedocker_binary_config_map = onedocker_binary_config_map
        self._log_cost_to_s3 = log_cost_to_s3
        self._logger: logging.Logger = logging.getLogger(__name__)
        self.padding_size = padding_size
        self.protocol_type = protocol_type

    async def run_async(self, pc_instance: PrivateComputationInstance, server_certificate_provider: CertificateProvider, ca_certificate_provider: CertificateProvider, server_certificate_path: str, ca_certificate_path: str, server_ips: Optional[List[str]]=None, server_hostnames: Optional[List[str]]=None, server_private_key_ref_provider: Optional[PrivateKeyReferenceProvider]=None) -> PrivateComputationInstance:
        """Run the combiner stage and append its state to the instance.

        The certificate/server arguments are accepted for interface
        compatibility with other stages but are not used here.
        """
        output_path = pc_instance.data_processing_output_path
        combine_output_path = (output_path + '_combine')
        self._logger.info(f'[{self}] Starting id spine combiner service')
        # Partners wait for container spin-up; the publisher side does not.
        should_wait_spin_up: bool = (pc_instance.infra_config.role is PrivateComputationRole.PARTNER)
        container_instances = (await self._start_combiner_service(pc_instance, self._onedocker_svc, self._onedocker_binary_config_map, combine_output_path, log_cost_to_s3=self._log_cost_to_s3, max_id_column_count=pc_instance.product_config.common.pid_max_column_count, protocol_type=self.protocol_type, wait_for_containers_to_start_up=should_wait_spin_up))
        self._logger.info('Finished running CombinerService')
        stage_state = StageStateInstance(pc_instance.infra_config.instance_id, pc_instance.current_stage.name, containers=container_instances)
        pc_instance.infra_config.instances.append(stage_state)
        return pc_instance

    def get_status(self, pc_instance: PrivateComputationInstance) -> PrivateComputationInstanceStatus:
        """Derive the stage status from the recorded container states."""
        return get_pc_status_from_stage_state(pc_instance, self._onedocker_svc)

    async def _start_combiner_service(self, private_computation_instance: PrivateComputationInstance, onedocker_svc: OneDockerService, onedocker_binary_config_map: DefaultDict[(str, OneDockerBinaryConfig)], combine_output_path: str, log_cost_to_s3: bool=DEFAULT_LOG_COST_TO_S3, wait_for_containers: bool=False, max_id_column_count: int=1, protocol_type: str=Protocol.PID_PROTOCOL.value, wait_for_containers_to_start_up: bool=True) -> List[ContainerInstance]:
        """Build the combiner arguments and start its containers.

        Returns the list of started ContainerInstance objects.
        """
        stage_data = PrivateComputationServiceData.get(private_computation_instance.infra_config.game_type).combiner_stage
        binary_name = stage_data.binary_name
        binary_config = onedocker_binary_config_map[binary_name]
        # Game-type specific knobs: attribution uses padding_size + cost
        # logging, lift repurposes padding_size as the conversion limit.
        if (private_computation_instance.infra_config.game_type is PrivateComputationGameType.ATTRIBUTION):
            run_name = (private_computation_instance.infra_config.instance_id if log_cost_to_s3 else '')
            padding_size = checked_cast(int, private_computation_instance.product_config.common.padding_size)
            multi_conversion_limit = None
            log_cost = log_cost_to_s3
        elif (private_computation_instance.infra_config.game_type is PrivateComputationGameType.LIFT):
            run_name = None
            padding_size = None
            multi_conversion_limit = private_computation_instance.product_config.common.padding_size
            log_cost = None
        else:
            run_name = (private_computation_instance.infra_config.instance_id if log_cost_to_s3 else '')
            padding_size = None
            multi_conversion_limit = None
            log_cost = log_cost_to_s3
        combiner_service = checked_cast(IdSpineCombinerService, stage_data.service)
        # MR-PID and classic PID write their spine/data files to different paths.
        if (protocol_type == Protocol.MR_PID_PROTOCOL.value):
            spine_path = private_computation_instance.pid_mr_stage_output_spine_path
            data_path = private_computation_instance.pid_mr_stage_output_data_path
        else:
            spine_path = private_computation_instance.pid_stage_output_spine_path
            data_path = private_computation_instance.pid_stage_output_data_path
        if private_computation_instance.has_feature(PCSFeature.NUM_MPC_CONTAINER_MUTATION):
            # Optionally resize the MPC container count from observed data volume.
            new_num_mpc_containers = (await self.get_mutated_num_mpc_containers(spine_path, private_computation_instance.infra_config.num_pid_containers, private_computation_instance.infra_config.game_type))
            self._logger.info(f'[{self}] Mutate num MPC containers from {private_computation_instance.infra_config.num_mpc_containers} to {new_num_mpc_containers}')
            private_computation_instance.infra_config.num_mpc_containers = new_num_mpc_containers
        args = combiner_service.build_args(spine_path=spine_path, data_path=data_path, output_path=combine_output_path, num_shards=private_computation_instance.infra_config.num_pid_containers, tmp_directory=binary_config.tmp_directory, protocol_type=protocol_type, max_id_column_cnt=max_id_column_count, run_name=run_name, padding_size=padding_size, multi_conversion_limit=multi_conversion_limit, log_cost=log_cost, run_id=private_computation_instance.infra_config.run_id, log_cost_bucket=private_computation_instance.infra_config.log_cost_bucket)
        env_vars = generate_env_vars_dict(repository_path=binary_config.repository_path)
        container_type = None
        # Single-shard runs can opt into a larger container via feature flag.
        if ((private_computation_instance.infra_config.num_pid_containers == 1) and private_computation_instance.has_feature(PCSFeature.PID_SNMK_LARGER_CONTAINER_TYPE)):
            logging.info('Setting id spine combiner stage container to LARGE')
            container_type = ContainerType.LARGE
        container_permission = gen_container_permission(private_computation_instance)
        return (await combiner_service.start_containers(cmd_args_list=args, onedocker_svc=onedocker_svc, binary_version=binary_config.binary_version, binary_name=binary_name, timeout=None, wait_for_containers_to_finish=wait_for_containers, env_vars=env_vars, wait_for_containers_to_start_up=wait_for_containers_to_start_up, existing_containers=private_computation_instance.get_existing_containers_for_retry(), container_type=container_type, permission=container_permission))

    async def get_mutated_num_mpc_containers(self, spine_path: str, num_pid_containers: int, game_type: PrivateComputationGameType) -> int:
        """Compute the MPC container count from total matched union size.

        Sums 'union_file_size' across every shard's PID metrics file and
        divides by the per-shard row budget, rounding up.

        Raises Exception when a metrics file is missing or lacks the
        'union_file_size' key.
        """
        if (game_type is PrivateComputationGameType.ATTRIBUTION):
            num_rows_per_shard = NUM_ROWS_PER_MPC_SHARD_PA
        else:
            num_rows_per_shard = NUM_ROWS_PER_MPC_SHARD_PL
        loop = asyncio.get_running_loop()
        union_file_size = 0
        for shard in range(num_pid_containers):
            pid_match_metric_path = get_metrics_filepath(spine_path, shard)
            if (not self._storage_svc.file_exists(pid_match_metric_path)):
                raise Exception(f"PID metrics file doesn't exist at {pid_match_metric_path}")
            # Storage reads are blocking; run them in the default executor.
            pid_match_metric_json_str = (await loop.run_in_executor(None, self._storage_svc.read, pid_match_metric_path))
            pid_match_metric_dict = json.loads(pid_match_metric_json_str)
            if ('union_file_size' not in pid_match_metric_dict):
                raise Exception(f"PID metrics file doesn't have union_file_size in {pid_match_metric_path}")
            union_file_size += pid_match_metric_dict['union_file_size']
        return ceil((union_file_size / float(num_rows_per_shard)))
def test_multiple_subtractions():
    """Every variable is subtracted against every reference column (days)."""
    dates_1 = ['2022-09-01', '2022-10-01', '2022-12-01']
    dates_2 = ['2022-09-15', '2022-10-15', '2022-12-15']
    dates_3 = ['2022-08-01', '2022-09-01', '2022-11-01']
    dates_4 = ['2022-08-15', '2022-09-15', '2022-11-15']
    df_input = pd.DataFrame({'date1': dates_1, 'date2': dates_2, 'date3': dates_3, 'date4': dates_4})
    df_expected = pd.DataFrame({'date1': dates_1, 'date2': dates_2, 'date3': dates_3, 'date4': dates_4, 'date1_sub_date3': [31, 30, 30], 'date2_sub_date3': [45, 44, 44], 'date1_sub_date4': [17, 16, 16], 'date2_sub_date4': [31, 30, 30]})
    transformer = DatetimeSubtraction(variables=['date1', 'date2'], reference=['date3', 'date4'])
    df_output = transformer.fit_transform(df_input)
    # dtypes are ignored: the subtraction may yield int or float columns.
    pd.testing.assert_frame_equal(df_output, df_expected, check_dtype=False)
class Giveaways(commands.Cog):
    """Reaction-based giveaway cog: create, track, draw and reroll giveaways.

    NOTE(review): several lines in this class appear mangled by whatever
    stripped this source (missing decorators such as ``@commands.group``/
    ``@commands.Cog.listener``, a missing config identifier literal, and a
    truncated aiohttp session) — flagged inline below; confirm against the
    upstream flare-cogs repository.
    """
    __version__ = '0.13.0'
    __author__ = 'flare'

    def format_help_for_context(self, ctx):
        """Append cog version/author to the default help output."""
        pre_processed = super().format_help_for_context(ctx)
        return f'''{pre_processed}
Cog Version: {self.__version__}
Author: {self.__author__}'''

    def __init__(self, bot):
        """Set up config storage, in-memory giveaway cache and the background loop."""
        self.bot = bot
        # NOTE(review): "identifier=" has no value — a literal was stripped
        # here; this line is a syntax error as-is. TODO restore the numeric
        # config identifier from upstream.
        self.config = Config.get_conf(self, identifier=)
        self.config.init_custom(GIVEAWAY_KEY, 2)
        # msgid -> Giveaway objects for all currently-running giveaways.
        self.giveaways = {}
        self.giveaway_bgloop = asyncio.create_task(self.init())
        # NOTE(review): "aio" looks truncated — presumably an
        # aiohttp.ClientSession(); cog_unload() calls self.session.close().
        self.session = aio
        with contextlib.suppress(Exception):
            self.bot.add_dev_env_value('giveaways', (lambda x: self))

    async def init(self) -> None:
        """Reload unfinished giveaways from config, then poll them forever."""
        (await self.bot.wait_until_ready())
        data = (await self.config.custom(GIVEAWAY_KEY).all())
        for (_, giveaways) in data.items():
            for (msgid, giveaway) in giveaways.items():
                # Skip giveaways already ended or whose deadline passed offline.
                if giveaway.get('ended', False):
                    continue
                if (datetime.now(timezone.utc) > datetime.fromtimestamp(giveaway['endtime']).replace(tzinfo=timezone.utc)):
                    continue
                self.giveaways[int(msgid)] = Giveaway(guildid=giveaway['guildid'], channelid=giveaway['channelid'], messageid=msgid, endtime=datetime.fromtimestamp(giveaway['endtime']).replace(tzinfo=timezone.utc), prize=giveaway['prize'], emoji=giveaway.get('emoji', ''), entrants=giveaway['entrants'], **giveaway['kwargs'])
        # Poll loop: check deadlines roughly every 15 seconds.
        while True:
            try:
                (await self.check_giveaways())
            except Exception as exc:
                log.error('Exception in giveaway loop: ', exc_info=exc)
            (await asyncio.sleep(15))

    def cog_unload(self) -> None:
        """Tear down dev-env hook, background loop and HTTP session."""
        with contextlib.suppress(Exception):
            self.bot.remove_dev_env_value('giveaways')
        self.giveaway_bgloop.cancel()
        asyncio.create_task(self.session.close())

    async def check_giveaways(self) -> None:
        """Draw winners for every giveaway whose deadline has passed."""
        to_clear = []
        for (msgid, giveaway) in self.giveaways.items():
            if (giveaway.endtime < datetime.now(timezone.utc)):
                (await self.draw_winner(giveaway))
                to_clear.append(msgid)
                gw = (await self.config.custom(GIVEAWAY_KEY, giveaway.guildid, str(msgid)).all())
                gw['ended'] = True
                (await self.config.custom(GIVEAWAY_KEY, giveaway.guildid, str(msgid)).set(gw))
        # Delete after iteration to avoid mutating the dict while looping.
        for msgid in to_clear:
            del self.giveaways[msgid]

    async def draw_winner(self, giveaway: Giveaway):
        """Pick winner(s), edit the giveaway message and announce/notify."""
        guild = self.bot.get_guild(giveaway.guildid)
        if (guild is None):
            return
        channel_obj = guild.get_channel(giveaway.channelid)
        if (channel_obj is None):
            return
        winners = giveaway.draw_winner()
        winner_objs = None
        if (winners is None):
            txt = 'Not enough entries to roll the giveaway.'
        else:
            winner_objs = []
            txt = ''
            for winner in winners:
                winner_obj = guild.get_member(winner)
                if (winner_obj is None):
                    txt += f'''{winner} (Not Found)
'''
                else:
                    txt += f'''{winner_obj.mention}
'''
                    winner_objs.append(winner_obj)
        msg = channel_obj.get_partial_message(giveaway.messageid)
        # Re-read the configured winner count (fall back to 1).
        winners = (giveaway.kwargs.get('winners', 1) or 1)
        embed = discord.Embed(title=f"{(f'{winners}x ' if (winners > 1) else '')}{giveaway.prize}", description=f'''Winner(s):
{txt}''', color=(await self.bot.get_embed_color(channel_obj)), timestamp=datetime.now(timezone.utc))
        embed.set_footer(text=f'Reroll: {(await self.bot.get_prefix(msg))[(- 1)]}gw reroll {giveaway.messageid} | Ended at')
        try:
            (await msg.edit(content=' Giveaway Ended ', embed=embed))
        except (discord.NotFound, discord.Forbidden) as exc:
            # Message gone or unreachable: clean up state and bail out.
            log.error('Error editing giveaway message: ', exc_info=exc)
            async with self.config.custom(GIVEAWAY_KEY, giveaway.guildid, int(giveaway.messageid)).entrants() as entrants:
                entrants = [x for x in entrants if (x != winner)]
            del self.giveaways[giveaway.messageid]
            gw = (await self.config.custom(GIVEAWAY_KEY, giveaway.guildid, str(giveaway.messageid)).all())
            gw['ended'] = True
            (await self.config.custom(GIVEAWAY_KEY, giveaway.guildid, str(giveaway.messageid)).set(gw))
            return
        if giveaway.kwargs.get('announce'):
            announce_embed = discord.Embed(title='Giveaway Ended', description=f'''Congratulations to the {(f'{str(winners)} ' if (winners > 1) else '')}winner{('s' if (winners > 1) else '')} of [{giveaway.prize}]({msg.jump_url}).
{txt}''', color=(await self.bot.get_embed_color(channel_obj)))
            announce_embed.set_footer(text=f'Reroll: {(await self.bot.get_prefix(msg))[(- 1)]}gw reroll {giveaway.messageid}')
            (await channel_obj.send(content=(('Congratulations ' + ','.join([x.mention for x in winner_objs])) if (winner_objs is not None) else ''), embed=announce_embed))
        if channel_obj.permissions_for(guild.me).manage_messages:
            (await msg.clear_reactions())
        if (winner_objs is not None):
            if giveaway.kwargs.get('congratulate', False):
                for winner in winner_objs:
                    with contextlib.suppress(discord.Forbidden):
                        (await winner.send(f'Congratulations! You won {giveaway.prize} in the giveaway on {guild}!'))
            # NOTE(review): rebinding the context-manager variable here does
            # not persist changes back to config — looks like a latent bug
            # upstream; left untouched.
            async with self.config.custom(GIVEAWAY_KEY, giveaway.guildid, int(giveaway.messageid)).entrants() as entrants:
                entrants = [x for x in entrants if (x != winner)]
        return

    # NOTE(review): the next three lines look like stripped decorators
    # (e.g. @commands.hybrid_group / @commands.bot_has_permissions /
    # @commands.admin_or_permissions) — restore from upstream.
    _group(aliases=['gw'])
    _has_permissions(add_reactions=True, embed_links=True)
    _permissions(manage_guild=True)
    async def giveaway(self, ctx: commands.Context):
        """Base command group for giveaway management."""
        ()
    _commands.describe(channel='The channel in which to start the giveaway.', time='The time the giveaway should last.', prize='The prize for the giveaway.')
    async def start(self, ctx: commands.Context, channel: Optional[discord.TextChannel], time: TimedeltaConverter(default_unit='minutes'), *, prize: str):
        """Start a simple giveaway with default settings."""
        channel = (channel or ctx.channel)
        end = (datetime.now(timezone.utc) + time)
        embed = discord.Embed(title=f'{prize}', description=f'''
React with to enter
**Hosted by:** {ctx.author.mention}
Ends: <t:{int(end.timestamp())}:R>''', color=(await ctx.embed_color()))
        msg = (await channel.send(embed=embed))
        giveaway_obj = Giveaway(ctx.guild.id, channel.id, msg.id, end, prize, '', **{'congratulate': True, 'notify': True})
        if ctx.interaction:
            (await ctx.send('Giveaway created!', ephemeral=True))
        self.giveaways[msg.id] = giveaway_obj
        # NOTE(review): the reaction emoji literal appears stripped here.
        (await msg.add_reaction(''))
        giveaway_dict = deepcopy(giveaway_obj.__dict__)
        giveaway_dict['endtime'] = giveaway_dict['endtime'].timestamp()
        (await self.config.custom(GIVEAWAY_KEY, str(ctx.guild.id), str(msg.id)).set(giveaway_dict))
    ()
    _commands.describe(msgid='The message ID of the giveaway to end.')
    async def reroll(self, ctx: commands.Context, msgid: int):
        """Reroll an ended giveaway by message ID."""
        data = (await self.config.custom(GIVEAWAY_KEY, ctx.guild.id).all())
        if (str(msgid) not in data):
            return (await ctx.send('Giveaway not found.'))
        if (msgid in self.giveaways):
            return (await ctx.send(f'Giveaway already running. Please wait for it to end or end it via `{ctx.clean_prefix}gw end {msgid}`.'))
        giveaway_dict = data[str(msgid)]
        giveaway_dict['endtime'] = datetime.fromtimestamp(giveaway_dict['endtime']).replace(tzinfo=timezone.utc)
        giveaway = Giveaway(**giveaway_dict)
        try:
            (await self.draw_winner(giveaway))
        except GiveawayExecError as e:
            (await ctx.send(e.message))
        else:
            (await ctx.tick())
    ()
    _commands.describe(msgid='The message ID of the giveaway to end.')
    async def end(self, ctx: commands.Context, msgid: int):
        """End a running giveaway early and draw its winner."""
        if (msgid in self.giveaways):
            if (self.giveaways[msgid].guildid != ctx.guild.id):
                return (await ctx.send('Giveaway not found.'))
            (await self.draw_winner(self.giveaways[msgid]))
            del self.giveaways[msgid]
            gw = (await self.config.custom(GIVEAWAY_KEY, ctx.guild.id, str(msgid)).all())
            gw['ended'] = True
            (await self.config.custom(GIVEAWAY_KEY, ctx.guild.id, str(msgid)).set(gw))
            (await ctx.tick())
        else:
            (await ctx.send('Giveaway not found.'))
    # NOTE(review): stripped decorator — likely @giveaway.command(aliases=['adv']).
    (aliases=['adv'])
    _commands.describe(arguments='The arguments for the giveaway. See `[p]gw explain` for more info.')
    async def advanced(self, ctx: commands.Context, *, arguments: Args):
        """Start a giveaway with the full flag-based argument set."""
        prize = arguments['prize']
        duration = arguments['duration']
        channel = (arguments['channel'] or ctx.channel)
        winners = (arguments.get('winners', 1) or 1)
        end = (datetime.now(timezone.utc) + duration)
        description = (arguments['description'] or '')
        if arguments['show_requirements']:
            description += '\n\n**Requirements**:'
            # Append every requirement-style flag that is set, excluding
            # presentation/behavior flags.
            for kwarg in (set(arguments) - {'show_requirements', 'prize', 'duration', 'channel', 'winners', 'description', 'congratulate', 'notify', 'announce', 'emoji', 'thumbnail', 'image'}):
                if arguments[kwarg]:
                    description += f'''
**{kwarg.title()}:** {arguments[kwarg]}'''
        emoji = (arguments['emoji'] or '')
        if isinstance(emoji, int):
            emoji = self.bot.get_emoji(emoji)
        embed = discord.Embed(title=f"{(f'{winners}x ' if (winners > 1) else '')}{prize}", description=f'''{description}
React with {emoji} to enter
**Hosted by:** {ctx.author.mention}
Ends: <t:{int(end.timestamp())}:R>''', color=(await ctx.embed_color()))
        if (arguments['image'] is not None):
            embed.set_image(url=arguments['image'])
        if (arguments['thumbnail'] is not None):
            embed.set_thumbnail(url=arguments['thumbnail'])
        txt = '\n'
        if arguments['ateveryone']:
            txt += ' '
        if arguments['athere']:
            txt += ' '
        if arguments['mentions']:
            for mention in arguments['mentions']:
                role = ctx.guild.get_role(mention)
                if (role is not None):
                    txt += f'{role.mention} '
        msg = (await channel.send(content=f' Giveaway {txt}', embed=embed, allowed_mentions=discord.AllowedMentions(roles=bool(arguments['mentions']), everyone=bool(arguments['ateveryone']))))
        if ctx.interaction:
            (await ctx.send('Giveaway created!', ephemeral=True))
        giveaway_obj = Giveaway(ctx.guild.id, channel.id, msg.id, end, prize, str(emoji), **{k: v for (k, v) in arguments.items() if (k not in ['prize', 'duration', 'channel', 'emoji'])})
        self.giveaways[msg.id] = giveaway_obj
        (await msg.add_reaction(emoji))
        giveaway_dict = deepcopy(giveaway_obj.__dict__)
        giveaway_dict['endtime'] = giveaway_dict['endtime'].timestamp()
        (await self.config.custom(GIVEAWAY_KEY, str(ctx.guild.id), str(msg.id)).set(giveaway_dict))
    ()
    _commands.describe(msgid='The message ID of the giveaway to edit.')
    async def entrants(self, ctx: commands.Context, msgid: int):
        """List entrants (with entry counts) for a running giveaway."""
        if (msgid not in self.giveaways):
            return (await ctx.send('Giveaway not found.'))
        giveaway = self.giveaways[msgid]
        if (not giveaway.entrants):
            return (await ctx.send('No entrants.'))
        count = {}
        for entrant in giveaway.entrants:
            if (entrant not in count):
                count[entrant] = 1
            else:
                count[entrant] += 1
        msg = ''
        for (userid, count_int) in count.items():
            user = ctx.guild.get_member(userid)
            msg += (f'''{user.mention} ({count_int})
''' if user else f'''<{userid}> ({count_int})
''')
        embeds = []
        for page in pagify(msg, delims=['\n'], page_length=800):
            embed = discord.Embed(title='Entrants', description=page, color=(await ctx.embed_color()))
            embed.set_footer(text=f'Total entrants: {len(count)}')
            embeds.append(embed)
        if (len(embeds) == 1):
            return (await ctx.send(embed=embeds[0]))
        return (await menu(ctx, embeds, DEFAULT_CONTROLS))
    ()
    _commands.describe(msgid='The message ID of the giveaway to edit.')
    async def info(self, ctx: commands.Context, msgid: int):
        """Show the settings and entry count of a running giveaway."""
        if (msgid not in self.giveaways):
            return (await ctx.send('Giveaway not found.'))
        giveaway = self.giveaways[msgid]
        winners = (giveaway.kwargs.get('winners', 1) or 1)
        msg = f'''**Entrants:**: {len(giveaway.entrants)}
**End**: <t:{int(giveaway.endtime.timestamp())}:R>
'''
        for kwarg in giveaway.kwargs:
            if giveaway.kwargs[kwarg]:
                msg += f'''**{kwarg.title()}:** {giveaway.kwargs[kwarg]}
'''
        embed = discord.Embed(title=f"{(f'{winners}x ' if (winners > 1) else '')}{giveaway.prize}", color=(await ctx.embed_color()), description=msg)
        embed.set_footer(text=f'Giveaway ID #{msgid}')
        (await ctx.send(embed=embed))
    # NOTE(review): stripped decorator — likely @giveaway.command(name='list').
    (name='list')
    async def _list(self, ctx: commands.Context):
        """List all running giveaways in this guild."""
        if (not self.giveaways):
            return (await ctx.send('No giveaways are running.'))
        giveaways = {x: self.giveaways[x] for x in self.giveaways if (self.giveaways[x].guildid == ctx.guild.id)}
        if (not giveaways):
            return (await ctx.send('No giveaways are running.'))
        msg = ''.join((f'''{msgid}: [{giveaways[msgid].prize}](
''' for (msgid, value) in giveaways.items()))
        embeds = []
        for page in pagify(msg, delims=['\n']):
            embed = discord.Embed(title=f'Giveaways in {ctx.guild}', description=page, color=(await ctx.embed_color()))
            embeds.append(embed)
        if (len(embeds) == 1):
            return (await ctx.send(embed=embeds[0]))
        return (await menu(ctx, embeds, DEFAULT_CONTROLS))
    ()
    async def explain(self, ctx: commands.Context):
        """Send the long-form help text for the advanced flags."""
        # NOTE(review): this single-quoted literal spans a raw newline —
        # mangled extraction; it is a syntax error as-is. TODO restore.
        msg = '\n Giveaway advanced creation.\n NOTE: Giveaways are checked every 20 seconds, this means that the giveaway may end up being slightly longer than the specified duration.\n\n Giveaway advanced contains many different flags that can be used to customize the giveaway.\n The flags are as follows:\n\n Required arguments:\n `--prize`: The prize to be won.\n\n Required Mutual Exclusive Arguments:\n You must one ONE of these, but not both:\n `--duration`: The duration of the giveaway. Must be in format such as `2d3h30m`.\n `--end`: The end time of the giveaway. Must be in format such as `2021-12-23T30:00:00.000Z`, `tomorrow at 3am`, `in 4 hours`. Defaults to UTC if no timezone is provided.\n\n Optional arguments:\n `--channel`: The channel to post the giveaway in. Will default to this channel if not specified.\n `--emoji`: The emoji to use for the giveaway.\n `--roles`: Roles that the giveaway will be restricted to. If the role contains a space, use their ID.\n `--multiplier`: Multiplier for those in specified roles. Must be a positive number.\n `--multi-roles`: Roles that will receive the multiplier. If the role contains a space, use their ID.\n `--cost`: Cost of credits to enter the giveaway. Must be a positive number.\n `--joined`: How long the user must be a member of the server for to enter the giveaway. Must be a positive number of days.\n `--created`: How long the user has been on discord for to enter the giveaway. Must be a positive number of days.\n `--blacklist`: Blacklisted roles that cannot enter the giveaway. If the role contains a space, use their ID.\n `--winners`: How many winners to draw. Must be a positive number.\n `--mentions`: Roles to mention in the giveaway notice.\n `--description`: Description of the giveaway.\n `--image`: Image URL to use for the giveaway embed.\n `--thumbnail`: Thumbnail URL to use for the giveaway embed.\n\n Setting Arguments:\n `--congratulate`: Whether or not to congratulate the winner. 
Not passing will default to off.\n `--notify`: Whether or not to notify a user if they failed to enter the giveaway. Not passing will default to off.\n `--multientry`: Whether or not to allow multiple entries. Not passing will default to off.\n `--announce`: Whether to post a seperate message when the giveaway ends. Not passing will default to off.\n `--ateveryone`: Whether to tag in the giveaway notice.\n `--show-requirements`: Whether to show the requirements of the giveaway.\n\n\n 3rd party integrations:\n See `[p]gw integrations` for more information.\n\n Examples:\n `{prefix}gw advanced --prize A new sword --duration 1h30m --restrict Role ID --multiplier 2 --multi-roles RoleID RoleID2`\n `{prefix}gw advanced --prize A better sword --duration 2h3h30m --channel channel-name --cost 250 --joined 50 --congratulate --notify --multientry --level-req 100`'.format(prefix=ctx.clean_prefix)
        embed = discord.Embed(title='Giveaway Advanced Explanation', description=msg, color=(await ctx.embed_color()))
        (await ctx.send(embed=embed))
    ()
    async def integrations(self, ctx: commands.Context):
        """Send help text for the 3rd-party integration flags."""
        msg = "\n 3rd party integrations for giveaways.\n\n You can use these integrations to integrate giveaways with other 3rd party services.\n\n `--level-req`: Integrate with the Red Level system Must be Fixator's leveler.\n `--rep-req`: Integrate with the Red Level Rep system Must be Fixator's leveler.\n `--tatsu-level`: Integrate with the Tatsumaki's levelling system, must have a valid Tatsumaki API key set.\n `--tatsu-rep`: Integrate with the Tatsumaki's rep system, must have a valid Tatsumaki API key set.\n `--mee6-level`: Integrate with the MEE6 levelling system.\n `--amari-level`: Integrate with the Amari's levelling system.\n `--amari-weekly-xp`: Integrate with the Amari's weekly xp system.".format(prefix=ctx.clean_prefix)
        if (await self.bot.is_owner(ctx.author)):
            msg += "\n **API Keys**\n Tatsu's API key can be set with the following command (You must find where this key is yourself): `{prefix}set api tatsumaki authorization <key>`\n Amari's API key can be set with the following command (Apply [here]( `{prefix}set api amari authorization <key>`\n\n\n For any integration suggestions, suggest them via the [#support-flare-cogs]( channel on the support server or [flare-cogs]( github.".format(prefix=ctx.clean_prefix)
        embed = discord.Embed(title='3rd Party Integrations', description=msg, color=(await ctx.embed_color()))
        (await ctx.send(embed=embed))
    # NOTE(review): stripped decorator — likely @commands.Cog.listener();
    # the bare ".listener()" below is a syntax error as-is.
    .listener()
    async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent):
        """Handle giveaway entry reactions, enforcing the giveaway's emoji."""
        if (payload.user_id == self.bot.user.id):
            return
        if (payload.message_id in self.giveaways):
            giveaway = self.giveaways[payload.message_id]
            if (payload.emoji.is_custom_emoji() and (str(payload.emoji) != giveaway.emoji)):
                return
            elif (payload.emoji.is_unicode_emoji() and (str(payload.emoji) != giveaway.emoji)):
                return
            try:
                (await giveaway.add_entrant(payload.member, bot=self.bot, session=self.session))
            except GiveawayEnterError as e:
                # Optionally DM the user why their entry was rejected.
                if giveaway.kwargs.get('notify', False):
                    with contextlib.suppress(discord.Forbidden):
                        (await payload.member.send(e.message))
                return
            except GiveawayExecError as e:
                log.exception('Error while adding user to giveaway', exc_info=e)
                return
            (await self.config.custom(GIVEAWAY_KEY, payload.guild_id, payload.message_id).entrants.set(self.giveaways[payload.message_id].entrants))
class WafFirewallVersion(ModelNormal):
    """Generated OpenAPI model for a WAF firewall version response.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below look like decorators stripped of their ``@`` (presumably
    ``@cached_property`` and ``@convert_js_args_to_python_args`` from the
    generator's model_utils, with ``_from_openapi_data`` also missing a
    ``@classmethod``) — confirm against the generator template before
    restoring; left untouched here.
    """
    # No enum constraints or validations on this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        # Deferred import to break circular model references.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Map attribute name -> tuple of accepted types."""
        lazy_import()
        return {'data': (WafFirewallVersionData,)}
    _property
    def discriminator():
        """No polymorphic discriminator on this model."""
        return None
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data into an instance (read-only vars allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, user code may not set read-only vars.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptionPlotoptionsSeriesDragdropDraghandle(Options):
    """Highcharts ``plotOptions.series.dragDrop.dragHandle`` options.

    NOTE(review): each option is a getter/setter pair sharing one name --
    the ``@property`` / setter decorators appear to have been stripped by
    the pipeline (as written, the later ``def`` replaces the earlier one).
    The value passed to ``_config_get`` is presumably the option default;
    confirm against the generator/Highcharts docs.
    """
    def className(self):
        """Getter for the ``className`` option."""
        return self._config_get('highcharts-drag-handle')
    def className(self, text: str):
        """Setter for the ``className`` option."""
        self._config(text, js_type=False)
    def color(self):
        """Getter for the ``color`` option."""
        return self._config_get('#fff')
    def color(self, text: str):
        """Setter for the ``color`` option."""
        self._config(text, js_type=False)
    def lineColor(self):
        """Getter for the ``lineColor`` option."""
        return self._config_get('rgba(0, 0, 0, 0.6)')
    def lineColor(self, text: str):
        """Setter for the ``lineColor`` option."""
        self._config(text, js_type=False)
    def lineWidth(self):
        """Getter for the ``lineWidth`` option."""
        return self._config_get(1)
    def lineWidth(self, num: float):
        """Setter for the ``lineWidth`` option."""
        self._config(num, js_type=False)
    def zIndex(self):
        """Getter for the ``zIndex`` option."""
        return self._config_get(901)
    def zIndex(self, num: float):
        """Setter for the ``zIndex`` option."""
        self._config(num, js_type=False)
class Velobike(BikeShareSystem):
    """Feed parser for the Velobike (Kazakhstan) bike-share system."""

    sync = True

    meta = {'system': 'Velobike', 'company': ['Velobike.kz, LLP', 'Smoove']}

    def __init__(self, tag, feed_url, meta):
        super(Velobike, self).__init__(tag, meta)
        self.feed_url = feed_url

    def update(self, scraper=None):
        """Fetch the JSON feed and rebuild ``self.stations``."""
        if scraper is None:
            scraper = utils.PyBikesScraper()

        payload = json.loads(scraper.request(self.feed_url, ssl_verification=False))
        # Some deployments wrap the station list in a {'data': [...]} envelope.
        if isinstance(payload, dict):
            payload = payload['data']

        # Stations flagged as deleted/hidden/sales-point/inactive are skipped.
        skip_flags = ('is_deleted', 'is_hidden', 'is_sales', 'is_not_active')

        parsed = []
        for record in payload:
            if any(record[flag] == '1' for flag in skip_flags):
                continue
            extra = {
                'uid': int(record['id']),
                'slots': int(record['total_slots']),
                'address': record['address_ru'],
            }
            parsed.append(BikeShareStation(
                record['name_ru'],
                float(record['lat']),
                float(record['lng']),
                int(record['avl_bikes']),
                int(record['free_slots']),
                extra,
            ))
        self.stations = parsed
def process_auto(text):
    """Split *text* into '|'-separated segments, then split each segment
    by detected language ('zh', 'ja', 'en').

    Returns a pair ``(_text, _lang)`` of parallel nested lists: one list
    of sentences per segment, and the matching upper-cased language codes
    (Japanese is remapped from 'ja' to 'JP').
    """
    # FIX: renamed loop variable from `slice`, which shadowed the builtin.
    _text, _lang = [], []
    for segment in text.split('|'):
        if segment == '':
            continue
        temp_text, temp_lang = [], []
        sentences_list = split_by_language(segment, target_languages=['zh', 'ja', 'en'])
        for sentence, lang in sentences_list:
            if sentence == '':
                continue
            temp_text.append(sentence)
            if lang == 'ja':
                # Downstream consumers expect 'JP' rather than ISO 'JA'.
                lang = 'jp'
            temp_lang.append(lang.upper())
        _text.append(temp_text)
        _lang.append(temp_lang)
    return (_text, _lang)
def train(env):
    """Run a short random-action rollout with epoch-level stats logging
    registered for both tensorboard and the console."""
    episode_count = 10
    steps_per_episode = 5

    # Register a tensorboard writer (with figure rendering) plus a console writer.
    tb_writer = LogStatsWriterTensorboard(log_dir='test_log', tensorboard_render_figure=True)
    register_log_stats_writer(tb_writer)
    register_log_stats_writer(LogStatsWriterConsole())

    env = LogStatsWrapper.wrap(env, logging_prefix='train')
    with SimpleStatsLoggingSetup(env):
        for _episode in range(episode_count):
            _ = env.reset()
            for _step in range(steps_per_episode):
                sampled_action = env.action_space.sample()
                (_, _, _, _) = env.step(sampled_action)
    # Query epoch-level statistics after the logging setup has completed.
    env.get_stats(LogStatsLevel.EPOCH)
    env.get_stats_value(BaseEnvEvents.reward, LogStatsLevel.EPOCH, name='mean')
def forward(apps, schema_editor):
    """Data migration: create one Bill per Reservation and re-point the
    reservation's payments and line items at it."""
    Reservation = apps.get_model('core', 'Reservation')
    BillLineItem = apps.get_model('core', 'BillLineItem')
    Payment = apps.get_model('core', 'Payment')
    Bill = apps.get_model('core', 'Bill')

    for reservation in Reservation.objects.all():
        bill = Bill.objects.create()
        reservation.bill = bill
        reservation.save()

        # Attach the reservation's payments to the new bill and stamp
        # them with the reservation's user.
        for payment in Payment.objects.filter(reservation=reservation):
            payment.bill = bill
            payment.user = reservation.user
            payment.save()

        for line_item in BillLineItem.objects.filter(reservation=reservation):
            line_item.bill = bill
            line_item.save()

        # Backdate the bill to when the reservation was created.
        bill.created_on = reservation.created
        bill.save()
def test_local_import_error(testdir):
    """A failing import inside a test body must surface as an ERROR and
    exit with a non-zero return code."""
    source = "\n\n def test_problem():\n import does_not_exist\n assert str(True) == 'True'\n "
    testdir.makepyfile(source)
    run = testdir.runpytest('-v')
    run.stdout.fnmatch_lines(['test_local_import_error.py::test_problem ... ERROR', 'ERROR: test_local_import_error.py::test_problem'])
    assert run.ret == 1
class InstantiateCvarTest(object):
    """Tests for ``instancer.instantiateCvar`` (partial and full instancing).

    NOTE(review): the leading ``.parametrize(...)`` looks like a stripped
    ``@pytest.mark.parametrize`` decorator for ``test_pin_and_drop_axis``
    -- confirm against the original test module.
    """
    .parametrize('location, expected', [pytest.param({'wght': (- 1.0)}, [500, (- 400), 150, 250], id='wght=-1.0'), pytest.param({'wdth': (- 1.0)}, [500, (- 400), 180, 200], id='wdth=-1.0'), pytest.param({'wght': (- 0.5)}, [500, (- 400), 165, 250], id='wght=-0.5'), pytest.param({'wdth': (- 0.3)}, [500, (- 400), 180, 235], id='wdth=-0.3')])
    def test_pin_and_drop_axis(self, varfont, location, expected):
        """Pinning one axis updates the 'cvt ' values and removes the
        pinned axis from every remaining cvar variation."""
        location = instancer.NormalizedAxisLimits(location)
        instancer.instantiateCvar(varfont, location)
        assert (list(varfont['cvt '].values) == expected)
        # No remaining cvar variation may still reference a pinned axis.
        pinned_axes = location.keys()
        assert (not any(((axis in t.axes) for t in varfont['cvar'].variations for axis in pinned_axes)))
    def test_full_instance(self, varfont):
        """Pinning every axis applies all deltas and drops 'cvar' entirely."""
        location = instancer.NormalizedAxisLimits(wght=(- 0.5), wdth=(- 0.5))
        instancer.instantiateCvar(varfont, location)
        assert (list(varfont['cvt '].values) == [500, (- 400), 165, 225])
        assert ('cvar' not in varfont)
def coulomb3d_10(ax, da, A, bx, db, B, R):
    """Machine-generated Coulomb-integral kernel; kept byte-identical
    because the x0..x11 common subexpressions depend on exact order.

    Assumed arguments (TODO confirm against the generator): ``ax``/``bx``
    Gaussian exponents and ``da``/``db`` contraction coefficients for
    centers ``A``/``B`` (3-vectors, possibly arrays over primitives);
    ``R`` the charge center. Returns a (3, 1) float array, summed over
    primitives via ``numpy.sum``.
    """
    result = numpy.zeros((3, 1), dtype=float)
    # x0/x1: total exponent and its reciprocal.
    x0 = (ax + bx)
    x1 = (x0 ** (- 1.0))
    # x2..x7: per-component offsets of R from the Gaussian product center.
    x2 = ((- x1) * ((ax * A[0]) + (bx * B[0])))
    x3 = (x2 + R[0])
    x4 = ((- x1) * ((ax * A[1]) + (bx * B[1])))
    x5 = (x4 + R[1])
    x6 = ((- x1) * ((ax * A[2]) + (bx * B[2])))
    x7 = (x6 + R[2])
    # Boys-function argument and values of order 0 and 1.
    x8 = (x0 * (((x3 ** 2) + (x5 ** 2)) + (x7 ** 2)))
    x9 = boys(0, x8)
    x10 = boys(1, x8)
    # Common prefactor including the Gaussian overlap exponential.
    x11 = ((((6. * da) * db) * x1) * numpy.exp(((((- ax) * bx) * x1) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    result[(0, 0)] = numpy.sum((x11 * ((x10 * x3) - (x9 * (x2 + A[0])))))
    result[(1, 0)] = numpy.sum((x11 * ((x10 * x5) - (x9 * (x4 + A[1])))))
    result[(2, 0)] = numpy.sum((x11 * ((x10 * x7) - (x9 * (x6 + A[2])))))
    return result
def show_score(dataset=None, model=None, method=None, prompt=None):
    """Print a colorized score table for the requested slice of
    ``baseline_json``, descending only as deep as keys are provided."""
    if dataset is None:
        raise ValueError('dataset cannot be None!')

    # Narrow the JSON progressively; a missing key stops the descent,
    # matching the original dataset > model > method > prompt precedence.
    json_data = baseline_json[dataset]
    if model is not None:
        json_data = json_data[model]
        if method is not None:
            json_data = json_data[method]
            if prompt is not None:
                json_data = json_data[prompt]

    table_scores = ColorTable(theme=MYTHEME)
    table_scores.field_names = HEADER
    add_scores_to_table(table_scores, json_data, dataset, model, method, prompt)
    # Sort rows by the aggregate 'all' column, best first.
    table_scores.sortby = 'all'
    table_scores.reversesort = True
    print(table_scores)
class SolidQCReporter(QCReporter):
    """QC reporter for SOLiD sequencing data (fragment or paired-end runs).

    Locates the SOLiD_preprocess_filter stats file in the run directory,
    auto-detects fragment vs paired-end mode, and produces an HTML report
    plus a verification of the expected filter/QC output files.
    """

    def __init__(self, dirn, data_format=None, qc_dir='qc', regex_pattern=None, version=None):
        """Set up the reporter.

        Arguments:
          dirn: top-level directory holding the run outputs
          data_format: 'solid' or 'solid_paired_end'; auto-detected from
            which stats file is present when None
          qc_dir: name of the subdirectory holding QC outputs
          regex_pattern: optional pattern restricting primary data files
          version: QC pipeline version, passed through to QCReporter
        """
        self.__stats = None
        paired_end = False
        # Fragment runs produce SOLiD_preprocess_filter.stats; paired-end
        # runs produce SOLiD_preprocess_filter_paired.stats.
        stats_file = os.path.join(os.path.abspath(dirn), 'SOLiD_preprocess_filter.stats')
        if (not os.path.exists(stats_file)):
            stats_file = os.path.join(os.path.abspath(dirn), 'SOLiD_preprocess_filter_paired.stats')
            if os.path.exists(stats_file):
                paired_end = True
            else:
                stats_file = None
                logging.error(("Can't find stats file in %s" % dirn))
        if (data_format is None):
            # Infer the data format from the stats file found above.
            if (not paired_end):
                data_format = 'solid'
            else:
                data_format = 'solid_paired_end'
        elif (data_format == 'solid'):
            paired_end = False
        elif (data_format == 'solid_paired_end'):
            paired_end = True
        else:
            logging.error(("Ignoring unrecognised format '%s'" % data_format))
        # BUGFIX: previously passed version=None here, silently discarding
        # the caller-supplied version argument.
        QCReporter.__init__(self, dirn, data_format=data_format, qc_dir=qc_dir, regex_pattern=regex_pattern, version=version)
        self.__paired_end = paired_end
        # Register one sample per primary data file.
        primary_data = self.getPrimaryDataFiles()
        for data in primary_data:
            sample = strip_ngs_extensions(os.path.basename(data[0]))
            if self.__paired_end:
                # Paired-end sample names drop the '_F3' suffix.
                sample = sample.replace('_F3', '')
            self.addSample(SolidQCSample(sample, self.qc_dir, self.__paired_end))
            print(("Processing outputs for sample '%s'" % sample))
        # Load filtering statistics (if a stats file was found).
        if (stats_file and os.path.exists(stats_file)):
            self.__stats = TabFile.TabFile(stats_file, first_line_is_header=True)
            if self.__paired_end:
                try:
                    # Normalise file names so they match sample names.
                    for line in self.__stats:
                        line['File'] = line['File'].replace('_paired', '')
                except KeyError:
                    logging.error(('Failed to process stats file %s' % stats_file))
                    self.__stats = TabFile.TabFile()
        else:
            logging.error(("Can't find stats file %s" % stats_file))
            self.__stats = TabFile.TabFile()
        # Sanity-check the expected number of boxplots/screens per sample.
        for sample in self.samples:
            if self.__paired_end:
                if (len(sample.boxplots()) != 4):
                    logging.warning(('%s: wrong number of boxplots' % sample.name))
            elif (len(sample.boxplots()) != 2):
                logging.warning(('%s: wrong number of boxplots' % sample.name))
            if (len(sample.screens()) != 3):
                logging.warning(('%s: wrong number of screens' % sample.name))

    def report(self):
        """Write the HTML summary report to <dirn>/<report_base_name>.html."""
        self.html.add(('<p>%d samples in %s</p>' % (len(self.samples), self.dirn)))
        self.html.add("<table class='summary'>")
        # Header row(s): paired-end runs report lenient and strict filtering.
        if (not self.__paired_end):
            self.html.add('<tr><th>Sample</th><th>Reads</th><th>Reads after filter</th><th># removed</th><th>% removed</th></tr>')
        else:
            self.html.add('<tr><th colspan=2> </th><th colspan=3>Lenient filtering</th><th colspan=3>Strict filtering</th></tr>')
            self.html.add('<tr><th>Sample</th><th>Reads</th><th>Reads after filter</th><th># removed</th><th>% removed</th><th>Reads after filter</th><th># removed</th><th>% removed</th></tr>')
        for sample in self.samples:
            try:
                stats = self.__stats.lookup('File', sample.name)[0]
            except IndexError:
                # No stats line for this sample: fill the columns with
                # 'n/a' and try counting reads directly from the csfasta.
                stats = {}
                for i in ('Reads', 2, 3, 4, 5, 6, 7):
                    stats[i] = 'n/a'
                csfasta = ('%s.csfasta' % sample.name)
                print(('Attempting to getting read count from %s' % csfasta))
                nreads = count_reads(csfasta)
                if (nreads is not None):
                    stats['Reads'] = nreads
                else:
                    stats['Reads'] = '?'
            self.html.add('<tr>')
            self.html.add(("<td><a href='#%s'>%s</a></td>" % (sample.name, sample.name)))
            # Columns 2-4 are the (lenient) filter stats; 5-7 are the
            # strict filter stats for paired-end runs.
            self.html.add(('<td>%s</td>' % stats['Reads']))
            self.html.add(('<td>%s</td>' % stats[2]))
            self.html.add(('<td>%s</td>' % stats[3]))
            self.html.add(('<td>%s</td>' % stats[4]))
            if self.__paired_end:
                self.html.add(('<td>%s</td>' % stats[5]))
                self.html.add(('<td>%s</td>' % stats[6]))
                self.html.add(('<td>%s</td>' % stats[7]))
            self.html.add('</tr>')
        self.html.add('</table>')
        if self.__paired_end:
            self.html.add('<p>Number of reads are the sum of F3 and F5 reads</p>')
            self.html.add('<p>"Lenient filtering" filters each F3/F5 read pair only on the quality of the F3 reads</p>')
            self.html.add('<p>"Strict filtering" filters each F3/F5 read pair on the quality of both F3 and F5 reads</p>')
        for sample in self.samples:
            sample.report(self.html)
        self.html.write(os.path.join(self.dirn, ('%s.html' % self.report_base_name)))

    def verify(self):
        """Verify statistics and expected output files for every sample.

        Returns True only when the base QCReporter verification passes,
        every sample has a stats line, and all expected filtered/unfiltered
        data files exist for the run type.
        """
        if self.__paired_end:
            print('Verifying output of paired-end QC run')
        else:
            print('Verifying output of fragment QC run')
        status = QCReporter.verify(self)
        # Every sample must have a line in the stats file.
        if (self.__stats is None):
            logging.warning('No statistics found')
            status = False
        else:
            for sample in self.samples:
                try:
                    self.__stats.lookup('File', sample.name)[0]
                except IndexError:
                    logging.warning(('%s: missing statistics' % sample.name))
                    status = False
        for sample in self.samples:
            if (not self.__paired_end):
                # Fragment run: one fastq plus filtered csfasta/qual/fastq.
                fastq = os.path.join(self.dirn, ('%s.fastq' % sample.name))
                if (not os.path.exists(fastq)):
                    logging.warning(('%s: missing Fastq file' % sample.name))
                    status = False
                filtered_csfasta = os.path.join(self.dirn, ('%s_T_F3.csfasta' % sample.name))
                filtered_qual = os.path.join(self.dirn, ('%s_T_F3_QV.qual' % sample.name))
                filtered_fastq = os.path.join(self.dirn, ('%s_T_F3.fastq' % sample.name))
                if (not (os.path.exists(filtered_csfasta) and os.path.exists(filtered_qual) and os.path.exists(filtered_fastq))):
                    logging.warning(('%s: missing one or more filtered data files' % sample.name))
                    status = False
            else:
                # Paired-end run: combined plus per-tag (F3/F5) fastqs,
                # filtered csfasta/qual pairs, and both lenient and strict
                # filter outputs.
                fastq = os.path.join(self.dirn, ('%s_paired_F3_and_F5_filt.fastq' % sample.name))
                fastq_f3 = os.path.join(self.dirn, ('%s_paired_F3_and_F5_filt.F3.fastq' % sample.name))
                fastq_f5 = os.path.join(self.dirn, ('%s_paired_F3_and_F5_filt.F5.fastq' % sample.name))
                if (not (os.path.exists(fastq) and os.path.exists(fastq_f3) and os.path.exists(fastq_f5))):
                    logging.warning(('%s: missing one or more unfiltered Fastq files' % sample.name))
                    status = False
                filtered_csfasta_f3 = os.path.join(self.dirn, ('%s_F3_T_F3.csfasta' % sample.name))
                filtered_qual_f3 = os.path.join(self.dirn, ('%s_F3_T_F3_QV.qual' % sample.name))
                filtered_csfasta_f5 = os.path.join(self.dirn, ('%s_F5_T_F3.csfasta' % sample.name))
                filtered_qual_f5 = os.path.join(self.dirn, ('%s_F5_T_F3_QV.qual' % sample.name))
                if (not (os.path.exists(filtered_csfasta_f3) and os.path.exists(filtered_qual_f3) and os.path.exists(filtered_csfasta_f5) and os.path.exists(filtered_qual_f5))):
                    logging.warning(('%s: missing one or more filtered data files' % sample.name))
                    status = False
                lenient_fastq = os.path.join(self.dirn, ('%s_paired_F3_filt.fastq' % sample.name))
                lenient_fastq_f3 = os.path.join(self.dirn, ('%s_paired_F3_filt.F3.fastq' % sample.name))
                lenient_fastq_f5 = os.path.join(self.dirn, ('%s_paired_F3_filt.F5.fastq' % sample.name))
                if (not (os.path.exists(lenient_fastq) and os.path.exists(lenient_fastq_f3) and os.path.exists(lenient_fastq_f5))):
                    logging.warning(("%s: missing one or more 'lenient' Fastq files" % sample.name))
                    status = False
                strict_fastq = os.path.join(self.dirn, ('%s_paired_F3_and_F5_filt.fastq' % sample.name))
                strict_fastq_f3 = os.path.join(self.dirn, ('%s_paired_F3_and_F5_filt.F3.fastq' % sample.name))
                strict_fastq_f5 = os.path.join(self.dirn, ('%s_paired_F3_and_F5_filt.F5.fastq' % sample.name))
                if (not (os.path.exists(strict_fastq) and os.path.exists(strict_fastq_f3) and os.path.exists(strict_fastq_f5))):
                    logging.warning(("%s: missing one or more 'strict' Fastq files" % sample.name))
                    status = False
        return status
class DataLink(Html.Html):
    """HTML anchor component that offers its value as a downloadable file.

    NOTE(review): ``options`` returns ``super().options`` and looks like
    it should carry a ``@property`` decorator (stripped?) -- confirm
    against the original module.
    """
    name = 'Data link'
    # Base name used for the downloaded file.
    filename = 'Download'
    tag = 'a'
    _option_cls = OptText.OptionsLink
    def __init__(self, page: primitives.PageModel, text: str, value: Any, width: tuple, height: tuple, fmt: str, options: Optional[str], profile: Optional[Union[(bool, dict)]], verbose: bool=False):
        """Store the text/value payload and the download format *fmt*."""
        super(DataLink, self).__init__(page, {'text': text, 'value': value}, profile=profile, options=options, css_attrs={'width': width, 'height': height}, verbose=verbose)
        self.format = fmt
    def options(self) -> OptText.OptionsLink:
        """Component options (see class docstring re stripped decorator)."""
        return super().options
    def no_decoration(self):
        """Remove text decoration and list styling; returns self for chaining."""
        self.style.css.text_decoration = None
        self.style.css.list_style_type = None
        return self
    def __str__(self):
        """Render the anchor tag with download/type attributes."""
        self.page.properties.js.add_builders(self.refresh())
        return ('<%(tag)s %(attr)s href="#" download="%(filename)s.%(format)s" type="text/%(format)s">%(val)s</%(tag)s>' % {'filename': self.filename, 'attr': self.get_attrs(css_class_names=self.style.get_classes()), 'val': self.val['text'], 'tag': self.tag, 'format': self.format})
    def loading(self, status: bool=True, label: str=Default_html.TEMPLATE_LOADING_ONE_LINE, data: types.JS_DATA_TYPES=None):
        """Set the loading template; when *status* is True, build the
        component in 'loading' mode and return the JS string."""
        self.options.templateLoading = label
        if status:
            return self.build(data, options={'templateMode': 'loading'})
        return ''
    def error(self, status: bool=True, label: str=Default_html.TEMPLATE_ERROR_LINE, data: types.JS_DATA_TYPES=None) -> str:
        """Set the error template; when *status* is True, build the
        component in 'error' mode and return the JS string."""
        self.options.templateError = label
        if status:
            return self.build(data, options={'templateMode': 'error'})
        return ''
class MDataViewWidget(HasTraits):
    """Mixin with toolkit-independent behaviour for data view widgets:
    header visibility, selection type/mode/state syncing between traits
    and the toolkit control, and listener wiring.

    Toolkit backends implement the ``_get_control_*`` /
    ``_set_control_*`` / ``_observe_control_selection`` hooks.

    NOTE(review): the bare ``_property`` line before ``_get_selection``
    looks like a stripped decorator, and ``_selection_updating`` reads
    as a stripped ``@contextlib.contextmanager`` generator;
    ``selection_mode``, ``selection_type``, ``control``, ``visible`` and
    ``enabled`` are referenced but defined elsewhere -- confirm against
    the original module.
    """
    # The model providing the data to display.
    data_model = Instance(AbstractDataModel, allow_none=False)
    # Whether the column header is shown.
    header_visible = Bool(True)
    # Handlers for external drag/drop payloads.
    drop_handlers = List(Instance(IDropHandler, allow_none=False))
    # Public selection; backed by the private _selection list.
    selection = Property(observe='_selection.items')
    # Exporters available for exporting data.
    exporters = List(Instance(AbstractDataExporter))
    # Guard flag preventing trait<->control selection feedback loops.
    _selection_updating_flag = Bool()
    # Selection as a list of (row_index, column_index) tuples.
    _selection = List(Tuple)
    def _header_visible_updated(self, event):
        """Push header_visible trait changes down to the control."""
        if (self.control is not None):
            self._set_control_header_visible(event.new)
    def _get_control_header_visible(self):
        """Toolkit hook: query header visibility on the control."""
        raise NotImplementedError()
    def _set_control_header_visible(self, control_header_visible):
        """Toolkit hook: set header visibility on the control."""
        raise NotImplementedError()
    def _selection_type_updated(self, event):
        """Push selection_type changes to the control and clear selection."""
        if (self.control is not None):
            self._set_control_selection_type(event.new)
            self.selection = []
    def _get_control_selection_type(self):
        """Toolkit hook: query the control's selection type."""
        raise NotImplementedError()
    def _set_control_selection_type(self, selection_type):
        """Toolkit hook: set the control's selection type."""
        raise NotImplementedError()
    def _selection_mode_updated(self, event):
        """Push selection_mode changes to the control and clear selection."""
        if (self.control is not None):
            self._set_control_selection_mode(event.new)
            self.selection = []
    def _get_control_selection_mode(self):
        """Toolkit hook: query the control's selection mode."""
        raise NotImplementedError()
    def _set_control_selection_mode(self, selection_mode):
        """Toolkit hook: set the control's selection mode."""
        raise NotImplementedError()
    def _selection_updated(self, event):
        """Mirror trait-side selection changes onto the control, unless
        the change originated from the control itself."""
        if ((self.control is not None) and (not self._selection_updating_flag)):
            with self._selection_updating():
                self._set_control_selection(self.selection)
    def _get_control_selection(self):
        """Toolkit hook: read the control's current selection."""
        raise NotImplementedError()
    def _set_control_selection(self, selection):
        """Toolkit hook: apply *selection* to the control."""
        raise NotImplementedError()
    def _observe_control_selection(self, remove=False):
        """Toolkit hook: connect/disconnect control selection callbacks."""
        raise NotImplementedError()
    def _update_selection(self, *args, **kwargs):
        """Mirror control-side selection changes back into the trait."""
        if (not self._selection_updating_flag):
            with self._selection_updating():
                self._selection = self._get_control_selection()
    def create(self, parent=None):
        """Create the widget and apply current visibility/enabled state."""
        super().create(parent=parent)
        self.show(self.visible)
        self.enable(self.enabled)
    def _initialize_control(self):
        """Initialize the control from the current trait values."""
        logger.debug('Initializing DataViewWidget')
        super()._initialize_control()
        self._set_control_header_visible(self.header_visible)
        self._set_control_selection_mode(self.selection_mode)
        self._set_control_selection_type(self.selection_type)
        self._set_control_selection(self.selection)
    def _add_event_listeners(self):
        """Wire trait observers (UI dispatch) and control callbacks."""
        logger.debug('Adding DataViewWidget listeners')
        super()._add_event_listeners()
        self.observe(self._header_visible_updated, 'header_visible', dispatch='ui')
        self.observe(self._selection_type_updated, 'selection_type', dispatch='ui')
        self.observe(self._selection_mode_updated, 'selection_mode', dispatch='ui')
        self.observe(self._selection_updated, '_selection.items', dispatch='ui')
        if (self.control is not None):
            self._observe_control_selection()
    def _remove_event_listeners(self):
        """Unwire control callbacks first, then trait observers."""
        logger.debug('Removing DataViewWidget listeners')
        if (self.control is not None):
            self._observe_control_selection(remove=True)
        self.observe(self._header_visible_updated, 'header_visible', dispatch='ui', remove=True)
        self.observe(self._selection_type_updated, 'selection_type', dispatch='ui', remove=True)
        self.observe(self._selection_mode_updated, 'selection_mode', dispatch='ui', remove=True)
        self.observe(self._selection_updated, '_selection.items', dispatch='ui', remove=True)
        super()._remove_event_listeners()
    def _selection_updating(self):
        """Context manager body: set the re-entrancy guard for the
        duration of a selection sync (no-op if already updating)."""
        if self._selection_updating_flag:
            (yield)
        else:
            self._selection_updating_flag = True
            try:
                (yield)
            finally:
                self._selection_updating_flag = False
    _property
    def _get_selection(self):
        """Trait Property getter for ``selection``."""
        return self._selection
    def _set_selection(self, selection):
        """Trait Property setter: validate *selection* against the
        current selection_mode/selection_type before storing it."""
        # Mode constraints: 'none' forbids any selection, 'single' allows
        # at most one entry.
        if ((self.selection_mode == 'none') and (len(selection) != 0)):
            raise TraitError("Selection must be empty when selection_mode is 'none', got {!r}".format(selection))
        elif ((self.selection_mode == 'single') and (len(selection) > 1)):
            raise TraitError("Selection must have at most one element when selection_mode is 'single', got {!r}".format(selection))
        # Type constraints: row selections need empty column indices;
        # column selections need rows with children; otherwise both
        # indices are validated individually.
        if (self.selection_type == 'row'):
            for (row, column) in selection:
                if (column != ()):
                    raise TraitError("Column values must be () when selection_type is 'row', got {!r}".format(column))
                if (not self.data_model.is_row_valid(row)):
                    raise TraitError('Invalid row index {!r}'.format(row))
        elif (self.selection_type == 'column'):
            for (row, column) in selection:
                if (not (self.data_model.is_row_valid(row) and self.data_model.can_have_children(row) and (self.data_model.get_row_count(row) > 0))):
                    raise TraitError("Row values must have children when selection_type is 'column', got {!r}".format(column))
                if (not self.data_model.is_column_valid(column)):
                    raise TraitError('Invalid column index {!r}'.format(column))
        else:
            for (row, column) in selection:
                if (not self.data_model.is_row_valid(row)):
                    raise TraitError('Invalid row index {!r}'.format(row))
                if (not self.data_model.is_column_valid(column)):
                    raise TraitError('Invalid column index {!r}'.format(column))
        self._selection = selection
class Annotations(FlyteIdlEntity):
    """Wrapper around a Flyte annotations mapping (str -> str).

    BUGFIX: restored the ``@property`` and ``@classmethod`` decorators --
    ``to_flyte_idl`` accesses ``self.values.items()``, which only works
    when ``values`` is a property (not a bound method), and
    ``from_flyte_idl`` takes ``cls`` as its first parameter.
    """

    def __init__(self, values):
        """:param values: mapping of annotation key -> value"""
        self._values = values

    @property
    def values(self):
        """The underlying annotations mapping."""
        return self._values

    def to_flyte_idl(self):
        """Serialize to the protobuf ``Annotations`` message."""
        return _common_pb2.Annotations(values={k: v for (k, v) in self.values.items()})

    @classmethod
    def from_flyte_idl(cls, pb2_object):
        """Build an ``Annotations`` from its protobuf message."""
        return cls({k: v for (k, v) in pb2_object.values.items()})
class TestsAchromatic(util.ColorAsserts, unittest.TestCase):
    """Achromatic detection for the Lab color space."""

    def test_achromatic(self):
        """Zero/near-zero or undefined a/b channels are achromatic;
        clearly non-zero chroma is not."""
        # Exactly zero and near-zero chroma -> achromatic.
        self.assertEqual(Color('lab', [30, 0, 0]).is_achromatic(), True)
        self.assertEqual(Color('lab', [30, 1e-06, 0]).is_achromatic(), True)
        self.assertEqual(Color('lab', [NaN, 1e-05, 0]).is_achromatic(), True)
        # Undefined a/b with zero lightness -> achromatic.
        # (A duplicated identical assertion was removed here.)
        self.assertEqual(Color('lab', [0, NaN, NaN]).is_achromatic(), True)
        # Non-zero chroma components -> not achromatic.
        self.assertEqual(Color('lab', [0, 30, (- 40)]).is_achromatic(), False)
        self.assertEqual(Color('lab', [NaN, 0, (- 30)]).is_achromatic(), False)
        # Undefined chroma channels treated as achromatic.
        self.assertEqual(Color('lab', [30, NaN, 0]).is_achromatic(), True)
        self.assertEqual(Color('lab', [NaN, NaN, 0]).is_achromatic(), True)
class TestTraitSetEvent(unittest.TestCase):
    """str()/repr() of TraitSetEvent embed the concrete class name."""

    def test_trait_set_event_str_representation(self):
        """Base class renders as 'TraitSetEvent(removed=set(), added=set())'."""
        event = TraitSetEvent()
        expected = 'TraitSetEvent(removed=set(), added=set())'
        self.assertEqual(expected, str(event))
        self.assertEqual(expected, repr(event))

    def test_trait_set_event_subclass_str_representation(self):
        """Subclasses render with their own class name."""
        class DifferentName(TraitSetEvent):
            pass

        subclass_event = DifferentName()
        expected = 'DifferentName(removed=set(), added=set())'
        self.assertEqual(expected, str(subclass_event))
        self.assertEqual(expected, repr(subclass_event))
class SubscribableSchema(Schema):
    """GraphQL schema that speaks an Apollo-style graphql-ws protocol
    over a websocket-like ``socket`` object.

    NOTE(review): ``execute``/``subscribe`` take ``cls`` and the send /
    ack helpers take no ``self`` -- the ``@classmethod`` /
    ``@staticmethod`` decorators appear to have been stripped; confirm
    against the original module.
    """
    # GraphQL operation type -> schema field name.
    OPERATION_MAP = {OperationType.QUERY: 'query', OperationType.MUTATION: 'mutation', OperationType.SUBSCRIPTION: 'subscription'}
    async def execute(cls, socket: T):
        """Main receive loop: dispatch protocol messages until the socket
        fails, closes, or an unsupported message type arrives."""
        # Maps subscription id -> running asyncio task.
        subscription_router = {}
        while True:
            try:
                message = (await socket.receive())
            except Exception:
                # Receive failure: close and stop serving this socket.
                (await socket.close())
                return
            try:
                data = json.loads(message)
            except Exception as e:
                logging.error(e, exc_info=True)
                (await cls.send_connection_error(socket, e))
                continue
            (query_type, payload) = (data['type'], data.get('payload'))
            if (query_type == 'connection_init'):
                # Start the ack/keep-alive loop in the background.
                asyncio.ensure_future(cls.start_ack_loop(socket))
            elif (query_type == 'start'):
                id = data['id']
                (variables, query) = (payload['variables'], payload['query'])
                task = asyncio.ensure_future(cls.subscribe(socket, id, query, variables))
                subscription_router[id] = task
            elif (query_type == 'stop'):
                id = data['id']
                task = subscription_router.get(id)
                if task:
                    task.cancel()
                    del subscription_router[id]
            else:
                # NOTE(review): an unknown message type ends the whole
                # receive loop (running subscription tasks are left to
                # the event loop) -- confirm this is intended vs continue.
                (await cls.send_connection_error(socket, f'Unsupported message type {repr(query_type)}'))
                return
    async def subscribe(cls, socket, id, query, variables):
        """Run one subscription operation, streaming results as 'data'
        messages and finishing with a 'complete' message."""
        document = parse(query)
        for definition in document.definitions:
            if (not isinstance(definition, OperationDefinitionNode)):
                continue
            if (cls.OPERATION_MAP[definition.operation] not in cls.__fields__):
                (await cls.send_error(socket, id, 'This API does not support this operation'))
                break
            async for operation_result in cls._execute_operation(document, definition, variables, socket):
                try:
                    (await socket.send(json.dumps({'type': 'data', 'id': id, 'payload': operation_result}, cls=GraphQLEncoder)))
                except Exception as e:
                    logging.error(e, exc_info=True)
                    raise
            try:
                (await socket.send(json.dumps({'type': 'complete', 'id': id}, cls=GraphQLEncoder)))
            except Exception as e:
                logging.error(e, exc_info=True)
                raise
            # Only the first executable operation definition is processed.
            break
    async def send_error(socket, id, e):
        """Send an operation-level 'error' message for subscription *id*."""
        try:
            (await socket.send(json.dumps({'type': 'error', 'id': id, 'payload': {'errors': {'message': e}, 'data': None}}, cls=GraphQLEncoder)))
        except Exception as e:
            logging.error(e, exc_info=True)
            raise
    async def send_connection_error(socket, e):
        """Send a connection-level 'connection_error' message."""
        try:
            (await socket.send(json.dumps({'type': 'connection_error', 'payload': {'errors': {'message': e}, 'data': None}}, cls=GraphQLEncoder)))
        except Exception as e:
            logging.error(e, exc_info=True)
            raise
    async def start_ack_loop(socket, sleep=20):
        """Acknowledge the connection, then send 'ka' keep-alives every
        *sleep* seconds until the socket is gone (RuntimeError)."""
        try:
            (await socket.send(json.dumps({'type': 'connection_ack'})))
        except RuntimeError:
            return
        while True:
            try:
                (await socket.send(json.dumps({'type': 'ka'})))
            except RuntimeError:
                return
            (await asyncio.sleep(sleep))
# NOTE(review): the bare tuple below looks like a stripped
# @mock.patch.object(CISAudit, '_shellexec', mock_homedirs_data)
# decorator for the test that follows -- confirm against the original.
(CISAudit, '_shellexec', mock_homedirs_data)
def test_get_homedirs_pass():
    """_get_homedirs yields (user, uid, home) tuples lazily."""
    homedirs = test._get_homedirs()
    homedirs_list = list(homedirs)
    # The method returns a generator, not a materialized list.
    assert isinstance(homedirs, GeneratorType)
    assert (homedirs_list[0] == ('root', 0, '/root'))
    assert (homedirs_list[1] == ('pytest', 1000, '/home/pytest'))
def gen_log_test(out):
    """Emit the C test function that instantiates, dumps and deletes one
    object of every concrete LOCI class for every supported wire version.

    *out* is a writable file-like object receiving the generated C code.
    """
    out.write('\n\n/**\n * Test object dump functions\n */\n\nint\ntest_dump_objs(void)\n{\n of_object_t *obj;\n\n FILE *out = fopen("/dev/null", "w");\n\n /* Call each obj dump function */\n')
    for version in of_g.of_version_range:
        # FIX: dropped an unused enumerate() index from the inner loop.
        for cls in of_g.all_class_order:
            # Skip classes absent from this wire version or virtual (abstract).
            if (not loxi_utils.class_in_version(cls, version)):
                continue
            if type_maps.class_is_virtual(cls):
                continue
            if (cls == 'of_bsn_virtual_port_create_request'):
                # This message needs an embedded vport object before dumping.
                out.write(('\n    obj = (of_object_t *)%(cls)s_new(%(version)s);\n    {\n        of_object_t *vport = of_bsn_vport_q_in_q_new(%(version)s);\n        %(cls)s_vport_set(obj, vport);\n        of_object_delete(vport);\n    }\n    of_object_dump((loci_writer_f)fprintf, out, obj);\n    of_object_delete(obj);\n' % dict(cls=cls, version=of_g.of_version_wire2name[version])))
            else:
                out.write(('\n    obj = (of_object_t *)%(cls)s_new(%(version)s);\n    of_object_dump((loci_writer_f)fprintf, out, obj);\n    of_object_delete(obj);\n' % dict(cls=cls, version=of_g.of_version_wire2name[version])))
    out.write('\n    fclose(out);\n    return TEST_PASS;\n}\n')
def _is_instance_method(target: Any, method: str) -> bool:
if inspect.ismodule(target):
return False
klass = (target if inspect.isclass(target) else type(target))
for k in klass.mro():
if (method in k.__dict__):
value = k.__dict__[method]
if isinstance(value, _DescriptorProxy):
value = value.original_class_attr
if inspect.isfunction(value):
return True
return False |
class StringTransform(ABC):
    """Base class for string transformations that may be reversible.

    Subclasses override ``_apply`` (forward) and ``_revert`` (inverse);
    a transform constructed with ``reversible=False`` makes ``revert``
    the identity function.
    """

    def __init__(self, reversible: bool=True):
        super().__init__()
        self._reversible = reversible

    def _apply(self, string: str) -> str:
        # Forward transform hook for subclasses.
        raise NotImplementedError

    def _revert(self, string: str) -> str:
        # Inverse transform hook for subclasses.
        raise NotImplementedError

    def apply(self, string: str) -> str:
        """Apply the forward transform to *string*."""
        return self._apply(string)

    def revert(self, string: str) -> str:
        """Undo the transform, or return *string* unchanged when this
        instance was created as non-reversible."""
        return self._revert(string) if self._reversible else string
class oRGB(Labish, Space):
    """The oRGB color space, defined relative to sRGB.

    Lab-ish layout: luma ``l`` in [0, 1] plus two chroma axes ``cyb`` and
    ``crg`` in [-1, 1] (presumably yellow-blue and red-green opponent
    channels -- confirm against the oRGB reference).
    """
    # Conversions route through sRGB.
    BASE = 'srgb'
    NAME = 'orgb'
    SERIALIZE = ('--orgb',)
    WHITE = WHITES['2deg']['D65']
    EXTENDED_RANGE = True
    # Chroma channels mirror their percentage range around zero.
    CHANNELS = (Channel('l', 0.0, 1.0, bound=True), Channel('cyb', (- 1.0), 1.0, bound=True, flags=FLG_MIRROR_PERCENT), Channel('crg', (- 1.0), 1.0, bound=True, flags=FLG_MIRROR_PERCENT))
    CHANNEL_ALIASES = {'luma': 'l'}
    def to_base(self, coords: Vector) -> Vector:
        """Convert oRGB coordinates to sRGB."""
        return orgb_to_srgb(coords)
    def from_base(self, coords: Vector) -> Vector:
        """Convert sRGB coordinates to oRGB."""
        return srgb_to_orgb(coords)
class OptionPlotoptionsWaterfallSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Highcharts ``plotOptions.waterfall.sonification
    .defaultInstrumentOptions.mapping.pan`` options.

    NOTE(review): getter/setter pairs share one name -- the ``@property``
    / setter decorators appear stripped; the argument to ``_config_get``
    (None throughout) is presumably the option default.
    """
    def mapFunction(self):
        """Getter for the ``mapFunction`` option."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        """Setter for the ``mapFunction`` option."""
        self._config(value, js_type=False)
    def mapTo(self):
        """Getter for the ``mapTo`` option."""
        return self._config_get(None)
    def mapTo(self, text: str):
        """Setter for the ``mapTo`` option."""
        self._config(text, js_type=False)
    def max(self):
        """Getter for the ``max`` option."""
        return self._config_get(None)
    def max(self, num: float):
        """Setter for the ``max`` option."""
        self._config(num, js_type=False)
    def min(self):
        """Getter for the ``min`` option."""
        return self._config_get(None)
    def min(self, num: float):
        """Setter for the ``min`` option."""
        self._config(num, js_type=False)
    def within(self):
        """Getter for the ``within`` option."""
        return self._config_get(None)
    def within(self, value: Any):
        """Setter for the ``within`` option."""
        self._config(value, js_type=False)
def curves2ass(node, hair_name, min_pixel_width=0.5, mode='ribbon', export_motion=False):
    """Render Houdini curve geometry from *node* as an Arnold .ass
    ``curves`` node definition.

    Arguments:
      node: Houdini SOP node whose geometry holds the curves
      hair_name: retained for interface compatibility (not used here)
      min_pixel_width: Arnold min_pixel_width attribute value
      mode: Arnold curves mode (e.g. 'ribbon')
      export_motion: when True a second motion sample is written, with
        deformed positions read from the 'pprime' point attribute

    Returns the rendered .ass text for this curves node.
    """
    sample_count = (2 if export_motion else 1)
    template_vars = dict()
    geo = node.geometry()
    base_template = '\ncurves\n{\n name %(name)s\n num_points %(curve_count)i %(sample_count)s UINT\n %(number_of_points_per_curve)s\n points %(point_count)s %(sample_count)s b85POINT\n %(point_positions)s\n\n radius %(radius_count)s 1 b85FLOAT\n %(radius)s\n basis "catmull-rom"\n mode "%(mode)s"\n min_pixel_width %(min_pixel_width)s\n visibility 65535\n receive_shadows on\n self_shadows on\n matrix 1 %(sample_count)s MATRIX\n %(matrix)s\n opaque on\n declare uparamcoord uniform FLOAT\n uparamcoord %(curve_count)i %(sample_count)s b85FLOAT\n %(uparamcoord)s\n declare vparamcoord uniform FLOAT\n vparamcoord %(curve_count)i %(sample_count)s b85FLOAT\n %(vparamcoord)s\n declare curve_id uniform UINT\n curve_id %(curve_count)i %(sample_count)s UINT\n %(curve_ids)s\n}\n'
    number_of_curves = geo.intrinsicValue('primitivecount')
    real_point_count = geo.intrinsicValue('pointcount')
    # The catmull-rom basis duplicates the first/last point of each curve,
    # hence +2 points per curve in the exported count.
    point_count = (real_point_count + (number_of_curves * 2))
    radius_count = real_point_count
    # BUGFIX: floor division. Under Python 3 true division returns a
    # float, which breaks the iterator replication below and renders the
    # per-curve point counts as e.g. '12.0' (unchanged on Python 2 ints).
    real_number_of_points_in_one_curve = (real_point_count // number_of_curves)
    number_of_points_in_one_curve = (real_number_of_points_in_one_curve + 2)
    number_of_points_per_curve = ([str(number_of_points_in_one_curve)] * number_of_curves)
    curve_ids = ' '.join((str(id_) for id_ in range(number_of_curves)))
    radius = None
    pack = struct.pack
    getting_radius_start = time.time()
    # Prefer the fast bulk API when a 'width' point attribute exists.
    radius_attribute = geo.findPointAttrib('width')
    if radius_attribute:
        radius = geo.pointFloatAttribValuesAsString('width')
    else:
        # Fall back to packing per-vertex widths manually, flushing the
        # accumulation buffer periodically to keep it small.
        radius_i = 0
        radius_str_buffer = []
        radius_file_str = StringIO()
        radius_file_str_write = radius_file_str.write
        radius_str_buffer_append = radius_str_buffer.append
        for prim in geo.prims():
            prim_vertices = prim.vertices()
            radius_i += real_number_of_points_in_one_curve
            if (radius_i >= 1000):
                radius_file_str_write(''.join(radius_str_buffer))
                radius_str_buffer = []
                radius_str_buffer_append = radius_str_buffer.append
                radius_i = 0
            for vertex in prim_vertices:
                radius_str_buffer_append(pack('f', vertex.attribValue('width')))
        radius_file_str_write(''.join(radius_str_buffer))
        radius = radius_file_str.getvalue()
    getting_radius_end = time.time()
    print(('Getting Radius Info : %3.3f' % (getting_radius_end - getting_radius_start)))
    encode_start = time.time()
    getting_point_positions_start = time.time()
    point_positions = geo.pointFloatAttribValuesAsString('P')
    if export_motion:
        # Append the deformed positions as the second motion sample.
        point_prime_positions = geo.pointFloatAttribValuesAsString('pprime')
        point_positions = ('%s%s' % (point_positions, point_prime_positions))
    getting_point_positions_end = time.time()
    print(('Getting Point Position : %3.3f' % (getting_point_positions_end - getting_point_positions_start)))
    zip_start = time.time()
    # Duplicate the first and last point of every curve (12 bytes = one
    # float32 triple) as required by the catmull-rom basis.
    point_positions = ''.join(map((lambda x: ('%s%s%s' % (x[:12], x, x[(- 12):]))), map(''.join, zip(*([iter(point_positions)] * ((real_number_of_points_in_one_curve * 4) * 3))))))
    zip_end = time.time()
    print(('Zipping Point Position : %3.3f' % (zip_end - zip_start)))
    encoded_point_positions = base85.arnold_b85_encode(point_positions)
    encode_end = time.time()
    print(('Encoding Point Position : %3.3f' % (encode_end - encode_start)))
    split_start = time.time()
    splitted_point_positions = split_data(encoded_point_positions, 500)
    split_end = time.time()
    print(('Splitting Point Positions : %3.3f' % (split_end - split_start)))
    encode_start = time.time()
    encoded_radius = base85.arnold_b85_encode(radius)
    encode_end = time.time()
    print(('Radius encode : %3.3f' % (encode_end - encode_start)))
    split_start = time.time()
    splitted_radius = split_data(encoded_radius, 500)
    split_end = time.time()
    print(('Splitting Radius : %3.3f' % (split_end - split_start)))
    getting_uv_start = time.time()
    u = geo.primFloatAttribValuesAsString('uv_u')
    v = geo.primFloatAttribValuesAsString('uv_v')
    getting_uv_end = time.time()
    print(('Getting uv : %3.3f' % (getting_uv_end - getting_uv_start)))
    encode_start = time.time()
    encoded_u = base85.arnold_b85_encode(u)
    encode_end = time.time()
    print(('Encoding UParamcoord : %3.3f' % (encode_end - encode_start)))
    split_start = time.time()
    splitted_u = split_data(encoded_u, 500)
    if export_motion:
        # The motion sample reuses the same UV data.
        splitted_u = ('%(data)s%(data)s' % {'data': splitted_u})
    split_end = time.time()
    print(('Splitting UParamCoord : %3.3f' % (split_end - split_start)))
    encode_start = time.time()
    encoded_v = base85.arnold_b85_encode(v)
    encode_end = time.time()
    print(('Encoding VParamcoord : %3.3f' % (encode_end - encode_start)))
    split_start = time.time()
    splitted_v = split_data(encoded_v, 500)
    if export_motion:
        splitted_v = ('%(data)s%(data)s' % {'data': splitted_v})
    split_end = time.time()
    print(('Splitting VParamCoord : %3.3f' % (split_end - split_start)))
    # Debug sizes of the encoded payloads.
    print(('len(encoded_point_positions) : %s' % len(encoded_point_positions)))
    print(('(p + 2 * c) * 5 * 3 : %s' % ((point_count * 5) * 3)))
    print(('len(encoded_radius) : %s' % len(encoded_radius)))
    print(('len(uv) : %s' % len(u)))
    print(('len(encoded_u) : %s' % len(encoded_u)))
    print(('len(encoded_v) : %s' % len(encoded_v)))
    # Identity transform; duplicated for the second motion sample.
    matrix = '1 0 0 0\n 0 1 0 0\n 0 0 1 0\n 0 0 0 1\n'
    if export_motion:
        number_of_points_per_curve.extend(number_of_points_per_curve)
        matrix += matrix
    template_vars.update({'name': node.path().replace('/', '_'), 'curve_count': number_of_curves, 'real_point_count': real_point_count, 'number_of_points_per_curve': ' '.join(number_of_points_per_curve), 'point_count': point_count, 'point_positions': splitted_point_positions, 'radius': splitted_radius, 'radius_count': radius_count, 'curve_ids': curve_ids, 'uparamcoord': splitted_u, 'vparamcoord': splitted_v, 'min_pixel_width': min_pixel_width, 'mode': mode, 'sample_count': sample_count, 'matrix': matrix})
    rendered_curve_data = (base_template % template_vars)
    del geo
    return rendered_curve_data
def set_buffer_sizes_bw_delay_prod(topology, buffer_unit='bytes', packet_size=1500):
    """Assign each directed link a buffer sized by its bandwidth-delay product.

    For every edge (u, v), the buffer is capacity(u, v) multiplied by the mean
    round-trip time of all shortest paths traversing that edge, converted into
    *buffer_unit* ('bytes' or 'packets').  The result is stored in
    ``topology.adj[u][v]['buffer']`` and the chosen unit is recorded in
    ``topology.graph['buffer_unit']``.

    Parameters
    ----------
    topology : networkx-style graph
        Must have 'capacity' and 'delay' attributes on every edge, and
        'capacity_unit' / 'delay_unit' entries in ``topology.graph``.
    buffer_unit : str
        Unit in which buffer sizes are expressed ('bytes' or 'packets').
    packet_size : int
        Packet size in bytes; only used when ``buffer_unit == 'packets'``.

    Raises
    ------
    ValueError
        If any link lacks a capacity/delay attribute, or if an RTT cannot be
        computed because a path has no corresponding return path.

    NOTE(review): relies on module-level names not visible in this chunk —
    ``nx`` (networkx), ``mean``, and the ``capacity_units`` / ``time_units``
    conversion tables; presumably this lives in FNSS's buffer-assignment
    module — confirm.
    """
    try:
        # Validate that every edge carries both attributes and that the graph
        # declares the units they are expressed in.
        assert all((('capacity' in topology.adj[u][v]) for (u, v) in topology.edges()))
        assert all((('delay' in topology.adj[u][v]) for (u, v) in topology.edges()))
        capacity_unit = topology.graph['capacity_unit']
        delay_unit = topology.graph['delay_unit']
    except (AssertionError, KeyError):
        raise ValueError('All links must have a capacity and delay attribute')
    topology.graph['buffer_unit'] = buffer_unit
    # Consider only non-self-loop edges.
    edges = [(u, v) for (u, v) in topology.edges() if (u != v)]
    # For each edge, collect the (origin, destination) pairs whose shortest
    # path traverses it.
    route_presence = dict(zip(edges, [[] for _ in range(len(edges))]))
    route = dict(nx.all_pairs_dijkstra_path(topology, weight='weight'))
    # e2e_delay[orig][dest] = total one-way delay along the shortest path.
    e2e_delay = {}
    for orig in route:
        e2e_delay[orig] = {}
        for dest in route[orig]:
            path = route[orig][dest]
            if (len(path) <= 1):
                # Trivial path (origin == destination): no delay entry.
                continue
            path_delay = 0
            for (u, v) in zip(path[:(- 1)], path[1:]):
                if ('delay' in topology.adj[u][v]):
                    # Record traversal against whichever edge orientation is
                    # present in the `edges` list (undirected graphs only list
                    # one orientation).
                    if ((u, v) in route_presence:
                        route_presence[(u, v)].append((orig, dest))
                    else:
                        route_presence[(v, u)].append((orig, dest))
                    path_delay += topology.adj[u][v]['delay']
                else:
                    raise ValueError('No link delays available')
            e2e_delay[orig][dest] = path_delay
    # Mean RTT per edge: average of forward + return end-to-end delays over
    # all paths using the edge; fall back to link-level delays when no path
    # traverses the edge.
    mean_rtt_dict = {}
    for ((u, v), route) in route_presence.items():
        if route:
            try:
                mean_rtt = mean([(e2e_delay[o][d] + e2e_delay[d][o]) for (o, d) in route])
            except KeyError:
                raise ValueError('Cannot assign buffer sizes because some paths do not have corresponding return path')
        elif ((v, u) in edges):
            # Edge unused by any path but a reverse edge exists: RTT is the
            # sum of the two directed link delays.
            mean_rtt = (topology.adj[u][v]['delay'] + topology.adj[v][u]['delay'])
        else:
            try:
                # No direct reverse edge: use the shortest-path return delay.
                mean_rtt = (topology.adj[u][v]['delay'] + e2e_delay[v][u])
            except KeyError:
                raise ValueError('Cannot assign buffer sizes because some paths do not have corresponding return path')
        mean_rtt_dict[(u, v)] = mean_rtt
    # Conversion factor from (capacity_unit * delay_unit) to bytes; the 8000
    # divisor folds in bits->bytes (8) and ms->s scaling via the unit tables.
    norm_factor = ((capacity_units[capacity_unit] * time_units[delay_unit]) / 8000.0)
    if (buffer_unit == 'packets'):
        norm_factor /= packet_size
    for (u, v) in edges:
        capacity = topology.adj[u][v]['capacity']
        buffer_size = int(((mean_rtt_dict[(u, v)] * capacity) * norm_factor))
        topology.adj[u][v]['buffer'] = buffer_size
    return
class Observe(object):
    """Client-side glue for the server's observer protocol.

    On construction it registers handlers on the server-command dispatch
    table; each handler re-emits the wire event on the corresponding local
    event bus and returns the event unchanged.  The public methods write
    observer-related requests back to the server.
    """

    def __init__(self, core: Core):
        self.core = core
        dispatch = core.events.server_command
        dispatch[wire.ObserverEnter] += self._observer_enter
        dispatch[wire.ObserverLeave] += self._observer_leave
        dispatch[wire.ObserveRequest] += self._observe_request

    def _observer_enter(self, ev: wire.ObserverEnter) -> wire.ObserverEnter:
        # Forward to the local bus as an (observer, observee) pair.
        self.core.events.observer_enter.emit((ev.observer, ev.observee))
        return ev

    def _observer_leave(self, ev: wire.ObserverLeave) -> wire.ObserverLeave:
        self.core.events.observer_leave.emit((ev.observer, ev.observee))
        return ev

    def _observe_request(self, ev: wire.ObserveRequest) -> wire.ObserveRequest:
        self.core.events.observe_request.emit(ev.pid)
        return ev

    def observe(self, pid: int) -> None:
        """Ask the server to start observing player *pid*."""
        self.core.server.write(wire.Observe(pid=pid))

    def grant(self, pid: int, grant: bool) -> None:
        """Grant or deny an observe request from player *pid*."""
        self.core.server.write(wire.GrantObserve(pid=pid, grant=grant))

    def kick(self, pid: int) -> None:
        """Remove observer *pid*."""
        self.core.server.write(wire.KickObserver(pid=pid))
def main(page: ft.Page):
    """Build a minimal pub/sub chat page: a message column, an input field
    and a Send button that broadcasts to all sessions."""
    message_list = ft.Column()
    input_field = ft.TextField()

    def handle_incoming(message: Message):
        # Append the incoming message to the chat and refresh the page.
        message_list.controls.append(ft.Text(f'{message.user}: {message.text}'))
        page.update()

    page.pubsub.subscribe(handle_incoming)

    def handle_send(e):
        # Broadcast the typed text to every session, then clear the field.
        page.pubsub.send_all(Message(user=page.session_id, text=input_field.value))
        input_field.value = ''
        page.update()

    page.add(message_list, ft.Row([input_field, ft.ElevatedButton('Send', on_click=handle_send)]))
class TestPRElections(ApiBaseTest):
    """Regression tests for Puerto Rico house elections, which span two
    two-year periods (2018 and 2020) for a single 2020 election.

    NOTE(review): relies on module-level `factories`, `db`, `api`,
    `ElectionView` and `assert_dicts_subset` not visible in this chunk.
    """

    def setUp(self):
        """Seed one candidate with history rows in both two-year periods,
        a principal ('P') and an authorized ('A') committee, candidate and
        committee link rows for both periods, and per-cycle totals."""
        super().setUp()
        self.candidate = factories.CandidateDetailFactory()
        # Same candidate in both the 2018 and 2020 two-year periods, both
        # pointing at the 2020 election.
        self.candidates = [factories.CandidateHistoryFactory(candidate_id=self.candidate.candidate_id, state='PR', district='00', two_year_period=2018, election_years=[2020], cycles=[2018, 2020], office='H', candidate_election_year=2020), factories.CandidateHistoryFactory(candidate_id=self.candidate.candidate_id, state='PR', district='00', two_year_period=2020, election_years=[2020], cycles=[2018, 2020], office='H', candidate_election_year=2020)]
        self.committees = [factories.CommitteeHistoryFactory(cycle=2020, designation='P'), factories.CommitteeHistoryFactory(cycle=2020, designation='A')]
        [factories.CandidateElectionFactory(candidate_id=self.candidate.candidate_id, cand_election_year=year) for year in [2016, 2020]]
        [factories.CommitteeDetailFactory(committee_id=each.committee_id) for each in self.committees]
        db.session.flush()
        # Candidate-committee links for both committees in both fec election
        # years, all rolled up into the 2020 election.
        self.candidate_committee_links = [factories.CandidateCommitteeLinkFactory(candidate_id=self.candidate.candidate_id, committee_id=self.committees[0].committee_id, committee_designation='P', fec_election_year=2018, election_yr_to_be_included=2020), factories.CandidateCommitteeLinkFactory(candidate_id=self.candidate.candidate_id, committee_id=self.committees[1].committee_id, committee_designation='A', fec_election_year=2018, election_yr_to_be_included=2020), factories.CandidateCommitteeLinkFactory(candidate_id=self.candidate.candidate_id, committee_id=self.committees[0].committee_id, committee_designation='P', fec_election_year=2020, election_yr_to_be_included=2020), factories.CandidateCommitteeLinkFactory(candidate_id=self.candidate.candidate_id, committee_id=self.committees[1].committee_id, committee_designation='A', fec_election_year=2020, election_yr_to_be_included=2020)]
        # One totals row per cycle for the authorized committee.
        self.totals = [factories.TotalsHouseSenateFactory(receipts=50, disbursements=75, committee_id=self.committees[1].committee_id, coverage_end_date=datetime.datetime(2018, 12, 31), last_cash_on_hand_end_period=100, cycle=2018), factories.TotalsHouseSenateFactory(receipts=50, disbursements=75, committee_id=self.committees[1].committee_id, coverage_end_date=datetime.datetime(2020, 12, 31), last_cash_on_hand_end_period=300, cycle=2020)]
        db.session.flush()

    def test_elections_2_year(self):
        """election_full=False: only the 2020-cycle totals are aggregated."""
        results = self._results(api.url_for(ElectionView, office='house', district='00', cycle=2020, state='PR', election_full=False))
        self.assertEqual(len(results), 1)
        totals = [each for each in self.totals if (each.cycle == 2020)]
        expected = {'candidate_id': self.candidate.candidate_id, 'candidate_name': self.candidate.name, 'incumbent_challenge_full': self.candidate.incumbent_challenge_full, 'party_full': self.candidate.party_full, 'total_receipts': sum((each.receipts for each in totals)), 'total_disbursements': sum((each.disbursements for each in totals)), 'cash_on_hand_end_period': sum((each.last_cash_on_hand_end_period for each in totals))}
        assert_dicts_subset(results[0], expected)
        assert (set((each.committee_id for each in self.committees)) == set(results[0]['committee_ids']))

    def test_elections_full(self):
        """election_full=true: totals for both cycles are summed, and cash on
        hand is the max over the considered rows rather than a sum."""
        results = self._results(api.url_for(ElectionView, office='house', district='00', cycle=2020, state='PR', election_full='true'))
        totals = self.totals
        cash_on_hand_totals = self.totals[:2]
        expected = {'candidate_id': self.candidate.candidate_id, 'candidate_name': self.candidate.name, 'incumbent_challenge_full': self.candidate.incumbent_challenge_full, 'party_full': self.candidate.party_full, 'total_receipts': sum((each.receipts for each in totals)), 'total_disbursements': sum((each.disbursements for each in totals)), 'cash_on_hand_end_period': max((each.last_cash_on_hand_end_period for each in cash_on_hand_totals))}
        assert (len(results) == 1)
        assert_dicts_subset(results[0], expected)
class OptionPlotoptionsVariablepiePointEvents(Options):
    """Accessors for the ``plotOptions.variablepie.point.events`` Highcharts
    configuration section.

    Each event name is exposed as a read/write property: the getter returns
    the currently configured handler (``None`` when unset) and the setter
    stores the supplied value through ``Options._config`` with
    ``js_type=False``.

    NOTE(review): the previous code defined each getter/setter pair as two
    plain methods with the same name, so the second ``def`` shadowed the
    first and the getter was unreachable.  Restoring ``@property`` /
    ``@<name>.setter`` makes both accessible, matching the generated-Options
    pattern — confirm against the other Options subclasses in this package.
    """

    @property
    def click(self):
        """Handler fired when the point is clicked."""
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        """Handler fired while the point is dragged."""
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dragStart(self):
        """Handler fired when dragging of the point starts."""
        return self._config_get(None)

    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drop(self):
        """Handler fired when the dragged point is dropped."""
        return self._config_get(None)

    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)

    @property
    def legendItemClick(self):
        """Handler fired when the point's legend item is clicked."""
        return self._config_get(None)

    @legendItemClick.setter
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        """Handler fired when the mouse leaves the point."""
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        """Handler fired when the mouse enters the point."""
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        """Handler fired when the point is removed."""
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)

    @property
    def select(self):
        """Handler fired when the point is selected."""
        return self._config_get(None)

    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)

    @property
    def unselect(self):
        """Handler fired when the point is unselected."""
        return self._config_get(None)

    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)

    @property
    def update(self):
        """Handler fired when the point is updated."""
        return self._config_get(None)

    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False)
class Char64Test(unittest.TestCase):
    """Tests for the 64-byte fixed-width ASCII string converter
    (``otConverters.Char64``): reading, writing, non-ASCII replacement,
    truncation, and XML round-tripping.

    NOTE(review): some string literals below look like they lost non-ASCII
    characters during extraction (e.g. 'Hello ' in the write-replace test);
    they are reproduced exactly as found — verify against upstream fonttools.
    """

    # Shared fixtures: a dummy font and one converter instance.
    font = FakeFont([])
    converter = otConverters.Char64('char64', 0, None, None)

    def test_read(self):
        # Reading stops at the first NUL; the reader still advances 64 bytes.
        payload = b'Hello\x00junk after zero byte' + b'\x00' * 100
        rd = OTTableReader(payload)
        decoded = self.converter.read(rd, self.font, {})
        self.assertEqual(decoded, 'Hello')
        self.assertEqual(rd.pos, 64)

    def test_read_replace_not_ascii(self):
        payload = b'Hello \xe4 world' + b'\x00' * 100
        rd = OTTableReader(payload)
        with CapturingLogHandler(otConverters.log, 'WARNING') as captor:
            decoded = self.converter.read(rd, self.font, {})
        messages = [r.msg for r in captor.records]
        self.assertEqual(decoded, 'Hello world')
        self.assertEqual(rd.pos, 64)
        self.assertIn('replaced non-ASCII characters in "Hello world"', messages)

    def test_write(self):
        # Output is padded with NULs up to the fixed 64-byte width.
        wr = OTTableWriter()
        self.converter.write(wr, self.font, {}, 'Hello world')
        self.assertEqual(wr.getData(), b'Hello world' + b'\x00' * 53)

    def test_write_replace_not_ascii(self):
        wr = OTTableWriter()
        with CapturingLogHandler(otConverters.log, 'WARNING') as captor:
            self.converter.write(wr, self.font, {}, 'Hello ')
        messages = [r.msg for r in captor.records]
        self.assertEqual(wr.getData(), b'Hello ?' + b'\x00' * 57)
        self.assertIn('replacing non-ASCII characters in "Hello "', messages)

    def test_write_truncated(self):
        # Overlong values are cut to 64 bytes with a warning.
        overlong = 'A' * 80
        wr = OTTableWriter()
        with CapturingLogHandler(otConverters.log, 'WARNING') as captor:
            self.converter.write(wr, self.font, {}, overlong)
        messages = [r.msg for r in captor.records]
        self.assertEqual(wr.getData(), b'A' * 64)
        self.assertIn('truncating overlong "' + overlong + '" to 64 bytes', messages)

    def test_xmlRead(self):
        parsed = self.converter.xmlRead({'value': 'Foo'}, [], self.font)
        self.assertEqual(parsed, 'Foo')

    def test_xmlWrite(self):
        xml_writer = makeXMLWriter()
        self.converter.xmlWrite(xml_writer, self.font, 'Hello world', 'Element', [('attr', 'v')])
        rendered = xml_writer.file.getvalue().decode('utf-8').rstrip()
        self.assertEqual(rendered, '<Element attr="v" value="Hello world"/>')
def test_describe_container_images_report():
    """Verify `pipeline describe` reports container image dependencies for
    pipelines with no images, notebook nodes, script nodes, and both."""
    runner = CliRunner()
    pipelines_dir = Path(__file__).parent / 'resources' / 'pipelines'

    def describe(filename):
        # Invoke the CLI on a fixture pipeline and return its output.
        result = runner.invoke(pipeline, ['describe', str(pipelines_dir / filename)])
        assert result.exit_code == 0
        return result.output

    # No nodes declare an image.
    output = describe('kfp_3_node_custom.pipeline')
    assert 'Container image dependencies: None specified' in output

    # Notebook nodes: single image listed.
    output = describe('pipeline_with_notebooks.pipeline')
    assert 'Container image dependencies:\n' in output
    assert '- tensorflow/tensorflow:2.8.0' in output, output

    # Script nodes: two distinct images listed.
    output = describe('pipeline_with_scripts.pipeline')
    assert 'Container image dependencies:\n' in output, output
    assert '- tensorflow/tensorflow:2.8.0-gpu' in output, output
    assert '- tensorflow/tensorflow:2.8.0' in output, output

    # Mixed notebooks and scripts: three distinct images listed.
    output = describe('pipeline_with_notebooks_and_scripts.pipeline')
    assert 'Container image dependencies:\n' in output
    assert '- tensorflow/tensorflow:2.8.0-gpu' in output, output
    assert '- tensorflow/tensorflow:2.8.0' in output, output
    assert '- amancevice/pandas:1.4.1' in output, output
def search_record_by_oid(oid, file_obj, text_parser):
    """Binary-search *file_obj* for the byte offset of the record with *oid*.

    Assumes the file contains newline-delimited records sorted by OID
    (presumably a SNMP simulation data file — confirm against the caller).
    Returns the offset of the matching record, or a bracketing offset
    (lo/hi/file size) when no exact match is found.

    NOTE(review): depends on sibling helpers not visible here — `find_eol`
    (apparently advances an arbitrary offset to the next record boundary)
    and `get_record` (apparently returns (line, something, skipped_offset));
    `text_parser.evaluate(line, oidOnly=True)` yields (oid, _) for the line.

    :param oid: target OID to locate (comparable with parsed record OIDs)
    :param file_obj: seekable file object opened over the sorted record file
    :param text_parser: parser exposing evaluate(line, oidOnly=True)
    """
    lo = mid = 0
    prev_mid = (- 1)  # previous probe offset; repeating means we've converged
    file_obj.seek(0, 2)  # seek to end to learn the file size
    hi = sz = file_obj.tell()
    while (lo < hi):
        mid = ((lo + hi) // 2)
        file_obj.seek(mid)
        # Snap the raw midpoint to the start of the next record.
        mid = find_eol(file_obj, mid)
        file_obj.seek(mid)
        if (mid == prev_mid):
            # Probe did not move: search interval exhausted.
            break
        if (mid >= sz):
            return sz
        (line, _, skipped_offset) = get_record(file_obj)
        if (not line):
            return hi
        (midval, _) = text_parser.evaluate(line, oidOnly=True)
        if (midval < oid):
            # Target lies after this record: skip past it.
            lo = ((mid + skipped_offset) + len(line))
        elif (midval > oid):
            hi = mid
        else:
            return mid
        prev_mid = mid
    if (lo == mid):
        return lo
    else:
        return hi
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.