input stringlengths 2.65k 237k | output stringclasses 1
value |
|---|---|
from hashlib import md5, blake2b, sha256, sha1
################################################################################################################
#
# All Music
#
################################################################################################################
class allmusicUtils:
    """Helpers for deriving internal disc IDs and hash buckets from AllMusic URLs."""

    def __init__(self):
        # Search endpoint used by callers to build query URLs.
        self.baseURL = "https://www.allmusic.com/search/"
        # Backend (discogs-like) providing getMaxModVal(); set via setDiscogs().
        self.disc = None

    def setDiscogs(self, disc):
        """Attach the backend used for modulo bucketing."""
        self.disc = disc

    def getBaseURL(self):
        """Return the search base URL (fix: previously returned unbound `baseURL`)."""
        return self.baseURL

    def getArtistID(self, href, debug=False):
        """Extract the trailing numeric artist ID from an AllMusic artist href.

        Expects hrefs like ".../artist/<name>-mn0000123456"; returns the digits
        after the "-mn" marker as a string, or None when parsing fails.
        """
        if href is None:
            if debug:
                print("Could not get artist disc ID from None!")
            return None
        ival = "/artist"
        pos = href.find(ival)
        if pos == -1:
            if debug:
                # Fix: the original referenced the undefined name `suburl`.
                print("Could not find discID in {0}".format(href))
            return None
        try:
            data = href[pos + len(ival) + 1:]
            pos = data.rfind("-")
            discID = data[pos + 3:]  # skip the "-mn" marker before the digits
        except Exception:
            print("Could not extract discID from {0}".format(href))
            return None
        try:
            int(discID)
        except (TypeError, ValueError):
            if debug:
                print("DiscID {0} is not an integer".format(discID))
            return None
        if debug:
            print("Found ID {0} from {1}".format(discID, href))
        return discID

    def getArtistName(self, artist, debug=False):
        """Return the artist name, mapping None to the string "None"."""
        if artist is None:
            return "None"
        return artist

    ###############################################################################
    # Album Functions
    ###############################################################################
    def getAlbumID(self, href):
        """Return the trailing numeric path segment of *href*, else None."""
        if href is None:
            return None
        try:
            return str(int(href.split('/')[-1]))
        except (TypeError, ValueError):
            return None

    def getArtistModVal(self, artistID):
        """Bucket *artistID* using the backend's max mod value."""
        if self.disc is None:
            raise ValueError("Must set discogs()!")
        return self.getDiscIDHashMod(discID=artistID, modval=self.disc.getMaxModVal())

    ###############################################################################
    # Basic Artist IO Functions
    ###############################################################################
    def getArtistSavename(self, discID):
        """Build the pickle path for *discID*.

        Fix: original called nonexistent `self.discogsUtils.getDiscIDHashMod`.
        """
        modValue = self.getDiscIDHashMod(discID=discID, modval=self.disc.getMaxModVal())
        if modValue is None:
            return None
        outdir = mkSubDir(artistDir, str(modValue))  # project helpers, defined elsewhere in the file
        return setFile(outdir, discID + ".p")

    ###############################################################################
    # Hash Functions
    ###############################################################################
    def getHashVal(self, artist, href):
        """MD5 hex digest over the given artist and href values."""
        m = md5()
        # Fix: hashlib requires bytes in Python 3; encode str inputs.
        if artist:
            m.update(artist.encode("utf-8") if isinstance(artist, str) else artist)
        if href:
            m.update(href.encode("utf-8") if isinstance(href, str) else href)
        return m.hexdigest()

    def getHashMod(self, hashval, modval):
        """Interpret *hashval* as hex and reduce it modulo *modval*."""
        return int(hashval, 16) % modval

    def getDiscIDHashMod(self, discID, modval):
        """Return int(discID) % modval, or None for missing/non-numeric IDs."""
        if discID is None:
            return None
        try:
            ival = int(discID)
        except (TypeError, ValueError):
            return None
        return ival % modval

    def getArtistHashVal(self, artist, href):
        """Hash an artist/href pair (fix: call methods on self, not unbound globals)."""
        artist = makeStrFromUnicode(artist)  # project helper, defined elsewhere in the file
        return self.getHashVal(artist, href)

    def getFileHashVal(self, ifile):
        """Hash a file's base name."""
        fname = getBaseFilename(ifile)  # project helper, defined elsewhere in the file
        hname = makeStrFromUnicode(fname)
        return self.getHashVal(hname, None)

    def getArtistHashMod(self, artist, href, modval):
        """Bucket an artist/href pair into one of *modval* bins."""
        return self.getHashMod(self.getArtistHashVal(artist, href), modval)

    def getFileHashMod(self, ifile, modval):
        """Bucket a file name into one of *modval* bins."""
        return self.getHashMod(self.getFileHashVal(ifile), modval)
################################################################################################################
#
# Last FM
#
################################################################################################################
class lastfmUtils:
    """Helpers for deriving internal disc IDs and hash buckets for Last.fm artists."""

    def __init__(self):
        # Search endpoint used by callers to build query URLs.
        self.baseURL = "https://www.last.fm/search/"
        # Backend (discogs-like) providing getMaxModVal(); set via setDiscogs().
        self.disc = None

    def setDiscogs(self, disc):
        """Attach the backend used for modulo bucketing."""
        self.disc = disc

    def getBaseURL(self):
        """Return the search base URL (fix: previously returned unbound `baseURL`)."""
        return self.baseURL

    def getArtistID(self, name, debug=False):
        """Deterministic numeric ID (< 1e11) from the MD5 of the name's words."""
        if name is None:
            return None
        m = md5()
        # Hash the space-separated words, concatenated (spaces excluded).
        for val in name.split(" "):
            m.update(val.encode('utf-8'))
        return str(int(m.hexdigest(), 16) % int(1e11))

    def getArtistName(self, artist, debug=False):
        """Return the artist name, mapping None to the string "None"."""
        if artist is None:
            return "None"
        return artist

    ###############################################################################
    # Album Functions
    ###############################################################################
    def getAlbumID(self, name):
        """Albums share the artist hashing scheme."""
        return self.getArtistID(name)

    def getArtistModVal(self, artistID):
        """Bucket *artistID* using the backend's max mod value."""
        if self.disc is None:
            raise ValueError("Must set discogs()!")
        return self.getDiscIDHashMod(discID=artistID, modval=self.disc.getMaxModVal())

    ###############################################################################
    # Basic Artist IO Functions
    ###############################################################################
    def getArtistSavename(self, discID):
        """Build the pickle path for *discID*.

        Fix: original called nonexistent `self.discogsUtils.getDiscIDHashMod`.
        """
        modValue = self.getDiscIDHashMod(discID=discID, modval=self.disc.getMaxModVal())
        if modValue is None:
            return None
        outdir = mkSubDir(artistDir, str(modValue))  # project helpers, defined elsewhere in the file
        return setFile(outdir, discID + ".p")

    ###############################################################################
    # Hash Functions
    ###############################################################################
    def getHashVal(self, artist, href):
        """MD5 hex digest over the given artist and href values."""
        m = md5()
        # Fix: hashlib requires bytes in Python 3; encode str inputs.
        if artist:
            m.update(artist.encode("utf-8") if isinstance(artist, str) else artist)
        if href:
            m.update(href.encode("utf-8") if isinstance(href, str) else href)
        return m.hexdigest()

    def getHashMod(self, hashval, modval):
        """Interpret *hashval* as hex and reduce it modulo *modval*."""
        return int(hashval, 16) % modval

    def getDiscIDHashMod(self, discID, modval):
        """Return int(discID) % modval, or None for missing/non-numeric IDs."""
        if discID is None:
            return None
        try:
            ival = int(discID)
        except (TypeError, ValueError):
            return None
        return ival % modval

    def getArtistHashVal(self, artist, href):
        """Hash an artist/href pair (fix: call methods on self, not unbound globals)."""
        artist = makeStrFromUnicode(artist)  # project helper, defined elsewhere in the file
        return self.getHashVal(artist, href)

    def getFileHashVal(self, ifile):
        """Hash a file's base name."""
        fname = getBaseFilename(ifile)  # project helper, defined elsewhere in the file
        hname = makeStrFromUnicode(fname)
        return self.getHashVal(hname, None)

    def getArtistHashMod(self, artist, href, modval):
        """Bucket an artist/href pair into one of *modval* bins."""
        return self.getHashMod(self.getArtistHashVal(artist, href), modval)

    def getFileHashMod(self, ifile, modval):
        """Bucket a file name into one of *modval* bins."""
        return self.getHashMod(self.getFileHashVal(ifile), modval)
################################################################################################################
#
# MusicBrainz
#
################################################################################################################
class musicbrainzUtils:
    """Helpers for deriving internal disc IDs and hash buckets from MusicBrainz URLs."""

    def __init__(self):
        # Search endpoint used by callers to build query URLs.
        self.baseURL = "https://musicbrainz.org/search?"
        # Backend (discogs-like) providing getMaxModVal(); set via setDiscogs().
        self.disc = None

    def setDiscogs(self, disc):
        """Attach the backend used for modulo bucketing."""
        self.disc = disc

    def getBaseURL(self):
        """Return the search base URL (fix: previously returned unbound `baseURL`)."""
        return self.baseURL

    def getArtistID(self, href, debug=False):
        """Hash the trailing UUID of a MusicBrainz href into a decimal ID string.

        The UUID's dash-separated fields are concatenated and MD5-hashed, so the
        result is deterministic for a given href.
        """
        if href is None:
            if debug:
                print("Could not get artist disc ID from None!")
            return None
        uuid = href.split('/')[-1]
        m = md5()
        for val in uuid.split("-"):
            m.update(val.encode('utf-8'))
        discID = str(int(m.hexdigest(), 16))
        if debug:
            print("Found ID {0} from {1}".format(discID, href))
        return discID

    def getArtistName(self, artist, debug=False):
        """Return the artist name, mapping None to the string "None"."""
        if artist is None:
            return "None"
        return artist

    ###############################################################################
    # Album Functions
    ###############################################################################
    def getAlbumID(self, href):
        """Hash the trailing UUID of an album href, same scheme as getArtistID."""
        if href is None:
            return None
        try:
            uuid = href.split('/')[-1]
            m = md5()
            for val in uuid.split("-"):
                m.update(val.encode('utf-8'))
            return str(int(m.hexdigest(), 16))
        except Exception:
            return None

    def getArtistModVal(self, artistID):
        """Bucket *artistID* using the backend's max mod value."""
        if self.disc is None:
            raise ValueError("Must set discogs()!")
        return self.getDiscIDHashMod(discID=artistID, modval=self.disc.getMaxModVal())

    ###############################################################################
    # Basic Artist IO Functions
    ###############################################################################
    def getArtistSavename(self, discID):
        """Build the pickle path for *discID*.

        Fix: original called nonexistent `self.discogsUtils.getDiscIDHashMod`.
        """
        modValue = self.getDiscIDHashMod(discID=discID, modval=self.disc.getMaxModVal())
        if modValue is None:
            return None
        outdir = mkSubDir(artistDir, str(modValue))  # project helpers, defined elsewhere in the file
        return setFile(outdir, discID + ".p")

    ###############################################################################
    # Hash Functions
    ###############################################################################
    def getHashVal(self, artist, href):
        """MD5 hex digest over the given artist and href values."""
        m = md5()
        # Fix: hashlib requires bytes in Python 3; encode str inputs.
        if artist:
            m.update(artist.encode("utf-8") if isinstance(artist, str) else artist)
        if href:
            m.update(href.encode("utf-8") if isinstance(href, str) else href)
        return m.hexdigest()

    def getHashMod(self, hashval, modval):
        """Interpret *hashval* as hex and reduce it modulo *modval*."""
        return int(hashval, 16) % modval

    def getDiscIDHashMod(self, discID, modval):
        """Return int(discID) % modval, or None for missing/non-numeric IDs."""
        if discID is None:
            return None
        try:
            ival = int(discID)
        except (TypeError, ValueError):
            return None
        return ival % modval

    def getArtistHashVal(self, artist, href):
        """Hash an artist/href pair (fix: call methods on self, not unbound globals)."""
        artist = makeStrFromUnicode(artist)  # project helper, defined elsewhere in the file
        return self.getHashVal(artist, href)

    def getFileHashVal(self, ifile):
        """Hash a file's base name."""
        fname = getBaseFilename(ifile)  # project helper, defined elsewhere in the file
        hname = makeStrFromUnicode(fname)
        return self.getHashVal(hname, None)

    def getArtistHashMod(self, artist, href, modval):
        """Bucket an artist/href pair into one of *modval* bins."""
        return self.getHashMod(self.getArtistHashVal(artist, href), modval)

    def getFileHashMod(self, ifile, modval):
        """Bucket a file name into one of *modval* bins."""
        return self.getHashMod(self.getFileHashVal(ifile), modval)
################################################################################################################
#
# Discogs
#
################################################################################################################
class discogsUtils:
def __init__(self):
self.baseURL = "https://www.discogs.com/search/"
self.disc = None
    def setDiscogs(self, disc):
        # Attach the backend object whose getMaxModVal() drives hash bucketing.
        self.disc = disc
def getBaseURL(self):
return baseURL
###############################################################################
#
# Artist Functions
#
###############################################################################
def getArtistName(self, artist, debug=False):
if artist is None:
return "None"
name = artist
if artist.endswith(")"):
name = None
for x in [-3,-4,-5]:
if artist is not None:
continue
if abs(x) > len(artist):
continue
if artist[x] == "(":
try:
val = int(artist[(x+1):-1])
name = artist[:x].strip()
except:
continue
if name is None:
name = artist
return name
def getArtistID(self, href, debug=False):
if href is None:
if debug:
print("Could not get artist disc ID from None!")
return None
ival = "/artist"
pos = href.find(ival)
if pos == -1:
if debug:
print("Could not find discID in {0}".format(suburl))
return None
try:
data = href[pos+len(ival)+1:]
pos = data.find("-")
discID = data[:pos]
except:
print("Could not extract discID from {0}".format(href))
return None
try:
int(discID)
except:
if debug:
print("DiscID {0} is not an integer".format(discID))
return None
if debug:
print("Found ID {0} from {1}".format(discID, href))
return discID
###############################################################################
#
# Album Functions
#
###############################################################################
def getAlbumID(self, href):
code = None
if href is not None:
try:
code = href.split('/')[-1]
code = str(int(code))
except:
return None
else:
return None
return code
def getArtistModVal(self, artistID):
if self.disc is not None:
modValue = self.getDiscIDHashMod(discID=artistID, modval=self.disc.getMaxModVal())
return modValue
else:
raise ValueError("Must set discogs()!")
###############################################################################
#
# Basic Artist IO Functions
#
###############################################################################
def getArtistSavename(self, discID):
modValue = self.discogsUtils.getDiscIDHashMod(discID=discID, modval=self.disc.getMaxModVal())
if modValue is not None:
outdir = mkSubDir(artistDir, str(modValue))
savename = setFile(outdir, discID+".p")
return savename
return None
###############################################################################
#
# Discog Hash Functions
#
###############################################################################
def getHashVal(self, artist, href):
m = md5()
if artist: m.update(artist)
if href: m.update(href)
retval = m.hexdigest()
return retval
def getHashMod(self, hashval, modval):
ival = int(hashval, 16)
return ival % modval
def getDiscIDHashMod(self, discID, modval):
if discID == None:
return None
try:
ival = int(discID)
except:
return None
return ival % modval
def getArtistHashVal(self, artist, href):
artist = makeStrFromUnicode(artist)
hashval = getHashVal(artist, href)
return hashval
def getFileHashVal(self, ifile):
fname = getBaseFilename(ifile)
hname = makeStrFromUnicode(fname)
hashval = getHashVal(hname, None)
return hashval
def getArtistHashMod(self, artist, href, modval):
hashval = getArtistHashVal(artist,href)
return getHashMod(hashval, modval)
def getFileHashMod(self, ifile, modval):
hashval = getFileHashVal(ifile)
return | |
# for (\nabla\mu) \nu + \mu (\nabla\nu)
vmata[i] = vmata[i] + vmata[i].swapaxes(-1,-2).conj()
vmatb[i] = vmatb[i] + vmatb[i].swapaxes(-1,-2).conj()
elif xctype == 'MGGA':
raise NotImplementedError('meta-GGA')
if dma.ndim == vmata[0].ndim: # One set of DMs in the input
vmata = vmata[0]
vmatb = vmatb[0]
return lib.asarray((vmata,vmatb))
def _fxc_mat(cell, ao, wv, non0tab, xctype, ao_loc):
    """Contract XC-kernel weights *wv* with AO values into an AO-basis matrix.

    For LDA/HF, *ao* is the AO values on the grid; otherwise (GGA) *ao* carries
    derivative components and only ao[0] (the values) enters the final dot.
    """
    shls_slice = (0, cell.nbas)
    if xctype == 'LDA' or xctype == 'HF':
        #:aow = numpy.einsum('pi,p->pi', ao, wv)
        aow = _scale_ao(ao, wv)
        mat = _dot_ao_ao(cell, ao, aow, non0tab, shls_slice, ao_loc)
    else:
        # GGA: _scale_ao folds the gradient components of wv into a single
        # weighted-AO array; contract it against the plain AO values ao[0].
        #:aow = numpy.einsum('npi,np->pi', ao, wv)
        aow = _scale_ao(ao, wv)
        mat = _dot_ao_ao(cell, ao[0], aow, non0tab, shls_slice, ao_loc)
    return mat
def cache_xc_kernel(ni, cell, grids, xc_code, mo_coeff, mo_occ, spin=0,
                    kpts=None, max_memory=2000):
    '''Compute the 0th order density, Vxc and fxc. They can be used in TDDFT,
    DFT hessian module etc.
    '''
    if kpts is None:
        kpts = numpy.zeros((1,3))
    xctype = ni._xc_type(xc_code)
    # LDA needs only AO values; GGA additionally needs first derivatives.
    ao_deriv = 0
    if xctype == 'GGA':
        ao_deriv = 1
    elif xctype == 'MGGA':
        raise NotImplementedError('meta-GGA')
    nao = cell.nao_nr()
    if spin == 0:
        # Restricted case: accumulate one density over all grid blocks.
        rho = []
        for ao_k1, ao_k2, mask, weight, coords \
                in ni.block_loop(cell, grids, nao, ao_deriv, kpts, None, max_memory):
            rho.append(ni.eval_rho2(cell, ao_k1, mo_coeff, mo_occ, mask, xctype))
        rho = numpy.hstack(rho)
    else:
        # Unrestricted case: separate alpha and beta densities.
        rhoa = []
        rhob = []
        for ao_k1, ao_k2, mask, weight, coords \
                in ni.block_loop(cell, grids, nao, ao_deriv, kpts, None, max_memory):
            rhoa.append(ni.eval_rho2(cell, ao_k1, mo_coeff[0], mo_occ[0], mask, xctype))
            rhob.append(ni.eval_rho2(cell, ao_k1, mo_coeff[1], mo_occ[1], mask, xctype))
        rho = (numpy.hstack(rhoa), numpy.hstack(rhob))
    # Request derivative order 2 to obtain vxc (1st) and fxc (2nd) together.
    vxc, fxc = ni.eval_xc(xc_code, rho, spin, 0, 2, 0)[1:3]
    return rho, vxc, fxc
def get_rho(ni, cell, dm, grids, kpts=numpy.zeros((1,3)), max_memory=2000):
    '''Density in real space
    '''
    make_rho, nset, nao = ni._gen_rho_evaluator(cell, dm)
    # Only a single density matrix (one "set") is supported here.
    assert(nset == 1)
    rho = numpy.empty(grids.weights.size)
    p1 = 0
    # Fill rho block by block; [p0:p1] tracks the grid slice of each block.
    for ao_k1, ao_k2, mask, weight, coords \
            in ni.block_loop(cell, grids, nao, 0, kpts, None, max_memory):
        p0, p1 = p1, p1 + weight.size
        rho[p0:p1] = make_rho(0, ao_k1, mask, 'LDA')
    return rho
class NumInt(numint.NumInt):
    '''Generalization of pyscf's NumInt class for a single k-point shift and
    periodic images.
    '''
    def eval_ao(self, cell, coords, kpt=numpy.zeros(3), deriv=0, relativity=0,
                shls_slice=None, non0tab=None, out=None, verbose=None):
        # Delegate to the module-level single-k-point eval_ao.
        return eval_ao(cell, coords, kpt, deriv, relativity, shls_slice,
                       non0tab, out, verbose)
    @lib.with_doc(make_mask.__doc__)
    def make_mask(self, cell, coords, relativity=0, shls_slice=None,
                  verbose=None):
        return make_mask(cell, coords, relativity, shls_slice, verbose)
    @lib.with_doc(eval_rho.__doc__)
    def eval_rho(self, cell, ao, dm, non0tab=None, xctype='LDA', hermi=0, verbose=None):
        return eval_rho(cell, ao, dm, non0tab, xctype, hermi, verbose)
    def eval_rho2(self, cell, ao, mo_coeff, mo_occ, non0tab=None, xctype='LDA',
                  verbose=None):
        # Density from MO coefficients/occupations (module-level implementation).
        return eval_rho2(cell, ao, mo_coeff, mo_occ, non0tab, xctype, verbose)
    def nr_vxc(self, cell, grids, xc_code, dms, spin=0, relativity=0, hermi=0,
               kpt=None, kpts_band=None, max_memory=2000, verbose=None):
        '''Evaluate RKS/UKS XC functional and potential matrix.
        See :func:`nr_rks` and :func:`nr_uks` for more details.
        '''
        if spin == 0:
            return self.nr_rks(cell, grids, xc_code, dms, hermi,
                               kpt, kpts_band, max_memory, verbose)
        else:
            return self.nr_uks(cell, grids, xc_code, dms, hermi,
                               kpt, kpts_band, max_memory, verbose)
    @lib.with_doc(nr_rks.__doc__)
    def nr_rks(self, cell, grids, xc_code, dms, hermi=0,
               kpt=numpy.zeros(3), kpts_band=None, max_memory=2000, verbose=None):
        if kpts_band is not None:
            # To compute Vxc on kpts_band, convert the NumInt object to KNumInt object.
            ni = KNumInt()
            ni.__dict__.update(self.__dict__)
            nao = dms.shape[-1]
            # Reshape to the (ndm, nkpts, nao, nao) layout KNumInt expects.
            return ni.nr_rks(cell, grids, xc_code, dms.reshape(-1,1,nao,nao),
                             hermi, kpt.reshape(1,3), kpts_band, max_memory,
                             verbose)
        # Delegates to the module-level nr_rks (spin=0, relativity=0).
        return nr_rks(self, cell, grids, xc_code, dms,
                      0, 0, hermi, kpt, kpts_band, max_memory, verbose)
    @lib.with_doc(nr_uks.__doc__)
    def nr_uks(self, cell, grids, xc_code, dms, hermi=0,
               kpt=numpy.zeros(3), kpts_band=None, max_memory=2000, verbose=None):
        if kpts_band is not None:
            # To compute Vxc on kpts_band, convert the NumInt object to KNumInt object.
            ni = KNumInt()
            ni.__dict__.update(self.__dict__)
            nao = dms[0].shape[-1]
            return ni.nr_uks(cell, grids, xc_code, dms.reshape(-1,1,nao,nao),
                             hermi, kpt.reshape(1,3), kpts_band, max_memory,
                             verbose)
        # Delegates to the module-level nr_uks (spin=1, relativity=0).
        return nr_uks(self, cell, grids, xc_code, dms,
                      1, 0, hermi, kpt, kpts_band, max_memory, verbose)
    def eval_mat(self, cell, ao, weight, rho, vxc,
                 non0tab=None, xctype='LDA', spin=0, verbose=None):
        # Guess whether ao is evaluated for kpts_band. When xctype is LDA, ao on grids
        # should be a 2D array. For other xc functional, ao should be a 3D array.
        if ao.ndim == 2 or (xctype != 'LDA' and ao.ndim == 3):
            mat = eval_mat(cell, ao, weight, rho, vxc, non0tab, xctype, spin, verbose)
        else:
            # Multiple k-points: build one matrix per k-point.
            nkpts = len(ao)
            nao = ao[0].shape[-1]
            mat = numpy.empty((nkpts,nao,nao), dtype=numpy.complex128)
            for k in range(nkpts):
                mat[k] = eval_mat(cell, ao[k], weight, rho, vxc,
                                  non0tab, xctype, spin, verbose)
        return mat
    def _fxc_mat(self, cell, ao, wv, non0tab, xctype, ao_loc):
        return _fxc_mat(cell, ao, wv, non0tab, xctype, ao_loc)
    def block_loop(self, cell, grids, nao, deriv=0, kpt=numpy.zeros(3),
                   kpts_band=None, max_memory=2000, non0tab=None, blksize=None):
        '''Define this macro to loop over grids by blocks.
        '''
        # For UniformGrids, grids.coords does not indicate whehter grids are initialized
        if grids.non0tab is None:
            grids.build(with_non0tab=True)
        grids_coords = grids.coords
        grids_weights = grids.weights
        ngrids = grids_coords.shape[0]
        # Number of AO derivative components up to order `deriv`.
        comp = (deriv+1)*(deriv+2)*(deriv+3)//6
# NOTE to index grids.non0tab, the blksize needs to be the integer multiplier of BLKSIZE
        if blksize is None:
            blksize = int(max_memory*1e6/(comp*2*nao*16*BLKSIZE))*BLKSIZE
            blksize = max(BLKSIZE, min(blksize, ngrids, BLKSIZE*1200))
        if non0tab is None:
            non0tab = grids.non0tab
        if non0tab is None:
            # No screening table available: mark every block as non-zero.
            non0tab = numpy.empty(((ngrids+BLKSIZE-1)//BLKSIZE,cell.nbas),
                                  dtype=numpy.uint8)
            non0tab[:] = 0xff
        kpt = numpy.reshape(kpt, 3)
        if kpts_band is None:
            kpt1 = kpt2 = kpt
        else:
            kpt1 = kpts_band
            kpt2 = kpt
        for ip0 in range(0, ngrids, blksize):
            ip1 = min(ngrids, ip0+blksize)
            coords = grids_coords[ip0:ip1]
            weight = grids_weights[ip0:ip1]
            non0 = non0tab[ip0//BLKSIZE:]
            ao_k2 = self.eval_ao(cell, coords, kpt2, deriv=deriv, non0tab=non0)
            if abs(kpt1-kpt2).sum() < 1e-9:
                # Same k-point: reuse the AO block instead of re-evaluating.
                ao_k1 = ao_k2
            else:
                ao_k1 = self.eval_ao(cell, coords, kpt1, deriv=deriv)
            yield ao_k1, ao_k2, non0, weight, coords
            ao_k1 = ao_k2 = None
    def _gen_rho_evaluator(self, cell, dms, hermi=0):
        return numint.NumInt._gen_rho_evaluator(self, cell, dms, hermi)
    # Bind the module-level kernels as methods.
    nr_rks_fxc = nr_rks_fxc
    nr_uks_fxc = nr_uks_fxc
    cache_xc_kernel  = cache_xc_kernel
    get_rho = get_rho
    def rsh_and_hybrid_coeff(self, xc_code, spin=0):
        omega, alpha, hyb = numint.NumInt.rsh_and_hybrid_coeff(self, xc_code, spin)
        if abs(omega) > 1e-10:
            # Range-separated hybrids are not supported for PBC here.
            raise NotImplementedError
        return omega, alpha, hyb
_NumInt = NumInt  # backward-compatibility alias
class KNumInt(numint.NumInt):
'''Generalization of pyscf's NumInt class for k-point sampling and
periodic images.
'''
    def __init__(self, kpts=numpy.zeros((1,3))):
        numint.NumInt.__init__(self)
        # Normalize to an (nkpts, 3) array of k-points.
        self.kpts = numpy.reshape(kpts, (-1,3))
    def eval_ao(self, cell, coords, kpts=numpy.zeros((1,3)), deriv=0, relativity=0,
                shls_slice=None, non0tab=None, out=None, verbose=None, **kwargs):
        # Delegate to the k-point-sampled module-level implementation.
        return eval_ao_kpts(cell, coords, kpts, deriv,
                            relativity, shls_slice, non0tab, out, verbose)
    @lib.with_doc(make_mask.__doc__)
    def make_mask(self, cell, coords, relativity=0, shls_slice=None,
                  verbose=None):
        # Screening mask does not depend on the k-point; reuse the module helper.
        return make_mask(cell, coords, relativity, shls_slice, verbose)
    def eval_rho(self, cell, ao_kpts, dm_kpts, non0tab=None, xctype='LDA',
                 hermi=0, verbose=None):
        '''Collocate the *real* density (opt. gradients) on the real-space grid.
        Args:
            cell : Mole or Cell object
            ao_kpts : (nkpts, ngrids, nao) ndarray
                AO values at each k-point
            dm_kpts: (nkpts, nao, nao) ndarray
                Density matrix at each k-point
        Returns:
            rhoR : (ngrids,) ndarray
        '''
        nkpts = len(ao_kpts)
        rhoR = 0
        # Sum the per-k-point densities, then average over k-points.
        for k in range(nkpts):
            rhoR += eval_rho(cell, ao_kpts[k], dm_kpts[k], non0tab, xctype,
                             hermi, verbose)
        rhoR *= 1./nkpts
        return rhoR
def eval_rho2(self, cell, ao_kpts, mo_coeff_kpts, mo_occ_kpts,
non0tab=None, xctype='LDA', verbose=None):
nkpts = len(ao_kpts)
rhoR = 0
for k in range(nkpts):
rhoR += eval_rho2(cell, ao_kpts[k], mo_coeff_kpts[k],
mo_occ_kpts[k], non0tab, xctype, verbose)
rhoR *= 1./nkpts
return rhoR
def nr_vxc(self, cell, grids, xc_code, dms, spin=0, relativity=0, hermi=0,
kpts=None, kpts_band=None, max_memory=2000, verbose=None):
'''Evaluate RKS/UKS XC functional and potential matrix.
See :func:`nr_rks` and :func:`nr_uks` for more details.
'''
if spin == 0:
return self.nr_rks(cell, grids, xc_code, dms, hermi,
kpts, kpts_band, max_memory, verbose)
else:
return self.nr_uks(cell, grids, xc_code, dms, hermi,
kpts, kpts_band, max_memory, verbose)
    @lib.with_doc(nr_rks.__doc__)
    def nr_rks(self, cell, grids, xc_code, dms, hermi=0, kpts=None, kpts_band=None,
               max_memory=2000, verbose=None, **kwargs):
        if kpts is None:
            # Backward compatibility: accept the legacy singular keyword "kpt".
            if 'kpt' in kwargs:
                sys.stderr.write('WARN: KNumInt.nr_rks function finds keyword '
                                 'argument "kpt" and converts it to "kpts"\n')
                kpts = kwargs['kpt']
            else:
                kpts = self.kpts
        kpts = kpts.reshape(-1,3)
        # Delegate to the module-level nr_rks (spin=0, relativity=0).
        return nr_rks(self, cell, grids, xc_code, dms, 0, 0,
                      hermi, kpts, kpts_band, max_memory, verbose)
    @lib.with_doc(nr_uks.__doc__)
    def nr_uks(self, cell, grids, xc_code, dms, hermi=0, kpts=None, kpts_band=None,
               max_memory=2000, verbose=None, **kwargs):
        if kpts is None:
            # Backward compatibility: accept the legacy singular keyword "kpt".
            if 'kpt' in kwargs:
                sys.stderr.write('WARN: KNumInt.nr_uks function finds keyword '
                                 'argument "kpt" and converts it to "kpts"\n')
                kpts = kwargs['kpt']
            else:
                kpts = self.kpts
        kpts = kpts.reshape(-1,3)
        # Delegate to the module-level nr_uks (spin=1, relativity=0).
        return nr_uks(self, cell, grids, xc_code, dms, 1, 0,
                      hermi, kpts, kpts_band, max_memory, verbose)
def eval_mat(self, cell, ao_kpts, weight, rho, vxc,
non0tab=None, xctype='LDA', spin=0, verbose=None):
nkpts = len(ao_kpts)
nao = ao_kpts[0].shape[-1]
dtype = numpy.result_type(*ao_kpts)
mat = numpy.empty((nkpts,nao,nao), dtype=dtype)
for k in range(nkpts):
mat[k] = eval_mat(cell, ao_kpts[k], weight, rho, vxc,
non0tab, xctype, spin, verbose)
return mat
def _fxc_mat(self, cell, ao_kpts, wv, non0tab, xctype, ao_loc):
nkpts = len(ao_kpts)
nao = ao_kpts[0].shape[-1]
dtype = numpy.result_type(*ao_kpts)
mat = numpy.empty((nkpts,nao,nao), dtype=dtype)
for k in range(nkpts):
mat[k] = _fxc_mat(cell, ao_kpts[k], wv, non0tab, xctype, ao_loc)
return mat
def block_loop(self, cell, grids, nao, deriv=0, | |
<reponame>PlasticMem/tencentcloud-sdk-python<filename>tencentcloud/live/v20180801/models.py
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class AddDelayLiveStreamRequest(AbstractModel):
    """AddDelayLiveStream request parameters."""

    def __init__(self):
        r"""
        :param AppName: Push path, same as the AppName in the push and playback URLs; defaults to "live".
        :type AppName: str
        :param DomainName: Push domain name.
        :type DomainName: str
        :param StreamName: Stream name.
        :type StreamName: str
        :param DelayTime: Delay time in seconds; capped at 600 seconds.
        :type DelayTime: int
        :param ExpireTime: Expiration time of the delay setting, in UTC format, e.g. 2018-11-29T19:00:00Z.
            Notes:
            1. Defaults to expiring after 7 days; effective for at most 7 days.
            2. Beijing time = UTC + 8 hours, formatted per ISO 8601.
        :type ExpireTime: str
        """
        self.AppName = None
        self.DomainName = None
        self.StreamName = None
        self.DelayTime = None
        self.ExpireTime = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.AppName = params.get("AppName")
        self.DomainName = params.get("DomainName")
        self.StreamName = params.get("StreamName")
        self.DelayTime = params.get("DelayTime")
        self.ExpireTime = params.get("ExpireTime")
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class AddDelayLiveStreamResponse(AbstractModel):
    """AddDelayLiveStream response parameters."""
    def __init__(self):
        r"""
        :param RequestId: Unique request ID, returned with every request; needed when reporting an issue.
        :type RequestId: str
        """
        self.RequestId = None
    def _deserialize(self, params):
        # Populate attributes from the raw response dict.
        self.RequestId = params.get("RequestId")
class AddLiveDomainRequest(AbstractModel):
    """AddLiveDomain request parameters."""

    def __init__(self):
        r"""
        :param DomainName: Domain name.
        :type DomainName: str
        :param DomainType: Domain type:
            0: push domain,
            1: playback domain.
        :type DomainType: int
        :param PlayType: Pull domain type:
            1: domestic (Mainland China),
            2: global,
            3: outside Mainland China.
            Default: 1.
        :type PlayType: int
        :param IsDelayLive: Whether this is a delayed ("slow") live stream:
            0: normal live, 1: delayed live. Default: 0.
        :type IsDelayLive: int
        :param IsMiniProgramLive: Whether this is a Mini Program live stream:
            0: standard live, 1: Mini Program live. Default: 0.
        :type IsMiniProgramLive: int
        """
        self.DomainName = None
        self.DomainType = None
        self.PlayType = None
        self.IsDelayLive = None
        self.IsMiniProgramLive = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.DomainName = params.get("DomainName")
        self.DomainType = params.get("DomainType")
        self.PlayType = params.get("PlayType")
        self.IsDelayLive = params.get("IsDelayLive")
        self.IsMiniProgramLive = params.get("IsMiniProgramLive")
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class AddLiveDomainResponse(AbstractModel):
    """AddLiveDomain response parameters."""
    def __init__(self):
        r"""
        :param RequestId: Unique request ID, returned with every request; needed when reporting an issue.
        :type RequestId: str
        """
        self.RequestId = None
    def _deserialize(self, params):
        # Populate attributes from the raw response dict.
        self.RequestId = params.get("RequestId")
class AddLiveWatermarkRequest(AbstractModel):
    """AddLiveWatermark request parameters."""

    def __init__(self):
        r"""
        :param PictureUrl: Watermark image URL.
            Characters not allowed in the URL:
            ;(){}$>`#"\'|
        :type PictureUrl: str
        :param WatermarkName: Watermark name; at most 16 bytes.
        :type WatermarkName: str
        :param XPosition: Display position, X-axis offset in percent; default 0.
        :type XPosition: int
        :param YPosition: Display position, Y-axis offset in percent; default 0.
        :type YPosition: int
        :param Width: Watermark width as a percentage of the original stream width.
            Setting only one of width/height is recommended; the other scales
            automatically to avoid distortion. Defaults to the original width.
        :type Width: int
        :param Height: Watermark height as a percentage of the original stream height.
            Setting only one of width/height is recommended; the other scales
            automatically to avoid distortion. Defaults to the original height.
        :type Height: int
        """
        self.PictureUrl = None
        self.WatermarkName = None
        self.XPosition = None
        self.YPosition = None
        self.Width = None
        self.Height = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.PictureUrl = params.get("PictureUrl")
        self.WatermarkName = params.get("WatermarkName")
        self.XPosition = params.get("XPosition")
        self.YPosition = params.get("YPosition")
        self.Width = params.get("Width")
        self.Height = params.get("Height")
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class AddLiveWatermarkResponse(AbstractModel):
    """AddLiveWatermark response parameters."""
    def __init__(self):
        r"""
        :param WatermarkId: Watermark ID.
        :type WatermarkId: int
        :param RequestId: Unique request ID, returned with every request; needed when reporting an issue.
        :type RequestId: str
        """
        self.WatermarkId = None
        self.RequestId = None
    def _deserialize(self, params):
        # Populate attributes from the raw response dict.
        self.WatermarkId = params.get("WatermarkId")
        self.RequestId = params.get("RequestId")
class BandwidthInfo(AbstractModel):
    """Bandwidth information."""

    def __init__(self):
        r"""
        :param Time: Timestamp in the format yyyy-mm-dd HH:MM:SS,
            truncated to the requested granularity.
        :type Time: str
        :param Bandwidth: Bandwidth.
        :type Bandwidth: float
        """
        self.Time = None
        self.Bandwidth = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.Time = params.get("Time")
        self.Bandwidth = params.get("Bandwidth")
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class BillAreaInfo(AbstractModel):
    """Overseas-region live bandwidth output: region-level information."""

    def __init__(self):
        r"""
        :param Name: Region name.
        :type Name: str
        :param Countrys: Per-country detail data.
        :type Countrys: list of BillCountryInfo
        """
        self.Name = None
        self.Countrys = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.Name = params.get("Name")
        if params.get("Countrys") is not None:
            self.Countrys = []
            for item in params.get("Countrys"):
                obj = BillCountryInfo()
                obj._deserialize(item)
                self.Countrys.append(obj)
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class BillCountryInfo(AbstractModel):
    """Overseas-region live bandwidth output: per-country bandwidth information."""

    def __init__(self):
        r"""
        :param Name: Country name.
        :type Name: str
        :param BandInfoList: Bandwidth detail data.
        :type BandInfoList: list of BillDataInfo
        """
        self.Name = None
        self.BandInfoList = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.Name = params.get("Name")
        if params.get("BandInfoList") is not None:
            self.BandInfoList = []
            for item in params.get("BandInfoList"):
                obj = BillDataInfo()
                obj._deserialize(item)
                self.BandInfoList.append(obj)
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class BillDataInfo(AbstractModel):
    """Bandwidth and traffic information."""

    def __init__(self):
        r"""
        :param Time: Time point, format: yyyy-mm-dd HH:MM:SS.
        :type Time: str
        :param Bandwidth: Bandwidth in Mbps.
        :type Bandwidth: float
        :param Flux: Traffic in MB.
        :type Flux: float
        :param PeakTime: Peak time point, format: yyyy-mm-dd HH:MM:SS. Raw data
            has 5-minute granularity; for hourly/daily queries this is the peak
            bandwidth time point within the corresponding interval.
        :type PeakTime: str
        """
        self.Time = None
        self.Bandwidth = None
        self.Flux = None
        self.PeakTime = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.Time = params.get("Time")
        self.Bandwidth = params.get("Bandwidth")
        self.Flux = params.get("Flux")
        self.PeakTime = params.get("PeakTime")
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class BindLiveDomainCertRequest(AbstractModel):
    """BindLiveDomainCert request parameters."""

    def __init__(self):
        r"""
        :param CertId: Certificate ID, obtained from the add-certificate API.
        :type CertId: int
        :param DomainName: Playback domain name.
        :type DomainName: str
        :param Status: HTTPS status: 0 = disabled, 1 = enabled.
        :type Status: int
        """
        self.CertId = None
        self.DomainName = None
        self.Status = None

    def _deserialize(self, params):
        """Populate attributes from *params*; warn about unrecognized keys."""
        self.CertId = params.get("CertId")
        self.DomainName = params.get("DomainName")
        self.Status = params.get("Status")
        # Fix: set arithmetic instead of a manual removal loop, and corrected
        # the misspelled warning text ("fileds" -> "fields").
        unknown = set(params.keys()) - set(vars(self))
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
class BindLiveDomainCertResponse(AbstractModel):
    """BindLiveDomainCert response parameters."""
    def __init__(self):
        r"""
        :param RequestId: Unique request ID, returned with every request; needed when reporting an issue.
        :type RequestId: str
        """
        self.RequestId = None
    def _deserialize(self, params):
        # Populate attributes from the raw response dict.
        self.RequestId = params.get("RequestId")
class CallBackRuleInfo(AbstractModel):
    """Callback rule information."""

    def __init__(self):
        r"""
        :param CreateTime: Rule creation time.
        :type CreateTime: str
        :param UpdateTime: Rule update time.
        :type UpdateTime: str
        :param TemplateId: Template ID.
        :type TemplateId: int
        :param DomainName: Push domain name.
        :type DomainName: str
        :param AppName: Push path.
        :type AppName: str
        """
        self.CreateTime = None
        self.UpdateTime = None
        self.TemplateId = None
        self.DomainName = None
        self.AppName = None

    def _deserialize(self, params):
        """Populate the fields from a response dict and warn about
        unrecognized keys.

        :param params: Raw response payload.
        :type params: dict
        """
        self.CreateTime = params.get("CreateTime")
        self.UpdateTime = params.get("UpdateTime")
        self.TemplateId = params.get("TemplateId")
        self.DomainName = params.get("DomainName")
        self.AppName = params.get("AppName")
        # Warn about keys this model does not declare
        # (typo "fileds" fixed in the warning text).
        member_set = set(params.keys()) - set(vars(self))
        if member_set:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CallBackTemplateInfo(AbstractModel):
    """Callback template information."""

    def __init__(self):
        r"""
        :param TemplateId: Template ID.
        :type TemplateId: int
        :param TemplateName: Template name.
        :type TemplateName: str
        :param Description: Description.
        :type Description: str
        :param StreamBeginNotifyUrl: Stream-start callback URL.
        :type StreamBeginNotifyUrl: str
        :param StreamMixNotifyUrl: Mix-stream callback URL (deprecated).
        :type StreamMixNotifyUrl: str
        :param StreamEndNotifyUrl: Stream-end callback URL.
        :type StreamEndNotifyUrl: str
        :param RecordNotifyUrl: Recording callback URL.
        :type RecordNotifyUrl: str
        :param SnapshotNotifyUrl: Screenshot callback URL.
        :type SnapshotNotifyUrl: str
        :param PornCensorshipNotifyUrl: Porn-detection callback URL.
        :type PornCensorshipNotifyUrl: str
        :param CallbackKey: Callback authentication key.
        :type CallbackKey: str
        """
        self.TemplateId = None
        self.TemplateName = None
        self.Description = None
        self.StreamBeginNotifyUrl = None
        self.StreamMixNotifyUrl = None
        self.StreamEndNotifyUrl = None
        self.RecordNotifyUrl = None
        self.SnapshotNotifyUrl = None
        self.PornCensorshipNotifyUrl = None
        self.CallbackKey = None

    def _deserialize(self, params):
        """Populate the fields from a response dict and warn about
        unrecognized keys.

        :param params: Raw response payload.
        :type params: dict
        """
        self.TemplateId = params.get("TemplateId")
        self.TemplateName = params.get("TemplateName")
        self.Description = params.get("Description")
        self.StreamBeginNotifyUrl = params.get("StreamBeginNotifyUrl")
        self.StreamMixNotifyUrl = params.get("StreamMixNotifyUrl")
        self.StreamEndNotifyUrl = params.get("StreamEndNotifyUrl")
        self.RecordNotifyUrl = params.get("RecordNotifyUrl")
        self.SnapshotNotifyUrl = params.get("SnapshotNotifyUrl")
        self.PornCensorshipNotifyUrl = params.get("PornCensorshipNotifyUrl")
        self.CallbackKey = params.get("CallbackKey")
        # Warn about keys this model does not declare
        # (typo "fileds" fixed in the warning text).
        member_set = set(params.keys()) - set(vars(self))
        if member_set:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CallbackEventInfo(AbstractModel):
    """Callback event information."""

    def __init__(self):
        r"""
        :param EventTime: Event time.
        :type EventTime: str
        :param EventType: Event type.
        :type EventType: int
        :param Request: Callback request.
        :type Request: str
        :param Response: Callback response.
        :type Response: str
        :param ResponseTime: Response time of the customer's endpoint.
        :type ResponseTime: str
        :param ResultCode: Callback result.
        :type ResultCode: int
        :param StreamId: Stream name.
        :type StreamId: str
        """
        self.EventTime = None
        self.EventType = None
        self.Request = None
        self.Response = None
        self.ResponseTime = None
        self.ResultCode = None
        self.StreamId = None

    def _deserialize(self, params):
        """Populate the fields from a response dict and warn about
        unrecognized keys.

        :param params: Raw response payload.
        :type params: dict
        """
        self.EventTime = params.get("EventTime")
        self.EventType = params.get("EventType")
        self.Request = params.get("Request")
        self.Response = params.get("Response")
        self.ResponseTime = params.get("ResponseTime")
        self.ResultCode = params.get("ResultCode")
        self.StreamId = params.get("StreamId")
        # Warn about keys this model does not declare
        # (typo "fileds" fixed in the warning text).
        member_set = set(params.keys()) - set(vars(self))
        if member_set:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CancelCommonMixStreamRequest(AbstractModel):
"""CancelCommonMixStream请求参数结构体
"""
def __init__(self):
r"""
:param MixStreamSessionId: 混流会话(申请混流开始到取消混流结束)标识 | |
"""The functions for converting to and from the Hyperbolic simplex basis"""
from copy import deepcopy
import logging
import numpy as np
#from psych_metric.distrib.simplex.euclidean import EuclideanSimplexTransform
def cart2polar(vectors):
    """Convert 2d Cartesian coordinates to polar coordinates.

    Parameters
    ----------
    vectors : np.ndarray
        2-dimensional array of shape (n_samples, 2) holding the (x, y)
        Cartesian coordinates of each sample.

    Returns
    -------
    np.ndarray
        2-dimensional array of shape (n_samples, 2) whose columns are the
        radius followed by the angle.
    """
    radii = np.linalg.norm(vectors, axis=1, keepdims=True)
    angles = np.arctan2(vectors[:, 1], vectors[:, 0]).reshape([-1, 1])
    return np.concatenate((radii, angles), axis=1)
def polar2cart(vectors):
    """Convert polar coordinates to 2d Cartesian coordinates.

    Parameters
    ----------
    vectors : np.ndarray
        2-dimensional array of shape (n_samples, 2) whose columns are the
        radius followed by the angle.

    Returns
    -------
    np.ndarray
        2-dimensional array of shape (n_samples, 2) holding the (x, y)
        Cartesian coordinates of each sample.
    """
    radii = vectors[:, [0]]
    angles = vectors[:, [1]]
    return radii * np.concatenate((np.cos(angles), np.sin(angles)), axis=1)
def cartesian_to_hypersphere(vectors):
    """Convert from Cartesian coordinates to hyperspherical coordinates of the
    same n-dimensions.

    Parameters
    ----------
    vectors : np.ndarray
        2-dimensional array where the first dimension is the samples and the
        second is the n-dimensional Cartesian coordinates. A single
        1-dimensional sample is also accepted.

    Returns
    -------
    np.ndarray
        2-dimensional array where the first dimension is the samples and the
        second dimension contains n elements: the radius followed by the n-1
        hyperspherical angles.

    Raises
    ------
    ValueError
        If the number of coordinate dimensions is < 2.
    """
    if len(vectors.shape) == 1:
        # single sample
        vectors = vectors.reshape([1, -1])

    if vectors.shape[1] == 2:
        return cart2polar(vectors)
    elif vectors.shape[1] < 2:
        # Typo "recieved" fixed; final piece needs no f-string.
        raise ValueError(' '.join([
            'Expected the number of coordinate dimensions to be >= 2, but',
            f'received vectors with shape {vectors.shape}. and axis being',
            '1.',
        ]))

    flipped = np.fliplr(vectors)
    # cumsqrt[:, k] is the norm of the last k+1 Cartesian coordinates;
    # cumsqrt[:, -1] is therefore the full radius.
    cumsqrt = np.sqrt(np.cumsum(flipped ** 2, axis=1))
    angles = np.arccos(flipped / cumsqrt)

    # The last angle uses the half-angle (atan2-style) form so that its sign
    # distinguishes the lower half-plane of the final two coordinates.
    last_angle = np.pi - 2 * np.arctan(
        (flipped[:, 1] + cumsqrt[:, 1]) / flipped[:, 0]
    )

    # radius followed by ascending n-1 angles per row
    return np.concatenate(
        (
            cumsqrt[:, [-1]],
            np.fliplr(angles[:, 2:]),
            last_angle.reshape([-1, 1]),
        ),
        axis=1,
    )
def hypersphere_to_cartesian(vectors):
    """Convert from hyperspherical coordinates to Cartesian coordinates of the
    same n-dimensions.

    Parameters
    ----------
    vectors : np.ndarray
        2-dimensional array where the first dimension is the samples and the
        second dimension contains n elements: the radius followed by the n-1
        hyperspherical angles. A single 1-dimensional sample is also accepted.

    Returns
    -------
    np.ndarray
        2-dimensional array where the first dimension is the samples and the
        second is the n-dimensional Cartesian coordinates.

    Raises
    ------
    ValueError
        If the number of coordinate dimensions is < 2.
    """
    if len(vectors.shape) == 1:
        # single sample
        vectors = vectors.reshape([1, -1])

    if vectors.shape[1] == 2:
        return polar2cart(vectors)
    elif vectors.shape[1] < 2:
        # Typo "recieved" fixed; final piece needs no f-string.
        raise ValueError(' '.join([
            'Expected the number of coordinate dimensions to be >= 2, but',
            f'received vectors with shape {vectors.shape}. and axis being',
            '1.',
        ]))

    # x1   = radius * cos(rho_1)
    # xn-1 = radius * sin(rho_1) * ... * sin(rho_n-2) * cos(rho_n-1)
    # xn   = radius * sin(rho_1) * ... * sin(rho_n-1)
    sin = np.concatenate(
        (
            np.ones([vectors.shape[0], 1]),
            np.cumprod(np.sin(vectors[:, 1:]), axis=1),
        ),
        axis=1,
    )
    cos = np.concatenate(
        (np.cos(vectors[:, 1:]), np.ones([vectors.shape[0], 1])),
        axis=1,
    )
    return vectors[:, 0].reshape(-1, 1) * sin * cos
def givens_rotation(dim, x, y, angle):
    """Build a (dim x dim) transposed Givens rotation acting on axes x and y.

    Parameters
    ----------
    dim : int
        Size of the square rotation matrix.
    x, y : int
        Indices of the two axes spanning the rotation plane.
    angle : float
        Rotation angle in radians.

    Returns
    -------
    np.ndarray
        Identity matrix with the (x, y) plane replaced by the rotation.
    """
    cos_a, sin_a = np.cos(angle), np.sin(angle)
    rotate = np.eye(dim)
    rotate[x, x] = cos_a
    rotate[x, y] = sin_a
    rotate[y, x] = -sin_a
    rotate[y, y] = cos_a
    return rotate
def rotate_around(rotation_simplex, angle):
    """General n-dimension rotation about the space spanned by the given
    simplex points.

    Parameters
    ----------
    rotation_simplex : np.ndarray
        Points spanning the rotation space, one point per row (transposed
        automatically if supplied column-wise).
    angle : float
        Rotation angle in radians.

    Returns
    -------
    tuple
        The translation vector (first simplex point) and the composed
        rotation matrix.
    """
    # BUG FIX: the original condition compared shape[0] to itself, which is
    # always False. Points are expected in rows, so transpose when there are
    # more rows than columns.
    if rotation_simplex.shape[0] > rotation_simplex.shape[1]:
        # expects points to contained in the rows, elements in columns
        rotation_simplex = rotation_simplex.T

    # Translate the simplex s.t. the first point is centered at the origin.
    translation_vector = rotation_simplex[0].copy()
    v = rotation_simplex - translation_vector

    n = rotation_simplex.shape[1]
    mat = np.eye(n)

    # Zero out coordinates with successive Givens rotations, accumulating
    # the total rotation in `mat`.
    k = 0
    for r in range(1, n - 1):
        for c in list(range(r, n))[::-1]:
            k += 1
            rot = givens_rotation(
                n,
                c,
                c - 1,
                np.arctan2(v[r, c], v[r, c - 1]),
            )
            v = v @ rot
            mat = mat @ rot

    return (
        translation_vector,
        mat @ givens_rotation(n, n - 2, n - 1, angle) @ np.linalg.inv(mat),
    )
def get_simplex_boundary_pts(prob_vectors, copy=True):
    """Project Barycentric points onto the boundary of the regular simplex.

    Each input point defines the direction of a ray from the simplex center;
    the returned point is where that ray meets the simplex boundary, obtained
    by zeroing the minimum coordinate(s) and redistributing their mass.

    Parameters
    ----------
    prob_vectors : np.ndarray
        Array of probability vectors (Barycentric coordinates of a regular
        simplex), one per row.
    copy : bool
        If True, work on a copy of prob_vectors; otherwise modify in place.
    """
    if copy:
        prob_vectors = prob_vectors.copy()

    # Probability vectors are already Barycentric: drop the minimum
    # coordinate(s) and spread their mass over the rest.
    n_dim = prob_vectors.shape[1] - 1
    row_mins = np.min(prob_vectors, axis=1)
    for idx in range(prob_vectors.shape[0]):
        is_min = prob_vectors[idx] == row_mins[idx]
        keep = np.logical_not(is_min)
        prob_vectors[idx, keep] += row_mins[idx] / n_dim * is_min.sum()
        prob_vectors[idx, is_min] = 0
    return prob_vectors
class Rotator(object):
    """Holds a precomputed rotation about some n-2 dimensional space."""

    def __init__(self, rotation_simplex, angle):
        """Precompute the translation and rotation for the given simplex.

        Parameters
        ----------
        rotation_simplex : np.ndarray
            Points spanning the rotation space.
        angle : float
            Rotation angle in radians.
        """
        translate, rotation = rotate_around(rotation_simplex, angle)
        self.translate = translate
        self.rotate_drop_dim = rotation

    def rotate(self, vectors, drop_dim=False):
        """Rotate vectors of the n-1 simplex from n dimensions to n-1
        dimensions, optionally dropping the zeroed first dimension.
        """
        # TODO expects shape of 2, add check on vectors
        shifted = vectors - self.translate
        result = shifted @ self.rotate_drop_dim + self.translate
        return result[:, 1:] if drop_dim else result

    def inverse(self, vectors):
        """Rotate vectors of the n-1 simplex from n-1 dimensions back to n
        dimensions.
        """
        # TODO expects shape of 2, add check on vectors
        if vectors.shape[1] != len(self.translate):
            # Re-insert the dropped leading dimension as zeros.
            vectors = np.hstack((np.zeros([len(vectors), 1]), vectors))
        inverse_rotation = np.linalg.inv(self.rotate_drop_dim)
        return (vectors - self.translate) @ inverse_rotation + self.translate
class ProbabilitySimplexTransform(object):
"""Creates and contains the objects needed to convert to and from the
Probability Simplex basis.
Attributes
----------
cart_simplex : np.ndarray
centroid : np.ndarray
rotator : Rotator
Used to find cart_simplex and for reverse transforming from the
cartesian simplex to the original probability simplex.
Properties
----------
input_dim : int
The number of dimensions of the input samples before being transformed.
output_dim : int
Note
----
This ProbabilitySimplexTransform takes more time and more memory than the
original that used QR or SVD to find the rotation matrix. However, this
version preserves the simplex dimensions, keeping the simplex regular,
while the QR and SVD found rotation matrices do not.
"""
def __init__(self, dim):
prob_simplex_verts = np.eye(dim)
# Get the angle to rotate about the n-2 space to zero out first dim
angle_to_rotate = -np.arctan2(
1.0,
np.linalg.norm([1 / (dim - 1)] * (dim - 1)),
)
# Rotate to zero out one arbitrary dimension, drop that zeroed dim.
self.rotator = Rotator(prob_simplex_verts[1:], angle_to_rotate)
self.cart_simplex = self.rotator.rotate(prob_simplex_verts)
# Center Simplex in (N-1)-dim (find centroid and adjust via that)
self.centroid = np.mean(self.cart_simplex, axis=0)
self.cart_simplex -= self.centroid
# Save the vertices of the rotated simplex, transposed for ease of comp
self.cart_simplex = self.cart_simplex.T
# TODO Decide if keeping the cart_simplex for going from prob simplex
# to cart simplex with only one matrix multiplication is worth keeping
# the (n,n) matrix.
def __copy__(self):
cls = self.__class__
new = cls.__new__(cls)
new.__dict__.update(self.__dict__)
return new
def __deepcopy__(self, memo):
cls = self.__class__
new = cls.__new__(cls)
memo[id(self)] = new
for k, v in self.__dict__.items():
setattr(new, k, deepcopy(v, memo))
return new
@property
def input_dim(self):
# TODO wrap all code for obtaining cart_simllex in ProbabilityTransform
#return self.euclid_simplex_transform.input_dim
return self.cart_simplex.shape[0]
@property
def output_dim(self):
#return self.euclid_simplex_transform.output_dim
return self.cart_simplex.shape[1]
def to(self, vectors, drop_dim=True):
"""Transform given vectors into hyperbolic probability simplex space."""
# Convert from probability simplex | |
<filename>fiscalyear.py
"""Utilities for managing the fiscal calendar."""
from __future__ import division, with_statement
__author__ = '<NAME>'
__version__ = '0.1.0'
import calendar
import contextlib
import datetime
# Number of months in each quarter
MONTHS_PER_QUARTER = 12 // 4
MIN_QUARTER = 1
MAX_QUARTER = 4
# These global variables control the start of the fiscal year.
# The default is to use the U.S. federal government's fiscal year,
# but they can be changed to use any other fiscal year.
START_YEAR = 'previous'
START_MONTH = 10
START_DAY = 1
@contextlib.contextmanager
def fiscal_calendar(start_year=None,
                    start_month=None,
                    start_day=None):
    """A context manager that lets you modify the start of the fiscal calendar
    inside the scope of a with-statement.

    :param start_year: Relationship between the start of the fiscal year and
        the calendar year. Possible values: ``'previous'`` or ``'same'``.
    :type start_year: str
    :param start_month: The first month of the fiscal year
    :type start_month: int or str
    :param start_day: The first day of the first month of the fiscal year
    :type start_day: int or str
    :raises AssertionError: If ``start_year`` is not ``'previous'`` or ``'same'``
    :raises TypeError: If ``start_month`` or ``start_day`` is not an int or int-like string
    :raises ValueError: If ``start_month`` or ``start_day`` is out of range
    """
    global START_YEAR
    global START_MONTH
    global START_DAY

    # Use default values if not changed
    if start_year is None:
        start_year = START_YEAR
    if start_month is None:
        start_month = START_MONTH
    if start_day is None:
        start_day = START_DAY

    assert isinstance(start_year, str)
    assert start_year == 'previous' or start_year == 'same'
    start_month = _check_month(start_month)
    start_day = _check_day(start_month, start_day)

    # Backup previous values
    old_start_year = START_YEAR
    old_start_month = START_MONTH
    old_start_day = START_DAY

    # Temporarily change global variables
    START_YEAR = start_year
    START_MONTH = start_month
    START_DAY = start_day

    # BUG FIX: restore in a finally block so an exception raised inside the
    # with-body cannot leave the global fiscal calendar permanently modified.
    try:
        yield
    finally:
        # Restore previous values
        START_YEAR = old_start_year
        START_MONTH = old_start_month
        START_DAY = old_start_day
def _check_int(value):
"""Check if value is an int or int-like string.
:param value: The value to test
:return: The value
:rtype: int
:raises TypeError: If value is not an int or int-like string
"""
if isinstance(value, int):
return value
elif isinstance(value, str) and value.isdigit():
return int(value)
else:
raise TypeError('an int or int-like string is required (got %s)' % (
type(value).__name__))
def _check_year(year):
    """Check if year is a valid year.

    :param year: The year to test
    :return: The year
    :rtype: int
    :raises TypeError: If year is not an int or int-like string
    :raises ValueError: If year is out of range
    """
    year = _check_int(year)
    if not (datetime.MINYEAR <= year <= datetime.MAXYEAR):
        raise ValueError('year must be in %d..%d' % (
            datetime.MINYEAR, datetime.MAXYEAR), year)
    return year
def _check_month(month):
    """Check if month is a valid month.

    :param month: The month to test
    :return: The month
    :rtype: int
    :raises TypeError: If month is not an int or int-like string
    :raises ValueError: If month is out of range
    """
    month = _check_int(month)
    if not (1 <= month <= 12):
        raise ValueError('month must be in %d..%d' % (1, 12), month)
    return month
def _check_day(month, day):
    """Check if day is a valid day of month.

    :param month: The month to test
    :param day: The day to test
    :return: The day
    :rtype: int
    :raises TypeError: If month or day is not an int or int-like string
    :raises ValueError: If month or day is out of range
    """
    month = _check_month(month)
    day = _check_int(day)

    # Find the last day of the month; 2001 is a non-leap year, so February
    # caps at 28.
    max_day = calendar.monthrange(2001, month)[1]
    if not (1 <= day <= max_day):
        raise ValueError('day must be in %d..%d' % (1, max_day), day)
    return day
def _check_quarter(quarter):
    """Check if quarter is a valid quarter.

    :param quarter: The quarter to test
    :return: The quarter
    :rtype: int
    :raises TypeError: If quarter is not an int or int-like string
    :raises ValueError: If quarter is out of range
    """
    quarter = _check_int(quarter)
    if not (MIN_QUARTER <= quarter <= MAX_QUARTER):
        raise ValueError('quarter must be in %d..%d' % (
            MIN_QUARTER, MAX_QUARTER), quarter)
    return quarter
class FiscalYear(object):
    """A class representing a single fiscal year."""

    __slots__ = '_fiscal_year'

    def __new__(cls, fiscal_year):
        """Constructor.

        :param fiscal_year: The fiscal year
        :type fiscal_year: int or str
        :returns: A newly constructed FiscalYear object
        :rtype: FiscalYear
        :raises TypeError: If fiscal_year is not an int or int-like string
        :raises ValueError: If fiscal_year is out of range
        """
        fiscal_year = _check_year(fiscal_year)

        self = super(FiscalYear, cls).__new__(cls)
        self._fiscal_year = fiscal_year
        return self

    def __repr__(self):
        """Convert to formal string, for repr().

        >>> fy = FiscalYear(2017)
        >>> repr(fy)
        'FiscalYear(2017)'
        """
        return '%s(%d)' % (self.__class__.__name__,
                           self._fiscal_year)

    def __str__(self):
        """Convert to informal string, for str().

        >>> fy = FiscalYear(2017)
        >>> str(fy)
        'FY2017'
        """
        return 'FY%d' % (self._fiscal_year)

    # TODO: Implement __format__ so that you can print
    # fiscal year as 17 or 2017 (%y or %Y)

    def __contains__(self, item):
        """Returns True if item in self, else False.

        :param item: The item to check
        :type item: FiscalYear, FiscalQuarter, FiscalDateTime,
                    datetime, FiscalDate, or date
        :rtype: bool
        """
        if isinstance(item, FiscalYear):
            return self == item
        elif isinstance(item, FiscalQuarter):
            return self._fiscal_year == item.fiscal_year
        elif (isinstance(item, FiscalDateTime) or
              isinstance(item, datetime.datetime)):
            return self.start <= item <= self.end
        elif (isinstance(item, FiscalDate) or
              isinstance(item, datetime.date)):
            return self.start.date() <= item <= self.end.date()
        else:
            raise TypeError("can't compare '%s' to '%s'" % (
                type(self).__name__, type(item).__name__))

    # Read-only field accessors
    @property
    def fiscal_year(self):
        """:returns: The fiscal year
        :rtype: int
        """
        return self._fiscal_year

    @property
    def prev_fiscal_year(self):
        """:returns: The previous fiscal year
        :rtype: FiscalYear
        """
        return FiscalYear(self._fiscal_year - 1)

    @property
    def next_fiscal_year(self):
        """:returns: The next fiscal year
        :rtype: FiscalYear
        """
        return FiscalYear(self._fiscal_year + 1)

    @property
    def start(self):
        """:returns: Start of the fiscal year
        :rtype: FiscalDateTime
        """
        return self.q1.start

    @property
    def end(self):
        """:returns: End of the fiscal year
        :rtype: FiscalDateTime
        """
        return self.q4.end

    @property
    def q1(self):
        """:returns: The first quarter of the fiscal year
        :rtype: FiscalQuarter
        """
        return FiscalQuarter(self._fiscal_year, 1)

    @property
    def q2(self):
        """:returns: The second quarter of the fiscal year
        :rtype: FiscalQuarter
        """
        return FiscalQuarter(self._fiscal_year, 2)

    @property
    def q3(self):
        """:returns: The third quarter of the fiscal year
        :rtype: FiscalQuarter
        """
        return FiscalQuarter(self._fiscal_year, 3)

    @property
    def q4(self):
        """:returns: The fourth quarter of the fiscal year
        :rtype: FiscalQuarter
        """
        return FiscalQuarter(self._fiscal_year, 4)

    # Comparisons of FiscalYear objects with other
    # NOTE: comparing against non-FiscalYear objects deliberately raises
    # TypeError (existing behavior callers may rely on).
    def __lt__(self, other):
        if isinstance(other, FiscalYear):
            return self._fiscal_year < other._fiscal_year
        else:
            raise TypeError("can't compare '%s' to '%s'" % (
                type(self).__name__, type(other).__name__))

    def __le__(self, other):
        if isinstance(other, FiscalYear):
            return self._fiscal_year <= other._fiscal_year
        else:
            raise TypeError("can't compare '%s' to '%s'" % (
                type(self).__name__, type(other).__name__))

    def __eq__(self, other):
        if isinstance(other, FiscalYear):
            return self._fiscal_year == other._fiscal_year
        else:
            raise TypeError("can't compare '%s' to '%s'" % (
                type(self).__name__, type(other).__name__))

    def __ne__(self, other):
        if isinstance(other, FiscalYear):
            return self._fiscal_year != other._fiscal_year
        else:
            raise TypeError("can't compare '%s' to '%s'" % (
                type(self).__name__, type(other).__name__))

    def __gt__(self, other):
        if isinstance(other, FiscalYear):
            return self._fiscal_year > other._fiscal_year
        else:
            raise TypeError("can't compare '%s' to '%s'" % (
                type(self).__name__, type(other).__name__))

    def __ge__(self, other):
        if isinstance(other, FiscalYear):
            return self._fiscal_year >= other._fiscal_year
        else:
            raise TypeError("can't compare '%s' to '%s'" % (
                type(self).__name__, type(other).__name__))

    def __hash__(self):
        """:returns: Hash based on the fiscal year.

        BUG FIX: defining ``__eq__`` without ``__hash__`` implicitly sets
        ``__hash__`` to None, making instances unusable in sets and as dict
        keys. Hash on the same field ``__eq__`` compares.

        :rtype: int
        """
        return hash(self._fiscal_year)
class FiscalQuarter(object):
"""A class representing a single fiscal quarter."""
__slots__ = ['_fiscal_year', '_quarter']
def __new__(cls, fiscal_year, quarter):
"""Constructor.
:param fiscal_year: The fiscal year
:type fiscal_year: int or str
:param quarter: The fiscal quarter [1 - 4]
:type quarter: int or str
:returns: A newly constructed FiscalQuarter object
:rtype: FiscalQuarter
:raises TypeError: If fiscal_year or quarter is not
an int or int-like string
:raises ValueError: If fiscal_year or quarter is out of range
"""
fiscal_year = _check_year(fiscal_year)
quarter = _check_quarter(quarter)
self = super(FiscalQuarter, cls).__new__(cls)
self._fiscal_year = fiscal_year
self._quarter = quarter
return self
def __repr__(self):
"""Convert to formal string, for repr().
>>> q3 = FiscalQuarter(2017, 3)
>>> repr(q3)
'FiscalQuarter(2017, 3)'
"""
return '%s(%d, %d)' % (self.__class__.__name__,
self._fiscal_year,
self._quarter)
def __str__(self):
"""Convert to informal string, for str().
>>> q3 = FiscalQuarter(2017, 3)
>>> str(q3)
'FY2017 Q3'
"""
return 'FY%d Q%d' % (self._fiscal_year,
self._quarter)
# TODO: Implement __format__ so that you can print
# fiscal year as 17 or 2017 (%y or %Y)
def __contains__(self, item):
"""Returns True if item in self, else False.
:param item: The item to check
:type item: FiscalQuarter, FiscalDateTime, datetime,
FiscalDate, or date
:rtype: bool
"""
if isinstance(item, FiscalQuarter):
return self == item
| |
(0x91FE, 0), # East Asian ideograph
0x3A2F7C: (0x64C0, 0), # East Asian ideograph
0x395A2F: (0x58F2, 0), # East Asian ideograph
0x334F59: (0x7A93, 0), # East Asian ideograph
0x293C57: (0x8F79, 0), # East Asian ideograph
0x6F4F5A: (0xB989, 0), # Korean hangul
0x215C21: (0x901F, 0), # East Asian ideograph
0x215C22: (0x9017, 0), # East Asian ideograph
0x215C23: (0x901D, 0), # East Asian ideograph
0x215C24: (0x9010, 0), # East Asian ideograph
0x225C25: (0x74BF, 0), # East Asian ideograph
0x215C26: (0x900D, 0), # East Asian ideograph
0x215C27: (0x901E, 0), # East Asian ideograph
0x235C28: (0x9DBB, 0), # East Asian ideograph
0x274123: (0x6302, 0), # East Asian ideograph
0x215C2A: (0x900F, 0), # East Asian ideograph
0x215C2B: (0x9022, 0), # East Asian ideograph
0x215C2C: (0x9016, 0), # East Asian ideograph
0x215C2D: (0x901B, 0), # East Asian ideograph
0x215C2E: (0x9014, 0), # East Asian ideograph
0x214F5D: (0x7AA9, 0), # East Asian ideograph
0x215C30: (0x9035, 0), # East Asian ideograph
0x215C31: (0x9031, 0), # East Asian ideograph
0x235C32: (0x9DB9, 0), # East Asian ideograph
0x275C33: (0x8FDB, 0), # East Asian ideograph
0x275C34: (0x8FD0, 0), # East Asian ideograph
0x2D4F5E: (0x7AB0, 0), # East Asian ideograph
0x215C36: (0x9053, 0), # East Asian ideograph
0x215C37: (0x9042, 0), # East Asian ideograph
0x215C38: (0x9050, 0), # East Asian ideograph
0x275C39: (0x8FBE, 0), # East Asian ideograph
0x275C3A: (0x8FDD, 0), # East Asian ideograph
0x274F5F: (0x7A77, 0), # East Asian ideograph
0x275C3C: (0x8FC2, 0), # East Asian ideograph
0x215C3D: (0x904F, 0), # East Asian ideograph
0x235C3E: (0x9DD9, 0), # East Asian ideograph
0x215C3F: (0x904D, 0), # East Asian ideograph
0x215C40: (0x9051, 0), # East Asian ideograph
0x214F60: (0x7ABA, 0), # East Asian ideograph
0x215C42: (0x903E, 0), # East Asian ideograph
0x215C43: (0x9058, 0), # East Asian ideograph
0x275C44: (0x8FDC, 0), # East Asian ideograph
0x275C45: (0x900A, 0), # East Asian ideograph
0x215C46: (0x9063, 0), # East Asian ideograph
0x214F61: (0x7AC5, 0), # East Asian ideograph
0x275C48: (0x9012, 0), # East Asian ideograph
0x215C49: (0x9069, 0), # East Asian ideograph
0x215C4A: (0x906E, 0), # East Asian ideograph
0x215C4B: (0x9068, 0), # East Asian ideograph
0x215C4C: (0x906D, 0), # East Asian ideograph
0x214F62: (0x7AC4, 0), # East Asian ideograph
0x215C4E: (0x9074, 0), # East Asian ideograph
0x275C4F: (0x9009, 0), # East Asian ideograph
0x275C50: (0x8FDF, 0), # East Asian ideograph
0x215C51: (0x9077, 0), # East Asian ideograph
0x215C52: (0x907C, 0), # East Asian ideograph
0x275C53: (0x9057, 0), # East Asian ideograph
0x215C54: (0x907F, 0), # East Asian ideograph
0x215C55: (0x907D, 0), # East Asian ideograph
0x275C56: (0x8FC8, 0), # East Asian ideograph
0x235C57: (0x9DF2, 0), # East Asian ideograph
0x215C58: (0x9082, 0), # East Asian ideograph
0x215C59: (0x9080, 0), # East Asian ideograph
0x275C5A: (
0x8FE9,
0,
), # East Asian ideograph (variant of 2D5C5A which maps to 8FE9)
0x275C5B: (0x8FB9, 0), # East Asian ideograph
0x275C5C: (0x9026, 0), # East Asian ideograph
0x275C5D: (0x903B, 0), # East Asian ideograph
0x215C5E: (0x9091, 0), # East Asian ideograph
0x215C5F: (0x9095, 0), # East Asian ideograph
0x215C60: (0x90A3, 0), # East Asian ideograph
0x215C61: (0x90A2, 0), # East Asian ideograph
0x215C62: (0x90AA, 0), # East Asian ideograph
0x215C63: (0x90A6, 0), # East Asian ideograph
0x215C64: (0x90B5, 0), # East Asian ideograph
0x215C65: (0x90B1, 0), # East Asian ideograph
0x215C66: (0x90B8, 0), # East Asian ideograph
0x215C67: (0x90CE, 0), # East Asian ideograph
0x215C68: (0x90CA, 0), # East Asian ideograph
0x4B5564: (0x77C7, 0), # East Asian ideograph
0x235C6A: (0x9DED, 0), # East Asian ideograph
0x215C6B: (0x90E8, 0), # East Asian ideograph
0x215C6C: (0x90ED, 0), # East Asian ideograph
0x275C6D: (0x90AE, 0), # East Asian ideograph
0x215C6E: (0x90FD, 0), # East Asian ideograph
0x215C6F: (0x9102, 0), # East Asian ideograph
0x275C70: (0x4E61, 0), # East Asian ideograph
0x275C71: (0x90B9, 0), # East Asian ideograph
0x215C72: (0x9119, 0), # East Asian ideograph
0x275C73: (0x90D1, 0), # East Asian ideograph
0x275C74: (0x90BB, 0), # East Asian ideograph
0x275C75: (0x9093, 0), # East Asian ideograph
0x215C76: (0x9131, 0), # East Asian ideograph
0x214F69: (0x7AED, 0), # East Asian ideograph
0x215C78: (0x9149, 0), # East Asian ideograph
0x215C79: (0x914B, 0), # East Asian ideograph
0x215C7A: (0x914A, 0), # East Asian ideograph
0x215C7B: (0x9152, 0), # East Asian ideograph
0x215C7C: (0x914D, 0), # East Asian ideograph
0x215C7D: (0x914C, 0), # East Asian ideograph
0x215C7E: (0x9157, 0), # East Asian ideograph
0x6F5454: (0xC3DF, 0), # Korean hangul
0x6F5A34: (0xCF13, 0), # Korean hangul
0x214F6B: (0x7AF6, 0), # East Asian ideograph
0x213A39: (0x5AB3, 0), # East Asian ideograph
0x234237: (0x9226, 0), # East Asian ideograph
0x6F4F6D: (0xB9D8, 0), # Korean hangul
0x695A31: (0x64F6, 0), # East Asian ideograph
0x6F4E45: (0xB625, 0), # Korean hangul
0x234F6F: (0x9857, 0), # East Asian ideograph
0x27456A: (0x6988, 0), # East Asian ideograph
0x213E7D: (0x6108, 0), # East Asian ideograph
0x395821: (0x97E4, 0), # East Asian ideograph
0x274F70: (0x5DF4, 0), # East Asian ideograph (duplicate simplified)
0x213A3A: (0x5AE1, 0), # East Asian ideograph
0x224F71: (0x7052, 0), # East Asian ideograph
0x234F72: (0x9856, 0), # East Asian ideograph
0x295E7A: (0x9EFE, 0), # East Asian ideograph
0x224F73: (0x705C, 0), # East Asian ideograph
0x213F39: (0x6163, 0), # East Asian ideograph
0x6F4F74: (0xB9E4, 0), # Korean hangul
0x6F5456: (0xC3E8, 0), # Korean hangul
0x213E7E: (0x60F1, 0), # East Asian ideograph
0x214F75: (0x7B1B, 0), # East Asian ideograph
0x213A3B: (0x5AD7, 0), # East Asian ideograph
0x284E66: (0x6EE2, 0), # East Asian ideograph
0x213A21: (0x5A46, 0), # East Asian ideograph
0x234F77: (0x9862, 0), # East Asian ideograph
0x275235: (0x7F62, 0), # East Asian ideograph
0x224F78: (0x7059, 0), # East Asian ideograph
0x213A23: (0x5A6A, 0), # East Asian ideograph
0x274F79: (0x7B14, 0), # East Asian ideograph
0x213A24: (0x5A36, 0), # East Asian ideograph
0x6F5457: (0xC3ED, 0), # Korean hangul
0x22427E: (0x6AED, 0), # East Asian ideograph
0x214F7B: (0x7B50, 0), # East Asian ideograph
0x213A26: (0x5A40, 0), # East Asian ideograph
0x695E63: (0x6E82, 0), # East Asian ideograph
0x275679: (0x80E1, 0), # East Asian ideograph (duplicate simplified)
0x224F7C: (0x7061, 0), # East Asian ideograph
0x213A27: (0x5A66, 0), # East Asian ideograph
0x224F7D: (0x705D, 0), # East Asian ideograph
0x223A28: (0x6705, 0), # East Asian ideograph
0x335347: (0x81D9, 0), # East Asian ideograph
0x293B4F: (0x8F78, 0), # East Asian ideograph
0x234F7E: (0x9868, 0), # East Asian ideograph
0x6F4F7B: (0xB9F8, 0), # Korean hangul
0x6F5458: (0xC3F4, 0), # Korean hangul
0x6F5363: (0xC22D, 0), # Korean hangul
0x2D5D68: (0x8021, 0), # East Asian ideograph
0x394928: (0x6D5C, 0), # East Asian ideograph
0x29366A: (0x8D53, 0), # East Asian ideograph
0x227A2C: (0x81B5, 0), # East Asian ideograph
0x223173: (0x637F, 0), # East Asian ideograph
0x2D5179: (0x7E62, 0), # East Asian ideograph
0x213A2E: (0x5A92, 0), # East Asian ideograph
0x6F5459: (0xC3F5, 0), # Korean hangul
0x2D3A2F: (0x58FB, 0), # East Asian ideograph
0x4B3351: (0x5204, 0), # East Asian ideograph
0x215D21: (0x9163, 0), # East Asian ideograph
0x215D22: (0x9165, 0), # East Asian ideograph
0x215D23: (0x916C, 0), # East Asian ideograph
0x215D24: (0x9169, 0), # East Asian ideograph
0x215D25: (0x916A, 0), # East Asian ideograph
0x215D26: (0x9175, 0), # East Asian ideograph
0x215D27: (0x9178, 0), # East Asian ideograph
0x215D28: (0x9177, 0), # East Asian ideograph
0x215D29: (0x9187, 0), # East Asian ideograph
0x215D2A: (0x9189, 0), # East Asian ideograph
0x215D2B: (0x918B, 0), # East Asian ideograph
0x215D2C: (0x9183, 0), # East Asian ideograph
0x215D2D: (0x9192, 0), # East Asian ideograph
0x215D2E: (0x91A3, 0), # East Asian ideograph
0x275D2F: (0x915D, 0), # East Asian ideograph
0x215D30: (0x919C, 0), # East Asian ideograph
0x275D31: (0x533B, 0), # East Asian ideograph
0x225D32: (0x7512, 0), # East Asian ideograph
0x215D33: (0x91BA, 0), # East Asian ideograph
0x275D34: (0x917F, 0), # East Asian ideograph
0x275D35: (0x8845, 0), # East Asian ideograph
0x215D36: (0x91C7, 0), # East Asian ideograph
0x215D37: (0x91C9, 0), # East Asian ideograph
0x215D38: (0x91CB, 0), # East Asian ideograph
0x235D39: (0x9E1C, 0), # East Asian | |
"""
05/10/2021 - <NAME>
The functions in the file are used to generate the Dynamic
Quantum Variational Ansatz (DQVA) in a manner that is amenable
to circuit cutting.
"""
from qiskit import QuantumCircuit, AncillaRegister, converters
from qiskit.circuit import ControlledGate
from qiskit.circuit.library.standard_gates import XGate
from qiskit.transpiler.passes import Unroller
from qiskit.transpiler import PassManager
from utils.graph_funcs import *
from utils.helper_funcs import *
def apply_mixer(circ, alpha, init_state, G, barriers,
                decompose_toffoli, mixer_order, subgraph_dict,
                cut_nodes, hot_nodes, verbose=0):
    """
    Apply the mixer unitary U_M(alpha) to circ

    Input
    -----
    circ : QuantumCircuit
        The current ansatz
    alpha : list[float]
        The angle values of the parametrized gates
    init_state : str
        The current initial state for the ansatz, bits which are "1" are hit
        with an X-gate at the beginning of the circuit and their partial mixers
        are turned off. Bitstring is little-endian ordered.
    G : NetworkX Graph
        The graph we want to solve MIS on
    barriers : int
        An integer from 0 to 2, with 0 having no barriers and 2 having the most
    decompose_toffoli : int
        An integer from 0 to 2. 0 will apply custom open-controlled
        toffoli gates to the ansatz. 1 will apply equivalent gates but using
        X-gates and regular-controlled toffolis instead. 2 unrolls these gates
        to basis gates (but not relevant to this function).
        WARNING Qiskit cannot simulate circuits with decompose_toffoli=0
    mixer_order : list[int]
        The order that the partial mixers should be applied in. For a list
        such as [1,2,0,3] qubit 1's mixer is applied, then qubit 2's, and so on
    subgraph_dict : dict
        A dictionary mapping qubit number to subgraph index
    cut_nodes : list
        List of nodes incident to a cut
    hot_nodes : list
        A list of "hot nodes" incident to a cut, to which we are applying
        mixers
    verbose : int
        0 is least verbose, 2 is most

    Returns
    -------
    list[tuple]
        (qubit, gate_count) pairs locating where the circuit should be cut;
        empty when no cut location was identified.
    """
    # Apply partial mixers V_i(alpha_i)
    if mixer_order is None:
        mixer_order = list(G.nodes)
    if verbose > 0:
        print('APPLYING MIXER UNITARY')
        print('\tMixer order:', mixer_order, 'Cut nodes:', cut_nodes, 'Hot nodes:', hot_nodes)
    # Pad the given alpha parameters to account for the zeroed angles.
    # A qubit's angle stays None (mixer off) when its bit is already 1,
    # when we have run out of parameters, or when it is a non-hot cut node.
    pad_alpha = [None]*len(init_state)
    next_alpha = 0
    for qubit in mixer_order:
        bit = list(reversed(init_state))[qubit]
        if bit == '1' or next_alpha >= len(alpha) \
                or ( qubit in cut_nodes and qubit not in hot_nodes ):
            continue
        else:
            pad_alpha[qubit] = alpha[next_alpha]
            next_alpha += 1
    if verbose > 0:
        print('\tinit_state: {}\n\talpha: {}\n\tpad_alpha: {}'.format(init_state,
                                                                      alpha, pad_alpha))
    cuts = [] # initialize a trivial set of cuts
    # identify the first qubit in the "second" subgraph, which is used to
    # identify cut locations
    swap_qubit = mixer_order[0]
    for qubit in mixer_order[1:]:
        if subgraph_dict[qubit] != subgraph_dict[swap_qubit]:
            swap_qubit = qubit
            break
    if verbose:
        print('\tSwap qubit =', swap_qubit)
    for qubit in mixer_order:
        # identify the location of cuts
        if qubit == swap_qubit and pad_alpha[qubit] is not None and len(hot_nodes) > 0:
            # find all neighbors of the hot nodes
            hot_neighbors = set.union(*[ set(G.neighbors(node)) for node in hot_nodes ])
            # find all cut qubits in the non-hot graph
            adj_cut_qubits = [ circ.qubits[node] for node in hot_neighbors
                               if subgraph_dict[node] != subgraph_dict[hot_nodes[0]] ]
            # cut after all gates on adj_cut_nodes
            cuts = [ ( qubit, num_gates(circ,qubit) ) for qubit in adj_cut_qubits ]
            if verbose:
                print('\tcuts:', cuts)
        if pad_alpha[qubit] is None or not G.has_node(qubit):
            # Turn off mixers for qubits which are already 1
            continue
        neighbors = list(G.neighbors(qubit))
        anc_idx = subgraph_dict[qubit]
        if verbose > 0:
            print('\tqubit:', qubit, 'num_qubits =', len(circ.qubits),
                  'neighbors:', neighbors)
        # construct a multi-controlled Toffoli gate, with open-controls on q's neighbors
        # Qiskit has bugs when attempting to simulate custom controlled gates.
        # Instead, wrap a regular toffoli with X-gates
        ctrl_qubits = [circ.qubits[i] for i in neighbors]
        if decompose_toffoli > 0:
            # apply the multi-controlled Toffoli, targetting the ancilla qubit
            for ctrl in ctrl_qubits:
                circ.x(ctrl)
            circ.mcx(ctrl_qubits, circ.ancillas[anc_idx])
            for ctrl in ctrl_qubits:
                circ.x(ctrl)
        else:
            mc_toffoli = ControlledGate('mc_toffoli', len(neighbors)+1, [],
                                        num_ctrl_qubits=len(neighbors),
                                        ctrl_state='0'*len(neighbors),
                                        base_gate=XGate())
            circ.append(mc_toffoli, ctrl_qubits + [circ.ancillas[anc_idx]])
        # apply an X rotation controlled by the state of the ancilla qubit
        circ.crx(2*pad_alpha[qubit], circ.ancillas[anc_idx], circ.qubits[qubit])
        # apply the same multi-controlled Toffoli to uncompute the ancilla
        if decompose_toffoli > 0:
            for ctrl in ctrl_qubits:
                circ.x(ctrl)
            circ.mcx(ctrl_qubits, circ.ancillas[anc_idx])
            for ctrl in ctrl_qubits:
                circ.x(ctrl)
        else:
            circ.append(mc_toffoli, ctrl_qubits + [circ.ancillas[anc_idx]])
        if barriers > 1:
            circ.barrier()
    return cuts
def apply_phase_separator(circ, gamma, G):
    """Apply the phase separator unitary: a parameterized Z-rotation,
    rz(2*gamma), on every node of graph G."""
    for node in G.nodes:
        circ.rz(2 * gamma, node)
def num_gates(circuit, qubit):
    """Return the number of non-barrier operations acting on *qubit* in *circuit*."""
    dag = converters.circuit_to_dag(circuit)
    dag.remove_all_ops_named("barrier")
    return sum(1 for op_node in dag.topological_op_nodes() if qubit in op_node.qargs)
def gen_dqva(G, partition, cut_nodes, hot_nodes, P=1, params=[], init_state=None,
barriers=1, decompose_toffoli=1, mixer_order=None, verbose=0):
nq = len(G.nodes)
if P != 1:
raise Exception("P != 1 currently unsupported")
subgraph_dict = None
subgraphs, _ = get_subgraphs(G, partition)
# check that all hot nodes are in the same subgraph
# this assertion fails if there are *no* hot nodes,
# ... in which case you should not be using ciruit cutting!
assert len(set([ node in subgraphs[0] for node in hot_nodes ])) == 1
# identify the subgraph of every node
subgraph_dict = {}
for i, subgraph in enumerate(subgraphs):
for qubit in subgraph:
subgraph_dict[qubit] = i
# sort mixers by subgraph, with the "hot subgraph" first
if mixer_order is None:
mixer_order = list(G.nodes)
hot_subgraph = subgraph_dict[hot_nodes[0]]
def _node_in_hot_graph(node):
return subgraph_dict[node] == hot_subgraph
new_mixer_order = sorted(mixer_order, key=_node_in_hot_graph, reverse=True)
if new_mixer_order != mixer_order:
print(f"WARNING: mixer order changed from {mixer_order} to {new_mixer_order} to respect subgraph ordering")
mixer_order = new_mixer_order
# Step 1: Jump Start
# Run an efficient classical approximation algorithm to warm-start the optimization
if init_state is None:
init_state = '0'*nq
# Step 2: Mixer Initialization
# Select any one of the initial strings and apply two mixing unitaries separated by the phase separator unitary
dqva_circ = QuantumCircuit(nq, name='q')
# Add an ancilla qubit(s) for implementing the mixer unitaries
anc_num = len(partition)
anc_reg = AncillaRegister(anc_num, 'anc')
dqva_circ.add_register(anc_reg)
#print('Init state:', init_state)
for qb, bit in enumerate(reversed(init_state)):
if bit == '1':
dqva_circ.x(qb)
if barriers > 0:
dqva_circ.barrier()
# parse the variational parameters
# The dqva ansatz dynamically turns off partial mixers for qubits in |1>
# and adds extra mixers to the end of the circuit
num_nonzero = nq - hamming_weight(init_state)
# WARNING: this assertion is not performed for cutting because we are too lazy
# to figure out how many parameters there should actually be
#assert (len(params) == (nq + 1) * P), "Incorrect number of parameters!"
alpha_list = []
gamma_list = []
last_idx = 0
for p in range(P):
chunk = num_nonzero + 1
cur_section = params[p*chunk:(p+1)*chunk]
alpha_list.append(cur_section[:-1])
gamma_list.append(cur_section[-1])
last_idx = (p+1)*chunk
# Add the leftover parameters as extra mixers
if len(params[last_idx:]) > 0:
alpha_list.append(params[last_idx:])
if verbose > 0:
print('Parameters:')
for i in range(len(alpha_list)):
print('\talpha_{}: {}'.format(i, alpha_list[i]))
if i < len(gamma_list):
print('\tgamma_{}: {}'.format(i, gamma_list[i]))
# Construct the dqva ansatz
#for alphas, gamma in zip(alpha_list, gamma_list):
for i in range(len(alpha_list)):
alphas = alpha_list[i]
_cuts = apply_mixer(dqva_circ, alphas, init_state, G, barriers,
decompose_toffoli, mixer_order, subgraph_dict,
cut_nodes, hot_nodes, verbose=verbose)
print('i =', i, 'and cuts =', _cuts)
if barriers > 0:
dqva_circ.barrier()
if i < len(gamma_list):
gamma = gamma_list[i]
apply_phase_separator(dqva_circ, gamma, G)
if barriers > 0:
dqva_circ.barrier()
# fix set of cuts and eliminate hot nodes after first mixing layer
if i == 0:
cuts = _cuts
hot_nodes = []
print('Outside loop, cuts =', cuts)
if decompose_toffoli > 1:
#basis_gates = ['x', 'cx', 'barrier', 'crx', 'tdg', 't', 'rz', 'h']
basis_gates = ['x', 'h', 'cx', 'crx', 'rz', 't', 'tdg', 'u1']
pass_ = Unroller(basis_gates)
pm = PassManager(pass_)
dqva_circ = pm.run(dqva_circ)
# push cuts forward past single-qubit gates
# to (possibly) get rid of some trivial single-qubit fragments
circ_graph = converters.circuit_to_dag(dqva_circ)
circ_graph.remove_all_ops_named("barrier")
fixed_cuts = []
for qubit, cut_loc in cuts:
qubit_gates = 0
for node in circ_graph.topological_op_nodes():
if qubit not in node.qargs: continue
qubit_gates += 1
if qubit_gates <= cut_loc: continue
if len(node.qargs) == 1: cut_loc += 1
else: break
fixed_cuts.append( (qubit,cut_loc) )
# remove trivial cuts at the beginning or end of the circuit
fixed_cuts = [ (qubit,cut_loc) for | |
# securedataplane/preacher — src/testing/TestON/tests/CHOtest/CHOtest.py
import sys
import os
import re
import time
import json
import itertools
class CHOtest:
    def __init__( self ):
        # TestON test classes only need a default attribute; all real setup
        # happens in CASE1 using the framework-injected `main` object.
        self.default = ''
    def CASE1( self, main ):
        """
        Startup sequence:
        apply cell <name>
        git pull
        mvn clean install
        onos-package
        onos-verify-cell
        onos-uninstall
        onos-install
        onos-start-cli

        NOTE(review): `main` and `utilities` are injected at runtime by the
        TestON framework; they are not defined in this file.
        """
        import time
        global intentState
        main.threadID = 0
        # Read all test configuration from the .params file for this test.
        main.numCtrls = main.params[ 'CTRL' ][ 'numCtrl' ]
        git_pull = main.params[ 'GIT' ][ 'autoPull' ]
        git_branch = main.params[ 'GIT' ][ 'branch' ]
        karafTimeout = main.params['CTRL']['karafCliTimeout']
        main.checkIntentsDelay = int( main.params['timers']['CheckIntentDelay'] )
        main.failSwitch = main.params['TEST']['pauseTest']
        main.emailOnStop = main.params['TEST']['email']
        main.intentCheck = int( main.params['TEST']['intentChecks'] )
        main.linkCheck = int( main.params['TEST']['linkChecks'] )
        main.topoCheck = int( main.params['TEST']['topoChecks'] )
        main.numPings = int( main.params['TEST']['numPings'] )
        main.pingSleep = int( main.params['timers']['pingSleep'] )
        main.topoCheckDelay = int( main.params['timers']['topoCheckDelay'] )
        main.pingTimeout = int( main.params['timers']['pingTimeout'] )
        main.remHostDelay = int( main.params['timers']['remHostDelay'] )
        main.remDevDelay = int( main.params['timers']['remDevDelay'] )
        main.newTopo = ""
        main.CLIs = []
        # "on"/"off" params are converted to booleans here.
        main.failSwitch = True if main.failSwitch == "on" else False
        main.emailOnStop = True if main.emailOnStop == "on" else False
        # Collect the per-controller CLI driver objects (ONOScli1..N).
        for i in range( 1, int(main.numCtrls) + 1 ):
            main.CLIs.append( getattr( main, 'ONOScli' + str( i ) ) )
        main.case( "Set up test environment" )
        main.log.report( "Set up test environment" )
        main.log.report( "_______________________" )
        main.step( "Apply Cell environment for ONOS" )
        # The cell name must be supplied on the TestON command line.
        if ( main.onoscell ):
            cellName = main.onoscell
            cell_result = main.ONOSbench.setCell( cellName )
            utilities.assert_equals( expect=main.TRUE, actual=cell_result,
                                     onpass="Test step PASS",
                                     onfail="Test step FAIL" )
        else:
            main.log.error( "Please provide onoscell option at TestON CLI to run CHO tests" )
            main.log.error( "Example: ~/TestON/bin/cli.py run OnosCHO onoscell <cellName>" )
            main.cleanup()
            main.exit()
        # Optionally refresh and rebuild the ONOS source tree.
        main.step( "Git checkout and pull " + git_branch )
        if git_pull == 'on':
            checkout_result = main.ONOSbench.gitCheckout( git_branch )
            pull_result = main.ONOSbench.gitPull()
            cp_result = ( checkout_result and pull_result )
        else:
            checkout_result = main.TRUE
            pull_result = main.TRUE
            main.log.info( "Skipped git checkout and pull" )
            cp_result = ( checkout_result and pull_result )
        utilities.assert_equals( expect=main.TRUE, actual=cp_result,
                                 onpass="Test step PASS",
                                 onfail="Test step FAIL" )
        main.step( "mvn clean & install" )
        if git_pull == 'on':
            mvn_result = main.ONOSbench.cleanInstall()
            utilities.assert_equals( expect=main.TRUE, actual=mvn_result,
                                     onpass="Test step PASS",
                                     onfail="Test step FAIL" )
        else:
            mvn_result = main.TRUE
            main.log.info("Skipped mvn clean install as git pull is disabled in params file")
        main.ONOSbench.getVersion( report=True )
        main.step( "Create ONOS package" )
        packageResult = main.ONOSbench.onosPackage()
        utilities.assert_equals( expect=main.TRUE, actual=packageResult,
                                 onpass="Test step PASS",
                                 onfail="Test step FAIL" )
        # Reinstall ONOS from scratch on every node in the cell.
        main.step( "Uninstall ONOS package on all Nodes" )
        uninstallResult = main.TRUE
        for i in range( int( main.numCtrls ) ):
            main.log.info( "Uninstalling package on ONOS Node IP: " + main.onosIPs[i] )
            u_result = main.ONOSbench.onosUninstall( main.onosIPs[i] )
            utilities.assert_equals( expect=main.TRUE, actual=u_result,
                                     onpass="Test step PASS",
                                     onfail="Test step FAIL" )
            uninstallResult = ( uninstallResult and u_result )
        main.step( "Install ONOS package on all Nodes" )
        installResult = main.TRUE
        for i in range( int( main.numCtrls ) ):
            main.log.info( "Installing package on ONOS Node IP: " + main.onosIPs[i] )
            i_result = main.ONOSbench.onosInstall( node=main.onosIPs[i] )
            utilities.assert_equals( expect=main.TRUE, actual=i_result,
                                     onpass="Test step PASS",
                                     onfail="Test step FAIL" )
            installResult = ( installResult and i_result )
        main.step( "Verify ONOS nodes UP status" )
        statusResult = main.TRUE
        for i in range( int( main.numCtrls ) ):
            main.log.info( "ONOS Node " + main.onosIPs[i] + " status:" )
            onos_status = main.ONOSbench.onosStatus( node=main.onosIPs[i] )
            utilities.assert_equals( expect=main.TRUE, actual=onos_status,
                                     onpass="Test step PASS",
                                     onfail="Test step FAIL" )
            statusResult = ( statusResult and onos_status )
        main.step( "Start ONOS CLI on all nodes" )
        cliResult = main.TRUE
        main.log.step(" Start ONOS cli using thread ")
        startCliResult = main.TRUE
        pool = []
        # Start all CLIs in parallel and time how long the fan-out takes.
        time1 = time.time()
        for i in range( int( main.numCtrls) ):
            t = main.Thread( target=main.CLIs[i].startOnosCli,
                             threadID=main.threadID,
                             name="startOnosCli",
                             args=[ main.onosIPs[i], karafTimeout ] )
            pool.append(t)
            t.start()
            main.threadID = main.threadID + 1
        for t in pool:
            t.join()
            startCliResult = startCliResult and t.result
        time2 = time.time()
        if not startCliResult:
            main.log.info("ONOS CLI did not start up properly")
            main.cleanup()
            main.exit()
        else:
            main.log.info("Successful CLI startup")
            startCliResult = main.TRUE
        # Enable IPv6 neighbor discovery in both components that consume it.
        main.step( "Set IPv6 cfg parameters for Neighbor Discovery" )
        cfgResult1 = main.CLIs[0].setCfg( "org.onosproject.proxyarp.ProxyArp", "ipv6NeighborDiscovery", "true" )
        cfgResult2 = main.CLIs[0].setCfg( "org.onosproject.provider.host.impl.HostLocationProvider", "ipv6NeighborDiscovery", "true" )
        cfgResult = cfgResult1 and cfgResult2
        utilities.assert_equals( expect=main.TRUE, actual=cfgResult,
                                 onpass="ipv6NeighborDiscovery cfg is set to true",
                                 onfail="Failed to cfg set ipv6NeighborDiscovery" )
        # Overall case verdict is the conjunction of every step above.
        case1Result = installResult and uninstallResult and statusResult and startCliResult and cfgResult
        main.log.info("Time for connecting to CLI: %2f seconds" %(time2-time1))
        utilities.assert_equals( expect=main.TRUE, actual=case1Result,
                                 onpass="Set up test environment PASS",
                                 onfail="Set up test environment FAIL" )
    def CASE20( self, main ):
        """
        This test script Loads a new Topology (Att) on CHO setup and balances all switches
        """
        import re
        import time
        import copy
        main.prefix = 0
        # Expected topology dimensions, read from the TOPO1 params section.
        main.numMNswitches = int ( main.params[ 'TOPO1' ][ 'numSwitches' ] )
        main.numMNlinks = int ( main.params[ 'TOPO1' ][ 'numLinks' ] )
        main.numMNhosts = int ( main.params[ 'TOPO1' ][ 'numHosts' ] )
        main.log.report(
            "Load Att topology and Balance all Mininet switches across controllers" )
        main.log.report(
            "________________________________________________________________________" )
        main.case(
            "Assign and Balance all Mininet switches across controllers" )
        main.step( "Start Mininet with Att topology" )
        # Copy the topology file onto the Mininet host, then start the net.
        main.newTopo = main.params['TOPO1']['topo']
        mininetDir = main.Mininet1.home + "/custom/"
        topoPath = main.testDir + "/" + main.TEST + "/Dependencies/" + main.newTopo
        main.ONOSbench.secureCopy(main.Mininet1.user_name, main.Mininet1.ip_address, topoPath, mininetDir, direction="to")
        topoPath = mininetDir + main.newTopo
        startStatus = main.Mininet1.startNet(topoFile = topoPath)
        main.step( "Assign switches to controllers" )
        for i in range( 1, ( main.numMNswitches + 1 ) ):  # 1 to ( num of switches +1 )
            main.Mininet1.assignSwController(
                sw="s" + str( i ),
                ip=main.onosIPs )
        # Verify every switch reports the first ONOS controller.
        switch_mastership = main.TRUE
        for i in range( 1, ( main.numMNswitches + 1 ) ):
            response = main.Mininet1.getSwController( "s" + str( i ) )
            print( "Response is " + str( response ) )
            if re.search( "tcp:" + main.onosIPs[0], response ):
                switch_mastership = switch_mastership and main.TRUE
            else:
                switch_mastership = main.FALSE
        if switch_mastership == main.TRUE:
            main.log.report( "Controller assignment successfull" )
        else:
            main.log.report( "Controller assignment failed" )
        time.sleep(30)  # waiting here to make sure topology converges across all nodes
        main.step( "Balance devices across controllers" )
        balanceResult = main.ONOScli1.balanceMasters()
        # giving some breathing time for ONOS to complete re-balance
        time.sleep( 5 )
        topology_output = main.ONOScli1.topology()
        topology_result = main.ONOSbench.getTopology( topology_output )
        case2Result = ( switch_mastership and startStatus )
        utilities.assert_equals(
            expect=main.TRUE,
            actual=case2Result,
            onpass="Starting new Att topology test PASS",
            onfail="Starting new Att topology test FAIL" )
    def CASE21( self, main ):
        """
        This test script Loads a new Topology (Chordal) on CHO setup and balances all switches
        """
        import re
        import time
        import copy
        main.prefix = 1
        # Expected topology dimensions, read from the TOPO2 params section.
        main.newTopo = main.params['TOPO2']['topo']
        main.numMNswitches = int ( main.params[ 'TOPO2' ][ 'numSwitches' ] )
        main.numMNlinks = int ( main.params[ 'TOPO2' ][ 'numLinks' ] )
        main.numMNhosts = int ( main.params[ 'TOPO2' ][ 'numHosts' ] )
        main.log.report(
            "Load Chordal topology and Balance all Mininet switches across controllers" )
        main.log.report(
            "________________________________________________________________________" )
        main.case(
            "Assign and Balance all Mininet switches across controllers" )
        main.step("Start Mininet with Chordal topology")
        # Copy the topology file onto the Mininet host, then start the net.
        mininetDir = main.Mininet1.home + "/custom/"
        topoPath = main.testDir + "/" + main.TEST + "/Dependencies/" + main.newTopo
        main.ONOSbench.secureCopy(main.Mininet1.user_name, main.Mininet1.ip_address, topoPath, mininetDir, direction="to")
        topoPath = mininetDir + main.newTopo
        startStatus = main.Mininet1.startNet(topoFile = topoPath)
        main.step( "Assign switches to controllers" )
        for i in range( 1, ( main.numMNswitches + 1 ) ):  # 1 to ( num of switches +1 )
            main.Mininet1.assignSwController(
                sw="s" + str( i ),
                ip=main.onosIPs )
        # Verify every switch reports the first ONOS controller.
        switch_mastership = main.TRUE
        for i in range( 1, ( main.numMNswitches + 1 ) ):
            response = main.Mininet1.getSwController( "s" + str( i ) )
            print( "Response is " + str( response ) )
            if re.search( "tcp:" + main.onosIPs[0], response ):
                switch_mastership = switch_mastership and main.TRUE
            else:
                switch_mastership = main.FALSE
        if switch_mastership == main.TRUE:
            main.log.report( "Controller assignment successfull" )
        else:
            main.log.report( "Controller assignment failed" )
        main.step( "Balance devices across controllers" )
        balanceResult = main.ONOScli1.balanceMasters()
        # giving some breathing time for ONOS to complete re-balance
        time.sleep( 5 )
        caseResult = switch_mastership
        time.sleep(30)
        utilities.assert_equals(
            expect=main.TRUE,
            actual=caseResult,
            onpass="Starting new Chordal topology test PASS",
            onfail="Starting new Chordal topology test FAIL" )
def CASE22( self, main ):
"""
This test script Loads a new Topology (Spine) on CHO setup and balances all switches
"""
import re
import time
import copy
main.prefix = 2
main.newTopo = main.params['TOPO3']['topo']
main.numMNswitches = int ( main.params[ 'TOPO3' ][ 'numSwitches' ] )
main.numMNlinks = int ( main.params[ 'TOPO3' ][ 'numLinks' ] )
main.numMNhosts = int ( main.params[ | |
# cardboard/cards/sets/darksteel.py
from cardboard import types
from cardboard.ability import (
AbilityNotImplemented, spell, activated, triggered, static
)
from cardboard.cards import card, common, keywords, match
# NOTE(review): the definitions below are auto-generated stubs — every inner
# `def` is a placeholder ability returning AbilityNotImplemented. Where a card
# has several abilities, the inner functions share one name, so earlier defs
# are shadowed and the returned tuple repeats the last definition. That is
# harmless while all bodies are identical stubs, but the inner functions
# should be given distinct names when the abilities are implemented.
@card("Neurok Transmuter")
def neurok_transmuter(card, abilities):
    def neurok_transmuter():
        return AbilityNotImplemented
    def neurok_transmuter():
        return AbilityNotImplemented
    return neurok_transmuter, neurok_transmuter,
@card("Death Cloud")
def death_cloud(card, abilities):
    def death_cloud():
        return AbilityNotImplemented
    return death_cloud,
@card("Arcane Spyglass")
def arcane_spyglass(card, abilities):
    def arcane_spyglass():
        return AbilityNotImplemented
    def arcane_spyglass():
        return AbilityNotImplemented
    return arcane_spyglass, arcane_spyglass,
@card("Slobad, Goblin Tinkerer")
def slobad_goblin_tinkerer(card, abilities):
    def slobad_goblin_tinkerer():
        return AbilityNotImplemented
    return slobad_goblin_tinkerer,
@card("Angel's Feather")
def angels_feather(card, abilities):
    def angels_feather():
        return AbilityNotImplemented
    return angels_feather,
@card("Synod Artificer")
def synod_artificer(card, abilities):
    def synod_artificer():
        return AbilityNotImplemented
    def synod_artificer():
        return AbilityNotImplemented
    return synod_artificer, synod_artificer,
@card("Rebuking Ceremony")
def rebuking_ceremony(card, abilities):
    def rebuking_ceremony():
        return AbilityNotImplemented
    return rebuking_ceremony,
@card("Leonin Bola")
def leonin_bola(card, abilities):
    def leonin_bola():
        return AbilityNotImplemented
    def leonin_bola():
        return AbilityNotImplemented
    return leonin_bola, leonin_bola,
@card("Nourish")
def nourish(card, abilities):
    def nourish():
        return AbilityNotImplemented
    return nourish,
@card("Turn the Tables")
def turn_the_tables(card, abilities):
    def turn_the_tables():
        return AbilityNotImplemented
    return turn_the_tables,
@card("Darksteel Reactor")
def darksteel_reactor(card, abilities):
    def darksteel_reactor():
        return AbilityNotImplemented
    def darksteel_reactor():
        return AbilityNotImplemented
    def darksteel_reactor():
        return AbilityNotImplemented
    return darksteel_reactor, darksteel_reactor, darksteel_reactor,
@card("Blinkmoth Nexus")
def blinkmoth_nexus(card, abilities):
    def blinkmoth_nexus():
        return AbilityNotImplemented
    def blinkmoth_nexus():
        return AbilityNotImplemented
    def blinkmoth_nexus():
        return AbilityNotImplemented
    return blinkmoth_nexus, blinkmoth_nexus, blinkmoth_nexus,
# Auto-generated ability stubs; same-named inner defs shadow one another, so
# multi-ability tuples repeat the last def — fine while all are placeholders.
@card("Kraken's Eye")
def krakens_eye(card, abilities):
    def krakens_eye():
        return AbilityNotImplemented
    return krakens_eye,
@card("Leonin Battlemage")
def leonin_battlemage(card, abilities):
    def leonin_battlemage():
        return AbilityNotImplemented
    def leonin_battlemage():
        return AbilityNotImplemented
    return leonin_battlemage, leonin_battlemage,
@card("Tears of Rage")
def tears_of_rage(card, abilities):
    def tears_of_rage():
        return AbilityNotImplemented
    def tears_of_rage():
        return AbilityNotImplemented
    return tears_of_rage, tears_of_rage,
@card("Machinate")
def machinate(card, abilities):
    def machinate():
        return AbilityNotImplemented
    return machinate,
@card("Echoing Decay")
def echoing_decay(card, abilities):
    def echoing_decay():
        return AbilityNotImplemented
    return echoing_decay,
@card("Goblin Archaeologist")
def goblin_archaeologist(card, abilities):
    def goblin_archaeologist():
        return AbilityNotImplemented
    return goblin_archaeologist,
@card("Emissary of Despair")
def emissary_of_despair(card, abilities):
    def emissary_of_despair():
        return AbilityNotImplemented
    def emissary_of_despair():
        return AbilityNotImplemented
    return emissary_of_despair, emissary_of_despair,
@card("Myr Matrix")
def myr_matrix(card, abilities):
    def myr_matrix():
        return AbilityNotImplemented
    def myr_matrix():
        return AbilityNotImplemented
    def myr_matrix():
        return AbilityNotImplemented
    return myr_matrix, myr_matrix, myr_matrix,
@card("Gemini Engine")
def gemini_engine(card, abilities):
    def gemini_engine():
        return AbilityNotImplemented
    return gemini_engine,
@card("Hallow")
def hallow(card, abilities):
    def hallow():
        return AbilityNotImplemented
    return hallow,
@card("Loxodon Mystic")
def loxodon_mystic(card, abilities):
    def loxodon_mystic():
        return AbilityNotImplemented
    return loxodon_mystic,
@card("Arcbound Ravager")
def arcbound_ravager(card, abilities):
    def arcbound_ravager():
        return AbilityNotImplemented
    def arcbound_ravager():
        return AbilityNotImplemented
    return arcbound_ravager, arcbound_ravager,
@card("Scrounge")
def scrounge(card, abilities):
    def scrounge():
        return AbilityNotImplemented
    return scrounge,
@card("Screams from Within")
def screams_from_within(card, abilities):
    def screams_from_within():
        return AbilityNotImplemented
    def screams_from_within():
        return AbilityNotImplemented
    def screams_from_within():
        return AbilityNotImplemented
    return screams_from_within, screams_from_within, screams_from_within,
# Auto-generated ability stubs; same-named inner defs shadow one another, so
# multi-ability tuples repeat the last def — fine while all are placeholders.
@card("Razor Golem")
def razor_golem(card, abilities):
    def razor_golem():
        return AbilityNotImplemented
    def razor_golem():
        return AbilityNotImplemented
    return razor_golem, razor_golem,
@card("Arcbound Worker")
def arcbound_worker(card, abilities):
    def arcbound_worker():
        return AbilityNotImplemented
    return arcbound_worker,
@card("Retract")
def retract(card, abilities):
    def retract():
        return AbilityNotImplemented
    return retract,
@card("Chittering Rats")
def chittering_rats(card, abilities):
    def chittering_rats():
        return AbilityNotImplemented
    return chittering_rats,
@card("Tel-Jilad Outrider")
def teljilad_outrider(card, abilities):
    def teljilad_outrider():
        return AbilityNotImplemented
    return teljilad_outrider,
# NOTE(review): "<NAME>" below is an anonymization artifact — the original
# card name (presumably "Krark-Clan Stoker") should be restored from the set list.
@card("<NAME>")
def krarkclan_stoker(card, abilities):
    def krarkclan_stoker():
        return AbilityNotImplemented
    return krarkclan_stoker,
@card("Shield of Kaldra")
def shield_of_kaldra(card, abilities):
    def shield_of_kaldra():
        return AbilityNotImplemented
    def shield_of_kaldra():
        return AbilityNotImplemented
    def shield_of_kaldra():
        return AbilityNotImplemented
    return shield_of_kaldra, shield_of_kaldra, shield_of_kaldra,
@card("Last Word")
def last_word(card, abilities):
    def last_word():
        return AbilityNotImplemented
    def last_word():
        return AbilityNotImplemented
    return last_word, last_word,
@card("Chromescale Drake")
def chromescale_drake(card, abilities):
    def chromescale_drake():
        return AbilityNotImplemented
    def chromescale_drake():
        return AbilityNotImplemented
    def chromescale_drake():
        return AbilityNotImplemented
    return chromescale_drake, chromescale_drake, chromescale_drake,
@card("Hunger of the Nim")
def hunger_of_the_nim(card, abilities):
    def hunger_of_the_nim():
        return AbilityNotImplemented
    return hunger_of_the_nim,
@card("Magnetic Flux")
def magnetic_flux(card, abilities):
    def magnetic_flux():
        return AbilityNotImplemented
    return magnetic_flux,
@card("Second Sight")
def second_sight(card, abilities):
    def second_sight():
        return AbilityNotImplemented
    def second_sight():
        return AbilityNotImplemented
    return second_sight, second_sight,
# Auto-generated ability stubs; same-named inner defs shadow one another, so
# multi-ability tuples repeat the last def — fine while all are placeholders.
@card("Soulscour")
def soulscour(card, abilities):
    def soulscour():
        return AbilityNotImplemented
    return soulscour,
@card("Echoing Calm")
def echoing_calm(card, abilities):
    def echoing_calm():
        return AbilityNotImplemented
    return echoing_calm,
@card("Greater Harvester")
def greater_harvester(card, abilities):
    def greater_harvester():
        return AbilityNotImplemented
    def greater_harvester():
        return AbilityNotImplemented
    return greater_harvester, greater_harvester,
@card("Pulse of the Dross")
def pulse_of_the_dross(card, abilities):
    def pulse_of_the_dross():
        return AbilityNotImplemented
    return pulse_of_the_dross,
@card("Ritual of Restoration")
def ritual_of_restoration(card, abilities):
    def ritual_of_restoration():
        return AbilityNotImplemented
    return ritual_of_restoration,
@card("Chimeric Egg")
def chimeric_egg(card, abilities):
    def chimeric_egg():
        return AbilityNotImplemented
    def chimeric_egg():
        return AbilityNotImplemented
    return chimeric_egg, chimeric_egg,
@card("Spellbinder")
def spellbinder(card, abilities):
    def spellbinder():
        return AbilityNotImplemented
    def spellbinder():
        return AbilityNotImplemented
    def spellbinder():
        return AbilityNotImplemented
    return spellbinder, spellbinder, spellbinder,
@card("Ageless Entity")
def ageless_entity(card, abilities):
    def ageless_entity():
        return AbilityNotImplemented
    return ageless_entity,
# NOTE(review): "<NAME>" below is an anonymization artifact — the original
# card name (presumably "Geth's Grimoire") should be restored from the set list.
@card("<NAME>")
def geths_grimoire(card, abilities):
    def geths_grimoire():
        return AbilityNotImplemented
    return geths_grimoire,
@card("Skullclamp")
def skullclamp(card, abilities):
    def skullclamp():
        return AbilityNotImplemented
    def skullclamp():
        return AbilityNotImplemented
    def skullclamp():
        return AbilityNotImplemented
    return skullclamp, skullclamp, skullclamp,
@card("Genesis Chamber")
def genesis_chamber(card, abilities):
    def genesis_chamber():
        return AbilityNotImplemented
    return genesis_chamber,
@card("Grimclaw Bats")
def grimclaw_bats(card, abilities):
    def grimclaw_bats():
        return AbilityNotImplemented
    def grimclaw_bats():
        return AbilityNotImplemented
    return grimclaw_bats, grimclaw_bats,
@card("Stir the Pride")
def stir_the_pride(card, abilities):
    def stir_the_pride():
        return AbilityNotImplemented
    def stir_the_pride():
        return AbilityNotImplemented
    return stir_the_pride, stir_the_pride,
# Auto-generated ability stubs; same-named inner defs shadow one another, so
# multi-ability tuples repeat the last def — fine while all are placeholders.
@card("Reap and Sow")
def reap_and_sow(card, abilities):
    def reap_and_sow():
        return AbilityNotImplemented
    def reap_and_sow():
        return AbilityNotImplemented
    return reap_and_sow, reap_and_sow,
@card("Crazed Goblin")
def crazed_goblin(card, abilities):
    def crazed_goblin():
        return AbilityNotImplemented
    return crazed_goblin,
@card("Echoing Courage")
def echoing_courage(card, abilities):
    def echoing_courage():
        return AbilityNotImplemented
    return echoing_courage,
@card("Mirrodin's Core")
def mirrodins_core(card, abilities):
    def mirrodins_core():
        return AbilityNotImplemented
    def mirrodins_core():
        return AbilityNotImplemented
    def mirrodins_core():
        return AbilityNotImplemented
    return mirrodins_core, mirrodins_core, mirrodins_core,
@card("Carry Away")
def carry_away(card, abilities):
    def carry_away():
        return AbilityNotImplemented
    def carry_away():
        return AbilityNotImplemented
    def carry_away():
        return AbilityNotImplemented
    return carry_away, carry_away, carry_away,
@card("Hoverguard Observer")
def hoverguard_observer(card, abilities):
    def hoverguard_observer():
        return AbilityNotImplemented
    def hoverguard_observer():
        return AbilityNotImplemented
    return hoverguard_observer, hoverguard_observer,
@card("Vedalken Engineer")
def vedalken_engineer(card, abilities):
    def vedalken_engineer():
        return AbilityNotImplemented
    return vedalken_engineer,
@card("Nim Abomination")
def nim_abomination(card, abilities):
    def nim_abomination():
        return AbilityNotImplemented
    return nim_abomination,
@card("Karstoderm")
def karstoderm(card, abilities):
    def karstoderm():
        return AbilityNotImplemented
    def karstoderm():
        return AbilityNotImplemented
    return karstoderm, karstoderm,
@card("Arcbound Fiend")
def arcbound_fiend(card, abilities):
    def arcbound_fiend():
        return AbilityNotImplemented
    def arcbound_fiend():
        return AbilityNotImplemented
    def arcbound_fiend():
        return AbilityNotImplemented
    return arcbound_fiend, arcbound_fiend, arcbound_fiend,
@card("Pulse of the Grid")
def pulse_of_the_grid(card, abilities):
    def pulse_of_the_grid():
        return AbilityNotImplemented
    return pulse_of_the_grid,
@card("Echoing Ruin")
def echoing_ruin(card, abilities):
    def echoing_ruin():
        return AbilityNotImplemented
    return echoing_ruin,
@card("Memnarch")
def memnarch(card, abilities):
    def memnarch():
        return AbilityNotImplemented
    def memnarch():
        return AbilityNotImplemented
    return memnarch, memnarch,
# Auto-generated ability stubs; same-named inner defs shadow one another, so
# multi-ability tuples repeat the last def — fine while all are placeholders.
@card("Pulse of the Fields")
def pulse_of_the_fields(card, abilities):
    def pulse_of_the_fields():
        return AbilityNotImplemented
    return pulse_of_the_fields,
@card("Burden of Greed")
def burden_of_greed(card, abilities):
    def burden_of_greed():
        return AbilityNotImplemented
    return burden_of_greed,
@card("Pulse of the Forge")
def pulse_of_the_forge(card, abilities):
    def pulse_of_the_forge():
        return AbilityNotImplemented
    return pulse_of_the_forge,
@card("Infested Roothold")
def infested_roothold(card, abilities):
    def infested_roothold():
        return AbilityNotImplemented
    def infested_roothold():
        return AbilityNotImplemented
    def infested_roothold():
        return AbilityNotImplemented
    return infested_roothold, infested_roothold, infested_roothold,
@card("Pulse of the Tangle")
def pulse_of_the_tangle(card, abilities):
    def pulse_of_the_tangle():
        return AbilityNotImplemented
    return pulse_of_the_tangle,
@card("Demon's Horn")
def demons_horn(card, abilities):
    def demons_horn():
        return AbilityNotImplemented
    return demons_horn,
@card("Darksteel Gargoyle")
def darksteel_gargoyle(card, abilities):
    def darksteel_gargoyle():
        return AbilityNotImplemented
    def darksteel_gargoyle():
        return AbilityNotImplemented
    return darksteel_gargoyle, darksteel_gargoyle,
@card("Pristine Angel")
def pristine_angel(card, abilities):
    def pristine_angel():
        return AbilityNotImplemented
    def pristine_angel():
        return AbilityNotImplemented
    def pristine_angel():
        return AbilityNotImplemented
    return pristine_angel, pristine_angel, pristine_angel,
@card("Fangren Firstborn")
def fangren_firstborn(card, abilities):
    def fangren_firstborn():
        return AbilityNotImplemented
    return fangren_firstborn,
# NOTE(review): "<NAME>" below is an anonymization artifact — the original
# card name (presumably "Tel-Jilad Wolf") should be restored from the set list.
@card("<NAME>")
def teljilad_wolf(card, abilities):
    def teljilad_wolf():
        return AbilityNotImplemented
    return teljilad_wolf,
@card("Emissary of Hope")
def emissary_of_hope(card, abilities):
    def emissary_of_hope():
        return AbilityNotImplemented
    def emissary_of_hope():
        return AbilityNotImplemented
    return emissary_of_hope, emissary_of_hope,
@card("Test of Faith")
def test_of_faith(card, abilities):
    def test_of_faith():
        return AbilityNotImplemented
    return test_of_faith,
@card("Psychic Overload")
def psychic_overload(card, abilities):
    def psychic_overload():
        return AbilityNotImplemented
    def psychic_overload():
        return AbilityNotImplemented
    def psychic_overload():
        return AbilityNotImplemented
    def psychic_overload():
        return AbilityNotImplemented
    return psychic_overload, psychic_overload, psychic_overload, psychic_overload,
@card("Whispersilk Cloak")
def whispersilk_cloak(card, abilities):
def whispersilk_cloak():
return AbilityNotImplemented
def whispersilk_cloak():
return AbilityNotImplemented
def whispersilk_cloak():
return AbilityNotImplemented
return whispersilk_cloak, whispersilk_cloak, whispersilk_cloak,
@card("Sundering Titan")
def sundering_titan(card, abilities):
def sundering_titan():
return AbilityNotImplemented
return sundering_titan,
@card("Auriok Glaivemaster")
def auriok_glaivemaster(card, abilities):
def auriok_glaivemaster():
return AbilityNotImplemented
return auriok_glaivemaster,
@card("Scavenging Scarab")
def scavenging_scarab(card, abilities):
def scavenging_scarab():
return AbilityNotImplemented
return scavenging_scarab,
@card("AEther Snap")
def aether_snap(card, abilities):
def aether_snap():
return AbilityNotImplemented
return aether_snap,
@card("Dragon's Claw")
def dragons_claw(card, abilities):
def dragons_claw():
return AbilityNotImplemented
return dragons_claw,
@card("Steelshaper Apprentice")
def steelshaper_apprentice(card, abilities):
def steelshaper_apprentice():
return AbilityNotImplemented
return steelshaper_apprentice,
@card("Quicksilver Behemoth")
def quicksilver_behemoth(card, abilities):
def quicksilver_behemoth():
return AbilityNotImplemented
def quicksilver_behemoth():
return AbilityNotImplemented
return quicksilver_behemoth, quicksilver_behemoth,
@card("Darksteel Forge")
def darksteel_forge(card, abilities):
def darksteel_forge():
return AbilityNotImplemented
return darksteel_forge,
@card("Murderous Spoils")
def murderous_spoils(card, abilities):
def murderous_spoils():
return AbilityNotImplemented
return murderous_spoils,
@card("Roaring Slagwurm")
def roaring_slagwurm(card, abilities):
def roaring_slagwurm():
return AbilityNotImplemented
return roaring_slagwurm,
@card("Wurm's Tooth")
def wurms_tooth(card, abilities):
def wurms_tooth():
return AbilityNotImplemented
return wurms_tooth,
@card("Talon of Pain")
def talon_of_pain(card, abilities):
def talon_of_pain():
return AbilityNotImplemented
def talon_of_pain():
return AbilityNotImplemented
return talon_of_pain, talon_of_pain,
@card("Mephitic Ooze")
def mephitic_ooze(card, abilities):
def mephitic_ooze():
return AbilityNotImplemented
def mephitic_ooze():
return AbilityNotImplemented
return mephitic_ooze, mephitic_ooze,
@card("Drill-Skimmer")
def drillskimmer(card, abilities):
def drillskimmer():
return AbilityNotImplemented
def drillskimmer():
return AbilityNotImplemented
return drillskimmer, drillskimmer,
@card("Purge")
def purge(card, abilities):
def purge():
return AbilityNotImplemented
return purge,
@card("Furnace Dragon")
def furnace_dragon(card, abilities):
def furnace_dragon():
return AbilityNotImplemented
def furnace_dragon():
return AbilityNotImplemented
def furnace_dragon():
return AbilityNotImplemented
return furnace_dragon, furnace_dragon, furnace_dragon,
@card("Reshape")
def reshape(card, abilities):
def reshape():
return AbilityNotImplemented
def reshape():
return AbilityNotImplemented
return reshape, reshape,
@card("Savage Beating")
def savage_beating(card, abilities):
def savage_beating():
return AbilityNotImplemented
def savage_beating():
return AbilityNotImplemented
def savage_beating():
return AbilityNotImplemented
return savage_beating, savage_beating, savage_beating,
@card("Unforge")
def unforge(card, abilities):
def unforge():
return AbilityNotImplemented
return unforge,
@card("Viridian Zealot")
def viridian_zealot(card, abilities):
def viridian_zealot():
return AbilityNotImplemented
return viridian_zealot,
@card("Trinisphere")
def trinisphere(card, abilities):
def trinisphere():
return AbilityNotImplemented
return trinisphere,
@card("Sword of Fire and Ice")
def sword_of_fire_and_ice(card, abilities):
def sword_of_fire_and_ice():
return AbilityNotImplemented
def sword_of_fire_and_ice():
return AbilityNotImplemented
def sword_of_fire_and_ice():
return AbilityNotImplemented
return sword_of_fire_and_ice, sword_of_fire_and_ice, sword_of_fire_and_ice,
@card("Eater of Days")
def eater_of_days(card, abilities):
def eater_of_days():
return AbilityNotImplemented
def eater_of_days():
return AbilityNotImplemented
return eater_of_days, eater_of_days,
@card("Tangle Spider")
def tangle_spider(card, abilities):
def tangle_spider():
return AbilityNotImplemented
def tangle_spider():
return AbilityNotImplemented
return tangle_spider, tangle_spider,
@card("Arcbound Crusher")
def arcbound_crusher(card, abilities):
def arcbound_crusher():
return AbilityNotImplemented
def | |
<gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import binascii
import codecs
import os
import platform
import re
import subprocess
import threading
import time
import frida
def main():
    """Command-line entry point for frida-trace: build the trace profile from
    the options, attach to the target process and stream formatted trace
    events until Ctrl+C (or cancellation)."""
    from colorama import Fore, Style
    import json

    from frida_tools.application import ConsoleApplication, input_with_cancellable

    class TracerApplication(ConsoleApplication, UI):
        def __init__(self):
            super(TracerApplication, self).__init__(self._await_ctrl_c)
            # Display colors are handed out round-robin from this palette as
            # new thread IDs appear (see _get_attributes below).
            self._palette = [Fore.CYAN, Fore.MAGENTA, Fore.YELLOW, Fore.GREEN, Fore.RED, Fore.BLUE]
            self._next_color = 0
            self._attributes_by_thread_id = {}
            # TID of the last printed event, used to emit a "/* TID ... */"
            # separator only when the thread changes.
            self._last_event_tid = -1

        def _add_options(self, parser):
            # Every include/exclude option funnels its value into the shared
            # profile builder through this optparse callback; the builder
            # method to invoke is passed via callback_args.
            pb = TracerProfileBuilder()
            def process_builder_arg(option, opt_str, value, parser, method, **kwargs):
                method(value)
            parser.add_option("-I", "--include-module", help="include MODULE", metavar="MODULE",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.include_modules,))
            parser.add_option("-X", "--exclude-module", help="exclude MODULE", metavar="MODULE",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.exclude_modules,))
            parser.add_option("-i", "--include", help="include FUNCTION", metavar="FUNCTION",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.include,))
            parser.add_option("-x", "--exclude", help="exclude FUNCTION", metavar="FUNCTION",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.exclude,))
            parser.add_option("-a", "--add", help="add MODULE!OFFSET", metavar="MODULE!OFFSET",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.include_relative_address,))
            # NOTE: -T takes no value (no type=), so the callback receives
            # value=None; -t shares the same builder method with a MODULE arg.
            parser.add_option("-T", "--include-imports", help="include program's imports",
                              action='callback', callback=process_builder_arg, callback_args=(pb.include_imports,))
            parser.add_option("-t", "--include-module-imports", help="include MODULE imports", metavar="MODULE",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.include_imports,))
            parser.add_option("-m", "--include-objc-method", help="include OBJC_METHOD", metavar="OBJC_METHOD",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.include_objc_method,))
            parser.add_option("-M", "--exclude-objc-method", help="exclude OBJC_METHOD", metavar="OBJC_METHOD",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.exclude_objc_method,))
            parser.add_option("-j", "--include-java-method", help="include JAVA_METHOD", metavar="JAVA_METHOD",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.include_java_method,))
            parser.add_option("-J", "--exclude-java-method", help="exclude JAVA_METHOD", metavar="JAVA_METHOD",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.exclude_java_method,))
            parser.add_option("-s", "--include-debug-symbol", help="include DEBUG_SYMBOL", metavar="DEBUG_SYMBOL",
                              type='string', action='callback', callback=process_builder_arg, callback_args=(pb.include_debug_symbol,))
            parser.add_option("-q", "--quiet", help="do not format output messages", action='store_true', default=False)
            parser.add_option("-d", "--decorate", help="add module name to generated onEnter log statement", action='store_true', default=False)
            parser.add_option("-S", "--init-session", help="path to JavaScript file used to initialize the session", metavar="PATH",
                              type='string', action='append', default=[])
            parser.add_option("-P", "--parameters", help="parameters as JSON, exposed as a global named 'parameters'", metavar="PARAMETERS_JSON",
                              type='string', action='store', default=None)
            parser.add_option("-o", "--output", help="dump messages to file", metavar="OUTPUT", type='string')
            self._profile_builder = pb

        def _usage(self):
            return "usage: %prog [options] target"

        def _initialize(self, parser, options, args):
            self._tracer = None
            self._profile = self._profile_builder.build()
            self._quiet = options.quiet
            self._decorate = options.decorate
            self._output = None
            self._output_path = options.output
            # Read each -S session-init script up front so a bad path fails
            # before we attach to the target.
            self._init_scripts = []
            for path in options.init_session:
                with codecs.open(path, 'rb', 'utf-8') as f:
                    source = f.read()
                self._init_scripts.append(InitScript(path, source))
            if options.parameters is not None:
                try:
                    params = json.loads(options.parameters)
                except Exception as e:
                    raise ValueError("failed to parse parameters argument as JSON: {}".format(e))
                if not isinstance(params, dict):
                    raise ValueError("failed to parse parameters argument as JSON: not an object")
                self._parameters = params
            else:
                self._parameters = {}

        def _needs_target(self):
            return True

        def _start(self):
            if self._output_path is not None:
                self._output = OutputFile(self._output_path)
            # Spawned ('file') targets are instrumented 'early' (before main
            # runs); attached targets are instrumented 'late'.
            stage = 'early' if self._target[0] == 'file' else 'late'
            self._tracer = Tracer(self._reactor, FileRepository(self._reactor, self._decorate), self._profile,
                                  self._init_scripts, log_handler=self._log)
            try:
                self._tracer.start_trace(self._session, stage, self._parameters, self._runtime, self)
            except Exception as e:
                self._update_status("Failed to start tracing: {error}".format(error=e))
                self._exit(1)

        def _stop(self):
            self._tracer.stop()
            self._tracer = None
            if self._output is not None:
                self._output.close()
                self._output = None

        def _await_ctrl_c(self, reactor):
            # Block the UI thread until the user interrupts or the reactor
            # cancels the pending input.
            while True:
                try:
                    input_with_cancellable(reactor.ui_cancellable)
                except frida.OperationCancelledError:
                    break
                except KeyboardInterrupt:
                    break

        def on_trace_progress(self, status, *params):
            if status == 'initializing':
                self._update_status("Instrumenting...")
            elif status == 'initialized':
                self._resume()
            elif status == 'started':
                (count,) = params
                if count == 1:
                    plural = ""
                else:
                    plural = "s"
                self._update_status("Started tracing %d function%s. Press Ctrl+C to stop." % (count, plural))

        def on_trace_warning(self, message):
            self._print(Fore.RED + Style.BRIGHT + "Warning" + Style.RESET_ALL + ": " + message)

        def on_trace_error(self, message):
            self._print(Fore.RED + Style.BRIGHT + "Error" + Style.RESET_ALL + ": " + message)
            self._exit(1)

        def on_trace_events(self, events):
            no_attributes = Style.RESET_ALL
            for timestamp, thread_id, depth, message in events:
                if self._output is not None:
                    self._output.append(message + "\n")
                elif self._quiet:
                    self._print(message)
                else:
                    indent = depth * "   | "
                    attributes = self._get_attributes(thread_id)
                    if thread_id != self._last_event_tid:
                        self._print("%s           /* TID 0x%x */%s" % (attributes, thread_id, Style.RESET_ALL))
                        self._last_event_tid = thread_id
                    self._print("%6d ms  %s%s%s%s" % (timestamp, attributes, indent, message, no_attributes))

        def on_trace_handler_create(self, target, handler, source):
            if self._quiet:
                return
            self._print("%s: Auto-generated handler at \"%s\"" % (target, source.replace("\\", "\\\\")))

        def on_trace_handler_load(self, target, handler, source):
            if self._quiet:
                return
            self._print("%s: Loaded handler at \"%s\"" % (target, source.replace("\\", "\\\\")))

        def _get_attributes(self, thread_id):
            # Lazily assign a palette color per thread; every second pass
            # through the palette adds Style.BRIGHT so more threads stay
            # visually distinct.
            attributes = self._attributes_by_thread_id.get(thread_id, None)
            if attributes is None:
                color = self._next_color
                self._next_color += 1
                attributes = self._palette[color % len(self._palette)]
                if (1 + int(color / len(self._palette))) % 2 == 0:
                    attributes += Style.BRIGHT
                self._attributes_by_thread_id[thread_id] = attributes
            return attributes

    app = TracerApplication()
    app.run()
class TracerProfileBuilder(object):
    """Collects include/exclude trace-target specifications.

    Every ``include_*``/``exclude_*`` method appends one
    ``(operation, scope, value)`` tuple per supplied glob and returns
    ``self`` so calls can be chained; :meth:`build` wraps the accumulated
    spec in a :class:`TracerProfile`.
    """

    def __init__(self):
        self._spec = []

    def _add_entries(self, operation, scope, values):
        # Shared implementation behind all of the public builder methods.
        for value in values:
            self._spec.append((operation, scope, value))
        return self

    def include_modules(self, *module_name_globs):
        return self._add_entries('include', 'module', module_name_globs)

    def exclude_modules(self, *module_name_globs):
        return self._add_entries('exclude', 'module', module_name_globs)

    def include(self, *function_name_globs):
        return self._add_entries('include', 'function', function_name_globs)

    def exclude(self, *function_name_globs):
        return self._add_entries('exclude', 'function', function_name_globs)

    def include_relative_address(self, *address_rel_offsets):
        return self._add_entries('include', 'relative-function', address_rel_offsets)

    def include_imports(self, *module_name_globs):
        return self._add_entries('include', 'imports', module_name_globs)

    def include_objc_method(self, *function_name_globs):
        return self._add_entries('include', 'objc-method', function_name_globs)

    def exclude_objc_method(self, *function_name_globs):
        return self._add_entries('exclude', 'objc-method', function_name_globs)

    def include_java_method(self, *function_name_globs):
        return self._add_entries('include', 'java-method', function_name_globs)

    def exclude_java_method(self, *function_name_globs):
        return self._add_entries('exclude', 'java-method', function_name_globs)

    def include_debug_symbol(self, *function_name_globs):
        return self._add_entries('include', 'debug-symbol', function_name_globs)

    def build(self):
        return TracerProfile(self._spec)
class TracerProfile(object):
    """Immutable-by-convention value object holding the final list of
    ``(operation, scope, value)`` trace-spec tuples produced by
    :class:`TracerProfileBuilder`."""

    def __init__(self, spec):
        # Kept as-is; the agent consumes this list verbatim.
        self.spec = spec
class Tracer(object):
    """Loads the tracing agent script into the target process and relays
    messages between the agent and the UI.

    Fixes applied:
      * ``init_scripts`` previously defaulted to a shared mutable ``[]``,
        so every ``Tracer`` created without the argument aliased the same
        list; the default is now ``None`` with a fresh list per instance.
      * Two bare ``except:`` clauses narrowed to ``except Exception`` so
        ``KeyboardInterrupt``/``SystemExit`` are not swallowed.
    """

    def __init__(self, reactor, repository, profile, init_scripts=None, log_handler=None):
        """
        :param reactor: reactor used to schedule message handling on the UI side.
        :param repository: :class:`Repository` storing per-target handler scripts.
        :param profile: :class:`TracerProfile` describing what to instrument.
        :param init_scripts: optional list of ``InitScript`` objects to inject.
        :param log_handler: callable that receives agent log records.
        """
        self._reactor = reactor
        self._repository = repository
        self._profile = profile
        self._script = None
        self._agent = None
        # Fresh list per instance -- never share a default mutable argument.
        self._init_scripts = [] if init_scripts is None else init_scripts
        self._log_handler = log_handler

    def start_trace(self, session, stage, parameters, runtime, ui):
        # Forward repository lifecycle events to the UI / agent.
        def on_create(*args):
            ui.on_trace_handler_create(*args)
        self._repository.on_create(on_create)

        def on_load(*args):
            ui.on_trace_handler_load(*args)
        self._repository.on_load(on_load)

        def on_update(target, handler, source):
            self._agent.update(target.identifier, target.display_name, handler)
        self._repository.on_update(on_update)

        def on_message(message, data):
            self._reactor.schedule(lambda: self._on_message(message, data, ui))

        ui.on_trace_progress('initializing')
        data_dir = os.path.dirname(__file__)
        if runtime == 'v8':
            # V8 runtime: load the agent from JavaScript source.
            with codecs.open(os.path.join(data_dir, "tracer_agent.js"), 'r', 'utf-8') as f:
                source = f.read()
            script = session.create_script(name="tracer",
                                           source=source,
                                           runtime='v8')
        else:
            # Default (Duktape) runtime: load pre-compiled bytecode.
            with open(os.path.join(data_dir, "tracer_agent.duk"), 'rb') as f:
                bytecode = f.read()
            script = session.create_script_from_bytes(name="tracer",
                                                      data=bytecode,
                                                      runtime='duk')
        self._script = script
        script.set_log_handler(self._log_handler)
        script.on('message', on_message)
        script.load()
        self._agent = script.exports
        # Renamed the comprehension variable (was `script`), which shadowed
        # the agent script local above.
        raw_init_scripts = [{'filename': init_script.filename, 'source': init_script.source}
                            for init_script in self._init_scripts]
        self._agent.init(stage, parameters, raw_init_scripts, self._profile.spec)

    def stop(self):
        if self._script is not None:
            try:
                self._script.unload()
            except Exception:
                # Best effort: the target process may already be gone.
                pass
            self._script = None

    def _on_message(self, message, data, ui):
        handled = False

        if message['type'] == 'send':
            try:
                payload = message['payload']
                mtype = payload['type']
                params = (mtype, payload, data, ui)
            except Exception:
                # As user scripts may use send() we need to be prepared for
                # payloads that don't follow the agent's message shape.
                params = None
            if params is not None:
                handled = self._try_handle_message(*params)

        if not handled:
            print(message)

    def _try_handle_message(self, mtype, params, data, ui):
        """Dispatch one agent message; return True when it was recognized."""
        if mtype == "events:add":
            events = [(timestamp, thread_id, depth, message)
                      for target_id, timestamp, thread_id, depth, message in params['events']]
            ui.on_trace_events(events)
            return True

        if mtype == "handlers:get":
            # The agent asks for handler scripts for a batch of targets;
            # reply with one script per (scope, member), assigning sequential
            # target IDs starting at baseId.
            flavor = params['flavor']
            base_id = params['baseId']

            scripts = []
            response = {
                'type': "reply:{}".format(base_id),
                'scripts': scripts
            }

            repo = self._repository
            next_id = base_id
            for scope in params['scopes']:
                scope_name = scope['name']
                for member_name in scope['members']:
                    target = TraceTarget(next_id, flavor, scope_name, member_name)
                    next_id += 1
                    handler = repo.ensure_handler(target)
                    scripts.append(handler)

            self._script.post(response)
            return True

        if mtype == "agent:initialized":
            ui.on_trace_progress('initialized')
            return True

        if mtype == "agent:started":
            self._repository.commit_handlers()
            ui.on_trace_progress('started', params['count'])
            return True

        if mtype == "agent:warning":
            ui.on_trace_warning(params['message'])
            return True

        if mtype == "agent:error":
            ui.on_trace_error(params['message'])
            return True

        return False
class TraceTarget(object):
    """Identifies one traced function/method.

    ``name`` may arrive either as a plain string or as a two-element list of
    ``[mangled_name, pretty_name]``; in the latter case the pretty name is
    used for display.
    """

    def __init__(self, identifier, flavor, scope, name):
        self.identifier = identifier
        self.flavor = flavor
        self.scope = scope
        if isinstance(name, list):
            self.name, self.display_name = name[0], name[1]
        else:
            self.name = self.display_name = name

    def __str__(self):
        return self.display_name
class Repository(object):
    """Base class for stores of per-target handler scripts.

    Subclasses implement :meth:`ensure_handler`; this base provides the
    observer registration (create/load/update callbacks) and the stub-handler
    dispatch shared by all repositories.
    """

    def __init__(self):
        # Observer callbacks, registered through on_create/on_load/on_update.
        self._on_create_callback = None
        self._on_load_callback = None
        self._on_update_callback = None
        # When True, generated stub handlers embed the module name in their
        # log statements (set by subclasses).
        self._decorate = False

    def ensure_handler(self, target):
        # Subclasses must return handler source for `target`, generating a
        # stub (via _create_stub_handler) when none exists yet.
        raise NotImplementedError("not implemented")

    def commit_handlers(self):
        # Optional hook: subclasses may persist pending handlers here.
        pass

    def on_create(self, callback):
        self._on_create_callback = callback

    def on_load(self, callback):
        self._on_load_callback = callback

    def on_update(self, callback):
        self._on_update_callback = callback

    def _notify_create(self, target, handler, source):
        if self._on_create_callback is not None:
            self._on_create_callback(target, handler, source)

    def _notify_load(self, target, handler, source):
        if self._on_load_callback is not None:
            self._on_load_callback(target, handler, source)

    def _notify_update(self, target, handler, source):
        if self._on_update_callback is not None:
            self._on_update_callback(target, handler, source)

    def _create_stub_handler(self, target, decorate):
        # Java targets get a Java-flavored stub; everything else (c/objc)
        # goes through the native stub generator.
        if target.flavor == 'java':
            return self._create_stub_java_handler(target, decorate)
        else:
            return self._create_stub_native_handler(target, decorate)
def _create_stub_native_handler(self, target, decorate):
if target.flavor == 'objc':
state = {"index": 2}
def objc_arg(m):
index = state["index"]
r = ":' + args[%d] + ' " % index
state["index"] = index + 1
return r
log_str = "'" + re.sub(r':', objc_arg, target.display_name) + "'"
if log_str.endswith("' ]'"):
log_str = log_str[:-3] + "]'"
else:
for man_section in (2, 3):
args = []
try:
with open(os.devnull, 'w') as devnull:
man_argv = ["man"]
if platform.system() != "Darwin":
man_argv.extend(["-E", "UTF-8"])
man_argv.extend(["-P", | |
#!/usr/bin/env python
#
# imagewrapper.py - The ImageWrapper class.
#
# Author: <NAME> <<EMAIL>>
#
"""This module provides the :class:`ImageWrapper` class, which can be used
to manage data access to ``nibabel`` NIFTI images.
Terminology
-----------
There are some confusing terms used in this module, so it may be useful to
get their definitions straight:
- *Coverage*: The portion of an image that has been covered in the data
range calculation. The ``ImageWrapper`` keeps track of
the coverage for individual volumes within a 4D image (or
slices in a 3D image).
- *Slice*: Portion of the image data which is being accessed. A slice
comprises either a tuple of ``slice`` objects (or integers),
or a sequence of ``(low, high)`` tuples, specifying the
index range into each image dimension that is covered by
the slice.
- *Expansion*: A sequence of ``(low, high)`` tuples, specifying an
index range into each image dimension, that is used to
*expand* the *coverage* of an image, based on a given set
of *slices*.
- *Fancy slice*: Any object which is used to slice an array, and is not
an ``int``, ``slice``, or ``Ellipsis``, or sequence of
these.
"""
import logging
import collections
import collections.abc as abc
import itertools as it
import numpy as np
import nibabel as nib
import fsl.utils.notifier as notifier
import fsl.utils.naninfrange as nir
import fsl.utils.idle as idle
log = logging.getLogger(__name__)
class ImageWrapper(notifier.Notifier):
"""The ``ImageWrapper`` class is a convenience class which manages data
access to ``nibabel`` NIFTI images. The ``ImageWrapper`` class can be
used to:
- Control whether the image is loaded into memory, or kept on disk
- Incrementally update the known image data range, as more image
data is read in.
*In memory or on disk?*
The image data will be kept on disk, and accessed through the
``nibabel.Nifti1Image.dataobj`` (or ``nibabel.Nifti2Image.dataobj``) array
proxy, if:
- The ``loadData`` parameter to :meth:`__init__` is ``False``.
- The :meth:`loadData` method never gets called.
- The image data is not modified (via :meth:`__setitem__`).
If any of these conditions do not hold, the image data will be loaded into
memory and accessed directly.
*Image dimensionality*
The ``ImageWrapper`` abstracts away trailing image dimensions of length 1.
This means that if the header for a NIFTI image specifies that the image
has four dimensions, but the fourth dimension is of length 1, you do not
need to worry about indexing that fourth dimension. However, all NIFTI
images will be presented as having at least three dimensions, so if your
image header specifies a third dimension of length 1, you will still
need to provide an index of 0 for that dimension, for all data accesses.
*Data access*
The ``ImageWrapper`` can be indexed in one of two ways:
- With basic ``numpy``-like multi-dimensional array slicing (with step
sizes of 1)
- With boolean array indexing, where the boolean/mask array has the
same shape as the image data.
See https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html for
more details on numpy indexing.
*Data range*
In order to avoid the computational overhead of calculating the image data
range (its minimum/maximum values) when an image is first loaded in, an
``ImageWrapper`` incrementally updates the known image data range as data
is accessed. The ``ImageWrapper`` keeps track of the image data *coverage*,
the portion of the image which has already been considered in the data
range calculation. When data from a region of the image not in the coverage
is accessed, the coverage is expanded to include this region. The coverage
is always expanded in a rectilinear manner, i.e. the coverage is always
rectangular for a 2D image, or cuboid for a 3D image.
For a 4D image, the ``ImageWrapper`` internally maintains a separate
coverage and known data range for each 3D volume within the image. For a 3D
image, separate coverages and data ranges are stored for each 2D slice.
The ``ImageWrapper`` implements the :class:`.Notifier` interface.
Listeners can register to be notified whenever the known image data range
is updated. The data range can be accessed via the :attr:`dataRange`
property.
The ``ImageWrapper`` class uses the following functions (also defined in
this module) to keep track of the portion of the image that has currently
been included in the data range calculation:
.. autosummary::
:nosignatures:
isValidFancySliceObj
canonicalSliceObj
sliceObjToSliceTuple
sliceTupleToSliceObj
sliceCovered
calcExpansion
adjustCoverage
"""
    def __init__(self,
                 image,
                 name=None,
                 loadData=False,
                 dataRange=None,
                 threaded=False):
        """Create an ``ImageWrapper``.

        :arg image:     A ``nibabel.Nifti1Image`` or ``nibabel.Nifti2Image``.

        :arg name:      A name for this ``ImageWrapper``, solely used for
                        debug log messages.

        :arg loadData:  If ``True``, the image data is loaded into memory.
                        Otherwise it is kept on disk (and data access is
                        performed through the ``nibabel.Nifti1Image.dataobj``
                        array proxy).

        :arg dataRange: A tuple containing the initial ``(min, max)`` data
                        range to use. See the :meth:`reset` method for
                        important information about this parameter.

        :arg threaded:  If ``True``, the data range is updated on a
                        :class:`.TaskThread`. Otherwise (the default), the
                        data range is updated directly on reads/writes.
        """
        # Function-level import -- presumably avoids a circular import
        # between fsl.data.image and this module; confirm.
        import fsl.data.image as fslimage

        self.__image = image
        self.__name = name
        self.__taskThread = None

        # Save the number of 'real' dimensions,
        # that is the number of dimensions minus
        # any trailing dimensions of length 1
        self.__numRealDims = len(image.shape)
        for d in reversed(image.shape):
            if d == 1: self.__numRealDims -= 1
            else: break

        # Degenerate case - less
        # than three real dimensions
        if self.__numRealDims < 3:
            self.__numRealDims = min(3, len(image.shape))

        # And save the number of
        # 'padding' dimensions too.
        self.__numPadDims = len(image.shape) - self.__numRealDims

        # Too many shapes! Figure out
        # what shape we should present
        # the data as (e.g. at least 3
        # dimensions). This is used in
        # __getitem__ to force the
        # result to have the correct
        # dimensionality.
        self.__canonicalShape = fslimage.canonicalShape(image.shape)

        # The internal state is stored
        # in these attributes - they're
        # initialised in the reset method.
        self.__range = None
        self.__coverage = None
        self.__volRanges = None
        self.__covered = False

        self.reset(dataRange)

        # We keep an internal ref to
        # the data numpy array if/when
        # it is loaded in memory
        self.__data = None

        if loadData or image.in_memory:
            self.loadData()

        if threaded:
            self.__taskThread = idle.TaskThread()
            # Daemonised so a lingering task thread never blocks
            # interpreter shutdown.
            self.__taskThread.daemon = True
            self.__taskThread.start()
    def __del__(self):
        """If this ``ImageWrapper`` was created with ``threaded=True``,
        the :class:`.TaskThread` is stopped.
        """
        # Release the (potentially large) image/data references, then stop
        # the worker thread if one was created.
        self.__image = None
        self.__data = None
        if self.__taskThread is not None:
            self.__taskThread.stop()
            self.__taskThread = None
def getTaskThread(self):
"""If this ``ImageWrapper`` was created with ``threaded=True``,
this method returns the ``TaskThread`` that is used for running
data range calculation tasks. Otherwise, this method returns
``False``.
"""
return self.__taskThread
def reset(self, dataRange=None):
"""Reset the internal state and known data range of this
``ImageWrapper``.
:arg dataRange: A tuple containing the initial ``(min, max)`` data
range to use.
.. note:: The ``dataRange`` parameter is intended for situations where
the image data range is known in advance (e.g. it was
calculated earlier, and the image is being re-loaded). If a
``dataRange`` is passed in, it will *not* be overwritten by
any range calculated from the data, unless the calculated
data range is wider than the provided ``dataRange``.
"""
if dataRange is None:
dataRange = None, None
image = self.__image
ndims = self.__numRealDims - 1
nvols = image.shape[self.__numRealDims - 1]
# The current known image data range. This
# gets updated as more image data gets read.
self.__range = dataRange
# The coverage array is used to keep track of
# the portions of the image which have been
# considered in the data range calculation.
# We use this coverage to avoid unnecessarily
# re-calculating the data range on the same
# part of the image.
#
# First of all, we're going to store a separate
# 'coverage' for each 2D slice in the 3D image
# (or 3D volume for 4D images). This effectively
# means a separate coverage for each index in the
# last 'real' image dimension (see above).
#
# For each slice/volume, the the coverage is
# stored as sequences of (low, high) indices, one
# for each dimension in the slice/volume (e.g.
| |
i.e.
no possibility for noise. Lindblad SPAM operations are the "normal"
way to allow SPAM noise, in which case error terms up to weight
`maxSpamWeight` are included. Tensor-product operations require that
the state prep and POVM effects have a tensor-product structure; the
"tensorproduct" mode exists for historical reasons and is *deprecated*
in favor of `"lindblad"`; use it only if you know what you're doing.
addIdleNoiseToAllGates: bool, optional
Whether the global idle should be added as a factor following the
ideal action of each of the non-idle gates.
errcomp_type : {"gates","errorgens"}
How errors are composed when creating layer operations in the created
model. `"gates"` means that the errors on multiple gates in a single
layer are composed as separate and subsequent processes. Specifically,
the layer operation has the form `Composed(target,idleErr,cloudErr)`
where `target` is a composition of all the ideal gate operations in the
layer, `idleErr` is idle error (`.operation_blks['layers']['globalIdle']`),
and `cloudErr` is the composition (ordered as layer-label) of cloud-
noise contributions, i.e. a map that acts as the product of exponentiated
error-generator matrices. `"errorgens"` means that layer operations
have the form `Composed(target, error)` where `target` is as above and
`error` results from composing the idle and cloud-noise error
*generators*, i.e. a map that acts as the exponentiated sum of error
generators (ordering is irrelevant in this case).
independent_clouds : bool, optional
Currently this must be set to True. In a future version, setting to
true will allow all the clouds of a given gate name to have a similar
cloud-noise process, mapped to the full qubit graph via a stencil.
verbosity : int, optional
An integer >= 0 dictating how must output to send to stdout.
"""
printer = _VerbosityPrinter.build_printer(verbosity)
if custom_gates is None: custom_gates = {}
if nonstd_gate_unitaries is None: nonstd_gate_unitaries = {}
std_unitaries = _itgs.get_standard_gatename_unitaries()
#Get evotype
_, evotype = _gt.split_lindblad_paramtype(parameterization)
assert(evotype in ("densitymx", "svterm", "cterm")), "State-vector evolution types not allowed."
gatedict = _collections.OrderedDict()
for name in gate_names:
if name in custom_gates:
gatedict[name] = custom_gates[name]
else:
U = nonstd_gate_unitaries.get(name, std_unitaries.get(name, None))
if U is None:
raise KeyError("'%s' gate unitary needs to be provided by `nonstd_gate_unitaries` arg" % name)
if callable(U): # then assume a function: args -> unitary
U0 = U(None) # U fns must return a sample unitary when passed None to get size.
gatedict[name] = _opfactory.UnitaryOpFactory(U, U0.shape[0], evotype=evotype)
else:
gatedict[name] = _bt.change_basis(_gt.unitary_to_process_mx(U), "std", "pp")
# assume evotype is a densitymx or term type
#Add anything from custom_gates directly if it wasn't added already
for lbl, gate in custom_gates.items():
if lbl not in gate_names: gatedict[lbl] = gate
if qubit_labels is None:
qubit_labels = tuple(range(nQubits))
if not independent_clouds:
raise NotImplementedError("Non-independent noise clounds are not supported yet!")
if isinstance(geometry, _qgraph.QubitGraph):
qubitGraph = geometry
else:
qubitGraph = _qgraph.QubitGraph.common_graph(nQubits, geometry, directed=False,
qubit_labels=qubit_labels)
printer.log("Created qubit graph:\n" + str(qubitGraph))
#Process "auto" sim_type
if sim_type == "auto":
if evotype in ("svterm", "cterm"): sim_type = "termorder:1"
else: sim_type = "map" if nQubits > 2 else "matrix"
assert(sim_type in ("matrix", "map") or sim_type.startswith("termorder") or sim_type.startswith("termgap"))
#Global Idle
if maxIdleWeight > 0:
printer.log("Creating Idle:")
global_idle_layer = _build_nqn_global_noise(
qubitGraph, maxIdleWeight, sparse,
sim_type, parameterization, errcomp_type, printer - 1)
else:
global_idle_layer = None
#SPAM
if spamtype == "static" or maxSpamWeight == 0:
if maxSpamWeight > 0:
_warnings.warn(("`spamtype == 'static'` ignores the supplied "
"`maxSpamWeight=%d > 0`") % maxSpamWeight)
prep_layers = [_sv.ComputationalSPAMVec([0] * nQubits, evotype)]
povm_layers = {'Mdefault': _povm.ComputationalBasisPOVM(nQubits, evotype)}
elif spamtype == "tensorproduct":
_warnings.warn("`spamtype == 'tensorproduct'` is deprecated!")
basis1Q = _BuiltinBasis("pp", 4)
prep_factors = []; povm_factors = []
from ..construction import basis_build_vector
v0 = basis_build_vector("0", basis1Q)
v1 = basis_build_vector("1", basis1Q)
# Historical use of TP for non-term-based cases?
# - seems we could remove this. FUTURE REMOVE?
povmtyp = rtyp = "TP" if parameterization in \
("CPTP", "H+S", "S", "H+S+A", "S+A", "H+D+A", "D+A", "D") \
else parameterization
for i in range(nQubits):
prep_factors.append(
_sv.convert(_sv.StaticSPAMVec(v0), rtyp, basis1Q))
povm_factors.append(
_povm.convert(_povm.UnconstrainedPOVM(([
('0', _sv.StaticSPAMVec(v0)),
('1', _sv.StaticSPAMVec(v1))])), povmtyp, basis1Q))
prep_layers = [_sv.TensorProdSPAMVec('prep', prep_factors)]
povm_layers = {'Mdefault': _povm.TensorProdPOVM(povm_factors)}
elif spamtype == "lindblad":
prepPure = _sv.ComputationalSPAMVec([0] * nQubits, evotype)
prepNoiseMap = _build_nqn_global_noise(
qubitGraph, maxSpamWeight, sparse, sim_type, parameterization, errcomp_type, printer - 1)
prep_layers = [_sv.LindbladSPAMVec(prepPure, prepNoiseMap, "prep")]
povmNoiseMap = _build_nqn_global_noise(
qubitGraph, maxSpamWeight, sparse, sim_type, parameterization, errcomp_type, printer - 1)
povm_layers = {'Mdefault': _povm.LindbladPOVM(povmNoiseMap, None, "pp")}
else:
raise ValueError("Invalid `spamtype` argument: %s" % spamtype)
weight_maxhops_tuples_1Q = [(1, maxhops + extraWeight1Hops)] + \
[(1 + x, maxhops) for x in range(1, extraGateWeight + 1)]
cloud_maxhops_1Q = max([mx for wt, mx in weight_maxhops_tuples_1Q]) # max of max-hops
weight_maxhops_tuples_2Q = [(1, maxhops + extraWeight1Hops), (2, maxhops)] + \
[(2 + x, maxhops) for x in range(1, extraGateWeight + 1)]
cloud_maxhops_2Q = max([mx for wt, mx in weight_maxhops_tuples_2Q]) # max of max-hops
def build_cloudnoise_fn(lbl):
gate_nQubits = len(lbl.sslbls)
if gate_nQubits not in (1, 2):
raise ValueError("Only 1- and 2-qubit gates are supported. %s acts on %d qubits!"
% (str(lbl.name), gate_nQubits))
weight_maxhops_tuples = weight_maxhops_tuples_1Q if len(lbl.sslbls) == 1 else weight_maxhops_tuples_2Q
return _build_nqn_cloud_noise(
lbl.sslbls, qubitGraph, weight_maxhops_tuples,
errcomp_type=errcomp_type, sparse=sparse, sim_type=sim_type,
parameterization=parameterization, verbosity=printer - 1)
def build_cloudkey_fn(lbl):
cloud_maxhops = cloud_maxhops_1Q if len(lbl.sslbls) == 1 else cloud_maxhops_2Q
cloud_inds = tuple(qubitGraph.radius(lbl.sslbls, cloud_maxhops))
cloud_key = (tuple(lbl.sslbls), tuple(sorted(cloud_inds))) # (sets are unhashable)
return cloud_key
return cls(nQubits, gatedict, availability, qubit_labels, geometry,
global_idle_layer, prep_layers, povm_layers,
build_cloudnoise_fn, build_cloudkey_fn,
sim_type, evotype, errcomp_type,
addIdleNoiseToAllGates, sparse, printer)
def __init__(self, nQubits, gatedict, availability=None,
qubit_labels=None, geometry="line",
global_idle_layer=None, prep_layers=None, povm_layers=None,
build_cloudnoise_fn=None, build_cloudkey_fn=None,
sim_type="map", evotype="densitymx", errcomp_type="gates",
addIdleNoiseToAllGates=True, sparse=False, verbosity=0):
"""
Create a n-qubit model using a low-weight and geometrically local
error model with a common "global idle" operation.
This constructor relies on factory functions being passed to it
    which generate the cloud-noise operators - noise that is specific
to a gate but may act on a neighborhood or cloud around the gate's
target qubits.
Parameters
----------
nQubits : int
The number of qubits
gatedict : dict
A dictionary (an `OrderedDict` if you care about insertion order) that
associates with string-type gate names (e.g. `"Gx"`) :class:`LinearOperator`,
`numpy.ndarray`, or :class:`OpFactory` objects. When the objects may act on
fewer than the total number of qubits (determined by their dimension/shape) then
they are repeatedly embedded into `nQubits`-qubit gates as specified by their
`availability`. These operations represent the ideal target operations, and
thus, any `LinearOperator` or `OpFactory` objects must be *static*, i.e., have
zero parameters.
availability : dict, optional
A dictionary whose keys are the same gate names as in
`gatedict` and whose values are lists of qubit-label-tuples. Each
qubit-label-tuple must have length equal to the number of qubits
the corresponding gate acts upon, and causes that gate to be
embedded to act on the specified qubits. For example,
`{ 'Gx': [(0,),(1,),(2,)], 'Gcnot': [(0,1),(1,2)] }` would cause
the `1-qubit `'Gx'`-gate to be embedded three times, acting on qubits
0, 1, and 2, and the 2-qubit `'Gcnot'`-gate to be embedded twice,
acting on qubits 0 & 1 and 1 & 2. Instead of a list of tuples,
values of `availability` may take the special values:
- `"all-permutations"` and `"all-combinations"` equate to all possible
permutations and combinations of the appropriate number of qubit labels
    (determined by the gate's dimension).
- `"all-edges"` equates to all the vertices, for 1Q gates, and all the
edges, for 2Q gates of the geometry.
- `"arbitrary"` or `"*"` means that the corresponding gate can be placed
on any target qubits via an :class:`EmbeddingOpFactory` (uses less
memory but slower than `"all-permutations"`.
If a gate name (a key of `gatedict`) is not present in `availability`,
the default is `"all-edges"`.
qubit_labels : tuple, optional
The circuit-line labels for each of the qubits, which can be integers
and/or strings. Must be of length `nQubits`. If None, then the
integers from 0 to `nQubits-1` are used.
geometry : {"line","ring","grid","torus"} or QubitGraph
The type of connectivity among the qubits, specifying a
graph used to define neighbor relationships. Alternatively,
a :class:`QubitGraph` object with node labels equal to
`qubit_labels` may be passed directly.
global_idle_layer : LinearOperator
A global idle operation which acts on all | |
assert batch.multiply(3, 4) is None
begin = time.time()
results = batch(oneway=True)
duration = time.time() - begin
assert duration < 0.1, "oneway batch with delay should return almost immediately"
assert results is None
def testPyroTracebackNormal(self):
    """A remote ZeroDivisionError must carry the remote traceback text."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        with pytest.raises(ZeroDivisionError) as excinfo:
            proxy.divide(999, 0)  # trigger the remote error
        # the pyro traceback helper must expose the server-side details
        remote_tb = "".join(Pyro5.errors.get_pyro_traceback(excinfo.type, excinfo.value, excinfo.tb))
        for expected in ("Remote traceback:", "ZeroDivisionError", "return x // y"):
            assert expected in remote_tb
def testPyroTracebackBatch(self):
    """Batched calls must expose the remote traceback on error too."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        batch = Pyro5.client.BatchProxy(proxy)
        assert batch.divide(999, 0) is None  # queued only, not executed yet
        results = batch()
        with pytest.raises(ZeroDivisionError) as excinfo:
            next(results)
        # the magic pyro traceback attribute must work for batch methods as well
        remote_tb = "".join(Pyro5.errors.get_pyro_traceback(excinfo.type, excinfo.value, excinfo.tb))
        for expected in ("Remote traceback:", "ZeroDivisionError", "return x // y"):
            assert expected in remote_tb
        with pytest.raises(StopIteration):
            next(results)  # the error terminates the batch result stream
def testAutoProxy(self):
    # Objects registered with the daemon travel as proxies; unregistered
    # objects travel by value as plain instances. Registration state is
    # toggled mid-test, so the order of the calls below matters.
    obj = ServerTestObject()
    with Pyro5.client.Proxy(self.objectUri) as p:
        result = p.echo(obj)
        assert isinstance(result, ServerTestObject), "non-pyro object must be returned as normal class"
        self.daemon.register(obj)
        result = p.echo(obj)
        assert isinstance(result, Pyro5.client.Proxy), "serialized pyro object must be a proxy"
        self.daemon.register(ServerTestObject)  # register the class itself, so factory results proxy too
        new_result = result.new_test_object()
        assert isinstance(new_result, Pyro5.client.Proxy), "serialized pyro object must be a proxy"
        self.daemon.unregister(ServerTestObject)
        self.daemon.unregister(obj)
        result = p.echo(obj)
        assert isinstance(result, ServerTestObject), "unregistered pyro object must be normal class again"
def testConnectOnce(self):
    """_pyroBind opens the connection on the first call only."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        assert proxy._pyroBind(), "first bind should always connect"
        assert not proxy._pyroBind(), "second bind should not connect again"
def testMaxMsgSize(self):
    """Messages above MAX_MESSAGE_SIZE must raise a ProtocolError."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        payload = [42] * 1000
        assert proxy.echo(payload) == payload  # fits under the default limit
        try:
            config.MAX_MESSAGE_SIZE = 999  # far smaller than the payload
            with pytest.raises(Pyro5.errors.ProtocolError):
                _ = proxy.echo(payload)  # message too large now
        finally:
            config.MAX_MESSAGE_SIZE = 1024 * 1024 * 1024  # restore the limit
def testIterator(self):
    """A remote iterator is streamed through _StreamResultIterator."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        remote_iter = proxy.iterator()
        assert isinstance(remote_iter, Pyro5.client._StreamResultIterator)
        assert next(remote_iter) == "one"
        assert next(remote_iter) == "two"
        assert next(remote_iter) == "three"
        with pytest.raises(StopIteration):
            next(remote_iter)
        remote_iter.close()
def testGenerator(self):
    """A remote generator streams all items and stays exhausted afterwards."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        stream = proxy.generator()
        assert isinstance(stream, Pyro5.client._StreamResultIterator)
        produced = [next(stream) for _ in range(5)]
        assert produced == ["one", "two", "three", "four", "five"]
        for _ in range(2):  # exhaustion must be permanent
            with pytest.raises(StopIteration):
                next(stream)
        stream.close()
        # consuming via a plain loop must exhaust the stream as well
        stream = proxy.generator()
        _ = list(stream)
        with pytest.raises(StopIteration):
            next(stream)
        stream.close()
def testCleanup(self):
    # Open several live connections, then verify the daemon shuts down
    # cleanly while they are still up.
    p1 = Pyro5.client.Proxy(self.objectUri)
    p2 = Pyro5.client.Proxy(self.objectUri)
    p3 = Pyro5.client.Proxy(self.objectUri)
    p1.echo(42)
    p2.echo(42)
    p3.echo(42)
    # we have several active connections still up, see if we can cleanly shutdown the daemon
    # (it should interrupt the worker's socket connections)
    time.sleep(0.1)
    self.daemon.shutdown()
    self.daemon = None  # presumably keeps teardown from shutting down twice — confirm
    p1._pyroRelease()
    p2._pyroRelease()
    p3._pyroRelease()
def testSerializedBlob(self):
    """SerializedBlob keeps its name and round-trips its payload."""
    blob = Pyro5.client.SerializedBlob("blobname", [1, 2, 3])
    assert blob.info == "blobname"
    assert blob.deserialized() == [1, 2, 3]
def testSerializedBlobMessage(self):
    """A blob wrapping a protocol message deserializes back to the call args."""
    serializer = Pyro5.serializers.serializers["serpent"]
    payload = serializer.dumpsCall("object", "method", ([1, 2, 3],), {"kwarg": 42})
    msg = Pyro5.protocol.SendingMessage(Pyro5.protocol.MSG_INVOKE, 0, 42, serializer.serializer_id, payload)
    blob = Pyro5.client.SerializedBlob("blobname", msg, is_blob=True)
    assert blob.info == "blobname"
    assert blob.deserialized() == ([1, 2, 3], )
def testProxySerializedBlobArg(self):
    """A SerializedBlob argument arrives server-side as (name, payload)."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        info, data = proxy.blob(Pyro5.client.SerializedBlob("blobname", [1, 2, 3]))
        assert info == "blobname"
        assert data == [1, 2, 3]
def testResourceFreeing(self):
    # Explicitly freed resources are closed at once; everything left is
    # closed when the client connection goes away.
    rsvc = ResourceService()
    uri = self.daemon.register(rsvc)
    with Pyro5.client.Proxy(uri) as p:
        p.allocate("r1")
        p.allocate("r2")
        resources = {r.name: r for r in rsvc.resources}
        p.free("r1")
        rsc = p.list()
        assert rsc == ["r2"]
        assert resources["r1"].close_called
        assert not resources["r2"].close_called
    # leaving the with-block released the connection; the server should
    # now close the remaining resource as well
    time.sleep(0.02)
    assert resources["r1"].close_called
    assert resources["r2"].close_called
    with Pyro5.client.Proxy(uri) as p:
        rsc = p.list()
        assert rsc == [], "r2 must now be freed due to connection loss earlier"
class TestServerThreadNoTimeout:
    """Daemon/proxy tests against a threaded server without a comm timeout."""
    SERVERTYPE = "thread"  # assigned to config.SERVERTYPE in setup_method
    COMMTIMEOUT = None  # assigned to config.COMMTIMEOUT in setup_method
def setup_method(self):
    # Configure Pyro globally for this class, then start a daemon in a
    # background thread serving one ServerTestObject named "something".
    config.SERIALIZER = "serpent"
    config.LOGWIRE = True
    config.POLLTIMEOUT = 0.1
    config.SERVERTYPE = self.SERVERTYPE
    config.COMMTIMEOUT = self.COMMTIMEOUT
    self.daemon = Pyro5.server.Daemon(port=0)  # port 0: pick any free port
    obj = ServerTestObject()
    uri = self.daemon.register(obj, "something")
    self.objectUri = uri
    self.daemonthread = DaemonLoopThread(self.daemon)
    self.daemonthread.start()
    self.daemonthread.running.wait()
    time.sleep(0.05)  # give the daemon loop a moment to start serving
def teardown_method(self):
    # Stop the daemon and restore the global config defaults.
    time.sleep(0.05)  # let in-flight requests finish first
    self.daemon.shutdown()
    self.daemonthread.join()
    config.SERVERTYPE = "thread"
    config.COMMTIMEOUT = None
def testConnectionStuff(self):
    # Proxies connect lazily, release idempotently, and can reconnect.
    p1 = Pyro5.client.Proxy(self.objectUri)
    p2 = Pyro5.client.Proxy(self.objectUri)
    assert not p1._pyroConnection  # no connection until first use
    assert not p2._pyroConnection
    p1.ping()
    p2.ping()
    _ = p1.multiply(11, 5)
    _ = p2.multiply(11, 5)
    assert p1._pyroConnection
    assert p2._pyroConnection
    p1._pyroRelease()
    p1._pyroRelease()  # releasing twice on purpose: must be a harmless no-op
    p2._pyroRelease()
    p2._pyroRelease()
    assert not p1._pyroConnection
    assert not p2._pyroConnection
    p1._pyroBind()  # explicit rebind; p2 reconnects implicitly on next call
    _ = p1.multiply(11, 5)
    _ = p2.multiply(11, 5)
    assert p1._pyroConnection
    assert p2._pyroConnection
    assert p1._pyroUri.protocol == "PYRO"
    assert p2._pyroUri.protocol == "PYRO"
    p1._pyroRelease()
    p2._pyroRelease()
def testReconnectAndCompression(self):
    # try reconnects
    with Pyro5.client.Proxy(self.objectUri) as p:
        assert not p._pyroConnection
        p._pyroReconnect(tries=100)
        assert p._pyroConnection
    assert not p._pyroConnection  # with-block exit released the connection
    # test compression:
    try:
        with Pyro5.client.Proxy(self.objectUri) as p:
            config.COMPRESSION = True
            assert p.multiply(5, 11) == 55
            assert p.multiply("*" * 500, 2) == "*" * 1000  # payload big enough to compress
    finally:
        config.COMPRESSION = False  # restore global config
def testOnewayMetaOn(self):
    # Oneway flags are filled in from the server's metadata at bind time.
    with Pyro5.client.Proxy(self.objectUri) as p:
        assert p._pyroOneway == set()  # when not bound, no meta info exchange has been done
        p._pyroBind()
        assert "oneway_multiply" in p._pyroOneway  # after binding, meta info has been processed
        assert p.multiply(5, 11) == 55  # not tagged as @Pyro5.server.oneway
        assert p.oneway_multiply(5, 11) is None  # tagged as @Pyro5.server.oneway
        p._pyroOneway = set()  # clearing the local flags turns it into a normal call
        assert p.multiply(5, 11) == 55
        assert p.oneway_multiply(5, 11) == 55
        # check nonexisting method behavior for oneway methods
        with pytest.raises(AttributeError):
            p.nonexisting_method()
        p._pyroOneway.add("nonexisting_method")
        # now it should still fail because of metadata telling Pyro what methods actually exist
        with pytest.raises(AttributeError):
            p.nonexisting_method()
def testOnewayWithProxySubclass(self):
    """A Proxy subclass may preset _pyroOneway; clearing it restores sync calls."""
    class ProxyWithOneway(Pyro5.client.Proxy):
        def __init__(self, arg):
            super(ProxyWithOneway, self).__init__(arg)
            # flag both methods as oneway up front
            self._pyroOneway = {"oneway_multiply", "multiply"}
    with ProxyWithOneway(self.objectUri) as proxy:
        assert proxy.oneway_multiply(5, 11) is None
        assert proxy.multiply(5, 11) == 55
        proxy._pyroOneway = set()  # drop the preset flags again
        assert proxy.oneway_multiply(5, 11) == 55
        assert proxy.multiply(5, 11) == 55
def testOnewayDelayed(self):
    """A oneway call must return immediately while the server keeps working."""
    with Pyro5.client.Proxy(self.objectUri) as p:
        p.ping()
        now = time.time()
        p.oneway_delay(1)  # oneway so we should continue right away
        time.sleep(0.01)
        assert time.time() - now < 0.2, "delay should be running as oneway"
        now = time.time()
        # BUG FIX: the comparison was attached to the assert *message*
        # (`assert p.multiply(5, 11), "..." == 55`), so only the result's
        # truthiness was checked. Compare the result to 55 instead.
        assert p.multiply(5, 11) == 55, "expected a normal result from a non-oneway call"
        assert time.time() - now < 0.2, "delay should be running in its own thread"
def testSerializeConnected(self):
    # online serialization tests: the live connection must never travel
    # along with a serialized or copied proxy.
    ser = Pyro5.serializers.serializers[config.SERIALIZER]
    proxy = Pyro5.client.Proxy(self.objectUri)
    proxy._pyroBind()
    assert proxy._pyroConnection
    p = ser.dumps(proxy)
    proxy2 = ser.loads(p)
    assert proxy2._pyroConnection is None  # deserialized proxy starts disconnected
    assert proxy._pyroConnection  # ...and the original is unaffected
    assert proxy._pyroUri == proxy2._pyroUri
    proxy2._pyroBind()
    assert proxy2._pyroConnection
    assert proxy2._pyroConnection is not proxy._pyroConnection  # separate socket
    proxy._pyroRelease()
    proxy2._pyroRelease()
    assert proxy._pyroConnection is None
    assert proxy2._pyroConnection is None
    proxy.ping()  # both reconnect transparently after release
    proxy2.ping()
    # try copying a connected proxy
    import copy
    proxy3 = copy.copy(proxy)
    assert proxy3._pyroConnection is None  # the copy starts disconnected
    assert proxy._pyroConnection
    assert proxy._pyroUri == proxy3._pyroUri
    assert proxy3._pyroUri is not proxy._pyroUri  # equal but distinct URI objects
    proxy._pyroRelease()
    proxy2._pyroRelease()
    proxy3._pyroRelease()
def testException(self):
    """A remote exception is re-raised locally with remote traceback info."""
    with Pyro5.client.Proxy(self.objectUri) as proxy:
        with pytest.raises(ZeroDivisionError) as excinfo:
            proxy.divide(1, 0)
        remote_tb = "".join(Pyro5.errors.get_pyro_traceback(excinfo.type, excinfo.value, excinfo.tb))
        assert "Remote traceback" in remote_tb
        assert "ZeroDivisionError" in remote_tb
def testTimeoutCall(self):
    # A long remote call must be cut short once _pyroTimeout is set.
    config.COMMTIMEOUT = None
    with Pyro5.client.Proxy(self.objectUri) as p:
        p.ping()
        start = time.time()
        p.delay(0.5)
        duration = time.time() - start
        assert 0.4 < duration < 0.6  # no timeout: the full delay is observed
        p._pyroTimeout = 0.1
        start = time.time()
        with pytest.raises(Pyro5.errors.TimeoutError):
            p.delay(1)
        duration = time.time() - start
        assert duration < 0.3  # aborted well before the 1s delay finished
def testTimeoutConnect(self):
    # set up an unresponsive daemon (its event loop is never started)
    with Pyro5.server.Daemon(port=0) as d:
        time.sleep(0.5)
        obj = ServerTestObject()
        uri = d.register(obj)
        # we're not going to start the daemon's event loop
        p = Pyro5.client.Proxy(uri)
        p._pyroTimeout = 0.2
        start = time.time()
        with pytest.raises(Pyro5.errors.TimeoutError) as e:
            p.ping()
        assert str(e.value) == "receiving: timeout"
# XXX todo: add test about proxy thread ownership transfer
def testServerConnections(self):
    """The server must accept a growing number of simultaneous proxies."""
    proxies = [Pyro5.client.Proxy(self.objectUri) for _ in range(10)]
    try:
        for proxy in proxies:
            proxy._pyroTimeout = 0.5
            proxy._pyroBind()
        for proxy in proxies:
            proxy.ping()
    finally:
        for proxy in proxies:
            proxy._pyroRelease()
def testGeneratorProxyClose(self):
    """Releasing the proxy invalidates an active result stream."""
    proxy = Pyro5.client.Proxy(self.objectUri)
    stream = proxy.generator()
    proxy._pyroRelease()
    with pytest.raises(Pyro5.errors.ConnectionClosedError):
        next(stream)
def testGeneratorLinger(self):
    """A result stream lingers across a reconnect, then expires after the linger time."""
    orig_linger = config.ITER_STREAM_LINGER
    orig_commt = config.COMMTIMEOUT
    orig_pollt = config.POLLTIMEOUT
    try:
        config.ITER_STREAM_LINGER = 0.5
        config.COMMTIMEOUT = 0.2
        config.POLLTIMEOUT = 0.2
        p = Pyro5.client.Proxy(self.objectUri)
        generator = p.generator()
        assert next(generator) == "one"
        p._pyroRelease()
        with pytest.raises(Pyro5.errors.ConnectionClosedError):
            next(generator)
        p._pyroReconnect()
        # BUG FIX: the comparison was attached to the assert *message*
        # (`assert next(generator), "..." == "two"`), so only truthiness of
        # the yielded item was checked. Compare the item to "two" instead.
        assert next(generator) == "two", "generator should resume after reconnect"
        # check that after the linger time passes, the generator *is* gone
        p._pyroRelease()
        time.sleep(2)
        p._pyroReconnect()
        with pytest.raises(Pyro5.errors.PyroError):  # should not be resumable anymore
            next(generator)
    finally:
        config.ITER_STREAM_LINGER = orig_linger
        config.COMMTIMEOUT = orig_commt
        config.POLLTIMEOUT = orig_pollt
def | |
\
optional, and may be empty if no secret is required. If the secret object \
contains more than one secret, all secret references are passed.
:param read_only: Specifies a read-only configuration for the volume. Defaults to \
false (read/write).
"""
def __init__(
    self,
    driver: str,
    fs_type: str,
    volume_attributes: dict,
    node_publish_secret_ref: Optional[LocalObjectReference] = None,
    read_only: Optional[bool] = None,
):
    # Map the snake_case constructor arguments onto the camelCase
    # attribute names expected by the Kubernetes CSI volume schema.
    self.driver = driver
    self.fsType = fs_type
    self.volumeAttributes = volume_attributes
    self.nodePublishSecretRef = node_publish_secret_ref
    self.readOnly = read_only
class StorageOSVolumeSource(HelmYaml):
    """
    Represents a StorageOS volume attached to a pod.

    :param fs_type: Filesystem type to mount (e.g. "ext4", "xfs", "ntfs"); \
        implicitly inferred to be "ext4" if unspecified.
    :param volume_name: Human-readable name of the StorageOS volume; names are \
        only unique within a namespace.
    :param volume_namespace: Scope of the volume within StorageOS. Defaults to \
        the Pod's namespace when unset; use "default" if namespaces are not \
        used within StorageOS. Namespaces that do not pre-exist within \
        StorageOS will be created.
    :param read_only: Force the ReadOnly setting in VolumeMounts when True; \
        defaults to false (read/write).
    :param secret_ref: Secret to use for obtaining the StorageOS API \
        credentials; default values are attempted when not specified.
    """

    def __init__(
        self,
        fs_type: str,
        volume_name: str,
        volume_namespace: str,
        read_only: Optional[bool] = None,
        secret_ref: Optional[LocalObjectReference] = None,
    ):
        # camelCase attribute names match the Kubernetes API schema
        self.fsType = fs_type
        self.volumeName = volume_name
        self.volumeNamespace = volume_namespace
        self.readOnly = read_only
        self.secretRef = secret_ref
class PhotonPersistentDiskVolumeSource(HelmYaml):
    """
    Represents a Photon Controller persistent disk volume.

    :param fs_type: Filesystem type to mount (e.g. "ext4", "xfs", "ntfs"); \
        implicitly inferred to be "ext4" if unspecified.
    :param pd_id: ID that identifies the Photon Controller persistent disk.
    """

    def __init__(self, fs_type: str, pd_id: str):
        # camelCase attribute names match the Kubernetes API schema
        self.fsType = fs_type
        self.pdID = pd_id
class GlusterfsVolumeSource(HelmYaml):
    """
    Represents a Glusterfs mount for a pod.

    :param endpoints: Endpoint name detailing the Glusterfs topology. More info: \
        https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod
    :param path: Glusterfs volume path. More info: \
        https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod
    :param read_only: Force a read-only mount when True; defaults to false. \
        More info: \
        https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod
    """

    def __init__(self, endpoints: str, path: str, read_only: Optional[bool] = None):
        self.endpoints = endpoints
        self.path = path
        self.readOnly = read_only
class AzureDiskVolumeSource(Core):
    """
    Represents an Azure data disk mounted from blob storage.

    :param caching_mode: Host caching mode: None, Read Only, or Read Write.
    :param disk_name: Name of the data disk in the blob storage.
    :param disk_uri: URI of the data disk in the blob storage.
    :param fs_type: Filesystem type to mount (e.g. "ext4", "xfs", "ntfs"); \
        implicitly inferred to be "ext4" if unspecified.
    :param read_only: Force the ReadOnly setting in VolumeMounts when True; \
        defaults to false (read/write).
    """

    def __init__(
        self,
        caching_mode: str,
        disk_name: str,
        disk_uri: str,
        fs_type: str,
        read_only: Optional[bool] = None,
    ):
        # camelCase attribute names match the Kubernetes API schema
        self.cachingMode = caching_mode
        self.diskName = disk_name
        self.diskURI = disk_uri
        self.fsType = fs_type
        self.readOnly = read_only
class AzureFileVolumeSource(HelmYaml):
    """
    Represents an Azure File Service share mounted into a pod.

    :param secret_name: Name of the secret containing the Azure Storage \
        Account Name and Key.
    :param share_name: Name of the Azure file share.
    :param read_only: Force the ReadOnly setting in VolumeMounts when True; \
        defaults to false (read/write).
    """

    def __init__(
        self, secret_name: str, share_name: str, read_only: Optional[bool] = None
    ):
        self.secretName = secret_name
        self.shareName = share_name
        self.readOnly = read_only
class SecretVolumeSource(HelmYaml):
    """
    Projects a Secret into a volume.

    :param optional: Whether the Secret or its keys must be defined.
    :param secret_name: Name of the secret in the pod's namespace. More info: \
        https://kubernetes.io/docs/concepts/storage/volumes#secret
    :param default_mode: Mode bits for created files, between 0 and 0777 \
        (default 0644). Directories within the path are not affected. May \
        conflict with other options affecting file mode, like fsGroup, so the \
        resulting mode may have other bits set.
    :param items: When unset, every key-value pair in the Secret's Data field \
        is projected as a file named after the key with the value as content. \
        When set, only the listed keys are projected to the given paths; a \
        listed key missing from the Secret fails the volume setup unless \
        marked optional. Paths must be relative and may not contain or start \
        with '..'.
    """

    def __init__(
        self,
        optional: bool,
        secret_name: str,
        default_mode: Optional[int] = None,
        items: Optional[List[KeyToPath]] = None,
    ):
        # camelCase attribute names match the Kubernetes API schema
        self.optional = optional
        self.secretName = secret_name
        self.defaultMode = default_mode
        self.items = items
class EmptyDirVolumeSource(HelmYaml):
    """
    Represents an emptyDir scratch volume.

    :param medium: Storage medium backing the directory. "" (default) uses \
        the node's default medium; "Memory" is the other allowed value. More \
        info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir
    :param size_limit: Total local storage allowed for this volume, also \
        applicable to the memory medium (where the effective cap is the \
        smaller of this limit and the pod's summed container memory limits). \
        Default nil means unlimited. More info: \
        http://kubernetes.io/docs/user-guide/volumes#emptydir
    """

    def __init__(self, medium: Optional[str] = None, size_limit: Optional[str] = None):
        self.medium = medium
        self.sizeLimit = size_limit
class QuobyteVolumeSource(HelmYaml):
    """
    Represents a Quobyte volume mount.

    :param registry: One or more Quobyte Registry services as "host:port" \
        pairs (comma-separated) acting as the central registry for volumes.
    :param tenant: Tenant owning the volume in the backend; set by the plugin \
        for dynamically provisioned Quobyte volumes.
    :param volume: Name of an already created Quobyte volume.
    :param group: Group to map volume access to; default is no group.
    :param read_only: Force a read-only mount when True; defaults to false.
    :param user: User to map volume access to; defaults to the serviceaccount \
        user.
    """

    def __init__(
        self,
        registry: str,
        tenant: str,
        volume: str,
        group: Optional[str] = None,
        read_only: Optional[bool] = None,
        user: Optional[str] = None,
    ):
        # camelCase attribute names match the Kubernetes API schema
        self.registry = registry
        self.tenant = tenant
        self.volume = volume
        self.group = group
        self.readOnly = read_only
        self.user = user
class RBDVolumeSource(HelmYaml):
    """
    Represents a Rados Block Device (Ceph RBD) volume mount.

    :param fs_type: Filesystem type of the volume (e.g. "ext4", "xfs", \
        "ntfs"); implicitly inferred to be "ext4" if unspecified. More info: \
        https://kubernetes.io/docs/concepts/storage/volumes#rbd
    :param image: The rados image name. More info: \
        https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
    :param monitors: A collection of Ceph monitors. More info: \
        https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
    :param keyring: Path to the key ring for RBDUser; default \
        /etc/ceph/keyring. More info: \
        https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
    :param pool: The rados pool name; default "rbd". More info: \
        https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
    :param read_only: Force the ReadOnly setting in VolumeMounts when True; \
        defaults to false. More info: \
        https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
    :param secret_ref: Name of the authentication secret for RBDUser; \
        overrides keyring when provided. Default nil. More info: \
        https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
    :param user: The rados user name; default "admin". More info: \
        https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it
    """

    def __init__(
        self,
        fs_type: str,
        image: str,
        monitors: List[str],
        keyring: Optional[str] = None,
        pool: Optional[str] = None,
        read_only: Optional[bool] = None,
        secret_ref: Optional[LocalObjectReference] = None,
        user: Optional[str] = None,
    ):
        # camelCase attribute names match the Kubernetes API schema
        self.fsType = fs_type
        self.image = image
        self.monitors = monitors
        self.keyring = keyring
        self.pool = pool
        self.readOnly = read_only
        self.secretRef = secret_ref
        self.user = user
class | |
# calc.py
#!/usr/bin python3
# -*- coding:UTF-8 -*-
# Author: nigo
import os
import json
import pandas as pd
import akshare as ak
import baostock as bs
import datetime
import numpy as np
from multiprocessing import Pool
import multiprocessing.pool as mpp
import istarmap
from tqdm import tqdm
import plotly.figure_factory as ff
from plotly.subplots import make_subplots
from tabulate import tabulate
import wcwidth
import time
import re
import plotly.express as px
import plotly.graph_objects as go
import sys
import requests
# File-system locations: the config path is fixed; the rest come from the
# JSON config file loaded at import time.
PATH_CONFIG = './config.json'
PATH_TMP ='./tmp.csv'  # scratch file listing indices already refreshed in this run
with open(PATH_CONFIG,'rb') as f:
    json_str = json.load(f)  # parsed config dict (name kept from original code)
INDEX_LIST = json_str['index_list']  # index codes to process
PATH_INFO = json_str['path_info']  # location for index metadata files
PATH_INDEX = json_str['path_index']  # location for index data files
PATH_WEIGHT = json_str['path_weight']  # location for index constituent files
PATH_STOCK = json_str['path_stock']  # location for per-stock data files
PATH_MARKET = json_str['path_market']  # location for market-wide data files
def read_csv(path, **kw):
    """Read a CSV file, using the keyword arguments as per-column converters."""
    return pd.read_csv(path, converters=kw)
def full_code(code, is_index=True, is_dot=False):
    """Prefix a 6-digit security code with its exchange marker.

    code: 6-digit security code.
    is_index: True when `code` is an index code rather than a stock code.
    is_dot: insert a dot between the exchange prefix and the code.
    Returns the prefixed code, e.g. 'sh000300' or 'sz.000001'.
    """
    shanghai, shenzhen = ('sh.', 'sz.') if is_dot else ('sh', 'sz')
    # Index codes starting with '0' and stock codes starting with '6'
    # belong to Shanghai; everything else belongs to Shenzhen.
    shanghai_leader = '0' if is_index else '6'
    prefix = shanghai if code[0] == shanghai_leader else shenzhen
    return prefix + code
def convert_code(code):
"""指数代码小写转大写"""
if code.endswith('sh'):
return code[0:6] + '.SH'
elif code.endswith('sz'):
return code[0:6] + '.SZ'
else:
return code
def get_security_info(code=None):
    """Return index metadata, fetching and caching it on first use.

    code: optional 6-digit index code; when given, only that row is kept.
    Returns a DataFrame of index information.
    """
    path = os.path.join(PATH_INFO,'index_info.csv')
    if os.path.exists(path):
        df = read_csv(path,index_code=str)
    else:
        # cache miss: download the full index list and store it
        print('获取指数基本信息列表')
        df = ak.index_stock_info()
        df.to_csv(path,index=False)
    if code:
        df = df[df.index_code==code]
    return df
def config_update_date(is_all=False):
    """Return the last-update date recorded in the config file, or None.

    is_all: read the 'update_date_all' key instead of 'update_date'.
    Returns the date string, or None when the config stores "None".
    """
    # Consistency fix: use the module-wide PATH_CONFIG constant instead of
    # a duplicated hard-coded 'config.json' literal (same file, kept in
    # sync with the loader at the top of this module).
    with open(PATH_CONFIG, mode='rb') as f:
        cfg = json.load(f)
    update_date = cfg['update_date_all'] if is_all else cfg['update_date']
    # The config stores the literal string "None" when never updated.
    return None if update_date == "None" else update_date
def judge_update_stock(is_all=False):
    """Return True when the cached stock data is stale and needs refreshing."""
    last_update = config_update_date(is_all)
    # Refresh when there is no recorded update, or it predates today.
    return (not last_update) or last_update < str(datetime.date.today())
def get_file_date(file):
    """Return the file's last-modification date formatted as 'YYYY-MM-DD'."""
    modified = os.stat(file).st_mtime
    return time.strftime('%Y-%m-%d', time.localtime(modified))
def judge_update_weight(code,date):
    """Decide whether the constituent files for `code` need refreshing.

    code: 6-digit index code.
    date: reference date string ('YYYY-MM-DD').
    Returns True when either cache file is missing, or when the older of
    the two files is at least one calendar month older than `date`.
    """
    file_new = code + '-new.csv'
    path_new = os.path.join(PATH_WEIGHT,file_new)
    file_history = code + '-history.csv'
    path_history = os.path.join(PATH_WEIGHT,file_history)
    if os.path.exists(path_new) and os.path.exists(path_history):
        new_date = get_file_date(path_new)
        history_date = get_file_date(path_history)
        # use the older of the two files so that both stay fresh
        update_date = min(new_date,history_date)
        given_date = datetime.datetime.strptime(date,'%Y-%m-%d')
        update_date = datetime.datetime.strptime(update_date,'%Y-%m-%d')
        diff_month = (given_date.year - update_date.year) * 12 + given_date.month - update_date.month # whole calendar months between the dates
        if diff_month<1:
            return False
        else:
            return True
    else:
        return True
def get_exists_stocks_path():
    """Return the full paths of per-stock CSV files already on disk.

    (Unlike get_all_stocks, this returns file paths, not stock codes.)
    """
    stock_files = os.listdir(PATH_STOCK)
    stocks = [ os.path.join(PATH_STOCK,file) for file in stock_files if file.endswith('csv')]
    return stocks
def get_all_stocks():
    """Return the stock codes that already have CSV data files on disk."""
    # File names are '<code>_...csv'; the code is the part before '_'.
    return [
        name.split('_')[0]
        for name in os.listdir(PATH_STOCK)
        if name.endswith('csv')
    ]
def get_stocks(code, date):
    """Compute the constituent stocks of index *code* as of *date*."""
    df_new, df_history = get_index_weight(code, date)
    # Current members that had already joined by *date* ...
    current = df_new[df_new.in_date <= date]['stock_code'].to_list()
    # ... plus past members that were still in the index on *date*.
    former = df_history[
        (df_history.out_date > date) & (df_history.in_date <= date)
    ]['stock_code'].to_list()
    return list(set(current + former))
def get_all_index_stocks(index_list):
    """Collect every stock ever referenced by any index in *index_list*."""
    seen = set()
    for code in index_list:
        df_new, df_history = get_index_weight(code)
        seen.update(df_new['stock_code'].to_list())
        seen.update(df_history['stock_code'].to_list())
    # Drop the rate-limit marker file once all weights have been fetched.
    if os.path.exists(PATH_TMP):
        os.remove(PATH_TMP)
    return list(seen)
def get_index_weight(code,date=None):
    """Return (df_new, df_history) constituent tables for index *code*.

    code: 6-digit index code.
    date: 'YYYY-MM-DD' used only for staleness checks; defaults to today.

    Results are cached as '<code>-new.csv' / '<code>-history.csv' under
    PATH_WEIGHT.  A temp file (PATH_TMP) records codes already refreshed
    in this run, to stay under the data provider's rate limit.
    """
    if not date:
        date = str(datetime.date.today())
    file_new = code + '-new.csv'
    path_new = os.path.join(PATH_WEIGHT,file_new)
    file_history = code + '-history.csv'
    path_history = os.path.join(PATH_WEIGHT,file_history)
    flag = judge_update_weight(code,date)
    # Skip re-downloading codes refreshed earlier in this run
    # (works around the API's request-frequency limit).
    if os.path.exists(PATH_TMP):
        tmp = read_csv(PATH_TMP,code=str)
    else:
        tmp = pd.DataFrame(columns=['code'])
    ignore_code_list = tmp['code'].to_list()
    if code in ignore_code_list:
        flag = False
    # Use the cache unless a refresh was requested above.
    if os.path.exists(path_new) and not flag:
        df_new = read_csv(path_new,publish_date=str,stock_code=str)
    else:
        print('获取%s的成分股最新数据' % code)
        df_new = ak.index_stock_cons(code)
        df_new.columns = ['stock_code','stock_name','in_date']
        df_new.to_csv(path_new,index=False)
    if os.path.exists(path_history) and not flag:
        df_history = read_csv(path_history,publish_date=str,stock_code=str)
    else:
        print('获取%s的成分股历史数据' % code)
        df_history = ak.index_stock_hist(full_code(code))
        df_history.to_csv(path_history,index=False)
    # Record this code as refreshed so a later call in the same run
    # does not hit the API again.
    df_updated = pd.DataFrame([code],columns=['code'])
    tmp = pd.concat([tmp,df_updated])
    tmp.to_csv(PATH_TMP,index=False)
    return df_new,df_history
def get_k_date(full_code, start_date, end_date):
    """Fetch daily K-line/indicator rows for one security via baostock.

    full_code: exchange-qualified code, e.g. 'sh.600000'.  Note this
        parameter shadows the module-level full_code() helper inside
        this function's scope.
    start_date / end_date: 'YYYY-MM-DD' inclusive bounds.
    Returns a DataFrame with one row per trading day (may be empty).
    """
    rs = bs.query_history_k_data_plus(full_code,
        "date,code,open,high,low,close,preclose,volume,amount,adjustflag,turn,tradestatus,pctChg,peTTM,pbMRQ,psTTM,pcfNcfTTM,isST",
        start_date=start_date, end_date=end_date,
        frequency="d", adjustflag="3")
    result_list = []
    # 'and' short-circuits: after an error the cursor is no longer
    # advanced (the original bitwise '&' evaluated rs.next()
    # unconditionally, even when error_code != '0').
    while rs.error_code == '0' and rs.next():
        result_list.append(rs.get_row_data())
    df = pd.DataFrame(result_list, columns=rs.fields)
    return df
def get_trade_date(start_date=None,end_date=None):
    """Return the list of A-share trading dates within a range.

    start_date: inclusive lower bound, defaults to '1990-01-01'
    end_date: inclusive upper bound, defaults to today
    The calendar is cached at PATH_INFO/a_trade_date.csv and refreshed
    only when both update flags say the cache is stale.
    """
    if not start_date:
        start_date = '1990-01-01'
    if not end_date:
        end_date = datetime.date.today()
        end_date = end_date.strftime('%Y-%m-%d')
    path = os.path.join(PATH_INFO,'a_trade_date.csv')
    flag = judge_update_stock() and judge_update_stock(is_all=True)
    if not flag and os.path.exists(path):
        print('读取交易日历')
        df = read_csv(path)
    else:
        print('获取交易日历')
        df = ak.tool_trade_date_hist_sina()
        df.columns = ['trade_date']
        df.to_csv(path,index=False)
    # NOTE(review): .strftime assumes the column holds datetime-like
    # values in BOTH branches — confirm the custom read_csv parses dates,
    # otherwise the cached-path values are plain strings.
    df['trade_date'] = df['trade_date'].apply(lambda x: x.strftime('%Y-%m-%d'))
    df = df[(df.trade_date<=end_date) & (df.trade_date>=start_date)]
    return df['trade_date'].to_list()
def get_hk_stock(code):
    """Fetch HK-listed PE/PB history via eniu and cache it as the
    stock's indicator CSV (same column layout as A-share files)."""
    df_pb = ak.stock_hk_eniu_indicator(symbol=code, indicator="市净率")
    df_pe = ak.stock_hk_eniu_indicator(symbol=code, indicator="市盈率")
    df = pd.merge(
        df_pe.loc[:, ['date', 'pe']],
        df_pb.loc[:, ['date', 'pb']],
        on='date',
    )
    df['psTTM'] = ''  # no PS data for HK stocks; keep the column shape
    df.columns = ['date', 'peTTM', 'pbMRQ', 'psTTM']
    path = os.path.join(PATH_STOCK, '%s_indicator.csv' % code)
    df.to_csv(path, index=False)
def update_stock_data(stocks,is_all=False,use_flag=False):
    """Refresh the local indicator CSV for every stock in *stocks*.

    stocks: iterable of stock codes; an 'hk' prefix means Hong Kong.
    is_all: which config update-date entry to consult when use_flag is set.
    use_flag: when True, skip stocks that are already up to date per config.
    """
    bs.login()
    today = datetime.date.today()
    today = str(today)
    # 'yestoday' (sic) means yesterday; name kept as-is.
    yestoday = datetime.date.today() - datetime.timedelta(days=1)
    yestoday = str(yestoday)
    trade_date_list = get_trade_date(end_date=yestoday)
    end_date = trade_date_list[-1]
    pbar = tqdm(stocks)
    for stock in pbar:
        pbar.set_description("更新股票%s数据" % stock)
        # print('更新股票%s数据' % stock)
        path = os.path.join(PATH_STOCK,'%s_indicator.csv' % stock)
        if use_flag:
            flag = judge_update_stock(is_all)
        else:
            flag = True
        if os.path.exists(path):
            if flag:
                try:
                    df = read_csv(path,code=str,date=str)
                    data_date = df.iloc[-1,0]
                    # Cache already covers the last trading day: nothing to do.
                    if end_date<=data_date:
                        continue
                    else:
                        if stock[:2] == 'hk':
                            get_hk_stock(stock)
                        else:
                            # Fetch only the missing tail and append it.
                            start_date = datetime.datetime.strptime(data_date,'%Y-%m-%d') + datetime.timedelta(days=1)
                            start_date = start_date.strftime('%Y-%m-%d')
                            df_single = get_k_date(full_code(stock,is_index=False,is_dot=True),start_date,today)
                            if not df_single.empty:
                                df_single.to_csv(path,index=False,header=False,mode='a')
                except:
                    # NOTE(review): bare except — any malformed cache file
                    # silently triggers a full re-download from 1990.
                    if stock[:2] == 'hk':
                        get_hk_stock(stock)
                    else:
                        df_single = get_k_date(full_code(stock,is_index=False,is_dot=True),'1990-01-01',today)
                        if not df_single.empty:
                            df_single.to_csv(path,index=False)
        else:
            # No cache at all: download the full history.
            if stock[:2] == 'hk':
                get_hk_stock(stock)
            else:
                df_single = get_k_date(full_code(stock,is_index=False,is_dot=True),'1990-01-01',today)
                if not df_single.empty:
                    df_single.to_csv(path,index=False)
    bs.logout()
def calc_avg(numbers):
    """Equal-weight (harmonic) mean of the positive entries of *numbers*.

    Non-positive values contribute zero weight but still count toward the
    numerator, matching the original formula.  Returns 0 when *numbers*
    is empty or contains no positive value (the original raised
    ZeroDivisionError in both cases).
    """
    weight = sum(1 / p for p in numbers if p > 0)
    if not weight:
        return 0
    return len(numbers) / weight
def calc_mid(numbers):
    """Median of the positive entries of *numbers*; 0 when none exist."""
    positives = sorted(v for v in numbers if v > 0)
    if not positives:
        return 0
    mid = len(positives) // 2
    # For odd counts both indices hit the middle element; for even
    # counts they pick the two central elements.
    return (positives[mid] + positives[-(mid + 1)]) / 2
def calc_average(*args, method='avg'):
    """Aggregate each ratio list (e.g. pe/pb/ps) into a single figure.

    args: one list per ratio, e.g. (pe_list, pb_list, ps_list).
    method: 'avg' for the equal-weight harmonic mean, 'mid' for median.
    Returns a tuple of values rounded to 2 decimals, in input order.

    Fixed: removed the unused local `num = len(args[0])`.
    """
    aggregate = calc_mid if method == 'mid' else calc_avg
    return tuple(round(aggregate(values), 2) for values in args)
def get_index_pe_pb_date(date, stocks):
    '''Equal-weight PE/PB/PS across *stocks* on one trade date.

    date: 'YYYY-MM-DD'; stocks: iterable of stock codes with local CSVs.
    Returns (date, pe, pb, ps) or None when no data is available or the
    calculation fails.
    '''
    pe_list = []
    pb_list = []
    ps_list = []
    for stock in stocks:
        path = os.path.join(PATH_STOCK,'%s_indicator.csv' % stock)
        if not os.path.exists(path):
            print('股票%s文件不存在' % stock)
            continue
        df = read_csv(path,code=str,date=str)
        df_tmp = df[df.date == date]
        df_tmp = df_tmp.reset_index(drop=True) # reset index so .loc[0] below works
        if not df_tmp.empty:
            pe_list.append(df_tmp.loc[0,'peTTM'])
            pb_list.append(df_tmp.loc[0,'pbMRQ'])
            ps_list.append(df_tmp.loc[0,'psTTM'])
    if len(pe_list) > 0:
        try :
            (pe,pb,ps) = calc_average(pe_list,pb_list,ps_list)
            # calc_average already rounds; rounding again is harmless.
            return (date,round(pe, 2), round(pb, 2), round(ps,2) )
        except:
            # NOTE(review): bare except — e.g. non-numeric CSV values are
            # silently reported as "no data".
            return None
    else:
        return None
def combine_all_markt_stocks():
    """Concatenate every local stock indicator file into one DataFrame
    holding only date/code/peTTM/pbMRQ/psTTM columns."""
    frames = []
    for path in tqdm(get_exists_stocks_path(), desc='合并所有股票文件'):
        frame = read_csv(path, code=str, date=str)
        frames.append(frame.loc[:, ['date', 'code', 'peTTM', 'pbMRQ', 'psTTM']])
    return pd.concat(frames)
def get_all_market_pe_pb_date(df, date):
    """Market-wide median PE/PB/PS for one trade date from *df*."""
    day = df[df.date == date]
    (pe, pb, ps) = calc_average(
        day['peTTM'].to_list(),
        day['pbMRQ'].to_list(),
        day['psTTM'].to_list(),
        method='mid',
    )
    return (date, round(pe, 2), round(pb, 2), round(ps, 2))
def get_all_market_pe_pb():
    """Load the cached market-wide PE/PB table, extending it incrementally.

    When the cache exists and is stale, only the days after the last
    cached row are recomputed; otherwise the full history is built.
    Returns the complete DataFrame and rewrites the cache file.
    """
    flag = judge_update_stock(is_all=True)
    path = os.path.join(PATH_MARKET,'all_market_pe_pb.csv')
    end_date = datetime.date.today() - datetime.timedelta(1)
    end_date = end_date.strftime('%Y-%m-%d')
    if os.path.exists(path):
        df = read_csv(path,trade_date=str)
        if flag:
            # Resume from the day after the last cached row.
            updated_date = df.iloc[-1].trade_date
            updated_date = datetime.datetime.strptime(updated_date,'%Y-%m-%d')
            start_date = updated_date + datetime.timedelta(1)
            start_date = start_date.strftime('%Y-%m-%d')
            df_tmp = calc_all_market_pe_pb(start_date,end_date)
            df = pd.concat([df, df_tmp])
    else:
        df = calc_all_market_pe_pb('1990-01-01', end_date)
    df.to_csv(path,index=False)
    return df
def calc_all_market_pe_pb(start_date, end_date):
    """Compute market-wide median PE/PB/PS for each trade date in a range.

    start_date / end_date: 'YYYY-MM-DD' inclusive bounds.
    Returns a DataFrame with columns trade_date/PE/PB/PS.

    Fixed: removed the unused `args` list; raw string for the regex;
    O(1) set membership for the trading-calendar filter.
    """
    df = combine_all_markt_stocks()
    # Strip non-digits (exchange prefixes etc.) from stock codes.
    df['code'] = df['code'].apply(lambda x: re.sub(r'\D', '', x))
    start = datetime.datetime.strptime(start_date, '%Y-%m-%d')
    end = datetime.datetime.strptime(end_date, '%Y-%m-%d')
    trade_dates = set(get_trade_date())
    dates = [
        day.strftime('%Y-%m-%d')
        for day in pd.date_range(start=start, end=end, freq="D")
        if day.strftime('%Y-%m-%d') in trade_dates
    ]
    result = []
    qbar = tqdm(dates)
    for date in qbar:
        qbar.set_description('计算%s全市场估值' % date)
        df_date = df[df.date == date]
        if not df_date.empty:
            (pe, pb, ps) = calc_average(
                df_date['peTTM'].to_list(),
                df_date['pbMRQ'].to_list(),
                df_date['psTTM'].to_list(),
                method='mid',
            )
            result.append([date, pe, pb, ps])
    return pd.DataFrame(result, columns=['trade_date', 'PE', 'PB', 'PS'])
def _load_or_fetch_legu(path, fetch, refresh):
    """Return the cached DataFrame at *path*, refetching via *fetch*()
    when the file is missing or *refresh* is True."""
    if os.path.exists(path) and not refresh:
        return read_csv(path)
    df = fetch()
    df.to_csv(path, index=False)
    return df
def all_market_pe_pb_legu():
    """Plot market-wide median PE/PB (legulegu data) with percentiles.

    Fixed: the two copy-pasted cache-or-fetch ladders are factored into
    the _load_or_fetch_legu helper; behavior is unchanged.
    """
    flag = judge_update_stock(is_all=True)
    df_pe = _load_or_fetch_legu('./all_market/all_market_pe_legu.csv',
                                ak.stock_a_ttm_lyr, flag)
    df_pb = _load_or_fetch_legu('./all_market/all_market_pb_legu.csv',
                                ak.stock_a_all_pb, flag)
    pe_ratio = df_pe.iloc[-1].quantileInAllHistoryMiddlePeTtm * 100 # historical percentile
    pb_ratio = df_pb.iloc[-1].quantileInAllHistoryMiddlePB * 100 # historical percentile
    df_pe = df_pe.loc[:,['date','middlePETTM']]
    df_pb = df_pb.loc[:,['date','middlePB']]
    df_pe.columns = ['trade_date','PE']
    df_pb.columns = ['trade_date','PB']
    df = pd.merge(df_pe,df_pb,on='trade_date')
    date = df.iloc[-1].trade_date
    pe = df.iloc[-1].PE
    pb = df.iloc[-1].PB
    title='%s全市场中位数PE、PB 当前PE:%.2f,当前PB:%.2f 当前PE百分位:%.2f,当前PB百分位:%.2f' % (date,pe,pb,pe_ratio,pb_ratio)
    plot(df,title)
    write_update_date(is_all=True)
def all_market_value(years=None):
    """Print a one-row valuation summary table for the whole market.

    years: restrict history to the most recent N years; when omitted,
    all rows with positive PE are used instead.
    """
    get_all_k_data() # refresh every stock's local data first
    df = get_all_market_pe_pb()
    if years:
        df = filter_recent_years(df,years)
    else:
        df = df[df.PE > 0]
    (pe_ratio,pb_ratio) = calc_ratio(df,'PE','PB')
    df = df.reset_index(drop=True)
    min_pe = df['PE'].min()
    max_pe = df['PE'].max()
    mid_pe = df['PE'].median()
    desc_pe = calc_state(pe_ratio)
    min_pb = df['PB'].min()
    max_pb = df['PB'].max()
    mid_pb = df['PB'].median()
    desc_pb = calc_state(pb_ratio)
    init_date = df.loc[0,'trade_date']
    columns=[
        '日期','PE','PE百分位','PE估值','PB','PB百分位','PB估值',
        'PE最小值','PE最大值','PE中位值','PB最小值','PB最大值','PB中位值',
        '起始日期'
    ]
    df = pd.DataFrame(
        [df.iloc[-1].trade_date,
        df.iloc[-1].PE,
        '%.2f' % pe_ratio,
        desc_pe,
        df.iloc[-1].PB,
        '%.2f' % pb_ratio,
        desc_pb,
        min_pe,max_pe,mid_pe,
        min_pb,max_pb,mid_pb,
        init_date]
    )
    # Single column -> transpose into one summary row before printing.
    df = df.T
    df.columns = columns
    write_update_date(is_all=True)
    fmt = 'fancy_grid'
    print(tabulate(df, headers='keys', tablefmt=fmt))
def calc_index_pe_pb(date, code=None):
    """PE/PB/PS on *date* for index *code*, or for the whole market
    (every locally cached stock) when code is None."""
    stocks = get_stocks(code, date) if code else get_all_stocks()
    return get_index_pe_pb_date(date, stocks)
def get_index_pe_pb(start_date=None, end_date=None,code=None):
'''指数历史PE_PB'''
if code:
init_date = get_security_info(code).iloc[0,-1] # 获取指数信息中的publish_date
else:
init_date | |
# -*- coding: utf-8 -*-
import os, sys, requests, json, logging, time, base64, imghdr
if sys.version_info[0] > 2:
import http.client
else:
import httplib
assert httplib
from optparse import OptionParser
from datetime import datetime
__version__ = '2.60.0'
# Log bare messages (no level/name prefix) at INFO level by default.
FORMAT = "%(message)s"
logging.basicConfig(format=FORMAT)
logger = logging.getLogger('testdroid')
logger.setLevel(logging.INFO)
class RequestTimeout(Exception):
    """Raised when an HTTP request/download exceeds its timeout."""
    def __init__(self, msg):
        # Start super() from our own class; the original
        # super(Exception, self) skipped Exception in the MRO and called
        # BaseException.__init__ directly.
        super(RequestTimeout, self).__init__(msg)
class ConnectionError(Exception):
    """Raised when the network connection fails during a request.

    NOTE: shadows the builtin ConnectionError within this module; kept
    for backward compatibility with existing callers.
    """
    def __init__(self, msg):
        # Start super() from our own class (the original skipped
        # Exception in the MRO via super(Exception, self)).
        super(ConnectionError, self).__init__(msg)
class RequestResponseError(Exception):
    """Raised when the cloud API answers with a non-2xx status code."""
    def __init__(self, msg, status_code):
        # Start super() from our own class (the original skipped
        # Exception in the MRO via super(Exception, self)).
        super(RequestResponseError, self).__init__("Request Error: code %s: %s" %
            (status_code, msg) )
        # Preserved so callers can branch on the HTTP status.
        self.status_code = status_code
""" Format unix timestamp to human readable. Automatically detects timestamps with seconds or milliseconds.
"""
def ts_format(timestamp):
if len(str(timestamp)) > 11:
return datetime.fromtimestamp(timestamp/1000).strftime('%x %X %z')
else:
return datetime.fromtimestamp(timestamp).strftime('%x %X %z')
#
# Inspiration from https://code.google.com/p/corey-projects/source/browse/trunk/python2/progress_bar.py
#
class DownloadProgressBar:
    """Console progress bar; update() is designed to be passed as the
    `callback` of Testdroid.download() and redraws the bar in place."""
    def __init__(self):
        self.percent_done = 0
        self.started = time.time()
        self.prog_bar = '  []'
        self.fill_char = '#'
        self.width = 40
        self.pos = 0
        self.total = 0
        self.eta = 'N/A'
    def update(self, pos, total):
        """Redraw the bar for *pos* of *total* bytes transferred.

        NOTE(review): assumes *total* is numeric — a raw Content-Length
        header value (str) would break the arithmetic below; confirm
        callers convert it before passing.
        """
        self.pos = pos
        self.total = total
        percent_done = int(round(100.0 * pos / total)) if total > 0 else 0
        all_full = self.width - 2
        num_hashes = int(round((percent_done / 100.0) * all_full))
        self.prog_bar = ' [' + self.fill_char * num_hashes + ' ' * (all_full - num_hashes) + ']'
        # Centre the "NN%" label inside the bar.
        pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))
        pct_string = '%d%%' % percent_done
        self.duration = int(round(time.time()-self.started))
        # Linear extrapolation of the remaining time; unreliable below 5%.
        self.eta = int(round( self.duration / (percent_done / 100.0)))-self.duration if percent_done > 5 else 'N/A'
        self.prog_bar = self.prog_bar[0:pct_place] + \
            (pct_string + self.prog_bar[pct_place + len(pct_string):])
        self.prog_bar += ' %s/%s bytes, %ss' % (self.pos, self.total, self.duration)
        if pos < total:
            self.prog_bar += ' (E.T.A.: %ss)' % self.eta
        else:
            self.prog_bar += ' '
        if sys.platform.lower().startswith('win'):
            print(str(self) + '\r')
        else:
            # ESC[A moves the cursor up one line so the next draw overwrites.
            print(str(self) + chr(27) + '[A')
    def __str__(self):
        return str(self.prog_bar)
class Testdroid:
    """Client for the Bitbar (Testdroid) cloud REST API."""
    # Cloud URL (not including API path)
    url = None
    # API key for authentication (basic auth; see _build_headers)
    api_key = None
    # OAuth access token
    access_token = None
    # OAuth refresh token
    refresh_token = None
    # Unix timestamp (seconds) when the access token expires
    token_expiration_time = None
    # Buffer size used for downloads
    download_buffer_size = 65536
    # Polling interval (minutes) when awaiting test run completion
    polling_interval_mins = 10
""" Constructor, defaults against cloud.bitbar.com
"""
    def __init__(self, **kwargs):
        """Accepted keyword arguments: apikey, username, password, url,
        download_buffer_size.  Defaults against cloud.bitbar.com."""
        self.api_key = kwargs.get('apikey')
        self.username = kwargs.get('username')
        self.password = kwargs.get('password')
        # `or` also replaces explicit falsy values (None, '') with defaults.
        self.cloud_url = kwargs.get('url') or "https://cloud.bitbar.com"
        self.download_buffer_size = kwargs.get('download_buffer_size') or 65536
    def set_apikey(self, apikey):
        """Use API-key (basic) authentication for subsequent requests."""
        self.api_key = apikey
    def set_username(self, username):
        """Set the username used for the OAuth password grant."""
        self.username = username
    def set_password(self, password):
        """Set the password used for the OAuth password grant."""
        self.password = password
    def set_url(self, url):
        """Point the client at a different cloud base URL."""
        self.cloud_url = url
    def set_download_buffer_size(self, download_buffer_size):
        """Set the chunk size (bytes) used when streaming downloads."""
        self.download_buffer_size = download_buffer_size
    def set_polling_interval_mins(self, polling_interval_mins):
        """Set how often (minutes) to poll for test-run completion."""
        self.polling_interval_mins = polling_interval_mins
""" Get Oauth2 token
"""
def get_token(self):
if not self.access_token:
# TODO: refresh
url = "%s/oauth/token" % self.cloud_url
payload = {
"client_id": "testdroid-cloud-api",
"grant_type": "password",
"username": self.username,
"password": <PASSWORD>
}
res = requests.post(
url,
data = payload,
headers = { "Accept": "application/json" }
)
if res.status_code not in list(range(200, 300)):
raise RequestResponseError(res.text, res.status_code)
reply = res.json()
self.access_token = reply['access_token']
self.refresh_token = reply['refresh_token']
self.token_expiration_time = time.time() + reply['expires_in']
elif self.token_expiration_time < time.time():
url = "%s/oauth/token" % self.cloud_url
payload = {
"client_id": "testdroid-cloud-api",
"grant_type": "refresh_token",
"refresh_token": self.refresh_token
}
res = requests.post(
url,
data = payload,
headers = { "Accept": "application/json" }
)
if res.status_code not in list(range(200, 300)):
print("FAILED: Unable to get a new access token using refresh token")
self.access_token = None
return self.get_token()
reply = res.json()
self.access_token = reply['access_token']
self.refresh_token = reply['refresh_token']
self.token_expiration_time = time.time() + reply['expires_in']
return self.access_token
""" Helper method for getting necessary headers to use for API calls, including authentication
"""
def _build_headers(self):
if self.api_key:
apikey = {'Authorization' : 'Basic %s' % base64.b64encode((self.api_key+":").encode(encoding='utf_8')).decode(), 'Accept' : 'application/json' }
return apikey
else:
return { 'Authorization': 'Bearer %s' % self.get_token(), 'Accept': 'application/json' }
""" Download file from API resource
"""
def download(self, path=None, filename=None, payload={}, callback=None):
url = "%s/api/v2/%s" % (self.cloud_url, path)
try:
res = requests.get(url, params=payload, headers=self._build_headers(), stream=True, timeout=(60.0))
if res.status_code in range(200, 300):
logger.info("Downloading %s (%s bytes)" % (filename, res.headers["Content-Length"]))
pos = 0
total = res.headers['content-length']
# Check if the system is Windows or not.
if os.name == 'nt':
fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_BINARY)
else:
fd = os.open(filename, os.O_RDWR|os.O_CREAT)
for chunk in res.iter_content(self.download_buffer_size):
os.write(fd, chunk)
if callback:
pos += len(chunk)
callback(pos, total)
time.sleep(0.1)
os.close(fd)
else:
raise RequestResponseError(res.text, res.status_code)
res.close()
except requests.exceptions.Timeout:
logger.info("")
logger.info("Download has failed. Please try to restart your download")
raise RequestTimeout("Download has failed. Please try to restart your download")
except requests.exceptions.ConnectionError:
logger.info("")
logger.info("Download has failed. Please try to restart your download")
raise ConnectionError("Download has failed. Please try to restart your download")
""" Upload file to API resource
"""
def upload(self, path=None, filename=None):
# TOOD: where's the error handling?
with open(filename, 'rb') as f:
url = "%s/api/v2/%s" % (self.cloud_url, path)
files = {'file': f}
res = requests.post(url, files=files, headers=self._build_headers())
if res.status_code not in list(range(200, 300)):
raise RequestResponseError(res.text, res.status_code)
return res
""" GET from API resource
"""
def get(self, path=None, payload={}, headers={}):
if path.find('v2/') >= 0:
cut_path = path.split('v2/')
path = cut_path[1]
url = "%s/api/v2/%s" % (self.cloud_url, path)
headers = dict(list(self._build_headers().items()) + list(headers.items()))
res = requests.get(url, params=payload, headers=headers)
if res.status_code not in list(range(200, 300)):
raise RequestResponseError(res.text, res.status_code)
logger.debug(res.text)
if headers['Accept'] == 'application/json':
return res.json()
else:
return res.text
""" POST against API resources
"""
def post(self, path=None, payload=None, headers={}):
headers = dict(list(self._build_headers().items()) + list(headers.items()))
url = "%s/api/v2/%s" % (self.cloud_url, path)
res = requests.post(url, payload, headers=headers)
if res.status_code not in list(range(200, 300)):
raise RequestResponseError(res.text, res.status_code)
return res.json()
""" DELETE API resource
"""
def delete(self, path=None, payload=None, headers={}):
headers = dict(list(self._build_headers().items()) + list(headers.items()))
url = "%s/api/v2/%s" % (self.cloud_url, path)
res = requests.delete(url, headers=headers)
if res.status_code not in list(range(200, 300)):
raise RequestResponseError(res.text, res.status_code)
return res
""" Returns user details
"""
    def get_me(self):
        """Return details of the authenticated user."""
        return self.get("me")
""" Returns list of device groups
"""
    def get_device_groups(self, limit=0):
        """Return the user's device groups (limit=0 means no limit)."""
        return self.get("me/device-groups", payload = {'limit': limit})
""" Returns list of frameworks
"""
    def get_frameworks(self, limit=0):
        """Return the frameworks available to the user (limit=0: no limit)."""
        return self.get("me/available-frameworks", payload = {'limit': limit})
""" Returns list of devices
"""
    def get_devices(self, limit=0):
        """Return the list of cloud devices (limit=0 means no limit)."""
        return self.get(path = "devices", payload = {'limit': limit})
""" Print input files
"""
    def print_input_files(self, limit=0):
        """Print id/name/size/type for each of the user's input files."""
        for input_file in self.get_input_files(limit)['data']:
            print("id:{} name:{} size:{} type:{}".format(input_file['id'],input_file['name'],input_file['size'],input_file['inputType']))
""" Print device groups
"""
    def print_device_groups(self, limit=0):
        """Print id, name, OS type and device count per device group."""
        for device_group in self.get_device_groups(limit)['data']:
            print("%s %s %s %s devices" % (str(device_group['id']).ljust(12), device_group['displayName'].ljust(30), device_group['osType'].ljust(10), device_group['deviceCount']))
""" Print available free Android devices
"""
def print_available_free_android_devices(self, limit=0):
print("")
print("Available Free Android Devices")
print("------------------------------")
for device in self.get_devices(limit)['data']:
if device['creditsPrice'] == 0 and device['locked'] == False and device['osType'] == "ANDROID":
print(device['displayName'])
print("")
""" Print available frameworks
"""
    def print_available_frameworks(self, os_type=None, limit=0):
        """Print id/osType/name of the available frameworks.

        NOTE(review): os_type is currently unused — no filtering is
        applied; kept in the signature for caller compatibility.
        """
        print("")
        print("Available frameworks")
        print("------------------------------")
        for framework in self.get_frameworks(limit)['data']:
            print("id: {}\tosType:{}\tname:{}".format(framework['id'], framework['osType'], framework['name']))
        print("")
""" Print available free iOS devices
"""
def print_available_free_ios_devices(self, limit=0):
print("")
print("Available Free iOS Devices")
print("--------------------------")
for device in self.get_devices(limit)['data']:
if device['creditsPrice'] == 0 and device['locked'] == False and device['osType'] == "IOS":
print(device['displayName'])
print("")
""" Print available free devices
"""
    def print_available_free_devices(self, limit=0):
        """Print free Android devices followed by free iOS devices."""
        self.print_available_free_android_devices(limit)
        self.print_available_free_ios_devices(limit)
""" Create a project
"""
def create_project(self, project_name, project_type):
project = self.post(path="me/projects", payload={"name": project_name, "type": project_type})
print(project)
logger.info("Project %s: %s (%s) created" % (project['id'], project['name'], project['type'] ))
return project
""" Delete a project
"""
def delete_project(self, project_id):
project = self.get_project(project_id)
if project:
self.delete("me/projects/%s" % project_id)
""" Returns projects for user
"""
    def get_projects(self, limit=0):
        """Return the user's projects (limit=0 means no limit)."""
        return self.get(path="me/projects", payload = {'limit': limit})
""" Returns a single project
"""
    def get_project(self, project_id):
        """Return a single project by id."""
        return self.get("me/projects/%s" % project_id)
""" Print projects
"""
    def print_projects(self, limit=0):
        """Print id, type and name for each of the user's projects."""
        me = self.get_me()
        print("Projects for %s <%s>:" % (me['name'], me['email']))
        for project in self.get_projects(limit)['data']:
            print("%s %s \"%s\"" % (str(project['id']).ljust(10), project['type'].ljust(15), project['name']))
""" Upload application file to project
"""
    def upload_application_file(self, project_id, filename):
        """Upload *filename* as the application binary of a project."""
        me = self.get_me()
        path = "users/%s/projects/%s/files/application" % (me['id'], project_id)
        self.upload(path=path, filename=filename)
""" Upload application file to project
"""
    def upload_file(self, filename):
        """Upload *filename* to the user's file storage and print its id/name/size."""
        me = self.get_me()
        path = "users/%s/files" % (me['id'])
        res = self.upload(path=path, filename=filename).json()
        print("ID:%s Name:%s Size:%s" % (str(res['id']).ljust(10), res['name'].ljust(15), res['size']))
""" Upload test file to project
"""
    def upload_test_file(self, project_id, filename):
        """Upload *filename* as the test package of a project."""
        me = self.get_me()
        path = "users/%s/projects/%s/files/test" % (me['id'], project_id)
        self.upload(path=path, filename=filename)
""" Delete project parameter
"""
    def delete_project_parameters(self, project_id, parameter_id):
        """Delete one configuration parameter from a project.

        NOTE(review): the leading '/' makes the final URL contain
        '/api/v2//users/...' — confirm the server tolerates the double
        slash.
        """
        me = self.get_me()
        path = "/users/%s/projects/%s/config/parameters/%s" % ( me['id'], project_id, parameter_id )
        return self.delete(path=path)
""" Get project parameters
"""
def get_project_parameters(self, project_id):
path = | |
# Source: webhacking/finance — finance/models.py
import collections
import functools
import operator
from datetime import datetime, timedelta
import os
from sqlalchemy import create_engine, desc
from sqlalchemy import (
BigInteger,
Column,
DateTime,
Enum,
ForeignKey,
ForeignKeyConstraint,
Integer,
Numeric,
String,
Text,
UniqueConstraint,
)
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.indexable import index_property
from sqlalchemy.orm import relationship, sessionmaker
import uuid64
from finance.exceptions import (
AccountNotFoundException,
AssetNotFoundException,
AssetValueUnavailableException,
InvalidTargetAssetException,
)
from finance.utils import date_range
from typing import Any # noqa
# Use native JSON on PostgreSQL, plain String elsewhere.
JsonType = String().with_variant(JSON(), "postgresql")
Base = declarative_base()
is_testing = bool(os.environ.get("TESTING", ""))
# Fixed: the condition was inverted — with TESTING set it selected
# SBF_DB_URL (production) and pointed tests at the live database.
db_url = os.environ["SBF_TEST_DB_URL" if is_testing else "SBF_DB_URL"]
engine = create_engine(db_url, echo=False)
Session = sessionmaker(bind=engine)
session = Session()
def balance_adjustment(account, asset, quantity, date=None, transaction=None):
    """Record a balance-adjustment entry on *account* for *asset*."""
    fields = dict(
        account=account,
        asset=asset,
        quantity=quantity,
        type=RecordType.balance_adjustment,
        created_at=date,
        transaction=transaction,
    )
    return Record.create(**fields)
def deposit(account, asset, quantity, date=None, transaction=None):
    """Record a deposit (default record type) on *account* for *asset*."""
    fields = dict(
        account=account,
        asset=asset,
        quantity=quantity,
        created_at=date,
        transaction=transaction,
    )
    return Record.create(**fields)
def get_asset_by_fund_code(code: str):
    """Return the Asset mapped to fund *code* via its JSON `data` column.

    :param code: A fund code
    :raises AssetNotFoundException: when no asset carries the code.
    """
    # Raw-SQL workaround: we have not found a clean way to build an Asset
    # instance straight from a RowProxy, so fetch the id and re-query.
    query = "SELECT * FROM asset WHERE data->>'code' = :code LIMIT 1"
    row = session.execute(query, {"code": code}).first()
    if row is None:
        raise AssetNotFoundException(
            "Fund code {} is not mapped to any asset".format(code)
        )
    return Asset.query.get(row[0])
class ClassPropertyDescriptor(object):
    """Descriptor implementing a property that is readable on the class
    itself (see the `classproperty` decorator below)."""
    def __init__(self, fget, fset=None):
        self.fget = fget
        self.fset = fset
    def __get__(self, obj, class_=None):
        if class_ is None:
            class_ = type(obj)
        # fget is a classmethod: bind it to the class, then call it.
        return self.fget.__get__(obj, class_)()
    def __set__(self, obj, value):
        # NOTE(review): __set__ only intercepts assignment on instances,
        # not assignment on the class itself.
        if not self.fset:
            raise AttributeError("Cannot set attribute")
        type_ = type(obj)
        return self.fset.__get__(obj, type_)(value)
    def setter(self, func):
        """Register *func* as the setter, wrapping it in classmethod."""
        if not isinstance(func, (classmethod, staticmethod)):
            func = classmethod(func)
        self.fset = func
        return self
def classproperty(func):
    """Decorator exposing *func* as a read-only property on the class."""
    wrapped = func if isinstance(func, (classmethod, staticmethod)) else classmethod(func)
    return ClassPropertyDescriptor(wrapped)
class CRUDMixin(object):
    """Create/read/update/delete helpers shared by all models.

    Copied from https://realpython.com/blog/python/python-web-applications-with-flask-part-ii/
    """  # noqa
    __table_args__ = {"extend_existing": True}  # type: Any
    # Fixed: pass the callable, not its result — `default=uuid64.issue()`
    # generated a single id at class-definition time and reused it as the
    # column default for every row.
    id = Column(
        BigInteger, primary_key=True, autoincrement=False, default=uuid64.issue
    )
    @classproperty
    def query(cls):
        """Session-bound query object for this model class."""
        return session.query(cls)
    @classmethod
    def create(cls, commit=True, ignore_if_exists=False, **kwargs):
        """Insert a new row; on conflict, optionally return the existing one."""
        if "id" not in kwargs:
            kwargs.update(dict(id=uuid64.issue()))
        instance = cls(**kwargs)
        if hasattr(instance, "created_at") and getattr(instance, "created_at") is None:
            instance.created_at = datetime.utcnow()
        try:
            return instance.save(commit=commit)
        except (IntegrityError, InvalidRequestError):
            if ignore_if_exists:
                session.rollback()
                return cls.find(**kwargs)
            else:
                raise
    @classmethod
    def get(cls, id):
        """Fetch a row by primary key, or None."""
        return cls.query.get(id)
    # We will also proxy Flask-SqlAlchemy's get_or_404 for symmetry.
    @classmethod
    def get_or_404(cls, id):
        return cls.query.get_or_404(id)
    @classmethod
    def find(cls, **kwargs):
        """Return the first row matching the given column filters, or None."""
        return cls.query.filter_by(**kwargs).first()
    @classmethod
    def exists(cls, **kwargs):
        """True when at least one row matches the given column filters."""
        return cls.find(**kwargs) is not None
    def update(self, commit=True, **kwargs):
        """Set attributes from kwargs and optionally commit.

        Fixed: dict.iteritems() is Python 2 only and raised
        AttributeError under Python 3; use items().
        """
        for attr, value in kwargs.items():
            setattr(self, attr, value)
        return commit and self.save() or self
    def save(self, commit=True):
        """Add self to the session; commit when requested."""
        session.add(self)
        if commit:
            session.commit()
        return self
    def delete(self, commit=True):
        """Delete self from the session; commit when requested."""
        session.delete(self)
        return commit and session.commit()
    def __iter__(self):
        """Yield (column_name, stringified value) pairs."""
        for column in self.__table__.columns:
            yield column.name, str(getattr(self, column.name))
class User(CRUDMixin, Base):  # type: ignore
    """A user of the system; owns zero or more accounts."""
    __tablename__ = "user"
    given_name = Column(String)
    family_name = Column(String)
    email = Column(String, unique=True)
    #: Arbitrary data
    data = Column(JsonType)
    accounts = relationship("Account", backref="user", lazy="dynamic")
    def __repr__(self):
        return "User <{}>".format(self.name)
    @property
    def name(self):
        """Display name rendered as 'Family, Given'."""
        # TODO: i18n
        return u"{}, {}".format(self.family_name, self.given_name)
# TODO: Need a way to keep track of the value of volatile assets such as stocks
# TODO: Need a way to convert one asset's value to another (e.g., currency
# conversion, stock evaluation, etc.)
class Granularity(object):
    """Time resolutions at which asset values may be recorded."""
    sec = "1sec"
    min = "1min"
    five_min = "5min"
    hour = "1hour"
    day = "1day"
    week = "1week"
    month = "1month"
    year = "1year"
    @classmethod
    def is_valid(cls, value):
        """True when *value* is one of the recognized granularity labels."""
        return value in {
            cls.sec, cls.min, cls.five_min, cls.hour,
            cls.day, cls.week, cls.month, cls.year,
        }
class AssetValue(CRUDMixin, Base):  # type: ignore
    """Represents a unit price of an asset at a particular point of time. The
    granularity of the 'particular point of time' may range from one second
    to a year. See `Granularity` for more details.
    """
    __tablename__ = "asset_value"
    # At most one row per (asset, time, granularity).
    __table_args__ = (
        UniqueConstraint("asset_id", "evaluated_at", "granularity"),
        {},
    )  # type: Any
    asset_id = Column(BigInteger, ForeignKey("asset.id"))
    # Asset the prices below are denominated in (e.g. a currency).
    base_asset_id = Column(BigInteger, ForeignKey("asset.id"))
    base_asset = relationship("Asset", uselist=False, foreign_keys=[base_asset_id])
    evaluated_at = Column(DateTime(timezone=False))
    source = Column(Enum("yahoo", "google", "kofia", "test", name="asset_value_source"))
    granularity = Column(
        Enum(
            "1sec",
            "1min",
            "5min",
            "1hour",
            "1day",
            "1week",
            "1month",
            "1year",
            name="granularity",
        )
    )
    # NOTE: Should we also store `fetched_at`?
    # OHLCV columns.
    open = Column(Numeric(precision=20, scale=4))
    high = Column(Numeric(precision=20, scale=4))
    low = Column(Numeric(precision=20, scale=4))
    close = Column(Numeric(precision=20, scale=4))
    volume = Column(Integer)
    def __repr__(self):
        return (
            "AssetValue(evaluated_at={0}, open={1}, high={2}, low={3}, "
            "close={4}, volume={5})".format(
                self.evaluated_at,
                self.open,
                self.high,
                self.low,
                self.close,
                self.volume,
            )
        )
class AssetType(object):
    """String constants used as polymorphic identities for Asset.type."""
    currency = "currency"
    stock = "stock"
    bond = "bond"
    p2p_bond = "p2p_bond"
    security = "security"  # NOTE: Is this necessary?
    fund = "fund"
    commodity = "commodity"
# All valid values for the Asset.type enum column.
asset_types = (
    AssetType.currency,
    AssetType.stock,
    AssetType.bond,
    AssetType.p2p_bond,
    AssetType.security,
    AssetType.fund,
    AssetType.commodity,
)
class Asset(CRUDMixin, Base):  # type: ignore
    """Represents an asset.

    Base of a polymorphic hierarchy: the `type` column selects the
    concrete subclass (see asset_types / the subclasses below).
    """
    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "asset",
        "polymorphic_on": "type",
    }
    type = Column(Enum(*asset_types, name="asset_type"))
    name = Column(String)
    # FIXME: Rename this as `symbol` or rename `get_by_symbol` -> `get_by_code`
    code = Column(String, unique=True)
    isin = Column(String)
    description = Column(Text)
    #: Arbitrary data
    data = Column(JsonType)
    # Values where this asset is the one being priced.
    asset_values = relationship(
        "AssetValue",
        backref="asset",
        foreign_keys=[AssetValue.asset_id],
        lazy="dynamic",
        cascade="all,delete-orphan",
    )
    # Values where this asset is the denomination (base) asset.
    base_asset_values = relationship(
        "AssetValue",
        foreign_keys=[AssetValue.base_asset_id],
        lazy="dynamic",
        cascade="all,delete-orphan",
    )
    records = relationship(
        "Record", backref="asset", lazy="dynamic", cascade="all,delete-orphan"
    )
    def __repr__(self):
        name = self.code if self.code is not None else self.name
        return "Asset <{} ({})>".format(name, self.description)
    @property
    def unit_price(self):
        # Concrete subclasses are expected to provide this.
        raise NotImplementedError
    @property
    def current_value(self):
        raise NotImplementedError
    @classmethod
    def get_by_symbol(cls, symbol):
        """Gets an asset by symbol (e.g., AMZN, NVDA).

        NOTE: We may need to rename this method, when we find a more suitable
        name (rather than 'symbol').  Raises AssetNotFoundException when
        no asset matches.
        """
        asset = cls.query.filter(cls.code == symbol).first()
        if asset is None:
            raise AssetNotFoundException(symbol)
        else:
            return asset
    @classmethod
    def get_by_isin(cls, isin):
        """Gets an asset by ISIN.

        :param isin: International Securities Identification Number
        Raises AssetNotFoundException when no asset matches.
        """
        asset = cls.query.filter(cls.isin == isin).first()
        if asset is None:
            raise AssetNotFoundException(isin)
        else:
            return asset
class BondAsset(Asset):
    """A bond asset (single-table inheritance; rows with type == 'bond')."""

    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "bond",
    }
class CommodityAsset(Asset):
    """A commodity asset (rows with type == 'commodity')."""

    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "commodity",
    }
class CurrencyAsset(Asset):
    """A currency asset (rows with type == 'currency')."""

    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "currency",
    }
class FundAsset(Asset):
    """A fund asset (rows with type == 'fund')."""

    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "fund",
    }
class P2PBondAsset(Asset):
    """A peer-to-peer lending bond (rows with type == 'p2p_bond')."""

    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "p2p_bond",
    }

    def is_delayed(self):
        # TODO: repayment-status predicates are not implemented yet.
        raise NotImplementedError

    def is_defaulted(self):
        raise NotImplementedError

    def last_payment(self):
        raise NotImplementedError

    def principle(self):
        # Earliest recorded asset value -- presumably the original principal
        # ("principle" looks like a misspelling of "principal", kept for
        # interface compatibility).
        # NOTE(review): raises AttributeError when no asset value exists,
        # since .first() returns None -- confirm callers guarantee data.
        return self.asset_values.order_by(AssetValue.evaluated_at).first().close

    def returned_principle(self):
        # Most recent valuation at or before "now".
        # NOTE(review): datetime.now() is naive local time -- confirm that
        # AssetValue.evaluated_at uses the same convention.
        now = datetime.now()
        return (
            self.asset_values.filter(AssetValue.evaluated_at <= now)
            .order_by(AssetValue.evaluated_at.desc())
            .first()
            .close
        )
class SecurityAsset(Asset):
    """A generic security asset (rows with type == 'security')."""

    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "security",
    }
class StockAsset(Asset):
    """A stock (equity) asset (rows with type == 'stock')."""

    __tablename__ = "asset"
    __mapper_args__ = {
        "polymorphic_identity": "stock",
    }
    # Figures read from the JSON ``data`` column via index_property.
    bps = index_property("data", "bps")  # presumably book value per share
    eps = index_property("data", "eps")  # presumably earnings per share
class AccountType(object):
    """String constants used as the discriminator values for Account.type."""

    checking = "checking"
    savings = "savings"
    investment = "investment"
    # NOTE: the stored value contains a space, unlike the attribute name.
    credit_card = "credit card"
    virtual = "virtual"
# Tuple of every valid account-type discriminator; consumed below as the
# values of the "account_type" Enum column on Account.
account_types = (
    AccountType.checking,
    AccountType.savings,
    AccountType.investment,
    AccountType.credit_card,
    AccountType.virtual,
)
class Account(CRUDMixin, Base):  # type: ignore
    """Represents an account. An account may contain multiple records based
    on different assets. For example, a single bank account may have a balance
    in different foreign currencies."""

    __tablename__ = "account"
    # An account is uniquely identified by its (institution, number) pair.
    __table_args__ = (UniqueConstraint("institution", "number"), {})  # type: Any
    user_id = Column(BigInteger, ForeignKey("user.id"))
    portfolio_id = Column(BigInteger, ForeignKey("portfolio.id"))
    #: Discriminator restricted to the values in ``account_types``.
    type = Column(Enum(*account_types, name="account_type"))
    name = Column(String)
    institution = Column(String)  # Could be a routing number (US)
    number = Column(String)  # Account number
    description = Column(Text)
    #: Arbitrary data
    data = Column(JsonType)
    # NOTE: Transaction-Account relationship is many-to-many
    # transactions = relationship('Transaction', backref='account',
    #                             lazy='dynamic')
    records = relationship("Record", backref="account", lazy="dynamic")
def __repr__(self):
return "Account <{} ({})>".format(self.name, self.type)
@classmethod
def get_by_number(cls, institution: str, number: str):
account = (
cls.query.filter(cls.institution == institution)
.filter(cls.number == number)
.first()
)
if account is None:
raise AccountNotFoundException((institution, number))
else:
return account
    def assets(self):
        """Returns all assets under this account."""
        # TODO: not implemented yet; callers must not rely on this.
        raise NotImplementedError
def balance(self, evaluated_at=None):
"""Calculates the account balance on a given date."""
if evaluated_at is None:
evaluated_at = datetime.utcnow()
# FIMXE: Consider open transactions
records = Record.query.filter(
Record.account == self, Record.created_at <= evaluated_at
).order_by(Record.created_at)
# Sum all transactions to produce {asset: sum(quantity)} dictionary
bs = {}
rs = [(r.asset, r.quantity, r.type) for r in records]
for asset, quantity, type_ in rs:
bs.setdefault(asset, 0)
if type_ == RecordType.balance_adjustment:
# Previous records will be ignored when 'balance_adjustment'
# is seen.
bs[asset] = quantity
else:
bs[asset] += quantity
return bs
def net_worth(
self,
evaluated_at=None,
granularity=Granularity.day,
approximation=False,
base_asset=None,
):
"""Calculates the net worth of the account on a particular datetime.
If approximation=True and the asset value | |
#!/usr/bin/python2.5 # pylint: disable-msg=C6301,C6409
#
# Copyright 2009 Google Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Border Gateway Protocol - various constants and functions."""
__author__ = "<EMAIL> (<NAME>)"
__version__ = "4.0"
import array
import math
import socket
import struct
import sys
import indent
# In general, see RFC4271 for details.
#
# The length of the fixed header part of a BGP message.
#
HEADER_LEN = 19
MIN_LENGTH = HEADER_LEN
MAX_LENGTH = 4096
# Message types, see RFC4271 and RFC2918.
#
OPEN = 1
UPDATE = 2
NOTIFICATION = 3
KEEPALIVE = 4
ROUTE_REFRESH = 5
MSG_TYPE_STR = {OPEN: "OPEN",
UPDATE: "UPDATE",
NOTIFICATION: "NOTIFICATION",
KEEPALIVE: "KEEPALIVE",
ROUTE_REFRESH: "ROUTE-REFRESH"}
# Attribute types.
#
ATTR_TYPE_ORIGIN = 1
ATTR_TYPE_AS_PATH = 2
ATTR_TYPE_NEXT_HOP = 3
ATTR_TYPE_MULTI_EXIT_DISC = 4
ATTR_TYPE_LOCAL_PREF = 5
ATTR_TYPE_ATOMIC_AGGREGATE = 6
ATTR_TYPE_AGGEGATOR = 7
ATTR_TYPE_COMMUNITIES = 8
ATTR_TYPE_ORIGINATOR_ID = 9
ATTR_TYPE_CLUSTER_LIST = 10
ATTR_TYPE_DPA = 11
ATTR_TYPE_ADVERTISER = 12
ATTR_TYPE_RCID_PATH = 13
ATTR_TYPE_MP_REACH_NLRI = 14
ATTR_TYPE_MP_UNREACH_NLRI = 15
ATTR_TYPE_EXTENDED_COMMUNITIES = 16
ATTR_TYPE_AS4_PATH = 17
ATTR_TYPE_AS4_AGGREGATOR = 18
ATTR_TYPE_STR = {ATTR_TYPE_ORIGIN: "ORIGIN",
ATTR_TYPE_AS_PATH: "AS_PATH",
ATTR_TYPE_NEXT_HOP: "NEXT_HOP",
ATTR_TYPE_MULTI_EXIT_DISC: "MULTI_EXIT_DISC",
ATTR_TYPE_LOCAL_PREF: "LOCAL_PREF",
ATTR_TYPE_ATOMIC_AGGREGATE: "ATOMIC_AGGREGATE",
ATTR_TYPE_AGGEGATOR: "AGGREGATOR",
ATTR_TYPE_COMMUNITIES: "COMMUNITIES",
ATTR_TYPE_ORIGINATOR_ID: "ORIGINATOR_ID",
ATTR_TYPE_CLUSTER_LIST: "CLUSTER_LIST",
ATTR_TYPE_DPA: "DPA",
ATTR_TYPE_ADVERTISER: "ADVERTISER",
ATTR_TYPE_RCID_PATH: "RCID_PATH",
ATTR_TYPE_MP_REACH_NLRI: "MP_REACH_NLRI",
ATTR_TYPE_MP_UNREACH_NLRI: "MP_UNREACH_NLRI",
ATTR_TYPE_EXTENDED_COMMUNITIES: "EXTENDED_COMMUNITIES",
ATTR_TYPE_AS4_PATH: "AS4_PATH",
ATTR_TYPE_AS4_AGGREGATOR: "AS4_AGGREGATOR"}
# Attribute flag values.
#
ATTR_FLAG_OPTIONAL = 128
ATTR_FLAG_TRANSITIVE = 64
ATTR_FLAG_PARTIAL = 32
ATTR_FLAG_EXT_LEN = 16
# Values for the ORIGIN attribute.
#
ORIGIN_IGP = 0
ORIGIN_EGP = 1
ORIGIN_INCOMPLETE = 2
ORIGIN_STR = {ORIGIN_IGP: "IGP",
ORIGIN_EGP: "EGP",
ORIGIN_INCOMPLETE: "incomplete"}
# AS_PATH attribute path segment type codes.
#
AS_SET = 1
AS_SEQUENCE = 2
AS_CONFED_SET = 3
AS_CONFED_SEQUENCE = 4
AS_PATH_SEG_STR = {AS_SET: "set",
AS_SEQUENCE: "sequence",
AS_CONFED_SET: "confed_set",
AS_CONFED_SEQUENCE: "confed_seq"}
AS_PATH_SEG_FORMAT = {AS_SET: "{ %s }",
AS_SEQUENCE: "%s",
AS_CONFED_SET: "( %s )",
AS_CONFED_SEQUENCE: "( %s )"}
# NOTIFICATION codes.
#
MSG_HEADER_ERR = 1
OPEN_MSG_ERR = 2
UPD_MSG_ERR = 3
HOLD_TIMER_EXPIRED = 4
FSM_ERR = 5
CEASE = 6
NOTIFICATION_CODE = {MSG_HEADER_ERR: "Message Header Error",
OPEN_MSG_ERR: "Open Message Error",
UPD_MSG_ERR: "Update Message Error",
HOLD_TIMER_EXPIRED: "Hold Timer Expired",
FSM_ERR: "FSM Error",
CEASE: "Cease"}
# NOTIFICATION subcodes.
#
NOTIFICATION_SUBCODE = {MSG_HEADER_ERR: {1: "Connection Not Synchronized",
2: "Bad Message Length",
3: "Bad Message Type"},
OPEN_MSG_ERR: {1: "Unsupported Version Number",
2: "Bad Peer AS",
3: "Bad BGP Identifier",
4: "Unsupported Optional Parameter",
5: "[Deprecated per RFC4271]",
6: "Unacceptable Hold Time"},
UPD_MSG_ERR: {1: "Malformed Attribute List",
2: "Unrecognized Well-known Attribute",
3: "Missing Well-known Attribute",
4: "Attribute Flags Error",
5: "Attribute Length Error",
6: "Invalid ORIGIN Attribute",
7: "[Deprecated per RFC4271]",
8: "Invalid NEXT_HOP Attribute",
9: "Optional Attribute Error",
10: "Invalid Network Field",
11: "Malformed AS_PATH"}}
# Well-known community values.
#
WELL_KNOWN_COMM = {0xFFFFFF01: "NO_EXPORT",
0xFFFFFF02: "NO_ADVERTISE",
0xFFFFFF03: "NO_EXPORT_SUBCONFED"}
# Address families, per RFC1700.
#
AF_IP = 1
AF_IP6 = 2
AF_STR = {AF_IP: "IPv4",
AF_IP6: "IPv6"}
# Multiprotocol Subsequent Address Family Identifier (SAFI) per RFC2858.
#
MP_SAFI_UNICAST = 1
MP_SAFI_MULTICAST = 2
MP_SAFI_STR = {1: "unicast",
2: "multicast",
3: "unicast+multicast",
128: "MPLS labeled VPN-IPv6"}
def BytesForPrefix(prefix_len):
  """Determine # of octets required to hold a prefix of length per RFC4271.

  Args:
    prefix_len: length of the prefix in bits (0..128).

  Returns:
    An int indicating how many octets are used to hold the prefix.

  Raises:
    ValueError: indicates that prefix_len has an invalid value
  """
  if not 0 <= prefix_len <= 128:
    raise ValueError("prefix_len %d is out of range" % prefix_len)
  # Integer ceiling division, equivalent to ceil(prefix_len / 8).
  return (prefix_len + 7) // 8
def BytesForPrefixAndLabels(prefix_len):
  """Determine # of octets required to hold a prefix of length per RFC4760.

  Unlike BytesForPrefix, no range validation is performed: the original
  check was deliberately commented out (presumably because MPLS-labeled
  NLRI lengths can exceed 128 bits -- TODO confirm), so the previously
  documented ValueError can never be raised and is no longer advertised.
  Label parsing is not implemented; the label count is always 0.

  Args:
    prefix_len: length of the prefix in bits.

  Returns:
    An int indicating how many octets are used to hold the prefix.
    An int indicating the number of labels (currently always 0).
  """
  return int(math.ceil(prefix_len / 8.0)), 0
def BytesForSnpa(snpa_len):
  """Determine # of octets required to hold an SNPA per RFC2858.

  You have to read RFC2858 to believe this: SNPA lengths are expressed
  in semi-octets (4-bit units), so two semi-octets fit per octet.

  Args:
    snpa_len: length of the SNPA in semi-octets (1..256).

  Returns:
    An int indicating how many octets are used to hold the prefix.

  Raises:
    ValueError: indicates that snpa_len has an invalid value
  """
  if not 1 <= snpa_len <= 256:
    raise ValueError("snpa_len %d is out of range" % snpa_len)
  # Integer ceiling division, equivalent to ceil(snpa_len / 2).
  return (snpa_len + 1) // 2
def DumpHexString(buff, start, length):
  """Convert hex data to text.

  Args:
    buff: a buffer of hex data to convert to text.
    start: starting offset of data to convert.
    length: length of data to convert.

  Returns:
    A string of space-separated two-digit hex octets, e.g. "01 ff 00".
  """
  octets = (struct.unpack_from("B", buff, start + x)[0] for x in range(length))
  return " ".join("%02x" % octet for octet in octets)
def ParseBgpAsPath(update, start, end, rfc4893_updates):
  """Parse BGP AS_PATH path attribute information into text per RFC4271.

  Args:
    update: a buffer containing a BGP message.
    start: offset at which AS_PATH parsing is to start.
    end: offset at which AS_PATH parsing is to stop.
    rfc4893_updates: true if AS_PATH conforms to RFC4893, otherwise false

  Returns:
    A list of strings containing the text representation of the path attribute.
  """
  path_text = []
  # We're going to try this with the default value of rfc4893_updates,
  # and try again with it forced to True if we get an exception.
  #
  try:
    offset = start
    # Walk through the path segments.
    #
    while offset < end:
      # Get type and length.
      #
      # NOTE(review): indexing `update` is assumed to yield ints (e.g. an
      # array.array('B')); a plain Python 2 str would yield one-character
      # strings and break the dict lookup below -- confirm the caller.
      path_seg_type = update[offset]
      offset += 1
      path_seg_len = update[offset]
      offset += 1
      path_seg_val = []
      # Step through AS numbers in path.
      #
      for _ in range(path_seg_len):
        # RFC4893-style updates have 4-octet ASNs, otherwise 2-octet ASNs.
        #
        if rfc4893_updates:
          path_seg_val.append(str(struct.unpack_from(">L",
                                                     update,
                                                     offset)[0]))
          offset += 4
        else:
          path_seg_val.append(str(struct.unpack_from(">H",
                                                     update,
                                                     offset)[0]))
          offset += 2
      # Turn the list of AS numbers into text, using a format string
      # appropriate to the segment type.
      #
      path_seg_str = " ".join(path_seg_val)
      path_text.append(AS_PATH_SEG_FORMAT[path_seg_type] % path_seg_str)
  # If we get a KeyError exception and the rfc4893_updates flag is not
  # set, tell the user to try using the rfc4893 switch; reraise the exception.
  # (A KeyError typically means a 2-octet parse desynchronized and produced
  # a bogus segment type.)
  #
  except KeyError, esc:
    if not rfc4893_updates:
      sys.stderr.write("ParseBgpAsPath parsing error, try --rfc4893 switch\n")
    raise esc
  return path_text
def ParseBgpCommunities(update, start, end):
  """Parse BGP COMMUNITIES path attribute information into text.

  Args:
    update: a buffer containing a BGP message.
    start: offset at which community parsing is to start.
    end: offset at which community parsing is to stop.

  Returns:
    A list of strings containing the text representation of the path attribute.
  """
  comm_text = []
  # Each community value occupies four octets.
  for offset in range(start, end, 4):
    value = struct.unpack_from(">L", update, offset)[0]
    if value in WELL_KNOWN_COMM:
      # Well-known communities are presented by name.
      comm_text.append(WELL_KNOWN_COMM[value])
    else:
      # Everything else is shown in the conventional "high:low" form.
      high, low = struct.unpack_from(">HH", update, offset)
      comm_text.append("%d:%d" % (high, low))
  return comm_text
def ParseBgpHeader(header, verbose=False):
"""Parse a BGP header into text, see RFC4271 section 4.1.
Args:
header: a buffer containing a BGP message header.
verbose: be chatty, or not.
Returns:
An int indicating the length of the rest of the BGP message,
an int indication the type of the message,
a list of strings to print.
Raises:
ValueError: an invalid value was found in the message.
"""
print_msg = []
indent_str = indent.IndentLevel(indent.BGP_HEADER_INDENT)
try:
# Verify that the marker is correct, raise a ValueError exception if
# it is not.
#
for x in range(0, 15):
if header[x] != 255:
raise ValueError("BGP marker octet %d != 255" % x)
# Unpack the length and type.
#
length, msg_type = struct.unpack(">HB", header[16:19])
if length < MIN_LENGTH or length > MAX_LENGTH:
raise ValueError("BGP message length %d incorrect" % length)
if msg_type not in MSG_TYPE_STR:
raise ValueError("BGP message type %d unknown" % msg_type)
print_msg.append("%sBGP %s" % (indent_str, MSG_TYPE_STR[msg_type]))
if verbose:
print_msg.append(" length %d\n" % (length - HEADER_LEN))
else:
print_msg.append("\n")
# Return the length of the rest of the PDU, its type, and the list
# of strings | |
owner_id:
raise commands.errors.CheckFailure
if new_volume == def_v:
return await ctx.invoke(self.defvolume_delete)
if 0 <= new_volume <= 100:
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
settings.volume = new_volume
await SettingsDB.get_instance().set_guild_settings(settings)
await ctx.send(get_str(ctx, "music-defvolume-set").format(f"`{new_volume}%`"))
else:
await ctx.send(get_str(ctx, "music-volume-unreasonable-volume").format(new_volume), delete_after=20)
@defvolume.command(name="reset", aliases=["remove", "delete", "stop", "end", "off", "clean", "clear"])
async def defvolume_delete(self, ctx):
"""
{command_prefix}defvolume reset
{help}
"""
if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
raise commands.errors.CheckFailure
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
del settings.volume
await SettingsDB.get_instance().set_guild_settings(settings)
await ctx.send(get_str(ctx, "music-defvolume-reset").format(f"`{def_v}%`"))
    @commands.group(aliases=["autodisconnect", "autotime", "autotimer", "leaveafter", "timer", "timerdisconnect", "autoleaveafter", "al", "ad", "customtime"], invoke_without_command=True)
    async def autoleave(self, ctx, *, leftover_args):
        """
            {command_prefix}autoleave set [seconds]
            {command_prefix}autoleave never
            {command_prefix}autoleave reset
            {command_prefix}autoleave now
        {help}
        """
        # Group entry point: a bare number is shorthand for "autoleave set".
        if not ctx.invoked_subcommand:
            if leftover_args.isdigit():
                return await ctx.invoke(self.autoleave_set, new_time=int(leftover_args))
            # Anything else: show the command help.
            await self.bot.send_cmd_help(ctx)
    @autoleave.command(name="now", aliases=["queue", "display", "list", "liste", "info", "songlist"])
    async def autoleave_list(self, ctx):
        """
            {command_prefix}autoleave now
        {help}
        """
        # Report the guild's current auto-leave timer.
        settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
        # NOTE: isinstance(False, int) is True, so timer == False ("never
        # leave") takes this branch and hits the inner else.
        if isinstance(settings.timer, int):
            if settings.timer:
                # Show seconds plus a minutes approximation.
                value = round(settings.timer / 60, 2)
                await ctx.send(get_str(ctx, "music-autoleave-now").format(f"`{settings.timer} {get_str(ctx, 'cmd-nextep-seconds')} ({value} {get_str(ctx, 'cmd-nextep-minutes') if value > 1 else get_str(ctx, 'cmd-nextep-minute')})`"))
            else:
                await ctx.send(get_str(ctx, "music-autoleave-now-never"))
        else:
            # No custom timer stored: report the default (def_time seconds).
            await ctx.send(get_str(ctx, "music-autoleave-now").format(f"`{def_time} {get_str(ctx, 'cmd-nextep-seconds')} (3 {get_str(ctx, 'cmd-nextep-minutes')})`"))
@autoleave.command(name="set", aliases=["add", "are", "config"])
async def autoleave_set(self, ctx, *, new_time: int):
"""
{command_prefix}autoleave set [number]
{help}
"""
if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
raise commands.errors.CheckFailure
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
if new_time == def_time:
await ctx.invoke(self.autoleave_delete)
min_val = 0
max_val = 1800
if min_val <= new_time <= max_val:
settings.timer = new_time
await SettingsDB.get_instance().set_guild_settings(settings)
if ctx.guild.id in self.bot.lavalink.players.players:
player = await self.get_player(ctx.guild)
player.timer_value = new_time
await ctx.send(get_str(ctx, "music-autoleave-set").format(f"`{settings.timer} {get_str(ctx, 'cmd-nextep-seconds') if new_time > 1 else get_str(ctx, 'cmd-nextep-second')}`"))
else:
if (await self.bot.server_is_claimed(ctx.guild.id)):
return await ctx.send(get_str(ctx, "music-autoleave-unreasonable-patron").format(f"`{max_val}`", "`{}autoleave never`".format(get_server_prefixes(ctx.bot, ctx.guild))), delete_after=20)
elif await is_patron(self.bot, ctx.author.id):
e = discord.Embed(description=get_str(
ctx, "music-autoleave-need-claim").format(f"`{get_server_prefixes(ctx.bot, ctx.guild)}claim`"))
else:
e = discord.Embed(description=get_str(ctx, "music-autoleave-unreasonable").format(
f"`{min_val}`", f"`{max_val}`") + "\n\n" + "**[Patreon](https://www.patreon.com/watora)**")
try:
await ctx.send(embed=e)
except discord.Forbidden:
await ctx.send(get_str(ctx, "music-autoleave-unreasonable").format(f"`{min_val}`", f"`{max_val}`"))
@autoleave.command(name="reset", aliases=["remove", "delete", "disable", "stop", "end", "off", "clean", "clear"])
async def autoleave_delete(self, ctx):
"""
{command_prefix}autoleave reset
{help}
"""
if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
raise commands.errors.CheckFailure
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
del settings.timer
await SettingsDB.get_instance().set_guild_settings(settings)
player = None
if ctx.guild.id in self.bot.lavalink.players.players:
player = await self.get_player(ctx.guild)
player.timer_value = def_time
await ctx.send(get_str(ctx, "music-autoleave-reset").format(f"`{def_time} {get_str(ctx, 'cmd-nextep-seconds')} (3 {get_str(ctx, 'cmd-nextep-minutes')})`"))
    @autoleave.command(name="never", aliases=["jamais", "infinity", "infinite"])
    async def autoleave_never(self, ctx):
        """
            {command_prefix}autoleave never
        {help}
        """
        # Requires manage-guild permission (or bot ownership).
        if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
            raise commands.errors.CheckFailure
        min_val = 0
        max_val = 1800
        # Disabling the timer entirely is reserved for claimed (patron) servers.
        if (await self.bot.server_is_claimed(ctx.guild.id)):
            settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
            # timer == False means "never auto-leave".
            settings.timer = False
            await SettingsDB.get_instance().set_guild_settings(settings)
            # Keep any live player in sync with the stored setting.
            if ctx.guild.id in self.bot.lavalink.players.players:
                player = await self.get_player(ctx.guild)
                player.timer_value = False
            await ctx.send(get_str(ctx, "music-autoleave-set").format(f"`{get_str(ctx, 'music-autoleave-never')}`"))
        else:
            # Patrons are told to claim the server; others are pointed at Patreon.
            if await is_patron(self.bot, ctx.author.id):
                e = discord.Embed(description=get_str(
                    ctx, "music-autoleave-need-claim").format(f"`{get_server_prefixes(ctx.bot, ctx.guild)}claim`"))
            else:
                e = discord.Embed(description=get_str(ctx, "music-autoleave-unreasonable").format(
                    f"`{min_val}`", f"`{max_val}`") + "\n\n" + "**[Patreon](https://www.patreon.com/watora)**")
            try:
                await ctx.send(embed=e)
            except discord.Forbidden:
                # Missing embed permission: fall back to plain text.
                await ctx.send(get_str(ctx, "music-autoleave-unreasonable").format(f"`{min_val}`", f"`{max_val}`"))
    @commands.group(aliases=["autonp", "autonpmsg", "npmsgchannel", "npchannel", "nowplayingmsg", "nowplayingmessage", "anp", "autonowplaying", "autonowplayingmessage", "autonpmessage"], invoke_without_command=True)
    async def npmsg(self, ctx, *, leftover_args):
        """
            {command_prefix}npmsg set [channel]
            {command_prefix}npmsg never
            {command_prefix}npmsg reset
            {command_prefix}npmsg now
        {help}
        """
        # Without a subcommand, treat the argument as a channel for "npmsg set".
        if not ctx.invoked_subcommand:
            await ctx.invoke(self.npmsg_set, new_channel=leftover_args)
    @npmsg.command(name="now", aliases=["queue", "display", "list", "liste", "info", "songlist"])
    async def npmsg_list(self, ctx):
        """
            {command_prefix}npmsg now
        {help}
        """
        # settings.channel: truthy id = fixed channel, None = default
        # behavior, False = disabled.
        settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
        if settings.channel:
            channel = ctx.guild.get_channel(settings.channel)
            if not channel:
                # The stored channel no longer exists: clean up and fall
                # back to the default behavior.
                settings.channel = None
                await SettingsDB.get_instance().set_guild_settings(settings)
                await ctx.send(get_str(ctx, "music-npmsg-now-default"))
            else:
                await ctx.send(get_str(ctx, "music-npmsg-now").format(f"`{channel}`"))
        elif settings.channel is None:
            await ctx.send(get_str(ctx, "music-npmsg-now-default"))
        else:
            # channel is False: now-playing messages are disabled.
            await ctx.send(get_str(ctx, "music-npmsg-now-never"))
    @npmsg.command(name="set", aliases=["add", "are", "config"])
    async def npmsg_set(self, ctx, *, new_channel):
        """
            {command_prefix}npmsg set [channel]
        {help}
        """
        # Requires manage-guild permission (or bot ownership).
        if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
            raise commands.errors.CheckFailure
        # Resolve the argument as either a channel id or a case-insensitive
        # text-channel name; IndexError from [0] means no match was found.
        try:
            channel = [c for c in ctx.guild.channels if (str(c.id) == new_channel or isinstance(
                new_channel, str) and c.name.lower() == new_channel.lower()) and isinstance(c, discord.TextChannel)][0]
            new_channel = channel.id
        except IndexError:
            return await ctx.send(get_str(ctx, "music-npmsg-not-found").format(f"`{new_channel}`"))
        settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
        settings.channel = new_channel
        # Keep any live player in sync with the stored setting.
        if ctx.guild.id in self.bot.lavalink.players.players:
            player = await self.get_player(ctx.guild)
            player.channel = new_channel
        await SettingsDB.get_instance().set_guild_settings(settings)
        await ctx.send(get_str(ctx, "music-npmsg-set").format(f"`{channel}`"))
@npmsg.command(name="reset", aliases=["remove", "delete", "enable", "stop", "end", "off", "clean", "clear"])
async def npmsg_delete(self, ctx):
"""
{command_prefix}npmsg reset
{help}
"""
if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
raise commands.errors.CheckFailure
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
settings.channel = None
if ctx.guild.id in self.bot.lavalink.players.players:
player = await self.get_player(ctx.guild)
player.channel = ctx.channel.id
await SettingsDB.get_instance().set_guild_settings(settings)
await ctx.send(get_str(ctx, "music-npmsg-reset"))
@npmsg.command(name="never", aliases=["0", "jamais", "disable", "nowhere", "no"])
async def npmsg_never(self, ctx):
"""
{command_prefix}npmsg never
{help}
"""
if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
raise commands.errors.CheckFailure
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
settings.channel = False
if ctx.guild.id in self.bot.lavalink.players.players:
player = await self.get_player(ctx.guild)
player.channel = None
await SettingsDB.get_instance().set_guild_settings(settings)
await ctx.send(get_str(ctx, "music-npmsg-set-disabled"))
@commands.guild_only()
@commands.cooldown(rate=1, per=1.5, type=commands.BucketType.user)
@commands.command(aliases=["lazyer", 'manager'])
async def lazy(self, ctx, *, text=None):
"""
{command_prefix}lazy
{help}
"""
if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
raise commands.errors.CheckFailure
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
settings.lazy = not settings.lazy
await SettingsDB.get_instance().set_guild_settings(settings)
await ctx.send(get_str(ctx, "cmd-lazy-status") + ' ' + get_str(ctx, "cmd-lazy-{}".format(['disabled', 'enabled'][settings.lazy])))
    @commands.group(aliases=["defaultvote", "threshold", "thresholdvote", "votethreshold", "defvot", "dvote", "setvote", "customvote"], invoke_without_command=True)
    async def defvote(self, ctx, *, leftover_args):
        """
            {command_prefix}defvote set [number]
            {command_prefix}defvote reset
            {command_prefix}defvote now
        {help}
        """
        # Group entry point: a bare number is shorthand for "defvote set".
        if not ctx.invoked_subcommand:
            if leftover_args.isdigit():
                return await ctx.invoke(self.defvote_set, new_volume=int(leftover_args))
            # Anything else: show the command help.
            await self.bot.send_cmd_help(ctx)
@defvote.command(name="now", aliases=["queue", "display", "list", "liste", "info", "songlist"])
async def defvote_list(self, ctx):
"""
{command_prefix}defvote now
{help}
"""
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
await ctx.send(get_str(ctx, "music-defvote-now").format(f"`{settings.vote}%`"))
    @defvote.command(name="set", aliases=["add", "are", "config"])
    async def defvote_set(self, ctx, *, new_volume: int):
        """
            {command_prefix}defvote set [number]
        {help}
        """
        # Requires manage-guild permission (or bot ownership).
        if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
            raise commands.errors.CheckFailure
        # Setting the default threshold is equivalent to a reset.
        if new_volume == def_vote:
            return await ctx.invoke(self.defvote_delete)
        # Threshold is a percentage; note the lower bound is 1 here,
        # unlike defvolume_set which allows 0 -- presumably intentional.
        if 1 <= new_volume <= 100:
            settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
            settings.vote = new_volume
            await SettingsDB.get_instance().set_guild_settings(settings)
            await ctx.send(get_str(ctx, "music-defvote-set").format(f"`{new_volume}%`"))
        else:
            await ctx.send(get_str(ctx, "music-volume-unreasonable-volume").format(new_volume), delete_after=20)
@defvote.command(name="reset", aliases=["remove", "delete", "stop", "end", "off", "clean", "clear"])
async def defvote_delete(self, ctx):
"""
{command_prefix}defvote reset
{help}
"""
if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
raise commands.errors.CheckFailure
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
del settings.vote
await SettingsDB.get_instance().set_guild_settings(settings)
await ctx.send(get_str(ctx, "music-defvote-reset").format(f"`{def_vote}%`"))
    @commands.group(aliases=["autoco", "ac", "autosong", "autosongs", "autojoin", "autoconnects", "songauto", "songsauto", "as"], invoke_without_command=True)
    async def autoconnect(self, ctx, channel: discord.VoiceChannel, *, query: str = None):
        """
            {command_prefix}autoconnect add [VoiceChannel] [query|url|radio|autoplaylist]
            {command_prefix}autoconnect remove [VoiceChannel]
            {command_prefix}autoconnect list
            {command_prefix}autoconnect reset
        {help}
        """
        # Without a subcommand, behave like "autoconnect set". The permission
        # check is inlined here because the subcommand relies on a
        # decorator-based check that does not run through ctx.invoke.
        if not ctx.invoked_subcommand:
            if not ctx.channel.permissions_for(ctx.author).manage_guild and not ctx.author.id == owner_id:
                raise commands.errors.CheckFailure
            return await ctx.invoke(self.autoconnect_set, channel=channel, query=query)
    @checks.has_permissions(manage_guild=True)
    @autoconnect.command(name="set", aliases=["add", "are", "config"])
    async def autoconnect_set(self, ctx, channel: discord.VoiceChannel, *, query: str = None):
        """
            {command_prefix}autoconnect add [VoiceChannel]
            {command_prefix}autoconnect add [VoiceChannel] [query|url]
            {command_prefix}autoconnect add [VoiceChannel] autoplaylist:[autoplaylist_name]
            {command_prefix}autoconnect add [VoiceChannel] radio:[RadioName]
            {command_prefix}autoconnect add [VoiceChannel] radio:[RadioName] | autoplaylist:[autoplaylist_name] | ...
        {help}
        """
        settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
        if query:
            # Validate each '|'-separated part; blank parts are dropped.
            # (Iteration is over a fresh list from split(), so removing
            # items from `parts` while looping is safe.)
            parts = query.split('|')
            for part in query.split('|'):
                if not part.strip():
                    parts.remove(part)
                elif 'radio:' in part:
                    # Radio name must be one of the known stations.
                    if part.replace('radio:', '').strip() not in self.list_radiolist:
                        return await ctx.send(get_str(ctx, "music-radio-invalid-syntax").format("`{}radio list`".format(get_server_prefixes(ctx.bot, ctx.guild))))
                elif 'autoplaylist:' in part:
                    # Referenced autoplaylist must already exist globally.
                    glob_settings = await SettingsDB.get_instance().get_glob_settings()
                    file_name = part.replace('autoplaylist:', '').strip()
                    if str(file_name.lower()) not in glob_settings.autoplaylists:
                        file_name = format_mentions(file_name)
                        return await ctx.send(get_str(ctx, "music-plstart-doesnt-exists").format(f"**{file_name}**", "`{}plnew`".format(get_server_prefixes(ctx.bot, ctx.guild))), delete_after=30)
            query = '|'.join(parts)
        # Persist the mapping and mirror it into the in-memory cache the
        # bot consults at voice-state-update time.
        settings.autosongs[str(channel.id)] = query
        if ctx.guild.id not in self.bot.autosongs_map:
            self.bot.autosongs_map[ctx.guild.id] = {}
        self.bot.autosongs_map[ctx.guild.id][str(channel.id)] = query
        await SettingsDB.get_instance().set_guild_settings(settings)
        await ctx.send(":ballot_box_with_check:")
@checks.has_permissions(manage_guild=True)
@autoconnect.command(name="remove", aliases=['-', 'd', 'r', 'delete'])
async def autoconnect_remove(self, ctx, *, channel: discord.VoiceChannel):
"""
{command_prefix}autoconnect remove [VoiceChannel]
{help}
"""
settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
if ctx.guild.id in self.bot.autosongs_map:
self.bot.autosongs_map[ctx.guild.id].pop(str(channel.id), None)
settings.autosongs.pop(str(channel.id), None)
await SettingsDB.get_instance().set_guild_settings(settings)
await ctx.send(":ballot_box_with_check:")
    @autoconnect.command(name="now", aliases=["queue", "display", "list", "liste", "info", "songlist"])
    async def autoconnect_list(self, ctx):
        """
            {command_prefix}autoconnect list
        {help}
        """
        settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
        desc = ''
        # One numbered line per configured voice channel; k is a
        # (channel_id, query) pair.
        for m, k in enumerate(settings.autosongs.items(), start=1):
            # NOTE(review): the markdown link target is the query truncated
            # to 40 chars, which yields a broken link for longer URLs --
            # confirm whether display text and target should be swapped.
            modified = (f' : **[{k[1]}]({k[1][:40]})**' if match_url(k[1])
                        else f' : `{k[1]}`') if k[1] else ''
            desc += '`{}.` **{}**{}\n'.format(m,
                                              ctx.guild.get_channel(int(k[0])), modified)
        if desc:
            # Embed descriptions are capped, hence the [:1900] slice.
            embed = discord.Embed(description=desc[:1900])
            embed.set_author(name='Autoconnect list',
                             icon_url=ctx.guild.icon_url)
            await ctx.send(embed=embed)
        else:
            await ctx.send(get_str(ctx, "music-autoconnect-list-empty").format("`{}autoconnect add`".format(get_server_prefixes(ctx.bot, ctx.guild))))
    @checks.has_permissions(manage_guild=True)
    @autoconnect.command(name="reset", aliases=["off", "stop", "rien", "clear", "clean"])
    async def autoconnect_reset(self, ctx):
        """
            {command_prefix}autoconnect reset
        {help}
        """
        # Wipe both the persisted mapping and the in-memory cache.
        settings = await SettingsDB.get_instance().get_guild_settings(ctx.guild.id)
        settings.autosongs = {}
        self.bot.autosongs_map.pop(ctx.guild.id, None)
        await SettingsDB.get_instance().set_guild_settings(settings)
        await ctx.send(":ballot_box_with_check:")
@commands.group(aliases=["confighost", "ch", "hc", "hg", "gh"], invoke_without_command=True)
async def hostconfig(self, ctx, ip: str, password: str = "<PASSWORD>", port: int = 2333):
"""
{command_prefix}hostconfig set [ip]
{command_prefix}hostconfig set [ip] [password]
{command_prefix}hostconfig set [ip] [password] [port]
{command_prefix}hostconfig remove
{command_prefix}hostconfig now
{command_prefix}hostconfig switch
{command_prefix}hostconfig link
Allows to manage your credentials for your node to host yourself your music.
Please read the documentation [**here**](https://docs.watora.xyz/features/self-hosting).
"""
if not ctx.invoked_subcommand: # TODO: Move all those commands to another node
return await | |
import pandas as pd
import numpy as np
import copy
from sklearn.naive_bayes import GaussianNB
from sklearn.model_selection import cross_val_score, train_test_split, GridSearchCV
from sklearn.feature_selection import mutual_info_classif, SelectKBest
import matplotlib.pyplot as plt
from sklearn import svm
from sklearn.neighbors import KNeighborsClassifier
from datetime import datetime
from os import listdir
from os.path import isfile, join
import sys
import math
from sklearn.metrics import accuracy_score, f1_score
import re
from Extractor import get_word_length_matrix, get_word_length_matrix_with_interval, get_average_word_length, \
get_word_length_matrix_with_margin, get_char_count, get_digits, get_sum_digits, get_word_n_grams, \
get_char_affix_n_grams, get_char_word_n_grams, get_char_punct_n_grams, get_pos_tags_n_grams, get_bow_matrix, \
get_yules_k, get_special_char_matrix, get_function_words, get_pos_tags, get_sentence_end_start, \
get_flesch_reading_ease_vector, get_sentence_count, get_word_count
from sklearn.preprocessing import StandardScaler, Normalizer
# Chapter 7.1.1. method to trim a feature with low sum e.g. ngrams lower then 5
def trim_df_sum_feature(par_df, par_n):
    """Drop every feature column whose total count over all rows is below par_n.

    NaNs are treated as 0.  If the threshold would eliminate every column it is
    lowered step by step until at least one column survives, so the returned
    frame is never empty.
    """
    filled = par_df.fillna(value=0)
    names = filled.columns.to_numpy()
    values = filled.to_numpy(dtype=float)
    col_sums = values.sum(axis=0)
    # Relax the threshold until at least one column would be kept.
    while len(filled.columns) - len(np.where(col_sums < par_n)[0]) == 0:
        par_n -= 1
    drop_idx = np.where(col_sums < par_n)
    names = np.delete(names, drop_idx)
    values = np.delete(values, drop_idx, axis=1)
    return pd.DataFrame(data=values, columns=names)
# Chapter 7.1.1. method to trim feature with low occurrence over all article
def trim_df_by_occurrence(par_df, n):
    """Keep only the feature columns that occur (are non-null) in more than n rows.

    Parameters
    ----------
    par_df : pandas.DataFrame
        Raw feature matrix where a missing occurrence is NaN.
    n : int
        Minimum number of articles a feature must occur in (exclusive).

    Returns
    -------
    pandas.DataFrame
        New frame containing only the surviving columns (row index preserved;
        an entirely fresh empty frame if no column survives, as before).
    """
    # Summing the boolean notnull mask counts the articles each feature occurs in.
    occurrences = par_df.notnull().sum()
    filtered_bow = pd.DataFrame()
    # Iterate label-wise: the old `series[i]` positional access is deprecated
    # and removed for integer positions in modern pandas.
    for column, count in occurrences.items():
        if count > n:
            filtered_bow[column] = par_df[column]
    return filtered_bow
# Chapter 7.1.1. Process of filtering the data with low occurrence and save the filtered features in a new file
def filter_low_occurrence():
    """Filter rarely occurring features from every raw feature CSV and persist the results.

    Reads the raw CSVs (first 2500 rows), removes low-occurrence columns via
    trim_df_by_occurrence / trim_df_sum_feature and writes the reduced frames
    into daten/2_filter_low_occurrence/.
    """
    df_bow = pd.read_csv("daten/raw/bow.csv", sep=',', encoding="utf-8", nrows=2500)
    print(f"BOW before: {len(df_bow.columns)}")
    df_bow = trim_df_by_occurrence(df_bow, 1)
    print(f"BOW after: {len(df_bow.columns)}")
    df_bow.to_csv(f"daten/2_filter_low_occurrence/bow.csv", index=False)
    # Word n-grams (2..6): drop features occurring in at most one article.
    for n in range(2, 7):
        word_n_gram = pd.read_csv(f"daten/raw/word_{n}_gram.csv", sep=',', encoding="utf-8", nrows=2500)
        print(f"Word_{n}_gram before: {len(word_n_gram.columns)}")
        word_n_gram = trim_df_by_occurrence(word_n_gram, 1)
        print(f"Word_{n}_gram after: {len(word_n_gram.columns)}")
        word_n_gram.to_csv(f"daten/2_filter_low_occurrence/word_{n}_gram.csv", index=False)
    # Char n-gram families (2..5) share the same trim-by-sum treatment.
    for n in range(2, 6):
        for kind in ("affix", "word", "punct"):
            df_char = pd.read_csv(f"daten/trimmed_occ_greater_one/char_{kind}_{n}_gram_1.csv", sep=',',
                                  encoding="utf-8", nrows=2500)
            print(f"char_{kind}_{n}_gram before: {len(df_char.columns)}")
            df_char = trim_df_sum_feature(df_char, 5)
            print(f"char_{kind}_{n}_gram after: {len(df_char.columns)}")
            df_char.to_csv(f"daten/2_filter_low_occurrence/char_{kind}_{n}_gram.csv", index=False)
    df_f_word = pd.read_csv("daten/raw/function_words.csv", sep=',', encoding="utf-8", nrows=2500)
    print(f"Function Words before: {len(df_f_word.columns)}")
    df_f_word = trim_df_by_occurrence(df_f_word, 1)
    print(f"Function Words after: {len(df_f_word.columns)}")
    df_f_word.to_csv(f"daten/2_filter_low_occurrence/function_words.csv", index=False)
    # POS-tag n-grams (2..5).
    for n in range(2, 6):
        pos_tags_n_gram = pd.read_csv(f"daten/raw/pos_tag_{n}_gram.csv", sep=',', encoding="utf-8", nrows=2500)
        print(f"pos_tag_{n}_gram before: {len(pos_tags_n_gram.columns)}")
        pos_tags_n_gram = trim_df_by_occurrence(pos_tags_n_gram, 1)
        print(f"pos_tag_{n}_gram after: {len(pos_tags_n_gram.columns)}")
        pos_tags_n_gram.to_csv(f"daten/2_filter_low_occurrence/pos_tag_{n}_gram.csv", index=False)
# Chapter 7.1.2. method to filter words based on document frequency
def trim_df_by_doc_freq(par_df, par_doc_freq):
    """Keep only the columns whose document frequency is strictly below par_doc_freq.

    Parameters
    ----------
    par_df : pandas.DataFrame
        Feature matrix where a missing occurrence is NaN.
    par_doc_freq : float
        Maximum allowed share of documents (0..1) a feature may occur in.

    Returns
    -------
    pandas.DataFrame
        New frame with only the low-document-frequency columns.
    """
    # Share of documents each feature occurs in (non-null count / row count).
    doc_freq = par_df.notnull().sum() / len(par_df)
    filtered_bow = pd.DataFrame()
    # Label-wise iteration instead of the deprecated positional `series[i]`.
    for column, freq in doc_freq.items():
        if freq < par_doc_freq:
            filtered_bow[column] = par_df[column]
    return filtered_bow
# Chapter 7.1.2 Process of filtering the data with high document frequency and save the filtered features in a new file
def filter_high_document_frequency():
    """Remove features with document frequency >= 0.5 and save the reduced CSVs.

    Input comes from daten/2_filter_low_occurrence/, output goes to
    daten/3_fiter_high_frequency/ (directory name kept as-is from the data set).
    """
    # BOW and function words get identical processing; only names/labels differ.
    for file_stem, label in (("bow", "BOW"), ("function_words", "Function Word")):
        frame = pd.read_csv(f"daten/2_filter_low_occurrence/{file_stem}.csv", sep=',', encoding="utf-8", nrows=2500)
        print(f"{label} before: {len(frame.columns)}")
        frame = trim_df_by_doc_freq(frame, 0.5)
        print(f"{label} after: {len(frame.columns)}")
        frame.to_csv(f"daten/3_fiter_high_frequency/{file_stem}.csv", index=False)
    # Word n-grams (2..6).
    for n in range(2, 7):
        frame = pd.read_csv(f"daten/2_filter_low_occurrence/word_{n}_gram.csv", sep=',', encoding="utf-8",
                            nrows=2500)
        print(f"Word_{n}_gram before: {len(frame.columns)}")
        frame = trim_df_by_doc_freq(frame, 0.5)
        print(f"Word_{n}_gram after: {len(frame.columns)}")
        frame.to_csv(f"daten/3_fiter_high_frequency/word_{n}_gram.csv", index=False)
# Chapter 7.1.4. get the relative frequency based on a length metric (char, word, sentence)
def get_rel_frequency(par_df_count, par_df_len_metric_vector):
    """Divide each row of absolute counts by that article's length metric.

    Parameters
    ----------
    par_df_count : pandas.DataFrame
        Absolute feature counts, one row per article.
    par_df_len_metric_vector : pandas.Series
        Length metric per article; assumed to share the row index of
        par_df_count (both originate from read_csv with a default index).

    Returns
    -------
    pandas.DataFrame
        Element-wise relative frequencies.
    """
    # Row-wise division aligned on the index. Replaces the old per-row
    # DataFrame.append loop, which was O(n^2) and relied on an API removed
    # in pandas 2.0.
    return par_df_count.div(par_df_len_metric_vector, axis=0)
# Chapter 7.1.4. whole process of the chapter. Get the individual relative frequency of a feature and compare
# the correlation to the article length from the absolute and relative feature, save the feature with the estimated
# relative frequency in a new file
def individual_relative_frequency():
    """Build relative-frequency feature files and compare abs/rel correlation to article length."""
    df_len_metrics = pd.read_csv(f"daten/1_raw/length_metrics.csv", sep=',', encoding="utf-8", nrows=2500)
    # One folder of feature CSVs per length metric.
    for metric in ('word_count', 'char_count', 'sentence_count'):
        folder = f'daten/4_relative_frequency/{metric}'
        file_names = [entry for entry in listdir(folder) if isfile(join(folder, entry))]
        for file_name in file_names:
            absolute = pd.read_csv(f"daten/4_relative_frequency/{metric}/{file_name}",
                                   sep=',', encoding="utf-8", nrows=2500).fillna(value=0)
            relative = get_rel_frequency(absolute, df_len_metrics[metric])
            # Persist the relative-frequency variant before adding helper columns.
            relative.to_csv(
                f"daten/4_relative_frequency/{file_name.split('.')[0]}"
                f"_rel.csv", index=False)
            # Correlation is always measured against the word count.
            absolute['word_count'] = df_len_metrics['word_count']
            relative['word_count'] = df_len_metrics['word_count']
            # 60/40 split; correlations are computed on the 60% train part only.
            abs_train, _abs_test = train_test_split(absolute, test_size=0.4, random_state=42)
            rel_train, _rel_test = train_test_split(relative, test_size=0.4, random_state=42)
            # Mean correlation of all features with the word count (word_count column itself excluded).
            print(f"{file_name}_abs: {abs_train.corr(method='pearson', min_periods=1)['word_count'].iloc[:-1].mean()}")
            print(f"{file_name}_rel: {rel_train.corr(method='pearson', min_periods=1)['word_count'].iloc[:-1].mean()}")
# Chapter 7.2.1 First step of the iterative filter: Rank the features
def sort_features_by_score(par_x, par_y, par_select_metric):
    """Score every feature with the given selection metric and rank them descending.

    Returns a DataFrame with columns 'feature_names' and 'scores',
    sorted so the best-scoring feature comes first.
    """
    scorer = SelectKBest(par_select_metric, k='all')
    scorer.fit(par_x, par_y)
    ranking = pd.DataFrame(dict(feature_names=par_x.columns, scores=scorer.scores_))
    # Highest score first.
    return ranking.sort_values('scores', ascending=False)
# Chapter 7.2.1 method to get the best percentile for GNB
def get_best_percentile_gnb(par_x_train, par_y_train, par_iter, par_df_sorted_features, step):
    """Search the best top-percentile of ranked features for a GaussianNB classifier.

    Walks through growing feature percentiles (step, 2*step, ... up to par_iter),
    cross-validates GNB accuracy at each point, stops early when the accuracy
    curve flattens or drops, then trades up to ~1% accuracy for a noticeably
    smaller feature set.

    Returns a tuple (best_perc, best_perc_round, result_list):
    the chosen percentile, its round index, and all CV accuracies collected.
    """
    result_list = []
    gnb = GaussianNB()
    best_perc_round = par_iter - 1  # If no other point is found, highest amount of features (-1 starts to count from 0)
    # define the splits for the hyperparameter tuning, cannot be greater than the number of members in each class
    if len(par_y_train.index) / len(np.unique(par_y_train.values).tolist()) < 10:
        cv = int(len(par_y_train.index) / len(np.unique(par_y_train.values).tolist())) - 1
    else:
        cv = 10
    for perc_features in np.arange(step, par_iter + 1, step):
        start_time = datetime.now()
        # 1%*i best features to keep and create new dataframe with those only
        number_of_features = int(perc_features * (len(par_x_train.columns) / 100))
        # minimum one feature
        number_of_features = 1 if number_of_features < 1 else number_of_features
        feature_list = par_df_sorted_features['feature_names'][: number_of_features].tolist()
        x_new_training = copy.deepcopy(par_x_train[feature_list])
        # GNB Training: mean cross-validated accuracy for this percentile
        result_list.append(
            cross_val_score(gnb, x_new_training, par_y_train, cv=cv, n_jobs=-1, scoring='accuracy').mean())
        # Compares the accuracy with the 5 following points => needs 6 points minimum
        if len(result_list) > 5:
            # list starts to count at 0, subtract one more from len
            # point-to-point deltas of the last 5 consecutive pairs
            difference_list_p2p = [result_list[p + 1] - result_list[p] for p in
                                   range(len(result_list) - 6, len(result_list) - 1)]
            # deltas of the last 5 points relative to the 6th-from-last point
            difference_list_1p = [result_list[p + 1] - result_list[len(result_list) - 6] for p in
                                  range(len(result_list) - 6, len(result_list) - 1)]
            # Find the best percent if 5 following points were lower then the point before or had a deviation <= 0.5%
            # or all points are 2% lower then the first point
            if all(point_y <= 0 for point_y in difference_list_p2p) or \
                    all(-0.005 <= point_y <= 0.005 for point_y in difference_list_1p) or \
                    all(point_y < -0.02 for point_y in difference_list_1p):
                # the best perc is the results - 6 point in the result list
                best_perc_round = len(result_list) - 6
                break
        # Console Output
        print(f"GNB Round {perc_features / step}: {datetime.now() - start_time}")
    # Optimization of the best percent
    # If any point with a lower percent is higher, it is the new optimum
    if any(point_y > result_list[best_perc_round] for point_y in result_list[:len(result_list) - 5]):
        best_perc_round = result_list.index(max(result_list[:len(result_list) - 5]))
    # Tradeoff of 0.5% accuracy for lesser percent of features
    # As long as there is a lesser maximum with 1% lesser accuracy, which has a minimum of 2% less percent features
    better_perc_exists = True
    best_accuracy_tradeoff = result_list[best_perc_round] - 0.01
    # If there are no 5% left for the tradeoff there is no better perc
    if best_perc_round - int(2 / step) < 0:
        better_perc_exists = False
    while better_perc_exists:
        earliest_pos = best_perc_round - int(2 / step)
        # if its less then 0 it starts to count backside
        earliest_pos = 0 if earliest_pos < 0 else earliest_pos
        if any(point_y > best_accuracy_tradeoff for point_y in result_list[:earliest_pos]):
            best_perc_round = result_list.index(max(result_list[:earliest_pos]))
        else:
            better_perc_exists = False
    # the best percent of the features is calculated by the percent start plus the rounds * step
    best_perc = step + step * best_perc_round
    print(best_perc)
    return best_perc, best_perc_round, result_list
# Chapter 7.2.1 method to get the best percentile for SVC
def get_best_percentile_svc(par_x_train, par_y_train, par_iter, par_df_sorted_features, step):
result_list = []
# Parameter for SVC
param_grid_svc = {'C': (0.001, 0.01, 0.1, 1, 10),
'kernel': ('linear', 'poly', 'rbf'),
'gamma': ('scale', 'auto')}
# define the splits for the hyperparameter tuning, cannot be greater than the number of members in each class
if len(par_y_train.index) / len(np.unique(par_y_train.values).tolist()) < 10:
cv = int(len(par_y_train.index) / len(np.unique(par_y_train.values).tolist())) - 1
else:
cv = 10
best_perc_round = par_iter - 1 # If no other point is found, highest | |
255, 255);")
self.title_senha_16.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_senha_16.setObjectName("title_senha_16")
self.title_email_16 = QtWidgets.QLabel(self.registro_16)
self.title_email_16.setGeometry(QtCore.QRect(0, 90, 80, 30))
self.title_email_16.setMinimumSize(QtCore.QSize(0, 30))
self.title_email_16.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_email_16.setFont(font)
self.title_email_16.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_email_16.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_email_16.setObjectName("title_email_16")
self.text_site_16 = QtWidgets.QLabel(self.registro_16)
self.text_site_16.setGeometry(QtCore.QRect(80, 30, 441, 30))
self.text_site_16.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_site_16.setText("")
self.text_site_16.setAlignment(QtCore.Qt.AlignCenter)
self.text_site_16.setObjectName("text_site_16")
self.text_senha_16 = QtWidgets.QLabel(self.registro_16)
self.text_senha_16.setGeometry(QtCore.QRect(80, 60, 441, 30))
self.text_senha_16.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_senha_16.setText("")
self.text_senha_16.setAlignment(QtCore.Qt.AlignCenter)
self.text_senha_16.setObjectName("text_senha_16")
self.text_email_16 = QtWidgets.QLabel(self.registro_16)
self.text_email_16.setGeometry(QtCore.QRect(80, 90, 441, 30))
self.text_email_16.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_email_16.setText("")
self.text_email_16.setAlignment(QtCore.Qt.AlignCenter)
self.text_email_16.setObjectName("text_email_16")
self.button_copiar_site_16 = QtWidgets.QPushButton(self.registro_16)
self.button_copiar_site_16.setGeometry(QtCore.QRect(580, 60, 32, 32))
self.button_copiar_site_16.setMinimumSize(QtCore.QSize(0, 32))
self.button_copiar_site_16.setMaximumSize(QtCore.QSize(32, 32))
self.button_copiar_site_16.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_copiar_site_16.setStyleSheet("border-radius:1px")
self.button_copiar_site_16.setText("")
self.button_copiar_site_16.setObjectName("button_copiar_site_16")
self.button_deletar_registro_16 = QtWidgets.QPushButton(self.registro_16)
self.button_deletar_registro_16.setGeometry(QtCore.QRect(200, 2, 221, 27))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.button_deletar_registro_16.setFont(font)
self.button_deletar_registro_16.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_deletar_registro_16.setStyleSheet("QPushButton{\n"
" \n"
" color:rgb(255, 56, 56);\n"
" border-radius:10px;\n"
" background-color: rgb(72, 72, 72);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" color:rgb(255, 152, 152);\n"
" background-color: rgb(255, 56, 56);\n"
"}")
self.button_deletar_registro_16.setObjectName("button_deletar_registro_16")
self.qual_site_16 = QtWidgets.QLabel(self.registro_16)
self.qual_site_16.setGeometry(QtCore.QRect(522, 29, 32, 32))
self.qual_site_16.setText("")
self.qual_site_16.setObjectName("qual_site_16")
self.verticalLayout_3.addWidget(self.registro_16)
self.registro_17 = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.registro_17.setMinimumSize(QtCore.QSize(621, 121))
self.registro_17.setMaximumSize(QtCore.QSize(100000, 10000))
self.registro_17.setStyleSheet(" background-color: rgb(31, 31, 31);\n"
"")
self.registro_17.setFrameShape(QtWidgets.QFrame.NoFrame)
self.registro_17.setFrameShadow(QtWidgets.QFrame.Raised)
self.registro_17.setObjectName("registro_17")
self.title_site_17 = QtWidgets.QLabel(self.registro_17)
self.title_site_17.setGeometry(QtCore.QRect(0, 30, 80, 30))
self.title_site_17.setMinimumSize(QtCore.QSize(0, 30))
self.title_site_17.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_site_17.setFont(font)
self.title_site_17.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_site_17.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_site_17.setObjectName("title_site_17")
self.title_senha_17 = QtWidgets.QLabel(self.registro_17)
self.title_senha_17.setGeometry(QtCore.QRect(0, 60, 80, 30))
self.title_senha_17.setMinimumSize(QtCore.QSize(0, 30))
self.title_senha_17.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_senha_17.setFont(font)
self.title_senha_17.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_senha_17.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_senha_17.setObjectName("title_senha_17")
self.title_email_17 = QtWidgets.QLabel(self.registro_17)
self.title_email_17.setGeometry(QtCore.QRect(0, 90, 80, 30))
self.title_email_17.setMinimumSize(QtCore.QSize(0, 30))
self.title_email_17.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_email_17.setFont(font)
self.title_email_17.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_email_17.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_email_17.setObjectName("title_email_17")
self.text_site_17 = QtWidgets.QLabel(self.registro_17)
self.text_site_17.setGeometry(QtCore.QRect(80, 30, 441, 30))
self.text_site_17.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_site_17.setText("")
self.text_site_17.setAlignment(QtCore.Qt.AlignCenter)
self.text_site_17.setObjectName("text_site_17")
self.text_senha_17 = QtWidgets.QLabel(self.registro_17)
self.text_senha_17.setGeometry(QtCore.QRect(80, 60, 441, 30))
self.text_senha_17.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_senha_17.setText("")
self.text_senha_17.setAlignment(QtCore.Qt.AlignCenter)
self.text_senha_17.setObjectName("text_senha_17")
self.text_email_17 = QtWidgets.QLabel(self.registro_17)
self.text_email_17.setGeometry(QtCore.QRect(80, 90, 441, 30))
self.text_email_17.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_email_17.setText("")
self.text_email_17.setAlignment(QtCore.Qt.AlignCenter)
self.text_email_17.setObjectName("text_email_17")
self.button_copiar_site_17 = QtWidgets.QPushButton(self.registro_17)
self.button_copiar_site_17.setGeometry(QtCore.QRect(580, 60, 32, 32))
self.button_copiar_site_17.setMinimumSize(QtCore.QSize(0, 32))
self.button_copiar_site_17.setMaximumSize(QtCore.QSize(32, 32))
self.button_copiar_site_17.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_copiar_site_17.setStyleSheet("border-radius:1px")
self.button_copiar_site_17.setText("")
self.button_copiar_site_17.setObjectName("button_copiar_site_17")
self.button_deletar_registro_17 = QtWidgets.QPushButton(self.registro_17)
self.button_deletar_registro_17.setGeometry(QtCore.QRect(200, 2, 221, 27))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.button_deletar_registro_17.setFont(font)
self.button_deletar_registro_17.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_deletar_registro_17.setStyleSheet("QPushButton{\n"
" \n"
" color:rgb(255, 56, 56);\n"
" border-radius:10px;\n"
" background-color: rgb(72, 72, 72);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" color:rgb(255, 152, 152);\n"
" background-color: rgb(255, 56, 56);\n"
"}")
self.button_deletar_registro_17.setObjectName("button_deletar_registro_17")
self.qual_site_17 = QtWidgets.QLabel(self.registro_17)
self.qual_site_17.setGeometry(QtCore.QRect(522, 29, 32, 32))
self.qual_site_17.setText("")
self.qual_site_17.setObjectName("qual_site_17")
self.verticalLayout_3.addWidget(self.registro_17)
self.registro_18 = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.registro_18.setMinimumSize(QtCore.QSize(621, 121))
self.registro_18.setMaximumSize(QtCore.QSize(100000, 10000))
self.registro_18.setStyleSheet(" background-color: rgb(31, 31, 31);\n"
"")
self.registro_18.setFrameShape(QtWidgets.QFrame.NoFrame)
self.registro_18.setFrameShadow(QtWidgets.QFrame.Raised)
self.registro_18.setObjectName("registro_18")
self.title_site_18 = QtWidgets.QLabel(self.registro_18)
self.title_site_18.setGeometry(QtCore.QRect(0, 30, 80, 30))
self.title_site_18.setMinimumSize(QtCore.QSize(0, 30))
self.title_site_18.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_site_18.setFont(font)
self.title_site_18.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_site_18.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_site_18.setObjectName("title_site_18")
self.title_senha_18 = QtWidgets.QLabel(self.registro_18)
self.title_senha_18.setGeometry(QtCore.QRect(0, 60, 80, 30))
self.title_senha_18.setMinimumSize(QtCore.QSize(0, 30))
self.title_senha_18.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_senha_18.setFont(font)
self.title_senha_18.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_senha_18.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_senha_18.setObjectName("title_senha_18")
self.title_email_18 = QtWidgets.QLabel(self.registro_18)
self.title_email_18.setGeometry(QtCore.QRect(0, 90, 80, 30))
self.title_email_18.setMinimumSize(QtCore.QSize(0, 30))
self.title_email_18.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_email_18.setFont(font)
self.title_email_18.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_email_18.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_email_18.setObjectName("title_email_18")
self.text_site_18 = QtWidgets.QLabel(self.registro_18)
self.text_site_18.setGeometry(QtCore.QRect(80, 30, 441, 30))
self.text_site_18.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_site_18.setText("")
self.text_site_18.setAlignment(QtCore.Qt.AlignCenter)
self.text_site_18.setObjectName("text_site_18")
self.text_senha_18 = QtWidgets.QLabel(self.registro_18)
self.text_senha_18.setGeometry(QtCore.QRect(80, 60, 441, 30))
self.text_senha_18.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_senha_18.setText("")
self.text_senha_18.setAlignment(QtCore.Qt.AlignCenter)
self.text_senha_18.setObjectName("text_senha_18")
self.text_email_18 = QtWidgets.QLabel(self.registro_18)
self.text_email_18.setGeometry(QtCore.QRect(80, 90, 441, 30))
self.text_email_18.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_email_18.setText("")
self.text_email_18.setAlignment(QtCore.Qt.AlignCenter)
self.text_email_18.setObjectName("text_email_18")
self.button_copiar_site_18 = QtWidgets.QPushButton(self.registro_18)
self.button_copiar_site_18.setGeometry(QtCore.QRect(580, 60, 32, 32))
self.button_copiar_site_18.setMinimumSize(QtCore.QSize(0, 32))
self.button_copiar_site_18.setMaximumSize(QtCore.QSize(32, 32))
self.button_copiar_site_18.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_copiar_site_18.setStyleSheet("border-radius:1px")
self.button_copiar_site_18.setText("")
self.button_copiar_site_18.setObjectName("button_copiar_site_18")
self.button_deletar_registro_18 = QtWidgets.QPushButton(self.registro_18)
self.button_deletar_registro_18.setGeometry(QtCore.QRect(200, 2, 221, 27))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.button_deletar_registro_18.setFont(font)
self.button_deletar_registro_18.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_deletar_registro_18.setStyleSheet("QPushButton{\n"
" \n"
" color:rgb(255, 56, 56);\n"
" border-radius:10px;\n"
" background-color: rgb(72, 72, 72);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" color:rgb(255, 152, 152);\n"
" background-color: rgb(255, 56, 56);\n"
"}")
self.button_deletar_registro_18.setObjectName("button_deletar_registro_18")
self.qual_site_18 = QtWidgets.QLabel(self.registro_18)
self.qual_site_18.setGeometry(QtCore.QRect(522, 29, 32, 32))
self.qual_site_18.setText("")
self.qual_site_18.setObjectName("qual_site_18")
self.verticalLayout_3.addWidget(self.registro_18)
self.registro_19 = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.registro_19.setMinimumSize(QtCore.QSize(621, 121))
self.registro_19.setMaximumSize(QtCore.QSize(100000, 10000))
self.registro_19.setStyleSheet(" background-color: rgb(31, 31, 31);\n"
"")
self.registro_19.setFrameShape(QtWidgets.QFrame.NoFrame)
self.registro_19.setFrameShadow(QtWidgets.QFrame.Raised)
self.registro_19.setObjectName("registro_19")
self.title_site_19 = QtWidgets.QLabel(self.registro_19)
self.title_site_19.setGeometry(QtCore.QRect(0, 30, 80, 30))
self.title_site_19.setMinimumSize(QtCore.QSize(0, 30))
self.title_site_19.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_site_19.setFont(font)
self.title_site_19.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_site_19.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_site_19.setObjectName("title_site_19")
self.title_senha_19 = QtWidgets.QLabel(self.registro_19)
self.title_senha_19.setGeometry(QtCore.QRect(0, 60, 80, 30))
self.title_senha_19.setMinimumSize(QtCore.QSize(0, 30))
self.title_senha_19.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_senha_19.setFont(font)
self.title_senha_19.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_senha_19.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_senha_19.setObjectName("title_senha_19")
self.title_email_19 = QtWidgets.QLabel(self.registro_19)
self.title_email_19.setGeometry(QtCore.QRect(0, 90, 80, 30))
self.title_email_19.setMinimumSize(QtCore.QSize(0, 30))
self.title_email_19.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_email_19.setFont(font)
self.title_email_19.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_email_19.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_email_19.setObjectName("title_email_19")
self.text_site_19 = QtWidgets.QLabel(self.registro_19)
self.text_site_19.setGeometry(QtCore.QRect(80, 30, 441, 30))
self.text_site_19.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_site_19.setText("")
self.text_site_19.setAlignment(QtCore.Qt.AlignCenter)
self.text_site_19.setObjectName("text_site_19")
self.text_senha_19 = QtWidgets.QLabel(self.registro_19)
self.text_senha_19.setGeometry(QtCore.QRect(80, 60, 441, 30))
self.text_senha_19.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_senha_19.setText("")
self.text_senha_19.setAlignment(QtCore.Qt.AlignCenter)
self.text_senha_19.setObjectName("text_senha_19")
self.text_email_19 = QtWidgets.QLabel(self.registro_19)
self.text_email_19.setGeometry(QtCore.QRect(80, 90, 441, 30))
self.text_email_19.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_email_19.setText("")
self.text_email_19.setAlignment(QtCore.Qt.AlignCenter)
self.text_email_19.setObjectName("text_email_19")
self.button_copiar_site_19 = QtWidgets.QPushButton(self.registro_19)
self.button_copiar_site_19.setGeometry(QtCore.QRect(580, 60, 32, 32))
self.button_copiar_site_19.setMinimumSize(QtCore.QSize(0, 32))
self.button_copiar_site_19.setMaximumSize(QtCore.QSize(32, 32))
self.button_copiar_site_19.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_copiar_site_19.setStyleSheet("border-radius:1px")
self.button_copiar_site_19.setText("")
self.button_copiar_site_19.setObjectName("button_copiar_site_19")
self.button_deletar_registro_19 = QtWidgets.QPushButton(self.registro_19)
self.button_deletar_registro_19.setGeometry(QtCore.QRect(200, 2, 221, 27))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.button_deletar_registro_19.setFont(font)
self.button_deletar_registro_19.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_deletar_registro_19.setStyleSheet("QPushButton{\n"
" \n"
" color:rgb(255, 56, 56);\n"
" border-radius:10px;\n"
" background-color: rgb(72, 72, 72);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" color:rgb(255, 152, 152);\n"
" background-color: rgb(255, 56, 56);\n"
"}")
self.button_deletar_registro_19.setObjectName("button_deletar_registro_19")
self.qual_site_19 = QtWidgets.QLabel(self.registro_19)
self.qual_site_19.setGeometry(QtCore.QRect(522, 29, 32, 32))
self.qual_site_19.setText("")
self.qual_site_19.setObjectName("qual_site_19")
self.verticalLayout_3.addWidget(self.registro_19)
self.registro_20 = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.registro_20.setMinimumSize(QtCore.QSize(621, 121))
self.registro_20.setMaximumSize(QtCore.QSize(100000, 10000))
self.registro_20.setStyleSheet(" background-color: rgb(31, 31, 31);\n"
"")
self.registro_20.setFrameShape(QtWidgets.QFrame.NoFrame)
self.registro_20.setFrameShadow(QtWidgets.QFrame.Raised)
self.registro_20.setObjectName("registro_20")
self.title_site_20 = QtWidgets.QLabel(self.registro_20)
self.title_site_20.setGeometry(QtCore.QRect(0, 30, 80, 30))
self.title_site_20.setMinimumSize(QtCore.QSize(0, 30))
self.title_site_20.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_site_20.setFont(font)
self.title_site_20.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_site_20.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_site_20.setObjectName("title_site_20")
self.title_senha_20 = QtWidgets.QLabel(self.registro_20)
self.title_senha_20.setGeometry(QtCore.QRect(0, 60, 80, 30))
self.title_senha_20.setMinimumSize(QtCore.QSize(0, 30))
self.title_senha_20.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_senha_20.setFont(font)
self.title_senha_20.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_senha_20.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_senha_20.setObjectName("title_senha_20")
self.title_email_20 = QtWidgets.QLabel(self.registro_20)
self.title_email_20.setGeometry(QtCore.QRect(0, 90, 80, 30))
self.title_email_20.setMinimumSize(QtCore.QSize(0, 30))
self.title_email_20.setMaximumSize(QtCore.QSize(10000, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setWeight(10)
self.title_email_20.setFont(font)
self.title_email_20.setStyleSheet("font: 87 14pt \"Segoe UI Black\";\n"
"color: rgb(255, 255, 255);")
self.title_email_20.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.title_email_20.setObjectName("title_email_20")
self.text_site_20 = QtWidgets.QLabel(self.registro_20)
self.text_site_20.setGeometry(QtCore.QRect(80, 30, 441, 30))
self.text_site_20.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_site_20.setText("")
self.text_site_20.setAlignment(QtCore.Qt.AlignCenter)
self.text_site_20.setObjectName("text_site_20")
self.text_senha_20 = QtWidgets.QLabel(self.registro_20)
self.text_senha_20.setGeometry(QtCore.QRect(80, 60, 441, 30))
self.text_senha_20.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_senha_20.setText("")
self.text_senha_20.setAlignment(QtCore.Qt.AlignCenter)
self.text_senha_20.setObjectName("text_senha_20")
self.text_email_20 = QtWidgets.QLabel(self.registro_20)
self.text_email_20.setGeometry(QtCore.QRect(80, 90, 441, 30))
self.text_email_20.setStyleSheet("font: 87 12pt \"Segoe UI Black\";\n"
"color: rgb(240, 240, 240);")
self.text_email_20.setText("")
self.text_email_20.setAlignment(QtCore.Qt.AlignCenter)
self.text_email_20.setObjectName("text_email_20")
self.button_copiar_site_20 = QtWidgets.QPushButton(self.registro_20)
self.button_copiar_site_20.setGeometry(QtCore.QRect(580, 60, 32, 32))
self.button_copiar_site_20.setMinimumSize(QtCore.QSize(0, 32))
self.button_copiar_site_20.setMaximumSize(QtCore.QSize(32, 32))
self.button_copiar_site_20.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_copiar_site_20.setStyleSheet("border-radius:1px")
self.button_copiar_site_20.setText("")
self.button_copiar_site_20.setObjectName("button_copiar_site_20")
self.button_deletar_registro_20 = QtWidgets.QPushButton(self.registro_20)
self.button_deletar_registro_20.setGeometry(QtCore.QRect(200, 2, 221, 27))
font = QtGui.QFont()
font.setFamily("Segoe UI Black")
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.button_deletar_registro_20.setFont(font)
self.button_deletar_registro_20.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.button_deletar_registro_20.setStyleSheet("QPushButton{\n"
" \n"
" color:rgb(255, 56, 56);\n"
" border-radius:10px;\n"
" background-color: rgb(72, 72, 72);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" color:rgb(255, 152, 152);\n"
" background-color: rgb(255, 56, 56);\n"
"}")
self.button_deletar_registro_20.setObjectName("button_deletar_registro_20")
self.qual_site_20 = QtWidgets.QLabel(self.registro_20)
self.qual_site_20.setGeometry(QtCore.QRect(522, 29, 32, 32))
self.qual_site_20.setText("")
self.qual_site_20.setObjectName("qual_site_20")
self.verticalLayout_3.addWidget(self.registro_20)
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.verticalLayout.addWidget(self.scrollArea)
self.entrada = QtWidgets.QFrame(self.centralwidget)
self.entrada.setMinimumSize(QtCore.QSize(64, 170))
self.entrada.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.entrada.setFrameShadow(QtWidgets.QFrame.Raised)
self.entrada.setObjectName("entrada")
self.input_site = QtWidgets.QLineEdit(self.entrada)
self.input_site.setGeometry(QtCore.QRect(120, 8, 400, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.input_site.setFont(font)
self.input_site.setStyleSheet("QLineEdit{\n"
" color: white;\n"
" background-color: rgb(31, 31, 31);\n"
" border-radius:10px;\n"
" border: 2px solid;\n"
" padding: 1px 15px\n"
"}\n"
"\n"
"QLineEdit:hover{\n"
" color: rgb(135, 135, 135);\n"
" border: 2px solid white;\n"
"}\n"
"\n"
"QLineEdit:focus{\n"
" border: 2px solid rgb(85, 85, 255);\n"
" color: rgb(255, 255, 255);\n"
"}")
self.input_site.setMaxLength(50)
self.input_site.setObjectName("input_site")
self.input_senha = QtWidgets.QLineEdit(self.entrada)
self.input_senha.setGeometry(QtCore.QRect(120, 42, 400, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.input_senha.setFont(font)
self.input_senha.setStyleSheet("QLineEdit{\n"
" color: white;\n"
" background-color: rgb(31, 31, 31);\n"
" border-radius:10px;\n"
" border: 2px solid;\n"
" padding: 1px 15px\n"
"}\n"
"\n"
"QLineEdit:hover{\n"
" color: rgb(135, 135, 135);\n"
" border: 2px solid white;\n"
"}\n"
"\n"
"QLineEdit:focus{\n"
" border: 2px solid rgb(85, 85, 255);\n"
" color: rgb(255, 255, 255);\n"
"}")
self.input_senha.setMaxLength(50)
self.input_senha.setObjectName("input_senha")
self.input_email = QtWidgets.QLineEdit(self.entrada)
self.input_email.setGeometry(QtCore.QRect(120, 76, 400, 30))
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.input_email.setFont(font)
self.input_email.setStyleSheet("QLineEdit{\n"
" color: white;\n"
" background-color: rgb(31, 31, 31);\n"
" border-radius:10px;\n"
" border: 2px solid;\n"
" padding: 1px 15px\n"
"}\n"
"\n"
"QLineEdit:hover{\n"
" color: rgb(135, 135, 135);\n"
" border: 2px solid white;\n"
"}\n"
"\n"
"QLineEdit:focus{\n"
" border: 2px solid rgb(85, 85, 255);\n"
" color: rgb(255, 255, 255);\n"
"}")
self.input_email.setMaxLength(50)
self.input_email.setObjectName("input_email")
self.btn_salvar_registro = QtWidgets.QPushButton(self.entrada)
self.btn_salvar_registro.setGeometry(QtCore.QRect(184, 120, 261, 41))
font = QtGui.QFont()
font.setFamily("Rockwell Extra Bold")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.btn_salvar_registro.setFont(font)
self.btn_salvar_registro.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.btn_salvar_registro.setStyleSheet("QPushButton{\n"
" border: 2px solid;\n"
" border-radius: 15px;\n"
" color:black\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" color:rgb(85, 85, 255);\n"
"}\n"
"QPushButton:pressed{\n"
" border: 2px solid;\n"
" color: white;\n"
" background-color:rgb(85, 85, 255)\n"
"}\n"
"")
self.btn_salvar_registro.setObjectName("btn_salvar_registro")
# auxiliary_tables.py — replication helpers (repo: jinju-rhee/microeconometrics_replication)
from localreg import *
import pandas as pd
import numpy as np
import econtools
import econtools.metrics as mt
from auxiliary.auxiliary_subset import *
from auxiliary.auxiliary_tables import *
##===============================
## For descriptive statistics table
def descriptive_main(data):
    """Build the descriptive-statistics table for the main RDD variables.

    Args:
        data: DataFrame containing at least the columns
            'tk', 'ab', 'dist1', 'ecs1'.

    Returns:
        DataFrame indexed by variable name with columns
        'Mean', 'Standard Deviation', 'RDD frame' and 'Definition'.
    """
    variables = data[["tk","ab","dist1","ecs1"]]
    table = pd.DataFrame()
    table['Mean'] = variables.mean().round(2)
    table['Standard Deviation'] = variables.std()
    table = table.astype(float).round(2)
    table['Variable'] = ["Capital transfers","Alignment","Regional incumbent’s bloc vote margin (v)", "Regional seat margin"]
    table = table.set_index('Variable')
    table['RDD frame'] = ["outcome variable","treatment","forcing variable","heterogeneity effects"]
    table['Definition'] = ["Capital transfers from the Regional government per capita",
                "Dummy equal to one if the party of the mayor is the same as that of the president of the AC",
                "% of votes cast at the local elections that have to be added (subtracted from) to the ideological bloc of the Regional incumbent to win (lose) a majority of seats in the local council",
                "Difference between the seat share of the parties in the regional government and the seat share of the main opposition parties in the previous regional election. This variable is demeaned. "]
    # NOTE: the original called table.style.set_properties(...) here but
    # discarded the returned Styler (a no-op, using the invalid CSS key
    # 'width-min'); removed as dead code.
    return table
def descriptive_controls(data):
    """Build the descriptive-statistics table for the control variables.

    Args:
        data: DataFrame containing the 16 control columns selected below.

    Returns:
        DataFrame indexed by variable name with columns
        'Mean', 'Standard Deviation' and 'Definition'.
    """
    variables = data[["debt","tipo","vcp","pob","density","pob_mes6591","pob_5_1491",
                  "extr_noeu91","unempl","income","educ2","presscirc","regre",
                  "regde","meanden","regterm"]]
    table = pd.DataFrame()
    table['Mean'] = variables.mean().round(2)
    table['Standard Deviation'] = variables.std()
    table = table.astype(float).round(2)
    table['Variable'] = ["Debt burden","Property tax rate","Property value", "Population","Population density","% Old",
                     "% Young","% Immigrant","% Unemployed","Income indicator","% Educated","Press circulation",
                     "Regional revenues pc","Regional debt","Municipal density","Tenure in office"]
    table = table.set_index('Variable')
    table['Definition'] = ["Debt burden (capital, item 9 of the spending budget, + interest, item 3), as a share of current revenues",
                       "Nominal property tax rate (IBI), % on assessed property value",
                      "Assessed property value (thousands of EUR) per capita",
                      "Resident population",
                      "Population per square kilometer",
                      "% resident population older than 65 years",
                      "% resident population younger than 14 years",
                      "% resident population non-EU immigrant",
                      "% resident population unemployed",
                      "Residents’ income level, as estimated from objective indicators (e.g., cars, bank deposits, etc.)",
                      "Percentage of people with primary and secondary education. This variable is demeaned",
                      "Newspaper copies (at the province level) per 1000 inhabitants. This variable is demeaned",
                      "Current revenues per capita in each region. This variable is demeaned",
                      "Debt burden (capital, item 9 of the spending budget, + interest, item 3) as a share of current revenues. This variable is demeaned",
                      "Average population density (population per square kilometer) of the municipalities in each region. This variable is demeaned",
                      "Dummy equal to one if it is the regional incumbent was not in office the previous term"]
    # NOTE: the original called table.style.set_properties(...) here but
    # discarded the returned Styler (a no-op with invalid CSS key
    # 'width-min'); removed as dead code.
    return table
def descriptive_confounders(data):
variables = data[["ecs1","regre","regde","meanden","regterm","presscirc","educ2"]]
table1 = pd.DataFrame()
table1['Mean'] = variables.mean()
table1['Standard Deviation'] = variables.std()
table1 = table1.astype(float).round(2)
table1['Confounders'] = ["Regional seat margin","Regional revenues pc","Regional debt", "Municipal density",
"Tenure in office","Press circulation","% Educated"]
table1 = table1.set_index('Confounders')
table1['Definition'] = ["Gap btw the seat share of the parties in the regional government and the opposition parties",
"Current revenues per capita in each region",
"Debt burden as a share of current revenues",
"Average population density (population per km^2) of the municipalities in each region",
"Dummy equal to one if it is the regional incumbent was not in office the previous term",
"Newspaper copies (at the province level) per 1000 inhabitants",
"Percentage of people with primary and secondary education"]
table1 = table1.round(2).style.set_properties(subset= ['Definition'], **{'width-min': '300px'})
table1
##===============================
## For table 1
def first_stage_2SLS_global(data,cluster_var,covariates):
    """First-stage 2SLS regression on the full (global) sample.

    Regresses alignment (`ab`) on the instrument `dab`, second-order
    polynomials of the forcing variable, and regional dummies.

    Args:
        data: source DataFrame.
        cluster_var: clustering variable; by the module convention,
            'codiine' for coefficients and 'codccaa' for p-values.
        covariates: 1 to append municipal controls; any other value omits them.

    Returns:
        econtools regression results object.
    """
    df = data[["ab","dab","dist1","dist2","vda","vda2","codiine","codccaa",
           "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
           "dca11","dca12","dca13","dca14","dca15","lpob", "density", "debt", "vcp", "tipo"]]
    y = 'ab'
    X = ['dab', 'dist1', 'dist2', 'vda','vda2',
         "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
         "dca11","dca12","dca13","dca14","dca15"]
    # (the original also had a no-op `elif covariates == 0: X = X`; removed)
    if covariates == 1:
        X = X + ["lpob", "density", "debt", "vcp", "tipo"]
    results = mt.reg(
        df,                   # DataFrame
        y,                    # Dependent var (string)
        X,                    # Independent var(s) (string or list of strings)
        cluster=cluster_var,  # Cluster var (string)
        addcons=True
    )
    return results
def first_stage_2SLS_local(data,bandwidth,cluster_var,covariates):
    """First-stage 2SLS regression on a local window around the threshold.

    Keeps only observations with |dist1| < bandwidth and fits a first-order
    (linear) specification of the forcing variable.

    Calculated optimal bandwidths:
        2h* = 0.386, h* = 0.193, h*/2 = 0.0965, h*/4 = 0.048

    Args:
        data: source DataFrame.
        bandwidth: half-width of the window on `dist1`.
        cluster_var: clustering variable ('codiine' for coefficients,
            'codccaa' for p-values, per the module convention).
        covariates: 1 to append municipal controls; any other value omits them.

    Returns:
        econtools regression results object.
    """
    df = data[["ab","dab","dist1","dist2","vda","vda2","codiine","codccaa",
           "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
           "dca11","dca12","dca13","dca14","dca15","lpob", "density", "debt", "vcp", "tipo"]]
    df_h = df[abs(df.dist1)<bandwidth]
    y = 'ab'
    X = ['dab', 'dist1', 'vda',
         "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
         "dca11","dca12","dca13","dca14","dca15"]
    # (the original also had a no-op `elif covariates == 0: X = X`; removed)
    if covariates == 1:
        X = X + ["lpob", "density", "debt", "vcp", "tipo"]
    results = mt.reg(
        df_h,                 # DataFrame restricted to the bandwidth window
        y,                    # Dependent var (string)
        X,                    # Independent var(s) (string or list of strings)
        cluster=cluster_var,  # Cluster var (string)
        addcons=True
    )
    return results
def second_stage_2SLS_global(data,cluster_var,covariates):
    """Second-stage 2SLS (IV) regression on the full (global) sample.

    Instruments alignment and its vote-margin interactions with the
    discontinuity dummies, with capital transfers (`tk`) as the outcome.

    Args:
        data: source DataFrame.
        cluster_var: clustering variable ('codiine' for coefficients,
            'codccaa' for p-values, per the module convention).
        covariates: 1 to append municipal controls; any other value omits them.

    Returns:
        econtools IV regression results object.
    """
    df = data[["ab","dab","dist1","dist2","vsa","vsa2","vda","vda2","tk",
           "codiine","codccaa",
           "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
           "dca11","dca12","dca13","dca14","dca15","lpob", "density", "debt", "vcp", "tipo"]]
    y = "tk"                      # dependent var
    E = ["ab","vsa","vsa2"]       # endogenous regressors
    Z = ["dab","vda","vda2"]      # instruments
    X = ["dist1","dist2",
         "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
         "dca11","dca12","dca13","dca14","dca15"]  # exogenous regressors
    # (the original also had a no-op `elif covariates == 0: X = X`; removed)
    if covariates == 1:
        X = X + ["lpob", "density", "debt", "vcp", "tipo"]
    results = mt.ivreg(df, y, E, Z, X, cluster=cluster_var, addcons=True)
    return results
def second_stage_2SLS_global_codiine(data,covariates):
    """Second-stage global 2SLS clustered by municipality ('codiine').

    The original body was a verbatim copy of second_stage_2SLS_global with
    the cluster variable hard-coded; it now delegates to that function so
    the two specifications cannot drift apart.

    Args:
        data: source DataFrame.
        covariates: 1 to append municipal controls; any other value omits them.

    Returns:
        econtools IV regression results object.
    """
    return second_stage_2SLS_global(data, "codiine", covariates)
def second_stage_2SLS_global_codccaa(data,covariates):
    """Second-stage global 2SLS clustered by region ('codccaa').

    The original body was a verbatim copy of second_stage_2SLS_global with
    the cluster variable hard-coded; it now delegates to that function so
    the two specifications cannot drift apart.

    Args:
        data: source DataFrame.
        covariates: 1 to append municipal controls; any other value omits them.

    Returns:
        econtools IV regression results object.
    """
    return second_stage_2SLS_global(data, "codccaa", covariates)
def second_stage_2SLS_local(data,bandwidth,cluster_var,covariates):
    """Second-stage 2SLS (IV) regression on a local window around the threshold.

    Keeps only observations with |dist1| < bandwidth and fits a first-order
    (linear) specification of the forcing variable.

    Calculated optimal bandwidths:
        2h* = 0.386, h* = 0.193, h*/2 = 0.0965, h*/4 = 0.048

    Args:
        data: source DataFrame.
        bandwidth: half-width of the window on `dist1`.
        cluster_var: clustering variable ('codiine' for coefficients,
            'codccaa' for p-values, per the module convention).
        covariates: 1 to append municipal controls; any other value omits them.

    Returns:
        econtools IV regression results object.
    """
    df = data[["ab","dab","dist1","dist2","vsa","vsa2","vda","vda2","tk",
           "codiine","codccaa",
           "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
           "dca11","dca12","dca13","dca14","dca15","lpob", "density", "debt", "vcp", "tipo"]]
    df_h = df[abs(df.dist1)<bandwidth]
    y = "tk"            # dependent var
    E = ["ab","vsa"]    # endogenous regressors
    Z = ["dab","vda"]   # instruments
    X = ["dist1",
         "dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
         "dca11","dca12","dca13","dca14","dca15"]  # exogenous regressors
    # (the original also had a no-op `elif covariates == 0: X = X`; removed)
    if covariates == 1:
        X = X + ["lpob", "density", "debt", "vcp", "tipo"]
    results = mt.ivreg(df_h, y, E, Z, X, cluster=cluster_var,addcons=True)
    return results
def table1(data,covariates):
    """Assemble Table 1: global and local 2SLS estimates of alignment on transfers.

    Coefficients and standard errors are clustered at the municipality level
    ('codiine'); p-values are clustered at the regional level ('codccaa'),
    following the module convention documented in first_stage_2SLS_global.

    Args:
        data: source DataFrame.
        covariates: 1 to include municipal controls in every regression.

    Returns:
        DataFrame indexed by RD specification, with second/first-stage
        coefficients, standard errors, p-values, and observation counts.
    """
    table = pd.DataFrame({'2nd_stage': [], 'Std.err(2)': [], 'P-Value(2)': [],
                          '1st_stage': [], 'Std.err(1)': [], 'P-Value(1)': [],
                          'Observations': []})
    case = ('Global','Local(bd=2h*)','Local(bd=h*)','Local(bd=h*/2)','Local(bd=h*/4)')
    table['RD'] = case
    table = table.set_index('RD')
    # Global estimate
    r1 = first_stage_2SLS_global(data,cluster_var = "codiine", covariates = covariates)
    p1 = first_stage_2SLS_global(data,cluster_var = "codccaa", covariates = covariates)
    r2 = second_stage_2SLS_global_codiine(data,covariates= covariates)
    p2 = second_stage_2SLS_global_codccaa(data,covariates= covariates)
    rg = [r2.beta['ab'] ,r2.se['ab'], p2.pt['ab'],
          r1.beta['dab'], r1.se['dab'], p1.pt['dab'], r2.N]
    table.loc["Global"] = rg
    # Local estimates at multiples of the optimal bandwidth h* = 0.193
    bandwidths = {'Local(bd=2h*)': 0.386, 'Local(bd=h*)': 0.193,
                  'Local(bd=h*/2)': 0.0965, 'Local(bd=h*/4)': 0.048}
    for a, bandwidth in bandwidths.items():
        rslt1 = first_stage_2SLS_local(data,bandwidth = bandwidth,cluster_var = "codiine", covariates = covariates)
        pval1 = first_stage_2SLS_local(data,bandwidth = bandwidth,cluster_var = "codccaa", covariates = covariates)
        rslt2 = second_stage_2SLS_local(data, bandwidth = bandwidth,cluster_var = "codiine", covariates = covariates)
        # BUG FIX: the original clustered pval2 on "codiine"; p-values are
        # clustered on "codccaa" everywhere else (see the global branch above).
        pval2 = second_stage_2SLS_local(data, bandwidth = bandwidth,cluster_var = "codccaa", covariates = covariates)
        result = [rslt2.beta['ab'] , rslt2.se['ab'], pval2.pt['ab'],
                  rslt1.beta['dab'], rslt1.se['dab'], pval1.pt['dab'], rslt2.N]
        table.loc[a] = result
    return table
##===============================
## For table 2
def effect_of_competition_global(data):
dca_abi = []
dca_vsai = []
dca_2vsai = []
dca_dabi = []
dca_vdai = []
dca_2vdai = []
for i in range(1,16):
dca_abi.append("dca_ab"+str(i))
dca_vsai.append("dca_vsa"+str(i))
dca_2vsai.append("dca_2vsa"+str(i))
dca_dabi.append("dca_dab"+str(i))
dca_vdai.append("dca_vda"+str(i))
dca_2vdai.append("dca_2vda"+str(i))
regional_columns = dca_abi + dca_vsai + dca_2vsai + dca_dabi + dca_vdai + dca_2vdai
other_columns = ["ab","dab","dist1","dist2","ecs1","vsa","vsa2","vda","vda2","tk","codiine",
"codccaa","esas1","vsa_ecs1", "vsa2_ecs1","edas1","vda_ecs1", "vda2_ecs1",
"dist1_ecs1", "dist2_ecs1",
"dca2","dca3","dca4","dca5","dca6","dca7","dca8","dca9","dca10",
"dca11","dca12","dca13","dca14","dca15"]
rc = data[regional_columns]
oc = data[other_columns]
df = pd.concat([rc,oc], axis=1).reindex(rc.index)
y = "tk" # dependent var
e = dca_abi + dca_vsai + dca_2vsai
e_ = ["esas1", "vsa_ecs1", | |
# -*- coding: utf-8 -*-
import operator
import sys
import numpy as np
import tensorflow as tf
from jtr.nn.models import get_total_trainable_variables
from jtr.util.tfutil import tfrun
class Vocab(object):
"""
Vocab objects for use in jtr pipelines.
Example:
>>> #Test Vocab without pre-trained embeddings
>>> vocab = Vocab()
>>> print(vocab("blah"))
1
>>> print(vocab("bluh"))
2
>>> print(vocab("bleh"))
3
>>> print(vocab("bluh"))
2
>>> print(vocab("hello"))
4
>>> print(vocab("world"))
5
>>> #Sym2id before freezing:
>>> for k in sorted(vocab.sym2id.keys()):
... print(k,' : ',vocab.sym2id[k])
<UNK> : 0
blah : 1
bleh : 3
bluh : 2
hello : 4
world : 5
>>> #Sym2id after freezing (no difference, because no pre-trained embeddings used):
>>> vocab.freeze()
>>> for k in sorted(vocab.sym2id.keys()):
... print(k,' : ',vocab.sym2id[k])
<UNK> : 0
blah : 1
bleh : 3
bluh : 2
hello : 4
world : 5
>>> #Test Vocab with pre-trained embeddings
>>> def emb(w):
... v = {'blah':[1.7,0,.3],'bluh':[0,1.5,0.5],'bleh':[0,0,2]}
... return None if not w in v else v[w]
>>> vocab = Vocab(emb=emb)
>>> print(vocab("blah"))
-1
>>> print(vocab("bluh"))
-2
>>> print(vocab("bleh"))
-3
>>> print(vocab("bluh"))
-2
>>> print(vocab("hello"))
1
>>> print(vocab("world"))
2
>>> #Sym2id before freezing:
>>> for k in sorted(vocab.sym2id.keys()):
... print(k,' : ',vocab.sym2id[k])
<UNK> : 0
blah : -1
bleh : -3
bluh : -2
hello : 1
world : 2
>>> #Sym2id after freezing: normalized (positive) ids, also for pre-trained terms
>>> vocab.freeze()
>>> for k in sorted(vocab.sym2id.keys()):
... print(k,' : ',vocab.sym2id[k])
<UNK> : 0
blah : 3
bleh : 5
bluh : 4
hello : 1
world : 2
>>> #Test pretrained and out-of-vocab id's before freezing
>>> vocab.unfreeze()
>>> vocab.get_ids_pretrained()
[-1, -2, -3]
>>> vocab.get_ids_oov()
[0, 1, 2]
>>> #Test pretrained and out-of-vocab id's after freezing
>>> vocab.freeze()
>>> vocab.get_ids_pretrained()
[3, 4, 5]
>>> vocab.get_ids_oov()
[0, 1, 2]
>>> #Test calling frozen Vocab object
>>> vocab(['bluh','world','wake','up']) #last 2 are new words, hence unknown
[4, 2, 0, 0]
>>> #Test calling unfrozen Vocab object
>>> vocab.unfreeze()
>>> vocab(['bluh','world','wake','up']) #last 2 are new words, hence added to Vocab
[-2, 2, 3, 4]
>>> #Test sym2id after freezing again
>>> vocab.freeze()
>>> for k in sorted(vocab.sym2id.keys()):
... print(k,' : ',vocab.sym2id[k])
<UNK> : 0
blah : 5
bleh : 7
bluh : 6
hello : 1
up : 4
wake : 3
world : 2
"""
DEFAULT_UNK = "<UNK>"
    def __init__(self, unk=DEFAULT_UNK, emb=None, init_from_embeddings=False):
        """
        Creates Vocab object.

        Args:
            `unk`: symbol for unknown term (default: "<UNK>").
                If set to `None`, and `None` is not included as symbol while unfrozen,
                it will return `None` upon calling `get_id(None)` when frozen.
            `emb`: function handle; returns pre-trained embedding (fixed-size numerical list or ndarray)
                for a given symbol, and None for unknown symbols.
            `init_from_embeddings`: if True and `emb` exposes a `vocabulary`
                attribute, the Vocab starts frozen, pre-populated from it.
        """
        # Pretrained symbols are counted down from -1; out-of-vocab symbols
        # are counted up from 0 (see get_id / freeze for the id scheme).
        self.next_neg = -1
        self.unk = unk
        self.emb = emb if emb is not None else lambda _:None  # if emb is None: same behavior as for o-o-v words
        if init_from_embeddings and emb is not None:
            # Seed the symbol tables directly from the embedding vocabulary.
            self.sym2id = dict(emb.vocabulary.word2idx)
            self.id2sym = {v: k for k, v in emb.vocabulary.word2idx.items()}
            if unk is not None and unk not in self.sym2id:
                # Append the unknown marker at the next free index.
                self.sym2id[unk] = len(self.sym2id)
                self.id2sym[len(self.id2sym)] = unk
            self.sym2freqs = {w: emb.vocabulary.get_word_count(w) for w in self.sym2id}
            self.frozen = True
            # NOTE(review): next_pos stays 0 here even though ids are in use;
            # looks inconsistent with count_oov/_normalize — confirm intended.
            self.next_pos = 0
        else:
            self.sym2id = {}
            # id2sym holds both positive (o-o-v) and negative (pretrained) ids
            self.id2sym = {}
            self.next_pos = 0
            self.sym2freqs = {}
            if unk is not None:
                # Reserve id 0 for the unknown marker.
                self.sym2id[unk] = 0
                self.id2sym[0] = unk
                self.next_pos = 1
                self.sym2freqs[unk] = 0
            self.frozen = False
        if emb is not None and hasattr(emb, "lookup") and isinstance(emb.lookup, np.ndarray):
            # Embedding dimensionality, when emb exposes a lookup matrix.
            self.emb_length = emb.lookup.shape[1]
        else:
            self.emb_length = None
def freeze(self):
"""Freeze current Vocab object (set `self.frozen` to True).
To be used after loading symbols from a given corpus;
transforms all internal symbol id's to positive indices (for use in tensors).
- additional calls to the __call__ method will return the id for the unknown symbold
- out-of-vocab id's are positive integers and do not change
- id's of symbols with pre-trained embeddings are converted to positive integer id's,
counting up from the all out-of-vocab id's.
"""
# if any pretrained have been encountered
if not self.frozen and self.next_neg < -1:
sym2id = {sym: self._normalize(id) for sym,id in self.sym2id.items()}
id2sym = {self._normalize(id): sym for id,sym in self.id2sym.items()}
self.sym2id = sym2id
self.id2sym = id2sym
self.frozen = True
def unfreeze(self):
"""Unfreeze current Vocab object (set `self.frozen` to False).
Caution: use with care! Unfreezing a Vocab, adding new terms, and again Freezing it,
will result in shifted id's for pre-trained symbols.
- maps all normalized id's to the original internal id's.
- additional calls to __call__ will allow adding new symbols to the vocabulary.
"""
if self.frozen and self.next_neg < -1:
sym2id = {sym: self._denormalize(id) for sym, id in self.sym2id.items()}
id2sym = {self._denormalize(id): sym for id, sym in self.id2sym.items()}
self.sym2id = sym2id
self.id2sym = id2sym
self.frozen = False
    def get_id(self, sym):
        """
        Returns the id of `sym`; different behavior depending on the state of the Vocab:
        - In case self.frozen==False (default): returns internal id,
          that is, positive for out-of-vocab symbol, negative for symbol
          found in `self.emb`. If `sym` is a new symbol, it is added to the Vocab.
        - In case self.frozen==True (after explicit call to 'freeze()', or after building a `NeuralVocab` with it):
          Returns normalized id (positive integer, also for symbols with pre-trained embedding)
          If `sym` is a new symbol, the id for unknown terms is returned, if available,
          and otherwise `None` (only possible when input argument `unk` for `Vocab.__init__()` was set to `None`, e.g.
          for classification labels; it is assumed action is taken in the pipeline
          creating or calling the `Vocab` object, when `None` is encountered).

        Args:
            `sym`: symbol (e.g., token)
        """
        if not self.frozen:
            # Probe the embedding function to decide pretrained vs o-o-v.
            vec = self.emb(sym)
            if self.emb_length is None and vec is not None:
                # Learn the embedding dimensionality lazily from the first
                # pretrained vector encountered.
                self.emb_length = len(vec) if isinstance(vec, list) else vec.shape[0]
            if sym not in self.sym2id:
                if vec is None:
                    # Out-of-vocab symbol: assign the next non-negative id.
                    self.sym2id[sym] = self.next_pos
                    self.id2sym[self.next_pos] = sym
                    self.next_pos += 1
                else:
                    # Pretrained symbol: assign the next negative id.
                    self.sym2id[sym] = self.next_neg
                    self.id2sym[self.next_neg] = sym
                    self.next_neg -= 1
                self.sym2freqs[sym] = 1
            else:
                # Known symbol: only bump its frequency count.
                self.sym2freqs[sym] += 1
        if sym in self.sym2id:
            return self.sym2id[sym]
        else:
            # Frozen vocab and unseen symbol: fall back to the unknown id.
            if self.unk in self.sym2id:
                return self.sym2id[self.unk]
            # can happen for `Vocab` initialized with `unk` argument set to `None`
            else:
                return None
def get_sym(self, id):
"""returns symbol for a given id (consistent with the `self.frozen` state), and None if not found."""
return None if not id in self.id2sym else self.id2sym[id]
def __call__(self, *args, **kwargs):
"""
calls the `get_id` function for the provided symbol(s), which adds symbols to the Vocab if needed and allowed,
and returns their id(s).
Args:
*args: a single symbol, a list of symbols, or multiple symbols
"""
symbols = args
if len(args) == 1:
if isinstance(args[0], list):
symbols = args[0]
else:
return self.get_id(args[0])
return [self.get_id(sym) for sym in symbols]
    def __len__(self):
        """Returns the number of unique symbols stored, including the
        unknown symbol when one was configured."""
        return len(self.id2sym)
    def __contains__(self, sym):
        """Membership test: True when `sym` has already been assigned an id
        in this Vocab object."""
        return sym in self.sym2id
def _normalize(self, id):
"""map original (pos/neg) ids to normalized (non-neg) ids: first new symbols, then those in emb"""
# e.g. -1 should be mapped to self.next_pos + 0
# e.g. -3 should be mapped to self.next_pos + 2
return id if id >=0 else self.next_pos - id - 1
def _denormalize(self,id):
# self.next_pos + i is mapped back to -1-i
return id if id < self.next_pos else -1-(id-self.next_pos)
def get_ids_pretrained(self):
"""return internal or normalized id's (depending on frozen/unfrozen state)
for symbols that have an embedding in `self.emb` """
if self.frozen:
return list(range(self.next_pos,self.next_pos+self.count_pretrained()))
else:
return list(range(-1,self.next_neg,-1))
def get_ids_oov(self):
"""return out-of-vocab id's (indep. of frozen/unfrozen state)"""
return list(range(self.next_pos))
def count_pretrained(self):
"""equivalent to `len(get_ids_pretrained())`"""
return -self.next_neg - 1
    def count_oov(self):
        """Number of symbols without a pre-trained embedding
        (equivalent to `len(get_ids_oov())`)."""
        return self.next_pos
def prune(self, min_freq=5, max_size=sys.maxsize):
"""returns new Vocab object, pruned based on minimum symbol frequency"""
pruned_vocab = Vocab(unk=self.unk, emb=self.emb)
cnt = 0
for sym, freq in sorted(self.sym2freqs.items(), key=operator.itemgetter(1), reverse=True):
# for sym in self.sym2freqs:
# freq = self.sym2freqs[sym]
cnt += 1
if | |
sub-model
if isinstance(value, dict):
for entrykey, entryval in value.items():
if entry_err_attr[entrykey] == numDSs:
raise PyDSTool_AttributeError('Parameter does not' +\
' exist in any sub-model: %s = %f'%(entrykey,
entryval))
if entry_err_val[entrykey] == numDSs:
raise PyDSTool_ValueError('Parameter value error in' +\
' every sub-model: %s = %f'%(entrykey, entryval))
else:
# can't think of other ways for this error to crop up
pass
else:
if entry_err_attr == numDSs:
raise PyDSTool_AttributeError('Parameter does not exist' +\
' in any sub-model: %s'%key)
if entry_err_val == numDSs:
raise PyDSTool_ValueError('Parameter value error in' +\
' every sub-model: %s'%key)
del(entry_err_attr)
del(entry_err_val)
self._generateParamInfo()
def __getitem__(self, trajname):
try:
return self.trajectories[trajname]
except KeyError:
raise ValueError('No such trajectory.')
    def __delitem__(self, trajname):
        """`del model[name]` deletes the named trajectory (see _delTraj)."""
        self._delTraj(trajname)
    def _delTraj(self, trajname):
        """Delete a named trajectory from the database.

        If `trajname` is unknown at this level, orphaned pieces named
        'trajname_<i>' are searched for in the registered sub-models and
        removed; otherwise the trajectory and its per-model pieces are
        deleted.
        """
        try:
            traj = self.trajectories[trajname]
        except KeyError:
            # a trajectory piece may have been created without
            # the top-level trajectory ever being completed
            # (e.g. after an unexpected error or ^C interruption)
            l = len(trajname)
            for m in self.registry.values():
                # delete all matching pieces (of form trajname + '_' + <digits>)
                for n in m.trajectories.keys():
                    # NOTE(review): n[l] raises IndexError when len(n) == l
                    # (i.e. an exact-name match); presumably such names never
                    # occur here — confirm.
                    if n[:l] == trajname and n[l] == '_' and n[l+1:].isdigit():
                        # NOTE(review): this deletes `trajname` from the
                        # sub-model even though the matching piece is `n`;
                        # looks like it should be m._delTraj(n) — confirm
                        # against upstream before changing.
                        m._delTraj(trajname)
        else:
            # propagate deletions down through registry
            if not isinstance(traj.modelNames, six.string_types):
                # hybrid trajectory: remove each per-sub-model piece first
                for i, model_name_i in enumerate(traj.modelNames):
                    del(self.registry[model_name_i][trajname+'_%i'%i])
            del(self.trajectories[trajname])
def __call__(self, trajname, t, coords=None, asGlobalTime=True,
asmap=False):
"""Evaluate position of hybrid trajectory at time t.
if optional argument asmap == True then t must be an integer in
[0, #segments].
"""
try:
traj = self.trajectories[trajname]
except KeyError:
raise ValueError("trajectory '"+trajname+"' unknown")
else:
return traj(t, coords=coords, asGlobalTime=asGlobalTime,
asmap=asmap)
    def getEndPoint(self, trajname, end=1):
        """Returns endpoint of specified trajectory as Point.

        trajname: name of selected trajectory
        end: (default=1) index of trajectory endpoint.
            0 => first, 1 => last
        """
        xdict = {}
        if end not in [0,1]:
            raise ValueError("end must be 0, 1")
        # -end selects the first (end=0) or last (end=1) piece of trajSeq
        endtraj = self.trajectories[trajname].trajSeq[-end]
        for xname in endtraj.coordnames:
            try:
                # fast path: read the stored endpoint sample directly
                xdict[endtraj._FScompatibleNamesInv(xname)] = \
                    endtraj.variables[xname].output.datapoints[1][-end]
            except KeyError:
                # auxiliary var didn't need calling
                pass
            except AttributeError:
                # non-point based output attributes of a Variable need
                # to be called ...
                tend = endtraj.indepdomain[end]
                xdict[endtraj._FScompatibleNamesInv(xname)] = \
                    endtraj.variables[xname](tend)
            except PyDSTool_BoundsError:
                # NOTE(review): `tend` is only bound inside the
                # AttributeError branch; if a bounds error is raised before
                # that branch ever ran, this print raises NameError — confirm.
                print("Value out of bounds in variable call:")
                print("  variable '%s' was called at time %f"%(xname, tend))
                raise
        return Point({'coorddict': xdict,
                  'coordnames': endtraj._FScompatibleNamesInv(endtraj.coordnames),
                  'coordtype': float,
                  'norm': self._normord})
def getEndTime(self, trajname, end=1):
"""Returns end time of specified trajectory.
trajname: name of selected trajectory
end: (default=1) index of trajectory endpoint.
0 => first, 1 => last
"""
if end not in [0,1]:
raise ValueError("end must be 0, 1")
endtraj = self.trajectories[trajname].trajSeq[-end]
tend = endtraj.indepdomain[end]
return tend
def _validateVarNames(self, names):
"""Check types and uniqueness of variable names."""
namelist = [] # records those seen so far
for vname in names:
assert isinstance(vname, six.string_types), \
'variable name must be a string'
assert vname not in namelist, ('variable names must be unique for'
' model')
namelist.append(vname)
    def forceObsVars(self, varnames):
        """Force variables to be the observables in the Model.
        May also promote auxiliary variables.

        Raises ValueError for names not already known to the model as
        observable, internal, or auxiliary variables.
        """
        # remain(a, b) presumably returns the items of a not present in b
        # — confirm against the project utilities.
        r = remain(varnames, self.obsvars+self.intvars+self.auxvars)
        if len(r) > 0:
            # then there are names given that are not known as
            # obs, int, or aux
            raise ValueError("Unknown variable names: "+str(r))
        for v in remain(varnames, self.obsvars):
            # only include names that are not already observables
            self.obsvars.append(v)
        # remove any vars that are now observables
        self.intvars = remain(self.intvars, varnames)
        self.auxvars = remain(self.auxvars, varnames)
        self.obsvars.sort()
        self.intvars.sort()
        self.auxvars.sort()
        # state dimension counts observables + internals (not auxiliaries)
        self.allvars = self.obsvars + self.intvars
        self.allvars.sort()
        self.dimension = len(self.allvars)
def forceIntVars(self, varnames):
"""Force variables to become internal variables in the Model.
May also promote auxiliary variables."""
r = remain(varnames, self.obsvars+self.intvars+self.auxvars)
if len(r) > 0:
# then there are names given that are not known as
# obs, int, or aux
raise ValueError("Unknown variable names: "+str(r))
for v in remain(varnames, self.intvars):
# only include names that are not already internals
self.intvars.append(v)
# remove any vars that are now internals
self.obsvars = remain(self.obsvars, varnames)
self.auxvars = remain(self.auxvars, varnames)
self.obsvars.sort()
self.intvars.sort()
self.auxvars.sort()
self.allvars = self.obsvars + self.intvars
self.allvars.sort()
self.dimension = len(self.allvars)
def defaultVars(self):
"""(Re)set to default observable and internal variable names."""
obsvars, intvars, auxvars = self._makeDefaultVarNames()
self._validateVarNames(obsvars + intvars + auxvars)
# OK to store these permanently after varname validation
self.obsvars = obsvars
self.intvars = intvars
self.auxvars = auxvars
self.obsvars.sort()
self.intvars.sort()
self.auxvars.sort()
self.allvars = self.obsvars + self.intvars
self.allvars.sort()
self.dimension = len(self.allvars)
def info(self, verboselevel=1):
print(self._infostr(verboselevel))
def __repr__(self):
return self._infostr(verbose=0)
__str__ = __repr__
def __copy__(self):
pickledself = pickle.dumps(self)
return pickle.loads(pickledself)
def __deepcopy__(self, memo=None, _nil=[]):
pickledself = pickle.dumps(self)
return pickle.loads(pickledself)
def renameTraj(self, trajname, newname, force=False):
"""Rename stored trajectory. Force option (default False)
will overwrite any existing trajectory with the new name.
"""
try:
traj = self.trajectories[trajname]
except KeyError:
raise ValueError('No such trajectory name %s'%trajname)
if trajname != newname:
if newname not in self.trajectories or force:
self.trajectories[newname] = traj
del self.trajectories[trajname]
traj.name = newname
else:
raise ValueError("Name %s already exists"%newname)
def getTrajModelName(self, trajname, t=None):
"""Return the named trajectory's associated sub-model(s) used
to create it, specific to time t if given."""
try:
modelNames = self.trajectories[trajname].modelNames
except KeyError:
raise ValueError('No such trajectory name %s'%trajname)
if t is None:
# return list of Generators for associated hybrid trajectory
return modelNames
else:
parts = self.getTrajTimePartitions(trajname)
pix = 0
for pinterval in parts:
if pinterval.contains(t) is not notcontained:
return modelNames[pix]
else:
pix += 1
def getTrajEventTimes(self, trajname, events=None):
"""Return the named trajectory's Generator-flagged event times.
events argument can be singleton string name of an event,
returning the event data, or events can be a list of event names,
returning a dictionary of event name -> event data.
Event names should use hierarchical naming convention, if
applicable."""
try:
return self.trajectories[trajname].getEventTimes(events)
except KeyError:
raise
#raise ValueError('No such trajectory name')
    def getTrajEvents(self, trajname, events=None):
        """Return the named trajectory's Generator-flagged events.
        events argument can be singleton string name of an event,
        returning the event data, or events can be a list of event names,
        returning a dictionary of event name -> event data.
        Event names should use hierarchical naming convention, if
        applicable.

        Raises ValueError if trajname is not a stored trajectory."""
        try:
            return self.trajectories[trajname].getEvents(events)
        except KeyError:
            # unknown trajectory name (or a KeyError raised from inside
            # getEvents) is reported uniformly as a ValueError
            raise ValueError('No such trajectory name')
    def getTrajEventStruct(self, trajname):
        """Return the named trajectory's model event structure (representing
        external constraints), as present when it was computed.

        Raises ValueError if trajname is not a stored trajectory.
        """
        try:
            return self.trajectories[trajname].modelEventStructs
        except KeyError:
            # unknown trajectory name -> report as ValueError
            raise ValueError('No such trajectory name')
    def getTrajTimeInterval(self, trajname):
        """Return the named trajectory's time domain,
        over which it is defined, in a single interval.

        Raises ValueError if trajname is not a stored trajectory."""
        try:
            return self.trajectories[trajname].indepdomain
        except KeyError:
            # unknown trajectory name -> report as ValueError
            raise ValueError('No such trajectory name')
    def getTrajTimePartitions(self, trajname):
        """Return the named trajectory's time domain,
        over which it is defined, as a list of time interval partitions.

        Raises ValueError if trajname is not a stored trajectory."""
        try:
            return self.trajectories[trajname].timePartitions
        except KeyError:
            # unknown trajectory name -> report as ValueError
            raise ValueError('No such trajectory name')
    def getDSAlgPars(self, target, par, idx=None):
        """
        Returns value of given algorithmic parameter for selected sub-model.
        target -- name of sub-model in model (cannot be list).
        par -- name of algorithmic parameter.
        idx -- (optional) index into value if algorithmic parameter val is a
        list of values.

        Returns None if the sub-model does not define the parameter.
        Raises ValueError if target is not a registered sub-model name.
        """
        if target in self.registry.keys():
            algpars = self.registry[target].get('algparams')
            if par in algpars.keys():
                if isinstance(algpars[par], list):
                    if idx is not None:
                        if isinstance(idx, list):
                            # select several entries of a list-valued param
                            val = [algpars[par][x] for x in idx]
                        else:
                            # select a single entry of a list-valued param
                            val = algpars[par][idx]
                    else:
                        # no index given: return the whole list
                        val = algpars[par]
                else:
                    # scalar-valued parameter: idx is ignored
                    val = algpars[par]
            else:
                # parameter not defined for this sub-model
                val = None
        else:
            raise ValueError("Target sub-model name not found")
        return val
def setDSAlgPars(self, target, par, val):
"""
Set value of algorithmic parameter in a specific generator.
target -- name or list of generators in model.
par -- name of algorithmic parameter is to be set.
val -- value to which the algorithmic parameter is to be set.
if target is a list, then algorithmic pararameter 'par' is
set to 'val' for every generator in the list, if par exists for that
generator.
WARNING: THIS FUNCTION IS NOT 'SAFE' -- IT DOES NOT CHECK THAT VALS
ARE APPROPRIATE TO PARAMETERS!!!
"""
if isinstance(target, list):
subModelList = target
else:
subModelList = [target]
for dsName in subModelList:
algpars = self.registry[dsName].get('algparams')
if par in algpars.keys():
# If target is a list, make sure that the input list is | |
<gh_stars>0
"""sls.py
An implementation of the robust adaptive controller.
Both FIR SLS version with CVXPY and the common
Lyapunov relaxation.
"""
import numpy as np
import cvxpy as cvx
import utils
import logging
import math
import scipy.linalg
from abc import ABC, abstractmethod
from adaptive import AdaptiveMethod
class SLSInfeasibleException(Exception):
    """Raised when the SLS synthesis problem admits no feasible solution."""

    def __init__(self, msg=None):
        super().__init__(msg)
def make_state_space_controller(Phi_x, Phi_u, n, p):
    """
    Convert FIR transfer functions (T stacked blocks of size n x n in
    Phi_x and p x n in Phi_u) to a state-space realization (A, B, C, D)
    of the dynamic controller mapping states to inputs.

    See Theorem 2 of:
    https://nikolaimatni.github.io/papers/sls_state_space.pdf
    """
    assert len(Phi_x.shape) == 2 and Phi_x.shape[1] == n
    assert len(Phi_u.shape) == 2 and Phi_u.shape[1] == n
    nT = Phi_x.shape[0]
    pT = Phi_u.shape[0]
    assert nT % n == 0 and pT % p == 0
    T = nT // n
    assert T == pT // p
    # Block down-shift operator and first-block injector.
    Z = np.diag(np.ones(n * (T - 2)), k=-n)
    assert Z.shape == ((T - 1) * n, (T - 1) * n)
    calI = np.zeros((n * (T - 1), n))
    calI[:n, :] = np.eye(n)
    # Stack the FIR coefficients 2..T side by side.
    Rhat = np.hstack([Phi_x[n * k:n * (k + 1), :] for k in range(1, T)])
    Mhat = np.hstack([Phi_u[p * k:p * (k + 1), :] for k in range(1, T)])
    M1 = Phi_u[:p, :]
    A = Z - calI.dot(Rhat)
    B = -calI
    C = M1.dot(Rhat) - Mhat
    D = M1
    return (A, B, C, D)
def h2_squared_norm(A, B, Phi_x, Phi_u, Q, R, sigma_w):
    """
    Squared infinite-horizon LQR cost of the true system (A, B) placed
    in feedback with the controller realized from Phi_x and Phi_u.
    """
    n, p = B.shape
    A_k, B_k, C_k, D_k = make_state_space_controller(Phi_x, Phi_u, n, p)
    k = A_k.shape[0]  # controller state dimension
    # Closed-loop interconnection of plant and controller.
    A_cl = np.block([
        [A + B.dot(D_k), B.dot(C_k)],
        [B_k, A_k]
    ])
    Q_sqrt = utils.psd_sqrt(Q)
    R_sqrt = utils.psd_sqrt(R)
    C_cl = np.block([
        [Q_sqrt, np.zeros((n, k))],
        [R_sqrt.dot(D_k), R_sqrt.dot(C_k)]
    ])
    B_cl = np.vstack((np.eye(n), np.zeros((k, n))))
    # Controllability Gramian of the closed loop driven by unit noise.
    gram = utils.solve_discrete_lyapunov(A_cl.T, B_cl.dot(B_cl.T))
    return (sigma_w ** 2) * np.trace(C_cl.dot(gram).dot(C_cl.T))
def _assert_AB_consistent(A, B):
assert len(A.shape) == 2 and A.shape[0] == A.shape[1]
assert len(B.shape) == 2
assert A.shape[0] == B.shape[0]
def _assert_ABCD_consistent(A, B, C, D):
    """Check dimensional consistency of the state-space tuple (A, B, C, D)."""
    _assert_AB_consistent(A, B)
    assert len(C.shape) == 2 and len(D.shape) == 2
    assert C.shape[1] == A.shape[0]
    assert C.shape[0] == D.shape[0]
    assert D.shape[1] == B.shape[1]
def roll_forward(A, B, K, x0, psi0, sigma_w, horizon, rng=None):
    """Roll the true system (A, B) forward under the LTI controller K.

    K is a state-space tuple (A_k, B_k, C_k, D_k). horizon is the length
    of the trajectory and sigma_w the stddev of the Gaussian process noise.
    Returns (states, inputs, controller_states).
    """
    if rng is None:
        rng = np.random
    _assert_AB_consistent(A, B)
    A_k, B_k, C_k, D_k = K
    _assert_ABCD_consistent(A_k, B_k, C_k, D_k)
    state_dim, input_dim = B.shape
    psi_dim = A_k.shape[0]
    assert C_k.shape[0] == input_dim
    assert B_k.shape[1] == state_dim
    # Default to zero initial conditions when not supplied.
    if x0 is None:
        x0 = np.zeros((state_dim,))
    if psi0 is None:
        psi0 = np.zeros((psi_dim,))
    assert x0.shape == (state_dim,)
    assert psi0.shape == (psi_dim,)
    noise = sigma_w * rng.normal(size=(horizon, state_dim))
    states = np.zeros((horizon + 1, state_dim))
    inputs = np.zeros((horizon, input_dim))
    controller_states = np.zeros((horizon + 1, psi_dim))
    states[0, :] = x0
    controller_states[0, :] = psi0
    for t in range(horizon):
        xt = states[t, :]
        psit = controller_states[t, :]
        ut = C_k.dot(psit) + D_k.dot(xt)
        inputs[t, :] = ut
        controller_states[t + 1, :] = A_k.dot(psit) + B_k.dot(xt)
        states[t + 1, :] = A.dot(xt) + B.dot(ut) + noise[t]
    return states, inputs, controller_states
def sls_synth(Q, R, Ahat, Bhat, eps_A, eps_B, T, gamma, alpha, logger=None):
    """
    Solves the SLS synthesis problem for length T FIR filters
    using CVXPY.

    Q, R -- LQR cost matrices (symmetric PSD).
    Ahat, Bhat -- nominal system estimates.
    eps_A, eps_B -- bounds on the estimation errors of Ahat, Bhat (>= 0).
    T -- FIR horizon (>= 1).
    gamma -- H-infinity response bound, in (0, 1).
    alpha -- uncertainty budget split between A and B errors, in (0, 1).
    logger -- optional logging.Logger; defaults to the module logger.

    Returns (success, cost, Phi_x, Phi_u); the last three entries are
    None when the solver does not report an optimal solution.
    """
    assert len(Q.shape) == 2 and Q.shape[0] == Q.shape[1]
    assert len(R.shape) == 2 and R.shape[0] == R.shape[1]
    assert len(Ahat.shape) == 2 and Ahat.shape[0] == Ahat.shape[1]
    assert len(Bhat.shape) == 2 and Bhat.shape[0] == Ahat.shape[0]
    assert Q.shape[0] == Ahat.shape[0]
    assert R.shape[0] == Bhat.shape[1]
    assert eps_A >= 0
    assert eps_B >= 0
    assert T >= 1
    assert gamma > 0 and gamma < 1
    assert alpha > 0 and alpha < 1
    if logger is None:
        logger = logging.getLogger(__name__)
    n, p = Bhat.shape
    Q_sqrt = utils.psd_sqrt(Q)
    R_sqrt = utils.psd_sqrt(R)
    # Phi_x = \sum_{k=1}^{T} Phi_x[k] z^{-k}
    Phi_x = cvx.Variable((T*n, n), name="Phi_x")
    # Phi_u = \sum_{k=1}^{T} Phi_u[k] z^{-k}
    Phi_u = cvx.Variable((T*p, n), name="Phi_u")
    # htwo_cost
    htwo_cost = cvx.Variable(name="htwo_cost")
    # subspace constraint:
    # [zI - Ah, -Bh] * [Phi_x; Phi_u] = I
    #
    # Note that:
    # z Phi_x = \sum_{k=0}^{T-1} Phi_x[k+1] z^{-k}
    #
    # This means that:
    # 1) Phi_x[1] = I
    # 2) Phi_x[k+1] = Ah*Phi_x[k] + Bh*Phi_u[k] for k=1, ..., T-1
    # 3) Ah*Phi_x[T] + Bh*Phi_u[T] = 0
    constr = []
    constr.append(Phi_x[:n, :] == np.eye(n))
    for k in range(T-1):
        constr.append(Phi_x[n*(k+1):n*(k+1+1), :] == Ahat*Phi_x[n*k:n*(k+1), :] + Bhat*Phi_u[p*k:p*(k+1), :])
    constr.append(Ahat*Phi_x[n*(T-1):, :] + Bhat*Phi_u[p*(T-1):, :] == 0)
    # H2 constraint:
    # By Parseval's identity, this is equal (up to constants) to
    #
    # frobenius_norm(
    #   [ Q_sqrt*Phi_x[1] ;
    #     ...
    #     Q_sqrt*Phi_x[T] ;
    #     R_sqrt*Phi_u[1] ;
    #     ...
    #     R_sqrt*Phi_u[T]
    #   ]
    # ) <= htwo_cost
    constr.append(
        cvx.norm(
            cvx.bmat(
                [[Q_sqrt*Phi_x[n*k:n*(k+1), :]] for k in range(T)] +
                [[R_sqrt*Phi_u[p*k:p*(k+1), :]] for k in range(T)]),
            'fro') <= htwo_cost)
    # H-infinity constraint
    #
    # We want to enforce ||H(z)||_inf <= gamma, where
    #
    # H(z) = \sum_{k=1}^{T} [ mult_x * Phi_x[k] ; mult_u * Phi_u[k] ] z^{-k}.
    #
    # Here, each of the FIR coefficients has size (n+p) x n. Since n+p>n,
    # we enforce the constraint on the transpose system H^T(z). The LMI
    # constraint for this comes from Theorem 5.8 of
    # Positive trigonometric polynomials and signal processing applications
    # (2007, Dumitrescu).
    #
    # Table mapping variable names in the text to this program:
    #
    #       Text          Program        Comment
    #       -------------------------------------------
    #       p             n              Output dim
    #       m             n+p            Input dim
    #       n             T              FIR horizon
    #       p(n+1)        n(T+1)         SDP variable size
    mult_x = eps_A/np.sqrt(alpha)
    mult_u = eps_B/np.sqrt(1-alpha)
    # Hbar has size (T+1)*n x (n+p)
    Hbar = cvx.bmat(
        [[np.zeros((n, n)), np.zeros((n, p))]] +
        [[mult_x*Phi_x[n*k:n*(k+1), :].T, mult_u*Phi_u[p*k:p*(k+1), :].T] for k in range(T)])
    # FIX: renamed local from `Q` to `Q_lmi` -- the original shadowed the
    # LQR cost argument Q for the remainder of the function.
    Q_lmi = cvx.Variable((n*(T+1), n*(T+1)), name="Q", PSD=True)
    # Constraint (5.44)
    # Case k==0: the block diag of Q_lmi has to sum to gamma^2 * eye(n)
    gamma_sq = gamma ** 2
    constr.append(
        sum([Q_lmi[n*t:n*(t+1), n*t:n*(t+1)] for t in range(T+1)]) == gamma_sq*np.eye(n))
    # Case k>0: the block off-diag of Q_lmi has to sum to zero
    for k in range(1, T+1):
        constr.append(
            sum([Q_lmi[n*t:n*(t+1), n*(t+k):n*(t+1+k)] for t in range(T+1-k)]) == np.zeros((n, n)))
    # Constraint (5.45)
    constr.append(
        cvx.bmat([
            [Q_lmi, Hbar],
            [Hbar.T, np.eye(n+p)]]) == cvx.Variable((n*(T+1) + (n+p), n*(T+1) + (n+p)), PSD=True))
    prob = cvx.Problem(cvx.Minimize(htwo_cost), constr)
    prob.solve(solver=cvx.SCS)
    if prob.status == cvx.OPTIMAL:
        # FIX: log via the configured/passed-in logger, not the root
        # logging module (the original created `logger` but never used it).
        logger.debug("successfully solved!")
        Phi_x = np.array(Phi_x.value)
        Phi_u = np.array(Phi_u.value)
        return (True, prob.value, Phi_x, Phi_u)
    else:
        logger.debug("could not solve: {}".format(prob.status))
        return (False, None, None, None)
def sls_common_lyapunov(A, B, Q, R, eps_A, eps_B, tau, logger=None):
    """
    Solves the common Lyapunov relaxation to the robust
    synthesis problem.

    Returns (success, cost, P, K). NOTE: the K returned is meant to be
    used as A + B*K, **not** A - B*K. On failure the last three are None.

    Taken from
    lstd-lqr/blob/master/code/policy_iteration.ipynb
    learning-lqr/experiments/matlab/sls_synth_yalmip/common_lyap_synth_var2_alpha.m
    """
    if logger is None:
        logger = logging.getLogger(__name__)
    d, p = B.shape
    X = cvx.Variable((d, d), symmetric=True)  # inverse Lyapunov function
    Z = cvx.Variable((p, d))  # -K*X
    W_11 = cvx.Variable((d, d), symmetric=True)
    W_12 = cvx.Variable((d, p))
    W_22 = cvx.Variable((p, p), symmetric=True)
    alph = cvx.Variable()  # scalar for tuning the H_inf constraint
    constraints = []
    # H2 cost: trace(W)=H2 cost
    mat1 = cvx.bmat([
        [X, X, Z.T],
        [X, W_11, W_12],
        [Z, W_12.T, W_22]])
    constraints.append(mat1 == cvx.Variable((2*d + p, 2*d + p), PSD=True))
    # H_infinity constraint
    mat2 = cvx.bmat([
        [X-np.eye(d), (A*X+B*Z), np.zeros((d, d)), np.zeros((d, p))],
        [(X*A.T+Z.T*B.T), X, eps_A*X, eps_B*Z.T],
        [np.zeros((d, d)), eps_A*X, alph*(tau**2)*np.eye(d), np.zeros((d, p))],
        [np.zeros((p, d)), eps_B*Z, np.zeros((p, d)), (1-alph)*(tau**2)*np.eye(p)]])
    constraints.append(mat2 == cvx.Variable((3*d + p, 3*d + p), PSD=True))
    # constrain alpha to be in [0,1]:
    constraints.append(alph >= 0)
    constraints.append(alph <= 1)
    # Solve!
    objective = cvx.Minimize(cvx.trace(Q*W_11) + cvx.trace(R*W_22))
    prob = cvx.Problem(objective, constraints)
    try:
        obj = prob.solve(solver=cvx.MOSEK)
    except cvx.SolverError:
        # FIX: Logger.warn is deprecated; use Logger.warning.
        logger.warning("SolverError encountered")
        return (False, None, None, None)
    if prob.status == cvx.OPTIMAL:
        # FIX: use the configured/passed-in logger, not the root logging
        # module (the original created `logger` but logged via `logging`).
        logger.debug("common_lyapunov: found optimal solution")
        X_value = np.array(X.value)
        P_value = scipy.linalg.solve(X_value, np.eye(d), sym_pos=True)
        # NOTE: the K returned here is meant to be used
        # as A + BK **NOT** A - BK
        K_value = np.array(Z.value).dot(P_value)
        return (True, obj, P_value, K_value)
    else:
        logger.debug("common_lyapunov: could not solve (status={})".format(prob.status))
        return (False, None, None, None)
class SLS_Implementation(ABC):
@abstractmethod
def open(self):
"""
"""
| |
# coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="viewer_api.py">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from groupdocs_viewer_cloud.auth import Auth
from groupdocs_viewer_cloud.api_client import ApiClient
from groupdocs_viewer_cloud.api_exception import ApiException
from groupdocs_viewer_cloud.configuration import Configuration
class ViewApi(object):
    """
    GroupDocs.Viewer Cloud API

    :param configuration: API configuration
    """

    def __init__(self, configuration):
        api_client = ApiClient(configuration)
        self.auth = Auth(configuration, api_client)
        self.api_client = api_client
        self.configuration = configuration

    def close(self):  # noqa: E501
        """
        Closes thread pool. This method should be called when
        methods are executed asynchronously (is_async=True is passed as parameter)
        and this instance of ViewApi is not going to be used any more.
        """
        if self.api_client is not None:
            if(self.api_client.pool is not None):
                self.api_client.pool.close()
                self.api_client.pool.join()
                self.api_client.pool = None

    @classmethod
    def from_keys(cls, app_sid, app_key):
        """
        Initializes new instance of ViewApi with API keys

        :param app_sid Application identifier (App SID)
        :param app_key Application private key (App Key)
        """
        configuration = Configuration(app_sid, app_key)
        return ViewApi(configuration)

    @classmethod
    def from_config(cls, configuration):
        """
        Initializes new instance of ViewApi with configuration options

        :param configuration API configuration
        """
        return ViewApi(configuration)

    def create_view(self, request, **kwargs):  # noqa: E501
        """Render document pages  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass is_async=True

        :param is_async bool
        :param ViewOptions view_options: View options (required)
        :return: ViewResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('is_async'):
            return self._create_view_with_http_info(request, **kwargs)  # noqa: E501
        (data) = self._create_view_with_http_info(request, **kwargs)  # noqa: E501
        return data

    def _create_view_with_http_info(self, request, **kwargs):  # noqa: E501
        """Render document pages  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass is_async=True

        :param is_async bool
        :param CreateViewRequest request object with parameters
        :return: ViewResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        params = locals()
        params['is_async'] = ''
        params['_return_http_data_only'] = False
        params['_preload_content'] = True
        params['_request_timeout'] = ''
        # reject unknown keyword arguments early
        for key, val in six.iteritems(params['kwargs']):
            if key not in params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_view" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'view_options' is set
        if request.view_options is None:
            raise ValueError("Missing the required parameter `view_options` when calling `create_view`")  # noqa: E501

        collection_formats = {}
        path = '/viewer/view'
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = []

        body_params = None
        if request.view_options is not None:
            body_params = request.view_options
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        call_kwargs = {
            'resource_path': path,
            'method': 'POST',
            'path_params': path_params,
            'query_params': query_params,
            'header_params': header_params,
            'body': body_params,
            'post_params': form_params,
            'files': local_var_files,
            'response_type': 'ViewResult',  # noqa: E501
            'auth_settings': self.auth.get_auth_settings(),
            'is_async': params.get('is_async'),
            '_return_http_data_only': params.get('_return_http_data_only'),
            '_preload_content': params.get('_preload_content', True),
            '_request_timeout': params.get('_request_timeout'),
            'collection_formats': collection_formats
        }

        return self.api_client.call_api(**call_kwargs)  # noqa: E501

    def delete_view(self, request, **kwargs):  # noqa: E501
        """Delete rendered pages  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass is_async=True

        :param is_async bool
        :param DeleteViewOptions delete_view_options: Delete options (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('is_async'):
            return self._delete_view_with_http_info(request, **kwargs)  # noqa: E501
        self._delete_view_with_http_info(request, **kwargs)  # noqa: E501

    def _delete_view_with_http_info(self, request, **kwargs):  # noqa: E501
        """Delete rendered pages  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass is_async=True

        :param is_async bool
        :param DeleteViewRequest request object with parameters
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        params = locals()
        params['is_async'] = ''
        params['_return_http_data_only'] = False
        params['_preload_content'] = True
        params['_request_timeout'] = ''
        # reject unknown keyword arguments early
        for key, val in six.iteritems(params['kwargs']):
            if key not in params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_view" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'delete_view_options' is set
        if request.delete_view_options is None:
            raise ValueError("Missing the required parameter `delete_view_options` when calling `delete_view`")  # noqa: E501

        collection_formats = {}
        path = '/viewer/view'
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = []

        body_params = None
        if request.delete_view_options is not None:
            body_params = request.delete_view_options
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        call_kwargs = {
            'resource_path': path,
            'method': 'DELETE',
            'path_params': path_params,
            'query_params': query_params,
            'header_params': header_params,
            'body': body_params,
            'post_params': form_params,
            'files': local_var_files,
            'response_type': None,  # noqa: E501
            'auth_settings': self.auth.get_auth_settings(),
            'is_async': params.get('is_async'),
            '_return_http_data_only': params.get('_return_http_data_only'),
            '_preload_content': params.get('_preload_content', True),
            '_request_timeout': params.get('_request_timeout'),
            'collection_formats': collection_formats
        }

        return self.api_client.call_api(**call_kwargs)  # noqa: E501

    def __downcase_first_letter(self, s):
        """Return s with its first character lower-cased."""
        if len(s) == 0:
            # FIX: previously returned the builtin type `str` (a class
            # object) instead of the empty string itself.
            return s
        else:
            return s[0].lower() + s[1:]
# coding: utf-8
# --------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="create_view_request.py">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# --------------------------------------------------------------------------------
class CreateViewRequest(object):
    """
    Request model for the create_view operation.

    :param view_options: View options
    """

    def __init__(self, view_options):
        """Initialize a new CreateViewRequest instance."""  # noqa: E501
        self.view_options = view_options
# coding: utf-8
# --------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="delete_view_request.py">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, | |
selector: [{"title":"notepad"},{"title":"Cancel"}]
"""
lResultFlag = False
lSecsSleep = 1 # Настроечный параметр
lSecsDone = 0
lResultList = None
# Цикл проверки
while lResultFlag == False and lSecsDone < inWaitSecs:
# pdb.set_trace()
lResultList = []
# Итерация проверки
lIndex = 0
for lItem in inSpecificationListList:
lItemResultFlag = UIOSelector_Exist_Bool(lItem)
# Если обнаружен элемент - добавить его индекс в массив
if lItemResultFlag:
lResultList.append(lIndex)
# Инкремент индекса
lIndex = lIndex + 1
# Проверка в зависимости от флага
if inFlagWaitAllInMoment and len(lResultList) == len(inSpecificationListList):
# Условие выполнено
lResultFlag = True
elif not inFlagWaitAllInMoment and len(lResultList) > 0:
# Условие выполнено
lResultFlag = True
# Если флаг не изменился - увеличить время и уснуть
if lResultFlag == False:
lSecsDone = lSecsDone + lSecsSleep
time.sleep(lSecsSleep)
return lResultList
#################################################################################################
# Wait for UIO is Disappear (at least one of them or all at the same time)
# inSpecificationListList - List of the UIOSelector
# inWaitSecs - Время ожидания пропажи объекта в секундах
# inFlagWaitAllInMoment - доп. условие - ожидать пропажу всех UIOSelector одновременно
# return: [0,1,2] - index of UIOSpecification, which is Disappear
# old name - -
#####Внимание#####
##Функция ожидания пропажи элементов (тк элементы могут быть недоступны, неизвестно в каком фреймворке каждый из них может появиться)
def UIOSelectorsSecs_WaitDisappear_List(inSpecificationListList, inWaitSecs, inFlagWaitAllInMoment=False):
    '''
    Wait for many UI object will disappear in GUI for inWaitSecs seconds.

    :param inSpecificationListList: UIOSelector list.
        Example: [
            [{"title":"notepad"},{"title":"OK"}],
            [{"title":"notepad"},{"title":"Cancel"}]
        ]
    :param inWaitSecs: Float value (seconds) for wait UI element disappear in GUI
    :param inFlagWaitAllInMoment: True - Wait all UI objects from the UIOSelector list to be disappeared.
    :return: List of index, which UI object UIO will be disappeared. Example: [1] # Disappear only UI object with UIO selector: [{"title":"notepad"},{"title":"Cancel"}]
    '''
    lPollSecs = 1  # polling period (tuning parameter)
    lElapsedSecs = 0
    lDisappearedIndexList = None
    lDoneFlag = False
    # Poll until the disappearance condition holds or the timeout expires.
    while not lDoneFlag and lElapsedSecs < inWaitSecs:
        # Indices of the selectors that do not currently exist in the GUI.
        lDisappearedIndexList = [
            lIndex for lIndex, lSelector in enumerate(inSpecificationListList)
            if not UIOSelector_Exist_Bool(lSelector)]
        if inFlagWaitAllInMoment:
            # condition: all selectors gone at the same time
            lDoneFlag = len(lDisappearedIndexList) == len(inSpecificationListList)
        else:
            # condition: at least one selector gone
            lDoneFlag = len(lDisappearedIndexList) > 0
        if not lDoneFlag:
            lElapsedSecs = lElapsedSecs + lPollSecs
            time.sleep(lPollSecs)
    return lDisappearedIndexList
#################################################################################################
# Wait for UIO is appear (at least one of them or all at the same time)
# inSpecificationList - UIOSelector
# inWaitSecs - Время ожидания объекта в секундах
# return: Bool - True - UIO is appear
# old name - -
def UIOSelectorSecs_WaitAppear_Bool(inSpecificationList, inWaitSecs):
    """
    Wait for UI object will appear in GUI for inWaitSecs seconds.

    :param inSpecificationList: UIOSelector. Example: [{"title":"notepad"},{"title":"OK"}]
    :param inWaitSecs: Float value (seconds) for wait UI element appear in GUI
    :return: True - UI object will appear. False - else case
    """
    lAppearIndexList = UIOSelectorsSecs_WaitAppear_List([inSpecificationList], inWaitSecs)
    return len(lAppearIndexList) > 0
#################################################################################################
# Wait for UIO is disappear (at least one of them or all at the same time)
# inSpecificationList - UIOSelector
# inWaitSecs - Время ожидания пропажи объекта в секундах
# return: Bool - True - UIO is Disappear
# old name - -
def UIOSelectorSecs_WaitDisappear_Bool(inSpecificationList, inWaitSecs):
    """
    Wait for UI object will disappear in GUI for inWaitSecs seconds.

    :param inSpecificationList: UIOSelector.
        Example: [{"title":"notepad"},{"title":"OK"}]
    :param inWaitSecs: Float value (seconds) for wait UI element disappear in GUI
    :return: True - UI object will disappear. False - else case
    """
    lDisappearIndexList = UIOSelectorsSecs_WaitDisappear_List([inSpecificationList], inWaitSecs)
    return len(lDisappearIndexList) > 0
#################################################################################################
# Get process bitness (32 or 64)
# inSpecificationList - UIOSelector
# old name - None
# return None (if Process not found), int 32, or int 64
def UIOSelector_Get_BitnessInt(inSpecificationList):
    """
    Detect process bitness by the UI Object UIO Selector.

    :param inSpecificationList: UIOSelector. Example: [{"title":"notepad"},{"title":"OK"}]
    :return: None if the process was not found, otherwise int 32 or int 64
    """
    # Resolve the pywinauto Application object to query its bitness.
    lAppObject = PWASpecification_Get_PWAApplication(inSpecificationList)
    if lAppObject is None:
        return None
    return 64 if lAppObject.is64bit() else 32
#################################################################################################
# Get process bitness ("32" or "64")
# inSpecificationList - UIOSelector
# old name - None
# return None (if Process not found), int 32, or int 64
def UIOSelector_Get_BitnessStr(inSpecificationList):
    """
    Detect the bitness of the process that owns the UI object matched by the
    UIO Selector.
    :param inSpecificationList: UIOSelector. Example: [{"title":"notepad"},{"title":"OK"}]
    :return: str "32" or str "64", or None when the process was not found
    """
    # Resolve the pywinauto Application object to inspect its bitness.
    lRootElement = PWASpecification_Get_PWAApplication(inSpecificationList)
    if lRootElement is None:
        return None
    return "64" if lRootElement.is64bit() else "32"
#################################################################################################
# Get OS bitness (32 or 64)
# old name - None
# return int 32, or int 64
def Get_OSBitnessInt():
    """
    Detect the bitness of the operating system.
    :return: int 32 or int 64
    """
    return 64 if pywinauto.sysinfo.is_x64_OS() else 32
#################################################################################################
# Safe get other process or None if destination app is the other/same bitness
# inUIOSelector - selector of the destination
# return None or process (of the other bitness)
def UIOSelector_SafeOtherGet_Process(inUIOSelector):
    """
    Safely get the process of the other bitness, or None when the destination
    app has the same bitness as the current process (or the selector is empty).
    :param inUIOSelector: UIO Selector of the UI object
    :return: None or process (of the other bitness)
    """
    if not inUIOSelector:
        return None
    # Bitness of the application behind the selector, e.g. "32" or "64".
    lUIOSelectorAppBitness = UIOSelector_Get_BitnessStr(inUIOSelector)
    lCurrentBitness = Utils.ProcessBitness.mSettingsDict["BitnessProcessCurrent"]
    if lUIOSelectorAppBitness and lCurrentBitness != lUIOSelectorAppBitness:
        # Bitness differs - hand back the helper process of the other bitness.
        return Utils.ProcessBitness.OtherProcessGet()
    return None
##################################################################################################
# inControlSpecificationArray - List of dict, dict in pywinauto.find_windows notation
# Backend selection - attribute "backend" ("win32" || "uia") in 1-st list element
# return list of UIO object
# old name - GetControl
def PWASpecification_Get_UIO(inControlSpecificationArray):
    """
    Resolve the specification array to the matching UI objects.
    Backend selection - attribute "backend" ("win32" || "uia") in the 1-st list element.
    #old name - GetControl
    :param inControlSpecificationArray: List of dict, dict in pywinauto.find_windows notation
    :return: list of UIO object
    """
    # Backend detection; the "backend" key is consumed, not passed to pywinauto.
    lBackend = mDefaultPywinautoBackend
    if "backend" in inControlSpecificationArray[0]:
        lBackend = inControlSpecificationArray[0]["backend"]
        inControlSpecificationArray[0].pop("backend")
    # Prepare the input array (keep an untouched deep copy of the original).
    inControlSpecificationOriginArray = copy.deepcopy(inControlSpecificationArray)
    inControlSpecificationArray = UIOSelector_SearchProcessNormalize_UIOSelector(inControlSpecificationArray)
    # Identify the objects when an array was passed
    lResultList = [];
    lTempObject = None
    if len(inControlSpecificationArray) > 0:
        # Select the elements that match the first specification level
        lSpecificationLvL1List = pywinauto.findwindows.find_elements(**inControlSpecificationArray[0])
        for lItem in lSpecificationLvL1List:
            # Make an independent copy and pin process_id and handle on it
            lItemControlSpecificationArray = copy.deepcopy(inControlSpecificationArray)
            lItemControlSpecificationArray[0]["process_id"] = lItem.process_id
            lItemControlSpecificationArray[0]["handle"] = lItem.handle
            lItemControlSpecificationOriginArray = copy.deepcopy(inControlSpecificationOriginArray)
            lItemControlSpecificationOriginArray[0]["process_id"] = lItem.process_id
            lItemControlSpecificationOriginArray[0]["handle"] = lItem.handle
            # Connect to the object
            lRPAApplication = pywinauto.Application(backend=lBackend)
            # Bitness check: try to connect, restore the window on failure and retry once
            try:
                lRPAApplication.connect(**lItemControlSpecificationArray[0])
            except Exception as e:
                UIOSelector_TryRestore_Dict(lItemControlSpecificationArray)
                try:
                    lRPAApplication.connect(**lItemControlSpecificationArray[0])
                except Exception as e:
                    lRPAApplication = None
            if lRPAApplication is not None:
                # lTempObject=lRPAApplication.window(**lItemControlSpecificationArray[0])
                # Adjusted due to a misunderstanding of the structure
                lTempObject = lRPAApplication
                # Normalize the array for object selection (drop conflicting keys)
                lItemControlSpecificationArray = UIOSelector_SearchUIONormalize_UIOSelector(
                    lItemControlSpecificationOriginArray)
                # Walk level by level down into the object hierarchy
                for lWindowSpecification in lItemControlSpecificationArray[0:]:
                    lTempObject = lTempObject.window(**lWindowSpecification)
                # Append the object to the result list
                lResultList.append(lTempObject)
    return lResultList
##################################################################################################
# inControlSpecificationArray - List of dict, dict in pywinauto.find_windows notation
# Backend selection - attribute "backend" ("win32" || "uia") in 1-st list element
# return process application object
# old name - None
def PWASpecification_Get_PWAApplication(inControlSpecificationArray):
    """
    Connect to the application described by the specification and return the
    pywinauto Application object.
    Backend selection - attribute "backend" ("win32" || "uia") in the 1-st list element.
    :param inControlSpecificationArray: List of dict, dict in pywinauto.find_windows notation
    :return: process application object, or None when the connection failed
    """
    # Work on a deep copy so the caller's specification is not mutated.
    # (Removed the dead self-assignment and the unused origin/result locals
    # that the previous version carried over from PWASpecification_Get_UIO.)
    inControlSpecificationArray = copy.deepcopy(inControlSpecificationArray)
    # Backend detection; the "backend" key is consumed, not passed to pywinauto.
    lBackend = mDefaultPywinautoBackend
    if "backend" in inControlSpecificationArray[0]:
        lBackend = inControlSpecificationArray[0]["backend"]
        inControlSpecificationArray[0].pop("backend")
    # Normalize the selector for the process lookup.
    inControlSpecificationArray = UIOSelector_SearchProcessNormalize_UIOSelector(inControlSpecificationArray)
    lTempObject = None
    if len(inControlSpecificationArray) > 0:
        lRPAApplication = pywinauto.Application(backend=lBackend)
        try:
            lRPAApplication.connect(**inControlSpecificationArray[0])
        except Exception:
            # First attempt failed - try to restore the window and reconnect once.
            UIOSelector_TryRestore_Dict(inControlSpecificationArray)
            try:
                lRPAApplication.connect(**inControlSpecificationArray[0])
            except Exception:
                lRPAApplication = None
        if lRPAApplication is not None:
            # lTempObject=lRPAApplication.window(**inControlSpecificationArray[0])
            # Adjusted due to a misunderstanding of the structure
            lTempObject = lRPAApplication
    return lTempObject
###########################################################################################################
# inElementSpecificationList = UIOSelector (see description on the top of the document)
# result = pywinauto element wrapper instance or None
# old name - AutomationSearchMouseElement
def UIOSelector_SearchChildByMouse_UIO(inElementSpecification):
"""
UIOSelector (see description on the top of the document)
#old name - AutomationSearchMouseElement
:param inElementSpecification: | |
4)
for i in range(len(values)):
buffer[(i*4): (i*4+4)] = struct.pack('>f',values[i])
return buffer
def DoubleArrayTransByte(self, values ):
'''double数组变量转化缓存数据,需要传入double数组 -> bytearray'''
if (values == None) : return None
buffer = bytearray(len(values) * 8)
for i in range(len(values)):
buffer[(i*8): (i*8+8)] = struct.pack('>d',values[i])
return buffer
class ReverseWordTransform(ByteTransform):
    '''Data transform class that swaps the two bytes inside every 16-bit word.'''
    def __init__(self):
        '''Initialization: reset DataFormat to ABCD.'''
        self.DataFormat = DataFormat.ABCD
    # Class-level flag: when True, strings are also byte-swapped per word.
    IsStringReverse = False
    def ReverseBytesByWord( self, buffer, index, length ):
        '''Copy `length` bytes starting at `index` and swap each byte pair -> bytearray'''
        if buffer == None: return None
        data = self.TransByteArray(buffer,index,length)
        for i in range(len(data)//2):
            data[i*2+0],data[i*2+1]= data[i*2+1],data[i*2+0]
        return data
    def ReverseAllBytesByWord( self, buffer ):
        '''Swap each byte pair of the whole buffer -> bytearray'''
        return self.ReverseBytesByWord(buffer,0,len(buffer))
    def TransInt16( self, buffer, index ):
        '''Extract a short from the buffer.
        Word-swapping then unpacking little-endian ('<h') decodes the
        original big-endian 16-bit value.'''
        data = self.ReverseBytesByWord(buffer,index,2)
        return struct.unpack('<h',data)[0]
    def TransUInt16(self, buffer, index ):
        '''Extract an ushort from the buffer (same swap trick as TransInt16).'''
        data = self.ReverseBytesByWord(buffer,index,2)
        return struct.unpack('<H',data)[0]
    def TransString( self, buffer, index, length, encoding ):
        '''Extract a string from the buffer using the given encoding;
        byte-swaps per word first when IsStringReverse is set.'''
        data = self.TransByteArray(buffer,index,length)
        if self.IsStringReverse:
            return self.ReverseAllBytesByWord(data).decode(encoding)
        else:
            return data.decode(encoding)
    def Int16ArrayTransByte(self, values ):
        '''Convert a short array into buffer data (parent encoding, then word swap).'''
        buffer = super().Int16ArrayTransByte(values)
        return self.ReverseAllBytesByWord(buffer)
    def UInt16ArrayTransByte(self, values ):
        '''Convert an ushort array into buffer data (parent encoding, then word swap).'''
        buffer = super().UInt16ArrayTransByte(values)
        return self.ReverseAllBytesByWord(buffer)
    def StringTransByte(self, value, encoding ):
        '''Convert a string into buffer data using the given encoding.
        NOTE(review): BytesArrayExpandToLengthEven calls .append, which fails
        on the immutable bytes returned by encode() when the encoded length
        is odd - confirm callers only hit even-length encodings.'''
        buffer = value.encode(encoding)
        buffer = SoftBasic.BytesArrayExpandToLengthEven(buffer)
        if self.IsStringReverse:
            return self.ReverseAllBytesByWord( buffer )
        else:
            return buffer
class ByteTransformHelper:
    '''Static helper methods shared by all data transform classes.
    Each GetXxxResultFromBytes method converts a raw byte OperateResult into
    a typed OperateResult; the previously duplicated try/except bodies are
    factored into a single private helper.'''
    @staticmethod
    def _TransResultFromBytes( result, trans ):
        '''Internal: apply `trans` to result.Content when `result` succeeded.
        Returns a success result with the converted value, a failed copy of
        `result`, or a failure describing the conversion error.'''
        try:
            if result.IsSuccess:
                return OperateResult.CreateSuccessResult(trans(result.Content))
            else:
                return OperateResult.CreateFailedResult(result)
        except Exception as ex:
            # Runtime error text kept byte-for-byte from the original.
            return OperateResult( msg = "数据转化失败,源数据:" + SoftBasic.ByteToHexString( result.Content ) + " 消息:" + str(ex))
    @staticmethod
    def GetBoolResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into a bool OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransBool(c, 0))
    @staticmethod
    def GetByteResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into a byte OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransByte(c, 0))
    @staticmethod
    def GetInt16ResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into an int16 OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransInt16(c, 0))
    @staticmethod
    def GetUInt16ResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into an uint16 OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransUInt16(c, 0))
    @staticmethod
    def GetInt32ResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into an int32 OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransInt32(c, 0))
    @staticmethod
    def GetUInt32ResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into an uint32 OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransUInt32(c, 0))
    @staticmethod
    def GetInt64ResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into an int64 OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransInt64(c, 0))
    @staticmethod
    def GetUInt64ResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into an uint64 OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransUInt64(c, 0))
    @staticmethod
    def GetSingleResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into a float OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransSingle(c, 0))
    @staticmethod
    def GetDoubleResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into a double OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(result, lambda c: byteTransform.TransDouble(c, 0))
    @staticmethod
    def GetStringResultFromBytes( result, byteTransform ):
        '''Convert the raw OperateResult into an ascii string OperateResult.'''
        return ByteTransformHelper._TransResultFromBytes(
            result, lambda c: byteTransform.TransString(c, 0, len(c), 'ascii'))
class DeviceAddressBase:
    '''Base class holding the parsed numeric address shared by all device
    communication classes.'''
    # Default address before AnalysisAddress has been called.
    Address = 0
    def AnalysisAddress( self, address ):
        '''Parse the textual address and store it on the instance as an int.'''
        self.Address = int(address)
class SoftBasic:
    '''Basic runtime helper methods used across the library.'''
    @staticmethod
    def GetSizeDescription(size):
        '''Return a human-readable text description of the given byte size.
        NOTE: thresholds use powers of 1000 while the division uses 1024,
        kept as-is for compatibility with the original output.'''
        if size < 1000:
            return str(size) + " B"
        elif size < (1000 * 1000):
            return '{:.2f}'.format(float(size) / 1024) + " Kb"
        elif size < (1000 * 1000 * 1000):
            return '{:.2f}'.format(float(size) / 1024 / 1024) + " Mb"
        else:
            return '{:.2f}'.format(float(size) / 1024 / 1024 / 1024) + " Gb"
    @staticmethod
    def ByteToHexString(inBytes, segment=' '):
        '''Render a byte sequence as upper-case hex text joined by `segment`
        (no separator when segment is None).'''
        str_list = ['{:02X}'.format(byte) for byte in inBytes]
        if segment is not None:
            return segment.join(str_list)
        return ''.join(str_list)
    @staticmethod
    def ByteToBoolArray(InBytes, length):
        '''Extract `length` bool values from a byte array, LSB first within
        each byte; the length is clamped to the available bits.'''
        if InBytes is None:
            return None
        length = min(length, len(InBytes) * 8)
        # Bit mask computed directly instead of the old 8-way if/elif chain.
        return [(InBytes[i // 8] >> (i % 8)) & 1 == 1 for i in range(length)]
    @staticmethod
    def BoolArrayToByte(array):
        '''Pack a bool array into a bytearray, LSB first within each byte.'''
        if array is None:
            return None
        buffer = bytearray((len(array) + 7) // 8)
        for i, flag in enumerate(array):
            if flag:
                buffer[i // 8] += 1 << (i % 8)
        return buffer
    @staticmethod
    def HexStringToBytes(hex):
        '''Convert a hex string into bytes. (Parameter name `hex` shadows the
        builtin but is kept for keyword-argument compatibility.)'''
        return bytes.fromhex(hex)
    @staticmethod
    def BytesArrayExpandToLengthEven(array):
        '''Expand byte data to an even length by appending one zero byte.
        BUG FIX: the previous .append call raised AttributeError on
        immutable bytes input; bytearrays are still padded in place.'''
        if len(array) % 2 == 1:
            if isinstance(array, bytearray):
                array.append(0)
            else:
                array = array + b'\x00'
        return array
    @staticmethod
    def IsTwoBytesEquel(b1, start1, b2, start2, length):
        '''Check whether two byte regions of the given length are identical.'''
        if b1 is None or b2 is None:
            return False
        for ii in range(length):
            if b1[ii + start1] != b2[ii + start2]:
                return False
        return True
    @staticmethod
    def TokenToBytes(token):
        '''Convert a uuid token into the unified byte layout shared with the
        Java and C# implementations (first three groups byte-reversed).'''
        buffer = bytearray(token.bytes)
        buffer[0], buffer[1], buffer[2], buffer[3] = buffer[3], buffer[2], buffer[1], buffer[0]
        buffer[4], buffer[5] = buffer[5], buffer[4]
        buffer[6], buffer[7] = buffer[7], buffer[6]
        return buffer
    @staticmethod
    def ArrayExpandToLength(value, length):
        '''Expand the array to the specified length (zero padded).
        NOTE(review): when len(value) >= length the slice assignment makes
        the result len(value) long rather than truncating to `length` -
        confirm this is the intended behavior before changing it.'''
        buffer = bytearray(length)
        if len(value) >= length:
            buffer[0:] = value[0:len(value)]
        else:
            buffer[0:len(value)] = value
        return buffer
    @staticmethod
    def ArrayExpandToLengthEven(value):
        '''Expand the array to an even length by appending one zero byte.
        BUG FIX: the previous version built the padded buffer but returned
        the original odd-length `value` instead.'''
        if len(value) % 2 == 0:
            return value
        buffer = bytearray(len(value) + 1)
        buffer[0:len(value)] = value
        return buffer
    @staticmethod
    def StringToUnicodeBytes(value):
        '''Encode the string as UTF-16, stripping a leading byte-order mark.'''
        if value is None:
            return bytearray(0)
        buffer = value.encode('utf-16')
        # Drop the 2-byte BOM that the 'utf-16' codec prepends.
        if len(buffer) > 1 and buffer[0] == 255 and buffer[1] == 254:
            buffer = buffer[2:len(buffer)]
        return buffer
    @staticmethod
    def GetUniqueStringByGuidAndRandom():
        '''Return a unique random string made of a GUID plus a random number.
        NOTE(review): the original docstring claimed a 4-digit random suffix,
        but randint(12, 20) yields two digits - confirm the intended range.'''
        return SoftBasic.ByteToHexString(SoftBasic.TokenToBytes(uuid.uuid1()), None) + str(random.randint(12, 20))
class HslSecurity:
    '''Lightweight XOR scrambling used only inside this assembly.'''
    @staticmethod
    def ByteEncrypt( enBytes ):
        '''Encrypt by XOR-ing every byte with 0xB5; None passes through.'''
        if enBytes is None:
            return None
        return bytearray(b ^ 0xB5 for b in enBytes)
    @staticmethod
    def ByteDecrypt( deBytes ):
        '''Decrypt: XOR is symmetric, so encryption applied twice restores
        the original bytes.'''
        return HslSecurity.ByteEncrypt(deBytes)
class SoftZipped:
    '''Helpers for gzip-compressing and decompressing byte payloads.'''
    @staticmethod
    def CompressBytes( inBytes ):
        '''gzip-compress the given bytes; None passes through unchanged.'''
        return gzip.compress(inBytes) if inBytes is not None else None
    @staticmethod
    def Decompress( inBytes ):
        '''gzip-decompress the given bytes; None passes through unchanged.'''
        return gzip.decompress(inBytes) if inBytes is not None else None
class HslProtocol:
    '''Protocol opcodes and packet builders used by this library's own
    network communication.'''
    @staticmethod
    def HeadByteLength():
        '''Every network transfer instruction header is fixed at 32 bytes.'''
        return 32
    @staticmethod
    def ProtocolBufferSize():
        '''Buffer pool size used in all network communication.'''
        return 1024
    @staticmethod
    def ProtocolCheckSecends():
        '''Opcode used by the heartbeat routine.'''
        return 1
    @staticmethod
    def ProtocolClientQuit():
        '''Client quit message.'''
        return 2
    @staticmethod
    def ProtocolClientRefuseLogin():
        '''Login refused because the client limit has been reached.'''
        return 3
    @staticmethod
    def ProtocolClientAllowLogin():
        '''Login allowed.'''
        return 4
    @staticmethod
    def ProtocolUserString():
        '''Payload is plain text.'''
        return 1001
    @staticmethod
    def ProtocolUserBytes():
        '''Payload is a plain byte array.'''
        return 1002
    @staticmethod
    def ProtocolUserBitmap():
        '''Payload is plain image data.'''
        return 1003
    @staticmethod
    def ProtocolUserException():
        '''Payload describes an exception; the string is the exception message.'''
        return 1004
    @staticmethod
    def ProtocolFileDownload():
        '''Opcode requesting a file download.'''
        return 2001
    @staticmethod
    def ProtocolFileUpload():
        '''Opcode requesting a file upload.'''
        return 2002
    @staticmethod
    def ProtocolFileDelete():
        '''Opcode requesting a file deletion.'''
        return 2003
    @staticmethod
    def ProtocolFileCheckRight():
        '''File verification succeeded.'''
        return 2004
    @staticmethod
    def ProtocolFileCheckError():
        '''File verification failed.'''
        return 2005
    @staticmethod
    def ProtocolFileSaveError():
        '''Saving the file failed.'''
        return 2006
    @staticmethod
    def ProtocolFileDirectoryFiles():
        '''Opcode requesting the list of files in a directory.'''
        return 2007
    @staticmethod
    def ProtocolFileDirectories():
        '''Opcode requesting the list of subdirectories.'''
        return 2008
    @staticmethod
    def ProtocolProgressReport():
        '''Opcode for progress reports.'''
        return 2009
    @staticmethod
    def ProtocolNoZipped():
        '''Payload bytes are not compressed.'''
        return 3001
    @staticmethod
    def ProtocolZipped():
        '''Payload bytes are compressed.'''
        return 3002
    @staticmethod
    def CommandBytesBase( command, customer, token, data ):
        '''Build the final transfer packet; every outgoing message is produced here.
        Layout: [0:4] command, [4:8] customer, [8:12] zipped flag,
        [12:28] token, [28:32] payload length, [32:] payload.'''
        _zipped = HslProtocol.ProtocolNoZipped()
        buffer = None
        _sendLength = 0
        if data == None:
            buffer = bytearray(HslProtocol.HeadByteLength())
        else:
            # Payloads are XOR-encrypted; those over 100 KiB are also gzipped.
            data = HslSecurity.ByteEncrypt( data )
            if len(data) > 102400:
                data = SoftZipped.CompressBytes( data )
                _zipped = HslProtocol.ProtocolZipped()
            buffer = bytearray( HslProtocol.HeadByteLength() + len(data) )
            _sendLength = len(data)
        buffer[0:4] = struct.pack( '<i', command )
        buffer[4:8] = struct.pack( '<i', customer )
        buffer[8:12] = struct.pack( '<i', _zipped)
        buffer[12:28] = SoftBasic.TokenToBytes(token)
        buffer[28:32] = struct.pack( '<i', _sendLength)
        if _sendLength>0:
            buffer[32:_sendLength+32]=data
        return buffer
    @staticmethod
    def CommandAnalysis( head, content ):
        '''Decode a received payload: decompress first (when the header's
        zipped flag at bytes [8:12] is set), then XOR-decrypt.'''
        if content != None:
            _zipped = struct.unpack('<i', head[8:12])[0]
            if _zipped == HslProtocol.ProtocolZipped():
                content = SoftZipped.Decompress( content )
            # ByteEncrypt is a symmetric XOR, so it also decrypts.
            return HslSecurity.ByteEncrypt(content)
        return bytearray(0)
    @staticmethod
    def CommandBytes( customer, token, data ):
        '''Build a user-bytes packet (opcode ProtocolUserBytes) with header.'''
        return HslProtocol.CommandBytesBase( HslProtocol.ProtocolUserBytes(), customer, token, data )
    @staticmethod
    def CommandString( customer, token, data ):
        '''Build a user-string packet (opcode ProtocolUserString) with header.'''
        if data == None:
            return HslProtocol.CommandBytesBase( HslProtocol.ProtocolUserString(), customer, token, None )
        else:
            buffer = SoftBasic.StringToUnicodeBytes(data)
            return HslProtocol.CommandBytesBase( HslProtocol.ProtocolUserString(), customer, token, buffer )
class NetworkBase:
    '''Core of the network base classes: raw receive/send primitives and
    message framing shared by the higher-level network classes.'''
    # Communication token checked against every packet header.
    Token = uuid.UUID('{00000000-0000-0000-0000-000000000000}')
    # Shared socket used by long-connection subclasses.
    CoreSocket = socket.socket()
    def Receive(self,socket,length):
        '''Receive exactly `length` bytes from the socket.
        :return: success OperateResult carrying a bytearray, or a failed
            result whose Message holds the error text.'''
        totle = 0
        data = bytearray()
        try:
            while totle < length:
                chunk = socket.recv(length-totle)
                if not chunk:
                    # Peer closed the connection before all bytes arrived;
                    # previously this spun forever on empty reads.
                    raise RuntimeError('socket closed: received %d of %d bytes' % (totle, length))
                data.extend(chunk)
                # BUG FIX: was "totle += len(data)" which added the whole
                # cumulative buffer each pass, so multi-chunk reads stopped
                # early with fewer than `length` bytes.
                totle += len(chunk)
            return OperateResult.CreateSuccessResult(data)
        except Exception as e:
            result = OperateResult()
            result.Message = str(e)
            return result
    def Send(self,socket,data):
        '''Send the whole message to the socket, returning when complete.'''
        try:
            # BUG FIX: socket.send may transmit only part of the buffer;
            # sendall matches the documented "send until complete" contract.
            socket.sendall(data)
            return OperateResult.CreateSuccessResult()
        except Exception as e:
            return OperateResult( msg = str(e))
    def CreateSocketAndConnect(self,ipAddress,port,timeout = 10000):
        '''Create a new socket and connect it to the remote address.
        NOTE(review): the `timeout` parameter (milliseconds, default 10s) is
        currently never applied to the socket - confirm whether callers
        expect a blocking connect before wiring it up.'''
        try:
            socketTmp = socket.socket()
            socketTmp.connect((ipAddress,port))
            return OperateResult.CreateSuccessResult(socketTmp)
        except Exception as e:
            return OperateResult( msg = str(e))
    def ReceiveMessage( self, socket, timeOut, netMsg ):
        '''Receive one complete framed message (header + content) into netMsg.'''
        result = OperateResult()
        headResult = self.Receive( socket, netMsg.ProtocolHeadBytesLength() )
        if headResult.IsSuccess == False:
            result.CopyErrorFromOther(headResult)
            return result
        netMsg.HeadBytes = headResult.Content
        if netMsg.CheckHeadBytesLegal( SoftBasic.TokenToBytes(self.Token) ) == False:
            # Token check failed - drop the connection.
            if socket != None: socket.close()
            result.Message = StringResources.TokenCheckFailed()
            return result
        contentLength = netMsg.GetContentLengthByHeadBytes( )
        if contentLength == 0:
            netMsg.ContentBytes = bytearray(0)
        else:
            contentResult = self.Receive( socket, contentLength )
            if contentResult.IsSuccess == False:
                result.CopyErrorFromOther( contentResult )
                return result
            netMsg.ContentBytes = contentResult.Content
        if netMsg.ContentBytes == None: netMsg.ContentBytes = bytearray(0)
        result.Content = netMsg
        result.IsSuccess = True
        return result
class NetworkDoubleBase(NetworkBase):
'''支持长连接,短连接两个模式的通用客户端基类'''
byteTransform = ByteTransform()
ipAddress = "127.0.0.1"
port = 10000
isPersistentConn = False
isSocketError = False
receiveTimeOut = 10000
isUseSpecifiedSocket = False
interactiveLock = threading.Lock()
iNetMessage = INetMessage()
def SetPersistentConnection( self ):
'''在读取数据之前可以调用本方法将客户端设置为长连接模式,相当于跳过了ConnectServer的结果验证,对异形客户端无效'''
self.isPersistentConn = True
def ConnectServer( self ):
'''切换短连接模式到长连接模式,后面的每次请求都共享一个通道'''
self.isPersistentConn = True
result = OperateResult( )
# 重新连接之前,先将旧的数据进行清空
if self.CoreSocket != None:
self.CoreSocket.close()
rSocket = self.CreateSocketAndInitialication( )
if rSocket.IsSuccess == False:
self.isSocketError = True
rSocket.Content = None
result.Message = rSocket.Message
else:
self.CoreSocket = rSocket.Content
result.IsSuccess = True
return result
def ConnectClose( self ):
'''在长连接模式下,断开服务器的连接,并切换到短连接模式'''
result = OperateResult( )
self.isPersistentConn = False
self.interactiveLock.acquire()
# 额外操作
result = self.ExtraOnDisconnect( self.CoreSocket )
# 关闭信息
if self.CoreSocket != None : self.CoreSocket.close()
self.CoreSocket = None
self.interactiveLock.release( )
return result
# 初始化的信息方法和连接结束的信息方法,需要在继承类里面进行重新实现
def InitializationOnConnect( self, socket ):
'''连接上服务器后需要进行的初始化操作'''
return OperateResult.CreateSuccessResult()
def ExtraOnDisconnect( self, socket ):
'''在将要和服务器进行断开的情况下额外的操作,需要根据对应协议进行重写'''
return OperateResult.CreateSuccessResult()
def GetAvailableSocket( self ):
'''获取本次操作的可用的网络套接字'''
if self.isPersistentConn :
# 如果是异形模式
if self.isUseSpecifiedSocket :
if self.isSocketError:
return OperateResult( msg = '连接不可用' )
else:
return OperateResult.CreateSuccessResult( self.CoreSocket )
else:
# 长连接模式
if self.isSocketError or self.CoreSocket == None :
connect = self.ConnectServer( )
if connect.IsSuccess == False:
self.isSocketError = True
return OperateResult( msg = connect.Message )
else:
self.isSocketError = False
return OperateResult.CreateSuccessResult( self.CoreSocket )
else:
return OperateResult.CreateSuccessResult( self.CoreSocket )
else:
# 短连接模式
return self.CreateSocketAndInitialication( )
def CreateSocketAndInitialication( self ):
'''连接并初始化网络套接字'''
result = self.CreateSocketAndConnect( self.ipAddress, self.port, 10000 )
if result.IsSuccess:
# 初始化
initi = self.InitializationOnConnect( result.Content )
if initi.IsSuccess == False:
if result.Content !=None : result.Content.close( )
result.IsSuccess = initi.IsSuccess
result.CopyErrorFromOther( initi )
return result
def ReadFromCoreSocketServer( self, socket, send ):
'''在其他指定的套接字上,使用报文来通讯,传入需要发送的消息,返回一条完整的数据指令'''
read = self.ReadFromCoreServerBase( socket, send )
if read.IsSuccess == False: return OperateResult.CreateFailedResult( read )
# 拼接结果数据
Content = bytearray(len(read.Content1) + len(read.Content2))
if len(read.Content1) > | |
if has_super:
count -= w_super.total_field_cnt
for i in range(offset, offset + count):
write_loop(self._ref(i), port, env)
port.write(" ")
    def write(self, port, env):
        '''Write this struct to `port` in write mode.
        Dispatch: prefab types use prefab notation; fully opaque types print
        as "#<name>"; a prop:custom-write property delegates to the user's
        write procedure; otherwise fields are written as "(name field ...)".'''
        w_type = self.struct_type()
        typename = w_type.name.utf8value
        if w_type.isprefab:
            self.write_prefab(port, env)
        elif w_type.all_opaque():
            port.write("#<%s>" % typename)
        else:
            w_val = w_type.read_prop(w_prop_custom_write)
            if w_val is not None:
                pycketconfig = env.toplevel_env()._pycketconfig
                assert isinstance(w_val, values_vector.W_Vector)
                # Slot 0 of the property vector holds the write procedure.
                w_write_proc = w_val.ref(0)
                # #t for write mode, #f for display mode,
                # or 0 or 1 indicating the current quoting depth for print mode
                mode = values.w_true
                w_write_proc.call_interpret([self, port, mode])
            else:
                port.write("(%s " % typename)
                self.write_values(port, w_type, env)
                port.write(")")
    def tostring(self):
        '''Render this struct as a string, mirroring write()'s dispatch:
        prefab notation, "#<name>" for fully opaque types, or
        "(name field ...)" otherwise.'''
        w_type = self.struct_type()
        typename = w_type.name.utf8value
        if w_type.isprefab:
            return self.tostring_prefab()
        elif w_type.all_opaque():
            # import pdb;pdb.set_trace()
            # ret_str = "#<%s" % typename
            # for i in range(0, self._get_size_list()):
            #     ret_str += ":%s" % self._ref(i).tostring()
            # ret_str += ">"
            #return ret_str
            return "#<%s>" % typename
        else:
            fields = [None] * w_type.total_field_cnt
            self.tostring_values(fields=fields, w_type=w_type, is_super=False)
            # NOTE(review): custom_huh is computed but never used here -
            # prop:custom-write is only honored in write(), not in tostring().
            custom_huh = w_type.read_prop(w_prop_custom_write)
            return "(%s %s)" % (typename, self._string_from_list(fields))
"""
This method generates a new structure class with inline stored immutable #f
values on positions from constant_false array. If a new structure instance get
immutable #f fields on the same positions, this class will be used, thereby
reducing its size.
"""
def generate_struct_class(constant_false):
    '''Build a W_Struct subclass that elides immutable #f fields.
    `constant_false` lists the field indices stored implicitly as w_false
    instead of occupying a storage slot, shrinking each instance.'''
    if not len(constant_false):
        return W_Struct
    unrolling_constant_false = unrolling_iterable(constant_false)
    clsname = 'W_ImmutableBooleanStruct_' + \
        '_'.join([str(i) for i in constant_false])
    @jit.unroll_safe
    def _ref(self, i):
        # Map the logical field index to the reduced storage index by
        # skipping the elided positions; elided indices read as #f directly.
        pos = i
        for j in unrolling_constant_false:
            if i > j:
                pos -= 1
            elif i == j:
                return values.w_false
        # original index
        immutable = self.struct_type().is_immutable_field_index(i)
        # altered index
        w_res = self._get_list(pos)
        if not immutable:
            # Mutable fields are boxed in W_Cell at construction time.
            assert isinstance(w_res, values.W_Cell)
            w_res = w_res.get_val()
        return w_res
    @jit.unroll_safe
    def _set(self, i, val):
        # Elided fields are immutable #f and are never set, so only the
        # index shift matters here.
        pos = i
        for j in unrolling_constant_false:
            if i > j:
                pos -= 1
        # altered index
        w_cell = self._get_list(pos)
        assert isinstance(w_cell, values.W_Cell)
        w_cell.set_val(val)
    cls = type(clsname, (W_Struct,), {'_ref':_ref, '_set': _set})
    cls = inline_small_list(sizemax=min(11,CONST_FALSE_SIZE),
                            immutable=True,
                            attrname="storage",
                            unbox_num=True)(cls)
    return cls
# Size of the index set eligible for #f elision. Every non-empty subset of
# range(CONST_FALSE_SIZE) gets its own generated class below, so the number
# of classes grows exponentially with this constant.
if config.immutable_boolean_field_elision:
    CONST_FALSE_SIZE = 5 # the complexity grows exponentially
else:
    CONST_FALSE_SIZE = 0 # disabled
# Pre-generate one specialized class per subset, ordered by subset size and
# then lexicographically - lookup_struct_class relies on exactly this order.
struct_classes = []
for i in range(0, CONST_FALSE_SIZE):
    for comb in itertools.combinations(range(CONST_FALSE_SIZE), i+1):
        struct_classes.append(generate_struct_class(comb))
struct_class_iter = unrolling_iterable(enumerate(struct_classes))
@jit.elidable
def fac(n):
    '''Factorial of n; elidable so the JIT can constant-fold repeated calls.'''
    return n * fac(n-1) if n > 1 else 1
@jit.elidable
def ncr(n,r):
    '''Binomial coefficient n-choose-r, with 0 for n == 0.
    Uses "/", which is integer division under RPython (Python 2 semantics).'''
    if n == 0:
        return 0
    return fac(n) / fac(r) / fac(n-r)
@jit.unroll_safe
def lookup_struct_class(constant_false):
    '''Find the pre-generated elision class for the given sorted index list.
    Computes the subset's rank in the (size-major, then lexicographic) order
    used when struct_classes was built, then walks the unrolled class list.
    Falls back to plain W_Struct when elision does not apply.'''
    if CONST_FALSE_SIZE and constant_false and constant_false[-1] < CONST_FALSE_SIZE:
        n = CONST_FALSE_SIZE
        pos = 0
        # offset of combinations with smaller amount of fields
        for r in range(1, len(constant_false)):
            pos += ncr(n, r)
        # and the precise position
        r = len(constant_false)
        last_idx = 0
        for idx in constant_false:
            pos += ncr(n, r) - ncr(n-idx+last_idx, r)
            n -= idx - last_idx + 1
            r -= 1
            last_idx = idx + 1
        # lookup class by its position
        for i, cls in struct_class_iter:
            if i == pos:
                return cls
    return W_Struct
@jit.unroll_safe
def reduce_field_values(field_values, constant_false):
    '''Drop the values at the elided indices (`constant_false`), preserving
    the order of the remaining values; the result is the storage for a
    reduced (elision) struct class.'''
    reduced_field_values = [None] * (len(field_values) - len(constant_false))
    k = 0
    for i, val in enumerate(field_values):
        # Linear membership scan kept explicit for JIT unrolling.
        found = False
        for j in constant_false:
            if j == i:
                found = True
        if not found:
            reduced_field_values[k] = val
            k += 1
    return reduced_field_values
@jit.unroll_safe
def splice_array(array, index, insertion):
    '''Return a new list equal to `array` with `insertion` spliced in at
    `index`; elements formerly at `index` shift right by len(insertion).'''
    array_len = len(array)
    insertion_len = len(insertion)
    new_array = [None] * (array_len + insertion_len)
    for pre_index in range(index):
        new_array[pre_index] = array[pre_index]
    for insert_index in range(insertion_len):
        new_array[index + insert_index] = insertion[insert_index]
    for post_index in range(index, array_len):
        new_array[post_index + insertion_len] = array[post_index]
    return new_array
@jit.unroll_safe
def construct_struct_final(struct_type, field_values, env, cont):
    '''Allocate the struct instance once every field value is known.
    Mutable fields are boxed in W_Cell; immutable #f fields may be elided
    via a specialized class when the elision optimization is enabled.'''
    from pycket.interpreter import return_value
    assert len(field_values) == struct_type.total_field_cnt
    if CONST_FALSE_SIZE:
        constant_false = []
    else:
        constant_false = None
    for i, value in enumerate(field_values):
        if not struct_type.is_immutable_field_index(i):
            # Mutable field: box it so _set can update it in place.
            value = values.W_Cell(value)
            field_values[i] = value
        elif CONST_FALSE_SIZE and value is values.w_false:
            constant_false.append(i)
    cls = lookup_struct_class(constant_false)
    if cls is not W_Struct:
        # A specialized class stores the #f fields implicitly.
        field_values = reduce_field_values(field_values, constant_false)
    result = cls.make(field_values, struct_type)
    return return_value(result, env, cont)
def construct_struct_loop(init_type, struct_type, field_values, env, cont):
    '''Walk the struct-type inheritance chain, running each level's guard
    (when present) before splicing in that level's auto fields.'''
    from pycket.interpreter import return_multi_vals
    struct_type = jit.promote(struct_type)
    if not isinstance(struct_type, W_StructType):
        # Past the root of the hierarchy: all fields collected, allocate.
        return construct_struct_final(init_type, field_values, env, cont)
    auto_field_start = struct_type.total_init_field_cnt
    guard = struct_type.guard
    if guard is values.w_false:
        return construct_struct_loop_body(init_type, struct_type, field_values,
                                          auto_field_start, env, cont)
    assert auto_field_start >= 0
    # The guard receives the init-field values plus the instantiated type name.
    typename = init_type.name
    args = field_values[:auto_field_start] + [typename]
    cont = receive_guard_values_cont(init_type, struct_type, field_values,
                                     auto_field_start, env, cont)
    return guard.call(args, env, cont)
def construct_struct_loop_body(init_type, struct_type, field_values,
                               auto_field_start, env, cont):
    '''Splice this level's auto-field values into the flat value array, then
    recurse into the super type.'''
    # Figure out where in the array the auto values start for this struct type.
    # Recall, the struct is built from the bottom up in the inheritance heirarchy.
    auto_values = struct_type.auto_values
    field_values = splice_array(field_values, auto_field_start, auto_values)
    super_type = struct_type.super
    return construct_struct_loop(init_type, super_type, field_values, env, cont)
@continuation
def receive_guard_values_cont(init_type, struct_type, field_values,
                              auto_field_start, env, cont, _vals):
    '''Continuation invoked with the guard procedure's results: replace the
    init-field values with the (possibly coerced) values the guard returned,
    then resume construction at this struct-type level.
    Raises SchemeException when the guard returns the wrong number of values.'''
    # BUG FIX: this was an assert tagged "XXX Turn me into an exception" -
    # a misbehaving guard is a user-level error, not an interpreter invariant.
    if _vals.num_values() != auto_field_start:
        raise SchemeException(
            "struct type guard returned %d values but %d were expected" %
            (_vals.num_values(), auto_field_start))
    for i in range(auto_field_start):
        field_values[i] = _vals.get_value(i)
    return construct_struct_loop_body(init_type, struct_type, field_values,
                                      auto_field_start, env, cont)
class W_StructConstructor(values.W_Procedure):
    """Procedure that instantiates structs of one fixed struct type."""
    _attrs_ = _immutable_fields_ = ["type"]
    import_from_mixin(SingleResultMixin)

    def __init__(self, type):
        self.type = type

    @make_call_method(simple=False)
    def call_with_extra_info(self, args, env, cont, app):
        # Check argument count against the type's constructor arity, then
        # start the guard/auto-value loop over the inheritance chain.
        type = jit.promote(self.type)
        arity = type.constructor_arity
        if not arity.arity_includes(len(args)):
            raise SchemeException("%s: wrong number of arguments; expected %s but got %s" % (self.tostring(),arity.tostring(), len(args)))
        return construct_struct_loop(type, type, args, env, cont)

    def get_arity(self, promote=False):
        if promote:
            self = jit.promote(self)
        return self.type.constructor_arity

    def tostring(self):
        return "#<procedure:%s>" % self.type.name.variable_name()
class W_StructPredicate(values.W_Procedure):
    """Procedure answering whether a value is an instance of a struct type,
    directly or through a supertype."""
    errorname = "struct-predicate"
    _attrs_ = _immutable_fields_ = ["type"]
    import_from_mixin(SingleResultMixin)

    def __init__(self, type):
        self.type = type

    @make_call_method([values.W_Object])
    @jit.unroll_safe
    def call(self, struct):
        from pycket.impersonators import get_base_object
        # Look through chaperones/impersonators to the underlying value.
        struct = get_base_object(struct)
        if isinstance(struct, W_RootStruct):
            struct_type = struct.struct_type()
            # Walk the supertype chain looking for our type.
            while isinstance(struct_type, W_StructType):
                if struct_type is self.type:
                    return values.w_true
                struct_type = struct_type.super
        return values.w_false

    def get_arity(self, promote=False):
        return Arity.ONE

    def tostring(self):
        return "#<procedure:%s?>" % self.type.name.variable_name()
class W_StructFieldAccessor(values.W_Procedure):
    """Accessor for one fixed field of a struct type: a curried form of
    W_StructAccessor with the field index baked in."""
    errorname = "struct-field-accessor"
    _attrs_ = _immutable_fields_ = ["accessor", "field", "field_name"]
    import_from_mixin(SingleResultMixin)

    def __init__(self, accessor, field, field_name):
        assert isinstance(accessor, W_StructAccessor)
        self.accessor = accessor
        self.field = field  # index relative to accessor.type's first field
        self.field_name = field_name

    def get_absolute_index(self, type):
        # Offset of the accessor's type within the concrete type, plus field.
        return type.get_offset(self.accessor.type) + self.field

    def get_arity(self, promote=False):
        return Arity.ONE

    @make_call_method([values.W_Object], simple=False,
                      name="<struct-field-accessor-method>")
    def call_with_extra_info(self, struct, env, cont, app):
        jit.promote(self)
        return self.accessor.access(struct, self.field, env, cont, app)

    def tostring(self):
        name = self.accessor.type.name.variable_name()
        return "#<procedure:%s-%s>" % (name, self.field_name.variable_name())
class W_StructAccessor(values.W_Procedure):
    """Generic accessor for a struct type, called as (struct, field-index)."""
    errorname = "struct-accessor"
    _attrs_ = _immutable_fields_ = ["type"]
    import_from_mixin(SingleResultMixin)

    def __init__(self, type):
        self.type = type

    def get_arity(self, promote=False):
        return Arity.TWO

    def access(self, struct, field, env, cont, app=None):
        # Translate field (relative to self.type) into an absolute slot of the
        # concrete struct, then read it.
        self = jit.promote(self)
        st = jit.promote(struct.struct_type())
        if st is None:
            # Not a struct at all.
            raise SchemeException("%s got %s" % (self.tostring(), struct.tostring()))
        offset = st.get_offset(self.type)
        if offset == -1:
            # A struct, but not of (a subtype of) our type.
            raise SchemeException("%s: expected a %s but got a %s" % (self.tostring(), self.type.name.tostring(), st.name.tostring()))
        return struct.ref_with_extra_info(field + offset, app, env, cont)

    @make_call_method([values.W_Object, values.W_Fixnum], simple=False,
                      name="<struct-accessor-method>")
    def call_with_extra_info(self, struct, field, env, cont, app):
        return self.access(struct, field.value, env, cont, app)

    def tostring(self):
        return "#<procedure:%s-ref>" % self.type.name.utf8value
class W_StructFieldMutator(values.W_Procedure):
    """Mutator for one fixed field of a struct type: a curried form of
    W_StructMutator with the field index baked in."""
    errorname = "struct-field-mutator"
    _attrs_ = _immutable_fields_ = ["mutator", "field", "field_name"]
    import_from_mixin(SingleResultMixin)

    def __init__ (self, mutator, field, field_name):
        assert isinstance(mutator, W_StructMutator)
        self.mutator = mutator
        self.field = field  # index relative to mutator.type's first field
        self.field_name = field_name

    def get_arity(self, promote=False):
        return Arity.TWO

    def get_absolute_index(self, type):
        return type.get_offset(self.mutator.type) + self.field

    @make_call_method([values.W_Object, values.W_Object], simple=False,
                      name="<struct-field-mutator-method>")
    def call_with_extra_info(self, struct, val, env, cont, app):
        return self.mutator.mutate(struct, self.field, val, env, cont, app)

    def tostring(self):
        # NOTE(review): sibling tostring methods call name.variable_name();
        # this one interpolates the raw name object -- confirm intentional.
        return "#<procedure:%s-%s!>" % (self.mutator.type.name, self.field_name.variable_name())
class W_StructMutator(values.W_Procedure):
    """Generic mutator for a struct type, called as (struct, index, value)."""
    errorname = "struct-mutator"
    _attrs_ = _immutable_fields_ = ["type"]
    import_from_mixin(SingleResultMixin)

    def __init__(self, type):
        self.type = type

    def get_arity(self, promote=False):
        return Arity.THREE

    def mutate(self, struct, field, val, env, cont, app=None):
        # Translate field (relative to self.type) into an absolute slot of the
        # concrete struct, then overwrite it.
        self = jit.promote(self)
        st = jit.promote(struct.struct_type())
        if st is None:
            raise SchemeException("%s got %s" % (self.tostring(), struct.tostring()))
        offset = st.get_offset(self.type)
        if offset == -1:
            # NOTE(review): this message is Racket's use-before-definition
            # error; W_StructAccessor raises a type-mismatch message for the
            # analogous case -- confirm this wording is intended here.
            raise SchemeException("cannot reference an identifier before its definition")
        return struct.set_with_extra_info(field + offset, val, app, env, cont)

    @make_call_method([values.W_Object, values.W_Fixnum, values.W_Object],
                      simple=False, name="<struct-mutator-method>")
    def call_with_extra_info(self, struct, field, val, env, cont, app):
        return self.mutate(struct, field.value, val, env, cont, app)

    def tostring(self):
        return "#<procedure:%s-set!>" % self.type.name
class W_StructProperty(values.W_Object):
    """A struct type property. ``supers`` is a list of pairs whose car is a
    parent property (see isinstance below)."""
    errorname = "struct-type-property"
    _attrs_ = _immutable_fields_ = ["name", "guard", "supers", "can_imp"]

    def __init__(self, name, guard, supers=values.w_null, can_imp=False):
        self.name = name.utf8value
        self.guard = guard
        self.supers = values.from_list(supers)
        self.can_imp = can_imp

    @jit.elidable
    def isinstance(self, prop):
        # True if self is prop, or transitively derives from it via supers.
        if self is prop:
            return True
        for super in self.supers:
            if super.car().isinstance(prop):
                return True
        return False

    def tostring(self):
        return "#<struct-type-property:%s>"%self.name
sym = values.W_Symbol.make
w_prop_object_name = W_StructProperty(sym("prop:object-name"), values.w_false)
w_prop_authentic = W_StructProperty(sym("prop:authentic"), values.w_false)
#FIXME: check if these propeties need guards or not
w_prop_procedure = W_StructProperty(sym("prop:procedure"), values.w_false)
w_prop_checked_procedure = W_StructProperty(sym("prop:checked-procedure"), values.w_false)
w_prop_arity_string = W_StructProperty(sym("prop:arity-string"), values.w_false)
w_prop_incomplete_arity = W_StructProperty(sym("prop:incomplete-arity"), values.w_false)
w_prop_custom_write = W_StructProperty(sym("prop:custom-write"), values.w_false)
w_prop_equal_hash = W_StructProperty(sym("prop:equal+hash"), values.w_false)
w_prop_chaperone_unsafe_undefined = W_StructProperty(sym("prop:chaperone-unsafe-undefined"), values.w_false)
w_prop_set_bang_transformer = W_StructProperty(sym("prop:set!-transformer"), values.w_false)
w_prop_rename_transformer = W_StructProperty(sym("prop:rename-transformer"), values.w_false)
w_prop_expansion_contexts = | |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# -*- mode: Python -*-
'''
lj23layers v0.7.6 for LJ v0.8+
LJ functions (API) for python plugins/clients
"layers" version :
- "PL" has been replaced by "layer"
- "Client"
Each program using this API should declare itself by calling Config (recommended to help manage complexity, but not mandatory)
Config(redisIP, client number, name)
Basic Draw :
- PolyLineOneColor, rPolyLineOneColor, LineTo, Line
- PolyLineRGB, rPolyLineRGB, LineRGBTo, LineRGB
- rgb2int(r,g,b)
- Drawlayer (point list number) : once you stacked all wanted elements, like 2 polylines, send them to lasers.
- DrawDests(): Draw all requested destinations for each layer .
High level draw :
- Text(word, integercolor, layer , xpos, ypos, resize, rotx, roty, rotz) : Display a word
- TextRGB(word, red, green, blue, ...)
- Embedded font1
Laser objects (name and convenient group of parameters for one or several point lists)
- RelativeObject
- FixedObject
"Destinations" : Tell for given Layer a scene/Laser ("destination").
Each Layer can have different destination (i.e to display same stuff on different laser)
OSC and plugins functions :
SendLJ(adress,message) LJ remote control. See commands.py
SendResol(address,message) Send OSC message to Resolume.
WebStatus(message) display message on webui
SendIntensity(laser, intensity)
Sendkpps(laser, kpps)
Ljscene(client) Change scene number in redis keys
Ljlayer(layer) Change layer number in redis keys = laser target.
ClosePlugin(name) Send UI closing info of given plugin
OSCstart() Start the OSC system.
OSCframe() Handle incoming OSC message. Calling the right callback
OSCstop() Properly close the OSC system
OSCping() /ping Answer to LJ pings by sending /pong name
OSCquit() /quit Exit calling script using name in terminal
OSCadddest(layer, scene, laser) Add a destination
OSCdeldest(layer , scene, laser) Delete a destination
OSCobj() /name/obj objectname attribute value for automation
OSCvar() /name/var variablename value for automation
OSCdebug()
** Joystick management is removed. Get it back in todolist **
setup_controls(joystick)
XboxController getLeftHori, getLeftVert, getRightHori, getRightVert, getLeftTrigger, getRightTrigger
Ps3Controller getLeftHori, getLeftVert, getRightHori, getRightVert, getLeftTrigger, getRightTrigger, getUp, getDown, getLeft, getRight, getFire1, getFire2(self):
MySaitekController getLeftHori,getLeftVert, getRightHori,getRightVert, getLeftTrigger,getRightTrigger
MyThrustController getLeftHori, getLeftVert, getRightHori, getRightVert, getLeftTrigger, getRightTrigger
CSLController getLeftHori,getLeftVert,getRightHori, getRightVert,getLeftTrigger,getRightTrigger,getFire1,getFire2
my USB Joystick getUp,getDown,getLeft,getRight,etLeftTrigger, getRightTrigger,getFire1, getFire2
Class management manuals:
https://stackoverflow.com/questions/739882/iterating-over-object-instances-of-a-given-class-in-python
https://stackoverflow.com/questions/8628123/counting-instances-of-a-class
http://effbot.org/pyfaq/how-do-i-get-a-list-of-all-instances-of-a-given-class.htm
LICENCE : CC
<NAME>
'''
import math
import redis
import sys
import weakref
import struct
import numpy as np
import gstt
from multiprocessing import Process, Queue, TimeoutError
is_py2 = sys.version[0] == '2'
if is_py2:
from OSC import OSCServer, OSCClient, OSCMessage
#print ("Importing lj23 and OSC from libs...")
else:
from OSC3 import OSCServer, OSCClient, OSCMessage
#print ("Importing lj23 and OSC3 from libs...")
#redisIP = '127.0.0.1'
#r = redis.StrictRedis(host=redisIP, port=6379, db=0)
ClientNumber = 0
name = "noname"
oscrun = True
point_list = []
layers = [[],[],[],[],[],[],[],[],[],[]]
fft3Groups = [-1,-1,-1,-1]
Dests = dict()
oscIPresol = "127.0.0.1"
oscPORTresol = 7000
# 3D to 2D projection parameters
fov = 256
viewer_distance = 100
'''
Laser "objects"
set a name and convenient group of parameters for one or several point lists
RelativeObject is for point lists around 0,0 with builtin move/rotation.
How to init with object color, xpos,... :
osciObj = lj.RelativeObject('osciObj', True, 255, [], white, red, green,blue,0 , False, centerX , centerY , 1 , Xrot , Yrot , Zrot)
How to use in drawing functions : you're free to use 0, some or all of any laserobject attributes
- draw one or several pointlists with 'A' laserobject color and 'B' laserobject xpos ypos ?
- Change color of 'main' object and all other objects using it will change also
how to change attribute :
osciObj.resize = 2 or /pluginame/change 'OsciObj' 'resize' 2
'''
class RelativeObject:
    """Laser object whose point list is defined around (0, 0), with built-in
    position/rotation/resize parameters (see module docstring for usage)."""
    kind = 'relative'
    counter = 0  # number of live instances

    def __init__(self, name, active, intensity, xy, color, red, green, blue, layer , closed, xpos , ypos , resize , rotx , roty , rotz):
        self.name = name
        self.active = active # True/False
        self.intensity = intensity
        # NOTE(review): the xy argument is ignored -- the dot list always
        # starts empty. Confirm whether ``self.xy = xy`` was intended.
        self.xy = [] # Dots list
        self.color = color # RGB color in int
        self.red = red
        self.green = green
        self.blue = blue
        self.layer = layer
        self.closed = closed
        self.xpos = xpos
        self.ypos = ypos
        self.resize = resize
        self.rotx = rotx
        self.roty = roty
        self.rotz = rotz
        RelativeObject.counter += 1
        #type(self).counter += 1

    def __del__(self):
        RelativeObject.counter -= 1
# Fixed Laser object : point list in 'pygame' space (top left = 0,0 / bottom right)
class FixedObject:
    """Laser object with absolute screen-space coordinates (no move/rotate)."""
    kind = 'fixed'
    counter = 0  # number of live instances

    # NOTE(review): parameter order here is (name, intensity, active, ...)
    # whereas RelativeObject takes (name, active, intensity, ...) -- confirm
    # callers pass arguments in the right order.
    def __init__(self, name, intensity, active, xy, color, red, green, blue, layer , closed):
        self.name = name
        self.active = active # True/False
        self.intensity = intensity
        # NOTE(review): the xy argument is ignored; dot list starts empty.
        self.xy = []
        self.color = color
        self.red = red
        self.green = green
        self.blue = blue
        self.layer = layer
        self.closed = closed
        FixedObject.counter += 1

    def __del__(self):
        FixedObject.counter -= 1
'''
class IterDest(type):
def __new__ (cls, name, bases, dct):
dct['_instances'] = []
return super().__new__(cls, name, bases, dct)
def __call__(cls, *args, **kwargs):
instance = super().__call__(*args, **kwargs)
cls._instances.append(instance)
return instance
def __iter__(cls):
return iter(cls._instances)
class DestObject():
# class Destinations(metaclass=IterDest):
__metaclass__ = IterDest
counter = 0
def __init__(self, name, number, active, layer , scene, laser):
self.name = name
self.number = number
self.active = active
self.layer = layer
self.scene = scene
self.laser = laser
DestObject.counter += 1
def __del__(self):
DestObject.counter -= 1
'''
class DestObject():
    """Destination descriptor (layer -> scene/laser) with weak tracking of
    every live instance."""
    _instances = set()   # weak references to all live DestObject instances
    counter = 0          # live-instance count

    def __init__(self, name, number, active, layer , scene, laser):
        self.name = name
        self.number = number
        self.active = active
        self.layer = layer
        self.scene = scene
        self.laser = laser
        self._instances.add(weakref.ref(self))
        DestObject.counter += 1

    @classmethod
    def getinstances(cls):
        """Yield every live instance, pruning dead weak references afterwards."""
        stale = set()
        for ref in cls._instances:
            target = ref()
            if target is None:
                stale.add(ref)
            else:
                yield target
        cls._instances -= stale

    def __del__(self):
        DestObject.counter -= 1
def Config(redIP, client, myname):
    """Declare this client/plugin to LJ: record redis IP, client number and
    plugin name, open the redis connection and return it."""
    global ClientNumber, name, redisIP, r

    redisIP = redIP
    r = redis.StrictRedis(host=redisIP, port=6379, db=0)
    # Client number 255 draws nothing (e.g. artnet).
    ClientNumber = client
    name = myname
    print("lj23layers : Plugin declare its name :", name)
    return r
def LjClient(client):
    """Change the client number used in redis keys."""
    global ClientNumber
    ClientNumber = client
def Ljlayer(somelayer):
    """Change the current layer number (laser target) used in redis keys."""
    global layer
    layer = somelayer
def fromRedis(n):
    """Fetch the numpy int16 array stored under redis key *n*: an 8-byte
    big-endian (height, width) header followed by the raw int16 payload."""
    encoded = r.get(n)
    h, w = struct.unpack('>II', encoded[:8])
    return np.frombuffer(encoded, dtype=np.int16, offset=8).reshape(h, w)
# Store Numpy array 'a' in Redis key 'n'
# Write also in redis key 'a' numpy array, its 2 dimensions size : h time w values
def toRedis(n, a):
    """Store numpy array *a* under redis key *n*, prefixed with a big-endian
    (height, width) header; 1-D arrays are stored as (len, 1)."""
    if len(a.shape) == 1:
        height, width = a.shape[0], 1
    else:
        height, width = a.shape
    header = struct.pack('>II', height, width)
    return r.set(n, header + a.tobytes())
#
# OSC functions
#

# OSC clients
def SendLJ(oscaddress, oscargs=''):
    """Send one OSC message to LJ (port 8002 on the redis host).

    :param oscaddress: OSC address string, e.g. "/status"
    :param oscargs: payload appended to the message (default '')
    """
    oscmsg = OSCMessage()
    oscmsg.setAddress(oscaddress)
    oscmsg.append(oscargs)
    osclientlj = OSCClient()
    osclientlj.connect((redisIP, 8002))
    if gstt.debug > 0:
        print("lj23layers for", name, "sending OSC message :", oscmsg, "to", redisIP, ":8002")
    try:
        osclientlj.sendto(oscmsg, (redisIP, 8002))
        oscmsg.clearData()
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        print('Connection to LJ refused : died ?')
# Resolume OSC Arena client.
# sendresol(oscaddress, [arg1, arg2,...])
# example : sendresol("/noteon",note)
def SendResol(oscaddress, oscargs):
    """Send one OSC message to Resolume Arena at oscIPresol:oscPORTresol."""
    oscmsg = OSCMessage()
    oscmsg.setAddress(oscaddress)
    oscmsg.append(oscargs)
    osclientresol = OSCClient()
    osclientresol.connect((oscIPresol, oscPORTresol))
    print("lj23layers sending OSC message : ", oscmsg, "to Resolume", oscIPresol, ":", oscPORTresol)
    try:
        osclientresol.sendto(oscmsg, (oscIPresol, oscPORTresol))
        oscmsg.clearData()
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        print('Connection to Resolume refused : died ?')
def SendIntensity(laser, intensity):
    """Set *laser*'s intensity via redis and flag order 6 for that laser."""
    r.set('/intensity/' + str(laser), str(intensity))
    r.set('/order/' + str(laser), 6)
    # Bug fix: removed a stray SendLJ("/kpps/...") call that referenced the
    # undefined name 'args' (NameError on every call) and belonged to kpps
    # handling, not intensity.
def Sendkpps(laser, kpps):
    """Set *laser*'s scan speed (kpps) via redis and flag order 7 for it."""
    laser_key = str(laser)
    r.set('/kpps/' + laser_key, str(kpps))
    r.set('/order/' + laser_key, 7)
def WebStatus(message):
    """Display *message* on the LJ web UI status line."""
    SendLJ("/status", message)
# Closing plugin messages to LJ
def ClosePlugin():
    """Tell LJ this plugin is exiting (web status + /<name>/start 0)."""
    WebStatus(name + " Exiting")
    SendLJ("/" + name + "/start", 0)
# RAW OSC Frame available ?
def OSCframe():
    """Handle all pending incoming OSC requests, then return.

    Relies on the module-level ``oscserver`` (created elsewhere, not in this
    chunk); handle_request() sets timed_out when no request arrives in time.
    """
    # clear timed_out flag
    oscserver.timed_out = False
    # handle all pending requests then return
    while not oscserver.timed_out:
        oscserver.handle_request()
# Answer to LJ pings with /pong value
def OSCping(path, tags, args, source):
    """OSC handler for /ping: reply to LJ with /pong <plugin name>."""
    #def OSCping():
    if gstt.debug > 0:
        print(name, "lj23layers got /ping from LJ -> reply /pong", name)
    SendLJ("/pong", name)
# Properly close the system.
def OSCstop():
    """Close the OSC server socket."""
    oscserver.close()
# change debug level (0-2)
def OSCdebug(path, tags, args, source):
    """OSC handler: set the global debug level (0-2) from the first argument."""
    requested = args[0]
    print("new debug level", requested)
    gstt.debug = int(requested)
# /quit
def OSCquit(path, tags, args, source):
    """OSC handler for /quit: ask the plugin's main loop to stop."""
    global oscrun
    oscrun = False
    print('lj23layers got /quit for', name)
# default handler
def OSChandler(path, tags, args, source):
    """Fallback OSC handler: log any message that has no dedicated handler."""
    # NOTE(review): oscaddress is computed but never used below.
    oscaddress = ''.join(path.split("/"))
    print("lj23layers Default OSC Handler for", name, ": msg from Client :" + str(source[0]),)
    print("OSC address", path)
    if len(args) > 0:
        print("with args", args)
    #oscIPout = str(source[0])
    #osclient.connect((oscIPout, oscPORTout))
# for any laser object : /pluginame/obj objectname attribute value
# like : /pluginname/obj 'fft' 'xpos' 100
# attributes for all lj Objects: name, xy_list, c, layer | |
structure. The existence of the data can be tested
###### with has_external_data and it is then retrieved with get_filename (again).
## def add_external_data(self, resource_name, file_name):
## """
## Method to add a link to an utterance file resource (e.g. file of MFCCs)
## by a path relative to the *.utt file.
## Take (absolute) file_name and convert it to be relative to self.utterance_location
## to make it less long-winded and more portable. Attach the relative name
## to utterance as the value of attribute resource_name.
## """
## fullpath = os.path.abspath(os.path.realpath(file_name))
## relative_path = os.path.relpath(fullpath, self.utterance_location)
## self.set(resource_name, relative_path)
def has_external_data(self, resource_name):
    """
    If utt file is ``<PATH>/utt/name.utt`` and resource_name is ``lab``,
    check if ``<PATH>/lab/name.lab`` exists as a file.
    """
    external_path = self.get_filename(resource_name)
    return os.path.isfile(external_path)
def save(self, fname=None):
    '''
    Save utterance structure as XML to file.

    :param fname: write here if specified, otherwise use ``utterance_location``
        if it is set, falling back to the ``utterance_filename`` attribute.
    '''
    if not fname:
        if self.utterance_location:
            fname = os.path.join(self.utterance_location, self.get("utterance_name") + ".utt")
        else: ##
            assert self.has_attribute("utterance_filename"), "No file name --- use kwarg"
            fname = self.get("utterance_filename")
    ##print "Write utterance to %s ..."%(fname)
    # pretty_print/encoding keywords imply the lxml ElementTree API here.
    ElementTree(self.data).write(fname, encoding="utf-8", pretty_print=True)
def archive(self, fname=None, visualise=True):
    """
    Store an archived version of an utterance that will not be overwritten,
    and (when *visualise* is true) a PDF of a visualisation.

    :param fname: base file name; defaults to the utterance's own file name.
    :param visualise: also write ``<archive>.pdf``. Bug fix: this flag was
        previously accepted but ignored -- a PDF was always produced.
    """
    if not fname:
        fname = self.get_utterance_filename()
    ## Make a new unique file by appending numbers to filename till we
    ## have one that does not yet exist:
    i = 0
    while os.path.isfile(fname + "." + str(i).zfill(6) + ".archive"):
        i += 1
    fname = fname + "." + str(i).zfill(6) + ".archive"
    self.save(fname=fname)
    if visualise:
        self.visualise(fname + ".pdf")
def pretty_print(self):
    '''
    [Reroute to self.data]
    Print the utterance XML to stdout, pretty-printed (Python 2 print statement).
    '''
    print tostring(ElementTree(self.data), pretty_print=True, encoding="utf-8")
def all_nodes(self):
    """Get nodes in *document order*: every descendant of the root, then the
    root element itself."""
    nodes = list(self.data.iterdescendants())
    nodes.append(self.data)
    return nodes
## reroute some methods of UtteranceElement to work on an utterance's *data*...
## there must be a nicer way of doing this:
def has_attributes(self):
    """Return the root node's attribute mapping (NB: despite the old docstring
    this is the attrib dict, not a list of names; truthy iff any exist)."""
    return self.data.attrib
def has_attribute(self, attribute):
    '''
    [Reroute to self.data]
    True iff the root node carries *attribute* (has_attribute is presumably
    provided by the project's element class; stock lxml has no such method).
    '''
    return self.data.has_attribute(attribute)
def add_child(self, child_node):
    '''
    [Reroute to self.data]
    Attach *child_node* to the root node (delegates to data.add_child).
    '''
    self.data.add_child(child_node)
def xpath(self, path):
    '''
    [Reroute to self.data]
    Evaluate XPath *path* on the root node. NB: exits the whole process on a
    malformed expression rather than raising.
    '''
    # return self.data.xpath(path)
    try:
        data = self.data.xpath(path)
    except lxml.etree.XPathEvalError:
        sys.exit('Problem evaluating this XPATH: ' + path)
    return data
def iterdescendants(self):
    '''
    [Reroute to self.data]
    Iterate over all descendants of the root node in document order.
    '''
    return self.data.iterdescendants()
def append(self, node):
    '''
    [Reroute to self.data]
    Append *node* as the last child of the root node.
    '''
    self.data.append(node)
def insert(self, index, node):
    '''
    [Reroute to self.data]
    Insert *node* at child position *index* of the root node.
    '''
    self.data.insert(index, node)
def remove(self, *args):
    '''
    [Reroute to self.data]
    Remove a child from the root node (arguments passed straight through).
    '''
    self.data.remove(*args)
def get(self, key):
    '''
    [Reroute to self.data]
    Get attribute key's value at *root node* of utterance structure.
    '''
    return self.data.get(key)
def set(self, key, value):
    '''
    [Reroute to self.data]
    Set attribute key's value at *root node* of utterance structure.
    '''
    self.data.set(key, value)
def visualise(self, image_file, force_rows=True, full_label=True, highlight_nodes=None, exclude_tags=[]):
    """
    Use GraphViz to make an image of utterance structure (extension specifies image type).

    :keyword force_rows: put nodes with same tag on same vertical level -- true by default
    :keyword full_label: plot all node attributes -- true by default, otherwise, just safetext
    :keyword highlight_nodes: optional list of nodes to colour yellow
    :keyword exclude_tags: node tags to omit from the plot (NB: mutable
        default, shared across calls -- harmless while only read)
    """
    graphviz_data = ['graph "G"{ \n node [style=rounded]']
    node_list = self.all_nodes() ## nodes in document order
    node_list = [node for node in node_list if node.tag not in exclude_tags]
    highlight_ids = []
    if highlight_nodes:
        highlight_ids = [id(node) for node in highlight_nodes]
        print highlight_ids
    ## Node names
    for node in node_list:
        graphviz_data.append("%s ;" % (id(node))) ## ids will be node name for graphviz.
        ## "Declare" these first to ensure good L-R ordering
    ## Node labels
    for node in node_list:
        if not full_label:
            if node.get("safetext") == None:
                label = "%s" % (node.tag)
            else:
                label = "%s:\\n%s" % (node.tag, node.get("safetext"))
        else:
            ## strip characters which will break dot ("):
            bad_characters = ['"']
            data = []
            for (attribute, value) in node.items():
                for character in bad_characters:
                    value = value.replace(character, '')
                data.append((attribute, value))
            label = ["%s: %s" % (attribute, value) for (attribute, value) in data]
            label = "\\n".join(label)
            label = node.tag + "\\n" + label
        colour_string = ""
        if id(node) in highlight_ids:
            colour_string = ' color="yellow" '
        graphviz_data.append('%s [label="%s" shape="box" %s ] ;' % (id(node), label, colour_string))
    ## Add arcs:
    for node in node_list:
        if node.getparent() != None:
            parent = node.getparent()
            graphviz_data.append('%s -- %s ;' % (id(parent), id(node)))
    ## Force nodes sharing a tag onto the same GraphViz rank (one row each):
    if force_rows:
        tag_dict = {}
        for node in node_list:
            if node.tag not in tag_dict:
                tag_dict[node.tag] = []
            tag_dict[node.tag].append(node)
        for (i, (tag, node_list)) in enumerate(tag_dict.items()):
            ids = [str(id(node)) for node in node_list]
            subgraph = "{ rank=same; " + " ; ".join(ids) + "}"
            graphviz_data.append(subgraph)
    graphviz_data.append("}")
    ## Write the dot source next to the image, then shell out to GraphViz:
    dotfile = remove_extension(image_file) + ".dot"
    writelist(graphviz_data, dotfile, uni=True)
    image_type = image_file[-3:]
    comm = "dot %s -T%s -o%s" % (dotfile, image_type, image_file)
    os.system(comm)
def dump_attribute_value_data(self, regex_string):
    """
    Find values for all attributes (matching the supplied regex) from any
    node of utterance. Do not unique the values (instances not types).
    Return e.g. {"attrib1": ["val1", "val2"], "attrib2": ["val1", "val2", "val3"]}
    """
    pattern = re.compile(regex_string)
    collected = {}
    for node in self.all_nodes():
        for attr in node.keys():
            if re.match(pattern, attr):
                collected.setdefault(attr, []).append(node.get(attr))
    return collected
def dump_features(self, target_nodes, context_list, return_dict=False):
    """
    For each utterance node matching the XPath *target_nodes*, get values for
    the list of contexts at that node.

    :param return_dict: if true, each item keeps context names as a dict;
        otherwise each item is a bare list of values.
    :return: list with one entry per matched node.
    """
    targets = self.xpath(target_nodes)
    # print targets
    if len(targets) == 0:
        # sys.exit('Pattern %s matches no nodes of utterance %s'%(target_nodes, \
        #                get_basename(self.get_utterance_filename())))
        print 'Warning: Pattern %s matches no nodes of utterance %s' % (
            target_nodes, get_basename(self.get_utterance_filename()))
    features = []
    for node in targets:
        if return_dict: ## keep keys and return as dict
            values = dict(node.get_context_vector(context_list))
        else: ## remove keys
            values = [value for (name, value) in node.get_context_vector(context_list)]
        features.append(values)
    return features
def enrich_with_acoustic_statistics(self, target_nodes, features_dims):
    '''
    Get stats with get_acoustic_statistics for each node in the given
    target_nodes xpath and add them to the XML as two comma-separated
    attributes on that node.

    Bug fix: the key/value accumulators were initialised once before the node
    loop and node.set() used a loop variable leaked past the loop, so stats
    accumulated across nodes and were only written once; each node now gets
    its own accumulators and its own attributes.
    '''
    for node in self.xpath(target_nodes):
        key_string = []
        value_string = []
        for (feature, dim) in features_dims:
            # F0 tracks are interpolated before statistics are taken.
            interp = 'f0' in feature
            stats = self.get_acoustic_statistics(node, feature, dim=dim, \
                        interpolate_fzero=interp)
            for (key, value) in stats.items():
                key_string.append('%s_%s_%s' % (feature, dim, key))
                value_string.append(str(value))
        node.set('acoustic_stats_names', ','.join(key_string))
        node.set('acoustic_stats_values', ','.join(value_string))
def get_acoustic_statistics(self, node, feature, dim=0, acoustic_filetype='cmp', interpolate_fzero=False):
    """Summary statistics (via ac_stats.get_stats_over_subsections) of one
    acoustic feature dimension over *node*'s time span."""
    raw_data = self.get_acoustic_features(node, feature, dim=dim, \
                acoustic_filetype=acoustic_filetype, interpolate_fzero=interpolate_fzero)
    stats = ac_stats.get_stats_over_subsections(raw_data)
    return stats
def get_acoustic_features(self, node, feature, dim=0, acoustic_filetype='cmp', interpolate_fzero=False):
    '''
    Get some raw acoustics associated with the given node of the utterance.

    :param node: utterance node carrying 'start'/'end' attributes (msec).
    :param feature: stream name, a key of self.acoustic_stream_info.
    :param dim: single dimension to select within that stream's statics.
    :param interpolate_fzero: spline-smooth/interpolate the F0 track.
    '''
    FRAMESHIFT = 5  # msec per acoustic frame
    ## get_acoustic_features is likely to be called multiple times for an
    ## utterance, so store the acoustics in self.acoustic_features on the
    ## first call:
    if self.acoustic_features is None:
        self.load_acoustic_features(acoustic_filetype)
    ## work out which dims to gather -- only a single dim may be selected:
    (from_dim, static_width) = self.acoustic_stream_info[feature]
    if dim > (static_width - 1):
        # Bug fix: message previously used the undefined name n_dims, which
        # raised NameError instead of reporting the real problem.
        sys.exit('cannot select dim %s from stream %s' % (dim, feature))
    selected_dim = from_dim + dim
    ## work out which frames to gather (// keeps integer division on Py3 too):
    from_frame = int(node.get('start')) // FRAMESHIFT
    to_frame = int(node.get('end')) // FRAMESHIFT
    data = self.acoustic_features[from_frame:to_frame, selected_dim]
    if interpolate_fzero:
        ## trim start of voiced regions, linear interpolate with smoothing:
        data = spline_smooth_fzero(data, trim_n_frames=3, s=100, k=1)
    return data
def add_acoustic_stream_info(self, stream_names, static_stream_sizes):
    """Record stream names and their static dimensionalities as root-node
    attributes (comma-separated)."""
    assert len(stream_names) == len(static_stream_sizes)
    self.set('acoustic_stream_names', ','.join(stream_names))
    self.set('acoustic_stream_dims', ','.join(str(size) for size in static_stream_sizes))
def get_acoustic_stream_info(self):
    '''
    Populate self.acoustic_stream_info (stream -> (start_dim, static_width))
    and self.acoustic_dim from information stored in the XML root attributes.
    '''
    DELTA = 3 ## assume static + delta + delta-delta
    assert 'acoustic_stream_names' in self.attrib
    assert 'acoustic_stream_dims' in self.attrib
    streams = self.get('acoustic_stream_names').split(',')
    static_dims = [int(val) for val in self.get('acoustic_stream_dims').split(',')]
    assert len(streams) == len(static_dims)
    self.acoustic_dim = sum(static_dims) * DELTA ## assume delta and delta-delta
    self.acoustic_stream_info = {}
    # Streams are laid out consecutively; record each one's starting dim.
    start = 0
    for (stream, width) in zip(streams, static_dims):
        self.acoustic_stream_info[stream] = (start, width)
        start += width
!= proposed20xxVersion and origMainBuildNumber != proposedMainBuildNumber: # These are quite different versions
if not tkMessageBox.askyesno( 'Warning! 20XX File Version Mismatch', """The CSS file you're """ + 'importing, "' + os.path.basename(newExternalFilePath) + """", was not """
'designed for to be used with this version of 20XX and may not work. Alternatively, you can extract '
"textures from this file and import them manually if you'd like.\n\nAre you sure you want to continue with this import?" ): return False
# Import the file. The original fileSize value is intentionally preserved, for later comparison during the evaluation for saving.
Gui.isoFileTree.item( iid, values=('Ready to be replaced...', entity, isoOffset, fileSize, isoPath, 'path', newExternalFilePath), tags='changed' )
# If this is a character file and this is 20XX beyond version 3, generate new CSP trim colors for this costume (if the option is enabled)
filename = os.path.basename( iid ) # Checking iid because newExternalFilePath might be named something completely different than the standard naming convention
if generalBoolSettings['autoGenerateCSPTrimColors'].get() and candidateForTrimColorUpdate( filename, orig20xxVersion, origMainBuildNumber ):
generateTrimColors( fileIid=iid, autonomousMode=True )
return True
def candidateForTrimColorUpdate( filename, orig20xxVersion, origMainBuildNumber ):
	""" Returns True only for alt-costume character files in a 20XX HP build
		that is new enough (main build > 3, or any BETA) for CSP trim color
		generation; Master Hand / Crazy Hand files are excluded. """

	# Must be some version of 20XX HP, and an appropriate build of it
	if not orig20xxVersion:
		return False
	if origMainBuildNumber <= 3 and 'BETA' not in orig20xxVersion:
		return False

	# Must be a character file (pl = Player)
	if not filename.startswith( 'pl' ):
		return False

	# Must be a Left-alt or Right-alt file (the .usd endings cover Falcon's red alts)
	if not ( filename.endswith(( '.lat', '.rat' )) or filename.endswith(( 'rl.usd', 'rr.usd' )) ):
		return False

	# Must not be Master Hand or Crazy Hand
	if filename[2:4] in ( 'mh', 'ch' ):
		return False

	return True
def importSingleIsoFile(): # i.e. replace an existing file in the disc
	""" Prompts for one external file and stages it to replace the single file
		currently selected in the Disc File Tree. Nothing is written to the
		disc until the pending changes are saved. """

	if not discDetected(): return

	iidSelectionsTuple = Gui.isoFileTree.selection() # Will be an empty string if nothing is selected, or a tuple of iids

	if not iidSelectionsTuple: msg( "Please select a file to replace." ) #\n\nIf you'd like to replace multiple files, "
		#"use the 'Import Multiple Files' option in the Disc Operations menu." )

	elif len( iidSelectionsTuple ) == 1:
		iidValues = Gui.isoFileTree.item( iidSelectionsTuple[0], 'values' )
		_, entity, _, _, isoPath, _, _ = iidValues # description, entity, isoOffset, fileSize, isoPath, source, data

		if entity == 'file':
			ext = os.path.splitext( iidSelectionsTuple[0] )[1]

			# Set the default filetypes to choose from in the dialog box (the filetype dropdown)
			fileTypeOptions = [ ('Texture data files', '*.dat *.usd *.lat *.rat'), ('Audio files', '*.hps *.ssm'),
								('System files', '*.bin *.ldr *.dol *.toc'), ('Video files', '*.mth *.thp'), ('All files', '*.*') ]
			# Put the option matching the selected file's extension first in the list
			for typeTuple in fileTypeOptions:
				extensions = typeTuple[1].split()
				if '*' + ext in extensions or ( typeTuple[0] == 'Texture data files' and ext[-2:] == 'at' ):
					orderedFileTypes = [ typeTuple ]
					break
			else: orderedFileTypes = [ ('Same type', '*'+ext) ]

			# Populate the rest of the possible types to choose from in the dialog box (the filetype dropdown)
			for typeTuple in fileTypeOptions:
				if typeTuple not in orderedFileTypes: orderedFileTypes.append( typeTuple )

			# Prompt the user to choose a file to import
			filePath = tkFileDialog.askopenfilename(
				title="Choose a file to import.",
				initialdir=settings.get( 'General Settings', 'defaultSearchDirectory' ),
				filetypes=orderedFileTypes ) # Should include the appropriate default file types first

			if filePath:
				# Update the default directory to start in when opening or exporting files.
				settings.set( 'General Settings', 'defaultSearchDirectory', os.path.dirname(filePath) )
				with open( settingsFile, 'w' ) as theSettingsFile: settings.write( theSettingsFile )

				# Check if this is a version of 20XX, and if so, get its main build number
				orig20xxVersion = globalDiscDetails['is20XX']
				if orig20xxVersion:
					if 'BETA' in orig20xxVersion: origMainBuildNumber = int( orig20xxVersion[-1] )
					else: origMainBuildNumber = int( orig20xxVersion[0] )
				else: origMainBuildNumber = 0

				# Check that this is an appropriate replacement file, and if so, replace it
				fileReplaced = replaceFileInDisc( iidSelectionsTuple[0], filePath, iidValues, orig20xxVersion, origMainBuildNumber )

				# Record the pending change so the user is prompted to save
				if fileReplaced:
					global unsavedDiscChanges
					unsavedDiscChanges.append( '"' + isoPath.split('/')[-1] + '" to be replaced with "' + os.path.basename( filePath ) + '".' )
					updateProgramStatus( 'File Replaced. Awaiting Save' )

		else: msg( "Please choose a file to replace for this operation. If you'd like to add new files to this folder, choose 'Add File(s) to Disc'." )

	else: msg( "When selecting files on the Disc File Tree to replace, please only select one file. If you'd like to replace multiple files, "
		"use the 'Import Multiple Files' option in the Disc Operations menu." )
def importMultipleIsoFiles(): # i.e. replace multiple existing files in the disc
	""" Prompt the user for multiple external files and stage each one as a
		replacement for the file of the same name already present in the disc.
		Files whose names aren't found in the disc are skipped and reported at the end.
		Changes are only staged (marked unsaved), not written to the ISO here. """
	if not discDetected(): return
	# Prompt for the files to import (directories are excluded by askopenfilename)
	filepaths = tkFileDialog.askopenfilename(
		title="Choose files to import.",
		initialdir=settings.get( 'General Settings', 'defaultSearchDirectory' ),
		multiple=True,
		filetypes=[ ('Texture data files', '*.dat *.usd *.lat *.rat'), ('Audio files', '*.hps *.ssm'),
					('System files', '*.bin *.ldr *.dol *.toc'), ('Video files', '*.mth *.thp'), ('All files', '*.*') ]
		)
	if filepaths != '':
		# Update the default directory to start in when opening or exporting files.
		settings.set( 'General Settings', 'defaultSearchDirectory', os.path.dirname(filepaths[-1]) )
		with open( settingsFile, 'w') as theSettingsFile: settings.write( theSettingsFile )
		gameId = globalDiscDetails['gameId'].lower()
		filesNotInIso = []			# Collected names that couldn't be matched in the disc
		filesReadyForReplacement = 0
		cspColorGenerationTempDisabled = False
		# Check if this is a version of 20XX, and if so, get its main build number
		orig20xxVersion = globalDiscDetails['is20XX']
		if orig20xxVersion:
			if 'BETA' in orig20xxVersion: origMainBuildNumber = int( orig20xxVersion[-1] )
			else: origMainBuildNumber = int( orig20xxVersion[0] )
		else: origMainBuildNumber = 0
		# Offer to temporarily disable CSP Trim color generation if importing many files
		if generalBoolSettings['autoGenerateCSPTrimColors'].get():
			# Check if there are many character files being imported that would need CSP Trim color updates
			totalTrimColorGenerations = 0
			for filepath in filepaths:
				filename = os.path.basename( filepath ).lower()
				if candidateForTrimColorUpdate( filename, orig20xxVersion, origMainBuildNumber ):
					totalTrimColorGenerations += 1
					if totalTrimColorGenerations > 15: break # We've seen enough
			if totalTrimColorGenerations > 15:
				cspColorGenerationTempDisabled = tkMessageBox.askyesno( 'Skip CSP Trim Color Generation?',
					"When importing many alternate character costume files, CSP Trim Color Generation for them all can take a little while. Would you like to temporarily disable "
					"""the option "Auto-Generate CSP Trim Colors" for this operation?\n\nTip: The CSP Trim color data is stored in the MnSlChr (CSS) file, from 0x3A3C90 to """
					"0x3A45E0. So if you'd like to move all of it from one game/file to another, simply open the file in a hex editor and copy that region to your new CSS file "
					"(be sure you are overwriting, rather than inserting). Alternatively, you can use the names in the data table to help you do this for only specific characters." )
				if cspColorGenerationTempDisabled: generalBoolSettings['autoGenerateCSPTrimColors'].set( False )
		# Add the files to the file tree, check for pre-existing files of the same name, and prep the files to import
		for filepath in filepaths: # Folder paths will be excluded by askopenfilename
			# Spaces become underscores, and '-' becomes '/' to form a disc path —
			# presumably mirroring the naming scheme used when exporting; verify against the export code.
			fileName = os.path.basename( filepath ).replace( ' ', '_' ).replace( '-', '/' )
			iid = gameId + '/' + fileName.lower()
			if not Gui.isoFileTree.exists( iid ): filesNotInIso.append( fileName )
			else:
				# Update this file's treeview values
				if replaceFileInDisc( iid, filepath, Gui.isoFileTree.item(iid, 'values'), orig20xxVersion, origMainBuildNumber ):
					filesReadyForReplacement += 1
		if filesReadyForReplacement > 0:
			global unsavedDiscChanges
			unsavedDiscChanges.append( str( filesReadyForReplacement ) + ' files ready to be replaced.' )
			updateProgramStatus( 'Files Replaced. Awaiting Save' )
		# Restore the CSP Color Generation option if it was temporarily disabled
		if cspColorGenerationTempDisabled:
			generalBoolSettings['autoGenerateCSPTrimColors'].set( True )
		if filesNotInIso != []: cmsg( 'These files will be skipped, because they could not be found in the disc:\n\n' + '\n'.join(filesNotInIso) )
def determineNewEntryPlacement():
	""" Work out where a newly added entry should go in the disc file tree.
		Returns (parent, index, isoPath): the treeview parent iid, the insertion
		index, and the disc folder path the new entry will live under. """
	selection = Gui.isoFileTree.selection()
	if not selection:
		# Nothing selected; default to the end of the disc root.
		gameId = globalDiscDetails['gameId']
		return gameId.lower(), 'end', gameId
	targetIid = selection[-1] # Use the lowest-positioned selected item
	# values layout: description, entity, isoOffset, fileSize, isoPath, source, data
	values = Gui.isoFileTree.item( targetIid, 'values' )
	entity = values[1]
	isoOffset = values[2]
	isoPath = values[4]
	# Trim the last path segment for files and Convenience Folders
	# ('notNative' marks entries not originally part of the disc's file structure).
	if entity == 'file' or isoOffset == 'notNative':
		isoPath = '/'.join( isoPath.split('/')[:-1] )
	return Gui.isoFileTree.parent( targetIid ), Gui.isoFileTree.index( targetIid ), isoPath
def addFilesToIso(): # Adds files which did not previously exist in the disc to its filesystem
if not discDetected(): return
# Prompt for one or more files to add.
filepaths = tkFileDialog.askopenfilename(
title='Choose one or more files (of any format) to add to the disc image.',
initialdir=settings.get( 'General Settings', 'defaultSearchDirectory' ),
multiple=True,
filetypes=[ ('All files', '*.*'), ('Texture data files', '*.dat *.usd *.lat *.rat'), ('Audio files', '*.hps *.ssm'),
('System files', '*.bin *.ldr *.dol *.toc'), ('Video files', '*.mth *.thp') ]
)
if filepaths:
origParent, index, origIsoPath = determineNewEntryPlacement()
if origParent == globalDiscDetails['gameId'].lower() + '/sys':
msg( 'Directories or files cannot be added to the system files folder.' )
return
firstItemAdded = ''
preexistingFiles = []
filenamesTooLong = []
# Add the files to the file tree, check for pre-existing files of | |
# SPDX-FileCopyrightText: 2020 <NAME> for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""
`adafruit_clue`
================================================================================
A high level library representing all the features of the Adafruit CLUE.
* Author(s): <NAME>
Implementation Notes
--------------------
**Hardware:**
.. "* `Adafruit CLUE - nRF52840 Express with Bluetooth LE <https://www.adafruit.com/product/4500>`_"
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
* Adafruit's Bus Device library: https://github.com/adafruit/Adafruit_CircuitPython_BusDevice
* Adafruit's Register library: https://github.com/adafruit/Adafruit_CircuitPython_Register
* Adafruit's LSM6DS CircuitPython Library:
https://github.com/adafruit/Adafruit_CircuitPython_LSM6DS
* Adafruit's LIS3MDL CircuitPython Library:
https://github.com/adafruit/Adafruit_CircuitPython_LIS3MDL
* Adafruit's APDS9960 CircuitPython Library:
https://github.com/adafruit/Adafruit_CircuitPython_APDS9960
* Adafruit's BMP280 CircuitPython Library:
https://github.com/adafruit/Adafruit_CircuitPython_BMP280
* Adafruit's SHT31D CircuitPython Library:
https://github.com/adafruit/Adafruit_CircuitPython_SHT31D
* Adafruit's NeoPixel CircuitPython Library:
https://github.com/adafruit/Adafruit_CircuitPython_NeoPixel
"""
import time
import array
import math
import board
import digitalio
import neopixel
import adafruit_apds9960.apds9960
import adafruit_bmp280
import adafruit_lis3mdl
import adafruit_lsm6ds.lsm6ds33
import adafruit_sht31d
import audiobusio
import audiopwmio
import audiocore
import gamepad
import touchio
__version__ = "2.2.7"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_CLUE.git"
class _ClueSimpleTextDisplay:
    """Manage a simple column of text labels on the CLUE display."""

    def __init__(  # pylint: disable=too-many-arguments
        self,
        title=None,
        title_color=0xFFFFFF,
        title_scale=1,
        text_scale=1,
        font=None,
        colors=None,
    ):
        # pylint: disable=import-outside-toplevel
        import displayio
        import terminalio
        from adafruit_display_text import label

        # pylint: enable=import-outside-toplevel
        default_palette = (
            Clue.VIOLET,
            Clue.GREEN,
            Clue.RED,
            Clue.CYAN,
            Clue.ORANGE,
            Clue.BLUE,
            Clue.MAGENTA,
            Clue.SKY,
            Clue.YELLOW,
            Clue.PURPLE,
        )
        self._colors = colors if colors else default_palette
        self._label = label
        self._display = board.DISPLAY
        self._font = font if font else terminalio.FONT
        self.text_group = displayio.Group(max_size=20, scale=text_scale)
        if title:
            # Fail gracefully if title is longer than 60 characters.
            if len(title) > 60:
                raise ValueError("Title must be 60 characters or less.")
            title_label = label.Label(
                self._font,
                text=title,
                max_glyphs=60,
                color=title_color,
                scale=title_scale,
            )
            title_label.x = 0
            title_label.y = 8
            self._y = title_label.y + 18
            self.text_group.append(title_label)
        else:
            self._y = 3
        # Start with a single text line; __getitem__ grows this list on demand.
        self._lines = [self.add_text_line(color=self._colors[0])]

    def __getitem__(self, item):
        """Return the label for line *item*, creating intermediate lines as needed."""
        while len(self._lines) <= item:
            self._lines.append(
                self.add_text_line(color=self._colors[item % len(self._colors)])
            )
        return self._lines[item]

    def add_text_line(self, color=0xFFFFFF):
        """Append a new empty label of the given color below the previous one and return it."""
        new_line = self._label.Label(self._font, text="", max_glyphs=45, color=color)
        new_line.x = 0
        new_line.y = self._y
        self._y += 13
        self.text_group.append(new_line)
        return new_line

    def show(self):
        """Display the text group."""
        self._display.show(self.text_group)

    def show_terminal(self):
        """Restore the terminalio (REPL) screen."""
        self._display.show(None)
class Clue: # pylint: disable=too-many-instance-attributes, too-many-public-methods
    """Represents a single CLUE board: its sensors, buttons, LEDs, audio and display."""

    # Color variables available for import. Each is an (R, G, B) tuple of 8-bit values.
    RED = (255, 0, 0)
    YELLOW = (255, 255, 0)
    ORANGE = (255, 150, 0)
    GREEN = (0, 255, 0)
    TEAL = (0, 255, 120)
    CYAN = (0, 255, 255)
    BLUE = (0, 0, 255)
    PURPLE = (180, 0, 255)
    MAGENTA = (255, 0, 150)
    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0)
    GOLD = (255, 222, 30)
    PINK = (242, 90, 255)
    AQUA = (50, 255, 255)
    JADE = (0, 255, 40)
    AMBER = (255, 100, 0)
    VIOLET = (255, 0, 255)
    SKY = (0, 180, 255)
    # Convenience sequence of the classic rainbow colors, in order.
    RAINBOW = (RED, ORANGE, YELLOW, GREEN, BLUE, PURPLE)
    def __init__(self):
        """Set up I2C, touch pads, buttons, LEDs, microphone, all onboard sensors, and the display."""
        # Define I2C:
        self._i2c = board.I2C()
        # Define touch:
        # Initially, self._touches stores the pin used for a particular touch. When that touch is
        # used for the first time, the pin is replaced with the corresponding TouchIn object.
        # This saves a little RAM over using a separate read-only pin tuple.
        # For example, after `clue.touch_2`, self._touches is equivalent to:
        # [board.D0, board.D1, touchio.TouchIn(board.D2)]
        self._touches = [board.D0, board.D1, board.D2]
        self._touch_threshold_adjustment = 0
        # Define buttons (active-low inputs with pull-ups):
        self._a = digitalio.DigitalInOut(board.BUTTON_A)
        self._a.switch_to_input(pull=digitalio.Pull.UP)
        self._b = digitalio.DigitalInOut(board.BUTTON_B)
        self._b.switch_to_input(pull=digitalio.Pull.UP)
        self._gamepad = gamepad.GamePad(self._a, self._b)
        # Define LEDs:
        self._white_leds = digitalio.DigitalInOut(board.WHITE_LEDS)
        self._white_leds.switch_to_output()
        self._pixel = neopixel.NeoPixel(board.NEOPIXEL, 1)
        self._red_led = digitalio.DigitalInOut(board.L)
        self._red_led.switch_to_output()
        # Define audio (PDM microphone; playback objects created lazily):
        self._mic = audiobusio.PDMIn(
            board.MICROPHONE_CLOCK,
            board.MICROPHONE_DATA,
            sample_rate=16000,
            bit_depth=16,
        )
        self._sample = None
        self._samples = None
        self._sine_wave = None
        self._sine_wave_sample = None
        # Define sensors:
        # Accelerometer/gyroscope:
        self._accelerometer = adafruit_lsm6ds.lsm6ds33.LSM6DS33(self._i2c)
        # Magnetometer:
        self._magnetometer = adafruit_lis3mdl.LIS3MDL(self._i2c)
        # DGesture/proximity/color/light sensor:
        self._sensor = adafruit_apds9960.apds9960.APDS9960(self._i2c)
        # Humidity sensor:
        self._humidity = adafruit_sht31d.SHT31D(self._i2c)
        # Barometric pressure sensor:
        self._pressure = adafruit_bmp280.Adafruit_BMP280_I2C(self._i2c)
        # Create displayio object for passing.
        self.display = board.DISPLAY
def _touch(self, i):
if not isinstance(self._touches[i], touchio.TouchIn):
# First time referenced. Get the pin from the slot for this touch
# and replace it with a TouchIn object for the pin.
self._touches[i] = touchio.TouchIn(self._touches[i])
self._touches[i].threshold += self._touch_threshold_adjustment
return self._touches[i].value
    @property
    def touch_0(self):
        """Detect touch on capacitive touch pad 0.

        .. image :: ../docs/_static/pad_0.jpg
          :alt: Pad 0

        This example prints when pad 0 is touched.

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              if clue.touch_0:
                  print("Touched pad 0")
        """
        # Returns TouchIn.value — presumably a bool; confirm against touchio docs.
        return self._touch(0)
    @property
    def touch_1(self):
        """Detect touch on capacitive touch pad 1.

        .. image :: ../docs/_static/pad_1.jpg
          :alt: Pad 1

        This example prints when pad 1 is touched.

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              if clue.touch_1:
                  print("Touched pad 1")
        """
        # Returns TouchIn.value — presumably a bool; confirm against touchio docs.
        return self._touch(1)
    @property
    def touch_2(self):
        """Detect touch on capacitive touch pad 2.

        .. image :: ../docs/_static/pad_2.jpg
          :alt: Pad 2

        This example prints when pad 2 is touched.

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              if clue.touch_2:
                  print("Touched pad 2")
        """
        # Returns TouchIn.value — presumably a bool; confirm against touchio docs.
        return self._touch(2)
    @property
    def button_a(self) -> bool:
        """``True`` when Button A is pressed. ``False`` if not.

        .. image :: ../docs/_static/button_a.jpg
          :alt: Button A

        This example prints when button A is pressed.

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              if clue.button_a:
                  print("Button A pressed")
        """
        # The button input is pulled up, so a pressed button reads low — invert it.
        return not self._a.value
    @property
    def button_b(self) -> bool:
        """``True`` when Button B is pressed. ``False`` if not.

        .. image :: ../docs/_static/button_b.jpg
          :alt: Button B

        This example prints when button B is pressed.

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              if clue.button_b:
                  print("Button B pressed")
        """
        # The button input is pulled up, so a pressed button reads low — invert it.
        return not self._b.value
    @property
    def were_pressed(self) -> set:
        """Returns a set of the buttons that have been pressed.

        .. image :: ../docs/_static/button_b.jpg
          :alt: Button B

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              print(clue.were_pressed)
        """
        ret = set()
        pressed = self._gamepad.get_pressed()
        # GamePad reports a bitmask: bit 0 is button A, bit 1 is button B.
        for button, mask in (("A", 0x01), ("B", 0x02)):
            if mask & pressed:
                ret.add(button)
        return ret
def shake(self, shake_threshold=30, avg_count=10, total_delay=0.1):
"""
Detect when the accelerometer is shaken. Optional parameters:
:param shake_threshold: Increase or decrease to change shake sensitivity. This
requires a minimum value of 10. 10 is the total
acceleration if the board is not moving, therefore
anything less than 10 will erroneously report a constant
shake detected. (Default 30)
:param avg_count: The number of readings taken and used for the average
acceleration. (Default 10)
:param total_delay: The total time in seconds it takes to obtain avg_count
readings from acceleration. (Default 0.1)
"""
shake_accel = (0, 0, 0)
for _ in range(avg_count):
# shake_accel creates a list of tuples from acceleration data.
# zip takes multiple tuples and zips them together, as in:
# In : zip([-0.2, 0.0, 9.5], [37.9, 13.5, -72.8])
# Out: [(-0.2, 37.9), (0.0, 13.5), (9.5, -72.8)]
# map applies sum to each member of this tuple, resulting in a
# 3-member list. tuple converts this list into a tuple which is
# used as shake_accel.
shake_accel = tuple(map(sum, zip(shake_accel, self.acceleration)))
time.sleep(total_delay / avg_count)
avg = tuple(value / avg_count for value in shake_accel)
total_accel = math.sqrt(sum(map(lambda x: x * x, avg)))
return total_accel > shake_threshold
    @property
    def acceleration(self):
        """Obtain acceleration data from the x, y and z axes.

        .. image :: ../docs/_static/accelerometer.jpg
          :alt: Accelerometer

        This example prints the values. Try moving the board to see how the printed values change.

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              print("Accel: {:.2f} {:.2f} {:.2f}".format(*clue.acceleration))
        """
        # Delegates to the LSM6DS33 driver created in __init__.
        return self._accelerometer.acceleration
    @property
    def gyro(self):
        """Obtain x, y, z angular velocity values in degrees/second.

        .. image :: ../docs/_static/accelerometer.jpg
          :alt: Gyro

        This example prints the values. Try moving the board to see how the printed values change.

        To use with the CLUE:

        .. code-block:: python

          from adafruit_clue import clue

          while True:
              print("Gyro: {:.2f} {:.2f} {:.2f}".format(*clue.gyro))
        """
        # Delegates to the LSM6DS33 driver created in __init__.
        return self._accelerometer.gyro
@property
def magnetic(self):
"""Obtain x, y, z magnetic values in microteslas.
.. image :: ../docs/_static/magnetometer.jpg
:alt: Magnetometer
This example prints the values. Try moving the board to see how | |
# <gh_stars>0  (dataset artifact — commented out so the module remains valid Python)
import discord
from discord.ext import commands
import datetime
import json
import asyncpg
import typing
import asyncio
import aiohttp
import humanfriendly
import functools
from random import randint
from fire.converters import TextChannel
from fire.invite import findinvite
from fire.youtube import findchannel, findvideo
print("settings.py has been loaded")
with open('config_prod.json', 'r') as cfg:
config = json.load(cfg)
def isadmin(ctx):
    """Checks if the author is an admin"""
    # Admin ids are stored as strings in the config file.
    return str(ctx.author.id) in config['admins']
def byteify(input):
    """Recursively encode every string in a JSON-like structure to UTF-8 bytes.

    dicts and lists are rebuilt with their contents byteified; ``str`` values
    become ``bytes``; everything else is returned unchanged.

    Fixed for Python 3: the original used ``dict.iteritems()`` and ``unicode``,
    both of which raise under Python 3 (the rest of this file is Python 3 —
    f-strings, async/await).
    """
    if isinstance(input, dict):
        return {byteify(key): byteify(value) for key, value in input.items()}
    if isinstance(input, list):
        return [byteify(element) for element in input]
    if isinstance(input, str):
        return input.encode('utf-8')
    return input
# byteify example
# byteify(json.loads(u"[ 'A','B','C' , ' D']".replace('\'','"')))
# may have a use in the future ¯\_(ツ)_/¯
# Commands whose successful completion is reported to the guild's action log.
watchedcmds = ['purge']

# Discord voice-region id -> human-readable name with flag emoji.
# BUGFIX: added the correctly spelled 'frankfurt' key (Discord's actual region id);
# the original 'frakfurt' key is kept for backward compatibility.
# NOTE(review): 'england' does not look like a Discord region id either ('london' is) — confirm.
region = {
    'amsterdam': '🇳🇱 Amsterdam',
    'brazil': '🇧🇷 Brazil',
    'eu-central': '🇪🇺 Central Europe',
    'eu-west': '🇪🇺 Western Europe',
    'frakfurt': '🇩🇪 Frankfurt',
    'frankfurt': '🇩🇪 Frankfurt',
    'hongkong': '🇭🇰 Hong Kong',
    'india': '🇮🇳 India',
    'japan': '🇯🇵 Japan',
    'england': '🇬🇧 England',
    'russia': '🇷🇺 Russia',
    'singapore': '🇸🇬 Singapore',
    'southafrica': '🇿🇦 South Africa',
    'sydney': '🇦🇺 Sydney',
    'us-central': '🇺🇸 Central US',
    'us-south': '🇺🇸 US South',
    'us-east': '🇺🇸 US East',
    'us-west': '🇺🇸 US West',
    'vip-us-east': '🇺🇸 US East (VIP)',
    'vip-us-west': '🇺🇸 US West (VIP)',
    'vip-amsterdam': '🇳🇱 Amsterdam (VIP)'
}
class settings(commands.Cog, name="Settings"):
    def __init__(self, bot):
        self.bot = bot
        # Caches populated from the database by loadSettings():
        self.logchannels = {}     # guild id -> {'modlogs': id_or_False, 'actionlogs': id_or_False}
        self.invitefiltered = []  # guild ids with invite filtering enabled
        self.gbancheck = []       # guild ids with global-ban checking enabled
        self.autodecancer = []    # guild ids with auto-decancer enabled
        self.autodehoist = []     # guild ids with auto-dehoist enabled
async def loadSettings(self):
self.logchannels = {}
self.invitefiltered = []
self.gbancheck = []
self.autodecancer = []
self.autodehoist = []
query = 'SELECT * FROM settings;'
settings = await self.bot.db.fetch(query)
for s in settings:
guild = s['gid']
if s['inviteblock'] == 1:
self.invitefiltered.append(guild)
if s['globalbans'] == 1:
self.gbancheck.append(guild)
if s['autodecancer'] == 1:
self.autodecancer.append(guild)
if s['autodehoist'] == 1:
self.autodehoist.append(guild)
if s['modlogs'] == 0:
modlogs = False
else:
modlogs = s['modlogs']
if s['actionlogs'] == 0:
actionlogs = False
else:
actionlogs = s['actionlogs']
guildobj = self.bot.get_guild(guild)
if not guildobj:
pass
if modlogs:
cmodlogs = discord.utils.get(guildobj.channels, id=modlogs)
if type(cmodlogs) != discord.TextChannel:
modlogs = False
if actionlogs:
cactionlogs = discord.utils.get(guildobj.channels, id=actionlogs)
if type(cactionlogs) != discord.TextChannel:
actionlogs = False
self.logchannels[guild] = {
"modlogs": modlogs,
"actionlogs": actionlogs
}
    @commands.Cog.listener()
    async def on_ready(self):
        """Populate the settings caches once the bot is connected.

        The 5s sleep presumably lets other startup tasks (e.g. the DB connection)
        finish first — confirm before changing.
        """
        await asyncio.sleep(5)
        await self.loadSettings()
        print('Settings loaded (on_ready)!')
@commands.command(name='loadsettings', description='Load settings', hidden=True)
async def loadthesettings(self, ctx):
'''PFXloadsettings'''
if await self.bot.is_team_owner(ctx.author):
await self.loadSettings()
await ctx.send('Loaded data!')
else:
await ctx.send('no.')
@commands.Cog.listener()
async def on_message_delete(self, message):
if message.guild and not message.author.bot:
if message.channel.id == 600068336331522079:
return
logid = self.logchannels[message.guild.id] if message.guild.id in self.logchannels else None
if logid:
logch = message.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
if message.system_content == None or message.system_content == '':
message.content = 'I was unable to get the message that was deleted. Maybe it was a system message?'
embed = discord.Embed(color=message.author.color, timestamp=message.created_at, description=f'{message.author.mention}\'**s message in** {message.channel.mention} **was deleted**\n{message.system_content}')
embed.set_author(name=message.author, icon_url=str(message.author.avatar_url))
if message.attachments:
embed.add_field(name = 'Attachment(s)', value = '\n'.join([attachment.filename for attachment in message.attachments]) + '\n\n__Attachment URLs are invalidated once the message is deleted.__')
embed.set_footer(text=f"Author ID: {message.author.id} | Message ID: {message.id} | Channel ID: {message.channel.id}")
try:
await logch.send(embed=embed)
except Exception:
pass
@commands.Cog.listener()
async def on_message_edit(self, before, after):
if after.channel.type == discord.ChannelType.news and after.author.permissions_in(after.channel).manage_messages:
raw = await self.bot.http.get_message(after.channel.id, after.id)
if raw['flags'] == 2:
logid = self.logchannels[after.guild.id] if after.guild.id in self.logchannels else None
if logid:
logch = after.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
embed = discord.Embed(color=discord.Color.green(), timestamp=after.created_at, description=f'**A message was published in** {after.channel.mention}')
embed.set_author(name=after.guild.name, icon_url=str(after.guild.icon_url))
embed.add_field(name='Message Author', value=after.author.mention, inline=False)
embed.add_field(name='Message', value=f'[Click Here]({after.jump_url})', inline=False)
embed.set_footer(text=f"Author ID: {after.author.id} | Message ID: {after.id} | Channel ID: {after.channel.id}")
try:
return await logch.send(embed=embed)
except Exception:
pass
if before.content == after.content:
return
message = after
code = findinvite(message.system_content)
invite = None
if code:
if '/' in code:
return
invalidinvite = False
if isinstance(message.author, discord.Member):
if not message.author.permissions_in(message.channel).manage_messages:
if message.guild.me.permissions_in(message.channel).manage_messages:
if message.guild.id in self.invitefiltered:
try:
invite = await self.bot.fetch_invite(url=code)
if invite.guild.id == message.guild.id:
pass
else:
await message.delete()
except Exception:
pass
try:
ohmygod = False
if code.lower() in self.bot.vanity_urls and 'oh-my-god.wtf' in message.system_content:
invite = self.bot.vanity_urls[code]
ohmygod = True
if isinstance(message.author, discord.Member):
if not message.author.permissions_in(message.channel).manage_messages:
if message.guild.me.permissions_in(message.channel).manage_messages:
if message.guild.id in self.invitefiltered:
if invite['gid'] != message.guild.id:
try:
await message.delete()
except Exception:
pass
else:
if not invite or type(invite) != discord.Invite:
invite = await self.bot.fetch_invite(url=code)
except discord.NotFound or discord.HTTPException as e:
invalidinvite = True
if message.guild:
if message.author.bot:
return
logid = self.logchannels[message.guild.id] if message.guild.id in self.logchannels else None
if logid:
logch = message.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
embed = discord.Embed(color=message.author.color, timestamp=message.created_at, description=f'**Invite link sent in** {message.channel.mention}')
embed.set_author(name=message.author, icon_url=str(message.author.avatar_url))
if invalidinvite:
if '.png' in code:
return
embed.add_field(name='Invite Code', value=code, inline=False)
embed.add_field(name='Valid?', value='false', inline=False)
elif ohmygod:
invite = await self.bot.fetch_invite(url=invite['invite'])
embed.add_field(name='Invite Code', value=code, inline=False)
embed.add_field(name='Vanity URL', value=f'[oh-my-god.wtf/{code}](https://oh-my-god.wtf/{code})', inline=False)
embed.add_field(name='Guild', value=f'{invite.guild.name}({invite.guild.id})', inline=False)
embed.add_field(name='Channel', value=f'#{invite.channel.name}({invite.channel.id})', inline=False)
embed.add_field(name='Members', value=f'{invite.approximate_member_count} ({invite.approximate_presence_count} active)', inline=False)
elif invite and not ohmygod:
embed.add_field(name='Invite Code', value=code, inline=False)
embed.add_field(name='Guild', value=f'{invite.guild.name}({invite.guild.id})', inline=False)
embed.add_field(name='Channel', value=f'#{invite.channel.name}({invite.channel.id})', inline=False)
embed.add_field(name='Members', value=f'{invite.approximate_member_count} ({invite.approximate_presence_count} active)', inline=False)
embed.set_footer(text=f"Author ID: {message.author.id}")
try:
return await logch.send(embed=embed)
except Exception:
pass
if before.system_content == after.system_content:
return
if after.guild and not after.author.bot:
logid = self.logchannels[after.guild.id] if after.guild.id in self.logchannels else None
if logid:
logch = after.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
embed = discord.Embed(color=after.author.color, timestamp=after.created_at, description=f'{after.author.mention} **edited a message in** {after.channel.mention}')
embed.set_author(name=after.author, icon_url=str(after.author.avatar_url))
bcontent = before.system_content [:300] + (before.system_content [300:] and '...')
acontent = after.system_content [:300] + (after.system_content [300:] and '...')
embed.add_field(name='Before', value=bcontent, inline=False)
embed.add_field(name='After', value=acontent, inline=False)
embed.set_footer(text=f"Author ID: {after.author.id} | Message ID: {after.id} | Channel ID: {after.channel.id}")
try:
await logch.send(embed=embed)
except Exception:
pass
@commands.Cog.listener()
async def on_guild_channel_create(self, channel):
if channel.guild:
logid = self.logchannels[channel.guild.id] if channel.guild.id in self.logchannels else None
if logid:
logch = channel.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
embed = discord.Embed(color=discord.Color.green(), timestamp=channel.created_at, description=f'**New channel created: #{channel.name}**')
embed.set_author(name=channel.guild.name, icon_url=str(channel.guild.icon_url))
embed.set_footer(text=f"Channel ID: {channel.id} | Guild ID: {channel.guild.id}")
try:
await logch.send(embed=embed)
except Exception:
pass
    @commands.Cog.listener()
    async def on_guild_channel_delete(self, channel):
        """Log deleted channels to the guild's action log, if one is configured."""
        if channel.guild:
            logid = self.logchannels[channel.guild.id] if channel.guild.id in self.logchannels else None
            if logid:
                logch = channel.guild.get_channel(logid['actionlogs'])
            else:
                return
            if logch:
                # NOTE(review): the embed timestamp is the channel's *creation* time,
                # not the deletion time — confirm whether utcnow() was intended.
                embed = discord.Embed(color=discord.Color.red(), timestamp=channel.created_at, description=f'**Channel deleted: #{channel.name}**')
                embed.set_author(name=channel.guild.name, icon_url=str(channel.guild.icon_url))
                embed.set_footer(text=f"Channel ID: {channel.id} | Guild ID: {channel.guild.id}")
                try:
                    await logch.send(embed=embed)
                except Exception:
                    pass
@commands.Cog.listener()
async def on_message(self, message):
code = findinvite(message.system_content)
invite = None
if code:
if '/' in code:
return
invalidinvite = False
if isinstance(message.author, discord.Member):
if not message.author.permissions_in(message.channel).manage_messages:
if message.guild.me.permissions_in(message.channel).manage_messages:
if message.guild.id in self.invitefiltered:
try:
invite = await self.bot.fetch_invite(url=code)
if invite.guild.id == message.guild.id:
pass
else:
await message.delete()
except Exception:
pass
try:
ohmygod = False
if code.lower() in self.bot.vanity_urls:
invite = self.bot.vanity_urls[code]
ohmygod = True
if isinstance(message.author, discord.Member):
if not message.author.permissions_in(message.channel).manage_messages:
if message.guild.me.permissions_in(message.channel).manage_messages:
if message.guild.id in self.invitefiltered:
if invite['gid'] != message.guild.id:
await message.delete()
else:
if not invite or type(invite) != discord.Invite:
invite = await self.bot.fetch_invite(url=code)
except discord.NotFound or discord.HTTPException as e:
invalidinvite = True
if message.guild:
if message.author.bot:
return
logid = self.logchannels[message.guild.id] if message.guild.id in self.logchannels else None
if logid:
logch = message.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
embed = discord.Embed(color=message.author.color, timestamp=message.created_at, description=f'**Invite link sent in** {message.channel.mention}')
embed.set_author(name=message.author, icon_url=str(message.author.avatar_url))
if invalidinvite:
if '.png' in code:
return
embed.add_field(name='Invite Code', value=code, inline=False)
embed.add_field(name='Valid?', value='false', inline=False)
elif ohmygod:
invite = await self.bot.fetch_invite(url=invite['invite'])
embed.add_field(name='Invite Code', value=code, inline=False)
embed.add_field(name='Vanity URL', value=f'[oh-my-god.wtf/{code}](https://oh-my-god.wtf/{code})', inline=False)
embed.add_field(name='Guild', value=f'{invite.guild.name}({invite.guild.id})', inline=False)
embed.add_field(name='Channel', value=f'#{invite.channel.name}({invite.channel.id})', inline=False)
embed.add_field(name='Members', value=f'{invite.approximate_member_count} ({invite.approximate_presence_count} active)', inline=False)
elif invite and not ohmygod:
embed.add_field(name='Invite Code', value=code, inline=False)
embed.add_field(name='Guild', value=f'{invite.guild.name}({invite.guild.id})', inline=False)
embed.add_field(name='Channel', value=f'#{invite.channel.name}({invite.channel.id})', inline=False)
embed.add_field(name='Members', value=f'{invite.approximate_member_count} ({invite.approximate_presence_count} active)', inline=False)
embed.set_footer(text=f"Author ID: {message.author.id}")
try:
await logch.send(embed=embed)
except Exception:
pass
    @commands.Cog.listener()
    async def on_command_completion(self, ctx):
        """Count command usage in Datadog and action-log watched commands (currently `purge`).

        For `purge`, the recently purged messages are uploaded to hasteb.in and
        linked from the log embed.
        """
        # datadog.increment is blocking, so run it in an executor thread.
        await self.bot.loop.run_in_executor(None, func=functools.partial(self.bot.datadog.increment, 'commands.used'))
        if ctx.command.name in watchedcmds:
            if ctx.guild:
                logid = self.logchannels[ctx.guild.id] if ctx.guild.id in self.logchannels else None
                if logid:
                    logch = ctx.guild.get_channel(logid['actionlogs'])
                else:
                    return
                if logch:
                    embed = discord.Embed(color=ctx.author.color, timestamp=datetime.datetime.utcnow(), description=f'`{ctx.command.name}` **was used in** {ctx.channel.mention} **by {ctx.author.name}**')
                    embed.set_author(name=ctx.author, icon_url=str(ctx.author.avatar_url))
                    embed.add_field(name='Message', value=ctx.message.system_content, inline=False)
                    embed.set_footer(text=f"Author ID: {ctx.author.id} | Channel ID: {ctx.channel.id}")
                    if ctx.command.name == 'purge':
                        # recentpurge is populated by the purge command; it may be absent.
                        try:
                            purged = self.bot.recentpurge[ctx.channel.id]
                        except KeyError as e:
                            purged = None
                        if purged:
                            # Upload the purged messages to hasteb.in and link them.
                            async with aiohttp.ClientSession() as s:
                                async with s.post('https://hasteb.in/documents', data=json.dumps(self.bot.recentpurge[ctx.channel.id], indent=4)) as r:
                                    j = await r.json()
                                    key = j['key'] + '.json'
                                    embed.add_field(name='Purged Messages', value=f'https://hasteb.in/{key}', inline=False)
                    try:
                        await logch.send(embed=embed)
                    except Exception:
                        pass
@commands.Cog.listener()
async def on_member_join(self, member):
await self.bot.loop.run_in_executor(None, func=functools.partial(self.bot.datadog.increment, 'members.join'))
logid = self.logchannels[member.guild.id] if member.guild.id in self.logchannels else None
if logid:
logch = member.guild.get_channel(logid['modlogs'])
else:
return
if logch:
#https://giphy.com/gifs/pepsi-5C0a8IItAWRebylDRX
embed = discord.Embed(title='Member Joined', url='https://i.giphy.com/media/Nx0rz3jtxtEre/giphy.gif', color=discord.Color.green(), timestamp=datetime.datetime.utcnow())
embed.set_author(name=f'{member}', icon_url=str(member.avatar_url))
embed.add_field(name='Account Created', value=humanfriendly.format_timespan(datetime.datetime.utcnow() - member.created_at) + ' ago', inline=False)
embed.set_footer(text=f'User ID: {member.id}')
try:
await logch.send(embed=embed)
except Exception:
pass
try:
if member.guild.id in self.autodecancer:
decancered = False
if not self.bot.isascii(member.name):
num = member.discriminator
decancered = True
await member.edit(nick=f'<NAME> {num}')
logid = self.logchannels[member.guild.id] if member.guild.id in self.logchannels else None
if logid:
logch = member.guild.get_channel(logid['modlogs'])
else:
return
if logch:
embed = discord.Embed(color=discord.Color.green(), timestamp=datetime.datetime.utcnow())
embed.set_author(name=f'Auto-Decancer | {member}', icon_url=str(member.avatar_url))
embed.add_field(name='User', value=f'{member}({member.id})', inline=False)
embed.add_field(name='Reason', value='Username contains non-ascii characters', inline=False)
embed.set_footer(text=f'User ID: {member.id}')
try:
await logch.send(embed=embed)
except Exception:
pass
if member.guild.id in self.autodehoist:
if self.bot.ishoisted(member.name) and not decancered:
num = member.discriminator
await member.edit(nick=f'<NAME> {num}')
logid = self.logchannels[member.guild.id] if member.guild.id in self.logchannels else None
if logid:
logch = member.guild.get_channel(logid['modlogs'])
else:
return
if logch:
embed = discord.Embed(color=discord.Color.green(), timestamp=datetime.datetime.utcnow())
embed.set_author(name=f'Auto-Dehoist | {member}', icon_url=str(member.avatar_url))
embed.add_field(name='User', value=f'{member}({member.id})', inline=False)
embed.add_field(name='Reason', value='Username starts with a non A-Z character', inline=False)
embed.set_footer(text=f'User ID: {member.id}')
try:
await logch.send(embed=embed)
except Exception:
pass
except Exception:
pass
@commands.Cog.listener()
async def on_member_remove(self, member):
await self.bot.loop.run_in_executor(None, func=functools.partial(self.bot.datadog.increment, 'members.leave'))
logid = self.logchannels[member.guild.id] if member.guild.id in self.logchannels else None
if logid:
logch = member.guild.get_channel(logid['modlogs'])
else:
return
if logch:
embed = discord.Embed(title='Member Left', url='https://i.giphy.com/media/5C0a8IItAWRebylDRX/source.gif', color=discord.Color.red(), timestamp=datetime.datetime.utcnow())
embed.set_author(name=f'{member}', icon_url=str(member.avatar_url))
embed.add_field(name='Nickname', value=member.nick or member.name, inline=False)
roles = [role.mention for role in member.roles if role != member.guild.default_role]
embed.add_field(name='Roles', value=', '.join(roles) if roles != [] else 'No roles', inline=False)
embed.set_footer(text=f'User ID: {member.id}')
try:
await logch.send(embed=embed)
except Exception:
pass
@commands.Cog.listener()
async def on_member_update(self, before, after):
if after.nick != None and f'<NAME> {after.discriminator}' in after.nick:
return
if before.nick != after.nick:
try:
if after.guild.id in self.autodecancer:
nitroboosters = discord.utils.get(after.guild.roles, name='Nitro Booster')
if after.guild_permissions.manage_nicknames or nitroboosters in after.roles:
pass
else:
decancered = False
if not after.nick:
nick = after.name
tochange = 'Username'
else:
nick = after.nick
tochange = 'Nickname'
if not self.bot.isascii(nick):
num = after.discriminator
decancered = True
await after.edit(nick=f'<NAME> {num}')
logid = self.logchannels[after.guild.id] if after.guild.id in self.logchannels else None
if logid:
logch = after.guild.get_channel(logid['modlogs'])
else:
return
if logch:
embed = discord.Embed(color=discord.Color.green(), timestamp=datetime.datetime.utcnow())
embed.set_author(name=f'Auto-Decancer | {after}', icon_url=str(after.avatar_url))
embed.add_field(name='User', value=f'{after}({after.id})', inline=False)
embed.add_field(name='Reason', value=f'{tochange} contains non-ascii characters', inline=False)
if tochange == 'Nickname':
embed.add_field(name='Nickname', value=nick, inline=False)
embed.set_footer(text=f'User ID: {after.id}')
try:
return await logch.send(embed=embed)
except Exception:
pass
if after.guild.id in self.autodehoist:
nitroboosters = discord.utils.get(after.guild.roles, name='Nitro Booster')
if after.guild_permissions.manage_nicknames or nitroboosters in after.roles:
pass
else:
dehoisted = False
if not after.nick:
nick = after.name
tochange = 'Username'
else:
nick = after.nick
tochange = 'Nickname'
if self.bot.ishoisted(nick) and not decancered:
num = after.discriminator
dehoisted = True
await after.edit(nick=f'<NAME> {num}')
logid = self.logchannels[after.guild.id] if after.guild.id in self.logchannels else None
if logid:
logch = after.guild.get_channel(logid['modlogs'])
else:
return
if logch:
embed = discord.Embed(color=discord.Color.green(), timestamp=datetime.datetime.utcnow())
embed.set_author(name=f'Auto-Dehoist | {after}', icon_url=str(after.avatar_url))
embed.add_field(name='User', value=f'{after}({after.id})', inline=False)
embed.add_field(name='Reason', value=f'{tochange} starts with a non A-Z character', inline=False)
if tochange == 'Nickname':
embed.add_field(name='Nickname', value=nick, inline=False)
embed.set_footer(text=f'User ID: {after.id}')
try:
return await logch.send(embed=embed)
except Exception:
pass
except Exception:
pass
logid = self.logchannels[after.guild.id] if after.guild.id in self.logchannels else None
if logid:
logch = after.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
embed = discord.Embed(color=after.color, timestamp=datetime.datetime.utcnow(), description=f'{after.mention}\'**s nickname was changed**')
embed.set_author(name=after, icon_url=str(after.avatar_url))
embed.add_field(name='Before', value=before.nick, inline=False)
embed.add_field(name='After', value=after.nick, inline=False)
embed.set_footer(text=f"Author ID: {after.id}")
try:
await logch.send(embed=embed)
except Exception:
pass
if before.roles != after.roles:
logid = self.logchannels[after.guild.id] if after.guild.id in self.logchannels else None
if logid:
logch = after.guild.get_channel(logid['actionlogs'])
else:
return
if logch:
broles = []
aroles = []
changed = []
for role in before.roles:
broles.append(role.name)
for role in after.roles:
aroles.append(role.name)
s = set(aroles)
removed = [x for x in broles if x not in s]
s = set(broles)
added = [x for x | |
identifiers[y]
if key2 == key1:
continue
if param_loglevel >= 3:
print "#############################################"
print "# comparing %s to %s" % (key1, key2)
seq_master = mali[key2]
ref_exons = exons[key2]
map_cmp2ref = MaliIO.getMapFromMali(seq, seq_master, gap_char)
# map exon boundaries to reference sequence
cmp_exons = []
if param_loglevel >= 5:
print str(alignlib_lite.AlignmentFormatEmissions(map_cmp2ref))
for e in exons[key1]:
ne = e.GetCopy()
ne.mPeptideFrom = MyMap(map_cmp2ref, e.mPeptideFrom + 1, 3, -1)
ne.mPeptideTo = MyMap(map_cmp2ref, e.mPeptideTo, 3, 0)
cmp_exons.append(ne)
# massage boundaries for terminal exons:
if cmp_exons[0].mPeptideFrom <= 0:
cmp_exons[0].mPeptideFrom = ref_exons[0].mPeptideFrom
if cmp_exons[-1].mPeptideTo <= 0:
cmp_exons[-1].mPeptideTo = ref_exons[-1].mPeptideTo
if param_loglevel >= 4:
for e in exons[key1]:
print "# exon", str(e)
if param_loglevel >= 3:
for e in cmp_exons:
print "# exon", str(e)
for e in ref_exons:
print "# exon", str(e)
# do exon comparison
comparison = Exons.CompareGeneStructures(
cmp_exons,
ref_exons,
threshold_min_pide=0,
threshold_slipping_exon_boundary=param_threshold_splipping_exon_boundary,
threshold_terminal_exon=param_threshold_terminal_exon)
if param_loglevel >= 3:
print comparison.Pretty(prefix="# EVAL: ")
# analyse results
min_nexons = min(len(cmp_exons), len(ref_exons))
max_nexons = max(len(cmp_exons), len(ref_exons))
similarity = (max_nexons - comparison.mNumIdenticalExons) * \
(abs(comparison.mNumDifferenceExons))
is_perfect = False
is_ok = False
status = []
# non-equivalent exon pairs
ne = len(cmp_exons) - comparison.mNumIdenticalExons - \
comparison.mNumSkippedExons
is_perfect = False
is_ok = False
if comparison.mNumIdenticalExons == 0:
# F: complete and utter failure, no excuses
status.append("F")
else:
if ne == 0:
# P: perfect conservation
status.append("=")
is_ok = True
is_perfect = True
elif ne == min_nexons - comparison.mNumSkippedExons:
# D: completely different predictions
status.append("D")
elif ne in (1, 2):
# A: almost conserved
status.append("A")
is_ok = True
elif ne > 2:
# M : mostly conserved (in case of long proteins that is
# good enough).
if (100 * comparison.mNumIdenticalExons) / max_nexons > param_evaluate_min_percent_exon_identity:
status.append("M")
else:
# S : spuriously conserved
status.append("S")
else:
# U: unconserved
status.append("U")
if len(cmp_exons) > len(ref_exons):
status.append(">")
elif len(ref_exons) < len(cmp_exons):
status.append("<")
else:
status.append("=")
if min_nexons == max_nexons and min_nexons == 1:
status.append("S")
elif min_nexons == 1 and max_nexons == 2:
status.append("s")
elif min_nexons == 2 and max_nexons == 2:
status.append("D")
elif min_nexons == 2 and max_nexons > 2:
status.append("d")
elif min_nexons == max_nexons:
status.append("M")
elif min_nexons > 2 and max_nexons > 2:
status.append("m")
else:
status.append("U")
status = string.join(status, "")
structure_compatibility = 100
if is_ok:
nok += 1
structure_compatibility = 100 - 100 * \
(comparison.mNumIdenticalExons +
comparison.mNumSkippedExons) / len(cmp_exons)
if is_perfect:
nperfect += 1
structure_compatibility = 0
if abs(comparison.mNumDifferenceExons) > param_max_exons_difference:
compatibility_value = 100
else:
compatibility_value = structure_compatibility
t = comparison.mNumRefBoundaries + comparison.mNumCmpBoundaries
if t == 0:
compatibility_value = 0
else:
compatibility_value = 100 * \
(comparison.mNumMissedRefBoundaries +
comparison.mNumMissedCmpBoundaries) / t
matrix_compatibility[x][y] = compatibility_value
nidentical_exons += comparison.mNumIdenticalExons
nskipped_exons += comparison.mNumSkippedExons
ntotal_exons += len(cmp_exons)
if param_loglevel >= 2:
print "%s\tgenepair\t%s\t%s\t%s\t%i\t%i\t%i\t%s" % (prefix, key1, key2, status, compatibility_value,
len(cmp_exons), len(ref_exons), str(comparison))
# comparison to reference: count separately:
if rx.search(key2):
ref_nidentical_exons += comparison.mNumIdenticalExons
ref_nskipped_exons += comparison.mNumSkippedExons
ref_ntotal_exons += len(cmp_exons)
if is_ok:
ref_nok += 1
if is_perfect:
ref_nperfect += 1
ref_ntotal += 1
ntotal = wmali * (wmali - 1)
print "%s\tallstructure\t%i\t%i\t%i\t%6.4f\t%6.4f\t%i\t%i\t%i\t%6.4f\t%6.4f" % (prefix,
ntotal, nperfect, nok,
float(
nperfect) / ntotal, float(nok) / ntotal,
ntotal_exons, nidentical_exons, nskipped_exons,
float(
nidentical_exons) / ntotal_exons,
float(nidentical_exons + nskipped_exons) / ntotal_exons)
if ref_ntotal > 0:
if ref_ntotal_exons == 0:
raise "no exons in reference : ref_ntotal_exons = 0, ref_ntotal = %i" % (
ref_ntotal)
print "%s\trefstructure\t%i\t%i\t%i\t%6.4f\t%6.4f\t%i\t%i\t%i\t%6.4f\t%6.4f" % (prefix,
ref_ntotal, ref_nperfect, ref_nok,
float(
ref_nperfect) / ref_ntotal, float(ref_nok) / ref_ntotal,
ref_ntotal_exons, ref_nidentical_exons, ref_nskipped_exons,
float(
ref_nidentical_exons) / ref_ntotal_exons,
float(ref_nidentical_exons + ref_nskipped_exons) / ref_ntotal_exons)
print "%s\tnexons\t%i\t%i\t" % (prefix,
len(anexons), ref_nexons) +\
string.join(map(lambda x: "%.2f" % x, (min(anexons),
max(anexons),
scipy.mean(
anexons),
scipy.median(
anexons),
numpy.std(anexons))), "\t")
return matrix_compatibility
# ------------------------------------------------------------------------
def ClusterMatrixClosestDistance(identifiers,
                                 master_pattern,
                                 species_pattern,
                                 matrix_coverage,
                                 matrix_compatibility,
                                 matrix_identity):
    """cluster identifiers by their coverage.

    Each identifier matching ``master_pattern`` seeds one cluster; the
    remaining identifiers are assigned to the master with the highest
    percent identity, subject to minimum coverage/identity and maximum
    incompatibility thresholds (module-level ``param_clustering_*``).
    Masters that are themselves highly similar (``param_cluster_merging_*``)
    are merged first. Finally, within each cluster only the best entry per
    species is kept; displaced entries return to the unassigned pool.

    Returns a tuple ``(clusters, unassigned)`` where ``clusters`` maps a
    master identifier to its member identifiers, and ``unassigned`` is the
    list of identifiers not placed in any cluster. If no master matches,
    returns ``({None: identifiers}, [])``.

    NOTE(review): Python 2 code (print statements, dict ``keys()`` list).
    """
    rxm = re.compile(master_pattern)
    rxs = re.compile(species_pattern)
    # positions of masters in identifier list
    master_ids = []
    # members of each clusters
    clusters = {}
    # set of species per cluster
    species = {}
    # unassigned identifiers (no masters)
    unassigned = {}
    # initialize, each cluster contains one master
    for x in range(len(identifiers)):
        i = identifiers[x]
        g = rxs.search(i)
        if g:
            s = g.groups()[0]
            species[i] = {}
        if rxm.search(i):
            master_ids.append(x)
            # cluster entry layout: (identifier, index, species, identity,
            # is_master); masters carry a placeholder species/identity.
            clusters[i] = [(i, x, "", 0.0, True)]
        else:
            unassigned[i] = 1
    # degenerate case: no master present, everything goes into one bucket
    if len(master_ids) == 0:
        clusters = {None: identifiers}
        return clusters, []
    if param_loglevel >= 2:
        print "# CLUSTERING: at start: %i/%i clusters for %i unassigned" % (len(clusters), len(master_ids), len(unassigned))
    # merge clusters if masters are similar
    # NOTE(review): `skip` is never used after initialization.
    skip = {}
    new = []
    for x in range(0, len(master_ids)):
        xm = master_ids[x]
        xi = identifiers[xm]
        # keep only masters that still own a cluster (not merged away)
        if xi in clusters:
            new.append(xm)
        for y in range(x + 1, len(master_ids)):
            # NOTE(review): unreachable — y starts at x + 1, so y != x always.
            if y == x:
                continue
            ym = master_ids[y]
            yi = identifiers[ym]
            if yi not in clusters:
                continue
            # absorb cluster y into cluster x when the two masters overlap
            # well enough and are sufficiently identical
            if matrix_coverage[xm][ym] >= param_cluster_merging_min_coverage and \
               matrix_identity[xm][ym] >= param_cluster_merging_min_identity:
                if param_loglevel >= 2:
                    print "# merging: adding %s to %s: cov=%5.2f, pid=%5.2f" % (yi, xi, matrix_coverage[xm][ym], matrix_identity[xm][ym])
                clusters[xi] += clusters[yi]
                del clusters[yi]
            else:
                if param_loglevel >= 3:
                    print "# not merging %s to %s: cov=%5.2f, pid=%5.2f" % (yi, xi, matrix_coverage[xm][ym], matrix_identity[xm][ym])
    if param_loglevel >= 2:
        print "# CLUSTERING: after merging: %i clusters for %i unassigned" % (len(clusters), len(unassigned))
        sys.stdout.flush()
    master_ids = new
    # assign matches to closest cluster (in terms of percent identity).
    for x in range(len(identifiers)):
        # NOTE(review): `id` shadows the builtin, and `species` below
        # clobbers the per-identifier dict built during initialization.
        id = identifiers[x]
        g = rxs.search(id)
        if g:
            species = g.groups()[0]
        else:
            species = None
        if id not in unassigned:
            continue
        # best: best entry per identifier (in terms of pide)
        # with compatible gene structure and good overlap
        best = None
        best_m = None
        for m in master_ids:
            if param_loglevel >= 5:
                print "# pair:", id, identifiers[m], matrix_identity[x][m], matrix_coverage[x][m], matrix_compatibility[x][m]
            # threshold filters: enough coverage and identity, and a gene
            # structure that is not too incompatible with the master
            if matrix_coverage[x][m] < param_clustering_min_coverage:
                continue
            if matrix_identity[x][m] < param_clustering_min_identity:
                continue
            if matrix_compatibility[x][m] > param_clustering_max_compatibility:
                continue
            if best is None or matrix_identity[x][m] > best:
                best = matrix_identity[x][m]
                best_m = m
        if best is not None:
            if param_loglevel >= 2:
                print "# assigning %s to %s: pid=%5.2f, cov=%5.2f, cmp=%5.2f" % (id, identifiers[best_m],
                                                                                 matrix_identity[x][best_m],
                                                                                 matrix_coverage[x][best_m],
                                                                                 matrix_compatibility[x][best_m])
            clusters[identifiers[best_m]].append((id, x, species, best, False))
            del unassigned[id]
    if param_loglevel >= 2:
        print "# CLUSTERING: after assignment: %i clusters for %i unassigned" % (len(clusters), len(unassigned))
    # for each cluster sort according to compatibility and keep best
    for m in master_ids:
        new = []
        to_sort = []
        master_id = identifiers[m]
        for id, index, species, identity, is_master in clusters[master_id]:
            if is_master:
                # the master itself is always kept
                new.append(id)
            else:
                # identity is negated so that ascending sort puts the
                # highest-identity entry first within a species
                to_sort.append((species,
                                matrix_compatibility[index][m],
                                -identity,
                                matrix_coverage[index][m],
                                id))
        # this sorts by species, compatibility and percent identity
        to_sort.sort()
        last_species = None
        for species, compatibility, identity, coverage, id in to_sort:
            if last_species == species:
                # a better entry for this species was already kept;
                # demote this one back to the unassigned pool
                if param_loglevel >= 2:
                    print "# cluster: %s: removing %s at pid=%5.2f, cmp=%5.2f, cov=%5.2f" % (master_id, id, -identity, compatibility, coverage)
                unassigned[id] = 1
                continue
            else:
                if param_loglevel >= 2:
                    print "# cluster: %s: keeping %s at pid=%5.2f, cmp=%5.2f, cov=%5.2f" % (master_id, id, -identity, compatibility, coverage)
                last_species = species
                new.append(id)
        clusters[master_id] = new
    if param_loglevel >= 2:
        print "# CLUSTERING: after compatiblitity: %i clusters for %i unassigned" % (len(clusters), len(unassigned))
    return clusters, unassigned.keys()
# ------------------------------------------------------------------------
def WriteSpeciesCoverage(identifiers, species_pattern, prefix=""):
    """write number of species present in identifiers.

    The species of each identifier is the first capture group of
    ``species_pattern``; identifiers that do not match are tallied as
    "unknown". Prints one tab-separated summary line: prefix, number of
    distinct species, largest per-species count, number of species seen
    at least twice, and the count of unknowns.

    NOTE(review): Python 2 code (print statement; ``filter`` returning a
    list).
    """
    # per-species occurrence counts
    species = {}
    # identifiers whose species could not be parsed
    nunknown = 0
    rx = re.compile(species_pattern)
    for i in identifiers:
        x = rx.search(i)
        if x:
            s = x.groups()[0]
        else:
            nunknown += 1
            s = "unknown"
        if s not in species:
            species[s] = 0
        species[s] += 1
    # guard against max() on an empty sequence
    if len(species) == 0:
        max_species = 0
    else:
        max_species = max(species.values())
    print "%s\tspecies\t%i\t%i\t%i\t%i" % (prefix,
                                           len(species),
                                           max_species,
                                           len(filter(
                                               lambda x: x >= 2, species.values())),
                                           nunknown)
# ------------------------------------------------------------------------
def WriteCodonSummary(mali, identifiers, frame_columns, prefix="", gap_char="-"):
"""write codon summary."""
new_mali = {}
aligned = []
codons = []
stops = []
nclean = 0
total_no_stops = 0
for key, seq in core_mali.items():
new_mali[key], naligned, ncodons, nstops = MaliIO.getCodonSequence(
seq, frame_columns, param_gap_char, remove_stops=True)
aligned.append(naligned)
codons.append(ncodons)
stops.append(nstops)
if nstops == 0:
total_no_stops += 1
if naligned == ncodons and nstops == 0:
nclean += 1
print "%s\tcodons\t%i\t%i\t" % (prefix, nclean, total_no_stops) +\
string.join(map(lambda x: "%.2f" % x, (min(aligned),
max(aligned),
scipy.mean(aligned),
scipy.median(aligned),
numpy.std(aligned))), "\t") + "\t" +\
string.join(map(lambda x: "%.2f" % x, (min(codons),
max(codons),
scipy.mean(codons),
| |
else:
np.testing.assert_array_almost_equal(diff, 0, decimal=15)
def test_concentration_profile(self):
"""Test that the concentration in the centre of the negative particles is
greater than the average concentration in the particle and also that the
concentration on the surface of the negative particle is less than the average
concentration in the particle. Test opposite is true for the positive
particle."""
# TODO: add an output for average particle concentration
def test_fluxes(self):
"""Test that no flux holds in the centre of the particle. Test that surface
flux in the negative particles is greater than zero and that the flux in the
positive particles is less than zero during a discharge."""
t, x_n, x_p, r_n, r_p = (
self.t,
self.x_n,
self.x_p,
self.r_n_edge,
self.r_p_edge,
)
if self.model.options["particle"] == "uniform profile":
# Fluxes are zero everywhere since the concentration is uniform
np.testing.assert_array_almost_equal(self.N_s_n(t, x_n, r_n), 0)
np.testing.assert_array_almost_equal(self.N_s_p(t, x_p, r_p), 0)
else:
if self.operating_condition == "discharge":
if self.model.options["particle"] == "quartic profile":
# quartic profile has a transient at the beginning where
# the concentration "rearranges" giving flux of the opposite
# sign, so ignore first three times
np.testing.assert_array_less(0, self.N_s_n(t[3:], x_n, r_n[1:]))
np.testing.assert_array_less(self.N_s_p(t[3:], x_p, r_p[1:]), 0)
else:
np.testing.assert_array_less(
-1e-16, self.N_s_n(t[1:], x_n, r_n[1:])
)
np.testing.assert_array_less(self.N_s_p(t[1:], x_p, r_p[1:]), 1e-16)
if self.operating_condition == "charge":
np.testing.assert_array_less(self.N_s_n(t[1:], x_n, r_n[1:]), 1e-16)
np.testing.assert_array_less(-1e-16, self.N_s_p(t[1:], x_p, r_p[1:]))
if self.operating_condition == "off":
np.testing.assert_array_almost_equal(self.N_s_n(t, x_n, r_n), 0)
np.testing.assert_array_almost_equal(self.N_s_p(t, x_p, r_p), 0)
np.testing.assert_array_almost_equal(0, self.N_s_n(t, x_n, r_n[0]), decimal=4)
np.testing.assert_array_almost_equal(0, self.N_s_p(t, x_p, r_p[0]), decimal=4)
def test_all(self):
self.test_concentration_increase_decrease()
self.test_concentration_limits()
self.test_conservation()
self.test_concentration_profile()
self.test_fluxes()
class ElectrolyteConcentrationTests(BaseOutputTest):
    """Output tests for the electrolyte concentration variables.

    Checks positivity, conservation of lithium, flux boundary values and
    consistency of the per-domain splitting of the concentration field.
    """

    def __init__(self, model, param, disc, solution, operating_condition):
        super().__init__(model, param, disc, solution, operating_condition)
        # Concentration over the whole cell and per domain.
        self.c_e = solution["Electrolyte concentration"]
        self.c_e_n = solution["Negative electrolyte concentration"]
        self.c_e_s = solution["Separator electrolyte concentration"]
        self.c_e_p = solution["Positive electrolyte concentration"]
        # X-averaged concentrations (only referenced by the commented-out
        # checks in test_concentration_profile below).
        self.c_e_av = solution["X-averaged electrolyte concentration"]
        self.c_e_n_av = solution["X-averaged negative electrolyte concentration"]
        self.c_e_s_av = solution["X-averaged separator electrolyte concentration"]
        self.c_e_p_av = solution["X-averaged positive electrolyte concentration"]
        # Total lithium, used by the conservation check.
        self.c_e_tot = solution["Total lithium in electrolyte [mol]"]
        self.N_e_hat = solution["Electrolyte flux"]
        # self.N_e_hat = solution["Reduced cation flux"]

    def test_concentration_limit(self):
        """Test that the electrolyte concentration is always greater than zero."""
        np.testing.assert_array_less(-self.c_e(self.t, self.x), 0)

    def test_conservation(self):
        """Test conservation of species in the electrolyte."""
        # sufficient to check average concentration is constant
        # Relative change of total lithium between consecutive time steps
        # should vanish to assertion precision.
        diff = (
            self.c_e_tot(self.solution.t[1:]) - self.c_e_tot(self.solution.t[:-1])
        ) / self.c_e_tot(self.solution.t[:-1])
        np.testing.assert_array_almost_equal(diff, 0)

    def test_concentration_profile(self):
        """Test continuity of the concentration profile. Test average concentration is
        as expected and that the concentration in the negative electrode is greater
        than the average and the concentration in the positive is less than the average
        during a discharge."""
        # Currently a no-op: the checks below await average-concentration
        # outputs.
        # TODO: uncomment when have average concentrations
        # small number so that can use array less
        # epsilon = 0.001
        # if self.operating_condition == "discharge":
        #     np.testing.assert_array_less(
        #         -self.c_e_n_av.entries, self.c_e_av.entries + epsilon
        #     )
        #     np.testing.assert_array_less(
        #         self.c_e_p_av.entries, self.c_e_av.entries + epsilon
        #     )
        # elif self.operating_condition == "charge":
        #     np.testing.assert_array_less(
        #         -self.c_e_n_av.entries, self.c_e_av.entries + epsilon
        #     )
        #     np.testing.assert_array_less(
        #         self.c_e_p_av.entries, self.c_e_av.entries + epsilon
        #     )
        # elif self.operating_condition == "off":
        #     np.testing.assert_array_equal(self.c_e_n_av.entries, self.c_e_av.entries)
        #     np.testing.assert_array_equal(self.c_e_s_av.entries, self.c_e_av.entries)
        #     np.testing.assert_array_equal(self.c_e_p_av.entries, self.c_e_av.entries)

    def test_fluxes(self):
        """Test current collector fluxes are zero. Tolerance reduced for surface form
        models (bug in implementation of boundary conditions?)"""
        t, x = self.t, self.x_edge
        np.testing.assert_array_almost_equal(self.N_e_hat(t, x[0]), 0, decimal=3)
        np.testing.assert_array_almost_equal(self.N_e_hat(t, x[-1]), 0, decimal=3)

    def test_splitting(self):
        """Test that when splitting the concentrations and fluxes by negative electrode,
        separator, and positive electrode, we get the correct behaviour: continuous
        solution and recover combined through concatenation."""
        t, x_n, x_s, x_p, x = self.t, self.x_n, self.x_s, self.x_p, self.x
        c_e_combined = np.concatenate(
            (self.c_e_n(t, x_n), self.c_e_s(t, x_s), self.c_e_p(t, x_p)), axis=0
        )
        np.testing.assert_array_equal(self.c_e(t, x), c_e_combined)

    def test_all(self):
        # Run every electrolyte-concentration check.
        self.test_concentration_limit()
        self.test_conservation()
        self.test_concentration_profile()
        self.test_fluxes()
        self.test_splitting()
class PotentialTests(BaseOutputTest):
    """Output tests for electrode/electrolyte potentials.

    Checks reference-potential boundary conditions, consistency of the
    surface potential differences with the electrode and electrolyte
    potentials, and splitting of the potential gradient by domain.
    """

    def __init__(self, model, param, disc, solution, operating_condition):
        super().__init__(model, param, disc, solution, operating_condition)
        # Electrode potentials and their x-averages.
        self.phi_s_n = solution["Negative electrode potential [V]"]
        self.phi_s_p = solution["Positive electrode potential [V]"]
        self.phi_s_n_av = solution["X-averaged negative electrode potential [V]"]
        self.phi_s_p_av = solution["X-averaged positive electrode potential [V]"]
        # Electrolyte potential, full and per domain.
        self.phi_e = solution["Electrolyte potential [V]"]
        self.phi_e_n = solution["Negative electrolyte potential [V]"]
        self.phi_e_s = solution["Separator electrolyte potential [V]"]
        self.phi_e_p = solution["Positive electrolyte potential [V]"]
        self.phi_e_n_av = solution["X-averaged negative electrolyte potential [V]"]
        self.phi_e_p_av = solution["X-averaged positive electrolyte potential [V]"]
        # Surface potential differences (electrode minus electrolyte).
        self.delta_phi_n = solution[
            "Negative electrode surface potential difference [V]"
        ]
        self.delta_phi_p = solution[
            "Positive electrode surface potential difference [V]"
        ]
        self.delta_phi_n_av = solution[
            "X-averaged negative electrode surface potential difference [V]"
        ]
        self.delta_phi_p_av = solution[
            "X-averaged positive electrode surface potential difference [V]"
        ]
        # Gradients, used by the splitting check.
        self.grad_phi_e = solution["Gradient of electrolyte potential"]
        self.grad_phi_e_n = solution["Gradient of negative electrolyte potential"]
        self.grad_phi_e_s = solution["Gradient of separator electrolyte potential"]
        self.grad_phi_e_p = solution["Gradient of positive electrolyte potential"]

    def test_negative_electrode_potential_profile(self):
        """Test that negative electrode potential is zero on left boundary. Test
        average negative electrode potential is less than or equal to zero.

        NOTE(review): only the boundary check is implemented; the average
        check described above is not yet coded.
        """
        np.testing.assert_array_almost_equal(self.phi_s_n(self.t, x=0), 0, decimal=5)

    def test_positive_electrode_potential_profile(self):
        """Test average positive electrode potential is less than the positive electrode
        potential on the right current collector."""
        # Currently a no-op: awaits averaged-potential outputs.
        # TODO: add these when have averages

    def test_potential_differences(self):
        """Test that potential differences are the difference between electrode
        potential and electrolyte potential"""
        t, x_n, x_p = self.t, self.x_n, self.x_p
        np.testing.assert_array_almost_equal(
            self.phi_s_n(t, x_n) - self.phi_e_n(t, x_n), self.delta_phi_n(t, x_n)
        )
        # Positive electrode uses a looser tolerance.
        np.testing.assert_array_almost_equal(
            self.phi_s_p(t, x_p) - self.phi_e_p(t, x_p),
            self.delta_phi_p(t, x_p),
            decimal=5,
        )

    def test_average_potential_differences(self):
        """Test that average potential differences are the difference between electrode
        potential and electrolyte potential"""
        t = self.t
        np.testing.assert_array_almost_equal(
            self.phi_s_n_av(t) - self.phi_e_n_av(t), self.delta_phi_n_av(t)
        )
        np.testing.assert_array_almost_equal(
            self.phi_s_p_av(t) - self.phi_e_p_av(t), self.delta_phi_p_av(t)
        )

    def test_gradient_splitting(self):
        # Per-domain gradients concatenated must reproduce the full-cell
        # gradient exactly.
        t, x_n, x_s, x_p, x = self.t, self.x_n, self.x_s, self.x_p, self.x
        grad_phi_e_combined = np.concatenate(
            (
                self.grad_phi_e_n(t, x_n),
                self.grad_phi_e_s(t, x_s),
                self.grad_phi_e_p(t, x_p),
            ),
            axis=0,
        )
        np.testing.assert_array_equal(self.grad_phi_e(t, x), grad_phi_e_combined)

    def test_all(self):
        # NOTE(review): test_gradient_splitting is not run here — confirm
        # whether that omission is intentional.
        self.test_negative_electrode_potential_profile()
        self.test_positive_electrode_potential_profile()
        self.test_potential_differences()
        self.test_average_potential_differences()
class CurrentTests(BaseOutputTest):
    """Output tests for interfacial, electrode and electrolyte currents.

    Checks that interfacial current densities average to the applied
    current, that electrode plus electrolyte current equals the applied
    current everywhere, and that current-density boundary values hold.
    """

    def __init__(self, model, param, disc, solution, operating_condition):
        super().__init__(model, param, disc, solution, operating_condition)
        # Interfacial and exchange current densities.
        self.j = solution["Interfacial current density"]
        self.j0 = solution["Exchange current density"]
        self.j_n = solution["Negative electrode interfacial current density"]
        self.j_p = solution["Positive electrode interfacial current density"]
        self.j_n_av = solution[
            "X-averaged negative electrode interfacial current density"
        ]
        self.j_p_av = solution[
            "X-averaged positive electrode interfacial current density"
        ]
        # SEI side-reaction current (negative electrode only).
        self.j_n_sei = solution["SEI interfacial current density"]
        self.j_n_sei_av = solution["X-averaged SEI interfacial current density"]
        self.j0_n = solution["Negative electrode exchange current density"]
        self.j0_p = solution["Positive electrode exchange current density"]
        # Macroscopic current densities.
        self.i_s_n = solution["Negative electrode current density"]
        self.i_s_p = solution["Positive electrode current density"]
        self.i_s = solution["Electrode current density"]
        self.i_e = solution["Electrolyte current density"]
        # Surface-area-to-volume ratios, used to average the interfacial
        # current densities.
        self.a_n = solution["Negative electrode surface area to volume ratio"]
        self.a_p = solution["Positive electrode surface area to volume ratio"]

    def test_interfacial_current_average(self):
        """Test that average of the surface area density distribution (in x)
        multiplied by the interfacial current density is equal to the true
        value."""
        # Negative electrode includes the SEI contribution.
        np.testing.assert_array_almost_equal(
            np.mean(
                self.a_n(self.t, self.x_n)
                * (self.j_n(self.t, self.x_n) + self.j_n_sei(self.t, self.x_n)),
                axis=0,
            ),
            self.i_cell / self.l_n,
            decimal=3,
        )
        np.testing.assert_array_almost_equal(
            np.mean(
                self.a_p(self.t, self.x_p) * self.j_p(self.t, self.x_p),
                axis=0,
            ),
            -self.i_cell / self.l_p,
            decimal=4,
        )

    def test_conservation(self):
        """Test sum of electrode and electrolyte current densities give the applied
        current density"""
        t, x_n, x_s, x_p = self.t, self.x_n, self.x_s, self.x_p
        current_param = self.model.param.current_with_time
        i_cell = self.param.process_symbol(current_param).evaluate(t=t)
        # i_s + i_e must equal the applied current in every domain.
        for x in [x_n, x_s, x_p]:
            np.testing.assert_array_almost_equal(
                self.i_s(t, x) + self.i_e(t, x), i_cell, decimal=2
            )
        # The concatenated electrode current must agree with the
        # per-electrode variables.
        np.testing.assert_array_almost_equal(
            self.i_s(t, x_n), self.i_s_n(t, x_n), decimal=3
        )
        np.testing.assert_array_almost_equal(
            self.i_s(t, x_p), self.i_s_p(t, x_p), decimal=3
        )

    def test_current_density_boundaries(self):
        """Test the boundary values of the current densities"""
        t, x_n, x_p = self.t, self.x_n_edge, self.x_p_edge
        current_param = self.model.param.current_with_time
        i_cell = self.param.process_symbol(current_param).evaluate(t=t)
        # Full applied current at the current collectors, zero at the
        # separator-facing edges.
        np.testing.assert_array_almost_equal(self.i_s_n(t, x_n[0]), i_cell, decimal=2)
        np.testing.assert_array_almost_equal(self.i_s_n(t, x_n[-1]), 0, decimal=4)
        np.testing.assert_array_almost_equal(self.i_s_p(t, x_p[-1]), i_cell, decimal=3)
        np.testing.assert_array_almost_equal(self.i_s_p(t, x_p[0]), 0, decimal=4)

    def test_all(self):
        self.test_conservation()
        self.test_current_density_boundaries()
        # Skip average current test if capacitance is used, since average interfacial
        # current density will be affected slightly by capacitance effects
        if self.model.options["surface form"] != "differential":
            self.test_interfacial_current_average()
class VelocityTests(BaseOutputTest):
    def __init__(self, model, param, disc, solution, operating_condition):
        """Extract the velocity-related output variables from the solution."""
        super().__init__(model, param, disc, solution, operating_condition)
        # Volume-averaged (cell-scale) velocity.
        self.v_box = solution["Volume-averaged velocity"]
        # Electrolyte current density.
        self.i_e = solution["Electrolyte current density"]
        # Transverse volume-averaged acceleration.
        self.dVbox_dz = solution["Transverse volume-averaged acceleration"]
def test_velocity_boundaries(self):
"""Test the boundary values of the current densities"""
L_x = self.x_edge[-1]
np.testing.assert_array_almost_equal(self.v_box(self.t, 0), 0, decimal=4)
np.testing.assert_array_almost_equal(self.v_box(self.t, L_x), 0, decimal=4)
def test_vertical_velocity(self):
"""Test the boundary values of the current densities"""
L_x = self.x_edge[-1]
np.testing.assert_array_equal(self.dVbox_dz(self.t, 0), 0)
np.testing.assert_array_less(self.dVbox_dz(self.t, 0.5 * L_x), 0)
np.testing.assert_array_equal(self.dVbox_dz(self.t, L_x), 0)
def test_velocity_vs_current(self):
"""Test the boundary values of the current densities"""
t, x_n, x_p = self.t, self.x_n, self.x_p
beta_n = self.model.param.beta_n
beta_n = self.param.evaluate(beta_n)
beta_p = self.model.param.beta_p
| |
<gh_stars>0
# -*- coding: utf-8 -*-
"""Cisco Identity Services Engine Certificates API wrapper.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import *
from past.builtins import basestring
from ...restsession import RestSession
from ...utils import (
check_type,
dict_from_items_with_values,
apply_path_params,
dict_of_str,
get_next_page,
)
import urllib.parse
class Certificates(object):
"""Identity Services Engine Certificates API (version: 3.0.0).
Wraps the Identity Services Engine Certificates
API and exposes the API as native Python
methods that return native Python objects.
"""
def __init__(self, session, object_factory, request_validator):
"""Initialize a new Certificates
object with the provided RestSession.
Args:
session(RestSession): The RESTful session object to be used for
API calls to the Identity Services Engine service.
Raises:
TypeError: If the parameter types are incorrect.
"""
check_type(session, RestSession)
super(Certificates, self).__init__()
self._session = session
self._object_factory = object_factory
self._request_validator = request_validator
    def import_trusted_certificate(self,
                                   allow_basic_constraint_cafalse=None,
                                   allow_out_of_date_cert=None,
                                   allow_sha1_certificates=None,
                                   data=None,
                                   description=None,
                                   name=None,
                                   trust_for_certificate_based_admin_auth=None,
                                   trust_for_cisco_services_auth=None,
                                   trust_for_client_auth=None,
                                   trust_for_ise_auth=None,
                                   validate_certificate_extensions=None,
                                   headers=None,
                                   payload=None,
                                   active_validation=True,
                                   **query_parameters):
        """Purpose of the API is to add root certificate to the ISE
        truststore.

        Args:
            allow_basic_constraint_cafalse(boolean): Allow
                Certificates with Basic Constraints CA
                Field as False (required), property of
                the request body.
            allow_out_of_date_cert(boolean): Allow out of date
                certificates (required), property of the
                request body.
            allow_sha1_certificates(boolean): Allow SHA1 based
                certificates (required), property of the
                request body.
            data(string): Certificate content (required), property
                of the request body.
            description(string): Description of the certificate,
                property of the request body.
            name(string): Name of the certificate, property of the
                request body.
            trust_for_certificate_based_admin_auth(boolean): Trust
                for Certificate based Admin
                authentication, property of the request
                body.
            trust_for_cisco_services_auth(boolean): Trust for
                authentication of Cisco Services,
                property of the request body.
            trust_for_client_auth(boolean): Trust for client
                authentication and Syslog, property of
                the request body.
            trust_for_ise_auth(boolean): Trust for authentication
                within ISE, property of the request
                body.
            validate_certificate_extensions(boolean): Validate trust
                certificate extension, property of the
                request body.
            headers(dict): Dictionary of HTTP Headers to send with the Request
                .
            payload(dict): A JSON serializable Python object to send in the
                body of the Request.
            active_validation(bool): Enable/Disable payload validation.
                Defaults to True.
            **query_parameters: Additional query parameters (provides
                support for parameters that may be added in the future).

        Returns:
            RestResponse: REST response with following properties:
              - headers(MyDict): response headers.
              - response(MyDict): response body as a MyDict object. Access the object's properties by using the dot notation
                or the bracket notation.
              - content(bytes): representation of the request's response
              - text(str): representation of the request's response

        Raises:
            TypeError: If the parameter types are incorrect.
            MalformedRequest: If the request body created is invalid.
            ApiError: If the Identity Services Engine cloud returns an error.
        """
        # Raise TypeError early if headers is not a dict.
        check_type(headers, dict)
        # No-op branch kept from the code generator; per-header checks
        # (e.g. Content-Type) would live here.
        if headers is not None:
            pass
        # Merge caller headers over the session defaults; remember whether
        # custom headers were supplied so the right post() overload is used.
        with_custom_headers = False
        _headers = self._session.headers or {}
        if headers:
            _headers.update(dict_of_str(headers))
            with_custom_headers = True
        # XML bodies are sent as raw strings, JSON bodies as dicts.
        is_xml_payload = 'application/xml' in _headers.get('Content-Type', [])
        if active_validation and is_xml_payload:
            check_type(payload, basestring)
        if active_validation and not is_xml_payload:
            check_type(payload, dict)
        # This endpoint defines no fixed query or path parameters; only
        # forward-compatible **query_parameters are passed through.
        _params = {
        }
        _params.update(query_parameters)
        _params = dict_from_items_with_values(_params)
        path_params = {
        }
        if is_xml_payload:
            _payload = payload
        else:
            # Map the snake_case keyword arguments onto the camelCase keys
            # expected by the ISE API; None-valued entries are dropped below.
            _payload = {
                'name':
                    name,
                'data':
                    data,
                'description':
                    description,
                'validateCertificateExtensions':
                    validate_certificate_extensions,
                'allowSHA1Certificates':
                    allow_sha1_certificates,
                'allowOutOfDateCert':
                    allow_out_of_date_cert,
                'allowBasicConstraintCAFalse':
                    allow_basic_constraint_cafalse,
                'trustForIseAuth':
                    trust_for_ise_auth,
                'trustForClientAuth':
                    trust_for_client_auth,
                'trustForCiscoServicesAuth':
                    trust_for_cisco_services_auth,
                'trustForCertificateBasedAdminAuth':
                    trust_for_certificate_based_admin_auth,
            }
            # Explicit payload entries win over the keyword arguments.
            _payload.update(payload or {})
            _payload = dict_from_items_with_values(_payload)
        # Validate the assembled JSON body against the generated schema model.
        if active_validation and not is_xml_payload:
            self._request_validator('jsd_c8cd2f618b655d988ce626e579486596_v3_0_0')\
                .validate(_payload)
        e_url = ('/api/v1/certs/trusted-certificate/import')
        endpoint_full_url = apply_path_params(e_url, path_params)
        # requests-style kwargs: raw 'data' for XML, 'json' for dict bodies.
        request_params = {'data': _payload} if is_xml_payload else {'json': _payload}
        if with_custom_headers:
            _api_response = self._session.post(endpoint_full_url, params=_params,
                                               headers=_headers,
                                               **request_params)
        else:
            _api_response = self._session.post(endpoint_full_url, params=_params,
                                               **request_params)
        # Wrap the raw response in the generated model object.
        return self._object_factory('bpm_c8cd2f618b655d988ce626e579486596_v3_0_0', _api_response)
def import_system_certificate(self,
admin=None,
allow_extended_validity=None,
allow_out_of_date_cert=None,
allow_replacement_of_certificates=None,
allow_replacement_of_portal_group_tag=None,
allow_sha1_certificates=None,
allow_wild_card_certificates=None,
data=None,
eap=None,
ims=None,
name=None,
password=<PASSWORD>,
portal=None,
portal_group_tag=None,
private_key_data=None,
pxgrid=None,
radius=None,
saml=None,
validate_certificate_extensions=None,
headers=None,
payload=None,
active_validation=True,
**query_parameters):
"""Purpose of the API is to import system certificate into ISE.
Args:
admin(boolean): Use certificate to authenticate the ISE
Admin Portal, property of the request
body.
allow_extended_validity(boolean): Allow import of
certificates with validity greater than
398 days, property of the request body.
allow_out_of_date_cert(boolean): Allow out of date
certificates (required), property of the
request body.
allow_replacement_of_certificates(boolean): Allow
Replacement of certificates (required),
property of the request body.
allow_replacement_of_portal_group_tag(boolean): Allow
Replacement of Portal Group Tag
(required), property of the request
body.
allow_sha1_certificates(boolean): Allow SHA1 based
certificates (required), property of the
request body.
allow_wild_card_certificates(boolean): Allow Wildcard
Certificates, property of the request
body.
data(string): Certificate Content (required), property
of the request body.
eap(boolean): Use certificate for EAP protocols that use
SSL/TLS tunneling, property of the
request body.
ims(boolean): Use certificate for the ISE Messaging
Service, property of the request body.
name(string): Name of the certificate, property of the
request body.
password(string): Certificate Password (required).,
property of the request body.
portal(boolean): Use for portal, property of the request
body.
portal_group_tag(string): Set Group tag, property of the
request body.
private_key_data(string): Private Key data (required),
property of the request body.
pxgrid(boolean): Use certificate for the pxGrid
Controller, property of the request
body.
radius(boolean): Use certificate for the RADSec server,
property of the request body.
saml(boolean): Use certificate for SAML Signing,
property of the request body.
validate_certificate_extensions(boolean): Validate
Certificate Extensions, property of the
request body.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(dict): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**query_parameters: Additional query parameters (provides
support for parameters that may be added in the future).
Returns:
RestResponse: REST response with following properties:
- headers(MyDict): response headers.
- response(MyDict): response body as a MyDict object. Access the object's properties by using the dot notation
or the bracket notation.
- content(bytes): representation of the request's response
- text(str): representation of the request's response
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the Identity Services Engine cloud returns an error.
"""
check_type(headers, dict)
if headers is not None:
pass
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
is_xml_payload = 'application/xml' in _headers.get('Content-Type', [])
if active_validation and is_xml_payload:
check_type(payload, basestring)
if active_validation and not is_xml_payload:
check_type(payload, dict)
_params = {
}
_params.update(query_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
if is_xml_payload:
_payload = payload
else:
_payload = {
'name':
name,
'password':
password,
'data':
data,
'privateKeyData':
private_key_data,
'portalGroupTag':
portal_group_tag,
'admin':
admin,
'eap':
eap,
'radius':
radius,
'pxgrid':
pxgrid,
'saml':
saml,
'portal':
portal,
'ims':
ims,
'allowWildCardCertificates':
allow_wild_card_certificates,
'validateCertificateExtensions':
validate_certificate_extensions,
'allowSHA1Certificates':
allow_sha1_certificates,
'allowOutOfDateCert':
allow_out_of_date_cert,
'allowReplacementOfCertificates':
allow_replacement_of_certificates,
'allowReplacementOfPortalGroupTag':
allow_replacement_of_portal_group_tag,
'allowExtendedValidity':
allow_extended_validity,
}
_payload.update(payload or {})
_payload = dict_from_items_with_values(_payload)
if active_validation and not is_xml_payload:
self._request_validator('jsd_e6c7251a8508597f1b7ae61cbf953_v3_0_0')\
.validate(_payload)
e_url = ('/api/v1/certs/system-certificate/import')
endpoint_full_url = apply_path_params(e_url, path_params)
request_params = {'data': _payload} if is_xml_payload else {'json': _payload}
if with_custom_headers:
_api_response = self._session.post(endpoint_full_url, params=_params,
headers=_headers,
**request_params)
else:
_api_response = self._session.post(endpoint_full_url, params=_params,
**request_params)
return self._object_factory('bpm_e6c7251a8508597f1b7ae61cbf953_v3_0_0', _api_response)
def bind_csr(self,
admin=None,
allow_extended_validity=None,
allow_out_of_date_cert=None,
allow_replacement_of_certificates=None,
allow_replacement_of_portal_group_tag=None,
data=None,
eap=None,
host_name=None,
id=None,
ims=None,
name=None,
portal=None,
portal_group_tag=None,
pxgrid=None,
radius=None,
saml=None,
validate_certificate_extensions=None,
headers=None,
payload=None,
active_validation=True,
**query_parameters):
"""Purpose of the API is to Bind CA Signed Certificate.
Args:
admin(boolean): Use certificate to authenticate the ISE
Admin Portal, property of the request
body.
allow_extended_validity(boolean): Allow import of
certificates with validity greater than
398 days, property of the request body.
allow_out_of_date_cert(boolean): Allow out of date
certificates (required), property of the
request body.
allow_replacement_of_certificates(boolean): Allow
Replacement of certificates (required),
property | |
in child.get_children():
print('\n Grandchild:')
gchild.info()
"""
def test_node_remove_supports(self):
#
# Split and balance
#
node = QuadNode()
node.split()
sw_child = node.children['SW']
sw_child.split()
sw_ne_grandchild = sw_child.children['NE']
sw_ne_grandchild.split()
node.balance()
#
# Coarsen and remove supports
#
sw_ne_grandchild.merge()
node.remove_supports()
self.assertFalse(node.children['NW'].has_children(),\
'NW child should not have children after coarsening.')
self.assertFalse(node.children['SE'].has_children(),\
'SE child should not have children after coarsening.')
class TestCell(unittest.TestCase):
    """
    Test Cell Class

    Exercises the interface shared by BiCell and QuadCell: vertex access,
    tree traversal, leaf enumeration, parent/child queries, and marking.
    """
    def test_get_vertices(self):
        """Vertices of default Bi-/QuadCells, in bulk and by position key."""
        # 1D
        cell1d = BiCell()
        cell1d_vertices = np.array([[0],[1],[0.5]])
        self.assertTrue(np.allclose(cell1d.get_vertices(),cell1d_vertices),\
                        'BiCell vertices not correct.')
        self.assertTrue(np.allclose(cell1d.get_vertices('L'), np.array([[0]])),\
                        'BiCell get specific vertex not correct.')
        # 2D
        cell2d = QuadCell()
        cell2d_vertices = np.array([[0,0],[0.5,0],[1,0],[1,0.5],[1,1],\
                                    [0.5,1],[0,1],[0,0.5],[0.5,0.5]])
        self.assertTrue(np.allclose(cell2d.get_vertices(),cell2d_vertices),\
                        'QuadCell vertices not correct.')
        self.assertTrue(np.allclose(cell2d.get_vertices('M'), np.array([[0.5,0.5]])),\
                        'QuadCell get specific vertex not correct.')
    def test_traverse(self):
        """Depth- and breadth-first traversal orders after local refinement."""
        #
        # 1D
        #
        # Standard
        cell = BiCell()
        cell.split()
        cell.children['L'].split()
        cell.children['L'].children['R'].remove()
        addresses = {'breadth-first': [[],[0],[1],[0,0]],
                     'depth-first': [[],[0],[0,0],[1]]}
        for mode in ['depth-first','breadth-first']:
            count = 0
            for leaf in cell.traverse(mode=mode):
                self.assertEqual(leaf.address, addresses[mode][count],
                                 'Bicell traversal incorrect.')
                count += 1
        #
        # Standard QuadCell
        #
        cell = QuadCell()
        cell.split()
        cell.children['SE'].split()
        cell.children['SE'].children['NW'].remove()
        addresses = [[],[0],[1],[2],[3],[1,0],[1,1],[1,3]]
        count = 0
        for n in cell.traverse(mode='breadth-first'):
            self.assertEqual(n.address, addresses[count],\
                             'Incorrect address.')
            count += 1
    def test_find_leaves(self):
        """Leaf enumeration: counts, nested vs. un-nested order, flags."""
        #
        # 1D
        #
        cell = BiCell()
        leaves = cell.get_leaves()
        self.assertEqual(leaves, [cell], 'Cell should be its own leaf.')
        #
        # Split cell and L child - find leaves
        #
        cell.split()
        l_child = cell.children['L']
        l_child.split()
        leaves = cell.get_leaves()
        self.assertEqual(len(leaves),3, 'Cell should have 3 leaves.')
        #
        # Depth first order
        #
        addresses_depth_first = [[0,0],[0,1],[1]]
        leaves = cell.get_leaves(nested=False)
        for i in range(len(leaves)):
            leaf = leaves[i]
            self.assertEqual(leaf.address, addresses_depth_first[i],
                             'Incorrect order, depth first search.')
        #
        # Breadth first order
        #
        addresses_breadth_first = [[1],[0,0],[0,1]]
        leaves = cell.get_leaves(nested=True)
        for i in range(len(leaves)):
            leaf = leaves[i]
            self.assertEqual(leaf.address, addresses_breadth_first[i],
                             'Incorrect order, breadth first search.')
        cell.children['L'].children['L'].mark('1')
        cell.children['R'].mark('1')
        # FIX: was nested='True' (a truthy string); use the boolean like
        # every other call site.
        leaves = cell.get_leaves(flag='1', nested=True)
        self.assertEqual(len(leaves),2, \
                         'There should only be 2 flagged leaves')
        #
        # 2D
        #
        cell = QuadCell()
        #
        # Split cell and SW child - find leaves
        #
        cell.split()
        sw_child = cell.children['SW']
        sw_child.split()
        leaves = cell.get_leaves()
        self.assertEqual(len(leaves), 7, 'Node should have 7 leaves.')
        #
        # Nested traversal
        #
        leaves = cell.get_leaves(nested=True)
        self.assertEqual(leaves[0].address,[1], \
                         'The first leaf in the nested enumeration should have address [1]')
        leaves = cell.get_leaves()
        self.assertEqual(leaves[0].address, [0,0], \
                         'First leaf in un-nested enumeration should be [0,0].')
        #
        # Merge SW child - find leaves
        #
        for child in sw_child.get_children():
            child.remove()
        leaves = cell.get_leaves()
        self.assertEqual(len(leaves), 4, 'Node should have 4 leaves.')
    def test_cells_at_depth(self):
        # Not yet implemented.
        pass
    def test_get_root(self):
        """get_root() from a randomly chosen deep descendant."""
        for rootcell in [BiCell(), QuadCell()]:
            cell = rootcell
            for _ in range(10):
                cell.split()
                i = np.random.randint(0,2)
                pos = cell._child_positions[i]
                cell = cell.children[pos]
            self.assertEqual(cell.get_root(), rootcell, \
                             'Root cell not found')
    def test_has_children(self):
        """has_children() with and without a flag filter."""
        marked_children = ['L','SW']
        count = 0
        for cell in [BiCell(), QuadCell()]:
            self.assertFalse(cell.has_children(),
                             'Cell should not have children')
            cell.split()
            self.assertTrue(cell.has_children(),
                            'Cell should have children')
            self.assertFalse(cell.has_children(flag='1'),
                             'Cell should not have marked children')
            cell.children[marked_children[count]].mark('1')
            self.assertTrue(cell.has_children(flag='1'),
                            'Cell should have a child marked "1".')
            count += 1
    def test_has_parent(self):
        """Roots have no parent; their children do."""
        for cell in [BiCell(), QuadCell()]:
            self.assertFalse(cell.has_parent(),
                             'Root cell should not have a parent')
            cell.split()
            for child in cell.get_children():
                self.assertTrue(child.has_parent(),
                                'Child cell should have a parent.')
    def test_marking(self):
        """mark()/unmark()/is_marked(), anonymous and named flags."""
        for cell in [BiCell(), QuadCell()]:
            cell.mark()
            self.assertTrue(cell.is_marked(),'Cell should be marked.')
            cell.unmark()
            self.assertFalse(cell.is_marked(),'Cell should not be marked.')
            cell.mark('66')
            self.assertTrue(cell.is_marked(),
                            'Cell should be marked.')
            self.assertFalse(cell.is_marked('o'),
                             'Cell should not be marked "o".')
            self.assertTrue(cell.is_marked('66'),
                            'Cell should be marked 66.')
class TestBiCell(unittest.TestCase):
    """
    Test the BiCell Class

    Covers area, neighbor lookup, point location, the reference map,
    splitting (plain and gridded), and grid-position conversion.
    """
    def test_constructor(self):
        # Not yet implemented.
        pass
    def test_area(self):
        """Default BiCell has unit length/area."""
        bicell = BiCell()
        self.assertEqual(bicell.area(), 1, \
                         'area incorrect: default bicell')
    def test_find_neighbor(self):
        """Neighbors interior and exterior to the parent, across levels."""
        bicell = BiCell()
        self.assertIsNone(bicell.get_neighbor('L'), \
                          'neighbor should be None.')
        bicell.split()
        l_child = bicell.children['L']
        self.assertEqual(l_child.get_neighbor('R'), bicell.children['R'],\
                         'neighbor interior to parent cell not identified.')
        l_child.split()
        lr_grandchild = l_child.children['R']
        self.assertEqual(lr_grandchild.get_neighbor('R'),
                         bicell.children['R'],
                         'neighbor exterior to parent cell not identified.')
        bicell.children['R'].split()
        self.assertEqual(lr_grandchild.get_neighbor('R'),\
                         bicell.children['R'].children['L'],\
                         'neighbor exterior to parent cell not identified.')
        # Disabled: gridded-cell neighbor lookup (API under revision).
        """
        bicell = BiCell(grid_size=3)
        bicell.split()
        lchild = bicell.children[0]
        self.assertEqual(lchild.get_neighbor('L'),None,
                         'neighbor of gridded cell not identified as None.')
        self.assertEqual(lchild.get_neighbor('R'),bicell.children[1],
                         'neighbor of gridded cell not identified.')
        """
    def test_get_root(self):
        """get_root() from a 10-level-deep left descendant."""
        bicell = BiCell()
        cell = bicell
        for _ in range(10):
            cell.split()
            cell = cell.children['L']
        self.assertEqual(bicell, cell.get_root(),\
                         'Root cell incorrectly identified.')
    def test_contains_point(self):
        """Scalar and vectorized point-inclusion tests."""
        bicell = BiCell()
        self.assertFalse(bicell.contains_point(3))
        # FIX: np.bool was removed in NumPy 1.24; the builtin bool is the
        # documented replacement (identical dtype).
        self.assertTrue(all(bicell.contains_point(np.array([1,2]))==\
                            np.array([True,False],dtype=bool)),
                        'Inclusion of vector in cell incorrectly determined')
    def test_locate_point(self):
        """locate_point returns the smallest cell containing the point."""
        bicell = BiCell()
        cell = bicell
        for _ in range(5):
            cell.split()
            cell = cell.children['L']
        self.assertEqual(bicell.locate_point(1/64),cell,
                         'Smallest cell containing point incorrectly detrmnd')
    def test_reference_map(self):
        """Forward/backward reference maps and their Jacobians."""
        #
        # Backward Map
        #
        bicell = BiCell(corner_vertices=[-2,10])
        y, jac = bicell.reference_map([-2,10], jacobian=True, mapsto='reference')
        self.assertTrue(np.allclose(y, np.array([0,1])),\
                        'Points incorrectly mapped to reference.')
        # FIX: the message was passed as the positional `places` argument of
        # assertAlmostEqual, which raises TypeError; pass it as msg=.
        self.assertAlmostEqual(jac[0], 1/12,
                               msg='Derivative incorrectly computed.')
        #
        # Forward Map
        #
        bicell = BiCell(corner_vertices=[-2,10])
        y, jac = bicell.reference_map([0,1], jacobian=True, mapsto='physical')
        self.assertTrue(np.allclose(y, np.array([-2,10])),\
                        'Points incorrectly mapped from reference.')
        self.assertAlmostEqual(jac[0], 12,
                               msg='Derivative incorrectly computed.')
    def test_split(self):
        """Splitting an ungridded and a gridded BiCell."""
        # Ungridded
        bicell = BiCell()
        self.assertFalse(bicell.has_children(),
                         'Bicell should not have children')
        bicell.split()
        self.assertTrue(bicell.has_children(),
                        'Bicell should have children.')
        self.assertEqual(bicell.children['L'].box(), (0,0.5),
                         'Bicell left child incorrect bounds.')
        # gridded
        grid = DCEL(dim=1, resolution=(3,))
        bicell = BiCell(grid=grid, position=0)
        self.assertFalse(bicell.has_children(),
                         'Bicell should not have children')
        bicell.split()
        count = 0
        for _ in bicell.get_children():
            count += 1
        # NOTE(review): message said "3 children" but the assertion expects
        # 2; aligned the message with the check.
        self.assertEqual(count, 2, 'Bicell should have 2 children.')
    def test_pos2id(self):
        """Grid position to id conversion."""
        grid = DCEL(dim=1, resolution=(3,))
        bicell = BiCell(grid=grid, position=0)
        bicell.split()
        self.assertEqual(bicell.pos2id(0), 0,
                         'Position in grid incorrectly converted.')
class TestQuadCell(unittest.TestCase):
"""
Test QuadCell Class
"""
    def test_constructor(self):
        """Smoke-test QuadCell construction; the refinement/plotting
        exercise below is kept as a disabled string block."""
        # TODO: unfinished
        # Define basic QuadCell
        box = [0.,1.,0.,1.]  # NOTE(review): unused here — QuadCell() below takes no box
        Q1 = QuadCell()
        #_,ax0 = plt.subplots()
        #Q1.plot(ax0)
        # Disabled interactive refinement/plotting walkthrough (needs plt):
        """
        _,ax1 = plt.subplots()
        Q2 = QuadCell(box=box, grid_size=(2,2))
        Q2.plot(ax1)
        plt.title('No Refinement')
        _,ax2 = plt.subplots()
        Q2.split()
        Q2.plot(ax2)
        plt.title('First Refinement')
        Q2_00 = Q2.children[0,0]
        Q2_00.split()
        q2002 = Q2_00.children['NE']
        print('-'*10)
        print(q2002.address)
        print('-'*10)
        print('Neighbors')
        for direction in ['N','S','E','W']:
            nb = q2002.get_neighbor(direction)
            print('{0}: {1}'.format(direction, repr(nb.box())))
        q2002.split()
        q2002.children['NW'].split()
        #plt.plot(Q2.vertices[v].coordinates(),'.')
        _,ax = plt.subplots()
        Q2.plot(ax)
        plt.title('Second Refinement')
        #x = numpy.linspace(0,1,100)
        #plt.plot(x,numpy.sin(x))
        plt.show()
        """
def test_area(self):
#
# Standard Quadcell
#
cell = QuadCell()
self.assertEqual(cell.area(), 1, 'Area should be 1: default cell.')
# TODO: test nonstandard case
def test_is_rectangle(self):
#
# Standard QuadCell
#
cell = QuadCell()
self.assertTrue(cell.is_rectangle(),'Cell should be a rectangle')
#
# Non-rectangular cell
#
cnr_vs = [Vertex((0,0)), Vertex((1,1)), Vertex((0,2)), Vertex((0,1))]
cell = QuadCell(corner_vertices=cnr_vs)
self.assertFalse(cell.is_rectangle(), 'Cell should not be a rectangle')
#
# Rectangular cell
#
cnr_vs = [Vertex((0,0)), Vertex((2,0)), Vertex((2,5)), Vertex((0,5))]
cell = QuadCell(corner_vertices=cnr_vs)
self.assertTrue(cell.is_rectangle(), 'Cell should be a rectangle')
#
# Cell in regular grid
#
grid = DCEL(box=[0,1,0,1], resolution=(2,4))
cell = QuadCell(position=0, grid=grid)
self.assertTrue(cell.is_rectangle(), 'Cell should be a rectangle')
def test_mark(self):
box = [0.,1.,0.,1.]
qcell = QuadCell(corner_vertices=box)
qcell.mark()
self.assertTrue(qcell.is_marked(),'Quadcell should be marked.')
    def test_unmark(self):
        """unmark() a single generation, recursively, and from the root,
        checking that only the intended marks are cleared each time."""
        #
        # 3 Generations of marked cells
        #
        box = [0.,1.,0.,1.]
        qcell = QuadCell(corner_vertices=box)
        qcell.mark()
        qcell.split()
        sw_child = qcell.children['SW']
        sw_child.mark()
        sw_child.split()
        sw_sw_child = sw_child.children['SW']
        sw_sw_child.mark()
        #
        # Unmark only SW child
        #
        sw_child.unmark()
        self.assertTrue(qcell.is_marked(),'Quadcell should be marked.')
        self.assertFalse(sw_child.is_marked(), 'SW child should not be marked.')
        self.assertTrue(sw_sw_child.is_marked(), 'SW-SW child should be marked')
        # Restore
        sw_child.mark()
        #
        # Unmark recursively
        #
        sw_child.unmark(recursive=True)
        self.assertTrue(qcell.is_marked(),'Quadcell should be marked.')
        self.assertFalse(sw_child.is_marked(), 'SW child should not be marked.')
        self.assertFalse(sw_sw_child.is_marked(), 'SW-SW child should not be marked.')
        # Restore
        sw_child.mark()
        sw_sw_child.mark()
        #
        # Unmark all
        #
        sw_child.get_root().unmark(recursive=True)
        self.assertFalse(qcell.is_marked(),'Quadcell should not be marked.')
        self.assertFalse(sw_child.is_marked(), 'SW child should not be marked.')
        self.assertFalse(sw_sw_child.is_marked(), 'SW-SW child should not be marked')
def test_unit_normal(self):
box = [0.,1.,0.,1.]
qc = QuadCell(corner_vertices=box)
ew = qc.get_edges('W')
ee = qc.get_edges('E')
es = qc.get_edges('S')
en = qc.get_edges('N')
self.assertEqual(np.sum(np.array([-1.,0])-qc.unit_normal(ew)),0.0,
'Unit normal should be [-1,0].')
self.assertEqual(np.sum(np.array([1.,0])-qc.unit_normal(ee)),0.0,
'Unit normal should be [1,0].')
self.assertEqual(np.sum(np.array([0.,-1.])-qc.unit_normal(es)),0.0,
'Unit normal should be [0,-1].')
self.assertEqual(np.sum(np.array([0.,1.])-qc.unit_normal(en)),0.0,
'Unit normal should be [0,1].')
cnr = [Vertex((0,0)), Vertex((3,1)), Vertex((2,3)), | |
#!/bin/python2
# Copyright (c) 2019 ZettaDB inc. All rights reserved.
# This source code is licensed under Apache 2.0 License,
# combined with Common Clause Condition 1.0, as detailed in the NOTICE file.
import sys
import json
import getpass
import argparse
from cluster_common import *
def generate_storage_startstop(args, machines, node, idx, filesmap):
    """Generate the start/stop wrapper scripts for one kunlun-storage
    instance and register them in `filesmap` for later distribution.

    Args:
        args: parsed options; only `product_version` is read.
        machines: dict mapping ip -> machine info (must contain 'basedir').
        node: storage-node dict with 'ip', 'port' and a 'nodemgr' sub-dict.
        idx: index keeping locally generated file names unique.
        filesmap: node -> file transfer map (appended to).
    """
    mach = machines.get(node['ip'])
    storagedir = "kunlun-storage-%s" % args.product_version
    nodemgr = node['nodemgr']
    envfname = 'env.sh.%d' % nodemgr['brpc_http_port']
    # start wrapper: sources the nodemgr env file, then runs startmysql.sh
    # from the instance's dba_tools directory. Using 'with' guarantees the
    # script is flushed and closed even if a write fails.
    startname = '%d-start-storage-%d.sh' % (idx, node['port'])
    startname_to = 'start-storage-%d.sh' % node['port']
    with open('clustermgr/%s' % startname, 'w') as startf:
        startf.write("#! /bin/bash\n")
        startf.write("cd %s || exit 1\n" % mach['basedir'])
        startf.write("test -f %s && . ./%s\n" % (envfname, envfname))
        startf.write("cd instance_binaries/storage/%s/%s/dba_tools || exit 1\n" % (str(node['port']), storagedir))
        startf.write("bash startmysql.sh %d\n" % node['port'])
    addNodeToFilesListMap(filesmap, node, startname, './%s' % startname_to)
    # stop wrapper, actually may not be necessary.
    stopname = '%d-stop-storage-%d.sh' % (idx, node['port'])
    stopname_to = 'stop-storage-%d.sh' % node['port']
    with open('clustermgr/%s' % stopname, 'w') as stopf:
        stopf.write("#! /bin/bash\n")
        stopf.write("cd %s || exit 1\n" % mach['basedir'])
        stopf.write("test -f %s && . ./%s\n" % (envfname, envfname))
        stopf.write("cd instance_binaries/storage/%s/%s/dba_tools || exit 1\n" % (str(node['port']), storagedir))
        stopf.write("bash stopmysql.sh %d\n" % node['port'])
    addNodeToFilesListMap(filesmap, node, stopname, './%s' % stopname_to)
def generate_storage_service(args, machines, commandslist, node, idx, filesmap):
    """Generate a systemd unit file for one kunlun-storage instance,
    register it for distribution, and queue the commands that install
    and enable the service on the target host.

    Args:
        args: parsed options (unused here beyond the signature contract).
        machines: dict mapping ip -> machine info ('user', 'basedir').
        commandslist: remote command queue (appended to).
        node: storage-node dict with 'ip' and 'port'.
        idx: index keeping locally generated file names unique.
        filesmap: node -> file transfer map (appended to).
    """
    # Unused locals (nodemgrobj, storagedir) from the original removed.
    mach = machines.get(node['ip'])
    fname = "%d-kunlun-storage-%d.service" % (idx, node['port'])
    servname = "kunlun-storage-%d" % node['port']
    fname_to = "kunlun-storage-%d.service" % node['port']
    # 'with' guarantees the unit file is flushed/closed even on error.
    with open('clustermgr/%s' % fname, 'w') as servicef:
        servicef.write("# kunlun-storage-%d systemd service file\n\n" % node['port'])
        servicef.write("[Unit]\n")
        servicef.write("Description=kunlun-storage-%d\n" % node['port'])
        servicef.write("After=network.target\n\n")
        servicef.write("[Install]\n")
        servicef.write("WantedBy=multi-user.target\n\n")
        servicef.write("[Service]\n")
        servicef.write("Type=forking\n")
        servicef.write("User=%s\n" % mach['user'])
        servicef.write("Restart=on-failure\n")
        servicef.write("WorkingDirectory=%s\n" % (mach['basedir']))
        servicef.write("ExecStart=/bin/bash start-storage-%d.sh\n" % (node['port']))
        servicef.write("ExecStop=/bin/bash stop-storage-%d.sh\n" % (node['port']))
    addNodeToFilesListMap(filesmap, node, fname, './%s' % fname_to)
    addToCommandsList(commandslist, node['ip'], '.', "sudo cp -f %s /usr/lib/systemd/system/" % fname_to)
    addToCommandsList(commandslist, node['ip'], '.', "sudo systemctl enable %s" % servname)
def generate_clustermgr_service(args, machines, commandslist, node, idx, filesmap):
    """Generate a systemd unit file for one kunlun-cluster-manager node,
    register it for distribution, and queue the install/enable commands.

    Args:
        args: parsed options; only `product_version` is read.
        machines: dict mapping ip -> machine info ('user', 'basedir').
        commandslist: remote command queue (appended to).
        node: cluster-manager node dict with 'ip' and 'brpc_raft_port'.
        idx: index keeping locally generated file names unique.
        filesmap: node -> file transfer map (appended to).
    """
    mach = machines.get(node['ip'])
    clustermgrdir = "kunlun-cluster-manager-%s" % args.product_version
    fname = "%d-kunlun-cluster-manager-%d.service" % (idx, node['brpc_raft_port'])
    servname = "kunlun-cluster-manager-%d" % node['brpc_raft_port']
    fname_to = "kunlun-cluster-manager-%d.service" % node['brpc_raft_port']
    # 'with' guarantees the unit file is flushed/closed even on error.
    with open('clustermgr/%s' % fname, 'w') as servicef:
        servicef.write("# kunlun-cluster-manager-%d systemd service file\n\n" % node['brpc_raft_port'])
        servicef.write("[Unit]\n")
        servicef.write("Description=kunlun-cluster-manager-%d\n" % node['brpc_raft_port'])
        servicef.write("After=network.target\n\n")
        servicef.write("[Install]\n")
        servicef.write("WantedBy=multi-user.target\n\n")
        servicef.write("[Service]\n")
        servicef.write("Type=forking\n")
        servicef.write("User=%s\n" % mach['user'])
        servicef.write("Restart=on-failure\n")
        servicef.write("WorkingDirectory=%s/%s/bin\n" % (mach['basedir'], clustermgrdir))
        servicef.write("ExecStart=/bin/bash start_cluster_mgr.sh\n")
        servicef.write("ExecStop=/bin/bash stop_cluster_mgr.sh\n")
    addNodeToFilesListMap(filesmap, node, fname, './%s' % fname_to)
    addToCommandsList(commandslist, node['ip'], '.', "sudo cp -f %s /usr/lib/systemd/system/" % fname_to)
    addToCommandsList(commandslist, node['ip'], '.', "sudo systemctl enable %s" % servname)
def generate_nodemgr_startstop(args, machines, node, idx, filesmap):
    """Generate the start/stop wrapper scripts for one kunlun-node-manager
    instance and register them in `filesmap` for later distribution.

    Args:
        args: parsed options; only `product_version` is read.
        machines: dict mapping ip -> machine info (must contain 'basedir').
        node: node-manager dict with 'ip' and 'brpc_http_port'.
        idx: index keeping locally generated file names unique.
        filesmap: node -> file transfer map (appended to).
    """
    mach = machines.get(node['ip'])
    nodemgrdir = "kunlun-node-manager-%s" % args.product_version
    envfname = 'env.sh.%d' % node['brpc_http_port']
    # start wrapper: sources the env file, then runs start_node_mgr.sh.
    # 'with' guarantees the script is flushed/closed even on error.
    startname = '%d-start-nodemgr-%d.sh' % (idx, node['brpc_http_port'])
    startname_to = 'start-nodemgr-%d.sh' % node['brpc_http_port']
    with open('clustermgr/%s' % startname, 'w') as startf:
        startf.write("#! /bin/bash\n")
        startf.write("cd %s || exit 1\n" % mach['basedir'])
        startf.write("test -f %s && . ./%s\n" % (envfname, envfname))
        startf.write("cd %s/bin || exit 1\n" % nodemgrdir)
        startf.write("bash start_node_mgr.sh\n")
    addNodeToFilesListMap(filesmap, node, startname, './%s' % startname_to)
    # stop wrapper, actually may not be necessary.
    stopname = '%d-stop-nodemgr-%d.sh' % (idx, node['brpc_http_port'])
    stopname_to = 'stop-nodemgr-%d.sh' % node['brpc_http_port']
    with open('clustermgr/%s' % stopname, 'w') as stopf:
        stopf.write("#! /bin/bash\n")
        stopf.write("cd %s || exit 1\n" % mach['basedir'])
        stopf.write("test -f %s && . ./%s\n" % (envfname, envfname))
        stopf.write("cd %s/bin || exit 1\n" % nodemgrdir)
        stopf.write("bash stop_node_mgr.sh\n")
    addNodeToFilesListMap(filesmap, node, stopname, './%s' % stopname_to)
def generate_nodemgr_service(args, machines, commandslist, node, idx, filesmap):
    """Generate a systemd unit file for one kunlun-node-manager instance,
    register it for distribution, and queue the install/enable commands.

    Args:
        args: parsed options (unused here beyond the signature contract).
        machines: dict mapping ip -> machine info ('user', 'basedir').
        commandslist: remote command queue (appended to).
        node: node-manager dict with 'ip' and 'brpc_http_port'.
        idx: index keeping locally generated file names unique.
        filesmap: node -> file transfer map (appended to).
    """
    mach = machines.get(node['ip'])
    fname = "%d-kunlun-node-manager-%d.service" % (idx, node['brpc_http_port'])
    servname = "kunlun-node-manager-%d" % node['brpc_http_port']
    fname_to = "kunlun-node-manager-%d.service" % node['brpc_http_port']
    # 'with' guarantees the unit file is flushed/closed even on error.
    with open('clustermgr/%s' % fname, 'w') as servicef:
        servicef.write("# kunlun-node-manager-%d systemd service file\n\n" % node['brpc_http_port'])
        servicef.write("[Unit]\n")
        servicef.write("Description=kunlun-node-manager-%d\n" % node['brpc_http_port'])
        servicef.write("After=network.target\n\n")
        servicef.write("[Install]\n")
        servicef.write("WantedBy=multi-user.target\n\n")
        servicef.write("[Service]\n")
        servicef.write("Type=forking\n")
        servicef.write("User=%s\n" % mach['user'])
        servicef.write("Restart=on-failure\n")
        servicef.write("WorkingDirectory=%s\n" % mach['basedir'])
        servicef.write("ExecStart=/bin/bash start-nodemgr-%d.sh\n" % node['brpc_http_port'])
        servicef.write("ExecStop=/bin/bash stop-nodemgr-%d.sh\n" % node['brpc_http_port'])
    addNodeToFilesListMap(filesmap, node, fname, './%s' % fname_to)
    addToCommandsList(commandslist, node['ip'], '.', "sudo cp -f %s /usr/lib/systemd/system/" % fname_to)
    addToCommandsList(commandslist, node['ip'], '.', "sudo systemctl enable %s" % servname)
def generate_nodemgr_env(args, machines, node, idx, filesmap):
    """Generate the per-node env.sh that exports JAVA_HOME/HADOOP_HOME/PATH
    for a node-manager instance, and register it for distribution.

    Args:
        args: parsed options (unused here beyond the signature contract).
        machines: dict mapping ip -> machine info (must contain 'basedir').
        node: node-manager dict with 'ip' and 'brpc_http_port'.
        idx: index keeping locally generated file names unique.
        filesmap: node -> file transfer map (appended to).
    """
    mach = machines.get(node['ip'])
    # Bundled runtime versions shipped under program_binaries/.
    jdk = "jdk1.8.0_131"
    hadoop = "hadoop-3.3.1"
    fname = '%d-env.sh.%d' % (idx, node['brpc_http_port'])
    fname_to = 'env.sh.%d' % node['brpc_http_port']
    # 'with' guarantees the env file is flushed/closed even on error.
    with open('clustermgr/%s' % fname, 'w') as envf:
        envf.write("#! /bin/bash\n")
        envf.write("JAVA_HOME=%s/program_binaries/%s\n" % (mach['basedir'], jdk))
        envf.write("PATH=$JAVA_HOME/bin:$PATH\n")
        envf.write("HADOOP_HOME=%s/program_binaries/%s\n" % (mach['basedir'], hadoop))
        envf.write("PATH=$HADOOP_HOME/bin:$PATH\n")
        envf.write("export JAVA_HOME\n")
        envf.write("export HADOOP_HOME\n")
        envf.write("export PATH\n")
    addNodeToFilesListMap(filesmap, node, fname, './%s' % fname_to)
def get_default_nodemgr(args, machines, ip):
    """Build the default node-manager descriptor for the machine at `ip`,
    with default ports from `args` and data directories under the
    machine's basedir."""
    mach = machines.get(ip)
    default_subdirs = {
        "server_datadirs": "server_datadir",
        "storage_datadirs": "storage_datadir",
        "storage_logdirs": "storage_logdir",
        "storage_waldirs": "storage_waldir",
    }
    descriptor = {
        'ip': ip,
        'brpc_http_port': args.defbrpc_http_port_nodemgr,
        "tcp_port": args.deftcp_port_nodemgr
    }
    for key in default_subdirs:
        descriptor[key] = "%s/%s" % (mach['basedir'], default_subdirs[key])
    return descriptor
def install_meta_env(comf, node, machines, args):
    """Push and unpack storage/server packages for a meta node, fix their deps."""
    storage_pkg = "kunlun-storage-%s" % args.product_version
    server_pkg = "kunlun-server-%s" % args.product_version
    ip = node['ip']
    mach = machines.get(ip)
    progdir = node['program_dir']
    basedir = mach['basedir']
    # Create the program dir, then transfer both tarballs into it.
    process_command_setenv(comf, args, machines, ip, '.', 'mkdir -p %s' % progdir)
    for pkg in (storage_pkg, server_pkg):
        process_file(comf, args, machines, ip, 'clustermgr/%s.tgz' % pkg,
                     '%s/%s' % (basedir, progdir))
    for pkg in (storage_pkg, server_pkg):
        process_command_setenv(comf, args, machines, ip, progdir, 'tar -xzf %s.tgz' % pkg)
    # Patch shared-library dependencies inside each package's lib dir.
    depscmd = "bash %s/process_deps.sh" % basedir
    process_command_setenv(comf, args, machines, ip,
                           "%s/%s/lib" % (progdir, storage_pkg), depscmd, "storage")
    process_command_setenv(comf, args, machines, ip,
                           "%s/%s/lib" % (progdir, server_pkg), depscmd, "computing")
    # Reset the instances list if the storage package carries one.
    resetcmd = "test -d etc && echo > etc/instances_list.txt 2>/dev/null; exit 0"
    process_command_setenv(comf, args, machines, ip,
                           "%s/%s" % (progdir, storage_pkg), resetcmd)
def install_nodemgr_env(comf, mach, machines, args):
    """Push the node-manager tarball to *mach* and unpack it in its basedir."""
    ip = mach['ip']
    basedir = mach['basedir']
    pkg = "kunlun-node-manager-%s" % args.product_version
    process_file(comf, args, machines, ip, 'clustermgr/%s.tgz' % pkg, basedir)
    process_command_noenv(comf, args, machines, ip, basedir, 'tar -xzf %s.tgz' % pkg)
def setup_nodemgr_commands(args, idx, machines, node, commandslist, dirmap, filesmap, metaseeds):
    """Prepare directories, files and the setup script for one node manager.

    Registers the node's data/log/wal directories, ships the hadoop/jdk
    tarballs, and writes setup_nodemgr_<idx>.sh which patches node_mgr.cnf
    with the node-specific configuration values.
    """
    cmdpat = "bash change_config.sh %s '%s' '%s'\n"
    nodemgrdir = "kunlun-node-manager-%s" % args.product_version
    storagedir = "kunlun-storage-%s" % args.product_version
    serverdir = "kunlun-server-%s" % args.product_version
    confpath = "%s/conf/node_mgr.cnf" % nodemgrdir
    mach = machines.get(node['ip'])
    targetdir = "program_binaries"
    setup_mgr_common(commandslist, dirmap, filesmap, machines, node, targetdir, storagedir, serverdir)
    # Every data/log/wal dir (comma-separated lists) must exist on the node.
    for item in ["server_datadirs", "storage_datadirs", "storage_logdirs", "storage_waldirs"]:
        for d in node[item].strip().split(","):
            addToDirMap(dirmap, node['ip'], d.strip())
    addNodeToFilesListMap(filesmap, node, "hadoop-3.3.1.tar.gz", targetdir)
    addNodeToFilesListMap(filesmap, node, "jdk-8u131-linux-x64.tar.gz", targetdir)
    addToCommandsList(commandslist, node['ip'], targetdir, "tar -xzf hadoop-3.3.1.tar.gz")
    addToCommandsList(commandslist, node['ip'], targetdir, "tar -xzf jdk-8u131-linux-x64.tar.gz")
    addToCommandsList(commandslist, node['ip'], nodemgrdir, "chmod a+x bin/util/*")
    script_name = "setup_nodemgr_%d.sh" % idx
    # 'with' guarantees the script file is closed even if a write fails;
    # a table keeps the key/value pairs in one visible place.
    with open('clustermgr/%s' % script_name, 'w') as scriptf:
        scriptf.write("#! /bin/bash\n")
        for key, value in [
            ('meta_group_seeds', metaseeds),
            ('brpc_http_port', node['brpc_http_port']),
            ('nodemgr_tcp_port', node['tcp_port']),
            ('local_ip', node['ip']),
            ('program_binaries_path', '%s/program_binaries' % mach['basedir']),
            ('instance_binaries_path', '%s/instance_binaries' % mach['basedir']),
            ('prometheus_path', '%s/program_binaries/prometheus' % mach['basedir']),
            ('storage_prog_package_name', storagedir),
            ('computer_prog_package_name', serverdir),
        ]:
            scriptf.write(cmdpat % (confpath, key, value))
    addNodeToFilesListMap(filesmap, node, script_name, '.')
    addNodeToFilesListMap(filesmap, node, 'clear_instances.sh', '.')
    addToCommandsList(commandslist, node['ip'], '.', "bash ./%s" % script_name)
def install_clustermgr_env(comf, mach, machines, args):
    """Push the cluster-manager tarball to *mach* and unpack it in its basedir."""
    ip = mach['ip']
    basedir = mach['basedir']
    pkg = "kunlun-cluster-manager-%s" % args.product_version
    process_file(comf, args, machines, ip, 'clustermgr/%s.tgz' % pkg, basedir)
    process_command_noenv(comf, args, machines, ip, basedir, 'tar -xzf %s.tgz' % pkg)
def setup_clustermgr_commands(args, idx, machines, node, commandslist, dirmap, filesmap, metaseeds, initmember, initcommon):
    """Prepare files and the setup script for one cluster-manager node.

    Optionally runs the common program-binaries setup, then writes
    setup_clustermgr_<idx>.sh which patches cluster_mgr.cnf with the
    node-specific configuration values.
    """
    cmdpat = "bash change_config.sh %s '%s' '%s'\n"
    clustermgrdir = "kunlun-cluster-manager-%s" % args.product_version
    storagedir = "kunlun-storage-%s" % args.product_version
    serverdir = "kunlun-server-%s" % args.product_version
    confpath = "%s/conf/cluster_mgr.cnf" % clustermgrdir
    mach = machines.get(node['ip'])
    targetdir = "program_binaries"
    if initcommon:
        setup_mgr_common(commandslist, dirmap, filesmap, machines, node, targetdir, storagedir, serverdir)
    script_name = "setup_clustermgr_%d.sh" % idx
    # 'with' guarantees the script file is closed even if a write fails;
    # a table keeps the key/value pairs in one visible place.
    with open('clustermgr/%s' % script_name, 'w') as scriptf:
        scriptf.write("#! /bin/bash\n")
        for key, value in [
            ('meta_group_seeds', metaseeds),
            ('brpc_raft_port', node['brpc_raft_port']),
            ('brpc_http_port', node['brpc_http_port']),
            ('local_ip', node['ip']),
            ('raft_group_member_init_config', initmember),
            ('program_binaries_path', '%s/program_binaries' % mach['basedir']),
            ('instance_binaries_path', '%s/instance_binaries' % mach['basedir']),
            ('prometheus_path', '%s/program_binaries/prometheus' % mach['basedir']),
            ('storage_prog_package_name', storagedir),
            ('computer_prog_package_name', serverdir),
        ]:
            scriptf.write(cmdpat % (confpath, key, value))
    addNodeToFilesListMap(filesmap, node, script_name, '.')
    addToCommandsList(commandslist, node['ip'], '.', "bash ./%s" % script_name)
def install_clustermgr(args):
    """Generate clustermgr/install.sh from the config file named in *args*."""
    jscfg = get_json_from_file(args.config)
    machines = {}
    setup_machines2(jscfg, machines, args)
    validate_and_set_config2(jscfg, machines, args)
    # 'with' guarantees the script is flushed and closed even on error.
    with open(r'clustermgr/install.sh', 'w') as comf:
        comf.write('#! /bin/bash\n')
        install_with_config(jscfg, comf, machines, args)
def stop_clustermgr(args):
    """Generate clustermgr/stop.sh from the config file named in *args*."""
    jscfg = get_json_from_file(args.config)
    machines = {}
    setup_machines2(jscfg, machines, args)
    validate_and_set_config2(jscfg, machines, args)
    # 'with' guarantees the script is flushed and closed even on error.
    with open(r'clustermgr/stop.sh', 'w') as comf:
        comf.write('#! /bin/bash\n')
        stop_with_config(jscfg, comf, machines, args)
def start_clustermgr(args):
    """Generate clustermgr/start.sh from the config file named in *args*."""
    jscfg = get_json_from_file(args.config)
    machines = {}
    setup_machines2(jscfg, machines, args)
    validate_and_set_config2(jscfg, machines, args)
    # 'with' guarantees the script is flushed and closed even on error.
    with open(r'clustermgr/start.sh', 'w') as comf:
        comf.write('#! /bin/bash\n')
        start_with_config(jscfg, comf, machines, args)
def clean_clustermgr(args):
jscfg = get_json_from_file(args.config)
machines = {}
setup_machines2(jscfg, machines, args)
validate_and_set_config2(jscfg, machines, args)
comf = open(r'clustermgr/clean.sh', 'w')
comf.write('#! /bin/bash\n')
clean_with_config(jscfg, comf, machines, args)
| |
atom[1] to 0,0,0 origin
atomsR.append(numpy.array([[0], [0], [0], [1]],
dtype=numpy.float64))
# a0r is len1 up from a1 on Z axis, X=Y=0
atomsR[0][2][0] = self.len1
# a2r X is sin( sar ) * len3
atomsR[2][0][0] = numpy.sin(sar) * self.len3
# a2r Z is -(cos( sar ) * len3)
atomsR[2][2][0] = - (numpy.cos(sar) * self.len3)
self.atomsR = tuple(atomsR)
self.atoms_updated = True
@staticmethod
def _get_dad(acs):
    """Get distance, angle, distance for 3 atoms.

    :param acs: list[3] of numpy arrays [4][[1]]
    """
    p0, p1, p2 = (a.squeeze() for a in acs)
    d01 = numpy.linalg.norm(p0 - p1)
    d12 = numpy.linalg.norm(p1 - p2)
    d02 = numpy.linalg.norm(p0 - p2)
    # law of cosines: angle at the middle atom, in degrees
    cos_a = ((d01 * d01) + (d12 * d12) - (d02 * d02)) / (2 * d01 * d12)
    ang012 = numpy.rad2deg(numpy.arccos(cos_a))
    return d01, ang012, d12
def hedron_from_atoms(self, atom_coords):
    """Compute length, angle, length for hedron for residue atom coords."""
    acs = self.gen_acs(atom_coords)
    d1, ang, d3 = Hedron._get_dad(acs)
    self.len1 = set_accuracy_95(d1)
    self.angle2 = set_accuracy_95(ang)
    self.len3 = set_accuracy_95(d3)
    # refresh the hedron-space coordinates from the new measurements
    self.init_pos()
def get_angle(self):
    """Return this hedron's angle (angle2) in degrees."""
    return self.angle2
def set_angle(self, angle_deg):
    """Store a new hedron angle (degrees) and mark local coords stale."""
    self.atoms_updated = False
    self.angle2 = set_accuracy_95(angle_deg)
def get_length(self, ak_tpl):
    """Get bond length for specified atom pair.

    :param ak_tpl: tuple of AtomKeys
        pair of atoms in this Hedron
    :return: len1 or len3 for a matching pair, else None
    """
    if len(ak_tpl) < 2:
        return None
    first_pair = self.aks[:2]
    last_pair = self.aks[1:]
    if all(ak in first_pair for ak in ak_tpl):
        return self.len1
    if all(ak in last_pair for ak in ak_tpl):
        return self.len3
    return None
def set_length(self, ak_tpl, newLength):
    """Set bond length for specified atom pair; clears atoms_updated.

    :param ak_tpl: tuple of AtomKeys
        pair of atoms in this Hedron
    :param newLength: the new bond length
    """
    if 2 > len(ak_tpl):
        return
    # use self.aks for consistency with get_length (was self.akl)
    elif all(ak in self.aks[:2] for ak in ak_tpl):
        self.len1 = newLength
    elif all(ak in self.aks[1:] for ak in ak_tpl):
        # bug fix: the second atom pair is len3 (was assigning len1;
        # mirrors get_length, which returns len3 for this pair)
        self.len3 = newLength
    else:
        return
    self.atoms_updated = False
class Dihedron(Edron):
"""Class to represent four joined atoms forming a dihedral angle.
Attributes
----------
dihedral1 : float
Measurement or specification of dihedral angle
hedron1, hedron2 : Hedron object references
The two hedra which form the dihedral angle
h1key, h2key : tuples of AtomKeys
Hash keys for hedron1 and hedron2
id3,id32 : tuples of AtomKeys
First 3 and second 3 atoms comprising dihedron; hxkey orders may differ
initial_coords : tuple[4] of numpy arrays [4][1]
Local atom coords for 4 atoms, [0] on XZ plane, [1] at origin,
[2] on +Z, [3] rotated by dihedral1
a4_pre_rotation : numpy array [4][1]
4th atom of dihedral aligned to XZ plane (dihedral1 not applied)
IC_Residue : IC_Residue object reference
IC_Residue object containing this dihedral
reverse : bool
Indicates order of atoms in dihedron is reversed from order of atoms
in hedra (configured by _set_hedra())
Methods
-------
init_pos()
Find Hedron objects for self.IC_Residue, set initial_coords
and a4_pre_rotation
dihedron_from_atoms()
Compute dihedral and bond lengths, angles from IC_Residue atom_coords
set_dihedral()
Store new dihedral angle and update initial_coords accordingly
"""
def __init__(self, *args, **kwargs):
    """Initialize Dihedron with sequence of AtomKeys and optional angle.

    Acceptable input:
        As for Edron, plus optional 'dihedral1' keyworded angle value.
    """
    super().__init__(*args, **kwargs)
    # hedra making up this dihedron; resolved later by _set_hedra()
    self.hedron1 = self.hedron2 = None
    self.h1key = self.h2key = None
    # first three / last three atom keys identify the two hedra
    self.id3 = tuple(self.aks[0:3])
    self.id32 = tuple(self.aks[1:4])
    # hedron-space coordinates of the four atoms (atom 3 on +Z axis);
    # see coord_space()
    self.initial_coords = None
    self.a4_pre_rotation = None
    # IC_Residue object which includes this dihedron;
    # set by Residue:linkDihedra()
    self.IC_Residue = None
    # True when dihedron atom order is reversed vs. hedra atom order
    self.reverse = False
    # init_pos() is deferred: it needs adjacent residues to resolve hedra
    self.dihedral1 = float(kwargs['dihedral1']) if 'dihedral1' in kwargs else None
def __str__(self):
    """Print string for Dihedron object."""
    return '4-%s %s %s (%s)' % (
        self.id, self.rdh_class, self.dihedral1, self.IC_Residue)
@staticmethod
def _get_hedron(pic_res, id3):
    """Find specified hedron on this residue or its adjacent neighbors."""
    hedron = pic_res.hedra.get(id3, None)
    # not on this residue: look at preceding residues first
    if not hedron:
        for neighbor in pic_res.rprev:
            hedron = neighbor.hedra.get(id3, None)
            if hedron is not None:
                break
    # still not found: look at following residues
    if not hedron:
        for neighbor in pic_res.rnext:
            hedron = neighbor.hedra.get(id3, None)
            if hedron is not None:
                break
    return hedron
def _set_hedra(self):
    """Work out hedra keys and set rev flag.

    Looks up the two hedra (first 3 / last 3 atoms) on this residue or its
    neighbors; if the forward-order first hedron is absent, retries with
    both atom orders reversed and records that in self.reverse.

    :raises HedronMatchError: if either hedron cannot be found
    :return: (reverse flag, hedron1, hedron2)
    """
    rev = False
    res = self.IC_Residue
    h1key = self.id3
    hedron1 = Dihedron._get_hedron(res, h1key)
    if not hedron1:
        # forward order not found: reverse atoms 2..0 for hedron 1
        # and atoms 3..1 for hedron 2
        rev = True
        h1key = tuple(self.aks[2::-1])
        hedron1 = Dihedron._get_hedron(res, h1key)
        h2key = tuple(self.aks[3:0:-1])
    else:
        h2key = self.id32
    if not hedron1:
        raise HedronMatchError(
            "can't find 1st hedron for key %s dihedron %s" % (h1key, self))
    hedron2 = Dihedron._get_hedron(res, h2key)
    if not hedron2:
        raise HedronMatchError(
            "can't find 2nd hedron for key %s dihedron %s" % (h2key, self))
    self.hedron1 = hedron1
    self.h1key = h1key
    self.hedron2 = hedron2
    self.h2key = h2key
    self.reverse = rev
    return rev, hedron1, hedron2
def init_pos(self, updating=False):
    """Set hedron-space atom coords with dihedral1 applied.

    :param updating: bool
        skip _set_hedra if True (hedra already resolved)
    :raises MissingAtomError: if the two hedra do not supply 6 atoms
    """
    hedron1 = self.hedron1
    if updating and hedron1 is not None:
        rev = self.reverse
        hedron2 = self.hedron2
    else:
        rev, hedron1, hedron2 = self._set_hedra()
    # both hedra must have all three atom coordinates resolved
    acount = 0
    for a in hedron1.atoms:
        if a is not None:
            acount += 1
    for a in hedron2.atoms:
        if a is not None:
            acount += 1
    if 6 > acount:
        # bug fix: was ('...' + self), which raises TypeError (str + object)
        # instead of the intended MissingAtomError
        raise MissingAtomError('dihedron: hedra missing atoms: ' + str(self))
    initial = []
    if not rev:
        # forward: hedron1 atoms as-is; 4th atom taken from hedron2's
        # reversed-orientation coords, shifted by hedron2.len1
        initial.append(hedron1.atoms[0].copy())
        initial.append(hedron1.atoms[1].copy())
        initial.append(hedron1.atoms[2].copy())
        a4_pre_rotation = hedron2.atomsR[2].copy()
        a4shift = hedron2.len1
    else:
        # reversed: hedron1 reversed-orientation coords in reverse order;
        # 4th atom from hedron2 forward coords, shifted by hedron2.len3
        initial.append(hedron1.atomsR[2].copy())
        initial.append(hedron1.atomsR[1].copy())
        initial.append(hedron1.atomsR[0].copy())
        a4_pre_rotation = hedron2.atoms[0].copy()
        a4shift = hedron2.len3
    # a4 to +Z
    a4_pre_rotation[2][0] *= -1
    # hedron2 shift up so a2 at 0,0,0
    a4_pre_rotation[2][0] += a4shift
    # rotate the 4th atom about Z by the dihedral angle
    mrz = homog_rot_mtx(numpy.deg2rad(self.dihedral1), 'z')
    # initial.append(mrz @ a4_pre_rotation)
    initial.append(mrz.dot(a4_pre_rotation))
    self.initial_coords = tuple(initial)
    self.a4_pre_rotation = a4_pre_rotation
    self.atoms_updated = True
"""
# unused - get Biopython Residue atoms to test dihedral calculations
def find_bp_atom(self, ak):
bpa = self.IC_Residue.bp_atoms.get(ak, None)
if bpa is not None:
return bpa
if self.IC_Residue.rnext:
bpa = self.IC_Residue.rnext.bp_atoms.get(ak, None)
if bpa is not None:
return bpa
if self.IC_Residue.rprev:
bpa = self.IC_Residue.rprev.bp_atoms.get(ak, None)
return bpa
"""
def set_angle(self, dangle_deg):
    """Save new dihedral angle and mark local coords stale.

    :param dangle_deg: float
        New dihedral angle in degrees
    """
    self.atoms_updated = False
    self.dihedral1 = dangle_deg
def get_angle(self):
    """Return this object's dihedral angle."""
    return self.dihedral1
@staticmethod
def _get_dadad(acs):
    """Get distance, angle, distance, angle, distance for 4 atoms.

    :param acs: list[4] of numpy [4][1] array
        Atom coordinates
    """
    p0, p1, p2, p3 = (a.squeeze() for a in acs)

    def dist(u, v):
        return numpy.linalg.norm(u - v)

    def angle_deg(dab, dbc, dac):
        # law of cosines: angle at the middle atom, in degrees
        return numpy.rad2deg(
            numpy.arccos(((dab * dab) + (dbc * dbc) - (dac * dac)) /
                         (2 * dab * dbc)))

    d01 = dist(p0, p1)
    d12 = dist(p1, p2)
    d23 = dist(p2, p3)
    d02 = dist(p0, p2)
    d13 = dist(p1, p3)
    return (d01, angle_deg(d01, d12, d02), d12,
            angle_deg(d12, d23, d13), d23)
def dihedron_from_atoms(self):
"""Compute residue dihedral, bond angles, bond lengths.
Source data is Biopython Residue.Atom coords.
Call link_dihedra before this so can find Residue.Atom coords.
Updates hedron and dihedron values, then all local atom coords
for both hedra and this dihedron.
"""
rev, hed1, hed2 = self._set_hedra()
atom_coords = self.IC_Residue.atom_coords
acs = self.gen_acs(atom_coords)
mt = coord_space(acs[:3])
# do4 = mt @ acs[3]
do4 = mt.dot(acs[3])
dh1r = numpy.rad2deg(numpy.arctan2(do4[1][0], do4[0][0]))
self.dihedral1 = dh1r
a0a1, a0a1a2, a1a2, a1a2a3, a2a3 = Dihedron._get_dadad(acs)
if not rev:
hed1.len1 = set_accuracy_95(a0a1)
hed1.len3 = hed2.len1 = set_accuracy_95(a1a2)
hed2.len3 = set_accuracy_95(a2a3)
else:
hed1.len3 = set_accuracy_95(a0a1)
hed1.len1 = hed2.len3 = set_accuracy_95(a1a2)
hed2.len1 = set_accuracy_95(a2a3)
hed1.angle2 | |
The new notification level for the guild.
explicit_content_filter: The new explicit content filter level for the guild.
afk_channel: The voice channel that should be the new AFK channel.
afk_timeout: How many seconds does a member need to be afk before they get moved to the AFK channel. Must be either `60`, `300`, `900`, `1800` or `3600`, otherwise HTTPException will be raised.
icon: The new icon. Requires a bytes like object or a path to an image.
owner: The new owner of the guild. You, the bot, need to be owner for this to work.
splash: The new invite splash image. Requires a bytes like object or a path to an image.
discovery_splash: The new discovery image. Requires a bytes like object or a path to an image.
banner: The new banner image. Requires a bytes like object or a path to an image.
system_channel: The text channel where new system messages should appear. This includes boosts and welcome messages.
system_channel_flags: The new settings for the system channel.
rules_channel: The text channel where your rules and community guidelines are displayed.
public_updates_channel: The text channel where updates from discord should appear.
preferred_locale: The new preferred locale of the guild. Must be an ISO 639 code.
features: The enabled guild features
reason: An optional reason for the audit log.
"""
await self._client.http.modify_guild(
guild_id=self.id,
name=name,
description=description,
verification_level=int(verification_level) if verification_level else MISSING,
default_message_notifications=int(default_message_notifications)
if default_message_notifications
else MISSING,
explicit_content_filter=int(explicit_content_filter) if explicit_content_filter else MISSING,
afk_channel_id=to_snowflake(afk_channel) if afk_channel else MISSING,
afk_timeout=afk_timeout,
icon=to_image_data(icon) if icon else MISSING,
owner_id=to_snowflake(owner) if owner else MISSING,
splash=to_image_data(splash) if splash else MISSING,
discovery_splash=to_image_data(discovery_splash) if discovery_splash else MISSING,
banner=to_image_data(banner) if banner else MISSING,
system_channel_id=to_snowflake(system_channel) if system_channel else MISSING,
system_channel_flags=int(system_channel_flags) if system_channel_flags else MISSING,
rules_channel_id=to_snowflake(rules_channel) if rules_channel else MISSING,
public_updates_channel_id=to_snowflake(public_updates_channel) if public_updates_channel else MISSING,
preferred_locale=preferred_locale,
features=features,
reason=reason,
)
async def create_custom_emoji(
    self,
    name: str,
    imagefile: UPLOADABLE_TYPE,
    roles: Absent[List[Union[Snowflake_Type, "models.Role"]]] = MISSING,
    reason: Absent[Optional[str]] = MISSING,
) -> "models.CustomEmoji":
    """
    Create a new custom emoji for the guild.

    Args:
        name: Name of the emoji
        imagefile: The emoji image. (Supports PNG, JPEG, WebP, GIF)
        roles: Roles allowed to use this emoji.
        reason: An optional reason for the audit log.

    Returns:
        The new custom emoji created.
    """
    emoji_data = await self._client.http.create_guild_emoji(
        {
            "name": name,
            "image": to_image_data(imagefile),
            "roles": to_snowflake_list(roles) if roles else MISSING,
        },
        self.id,
        reason=reason,
    )
    return self._client.cache.place_emoji_data(self.id, emoji_data)
async def create_guild_template(self, name: str, description: Absent[str] = MISSING) -> "models.GuildTemplate":
    """
    Create a new guild template based on this guild.

    Args:
        name: The name of the template (1-100 characters)
        description: The description for the template (0-120 characters)

    Returns:
        The new guild template created.
    """
    template_data = await self._client.http.create_guild_template(self.id, name, description)
    return GuildTemplate.from_dict(template_data, self._client)
async def fetch_guild_templates(self) -> List["models.GuildTemplate"]:
    """
    Fetch all guild templates for this guild.

    Returns:
        A list of guild template objects.
    """
    raw_templates = await self._client.http.get_guild_templates(self.id)
    return GuildTemplate.from_list(raw_templates, self._client)
async def fetch_all_custom_emojis(self) -> List["models.CustomEmoji"]:
    """
    Gets all the custom emoji present for this guild.

    Returns:
        A list of custom emoji objects.
    """
    emojis = []
    for raw_emoji in await self._client.http.get_all_guild_emoji(self.id):
        emojis.append(self._client.cache.place_emoji_data(self.id, raw_emoji))
    return emojis
async def fetch_custom_emoji(self, emoji_id: Snowflake_Type) -> Optional["models.CustomEmoji"]:
    """
    Fetches the custom emoji present for this guild, based on the emoji id.

    Args:
        emoji_id: The target emoji to get data of.

    Returns:
        The custom emoji object. If the emoji is not found, returns None.
    """
    try:
        emoji = await self._client.cache.fetch_emoji(self.id, emoji_id)
    except NotFound:
        return None
    return emoji
def get_custom_emoji(self, emoji_id: Snowflake_Type) -> Optional["models.CustomEmoji"]:
    """
    Gets the custom emoji present for this guild, based on the emoji id.

    Args:
        emoji_id: The target emoji to get data of.

    Returns:
        The custom emoji object, or None if not cached for this guild.
    """
    emoji = self._client.cache.get_emoji(to_snowflake(emoji_id))
    # only return emoji that actually belong to this guild
    if emoji and emoji._guild_id == self.id:
        return emoji
    return None
async def create_channel(
    self,
    channel_type: Union[ChannelTypes, int],
    name: str,
    topic: Absent[Optional[str]] = MISSING,
    position: Absent[Optional[int]] = MISSING,
    permission_overwrites: Absent[
        Union[dict, "models.PermissionOverwrite", List[Union[dict, "models.PermissionOverwrite"]]]
    ] = MISSING,
    category: Union[Snowflake_Type, "models.GuildCategory"] = None,
    nsfw: bool = False,
    bitrate: int = 64000,
    user_limit: int = 0,
    rate_limit_per_user: int = 0,
    reason: Absent[Optional[str]] = MISSING,
) -> "models.TYPE_GUILD_CHANNEL":
    """
    Create a guild channel, allows for explicit channel type setting.

    Args:
        channel_type: The type of channel to create
        name: The name of the channel
        topic: The topic of the channel
        position: The position of the channel in the channel list
        permission_overwrites: Permission overwrites to apply to the channel
        category: The category this channel should be within
        nsfw: Should this channel be marked nsfw
        bitrate: The bitrate of this channel, only for voice
        user_limit: The max users that can be in this channel, only for voice
        rate_limit_per_user: The time users must wait between sending messages
        reason: The reason for creating this channel

    Returns:
        The newly created channel.
    """
    overwrites = models.process_permission_overwrites(permission_overwrites)
    parent_id = to_optional_snowflake(category)
    channel_data = await self._client.http.create_guild_channel(
        self.id,
        name,
        channel_type,
        topic,
        position,
        overwrites,
        parent_id,
        nsfw,
        bitrate,
        user_limit,
        rate_limit_per_user,
        reason,
    )
    return self._client.cache.place_channel_data(channel_data)
async def create_text_channel(
    self,
    name: str,
    topic: Absent[Optional[str]] = MISSING,
    position: Absent[Optional[int]] = MISSING,
    permission_overwrites: Absent[
        Union[dict, "models.PermissionOverwrite", List[Union[dict, "models.PermissionOverwrite"]]]
    ] = MISSING,
    category: Union[Snowflake_Type, "models.GuildCategory"] = None,
    nsfw: bool = False,
    rate_limit_per_user: int = 0,
    reason: Absent[Optional[str]] = MISSING,
) -> "models.GuildText":
    """
    Create a text channel in this guild.

    Args:
        name: The name of the channel
        topic: The topic of the channel
        position: The position of the channel in the channel list
        permission_overwrites: Permission overwrites to apply to the channel
        category: The category this channel should be within
        nsfw: Should this channel be marked nsfw
        rate_limit_per_user: The time users must wait between sending messages
        reason: The reason for creating this channel

    Returns:
        The newly created text channel.
    """
    # delegate to the generic creator with the type pinned to GUILD_TEXT
    options = dict(
        name=name,
        topic=topic,
        position=position,
        permission_overwrites=permission_overwrites,
        category=category,
        nsfw=nsfw,
        rate_limit_per_user=rate_limit_per_user,
        reason=reason,
    )
    return await self.create_channel(channel_type=ChannelTypes.GUILD_TEXT, **options)
async def create_news_channel(
    self,
    name: str,
    topic: Absent[Optional[str]] = MISSING,
    position: Absent[Optional[int]] = MISSING,
    permission_overwrites: Absent[
        Union[dict, "models.PermissionOverwrite", List[Union[dict, "models.PermissionOverwrite"]]]
    ] = MISSING,
    category: Union[Snowflake_Type, "models.GuildCategory"] = None,
    nsfw: bool = False,
    reason: Absent[Optional[str]] = MISSING,
) -> "models.GuildNews":
    """
    Create a news channel in this guild.

    Args:
        name: The name of the channel
        topic: The topic of the channel
        position: The position of the channel in the channel list
        permission_overwrites: Permission overwrites to apply to the channel
        category: The category this channel should be within
        nsfw: Should this channel be marked nsfw
        reason: The reason for creating this channel

    Returns:
        The newly created news channel.
    """
    # delegate to the generic creator with the type pinned to GUILD_NEWS
    options = dict(
        name=name,
        topic=topic,
        position=position,
        permission_overwrites=permission_overwrites,
        category=category,
        nsfw=nsfw,
        reason=reason,
    )
    return await self.create_channel(channel_type=ChannelTypes.GUILD_NEWS, **options)
async def create_voice_channel(
    self,
    name: str,
    topic: Absent[Optional[str]] = MISSING,
    position: Absent[Optional[int]] = MISSING,
    permission_overwrites: Absent[
        Union[dict, "models.PermissionOverwrite", List[Union[dict, "models.PermissionOverwrite"]]]
    ] = MISSING,
    category: Union[Snowflake_Type, "models.GuildCategory"] = None,
    nsfw: bool = False,
    bitrate: int = 64000,
    user_limit: int = 0,
    reason: Absent[Optional[str]] = MISSING,
) -> "models.GuildVoice":
    """
    Create a guild voice channel.

    Args:
        name: The name of the channel
        topic: The topic of the channel
        position: The position of the channel in the channel list
        permission_overwrites: Permission overwrites to apply to the channel
        category: The category this channel should be within
        nsfw: Should this channel be marked nsfw
        bitrate: The bitrate of this channel, only for voice
        user_limit: The max users that can be in this channel, only for voice
        reason: The reason for creating this channel

    Returns:
        The newly created voice channel.
    """
    # delegate to the generic creator with the type pinned to GUILD_VOICE
    options = dict(
        name=name,
        topic=topic,
        position=position,
        permission_overwrites=permission_overwrites,
        category=category,
        nsfw=nsfw,
        bitrate=bitrate,
        user_limit=user_limit,
        reason=reason,
    )
    return await self.create_channel(channel_type=ChannelTypes.GUILD_VOICE, **options)
async def create_stage_channel(
self,
name: str,
topic: Absent[Optional[str]] = MISSING,
position: Absent[Optional[int]] = MISSING,
permission_overwrites: Absent[
Union[dict, "models.PermissionOverwrite", List[Union[dict, "models.PermissionOverwrite"]]]
] = MISSING,
category: Absent[Union[Snowflake_Type, "models.GuildCategory"]] = MISSING,
bitrate: int = 64000,
user_limit: int = 0,
reason: Absent[Optional[str]] = MISSING,
) -> "models.GuildStageVoice":
"""
Create a guild stage channel.
Args:
name: The name of the channel
topic: The topic of the channel
position: The position of the channel in the channel list
permission_overwrites: Permission overwrites to apply to the channel
category: The category this channel should be within
bitrate: The bitrate of this channel, only for voice
| |
type: bool
savehtml=False, # type: int
outputfile=None, # type: bool
fp=None, # type: Optional[IO]
ccmode=CC_PROCESSES # type: str
):
# type: (...) -> int
"""Reformats the files according to style with all option values. Then it groups the
different results and generates an overview about what source code differences result from
the different options.
"""
ret = OK
cache = formatter.cache
varfiles = []
for filename, (job, jobres) in zip(filenames,
format_with_styles(formatter, [style],
filenames,
cache=cache,
ccmode=ccmode)):
if formatter.valid_job_result(job, jobres):
tmpfile = shatempfile(filename, jobres.stdout)
formatter.add_tempfile(tmpfile, mode=GLOBALTMP)
varfiles.append(tmpfile)
normedfiles, normstyle = varfiles, style
optstyles = [[normstyle]]
for option in styledef_options(formatter.styledefinition):
if option_name(option) in ignoreopts:
continue
stylecombo = formatter.variants_for(option)
if stylecombo:
styles = [copy_with_optgroup(normstyle, optgroup) for optgroup in stylecombo
if not formatter.contains_major_style(optgroup)]
if len(styles) > 0:
optstyles.append(styles)
# Reformat the source with every values of every options.
runs = format_with_styles(formatter,
itertools.chain.from_iterable(optstyles),
normedfiles,
cache=cache,
ccmode=ccmode)
result = group_differences(normstyle, formatter, optstyles, runs, len(normedfiles),
numlines, condensed)
all_stylediff_pairs, num_relevant_options = result
normstyletext = formatter.styletext(normstyle)
fmtheader = formatter.exe + '\n\n' + normstyletext
iprint(INFO_USER, green('\n' + fmtheader), fp=fp)
sys.stdout.flush()
sys.stderr.flush()
alltables = []
htmldiffer = HtmlMultiDiff(tabsize=8, wrapcolumn=wrapcolumn)
headerhtml = '<pre>' + escape(unistr(fmtheader)) + '</pre>'
legend = display == 'html'
prev_progress = ''
prev_tidx = None
for tables, tidx, total in iter_tables(all_stylediff_pairs,
enc=enc,
numhunks=numhunks,
numlines=numlines,
wrapcolumn=wrapcolumn,
ccmode=ccmode):
alltables.extend(tables)
if display == 'ansi':
html = make_custom_html(htmldiffer, headerhtml, tables, enc=enc, legend=legend)
htmldiff2ansi(html, enc, linenumbers=linenumbers, fp=fp)
sys.stdout.flush()
else:
if tidx != prev_tidx:
prev_tidx = tidx
ptext = progresspair(tidx + 1, total)
if tidx + 1 == total:
ptext += '\n'
prev_progress = report_text('\rCalculating style option differences %s' %
ptext,
prev=prev_progress)
if display in ['html', 'ansihtml']:
html = make_custom_html(htmldiffer, headerhtml, alltables, enc=enc, legend=legend)
htmldata = unescape_ill_surrencode(html, enc=enc)
htmldata = translate_non_sgml_chars(htmldata, enc=enc)
if display == 'ansihtml':
html = surrdecode(htmldata, enc=enc)
htmldata = html2ansihtml(html, fmtheader + '\n', enc=enc, linenumbers=linenumbers)
if not outputfile:
fd, outputfile = tempfile.mkstemp(suffix='.html', prefix='whatstyle_')
os.close(fd)
else:
outputfile = os.path.abspath(outputfile)
writebinary(outputfile, htmldata)
iprint(INFO_USER, 'The HTML was written to "%s"' % unifilename(outputfile), fp=fp)
if display in ['html', 'ansihtml'] and not savehtml:
url = urljoin('file:', pathname2url(outputfile))
if not webbrowser.open(url):
ret = ERROR
iprint(INFO_USER, '\n' + green('Summary:'), fp=fp)
iprint(INFO_USER,
green('Found %s options where a different value changed the result.' %
num_relevant_options),
fp=fp)
iprint(INFO_USER,
green('From these we generated %s style differences.' % len(alltables)),
fp=fp)
return ret
def html2ansihtml(html, header='', enc='utf-8', linenumbers=False):
    # type: (str, str, str, bool) -> bytes
    """Render diff HTML through the ANSI path and back to HTML bytes.

    Converts *html* to ANSI, prepends *header* if given, re-renders the ANSI
    as HTML and returns it surrogate-encoded as bytes.
    """
    buf = BytesIO()
    htmldiff2ansi(html, enc, linenumbers=linenumbers, fp=buf)
    ansidata = buf.getvalue()
    if header:
        # reuse the bytes already read instead of a second getvalue() call
        ansidata = bytestr(header) + ansidata
    resulthtml = ansi2html(ansidata, enc=enc)
    htmldata = surrencode(resulthtml, enc=enc)
    return htmldata
def make_custom_html(htmldiffer, headerhtml, tables, enc='utf-8', legend=True):
    # type: (HtmlMultiDiff, str, List[str], str, bool) -> str
    """Join the diff tables under *headerhtml* and render a full HTML page."""
    extracss = """
    html, body, pre {
        font: 16px monospace;
        line-height: 1.2em;
        margin: 0;
        padding: 0;
        border: 0;
    }
    th.diff_header {text-align:left; vertical-align: text-top;}
    """
    body = '%s\n%s' % (headerhtml, '<br/>\n'.join([''] + tables) + '\n')
    return make_html(htmldiffer, body, enc=enc, extracss=extracss, legend=legend)
def iter_tables(all_stylediff_pairs,  # type: List[StyleDiffSourcePairs]
                enc='utf-8',  # type: str
                numhunks=1,  # type: int
                numlines=2,  # type: int
                wrapcolumn=0,  # type: int
                ccmode=CC_PROCESSES  # type: str
                ):
    # type: (...) -> Iterator[Tuple[List[str], int, int]]
    """Yield (tables, group_index, total_groups) for grouped style diffs.

    Groups the style-diff pairs by their shared left-hand style text and
    renders each group's diff tables, possibly in parallel via iter_parallel.
    """
    def left_diff(sdp):
        # type: (StyleDiffSourcePairs) -> str
        # grouping key: the distinct left-hand style texts of this pair set
        return '\n'.join(set([sdtexts[1] for sdtexts in sdp.keys()]))

    def sdkeys(item):
        # type: (StyleDiffSourcePairs) -> List[bytes]
        return list(item.keys())

    idx = 0
    # NOTE(review): groupby only merges adjacent items; this assumes the
    # input is already ordered by the left_diff key — confirm with callers.
    grouped_sdpairs = itertools.groupby(all_stylediff_pairs, left_diff)
    groups = []  # type: List[CallArgs]
    # materialize each group (groupby iterators expire) and sort by key
    grouped_sdp = sorted([(key, list(pairs)) for key, pairs in grouped_sdpairs])
    for sdleft, stylediff_pairs in grouped_sdp:
        args_lists = []
        for sdpairs in sorted(stylediff_pairs, key=sdkeys):
            for from_to_texts, pairs in sorted(sdpairs.items()):
                # one worker call-args tuple per diff table to generate;
                # idx numbers the tables globally across all groups
                args_lists.append((from_to_texts, pairs, numhunks, numlines, wrapcolumn, idx,
                                   enc))
                idx += 1
        grouparg = (args_lists, ), {}  # type: CallArgs
        groups.append(grouparg)
    # render each group's tables (parallelizable) and yield with progress info
    for tidx, tables in enumerate(iter_parallel(calc_diff_groups, groups, ccmode=ccmode)):
        yield tables, tidx, len(groups)
def calc_diff_groups(args_lists):
    # type: (List[Tuple[TextPair, List[BytesPair], int, int, int, int, str]]) -> List[str]
    """Yields diff tables with the minimum number of unique diff hunks needed to show the
    difference between option values.
    Let us look at an example what this means.
    The best style uses the value Attach for the option BreakBeforeBraces.
    Changing this option to the 6 different values
    Linux, Stroustrup, WebKit, Mozilla, GNU, Allman for the test example file lstate.c
    only produces 4 different results: {Linux,Stroustrup,WebKit}, Mozilla, GNU, Allman.
    We would like to show only as many hunks as needed that the difference of these 4
    different option values between themselves is displayed clearly.
    A diff we get from HtmlDiff consists of a number of hunkpairs.
    First we iterate over all differences that has BreakBeforeBraces=Attach on the left
    side and register all style differences that contain a certain hunkpair.
    Let's say the 'Attach vs Allman' diff has the hunkpairs hunkA, hunkB, hunkC
    and 'Attach vs GNU' has the hunkpairs hunkA, hunkB, hunkD.
    When we iterate over the hunks for display, we show hunkA because this is new for the
    set([Allman, GNU]). We skip hunkB because it is the same set.
    We display hunkC which is new as set([Allman]) and hunkD as well set([GNU]).
    In this example 3-4 hunks are sufficient to display all unique sets of this option.
    """
    # Maps the extracted textual content of a hunk to the set of style pairs
    # whose diff contains that hunk.
    sdhunks = defaultdict(set) # type: Dict[str, Set[TextPair]]
    all_htmldiffs = []
    tables = []
    # Matches the cell payloads / row markers that carry the actual textual
    # difference, ignoring layout-only markup.
    r_diffparts = r'(<td nowrap="nowrap">.*?</td>)|(<tr>)|(<td class="diff_header">)'
    re_diffparts = re.compile(r_diffparts)
    # Pass 1: render every diff and register which style pairs share which hunks.
    for from_to_texts, pairs, numhunks, numlines, wrapcolumn, uniqueidx, enc in args_lists:
        htmldiffs = []
        fromtext = unifilename(from_to_texts[0])
        totext = unifilename(from_to_texts[1])
        for tablestart, tbody, tableend in table_iter(pairs,
                                                      uniqueidx,
                                                      enc=enc,
                                                      fromdesc=fromtext,
                                                      todesc=totext,
                                                      numlines=numlines,
                                                      wrapcolumn=wrapcolumn):
            # Extract the textual differences from the diff hunks.
            fragments = []
            for m in re_diffparts.finditer(tbody):
                fragments.append(m.group(0))
            relevantdiffs = ''.join(fragments)
            # Register the stylediffs that apply for each individual diff hunk.
            sdhunks[relevantdiffs].add(from_to_texts)
            htmldiffs.append((relevantdiffs, tablestart, tbody, tableend))
        all_htmldiffs.append((htmldiffs, numhunks))
    # Pass 2: select hunks. A hunk is "crucial" the first time its set of
    # sharing style pairs is seen; repeats of the same set add no information.
    for htmldiffs, numhunks in all_htmldiffs:
        tbodies = []
        uniquesets = set() # type: Set[Tuple[Tuple[str, str], ...]]
        bodyparts = []
        num_crucial = 0
        for relevantdiffs, start, tbody, end in htmldiffs:
            # Every hunk was registered in pass 1, so .get() cannot miss here.
            hunkstyles = tuple(sorted(sdhunks.get(relevantdiffs)))
            if hunkstyles not in uniquesets:
                uniquesets.add(hunkstyles)
                crucial = True
                num_crucial += 1
            else:
                crucial = False
            bodyparts.append((start, tbody, end, crucial))
        # numhunks < 0 means "show everything".
        if numhunks < 0:
            numhunks = len(bodyparts)
        num_uncrucial = numhunks - num_crucial
        # We always show all crucial hunks and additionally as many uncrucial ones
        # as numhunks permits.
        start, tbody, end = '', '', ''
        for start, tbody, end, crucial in bodyparts:
            if crucial:
                tbodies.append(tbody)
            elif num_uncrucial > 0:
                tbodies.append(tbody)
                num_uncrucial -= 1
        if tbodies:
            # start/end still hold the table frame of the last hunk, which is
            # identical for all hunks of one diff.
            table = ''.join([start] + tbodies + [end])
            tables.append(table)
    return tables
def table_iter(pairs, # type: List[BytesPair]
               uniqueidx, # type: int
               enc='utf-8', # type: str
               fromdesc='', # type: str
               todesc='', # type: str
               numlines=2, # type: int
               wrapcolumn=0 # type: int
               ):
    # type: (...) -> Iterator[Tuple[str, str, str]]
    """Render byte pairs to one HTML diff table and yield its tbody hunks.

    Each yielded triple is ``(tablestart, tbody, tableend)`` as produced by
    ``iter_tbodies``; ``uniqueidx`` namespaces the anchors so several tables
    can live on one page.
    """
    htmldiffer = HtmlMultiDiff(tabsize=8, wrapcolumn=wrapcolumn)
    htmldiffer.uniqueidx = uniqueidx
    table = htmldiffer.table_from_pairs(pairs,
                                        enc,
                                        fromdesc=fromdesc,
                                        todesc=todesc,
                                        context=True,
                                        numlines=numlines)
    for tablestart, tbody, tableend in iter_tbodies(table):
        yield tablestart, tbody, tableend
def iter_tbodies(table):
    # type: (str) -> Iterator[Tuple[str, str, str]]
    """Split an HTML table into ``(start, '<tbody>...</tbody>', end)`` triples.

    ``re.split`` on the open/close tags leaves the table prologue at index 0,
    the epilogue at the last index, and the tbody contents at the odd indices
    in between. Yields nothing when the table contains no tbody at all.
    """
    pieces = re.split(r'</?tbody>', table, flags=re.MULTILINE)
    if len(pieces) < 2:
        return
    head = pieces[0]
    tail = pieces[-1]
    for body in pieces[1:-1:2]:
        yield head, '<tbody>%s</tbody>\n ' % body, tail
def concat_sources(s, numlines=2):
    # type: (Iterable[bytes], int) -> bytes
    """Join byte sources, separating them with ``2 * numlines`` newlines."""
    return (b'\n\n' * numlines).join(s)
def group_differences(normstyle, # type: Style
formatter, # type: CodeFormatter
optstyles, # type: List[List[Style]]
runs, # type: Iterable[Tuple[ExeCall, ExeResult]]
num_files, # type: int
numlines, # type: int
condensed # type: bool
):
# type: (...) -> Tuple[List[StyleDiffSourcePairs], int]
"""Returns (all_stylediff_pairs, num_relevant_options).
all_stylediff_pairs a list of StyleDiffSourcePairs where the key is the textual style
difference between contentA and contentB and the values are a list of (contentA, contentB)
pairs. num_relevant_options is the number of options where tweaking a value actually made
a difference.
"""
# Regroup the flat result list into the styleruns of each option.
num_relevant_options = 0
basesource = None
all_stylediff_pairs = []
all_option_style_runs = []
for option_style_runs in iter_option_style_runs(formatter, optstyles, num_files, runs):
variants = {}
contentstyles = defaultdict(list) # type: Dict[bytes, List[Style]]
for _, osrs in itertools.groupby(option_style_runs, operator.itemgetter(0)):
osrslist = list(osrs)
fmtsources = [jobres.stdout
for _, _, _, (job, jobres) in osrslist] # type: ignore
fmtsource = concat_sources(fmtsources, numlines=numlines)
styleindices = set() | |
event.make_event_embed(ctx)
await ctx.send(_("Please wait for an someone to approve your event request."))
admin_msg = await approval_channel.send(embed=em)
start_adding_reactions(admin_msg, ReactionPredicate.YES_OR_NO_EMOJIS)
pred = ReactionPredicate.yes_or_no(admin_msg)
reaction, user = await ctx.bot.wait_for("reaction_add", check=pred)
if pred.result:
event.approver = user.id
await self.post_event(ctx, event)
else:
msg = _("{author}, your event request was denied by an admin.").format(
author=ctx.author.mention
)
if version_info >= VersionInfo.from_str("3.4.6"):
await ctx.reply(msg)
else:
await ctx.send(msg)
return
    async def post_event(self, ctx: commands.Context, event: Event):
        """Publish an approved event to the guild's announcement channel
        and register it in config and the in-memory cache."""
        em = await event.make_event_embed(ctx)
        ping = await self.config.guild(ctx.guild).ping()
        # allowed_mentions only exists from Red 3.4.0 on; only pass the kwarg
        # when the running version supports it.
        sanitize = {}
        if version_info >= VersionInfo.from_str("3.4.0"):
            sanitize = {"allowed_mentions": discord.AllowedMentions(everyone=True, roles=True)}
        # NOTE(review): get_channel can return None if the configured channel
        # was deleted; the .send below would then raise AttributeError --
        # confirm callers guarantee the channel exists.
        announcement_channel = ctx.guild.get_channel(event.channel)
        posted_message = await announcement_channel.send(ping, embed=em, **sanitize)
        event.message = posted_message.id
        # Persist keyed by hoster id; mirror in the cache keyed by message id.
        async with self.config.guild(ctx.guild).events() as cur_events:
            cur_events[str(event.hoster)] = event.to_json()
        if ctx.guild.id not in self.event_cache:
            self.event_cache[ctx.guild.id] = {}
        self.event_cache[ctx.guild.id][posted_message.id] = event
        try:
            start_adding_reactions(posted_message, EVENT_EMOJIS)
        except discord.errors.Forbidden:
            # Missing add-reactions permission is non-fatal: the event is
            # already posted and stored.
            pass
@commands.command(name="clearevent", aliases=["endevent"])
@commands.guild_only()
@commands.bot_has_permissions(embed_links=True)
async def clear_event(self, ctx: commands.Context, clear: bool = False) -> None:
"""
Delete a stored event so you can create more
`[clear]` yes/no to clear your current running event.
"""
if str(ctx.author.id) not in await self.config.guild(ctx.guild).events():
return await ctx.send(_("You don't have any events running."))
elif not clear:
event_data = await self.config.guild(ctx.guild).events()
event = Event.from_json(self.bot, event_data[str(ctx.author.id)])
if not event:
async with self.config.guild(ctx.guild).events() as events:
# clear the broken event
del events[str(ctx.author.id)]
del self.event_cache[ctx.guild.id][event.message]
return await ctx.send(_("You don't have any events running."))
em = await event.make_event_embed(ctx)
return await ctx.send(
_(
"{author}, you're currently hosting. "
"Type `{prefix}clearevent yes` to clear it."
).format(author=ctx.author.display_name, prefix=ctx.clean_prefix),
embed=em,
)
else:
async with self.config.guild(ctx.guild).events() as events:
event = Event.from_json(self.bot, events[str(ctx.author.id)])
await event.edit(ctx, content=_("This event has ended."))
del events[str(ctx.author.id)]
del self.event_cache[ctx.guild.id][event.message]
await ctx.tick()
@commands.command(name="showevent")
@commands.guild_only()
@commands.bot_has_permissions(embed_links=True)
async def show_event(self, ctx: commands.Context, member: discord.Member = None) -> None:
"""Show current event being run by a member"""
if member is None:
member = ctx.author
if str(member.id) not in await self.config.guild(ctx.guild).events():
return await ctx.send(
_("{member} does not have any events running.").format(member=member)
)
event_data = await self.config.guild(ctx.guild).events()
event = Event.from_json(self.bot, event_data[str(member.id)])
if not event:
async with self.config.guild(ctx.guild).events() as events:
# clear the broken event
del events[str(ctx.author.id)]
del self.event_cache[ctx.guild.id][event.message]
return await ctx.send(
_("{member} is not currently hosting an event.").format(member=member.display_name)
)
em = await event.make_event_embed(ctx)
await ctx.send(
_(
"{member} is currently hosting. " "Type `{prefix}clearevent yes` to clear it."
).format(member=member.display_name, prefix=ctx.clean_prefix),
embed=em,
)
@commands.command(name="join")
@commands.guild_only()
async def join_event(
self,
ctx: commands.Context,
hoster: discord.Member,
) -> None:
"""Join an event being hosted"""
if str(hoster.id) not in await self.config.guild(ctx.guild).events():
return await ctx.send(
_("{hoster} is not currently hosting any events.").format(
hoster=hoster.display_name
)
)
event_data = await self.config.guild(ctx.guild).events()
event = Event.from_json(self.bot, event_data[str(hoster.id)])
if not event:
async with self.config.guild(ctx.guild).events() as events:
# clear the broken event
del events[str(ctx.author.id)]
del self.event_cache[ctx.guild.id][event.message]
return await ctx.send(
_("{hoster} is not currently hosting any events.").format(
hoster=hoster.display_name
)
)
if ctx.author.id in event.members:
return await ctx.send(_("You're already participating in this event!"))
await self.add_user_to_event(ctx.author, event)
await ctx.tick()
@commands.command(name="leaveevent")
@commands.guild_only()
async def leave_event(self, ctx: commands.Context, hoster: discord.Member) -> None:
"""Leave an event being hosted"""
if str(hoster.id) not in await self.config.guild(ctx.guild).events():
return await ctx.send(
_("{hoster} is not currently hosting any events.").format(
hoster=hoster.display_name
)
)
event_data = await self.config.guild(ctx.guild).events()
event = Event.from_json(self.bot, event_data[str(hoster.id)])
if not event:
async with self.config.guild(ctx.guild).events() as events:
# clear the broken event
del events[str(ctx.author.id)]
del self.event_cache[ctx.guild.id][event.message]
return await ctx.send(
_("{hoster} is not currently hosting any events.").format(
hoster=hoster.display_name
)
)
if ctx.author.id not in event.members:
return await ctx.send(_("You're not participating in this event!"))
await self.remove_user_from_event(ctx.author, event)
await ctx.tick()
@commands.command(name="removefromevent")
@commands.guild_only()
async def remove_from_event(
self, ctx: commands.Context, member: discord.Member, hoster: discord.Member = None
) -> None:
"""
Remove a user from an event you're hosting
`<member>` The member to remove from your event
`<hoster>` mod/admin only to specify whos event to remove a user from.
"""
if hoster and not await self.is_mod_or_admin(ctx.author):
return await ctx.send(_("You cannot remove a member from someone elses event"))
if hoster is None:
hoster = ctx.author
if member is hoster:
return await ctx.send(_("You cannot remove the hoster from this event."))
if str(hoster.id) not in await self.config.guild(ctx.guild).events():
return await ctx.send(_("You are not currently hosting any events."))
event_data = await self.config.guild(ctx.guild).events()
event = Event.from_json(self.bot, event_data[str(ctx.author.id)])
if not event:
async with self.config.guild(ctx.guild).events() as events:
# clear the broken event
del events[str(ctx.author.id)]
del self.event_cache[ctx.guild.id][event.message]
return await ctx.send(_("That user is not currently hosting any events."))
if member.id not in event.members:
return await ctx.send(_("That member is not participating in that event!"))
await self.remove_from_event(member, event)
await ctx.tick()
    @commands.group(name="eventedit", aliases=["editevent"])
    @commands.guild_only()
    async def event_edit(self, ctx: commands.Context):
        """
        Edit various things in events
        """
        # Parent command group: all actual work happens in the subcommands.
        pass
    @event_edit.command()
    @commands.guild_only()
    async def title(self, ctx: commands.Context, *, new_description: str):
        """
        Edit the title of your event
        `<new_description>` The new description for your event
        """
        # NOTE(review): despite the command name "title", this writes
        # event.event -- presumably the event's title/activity field;
        # confirm against the Event class.
        announcement_channel, approval_channel = await self.get_channels(ctx)
        if not announcement_channel:
            return
        if str(ctx.author.id) not in await self.config.guild(ctx.guild).events():
            return await ctx.send(_("You don't have an event to edit right now."))
        # Scan the message-id -> Event cache for the author's event; a hoster
        # can only run one event at a time, so stop at the first match.
        for message_id, event in self.event_cache[ctx.guild.id].items():
            if event.hoster == ctx.author.id:
                event.event = new_description
                em = await event.make_event_embed(ctx)
                await event.edit(ctx, embed=em)
                # Persist the change and refresh the cache entry.
                async with self.config.guild(ctx.guild).events() as cur_events:
                    cur_events[str(event.hoster)] = event.to_json()
                self.event_cache[ctx.guild.id][event.message] = event
                await ctx.tick()
                break
    async def get_channels(
        self, ctx: commands.Context
    ) -> Tuple[Optional[discord.TextChannel], Optional[discord.TextChannel]]:
        """Resolve the guild's announcement and approval channels.

        Returns ``(announcement_channel, approval_channel)``; returns
        ``(None, None)`` after messaging the user when required setup is
        missing. A missing approval channel is only fatal when the
        bypass_admin setting is off.
        """
        approval_channel = ctx.guild.get_channel(
            await self.config.guild(ctx.guild).approval_channel()
        )
        announcement_channel = ctx.guild.get_channel(
            await self.config.guild(ctx.guild).announcement_channel()
        )
        if not approval_channel and not await self.config.guild(ctx.guild).bypass_admin():
            await ctx.send(
                _(
                    "No admin channel has been setup on this server. "
                    "Use `{prefix}eventset approvalchannel` to add one."
                ).format(prefix=ctx.clean_prefix)
            )
            return None, None
        if not announcement_channel:
            await ctx.send(
                _(
                    "No announcement channel has been setup on this server. "
                    "Use `{prefix}eventset channel` to add one."
                ).format(prefix=ctx.clean_prefix)
            )
            return None, None
        return announcement_channel, approval_channel
    @event_edit.command()
    @commands.guild_only()
    async def slots(self, ctx: commands.Context, new_slots: Optional[int] = None):
        """
        Edit the number of slots available for your event
        `<new_slots>` The number of available slots for your events activity
        """
        # Omitting new_slots sets max_slots to None, i.e. unlimited.
        announcement_channel, approval_channel = await self.get_channels(ctx)
        if not announcement_channel:
            return
        if str(ctx.author.id) not in await self.config.guild(ctx.guild).events():
            return await ctx.send(_("You don't have an event to edit right now."))
        # Find the author's event in the cache; one event per hoster.
        for message_id, event in self.event_cache[ctx.guild.id].items():
            if event.hoster == ctx.author.id:
                event.max_slots = new_slots
                em = await event.make_event_embed(ctx)
                await event.edit(ctx, embed=em)
                # Persist the change and refresh the cache entry.
                async with self.config.guild(ctx.guild).events() as cur_events:
                    cur_events[str(event.hoster)] = event.to_json()
                self.event_cache[ctx.guild.id][event.message] = event
                await ctx.tick()
                break
@event_edit.command()
@commands.guild_only()
async def remaining(self, ctx: commands.Context):
"""
Show how long until your event will be automatically ended if available.
"""
announcement_channel, approval_channel = await self.get_channels(ctx)
if not announcement_channel:
return
if str(ctx.author.id) not in await self.config.guild(ctx.guild).events():
return await ctx.send(_("You don't have an event to edit right now."))
for message_id, event in self.event_cache[ctx.guild.id].items():
if event.hoster == ctx.author.id:
seconds = await self.config.guild(ctx.guild).cleanup_seconds()
if seconds is None:
return await ctx.send(
_("There is no automatic timeout of events in this server.")
)
await ctx.send(
_("Your event has {time} remaining until it is ended automatically.").format(
time=event.remaining(seconds)
)
)
return
    @event_edit.group()
    @commands.guild_only()
    async def members(self, ctx: commands.Context):
        """Edit event members"""
        # Parent group for the member add/remove subcommands below.
        pass
    @members.command(name="add")
    @commands.guild_only()
    async def members_add(self, ctx: commands.Context, *new_members: discord.Member):
        """
        Add members to your event (hopefully not against their will)
        `[new_members...]` The members you want to add to your event
        """
        if not new_members:
            return await ctx.send_help()
        announcement_channel, approval_channel = await self.get_channels(ctx)
        if not announcement_channel:
            return
        if str(ctx.author.id) not in await self.config.guild(ctx.guild).events():
            return await ctx.send(_("You don't have an event to edit right now."))
        # Find the author's event in the cache; one event per hoster.
        for message_id, event in self.event_cache[ctx.guild.id].items():
            if event.hoster == ctx.author.id:
                for m in new_members:
                    # add_user_to_event is expected to handle duplicates /
                    # slot limits itself.
                    await self.add_user_to_event(m, event)
                # Persist the change and refresh the cache entry.
                async with self.config.guild(ctx.guild).events() as cur_events:
                    cur_events[str(event.hoster)] = event.to_json()
                self.event_cache[ctx.guild.id][event.message] = event
                await ctx.tick()
                break
    @members.command(name="remove", aliases=["rem"])
    @commands.guild_only()
    async def members_remove(self, ctx: commands.Context, *members: discord.Member):
        """
        Remove members from your event (hopefully not against their will)
        `[members...]` The members you want to add to your event
        """
        if not members:
            return await ctx.send_help()
        announcement_channel, approval_channel = await self.get_channels(ctx)
        if not announcement_channel:
            return
        if str(ctx.author.id) not in await self.config.guild(ctx.guild).events():
            return await ctx.send(_("You don't have an event to edit right now."))
        # Find the author's event in the cache; one event per hoster.
        for message_id, event in self.event_cache[ctx.guild.id].items():
            if event.hoster == ctx.author.id:
                for m in members:
                    # Only remove members that are actually attending or
                    # marked "maybe".
                    if m.id in event.members or m.id in event.maybe:
                        await self.remove_user_from_event(m, event)
                # Persist the change and refresh the cache entry.
                async with self.config.guild(ctx.guild).events() as cur_events:
                    cur_events[str(event.hoster)] = event.to_json()
                self.event_cache[ctx.guild.id][event.message] = event
                await ctx.tick()
                break
@event_edit.group()
@commands.guild_only()
async def maybe(self, ctx: commands.Context):
"""Edit event members"""
pass
@maybe.command(name="add")
| |
order
Yields
------
str
The name of the fields, one by one.
"""
yield self.field.name
for other_field in self.other_fields:
yield other_field.name
def _can_filter_fields(self, fields_and_suffixes):
"""Tell if the index can handle the given fields + suffixes
For the parameters, see ``_MultiFieldsIndexMixin._can_filter_fields``
"""
filters = defaultdict(set)
for field_name, suffix in fields_and_suffixes:
filters[field_name].add(suffix)
handled = []
for field_name in self.handled_field_names:
if field_name not in filters:
return []
if not filters[field_name].intersection(self.handled_suffixes):
return []
handled.append((field_name, filters[field_name]))
return handled
    def get_filtered_keys(self, suffix, *args, **kwargs):
        """Return the set used by the index for the given "value" (`args`)
        Parameters
        ----------
        kwargs['related_filters'] : Dict[str, Tuple(List, Union[str, None])]
            Mandatory named argument that contains the filters for the other fields than the one
            to which this index is attached
            All the other fields defined in the index must be present in this dict.
            The keys or the dict are the field names.
            The values are a tuple for each entry of the dict, containing two elements:
            - all the "values" to take into account for this field (see ``args`` argument of
              ``BaseIndex.get_filtered_keys``)
            - the suffix for this field (see ``suffix`` argument of ``BaseIndex.get_filtered_keys``)
        For the others parameters, see ``BaseIndex.get_filtered_keys``
        """
        self._check_key_accepted_key_types(kwargs.get('accepted_key_types'))
        related_filters = kwargs['related_filters']
        # if we have a `in` suffix, we have to handle it in a special way
        if suffix == 'in' or 'in' in {related_filters[field.name][1] for field in self.other_fields}:
            # Expand every field's values: an `in` filter contributes one
            # entry per member of its collection, any other filter exactly one.
            by_field = {}
            for field_name, field_args, field_suffix in chain(
                [(self.field.name, args, suffix)],
                [(field.name, ) + related_filters[field.name] for field in self.other_fields]
            ):
                field_args = list(field_args)
                value = field_args.pop()
                if field_suffix == 'in':
                    values = set(value)
                    if not values:
                        # `in` on an empty collection can never match anything.
                        return []
                else:
                    values = [value]
                by_field[field_name] = [(field_args, value) for value in values]
            in_keys = []
            # Cartesian product of all per-field value lists: one storage key
            # per combination. Relies on dicts preserving insertion order so
            # zip(by_field, ...) pairs names with the right product column.
            for all_fields_args in (zip(by_field, product_values) for product_values in product(*by_field.values())):
                call_args, call_kwargs = [], {}
                for field_name, [field_args, field_value] in all_fields_args:
                    if field_name == self.field.name:
                        call_args = field_args + [field_value]
                    else:
                        call_kwargs[field_name] = [field_args + [field_value]]
                in_keys.extend(
                    key for key, __
                    in self.get_storage_keys(
                        None, *call_args, other_args=call_kwargs,
                        transform_value=False
                    )
                )
            # Union all the candidate sets into one temporary key; the True
            # flag tells the caller the key is temporary and must be deleted.
            tmp_key = self._unique_key('tmp')
            self.union_filtered_in_keys(tmp_key, *in_keys)
            return [(tmp_key, 'set', True)]
        # Simple case: no `in` anywhere, direct lookup of the storage keys.
        return [
            (key, 'set', False)
            for key, __
            in self.get_storage_keys(None, *args, other_args={
                field.name: [related_filters[field.name][0]]
                for field in self.other_fields
            }, transform_value=False)
        ]
def get_storage_keys(self, pk, *args, **kwargs):
"""Return the redis key where to store the index for the given "values" (`args`)
It can return many keys depending on the type of the other fields tied to this index.
By default, it will get the keys by retrieving all the values for the other fields for the
pk (will be only one for SingleValueField and many for MultiValuesField)
If some of these other fields are filled in ``other_args``, the values from this dict
will be used instead of fetching them.
Parameters
----------
pk : Optional[Any]
The primary key of the instance for which we want the keys.
If ``None``, all fields are expected to be found in `other_args`.
args: tuple
All the "values" to take into account for the field tied to the index to get the storage
keys (see ``EqualIndex.get_storage_key``)
kwargs: dict
other_args : Optional[Dict[str, List[Tuple]]
If set, will contains values of some others fields of the index we want the key for.
For example if another field is a ``ListField`` having values 1 and 2, here we can
say that we want only the keys for the value 2.
The keys of the dict are name of fields tied to this index.
The values are, for each one, a list of tuples, each of these tuples having the same
info like in `args`: one tuple for each value for this field.
transform_value: bool
Default to ``True``. Tell the call to ``normalize_value`` to transform
the value or not
Returns
-------
List[Tuple[str, Union[Callable, None]]]
Will return a list with one entry for each keys, each entry containing a tuple with two
entries.
The first is the key.
The second is ``None`` if the index is not unique, else it's a callable to return
the value to display in the UniquenessError message.
"""
other_args = kwargs.get('other_args') or {}
if args[-1] is None:
return []
args = list(args[:-1]) + [
self.normalize_value(args[-1], transform=kwargs.get('transform_value', True))
]
for field in self.other_fields:
if field.name not in other_args:
if not pk:
raise ImplementationError(
"Cannot get filtering storage key for index %s on "
"%s.%s without all fields given" % (
self.__class__.__name__,
self.model.__name__,
self.field.name,
)
)
other_args[field.name] = field.get_for_instance(pk)._prepare_index_data(pk)
# stop early if no data for a field to avoid fetching other fields
if all([field_args[-1] is None for field_args in other_args[field.name]]):
return []
entries = product([args], *[other_args[field.name] for field in self.other_fields])
base_parts = [
self.model._name,
self.field.name,
]
if self.prefix:
base_parts.append(self.prefix)
if self.key:
base_parts.append(self.key)
keys = []
for key_entry in entries:
if any([key_part[-1] is None for key_part in key_entry]):
# at least one field has no data, so we skip this entry
break
parts = base_parts[:]
parts.extend(chain.from_iterable(key_entry))
def get_unique_value_func(key_entry_):
# this will only be called in case of UniqunessError
return lambda: ', '.join([
'%s=%s' % ('__'.join(map(str, (field_name, ) + tuple(key_parts[:-1]))), key_parts[-1])
for field_name, key_parts
in zip(
[self.field.name] + [field.name for field in self.other_fields],
key_entry_
)
])
get_unique_value = get_unique_value_func(key_entry) if self.unique else None
keys.append((self.field.make_key(*parts), get_unique_value))
return keys
@property
def unique_index_name(self):
"""Get a string to describe the index in case of UniquenessError"""
return 'unique together fields [%s] on %s' % (
', '.join([self.field.name] + [field.name for field in self.other_fields]),
self.model.__name__
)
def check_uniqueness_at_init(self, values):
"""If the index is ``unique``, check that ``values`` are unique and can be inserted
For the parameters, see ``_MultiFieldsIndexMixin.check_uniqueness_at_init``
"""
if not self.unique:
return
args = [values.pop(self.field.name)]
other_args = {
field.name: [
(val, ) for val in values[field.name]
] if isinstance(self.model.get_field(field.name), MultiValuesField)
else [(values[field.name], )]
for field in self.other_fields
}
keys = self.get_storage_keys(None, *args, other_args=other_args)
self._check_uniqueness_in_keys(None, keys)
def _check_uniqueness_in_keys(self, pk, keys):
"""Check uniqueness of pks in the given keys
Parameters
----------
pk: Any
The pk of the instance for which its ok to have the value.
keys: List[Tuple[str, Union[Callable, None]]]
A list with one entry for each keys to check, each entry containing a tuple with two
entries.
The first is the key.
The second is ``None`` if the index is not unique, else it's a callable to return
the value to display in the UniquenessError message.
Returns
-------
"""
if not self.unique:
return
for key, get_unique_value in keys:
pks = self.get_uniqueness_members(key)
self.assert_pks_uniqueness(pks, pk, get_unique_value)
def add(self, pk, *args, **kwargs):
"""Add the instance tied to the field for the given "value" (via `args`) to the index
Parameters
----------
kwargs['other_args'] : dict
Values for other fields. See ``get_storage_keys``.
For the other parameters, see ``BaseIndex.add``
"""
keys = self.get_storage_keys(pk, *args, other_args=kwargs.get('other_args'))
if not keys:
return
if self.unique:
self._check_uniqueness_in_keys(pk, keys)
for key, __ in keys:
logger.debug("adding %s to index %s" % (pk, key))
if self.store(key, pk):
self._get_rollback_cache(pk)['indexed_values'].add(tuple(args))
def remove(self, pk, *args, **kwargs):
"""Remove the instance tied to the field for the given "value" (via `args`) from the index
Parameters
----------
kwargs['other_args'] : dict
Values for other fields. See ``get_storage_keys``.
For the other parameters, see ``BaseIndex.remove``
"""
keys = self.get_storage_keys(pk, *args, other_args=kwargs.get('other_args'))
if not keys:
return
for key, __ in keys:
logger.debug("removing %s from index %s" % (pk, key))
if self.unstore(key, pk):
self._get_rollback_cache(pk)['deindexed_values'].add(tuple(args)) # TODO
    def other_add(self, pk, field_name, *args):
        """Called by the related index on a related field to update the index when changed
        Parameters
        ----------
        pk : Any
            The primary key of the instance for which the other field was updated
        field_name : str
            The name of the other updated field
        args: tuple
            All the values to take into account to define the index entry for the other field
        """
        # A None value never gets indexed.
        if args[-1] is None:
            return
        # Re-index our own field's current values, pinning the other field
        # to the freshly-changed value instead of fetching it.
        for parts in self.field._prepare_index_data(pk):
            if parts[-1] is None:
                continue
            self.add(pk, *parts, other_args={field_name:[args]})
        # This path is driven by the other field's rollback handling, so our
        # own rollback cache must not keep these writes.
        self._reset_rollback_cache(pk)
def other_remove(self, pk, field_name, *args):
"""Called by the related index on a related field to update the index when changed
Parameters
----------
pk : Any
The primary key of the | |
<gh_stars>10-100
import math, sys
import maya.api.OpenMaya as om
import maya.api.OpenMayaUI as omui
import maya.api.OpenMayaAnim as oma
import maya.api.OpenMayaRender as omr
from maya.OpenMaya import MGlobal
# This is insane. There are two Python APIs in Maya, and both of them are missing lots of
# stuff, and you can't mix them except in specific careful ways.
import maya.OpenMayaRender as v1omr
glRenderer = v1omr.MHardwareRenderer.theRenderer()
glFT = glRenderer.glFunctionTable()
def maya_useNewAPI():
    """The presence of this function tells Maya this plug-in uses API 2.0."""
    pass
# Be careful when changing the order of these shapes. Their index is the value of the .shape
# enum, so this affects the file format.
def _make_pyramid():
    """Build the pyramid shape: a unit square base plus four side triangles."""
    base = [
        (-0.5, 0, +0.5),
        (+0.5, 0, +0.5),
        (+0.5, 0, -0.5),
        (-0.5, 0, -0.5),
    ]
    apex = (+0.0, 1, -0.0)
    # Each side triangle is two base corners plus the apex, in the same
    # order the original literal listed them.
    triangles = []
    for corner_a, corner_b in ((base[0], base[3]), (base[1], base[2]),
                               (base[3], base[2]), (base[1], base[0])):
        triangles.extend([corner_a, corner_b, apex])
    return {
        'quads': base,
        omr.MUIDrawManager.kTriangles: triangles,
    }
def _make_ball():
    """Build the "ball" shape from hand-placed quads and corner triangles.

    The point lists appear to describe a rhombicuboctahedron-like solid:
    axis-aligned square faces generated for both signs of each axis, plus
    one triangle per corner octant -- TODO confirm against the rendered
    result before relying on this description.
    """
    points = []
    # p1: distance of the outer face planes; p2: half-size of each square face.
    p1 = (1.0) / 2.0
    p2 = (0.5) / 2.0
    for x in (1,-1):
        # Squares perpendicular to the X, Y and Z axes at +/- p1.
        points.append((x*p1, -p2, -p2))
        points.append((x*p1, +p2, -p2))
        points.append((x*p1, +p2, +p2))
        points.append((x*p1, -p2, +p2))
        points.append((-p2, x*p1, -p2))
        points.append((+p2, x*p1, -p2))
        points.append((+p2, x*p1, +p2))
        points.append((-p2, x*p1, +p2))
        points.append((-p2, -p2, x*p1))
        points.append((+p2, -p2, x*p1))
        points.append((+p2, +p2, x*p1))
        points.append((-p2, +p2, x*p1))
        # Edge-bevel squares for every sign combination of two axes
        # (nested loop runs for both y signs inside each x sign).
        for y in (1,-1):
            points.append((-p2, x*+p2, y*+p1))
            points.append((+p2, x*+p2, y*+p1))
            points.append((+p2, x*+p1, y*+p2))
            points.append((-p2, x*+p1, y*+p2))
            points.append((x*+p2, -p2, y*+p1))
            points.append((x*+p2, +p2, y*+p1))
            points.append((x*+p1, +p2, y*+p2))
            points.append((x*+p1, -p2, y*+p2))
            points.append((x*+p2, y*+p1, -p2))
            points.append((x*+p2, y*+p1, +p2))
            points.append((x*+p1, y*+p2, +p2))
            points.append((x*+p1, y*+p2, -p2))
    tris = []
    # One corner triangle for each of the 8 sign octants.
    for x in (1, -1):
        for y in (1, -1):
            for z in (1, -1):
                tris.append((x*-p1, y*-p2, z*p2))
                tris.append((x*-p2, y*-p1, z*p2))
                tris.append((x*-p2, y*-p2, z*p1))
    return {
        'quads': points,
        omr.MUIDrawManager.kTriangles: tris,
    }
# Shape table: list index doubles as the value of the .shape enum (see the
# warning above), so entries must only ever be appended, never reordered.
shapes = [{
    'name': 'Ball',
    'geometry': _make_ball(),
}, {
    'name': 'Pyramid',
    'geometry': _make_pyramid(),
}]
def _convert_shape(shape):
    """Post-process a shape definition in place for drawing.

    Adds outline segments (kLines) for all quads and triangles, triangulates
    the quads, and converts every point list to an om.MPointArray.
    Note: uses xrange, so this file targets Maya's Python 2 interpreter.
    """
    geometry = shape['geometry']
    lines = geometry.setdefault(omr.MUIDrawManager.kLines, [])
    # Add edge lines for quads.
    if 'quads' in geometry:
        quads = geometry['quads']
        for i in xrange(0, len(quads), 4):
            # Four segments per quad: 0-1, 1-2, 2-3, 3-0.
            lines.append(quads[i+0])
            lines.append(quads[i+1])
            lines.append(quads[i+1])
            lines.append(quads[i+2])
            lines.append(quads[i+2])
            lines.append(quads[i+3])
            lines.append(quads[i+3])
            lines.append(quads[i+0])
    # Add edge lines for tris.
    if omr.MUIDrawManager.kTriangles in geometry:
        tris = geometry[omr.MUIDrawManager.kTriangles]
        for i in xrange(0, len(tris), 3):
            # Three segments per triangle: 0-1, 1-2, 2-0.
            lines.append(tris[i+0])
            lines.append(tris[i+1])
            lines.append(tris[i+1])
            lines.append(tris[i+2])
            lines.append(tris[i+2])
            lines.append(tris[i+0])
    # Convert quads to tris.
    if 'quads' in geometry:
        tris = geometry.setdefault(omr.MUIDrawManager.kTriangles, [])
        quads = geometry.pop('quads')
        for i in xrange(0, len(quads), 4):
            # Fan-split each quad into triangles 0-1-2 and 2-3-0.
            tris.append(quads[i+0])
            tris.append(quads[i+1])
            tris.append(quads[i+2])
            tris.append(quads[i+2])
            tris.append(quads[i+3])
            tris.append(quads[i+0])
    # Replace the plain tuple lists with Maya point arrays.
    for key, data in geometry.items():
        array = om.MPointArray()
        for point in data:
            array.append(om.MPoint(*point))
        geometry[key] = array
    return shape
# Bake every shape once at import time: add outlines, triangulate quads and
# convert point lists to MPointArray (see _convert_shape).
shapes = [_convert_shape(shape) for shape in shapes]
def _getCustomShape(node):
    # Return the shape connected to customMeshAttr.
    """Build draw geometry from the mesh plugged into zRigHandle.customMeshAttr.

    Returns a dict of {draw item type: point list} in the same layout that
    _convert_shape produces.  Falls back to the first built-in shape when no
    mesh is connected.
    """
    depNode = om.MFnDependencyNode(node)
    obj = depNode.userNode()
    # Read the mesh straight from the node's datablock rather than through a
    # plug, so we get the current input value.
    dataBlock = obj.forceCache()
    meshHandle = dataBlock.inputValue(zRigHandle.customMeshAttr)
    try:
        it = om.MItMeshPolygon(meshHandle.asMesh())
    except RuntimeError:
        # We'll get "kInvalidParameter: Argument is a NULL pointer" if there's no
        # mesh connection. How do we check this?
        return shapes[0]['geometry']
    tris = []
    lines = []
    while not it.isDone():
        face = it.getPoints(om.MSpace.kObject)
        # The data from the iterator doesn't stay valid, so make a copy of the point.
        face = [om.MPoint(v) for v in face]
        if len(face) == 3:
            tris.extend(face)
            lines.extend(face)
        elif len(face) == 4:
            # Split the quad into two triangles and four edge segments.
            tris.extend((face[0], face[1], face[2], face[2], face[3], face[0]))
            lines.extend((face[0], face[1], face[1], face[2], face[2], face[3], face[3], face[0]))
        else:
            # We don't currently support meshes with more than four faces. We could
            # triangulate with MFnMesh.polyTriangulate, but I'm not sure it's worth
            # the bother.
            pass
        it.next(1)
    return {
        omr.MUIDrawManager.kTriangles: tris,
        omr.MUIDrawManager.kLines: lines,
    }
def getShapeBounds(shape):
    """Return an om.MBoundingBox that encloses every point in *shape*."""
    bounds = om.MBoundingBox()
    allPoints = (point for data in shape.values() for point in data)
    for point in allPoints:
        bounds.expand(point)
    return bounds
def _transformShape(shape, transform):
    """Return a copy of *shape* with every point multiplied by *transform*."""
    return {
        key: om.MPointArray([point * transform for point in points])
        for key, points in shape.items()
    }
class zRigHandle(om.MPxSurfaceShape):
    """A selectable rig-handle shape node drawn by zRigHandleDrawOverride.

    The displayed geometry is either one of the built-in entries in
    ``shapes`` (selected by the 'shape' enum) or a mesh plugged into
    inCustomMesh, further modified by the transform/localRotate/
    localPosition/localScale attributes.  The shape and its transformed
    variant are cached on the instance and invalidated from
    setDependentsDirty.
    """
    id = om.MTypeId(0x124743)
    drawDbClassification = "drawdb/geometry/zRigHandle"
    drawRegistrantId = "zRigHandlePlugin"
    def __init__(self):
        om.MPxSurfaceShape.__init__(self)
    @classmethod
    def creator(cls):
        # Factory Maya calls to instantiate the node.
        return cls()
    @classmethod
    def initialize(cls):
        """Create and register all node attributes (called once at plugin load)."""
        nAttr = om.MFnNumericAttribute()
        enumAttr = om.MFnEnumAttribute()
        matAttr = om.MFnMatrixAttribute()
        uAttr = om.MFnUnitAttribute()
        typedAttr = om.MFnTypedAttribute()
        # Which built-in shape to draw; -1 means "use the custom mesh input".
        cls.shapeAttr = enumAttr.create('shape', 'sh', 0)
        enumAttr.addField('Custom', -1)
        for idx, shape in enumerate(shapes):
            enumAttr.addField(shape['name'], idx)
        enumAttr.channelBox = True
        cls.addAttribute(cls.shapeAttr)
        # Optional mesh input used when shape == Custom.
        cls.customMeshAttr = typedAttr.create("inCustomMesh", "icm", om.MFnMeshData.kMesh)
        typedAttr.storable = False
        # The kReset constant is missing from the Python 2.0 API.
        typedAttr.disconnectBehavior = 1
        cls.addAttribute(cls.customMeshAttr)
        # Extra matrix applied on top of the local SRT attributes below.
        cls.transformAttr = matAttr.create('transform', 't', om.MFnMatrixAttribute.kFloat)
        matAttr.keyable = False
        cls.addAttribute(cls.transformAttr)
        localRotateX = uAttr.create('localRotateX', 'lrx', om.MFnUnitAttribute.kAngle, 0.0)
        localRotateY = uAttr.create('localRotateY', 'lry', om.MFnUnitAttribute.kAngle, 0.0)
        localRotateZ = uAttr.create('localRotateZ', 'lrz', om.MFnUnitAttribute.kAngle, 0.0)
        cls.localRotateAttr = nAttr.create('localRotate', 'lr', localRotateX, localRotateY, localRotateZ)
        nAttr.channelBox = True
        nAttr.keyable = False
        cls.addAttribute(cls.localRotateAttr)
        cls.localTranslateAttr = nAttr.createPoint('localPosition', 'lp')
        nAttr.channelBox = True
        nAttr.keyable = False
        cls.addAttribute(cls.localTranslateAttr)
        localScaleX = nAttr.create('localScaleX', 'lsx', om.MFnNumericData.kFloat, 1)
        localScaleY = nAttr.create('localScaleY', 'lsy', om.MFnNumericData.kFloat, 1)
        localScaleZ = nAttr.create('localScaleZ', 'lsz', om.MFnNumericData.kFloat, 1)
        cls.localScaleAttr = nAttr.create('localScale', 'ls', localScaleX, localScaleY, localScaleZ)
        nAttr.channelBox = True
        nAttr.keyable = False
        cls.addAttribute(cls.localScaleAttr)
        # Fill and border display colors/alphas.
        cls.colorAttr = nAttr.createColor('color', 'dc')
        nAttr.default = (.38,0,0.02)
        cls.addAttribute(cls.colorAttr)
        cls.alphaAttr = nAttr.create('alpha', 'a', om.MFnNumericData.kFloat, 0.333)
        nAttr.setSoftMin(0)
        nAttr.setSoftMax(1)
        nAttr.keyable = False
        cls.addAttribute(cls.alphaAttr)
        cls.borderColorAttr = nAttr.createColor('borderColor', 'bc')
        nAttr.default = (-1,-1,-1)
        cls.addAttribute(cls.borderColorAttr)
        cls.borderAlphaAttr = nAttr.create('borderAlpha', 'ba', om.MFnNumericData.kFloat, 1)
        nAttr.setSoftMin(0)
        nAttr.setSoftMax(1)
        nAttr.keyable = False
        cls.addAttribute(cls.borderAlphaAttr)
        # Draw through other geometry when enabled.
        cls.xrayAttr = nAttr.create('xray', 'xr', om.MFnNumericData.kBoolean, True)
        nAttr.keyable = False
        nAttr.channelBox = True
        cls.addAttribute(cls.xrayAttr)
    def postConstructor(self):
        self.isRenderable = True
        depNode = om.MFnDependencyNode(self.thisMObject())
        depNode.setName("rigHandleShape#");
    def setDependentsDirty(self, plug, affectedPlugs):
        """Invalidate the cached shape / transformed shape when inputs change."""
        if plug.isChild:
            plug = plug.parent()
        if plug in (self.transformAttr, self.localTranslateAttr, self.localRotateAttr, self.localScaleAttr):
            # Discard our transformed shape.
            if hasattr(self, 'transformedShape'): del self.transformedShape
        if plug in (self.transformAttr, self.shapeAttr,
                self.localTranslateAttr, self.localRotateAttr, self.localScaleAttr,
                self.colorAttr, self.alphaAttr, self.borderColorAttr, self.borderAlphaAttr,
                self.xrayAttr):
            self.childChanged(self.kBoundingBoxChanged)
            omr.MRenderer.setGeometryDrawDirty(self.thisMObject(), True)
        if plug in (self.shapeAttr, self.customMeshAttr):
            # Discard our shape cache. We can't set the new one now, since the new
            # plug value hasn't actually been set yet, so we'll do it on the next
            # render.
            if hasattr(self, 'transformedShape'): del self.transformedShape
            if hasattr(self, 'shape'): del self.shape
            self.childChanged(self.kBoundingBoxChanged)
        return super(zRigHandle, self).setDependentsDirty(plug, affectedPlugs)
    def getShapeSelectionMask(self):
        # Set both kSelectMeshes, so tumble on pivot sees the object, and kSelectJoints, so we're
        # higher priority for selection than meshes that are in front of us. Xray alone won't do
        # this.
        mask = om.MSelectionMask()
        mask.addMask(om.MSelectionMask.kSelectMeshes)
        mask.addMask(om.MSelectionMask.kSelectJoints)
        return mask
    def isBounded(self):
        return True
    def getShapeIdx(self):
        # Current value of the 'shape' enum (-1 = custom mesh).
        return om.MPlug(self.thisMObject(), self.shapeAttr).asInt()
    def getShape(self):
        """Return the (cached) locally-transformed geometry to draw."""
        # If the shape isn't cached, cache it now.
        if not hasattr(self, 'shape'):
            self.shape = self._getShapeFromPlug()
        if not hasattr(self, 'transformedShape'):
            shape = self.shape
            transform = self._getLocalTransform()
            self.transformedShape = _transformShape(shape, transform)
        return self.transformedShape
    def _getShapeFromPlug(self):
        # Built-in shape by index, or custom mesh geometry for -1.
        idx = self.getShapeIdx()
        if idx == -1:
            shape = _getCustomShape(self.thisMObject())
        else:
            shape = shapes[idx]['geometry']
        return shape
    def _getLocalTransform(self):
        """Compose the transform matrix from the transform/local SRT attributes."""
        node = self.thisMObject()
        transformPlug = om.MPlug(node, self.transformAttr)
        transform = om.MFnMatrixData(transformPlug.asMObject()).matrix()
        mat = om.MTransformationMatrix(transform)
        # Apply local translation.
        localTranslatePlug = om.MPlug(node, self.localTranslateAttr)
        localTranslation = om.MVector(*[localTranslatePlug.child(idx).asFloat() for idx in range(3)])
        mat.translateBy(localTranslation, om.MSpace.kObject)
        # Apply local rotation.
        localRotatePlug = om.MPlug(node, self.localRotateAttr)
        localRotatePlugs = [localRotatePlug.child(idx) for idx in range(3)]
        localRotate = om.MVector(*[localRotatePlugs[idx].asMAngle().asRadians() for idx in range(3)])
        mat.rotateBy(om.MEulerRotation(localRotate), om.MSpace.kObject)
        # Apply local scale.
        scalePlug = om.MPlug(node, self.localScaleAttr)
        scale = om.MFnNumericData(scalePlug.asMObject()).getData()
        mat.scaleBy(scale, om.MSpace.kObject)
        return mat.asMatrix()
    @property
    def xray(self):
        # Current value of the 'xray' attribute.
        return om.MPlug(self.thisMObject(), self.xrayAttr).asBool()
    def boundingBox(self):
        return getShapeBounds(self.getShape())
def _hitTestShape(view, shape):
    # Hit test shape within view.
    """Return True if any of the shape's geometry hit-tests within *view*.

    NOTE(review): every item is submitted as MGL_TRIANGLES regardless of its
    type (line items included) -- presumably acceptable for pure hit-testing,
    but confirm this is intentional.
    """
    for itemType, data in shape.items():
        view.beginSelect()
        glFT.glBegin(v1omr.MGL_TRIANGLES)
        for v in data:
            glFT.glVertex3f(v.x, v.y, v.z)
        glFT.glEnd()
        # Check the hit test.
        if view.endSelect() > 0:
            return True
    return False
# This object isn't created in 2016.5 VP2.
class zRigHandleShapeUI(omui.MPxSurfaceShapeUI):
    """Legacy-viewport UI class providing selection for zRigHandle."""
    def __init__(self):
        omui.MPxSurfaceShapeUI.__init__(self)
    @staticmethod
    def creator():
        return zRigHandleShapeUI()
    def select(self, selectInfo, selectionList, worldSpaceSelectPts):
        """Hit-test the handle and, on a hit, add it to the selection."""
        shape = self.surfaceShape().getShape()
        # Hit test the selection against the shape.
        if not _hitTestShape(selectInfo.view(), shape):
            return False
        item = om.MSelectionList()
        item.add(selectInfo.selectPath())
        # Get the world space position of the node. We'll set the position of the selection here,
        # so the camera focuses on it.
        mat = item.getDagPath(0).inclusiveMatrix()
        transformation = om.MTransformationMatrix(mat)
        pos = transformation.translation(om.MSpace.kWorld)
        # Joint priority keeps the handle selectable in front of meshes.
        priorityMask = om.MSelectionMask(om.MSelectionMask.kSelectJoints)
        selectInfo.addSelection(item, om.MPoint(pos), selectionList, worldSpaceSelectPts, priorityMask, False)
        return True
def isPathSelected(objPath):
    """Return True if *objPath* or its parent is in the active selection."""
    selection = om.MGlobal.getActiveSelectionList()
    if selection.hasItem(objPath):
        return True
    # Also treat the handle as selected when its parent transform is.
    parentPath = om.MDagPath(objPath)
    parentPath.pop()
    return True if selection.hasItem(parentPath) else False
class zRigHandleDrawOverride(omr.MPxDrawOverride):
@staticmethod
def creator(obj):
return zRigHandleDrawOverride(obj)
@staticmethod
def draw(context, data):
return
def __init__(self, obj):
args = [self, obj, zRigHandleDrawOverride.draw]
if MGlobal.apiVersion() >= 201700:
# This argument is only present in 2017, and improves performance substantially.
args.append(False)
omr.MPxDrawOverride.__init__(*args)
def supportedDrawAPIs(self):
return omr.MRenderer.kOpenGL | omr.MRenderer.kDirectX11 | omr.MRenderer.kOpenGLCoreProfile
def isBounded(self, objPath, cameraPath):
return True
def boundingBox(self, objPath, cameraPath):
depNode = om.MFnDependencyNode(objPath.node())
obj = depNode.userNode()
return obj.boundingBox()
def disableInternalBoundingBoxDraw(self):
return True
def prepareForDraw(self, objPath, cameraPath, frameContext, oldData):
depNode = om.MFnDependencyNode(objPath.node())
obj = depNode.userNode()
isSelected = isPathSelected(objPath)
self.xray = obj.xray
plug = | |
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
@authors: <NAME>, <NAME>
"""
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.support.ui import Select
from selenium import webdriver
from scipy.constants import minute, hour, milli, nano, micro
from pprint import pprint
from chardet import detect
from glob import glob
from math import floor
import datetime
import pandas
import numpy
import warnings, json, time, re, os
def isnumber(string):
    """Return *string* coerced to a float, or False when it is not numeric.

    NOTE: a numeric zero comes back as 0.0, which is falsy -- callers must
    test the result with ``is not False`` rather than plain truthiness.
    """
    # The original also tried int() after float() failed, but int() accepts
    # no string that float() rejects, so that branch was dead code.
    try:
        return float(string)
    except (TypeError, ValueError):
        return False
def average(num_1, num_2 = None):
    """Average two numbers, pass through a single number, or average a list.

    Returns None when no numeric input is available.  isnumber() returns the
    parsed float or False, and 0.0 is falsy, so results are compared with
    ``is not False`` -- the original truth-tests silently dropped zero
    arguments (e.g. average(0, 4) returned 4 instead of 2).
    """
    first = isnumber(num_1)
    if first is not False:
        second = isnumber(num_2)
        if second is not False:
            return (first + second) / 2
        # Only num_1 is numeric; return it unconverted, as before.
        return num_1
    if type(num_1) is list:
        summation = total = 0
        for num in num_1:
            if num is not None:
                summation += num
                total += 1
        if total > 0:
            return summation/total
        return None # raise ValueError(f'The arguments {num_1} & {num_2} must be numbers or a list of numbers')
    second = isnumber(num_2)
    if second is not False:
        return num_2
    return None # raise ValueError(f'The arguments {num_1} & {num_2} must be numbers or a list of numbers')
# encoding the final JSON
class NumpyEncoder(json.JSONEncoder): # sourced from https://github.com/hmallen/numpyencoder
    """Custom encoder that converts numpy scalars/arrays to native Python types.

    Uses numpy's abstract scalar bases (integer/floating/complexfloating)
    instead of enumerating concrete dtypes; this also keeps the encoder
    working on NumPy >= 2.0, where the numpy.float_ / numpy.complex_
    aliases used by the original were removed.
    """
    def default(self, obj):
        if isinstance(obj, numpy.integer):
            return int(obj)
        elif isinstance(obj, numpy.floating):
            return float(obj)
        elif isinstance(obj, numpy.complexfloating):
            return {'real': float(obj.real), 'imag': float(obj.imag)}
        elif isinstance(obj, numpy.ndarray):
            return obj.tolist()
        elif isinstance(obj, numpy.bool_):
            return bool(obj)
        elif isinstance(obj, numpy.void):
            return None
        return json.JSONEncoder.default(self, obj)
# allows case insensitive dictionary searches
class CaseInsensitiveDict(dict): # sourced from https://stackoverflow.com/questions/2082152/case-insensitive-dictionary
    """dict subclass whose string keys match case-insensitively.

    String keys are lower-cased on every read and write; non-string keys
    pass through untouched.
    """
    @classmethod
    def _k(cls, key):
        # Normalise string keys to lower case; leave other key types alone.
        return key.lower() if isinstance(key, str) else key
    def __init__(self, *args, **kwargs):
        super(CaseInsensitiveDict, self).__init__(*args, **kwargs)
        self._convert_keys()
    def __getitem__(self, key):
        return super(CaseInsensitiveDict, self).__getitem__(self.__class__._k(key))
    def __setitem__(self, key, value):
        super(CaseInsensitiveDict, self).__setitem__(self.__class__._k(key), value)
    def __delitem__(self, key):
        return super(CaseInsensitiveDict, self).__delitem__(self.__class__._k(key))
    def __contains__(self, key):
        return super(CaseInsensitiveDict, self).__contains__(self.__class__._k(key))
    def has_key(self, key):
        # dict.has_key was removed in Python 3; the original delegated to it
        # and would raise AttributeError.  Delegate to __contains__ instead.
        return self.__class__._k(key) in self
    def pop(self, key, *args, **kwargs):
        return super(CaseInsensitiveDict, self).pop(self.__class__._k(key), *args, **kwargs)
    def get(self, key, *args, **kwargs):
        return super(CaseInsensitiveDict, self).get(self.__class__._k(key), *args, **kwargs)
    def setdefault(self, key, *args, **kwargs):
        return super(CaseInsensitiveDict, self).setdefault(self.__class__._k(key), *args, **kwargs)
    def update(self, E=None, **F):
        # Guard E=None: the original unconditionally built self.__class__(E),
        # and dict(None) raises TypeError, so keyword-only calls such as
        # d.update(x=1) crashed.
        if E is not None:
            super(CaseInsensitiveDict, self).update(self.__class__(E))
        super(CaseInsensitiveDict, self).update(self.__class__(**F))
    def _convert_keys(self):
        # Re-insert all initial keys through __setitem__ so they are normalised.
        for k in list(self.keys()):
            v = super(CaseInsensitiveDict, self).pop(k)
            self.__setitem__(k, v)
class SABIO_scraping():
# __slots__ = (str(x) for x in [progress_file_prefix, xls_download_prefix, is_scraped_prefix, is_scraped_entryids_prefix, sel_raw_data, processed_csv, entry_json, scraped_model, bigg_model_name_suffix, output_directory, progress_path, raw_data, is_scraped, is_scraped_entryids_path, xls_csv_file_path, entryids_json_file_path, model_kinetics_path, bigg_model, step_number, cwd])
    def __init__(self,
                 bigg_model_path: str, # the JSON version of the BiGG model
                 bigg_model_name: str = None, # the name of the BiGG model
                 export_model_content: bool = False,
                 verbose: bool = False,
                 printing: bool = True
                 ):
        """Load the BiGG model and reference tables, create the output
        directory tree, and pre-parse every reaction's chemicals.

        Side effects: creates the scraping output/download directories and
        writes the initial step number to the progress file.
        """
        self.export_model_content = export_model_content
        self.verbose = verbose
        self.printing = printing
        self.step_number = 1
        self.count = 0
        self.paths = {}
        # initial parameters and variables
        self.parameters = {}
        self.parameters['general_delay'] = 2  # seconds between selenium actions
        self.variables = {}
        self.variables['is_scraped'] = {}
        self.variables['is_scraped_entryids'] = {}
        self.variables['entryids'] = {}
        # load BiGG dictionary content (reference JSONs shipped next to this module)
        self.paths['bigg_model_path'] = bigg_model_path
        self.paths['root_path'] = os.path.dirname(__file__)
        self.bigg_to_sabio_metabolites = json.load(open(os.path.join(self.paths['root_path'],'BiGG_metabolites, parsed.json')))
        self.sabio_to_bigg_metabolites = json.load(open(os.path.join(self.paths['root_path'],'BiGG_metabolite_names, parsed.json')))
        # case-insensitive views of the two metabolite maps
        self.sabio_insensitive = CaseInsensitiveDict(self.sabio_to_bigg_metabolites)
        self.bigg_insensitive = CaseInsensitiveDict(self.bigg_to_sabio_metabolites)
        self.bigg_reactions = json.load(open(os.path.join(self.paths['root_path'],'BiGG_reactions, parsed.json')))
        # load the BiGG model content
        if os.path.exists(self.paths['bigg_model_path']):
            self.model = json.load(open(self.paths['bigg_model_path']))
        else:
            raise ValueError('The BiGG model file does not exist')
        self.bigg_model_name = bigg_model_name
        if bigg_model_name is None:
            # Derive the model name from the file name (strip the .json suffix).
            self.bigg_model_name = re.search("([\w+\.?\s?]+)(?=\.json)", self.paths['bigg_model_path']).group()
        # define folder paths
        self.paths['cwd'] = os.path.dirname(os.path.realpath(self.paths['bigg_model_path']))
        self.paths['output_directory'] = os.path.join(self.paths['cwd'],f"scraping-{self.bigg_model_name}")
        self.paths['raw_data'] = os.path.join(self.paths['output_directory'], 'downloaded')
        # Parent directory must exist before the nested one is created.
        if not os.path.isdir(self.paths['output_directory']):
            os.mkdir(self.paths['output_directory'])
        if not os.path.isdir(self.paths['raw_data']):
            os.mkdir(self.paths['raw_data'])
        # define file paths
        self.paths['progress_path'] = os.path.join(self.paths['output_directory'], "current_progress.txt")
        self.paths['model_kinetics_path'] = os.path.join(self.paths['output_directory'], "model_kinetics.json")
        self.paths['concatenated_data'] = os.path.join(self.paths['raw_data'], "concatenated_data.csv")
        self.paths['is_scraped'] = os.path.join(self.paths['raw_data'], "is_scraped.json")
        self.paths['is_scraped_entryids'] = os.path.join(self.paths['raw_data'], "is_scraped_entryids.json")
        self.paths['entryids_path'] = os.path.join(self.paths['raw_data'], "entryids.json")
        self.paths['model_contents'] = os.path.join(self.paths['raw_data'], f'processed_{self.bigg_model_name}_model.json')
        # parse the model contents
        self._progress_update(self.step_number)
        self.model_contents = {}
        for enzyme in self.model['reactions']:
            annotations = enzyme['annotation']
            enzyme_id = enzyme['id']
            enzyme_name = enzyme['name']
            og_reaction_string = self.bigg_reactions[enzyme_id]['reaction_string']
            # Split the reaction into SABIO- and BiGG-style chemical lists.
            reaction_string, sabio_chemicals, bigg_compounds = self._split_reaction(og_reaction_string)
            self.model_contents[enzyme_name] = {
                'reaction': {
                    'original': og_reaction_string,
                    'substituted': reaction_string,
                },
                'bigg_chemicals': bigg_compounds,
                'sabio_chemicals': sabio_chemicals,
                'annotations': annotations
            }
# ==================== HELPER FUNCTIONS =======================
#Clicks a HTML element with selenium by id
def _click_element_id(self,n_id):
element = self.driver.find_element_by_id(n_id)
element.click()
time.sleep(self.parameters['general_delay'])
def _wait_for_id(self,n_id):
while True:
try:
element = self.driver.find_element_by_id(n_id) #!!! what is the purpose of this catch?
break
except:
time.sleep(self.parameters['general_delay'])
#Selects a choice from a HTML dropdown element with selenium by id
def _select_dropdown_id(self,n_id, n_choice):
element = Select(self.driver.find_element_by_id(n_id))
element.select_by_visible_text(n_choice)
time.sleep(self.parameters['general_delay'])
def _progress_update(self, step):
if not re.search('[0-5]', str(step)):
print(f'--> ERROR: The {step} step is not acceptable.')
f = open(self.paths['progress_path'], "w")
f.write(str(step))
f.close()
    def _previous_scrape(self):
        """Resume an interrupted scrape: restore the step number and any
        previously saved scrape-state JSONs from the output directory."""
        if os.path.exists(self.paths['progress_path']):
            with open(self.paths['progress_path'], "r") as f:
                # Only the first character is read; the file holds one digit.
                self.step_number = int(f.read(1))
            if not re.search('[1-5]',str(self.step_number)):
                raise ImportError(f"Progress file malformed. Create a < current_progress.txt > file with a < 1-5 > digit to signify the current scrapping progress.")
            print(f'Continuing Step {self.step_number}')
        # define file paths and import content from an interupted scrapping
        if os.path.exists(self.paths['is_scraped']):
            with open(self.paths['is_scraped'], 'r') as f:
                self.variables['is_scraped'] = json.load(f)
        if os.path.exists(self.paths['is_scraped_entryids']):
            with open(self.paths['is_scraped_entryids'], 'r') as f:
                try:
                    self.variables['is_scraped_entryids'] = json.load(f)
                except:
                    raise ImportError('The < entryids.json > file is corrupted or empty.')
        if os.path.exists(self.paths['entryids_path']):
            with open(self.paths['entryids_path'], 'r') as f:
                try:
                    self.variables['entryids'] = json.load(f)
                except:
                    raise ImportError('The < entryids.json > file is corrupted or empty.')
    def _open_driver(self,):
        """Start a headless Firefox configured to auto-download XLS exports
        into the raw-data folder, and open the SABIO-RK search page.

        NOTE(review): the geckodriver path ends in '.exe', which assumes a
        Windows host -- confirm before running elsewhere.
        """
        self.options = Options()
        self.options.headless = True
        self.fp = webdriver.FirefoxProfile(os.path.join(self.paths['root_path'],"l2pnahxq.scraper"))
        # folderList=2 tells Firefox to use the custom download directory below.
        self.fp.set_preference("browser.download.folderList", 2)
        self.fp.set_preference("browser.download.manager.showWhenStarting", False)
        self.fp.set_preference("browser.download.dir", self.paths["raw_data"])
        # Save octet-stream responses (the XLS exports) without prompting.
        self.fp.set_preference("browser.helperApps.neverAsk.saveToDisk", "application/octet-stream")
        self.driver = webdriver.Firefox(firefox_profile=self.fp, executable_path=os.path.join(self.paths['root_path'],"geckodriver.exe"))
        self.driver.get("http://sabiork.h-its.org/newSearch/index")
    def complete(self,):
        """Run the scraping pipeline from the last recorded step to the end.

        NOTE(review): only steps 1 and 2 are handled here; if the progress
        file holds 3-5 (which _previous_scrape accepts), neither branch runs
        and this loop spins forever -- confirm whether more steps belong here.
        """
        self._previous_scrape()
        while True:
            if self.step_number == 1:
                self.scrape_bigg_xls()
            elif self.step_number == 2:
                self.to_fba()
                break
        print("Execution complete.")
        # os.remove(self.paths['progress_path'])
"""
---------------------------------------------------------------------------------------------------------
STEP 1: SCRAPE SABIO WEBSITE BY DOWNLOAD XLS FOR GIVEN REACTIONS IN BIGG MODEL
---------------------------------------------------------------------------------------------------------
"""
    def _scrape_csv(self,reaction_identifier, search_option):
        """Search SABIO-RK for *reaction_identifier* (using the dropdown
        field *search_option*), select every result page, and export the
        kinetics as an XLS download into the raw-data folder.

        Returns True once the download lands on disk, or False when the
        search produced no usable results.  The fixed sleeps throughout are
        pacing for the remote site's page updates.
        """
        # Count existing downloads so the new file can be detected later.
        quantity_of_xls_files = len([file for file in glob(os.path.join(self.paths['raw_data'], '*.xls'))])
        self.driver.get("http://sabiork.h-its.org/newSearch/index")
        time.sleep(self.parameters['general_delay'])
        self._click_element_id("resetbtn")
        time.sleep(self.parameters['general_delay']*2)
        # Build the search query and submit it.
        self._click_element_id("option")
        self._select_dropdown_id("searchterms", search_option)
        text_area = self.driver.find_element_by_id("searchtermField")
        text_area.send_keys(reaction_identifier)
        time.sleep(self.parameters['general_delay'])
        self._click_element_id("addsearch")
        time.sleep(self.parameters['general_delay'])
        # Read the number of kinetic-law hits; bail out when it is absent.
        result_num = ""
        try:
            result_num_ele = self.driver.find_element_by_id("numberofKinLaw")
            for char in result_num_ele.text:
                if re.search('[0-9]', char):
                    result_num += char
            result_num = int(result_num)
        except:
            #self.driver.close()
            self.driver.get("http://sabiork.h-its.org/newSearch/index")
            return False
        time.sleep(self.parameters['general_delay'])
        # Show 100 results per page.
        self._select_dropdown_id("max", "100")
        element = Select(self.driver.find_element_by_id("max"))
        element.select_by_visible_text("100")
        time.sleep(self.parameters['general_delay'])
        if result_num > 0 and result_num <= 100:
            self._click_element_id("allCheckbox")
            time.sleep(self.parameters['general_delay'])
        elif result_num > 100:
            # Select every page of results: check all, advance, repeat.
            loops = floor(result_num/100)
            if result_num % 100 == 0:
                loops -= 1
            self._click_element_id("allCheckbox")
            for i in range(loops):
                element = self.driver.find_element_by_xpath("//*[@class = 'nextLink']")
                element.click()
                time.sleep(self.parameters['general_delay'])
                self._click_element_id("allCheckbox")
                time.sleep(self.parameters['general_delay'])
        else:
            # Zero results.
            #self.driver.close()
            self.driver.get("http://sabiork.h-its.org/newSearch/index")
            return False
        # Export the selection to XLS via the spreadsheet-export page.
        self.driver.get("http://sabiork.h-its.org/newSearch/spreadsheetExport")
        time.sleep(self.parameters['general_delay']*7.5)
        element = self.driver.find_element_by_xpath("//*[text()[contains(., 'Add all')]]")
        element.click()
        time.sleep(self.parameters['general_delay']*2.5)
        self._click_element_id("excelExport")
        time.sleep(self.parameters['general_delay']*2.5)
        # Block until the new XLS file shows up in the download folder.
        new_quantity_of_xls_files = len([file for file in glob(os.path.join(self.paths['raw_data'], '*.xls'))])
        loop = 0
        while new_quantity_of_xls_files != quantity_of_xls_files+1:
            if loop == 0:
                print(f'The search result for {reaction_identifier} has not downloaded. We will wait until it downloads.')
            new_quantity_of_xls_files = len([file for file in glob(os.path.join(self.paths['raw_data'], '*.xls'))])
            time.sleep(self.parameters['general_delay'])
            loop += 1
        # Extra settling time if the download was slow to appear.
        if loop > 0:
            time.sleep(self.parameters['general_delay']*30)
            if self.verbose:
                string = 'The search result for {} downloaded after {} seconds.'.format(reaction_identifier, self.parameters['general_delay']*loop)
                print(string)
        return True
# def _expand_shadow_element(self, element):
# shadow_root = self.driver.execute_script('return arguments[0].shadowRoot', element)
# return shadow_root
def _split_reaction(self,
reaction_string, # the sabio or bigg reaction string
sabio = False # specifies how the reaction string will be split
):
def __parse_stoich(met):
stoich = ''
ch_number = 0
denom = False
numerator = denominator = 0
while re.search('[0-9\./]', met[ch_number]):
stoich += met[ch_number]
if met[ch_number] == '/':
numerator = stoich
denom = True
if denom:
denominator += met[ch_number]
ch_number += 1
if denom:
stoich = f'{numerator}/{denominator}'
return stoich
def __met_parsing(met):
# print(met)
met = met.strip()
met = re.sub('_\w$', '', met)
if re.search('(\d\s\w|\d\.\d\s|\d/\d\s)', met):
coefficient = __parse_stoich(met)
coefficient = '{} '.format(coefficient)
else:
coefficient = ''
met = re.sub(coefficient, '', met)
# print(met, coefficient)
return met, coefficient
def __reformat_met_name(met_name, sabio = False):
met_name = re.sub(' - ', '-', met_name)
# if not sabio:
# met_name = re.sub(' ', '_', met_name)
return met_name
def __name_refinement(met_name):
if met_name == 'NAD\+':
met_name = 'Nicotinamide adenine dinucleotide'
elif met_name == 'NADH':
met_name = 'Nicotinamide adenine dinucleotide - reduced'
return met_name
def __check_existence(met, coefficient, bigg_chemicals, sabio = False):
original_length = len(bigg_chemicals)
if met in | |
creates a context for cut uv tool. This context only works in the UV editor.
Flags:
- adjustSize : asz (bool) [edit]
If true, puts the tool into the mode where dragging the mouse will edit the brush size. If false, puts the tool back
into the previous mode.
- displayShellBorders : dsb (bool) [query,edit]
Toggle the display of shell borders.
- edgeSelectSensitive : ess (float) [query,edit]
Set the value of the edge selection sensitivity.
- exists : ex (bool) [create]
Returns true or false depending upon whether the specified object exists. Other flags are ignored.
- history : ch (bool) [create]
If this is a tool command, turn the construction history on for the tool in question.
- image1 : i1 (unicode) [create,query,edit]
First of three possible icons representing the tool associated with the context.
- image2 : i2 (unicode) [create,query,edit]
Second of three possible icons representing the tool associated with the context.
- image3 : i3 (unicode) [create,query,edit]
Third of three possible icons representing the tool associated with the context.
- mode : m (unicode) [query,edit]
Specifies the type of effect the brush will perform, Cut or Sew.
- moveRatio : mvr (float) [query,edit]
The cut open ratio relative to edge length.
- name : n (unicode) [create]
If this is a tool command, name the tool appropriately.
- size : sz (float) [query,edit]
Brush size value of the brush ring.
- steadyStroke : ss (bool) [query,edit]
Turn on steady stroke or not.
- steadyStrokeDistance : ssd (float) [query,edit]
The distance for steady stroke.
- touchToSew : tts (bool) [query,edit]
Toggle the touch to sew mode. Flag can have multiple arguments, passed either as a tuple or a list.
Derived from mel command `maya.cmds.texCutContext`
"""
pass
def rollCtx(*args, **kwargs):
    """
    Create, edit, or query a roll context.
    Flags:
    - alternateContext : ac (bool) [create,query]
        Set the ALT+MMB and ALT+SHIFT+MMB to refer to this context.
    - exists : ex (bool) [create]
        Returns true or false depending upon whether the specified object exists. Other flags are ignored.
    - history : ch (bool) [create]
        If this is a tool command, turn the construction history on for the tool in question.
    - image1 : i1 (unicode) [create,query,edit]
        First of three possible icons representing the tool associated with the context.
    - image2 : i2 (unicode) [create,query,edit]
        Second of three possible icons representing the tool associated with the context.
    - image3 : i3 (unicode) [create,query,edit]
        Third of three possible icons representing the tool associated with the context.
    - name : n (unicode) [create]
        If this is a tool command, name the tool appropriately.
    - rollScale : rs (float) [create,query,edit]
        In degrees of rotation per 100 pixels of cursor drag.
    - toolName : tn (unicode) [create,query]
        Name of the specific tool to which this command refers.                  Flag can have multiple arguments, passed either
        as a tuple or a list.
    Derived from mel command `maya.cmds.rollCtx`
    """
    # Generated stub: the real behaviour is provided by maya.cmds.rollCtx at runtime.
    pass
def artAttrPaintVertexCtx(*args, **kwargs):
"""
This is a context command to set the flags on the artAttrContext, which is the base context for attribute painting
operations. All commands require the name of the context as the last argument as this provides the name of the context
to create, edit or query. This is a context command to set the flags on the Paint color on vertex Tool context. In query
mode, return type is based on queried flag.
Flags:
- accopacity : aco (bool) [create,query,edit]
Sets opacity accumulation on/off. C: Default is false (Except for sculpt tool for which it is true by default). Q: When
queried, it returns a boolean.
- activeListChangedProc : alp (unicode) [create,query,edit]
Accepts a string that contains a MEL command that is invoked whenever the active list changes. There may be some
situations where the UI, for example, needs to be updated, when objects are selected/deselected in the scene. In query
mode, the name of the currently registered MEL command is returned and this will be an empty string if none is defined.
- afterStrokeCmd : asc (unicode) [create,query,edit]
The passed string is executed as a MEL command immediately after the end of a stroke. C: Default is no command. Q: When
queried, it returns the current command
- alphaclamp : alc (unicode) [create,query,edit]
Specifies if the weight value should be alpha clamped to the lower and upper bounds. There are four options here: none-
no clamping is performed, lower- clamps only to the lower bound, upper- clamps only to the upper bounds, both- clamps to
the lower and upper bounds. C: Default is none. Q: When queried, it returns a string.
- alphaclamplower : acl (float) [create,query,edit]
Specifies the lower bound for the alpha values. C: Default is 0.0. Q: When queried, it returns a float.
- alphaclampupper : acu (float) [create,query,edit]
Specifies the upper bound for the alpha values. C: Default is 1.0. Q: When queried, it returns a float.
- attrSelected : asl (unicode) [query]
Returns a name of the currently selected attribute. Q: When queried, it returns a string.
- beforeStrokeCmd : bsc (unicode) [create,query,edit]
The passed string is executed as a MEL command immediately before the start of a stroke. C: Default is no command. Q:
When queried, it returns the current command
- brushalignment : bra (bool) [create,query,edit]
Specifies the path brush alignemnt. If true, the brush will align to stroke path, otherwise it will align to up vector.
C: Default is true. Q: When queried, it returns a boolean.
- brushfeedback : brf (bool) [create,query,edit]
Specifies if the brush additional feedback should be drawn. C: Default is TRUE. Q: When queried, it returns a boolean.
- clamp : cl (unicode) [create,query,edit]
Specifies if the weight value should be clamped to the lower and upper bounds. There are four options here: none- no
clamping is performed, lower- clamps only to the lower bound, upper- clamps only to the upper bounds, both- clamps to
the lower and upper bounds. C: Default is none. Q: When queried, it returns a string.
- clamplower : cll (float) [create,query,edit]
Specifies the lower bound for the values. C: Default is 0.0. Q: When queried, it returns a float.
- clampupper : clu (float) [create,query,edit]
Specifies the upper bound for the values. C: Default is 1.0. Q: When queried, it returns a float.
- clear : clr (bool) [create,edit]
Floods all cvs/vertices to the current value.
- colorAlphaValue : cl1 (float) []
- colorRGBAValue : cl4 (float, float, float, float) []
- colorRGBValue : cl3 (float, float, float) []
- colorRamp : cr (unicode) [create,query,edit]
Allows a user defined color ramp to be used to map values to colors.
- colorfeedback : cf (bool) [create,query,edit]
Sets on/off the color feedback display. C: Default is FALSE. Q: When queried, it returns a boolean.
- colorfeedbackOverride : cfo (bool) []
- colorrangelower : crl (float) [create,query,edit]
Specifies the value that maps to black when color feedback mode is on. C: Default is 0.0. Q: When queried, it returns a
float.
- colorrangeupper : cru (float) [create,query,edit]
Specifies the value that maps to the maximum color when color feedback mode is on. C: Default is 1.0. Q: When queried,
it returns a float.
- dataTypeIndex : dti (int) [query,edit]
When the selected paintable attribute is a vectorArray, it specifies which field to paint on.
- disablelighting : dl (bool) [create,query,edit]
If color feedback is on, this flag determines whether lighting is disabled or not for the surfaces that | |
show=False):
"""
title::
generate_defender_utilities_plot
description::
This method will plot 3c and 3d, and provide other stats used in the
paper (ttests and percent change).
NOTE: Only plots/reports results if there are values other than zero
in the array. If all values are zero, it does not plot/report results.
Furthermore, if there are any None values from solutions, they will be
reported but then ignored for producing results.
attributes::
resultsGamma
Abbreviations: objective value = OV, recalculated (i.e., computed
as though no uncertainty, recalculated with true uncertainty) =
recalc, original (i.e., computed as though no uncertainty) = orig,
target (i.e., attacker best response target) = tgt, eta (i.e.,
attacker best response behavior) = eta
Depth: both OV, recalc OV, orig OV, both tgt, recalc tgt, orig
tgt, both eta, both recalc, both orig, CONDITIONAL PROBABILITY MEAN
Rows: Results for graphs
Columns: Results for gammas
resultsUncert
Same as above, but no conditional probability mean (therefore, only 9
columns), and kappa instead of gamma
U_dc
U_+^d (defender utility when defender successfully protects target)
U_du
U_-^d (defender utility when defender fails to protect target)
U_ac
U_+^a (attacker utility when defender successfully protects target)
U_au
U_-^a (attacker utility when defender fails to protect target)
save
Set to True if you wish to save plots as files [default is False]
show
Set to True if you wish to show the plots (i.e., plt.show) [default
is False] NOTE: do not set both save and show to True, one may be
blank
author::
<NAME> (<EMAIL>)
<NAME>, <NAME>, <NAME>
disclaimer::
This source code is provided "as is" and without warranties as to
performance or merchantability. The author and/or distributors of
this source code may have made statements about this source code.
Any such statements do not constitute warranties and shall not be
relied on by the user in deciding whether to use this source code.
This source code is provided without any express or implied warranties
whatsoever. Because of the diversity of conditions and hardware under
which this source code may be used, no warranty of fitness for a
particular purpose is offered. The user is advised to test the source
code thoroughly before relying on it. The user must assume the entire
risk of using the source code.
"""
#NOTE: Warnings may arise if ttest or mean on array of all zeros (which may
#happen with -nu or -ng options, for example). Check for these first.
checkZeroUncert = True
checkZeroGamma = True
if np.all(resultsUncert == 0):
checkZeroUncert = False
if np.all(resultsGamma == 0):
checkZeroGamma = False
#NOTE: Ignoring nan's and computing without them.
countNanUncert = np.where(np.isnan(resultsUncert) == True)
countNanGamma = np.where(np.isnan(resultsGamma) == True)
if countNanUncert[0].shape[0] > 0:
print('nan exists in resultsUncert: ', countNanUncert)
if countNanGamma[0].shape[0] > 0:
print('nan exists in resultsGamma: ', countNanGamma)
#t-test comparing optimal objective value and that from ignoring.
if checkZeroUncert:
ttest3d = stats.ttest_ind(resultsUncert[:,:,0],
resultsUncert[:,:,1],
axis=0,
nan_policy='omit')
print('ttest 3d: ', ttest3d)
if checkZeroGamma:
ttest3c = stats.ttest_ind(resultsGamma[:,:,0],
resultsGamma[:,:,1],
axis=0,
nan_policy='omit')
print('ttest 3c: ', ttest3c)
#Mean over different graphs (print where nan, but ignore for mean).
if checkZeroUncert:
resultsUncertMean = np.nanmean(resultsUncert, axis=0)
if checkZeroGamma:
resultsGammaMean = np.nanmean(resultsGamma, axis=0)
#Get maximum (absolute value) utility.
maxUtil = max(np.abs(U_dc).max(),
np.abs(U_du).max(),
np.abs(U_ac).max(),
np.abs(U_au).max())
#Calculate percent change in objective including, ignoring uncertainty.
if checkZeroUncert:
percentChangeObsWith = calculate_percent_change(resultsUncertMean[:,0]\
/ maxUtil)
percentChangeObsWithout = calculate_percent_change( \
resultsUncertMean[:,1] / maxUtil)
print('percent change 3d including, ignoring uncertainty: ',
percentChangeObsWith, percentChangeObsWithout)
if checkZeroGamma:
percentChangeDetWith = calculate_percent_change(resultsGammaMean[:,0] \
/ maxUtil)
percentChangeDetWithout = calculate_percent_change( \
resultsGammaMean[:,1] / maxUtil)
print('percent change 3c including, ignoring uncertainty: ',
percentChangeDetWith, percentChangeDetWithout)
#Plot varying uncertainty matrix. (3d)
if checkZeroUncert:
matplotlib.pyplot.rcParams.update({'font.size': 22})
matplotlib.pyplot.figure()
matplotlib.pyplot.xlabel('Amount of Observational Uncertainty'+\
' ($\kappa$)')
matplotlib.pyplot.ylabel('Defender Expected Utility')
matplotlib.pyplot.plot(np.arange(0,1,0.1), resultsUncertMean[:,0],
label='GUARDSS')
matplotlib.pyplot.plot(np.arange(0,1,0.1), resultsUncertMean[:,1],
'^--',label='Def. Ignoring\nObservational '+\
'Uncertainty')
matplotlib.pyplot.legend(bbox_to_anchor=(0., 1.02, 1.01, .102), loc=3,
ncol=1, mode="expand", borderaxespad=0.)
if save:
matplotlib.pyplot.savefig('3d.pdf',
bbox_inches='tight')
#Plot varying gamma. (3c)
if checkZeroGamma:
matplotlib.pyplot.figure()
matplotlib.pyplot.xlabel('Amount of Detection Uncertainty ($\gamma$)')
matplotlib.pyplot.ylabel('Defender Expected Utility')
matplotlib.pyplot.plot(np.arange(0,1,0.1), resultsGammaMean[:,0],
label='GUARDSS')
matplotlib.pyplot.plot(np.arange(0,1,0.1), resultsGammaMean[:,1],
'^--',label='Def. Ignoring Detection\nUncertainty')
matplotlib.pyplot.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
ncol=1, mode="expand", borderaxespad=0.)
if save:
matplotlib.pyplot.savefig('3c.pdf',
bbox_inches='tight')
if show:
matplotlib.pyplot.show()
def generate_conditional_prob_plot(gammas,
                                   resultsGamma,
                                   save=True,
                                   show=False):
    """
    title::
        generate_conditional_prob_plot
    description::
        This method will plot 3e (mean conditional probability of a fake
        signal given a strong signal, versus detection uncertainty), and
        provide other stats used in the paper (a t-test comparing results
        at the lowest and highest gamma levels).
    attributes::
        gammas
            The gammas that should be plotted (e.g., np.arange(0,1,0.1)).
        resultsGamma
            Abbreviations: objective value = OV, recalculated (i.e., computed
            as though no uncertainty, recalculated with true uncertainty) =
            recalc, original (i.e., computed as though no uncertainty) = orig,
            target (i.e., attacker best response target) = tgt, eta (i.e.,
            attacker best response behavior) = eta
            Depth: both OV, recalc OV, orig OV, both tgt, recalc tgt, orig
            tgt, both eta, both recalc, both orig, CONDITIONAL PROBABILITY MEAN
            Rows: Results for graphs
            Columns: Results for gammas
        save
            Set to True if you wish to save plots as files [default is True]
        show
            Set to True if you wish to show the plots (i.e., plt.show) [default
            is False] NOTE: do not set both save and show to True, one may be
            blank
    author::
        <NAME> (<EMAIL>)
        <NAME>, <NAME>, <NAME>
    disclaimer::
        This source code is provided "as is" and without warranties as to
        performance or merchantability. The author and/or distributors of
        this source code may have made statements about this source code.
        Any such statements do not constitute warranties and shall not be
        relied on by the user in deciding whether to use this source code.
        This source code is provided without any express or implied warranties
        whatsoever. Because of the diversity of conditions and hardware under
        which this source code may be used, no warranty of fitness for a
        particular purpose is offered. The user is advised to test the source
        code thoroughly before relying on it. The user must assume the entire
        risk of using the source code.
    """
    #Plot conditional probability. (3e)
    matplotlib.pyplot.rcParams.update({'font.size': 22})
    matplotlib.pyplot.figure()
    #Raw string avoids the invalid '\g' escape-sequence warning; the rendered
    #label text is unchanged.
    matplotlib.pyplot.xlabel(r'Amount of Detection Uncertainty ($\gamma$)')
    matplotlib.pyplot.ylabel('P(fake | strong signal)')
    #Depth index 9 is the conditional probability mean; average over graphs.
    matplotlib.pyplot.plot(gammas, np.mean(resultsGamma[:,:,9], axis=0))
    matplotlib.pyplot.tight_layout()
    if save:
        matplotlib.pyplot.savefig('3e.pdf')
    if show:
        matplotlib.pyplot.show()
    #Compare the conditional probability at the first and last gamma levels.
    print('ttest 3e',stats.ttest_ind(resultsGamma[:,0,9], resultsGamma[:,9,9],
                                     axis=0))
def calculate_percent_change(defEU):
    """
    title::
        calculate_percent_change
    description::
        Compute the percent change between the first and last entries of an
        array of objective values, as used to describe 3c and 3d in the
        paper. Normalize the input by the maximum utility beforehand to get
        the reported results (see generate_defender_utilities_plot).
    attributes::
        defEU
            Array of objective values at different uncertainty levels, i.e.,
            one row of objective values whose columns correspond to
            increasing uncertainty (see generate_defender_utilities_plot).
    returns::
        Percent change, positive when the objective decreased from the
        first to the last uncertainty level.
    author::
        <NAME> (<EMAIL>)
        <NAME>, <NAME>, <NAME>
    disclaimer::
        This source code is provided "as is" and without warranties as to
        performance or merchantability. The author and/or distributors of
        this source code may have made statements about this source code.
        Any such statements do not constitute warranties and shall not be
        relied on by the user in deciding whether to use this source code.
        This source code is provided without any express or implied warranties
        whatsoever. Because of the diversity of conditions and hardware under
        which this source code may be used, no warranty of fitness for a
        particular purpose is offered. The user is advised to test the source
        code thoroughly before relying on it. The user must assume the entire
        risk of using the source code.
    """
    baseline, final = defEU[0], defEU[-1]
    return ((baseline - final) / baseline) * 100
def calculate_percent_difference(defEU):
"""
title::
calculate_percent_difference
description::
This method will calculate the percent difference.
attributes::
defEU
Array of objective values at different uncertainty levels. i.e., 1
| |
import numpy as np
import sys
np.set_printoptions(linewidth=np.inf)
np.set_printoptions(precision=3)
import matplotlib.pyplot as plt
import math
# Grid and flow parameters. The domain appears to be a unit square (the
# top-row u = 1 boundary condition set below suggests a lid-driven cavity
# -- TODO confirm). Arrays later carry one ghost layer on each side, so
# field shapes are (n_y+2, n_x+2).
n_x=30  # number of cells in x
n_y=30  # number of cells in y
dx=1.0/n_x  # uniform cell width
dy=1.0/n_y  # uniform cell height
Re=100  # Reynolds number
def momentum_link_coefficients(u_star,u_face,v_face,p,source_x,source_y,A_p,A_e,A_w,A_n,A_s):
    """Assemble momentum link coefficients and pressure-gradient sources.

    Discretisation: central diffusion (the D_* conductances) plus
    first-order upwind convection (the max(0.0, +/-F) terms). Row index i
    runs from the top (i=1) to the bottom (i=n_y); column index j runs from
    the left (j=1) to the right (j=n_x); indices 0 and n+1 are ghost
    layers. A cell next to a wall doubles the diffusive conductance on the
    wall side (the wall sits half a cell from the centre) and uses a
    one-sided pressure difference there.

    Returns the updated (A_p, A_e, A_w, A_n, A_s, source_x, source_y).
    """
    #Diffusive conductances on a uniform grid.
    D_e=dy/(dx*Re)
    D_w=dy/(dx*Re)
    D_n=dx/(dy*Re)
    D_s=dx/(dy*Re)
    #interior cells
    for i in range(2,n_y):
        for j in range(2,n_x):
            #Convective mass fluxes through the four faces.
            F_e=dy*u_face[i,j]
            F_w=dy*u_face[i,j-1]
            F_n=dx*v_face[i-1,j]
            F_s=dx*v_face[i,j]
            A_e[i,j]=D_e + max(0.0,-F_e)
            A_w[i,j]=D_w + max(0.0,F_w)
            A_n[i,j]=D_n + max(0.0,-F_n)
            A_s[i,j]=D_s + max(0.0,F_s)
            A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e-F_w) + (F_n-F_s)
            #Central pressure difference over two cells.
            source_x[i,j]=0.5*(p[i,j-1]-p[i,j+1])*dx
            source_y[i,j]=0.5*(p[i+1,j]-p[i-1,j])*dy
    #left wall
    j=1
    for i in range(2,n_y):
        F_e=dy*u_face[i,j]
        F_w=dy*u_face[i,j - 1] #left face velocity is initialised to zero
        F_n=dx*v_face[i - 1,j]
        F_s=dx*v_face[i,j]
        A_e[i,j]=D_e + max(0.0,-F_e)
        A_w[i,j]=2*D_w + max(0.0,F_w) #wall on the west side
        A_n[i,j]=D_n + max(0.0,-F_n)
        A_s[i,j]=D_s + max(0.0,F_s)
        A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
        source_x[i,j]=0.5*(p[i,j] - p[i,j + 1])*dx # P_o - 0.5(P_o+P_e)
        source_y[i,j]=0.5*(p[i + 1,j] - p[i - 1,j])*dy
    #bottom wall
    i=n_y
    for j in range(2,n_x):
        F_e=dy*u_face[i,j]
        F_w=dy*u_face[i,j - 1]
        F_n=dx*v_face[i - 1,j]
        F_s=dx*v_face[i,j] #bottom wall v-velocity is already initialised to zero
        A_e[i,j]=D_e + max(0.0,-F_e)
        A_w[i,j]=D_w + max(0.0,F_w)
        A_n[i,j]=D_n + max(0.0,-F_n)
        A_s[i,j]=2*D_s + max(0.0,F_s) #wall on the south side
        A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
        source_x[i,j]=0.5*(p[i,j - 1] - p[i,j + 1])*dx
        source_y[i,j]=0.5*(p[i,j] - p[i - 1,j])*dy #P_o - 0.5(P_o+P_n)
    #right wall
    j=n_x
    for i in range(2,n_y):
        F_e=dy*u_face[i,j] #east (wall) face velocity is initialised to zero
        F_w=dy*u_face[i,j - 1]
        F_n=dx*v_face[i - 1,j]
        F_s=dx*v_face[i,j]
        #FIX: the wall is on the EAST side here, so A_e takes the doubled
        #conductance; the original doubled A_w, copied from the left wall.
        A_e[i,j]=2*D_e + max(0.0,-F_e)
        A_w[i,j]=D_w + max(0.0,F_w)
        A_n[i,j]=D_n + max(0.0,-F_n)
        A_s[i,j]=D_s + max(0.0,F_s)
        A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
        source_x[i,j]=0.5*(p[i,j-1] - p[i,j])*dx #0.5(P_w+P_o)-P_o
        source_y[i,j]=0.5*(p[i + 1,j] - p[i - 1,j])*dy
    #top wall
    i=1
    #FIX: this loop spans the columns, so the bound must be n_x; the
    #original used range(2,n_y), which only worked because n_x == n_y.
    for j in range(2,n_x):
        F_e=dy*u_face[i,j]
        F_w=dy*u_face[i,j - 1]
        F_n=dx*v_face[i - 1,j]
        F_s=dx*v_face[i,j]
        A_e[i,j]=D_e + max(0.0,-F_e)
        A_w[i,j]=D_w + max(0.0,F_w)
        A_n[i,j]=2*D_n + max(0.0,-F_n) #wall on the north side
        A_s[i,j]=D_s + max(0.0,F_s)
        A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
        source_x[i,j]=0.5*(p[i,j - 1] - p[i,j + 1])*dx
        source_y[i,j]=0.5*(p[i + 1,j] - p[i,j])*dy #0.5(P_s+P_o) - P_o
    #top left corner (walls on the west and north sides)
    i=1
    j=1
    F_e=dy*u_face[i,j]
    F_w=dy*u_face[i,j - 1]
    F_n=dx*v_face[i - 1,j]
    F_s=dx*v_face[i,j]
    A_e[i,j]=D_e + max(0.0,-F_e)
    A_w[i,j]=2*D_w + max(0.0,F_w)
    A_n[i,j]=2*D_n + max(0.0,-F_n)
    A_s[i,j]=D_s + max(0.0,F_s)
    A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
    source_x[i,j]=0.5*(p[i,j] - p[i,j + 1])*dx # P_o - 0.5(P_o+P_e)
    source_y[i,j]=0.5*(p[i + 1,j] - p[i,j])*dy #0.5(P_s+P_o) - P_o
    #top right corner (walls on the east and north sides)
    i=1
    j=n_x
    F_e=dy*u_face[i,j] #east (wall) face velocity is initialised to zero
    F_w=dy*u_face[i,j - 1]
    F_n=dx*v_face[i - 1,j]
    F_s=dx*v_face[i,j]
    #FIX: A_e and A_n take the doubled conductances here; the original
    #doubled A_w instead of A_e.
    A_e[i,j]=2*D_e + max(0.0,-F_e)
    A_w[i,j]=D_w + max(0.0,F_w)
    A_n[i,j]=2*D_n + max(0.0,-F_n)
    A_s[i,j]=D_s + max(0.0,F_s)
    A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
    source_x[i,j]=0.5*(p[i,j - 1] - p[i,j])*dx #0.5(P_w+P_o)-P_o
    source_y[i,j]=0.5*(p[i + 1,j] - p[i,j])*dy #0.5(P_s+P_o) - P_o
    #bottom left corner (walls on the west and south sides)
    i=n_y
    j=1
    F_e=dy*u_face[i,j]
    F_w=dy*u_face[i,j - 1] #left face velocity is initialised to zero
    F_n=dx*v_face[i - 1,j]
    F_s=dx*v_face[i,j]
    A_e[i,j]=D_e + max(0.0,-F_e)
    A_w[i,j]=2*D_w + max(0.0,F_w)
    A_n[i,j]=D_n + max(0.0,-F_n)
    A_s[i,j]=2*D_s + max(0.0,F_s)
    A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
    source_x[i,j]=0.5*(p[i,j] - p[i,j + 1])*dx # P_o - 0.5(P_o+P_e)
    source_y[i,j]=0.5*(p[i,j] - p[i - 1,j])*dy #P_o - 0.5(P_o+P_n)
    #bottom right corner (walls on the east and south sides)
    i=n_y
    j=n_x
    F_e=dy*u_face[i,j] #east (wall) face velocity is initialised to zero
    F_w=dy*u_face[i,j - 1]
    F_n=dx*v_face[i - 1,j]
    F_s=dx*v_face[i,j]
    #FIX: A_e and A_s take the doubled conductances here; the original
    #doubled A_w (copy of the left wall) and left A_s single.
    A_e[i,j]=2*D_e + max(0.0,-F_e)
    A_w[i,j]=D_w + max(0.0,F_w)
    A_n[i,j]=D_n + max(0.0,-F_n)
    A_s[i,j]=2*D_s + max(0.0,F_s)
    A_p[i,j]=A_w[i,j] + A_e[i,j] + A_n[i,j] + A_s[i,j] + (F_e - F_w) + (F_n - F_s)
    source_x[i,j]=0.5*(p[i,j - 1] - p[i,j])*dx #0.5(P_w+P_o)-P_o
    source_y[i,j]=0.5*(p[i,j] - p[i - 1,j])*dy #P_o - 0.5(P_o+P_n)
    return A_p,A_e,A_w,A_n,A_s,source_x,source_y
def solve(u,u_star,A_p,A_e,A_w,A_n,A_s,source_x,alpha,epsilon,max_inner_iteration,l2_norm):
    """Iterate the linear system with under-relaxed Gauss-Seidel sweeps.

    Performs up to max_inner_iteration sweeps over the interior cells,
    blending each update with the previous outer-iteration field u_star by
    the relaxation factor alpha. After each sweep the L2 norm of the
    residual is evaluated; iteration stops early once it drops below
    epsilon.

    Returns the updated field u and the residual norm of the FIRST sweep
    (the caller uses it as an outer-iteration convergence measure).

    Note: the incoming l2_norm argument is only a scratch value and is
    overwritten; the signature is kept for the existing call sites.
    """
    for n in range(1,max_inner_iteration+1):
        #Gauss-Seidel sweep, in place: west/north neighbours already hold
        #this sweep's values, east/south still hold the previous ones.
        for i in range(1,n_y+1):
            for j in range(1,n_x+1):
                u[i,j]= alpha*(A_e[i,j]*u[i,j+1] + A_w[i,j]*u[i,j-1] + A_n[i,j]*u[i-1,j] + A_s[i,j]*u[i+1,j] + source_x[i,j])/A_p[i,j] + (1-alpha)*u_star[i,j]
        #Residual norm AFTER the sweep. (The original also accumulated a
        #residual inside the sweep itself, but each of those terms is
        #identically zero -- u[i,j] had just been assigned the very same
        #expression -- so dropping that accumulation changes nothing.)
        l2_norm=0
        for i in range(1,n_y+1):
            for j in range(1,n_x+1):
                l2_norm+=(u[i,j] - alpha*(A_e[i,j]*u[i,j+1] + A_w[i,j]*u[i,j-1] + A_n[i,j]*u[i - 1,j] + A_s[i,j]*u[i+1,j] +source_x[i,j])/A_p[i,j] - (1-alpha)*u_star[i,j])**2
        if(n==1):
            #Remember the first-sweep residual for the outer loop.
            norm=math.sqrt(l2_norm)
        l2_norm=math.sqrt(l2_norm)
        if(l2_norm<epsilon):
            #print("Converged in ",n, " iterations")
            break
    return u,norm
def face_velocity(u,v,u_face,v_face,p,A_p,alpha_uv):
    """Compute face velocities by momentum interpolation (Rhie-Chow style).

    Each face velocity is the average of the two adjacent cell-centre
    velocities plus pressure-dissipation terms built from 1/A_p, which
    suppress checkerboard pressure fields on this collocated grid.
    u_face[i,j] lies between cells (i,j) and (i,j+1); v_face[i-1,j] lies
    between cells (i,j) and (i-1,j). Returns (u_face, v_face).
    """
    #uface velocity
    for i in range(1,n_y+1):
        for j in range(1,n_x):
            u_face[i,j]=0.5*(u[i,j] + u[i,j + 1]) + 0.25*alpha_uv*(p[i,j + 1] - p[i,j - 1])*dy/A_p[
                i,j] + 0.25*alpha_uv*(p[i,j + 2] - p[i,j])*dy/A_p[i,j + 1]\
                - 0.5*alpha_uv*(1/A_p[i,j] + 1/A_p[i,j + 1])*(p[i,j + 1] - p[i,j])*dy
    #v face velocity
    #NOTE(review): the v-face terms scale the pressure differences by dy;
    #a dx factor would be expected for the y-direction, but dx == dy on
    #this grid so the result is unaffected. Confirm before running with a
    #non-square cell aspect ratio.
    for i in range(2,n_y+1):
        for j in range(1,n_x+1):
            v_face[i-1,j]=0.5*(v[i,j] + v[i - 1,j]) + 0.25*alpha_uv*(p[i - 1,j] - p[i + 1,j])*dy/A_p[i,j] + 0.25*alpha_uv*(
                p[i - 2,j] - p[i,j])*dy/A_p[i - 1,j]\
                - 0.5*alpha_uv*(1/A_p[i,j] + 1/A_p[i - 1,j])*(p[i - 1,j] - p[i,j])*dy
    return u_face,v_face
def pressure_correction_link_coefficients(u,u_face,v_face,Ap_p,Ap_e,Ap_w,Ap_n,Ap_s,source_p,A_p,A_e,A_w,A_n,A_s,alpha_uv):
    """Build the pressure-correction link coefficients and mass source.

    Every interior face contributes 0.5*alpha_uv*(1/A_p(own) + 1/A_p(nbr))
    times the squared face length; a face lying on a wall contributes
    nothing, which is expressed below by zeroing the coefficient at the
    corresponding boundary index. The source term is the cell's mass
    imbalance computed from the current face velocities. Returns
    (Ap_p, Ap_e, Ap_w, Ap_n, Ap_s, source_p).
    """
    half = 0.5*alpha_uv
    for i in range(1,n_y+1):
        for j in range(1,n_x+1):
            inv_own = 1/A_p[i,j]
            #Zero the coefficient on any wall-adjacent side; otherwise use
            #the interior two-cell harmonic-style combination.
            Ap_e[i,j]=0 if j==n_x else half*(inv_own + 1/A_p[i,j + 1])*(dy**2)
            Ap_w[i,j]=0 if j==1   else half*(inv_own + 1/A_p[i,j - 1])*(dy**2)
            Ap_n[i,j]=0 if i==1   else half*(inv_own + 1/A_p[i - 1,j])*(dx**2)
            Ap_s[i,j]=0 if i==n_y else half*(inv_own + 1/A_p[i + 1,j])*(dx**2)
            Ap_p[i,j]=Ap_e[i,j] + Ap_w[i,j] + Ap_n[i,j] + Ap_s[i,j]
            #Net mass imbalance of the cell (continuity defect).
            source_p[i,j]=-(u_face[i,j] - u_face[i,j - 1])*dy \
                          - (v_face[i - 1,j] - v_face[i,j])*dx
    return Ap_p,Ap_e,Ap_w,Ap_n,Ap_s,source_p
def correct_pressure(p_star,p,p_prime,alpha_p):
    """Apply the under-relaxed pressure correction and refresh ghost cells.

    Builds the corrected field from p + alpha_p * p_prime (note: the
    incoming p_star array is not reused -- a new array is returned), then
    fills the ghost layer: each wall copies its adjacent interior values
    (zero-gradient pressure) and each corner takes the average of its
    three populated neighbours.
    """
    corrected = p + alpha_p*p_prime
    #Zero-gradient walls: copy the first interior row/column outward.
    corrected[0,1:n_x+1] = corrected[1,1:n_x+1]          #top wall
    corrected[1:n_y+1,0] = corrected[1:n_y+1,1]          #left wall
    corrected[1:n_y+1,n_x+1] = corrected[1:n_y+1,n_x]    #right wall
    corrected[n_y+1,1:n_x+1] = corrected[n_y,1:n_x+1]    #bottom wall
    #Corner ghost cells: average of the three neighbouring values.
    corrected[0,0] = (corrected[1,2] + corrected[0,1] + corrected[1,0])/3
    corrected[0,n_x+1] = (corrected[0,n_x] + corrected[1,n_x] + corrected[1,n_x+1])/3
    corrected[n_y+1,0] = (corrected[n_y,0] + corrected[n_y,1] + corrected[n_y+1,1])/3
    corrected[n_y+1,n_x+1] = (corrected[n_y,n_x+1] + corrected[n_y+1,n_x] + corrected[n_y,n_x])/3
    return corrected
def correct_cell_center_velocity(u,v,u_star,v_star,p_prime,A_p,alpha_uv):
    """Correct cell-centre velocities with the pressure-correction gradient.

    Interior cells use a central difference of p_prime over two cells;
    wall-adjacent cells use a one-sided difference, expressed here by
    clamping the neighbour index to the interior range (the clamped index
    then coincides with the cell itself). Returns (u_star, v_star).
    """
    #u correction: x-direction gradient of p_prime.
    for i in range(1,n_y+1):
        for j in range(1,n_x+1):
            jw = max(j - 1, 1)    #west neighbour, clamped at the left wall
            je = min(j + 1, n_x)  #east neighbour, clamped at the right wall
            u_star[i,j]=u[i,j] + 0.5*alpha_uv*(p_prime[i,jw] - p_prime[i,je])*dy/A_p[i,j]
    #v correction: y-direction gradient of p_prime (row 1 is the top).
    for i in range(1,n_y+1):
        for j in range(1,n_x+1):
            i_n = max(i - 1, 1)    #north neighbour, clamped at the top wall
            i_s = min(i + 1, n_y)  #south neighbour, clamped at the bottom wall
            v_star[i,j]=v[i,j] + 0.5*alpha_uv*(p_prime[i_s,j] - p_prime[i_n,j])*dx/A_p[i,j]
    return u_star,v_star
def correct_face_velocity(u_face,v_face,p_prime,A_p,alpha_uv):
    """Correct interior face velocities with the pressure-correction jump.

    Each interior face gets a correction proportional to the p_prime
    difference across it, weighted by the two adjacent 1/A_p values.
    Boundary faces are left untouched. Returns (u_face, v_face).
    """
    coeff = 0.5*alpha_uv
    #u faces, between columns col and col+1.
    for row in range(1,n_y+1):
        for col in range(1,n_x):
            u_face[row,col] = u_face[row,col] + coeff*(1/A_p[row,col] + 1/A_p[row,col+1])*(p_prime[row,col] - p_prime[row,col+1])*dy
    #v faces, between rows row-1 and row.
    for row in range(2,n_y+1):
        for col in range(1,n_x+1):
            v_face[row-1,col] = v_face[row-1,col] + coeff*(1/A_p[row,col] + 1/A_p[row-1,col])*(p_prime[row,col] - p_prime[row-1,col])*dx
    return u_face,v_face
def post_processing(u_star,v_star,p_star,X,Y,x,y):
    """Render contour plots of u, v, p and the centreline velocity profiles."""
    def _contour(fig_no, field, title):
        #Fields are stored top-row-first, so flip vertically for plotting.
        plt.figure(fig_no)
        plt.contourf(X,Y,np.flipud(field),levels=50,cmap='jet')
        plt.colorbar()
        plt.title(title)
        plt.show()
    _contour(1, u_star, 'U contours')
    _contour(2, v_star, 'V contours')
    _contour(3, p_star, 'P contours')
    #u along the vertical centreline (1-y flips the axis back to physical y).
    plt.figure(4)
    plt.plot(1-y,u_star[:,round(n_x/2)])
    plt.xlabel('y')
    plt.ylabel('u')
    plt.title('U centerline velocity')
    plt.show()
    #v along the horizontal centreline.
    plt.figure(5)
    plt.plot(x,v_star[round(n_y/2),:])
    plt.xlabel('x')
    plt.ylabel('v')
    plt.title('V centerline velocity')
    plt.show()
#Field arrays: interior cells plus one ghost layer on each side.
_shape=(n_y+2,n_x+2)
u=np.zeros(_shape,dtype=np.float64)
u_star=np.zeros(_shape,dtype=np.float64)
v=np.zeros(_shape,dtype=np.float64)
v_star=np.zeros(_shape,dtype=np.float64)
p_star=np.zeros(_shape,dtype=np.float64)
p=np.zeros(_shape,dtype=np.float64)
p_prime=np.zeros(_shape,dtype=np.float64)
#Momentum link coefficients (ones, so 1/A_p is safe before first assembly).
A_p=np.ones(_shape,dtype=np.float64)
A_e=np.ones(_shape,dtype=np.float64)
A_w=np.ones(_shape,dtype=np.float64)
A_s=np.ones(_shape,dtype=np.float64)
A_n=np.ones(_shape,dtype=np.float64)
#Pressure-correction link coefficients.
Ap_p=np.ones(_shape,dtype=np.float64)
Ap_e=np.ones(_shape,dtype=np.float64)
Ap_w=np.ones(_shape,dtype=np.float64)
Ap_s=np.ones(_shape,dtype=np.float64)
Ap_n=np.ones(_shape,dtype=np.float64)
#Source terms.
source_x=np.zeros(_shape,dtype=np.float64)
source_y=np.zeros(_shape,dtype=np.float64)
source_p=np.zeros(_shape,dtype=np.float64)
#Face velocities (one fewer face than padded cells along the face normal).
u_face=np.zeros((n_y+2,n_x+1),dtype=np.float64)
v_face=np.zeros((n_y+1,n_x+2),dtype=np.float64)
#Grid: boundary points at 0 and 1 with cell centres in between.
x=np.concatenate(([0.0], np.linspace(dx/2,1-dx/2,n_x), [1.0]))
y=np.concatenate(([0.0], np.linspace(dy/2,1-dy/2,n_y), [1.0]))
X,Y=np.meshgrid(x,y)
#Boundary conditions: top wall (lid) moves with u = 1.
u[0,1:n_x+1]=1
u_star[0,1:n_x+1]=1
u_face[0,1:n_x]=1
#Solver settings.
l2_norm_x=0
alpha_uv=0.7               #velocity under-relaxation factor
epsilon_uv=1e-3            #inner-iteration tolerance for momentum
max_inner_iteration_uv=50
l2_norm_y=0
l2_norm_p=0
max_inner_iteration_p=200
dummy_alpha_p=1            #pressure-correction system solved unrelaxed
epsilon_p=1e-4             #inner-iteration tolerance for pressure correction
alpha_p=0.2                #pressure under-relaxation factor
max_outer_iteration=200
for n in range(1,max_outer_iteration+1):
A_p,A_e,A_w,A_n,A_s,source_x,source_y=momentum_link_coefficients(u_star,u_face,v_face,p,source_x,source_y,A_p,A_e,A_w,A_n,A_s)
u,l2_norm_x=solve(u,u_star,A_p,A_e,A_w,A_n,A_s,source_x,alpha_uv,epsilon_uv,max_inner_iteration_uv,l2_norm_x)
v,l2_norm_y=solve(v,v_star,A_p,A_e,A_w,A_n,A_s,source_y,alpha_uv,epsilon_uv,max_inner_iteration_uv,l2_norm_y)
u_face,v_face=face_velocity(u,v,u_face,v_face,p,A_p,alpha_uv)
Ap_p,Ap_e,Ap_w,Ap_n,Ap_s,source_p=pressure_correction_link_coefficients(u,u_face,v_face,Ap_p,Ap_e,Ap_w,Ap_n,Ap_s,source_p,A_p,A_e,A_w,A_n,A_s,alpha_uv)
p_prime,l2_norm_p=solve(p_prime,p_prime,Ap_p,Ap_e,Ap_w,Ap_n,Ap_s,source_p,dummy_alpha_p,epsilon_p,max_inner_iteration_p,l2_norm_p)
p_star=correct_pressure(p_star,p,p_prime,alpha_p)
u_star,v_star=correct_cell_center_velocity(u,v,u_star,v_star,p_prime,A_p,alpha_uv)
u_face,v_face=correct_face_velocity(u_face,v_face,p_prime,A_p,alpha_uv)
p=np.copy(p_star)
| |
1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 202
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
217 225 218 226 313 321 314 322
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 203
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
218 226 219 227 314 322 315 323
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 204
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
219 227 220 228 315 323 316 324
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 205
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
220 228 221 229 316 324 317 325
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 206
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
221 229 222 230 317 325 318 326
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 207
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
222 230 223 231 318 326 319 327
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 208
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
223 231 224 232 319 327 320 328
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 209
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
232 240 225 233 328 336 321 329
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 210
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
225 233 226 234 321 329 322 330
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 211
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
226 234 227 235 322 330 323 331
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 212
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
227 235 228 236 323 331 324 332
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 213
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
228 236 229 237 324 332 325 333
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0
1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0
1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
Element: 214
Faces:
-1 -1 -1 -1 -1 -1
Nodes:
229 237 230 238 325 333 326 334
Scale factors:
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0 1.0
1.0 | |
"""Class implementing TRS client."""
from functools import partial
import json
import logging
from pathlib import Path
import re
import shutil
import socket
import sys
from typing import (Dict, List, Optional, Tuple, Type, Union)
import urllib3
from urllib.parse import quote
import pydantic
from pydantic.main import ModelMetaclass
import requests
from trs_cli.errors import (
exception_handler,
ContentTypeUnavailable,
FileInformationUnavailable,
InvalidURI,
InvalidResourceIdentifier,
InvalidResponseError,
)
from trs_cli.models import (
Error,
FileType,
FileWrapper,
Service,
ServiceRegister,
Tool,
ToolClass,
ToolClassRegister,
ToolFile,
ToolRegister,
ToolVersion,
ToolVersionRegister,
)
logger = logging.getLogger(__name__)
sys.excepthook = partial(exception_handler, print_traceback=False)
class TRSClient():
"""Client to communicate with a GA4GH TRS instance. Supports additional
endpoints defined in TRS-Filer
(https://github.com/elixir-cloud-aai/trs-filer).
Arguments:
uri: Either the base URI of the TRS instance to connect to in either
'https' or 'http' schema (note that fully compliant TRS instances
will use 'https' exclusively), e.g., `https://my-trs.app`, OR a
hostname-based TRS URI, cf.
https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs_uris
port: Override default port at which the TRS instance can be accessed.
Only required for TRS instances that are not fully spec-compliant,
as the default port is defined in the TRS documentation, cf.
https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs_uris
base-path: Override default path at which the TRS API is accessible at
the given TRS instance. Only required for TRS instances that are
not fully spec-compliant, as the default port is defined in the TRS
documentation, cf.
https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs_uris
use_http: Set the URI schema of the TRS instance to `http` instead of
`https`when a TRS URI was provided to `uri`.
token: Bearer token to send along with TRS API requests. Set if
required by TRS implementation. Alternatively, specify in API
endpoint access methods.
Attributes:
uri: URI to TRS endpoints, built from `uri`, `port` and `base_path`,
e.g.,"https://my-trs.app:443/ga4gh/trs/v2".
token: Bearer token for gaining access to TRS endpoints.
headers: Dictionary of request headers.
"""
# set regular expressions as private class variables
_RE_DOMAIN_PART = r'[a-z0-9]([a-z0-9-]{,61}[a-z0-9])?'
_RE_DOMAIN = rf"({_RE_DOMAIN_PART}\.)+{_RE_DOMAIN_PART}\.?"
_RE_TRS_ID = r'([a-z0-9-_~\.%#]+)'
_RE_VERSION_ID = rf"^(?P<version_id>{_RE_TRS_ID})$"
_RE_HOST = (
rf"^(?P<schema>trs|http|https):\/\/(?P<host>{_RE_DOMAIN})(\/\S+)?$"
)
_RE_TRS_URI_OR_TOOL_ID = (
rf"^(trs:\/\/{_RE_DOMAIN}\/)?(?P<tool_id>{_RE_TRS_ID})"
rf"(\/(?P<version_id>{_RE_TRS_ID}))?$"
)
# class configuration variables
no_validate: bool = False
@classmethod
def config(
cls,
debug: bool = False,
no_validate: bool = False,
) -> None:
"""Class configuration.
Args:
debug: Set to print error tracebacks.
no_validate: Set to skip validation of error responses.
"""
if debug:
sys.excepthook = partial(exception_handler, print_traceback=True)
else:
sys.excepthook = partial(exception_handler, print_traceback=False)
cls.no_validate = no_validate
def __init__(
self,
uri: str,
port: int = None,
base_path: str = 'ga4gh/trs/v2',
use_http: bool = False,
token: Optional[str] = None,
) -> None:
"""Class constructor."""
schema, host = self._get_host(uri)
if schema == 'trs':
schema = 'http' if use_http else 'https'
if port is None:
port = 80 if schema == 'http' else 443
self.uri = f"{schema}://{host}:{port}/{base_path}"
self.token = token
self.headers = {}
logger.info(f"Instantiated client for: {self.uri}")
    def post_service_info(
        self,
        payload: Dict,
        token: Optional[str] = None,
    ) -> None:
        """Register service info.

        Arguments:
            payload: Service info data; validated against the
                `ServiceRegister` schema before being sent.
            token: Bearer token for authentication. Set if required by TRS
                implementation and if not provided when instatiating client or
                if expired.

        Raises:
            requests.exceptions.ConnectionError: A connection to the provided
                TRS instance could not be established.
            pydantic.ValidationError: The object data payload could not
                be validated against the API schema.
            trs_cli.errors.InvalidResponseError: The response could not be
                validated against the API schema.
        """
        # validate requested content type and get request headers
        self._get_headers(
            content_type='application/json',
            token=token,
        )
        # build request URL
        url = f"{self.uri}/service-info"
        logger.info(f"Connecting to '{url}'...")
        # validate payload: raises pydantic.ValidationError on a malformed
        # payload; the serialized dict result itself is discarded
        ServiceRegister(**payload).dict()
        # send request
        response = self._send_request_and_validate_response(
            url=url,
            method='post',
            payload=payload,
        )
        logger.info(
            "Registered service info"
        )
        # NOTE(review): the signature is annotated '-> None' yet the
        # validated response is returned here -- confirm the intended
        # return type and align the annotation.
        return response  # type: ignore
def get_service_info(
self,
accept: str = 'application/json',
token: Optional[str] = None,
) -> Union[Service, Error]:
"""Retrieve service info.
Arguments:
accept: Requested content type.
token: Bearer token for authentication. Set if required by TRS
implementation and if not provided when instatiating client or
if expired.
Returns:
Unmarshalled TRS response as either an instance of `Service`
in case of a `200` response, or an instance of `Error` for all
other JSON reponses.
Raises:
requests.exceptions.ConnectionError: A connection to the provided
TRS instance could not be established.
trs_cli.errors.InvalidResponseError: The response could not be
validated against the API schema.
"""
# validate requested content type and get request headers
self._validate_content_type(
requested_type=accept,
available_types=['application/json'],
)
self._get_headers(
content_accept=accept,
token=token,
)
# build request URL
url = f"{self.uri}/service-info"
logger.info(f"Connecting to '{url}'...")
# send request
response = self._send_request_and_validate_response(
url=url,
json_validation_class=Service,
)
logger.info(
"Retrieved service info"
)
return response # type: ignore
def post_tool_class(
self,
payload: Dict,
accept: str = 'application/json',
token: Optional[str] = None,
) -> str:
"""Register a tool class.
Arguments:
payload: Tool class data.
accept: Requested content type.
token: Bearer token for authentication. Set if required by TRS
implementation and if not provided when instatiating client or
if expired.
Returns:
ID of registered TRS toolClass in case of a `200` response, or an
instance of `Error` for all other responses.
Raises:
requests.exceptions.ConnectionError: A connection to the provided
TRS instance could not be established.
pydantic.ValidationError: The object data payload could not
be validated against the API schema.
trs_cli.errors.InvalidResponseError: The response could not be
validated against the API schema.
"""
# validate requested content type and get request headers
self._validate_content_type(
requested_type=accept,
available_types=['application/json'],
)
self._get_headers(
content_accept=accept,
content_type='application/json',
token=token,
)
# build request URL
url = f"{self.uri}/toolClasses"
logger.info(f"Connecting to '{url}'...")
# validate payload
ToolClassRegister(**payload).dict()
# send request
response = self._send_request_and_validate_response(
url=url,
method='post',
payload=payload,
json_validation_class=str,
)
logger.info(
"Registered tool class"
)
return response # type: ignore
def put_tool_class(
self,
id: str,
payload: Dict,
accept: str = 'application/json',
token: Optional[str] = None,
) -> str:
"""
Create a tool class with a predefined unique ID.
Overwrites any existing tool object with the same ID.
Arguments:
id: Identifier of tool class to be created/overwritten.
payload: Tool class data.
accept: Requested content type.
token: Bearer token for authentication. Set if required by TRS
implementation and if not provided when instatiating client or
if expired.
Returns:
ID of registered TRS toolClass in case of a `200` response, or an
instance of `Error` for all other responses.
Raises:
requests.exceptions.ConnectionError: A connection to the provided
TRS instance could not be established.
pydantic.ValidationError: The object data payload could not
be validated against the API schema.
trs_cli.errors.InvalidResponseError: The response could not be
validated against the API schema.
"""
# validate requested content type and get request headers
self._validate_content_type(
requested_type=accept,
available_types=['application/json'],
)
self._get_headers(
content_accept=accept,
content_type='application/json',
token=token,
)
# build request URL
url = f"{self.uri}/toolClasses/{id}"
logger.info(f"Connecting to '{url}'...")
# validate payload
ToolClassRegister(**payload).dict()
# send request
response = self._send_request_and_validate_response(
url=url,
method='put',
payload=payload,
json_validation_class=str,
)
logger.info(
f"Registered tool class with id : {id}"
)
return response # type: ignore
def delete_tool_class(
self,
id: str,
accept: str = 'application/json',
token: Optional[str] = None,
) -> str:
"""Delete a tool class.
Arguments:
id: Identifier of tool class to be deleted.
accept: Requested content type.
token: Bearer token for authentication. Set if required by TRS
implementation and if not provided when instatiating client or
if expired.
Returns:
ID of deleted TRS toolClass in case of a `200` response, or an
instance of `Error` for all other responses.
Raises:
requests.exceptions.ConnectionError: A connection to the provided
TRS instance could not be established.
trs_cli.errors.InvalidResponseError: The response could not be
validated against the API schema.
"""
# validate requested content type and get request headers
self._validate_content_type(
requested_type=accept,
available_types=['application/json'],
)
self._get_headers(
content_accept=accept,
token=token,
)
# build request URL
url = f"{self.uri}/toolClasses/{id}"
logger.info(f"Connecting to '{url}'...")
# send request
response = self._send_request_and_validate_response(
url=url,
method='delete',
json_validation_class=str,
)
logger.info(
"Deleted tool class"
)
return response # type: ignore
def post_tool(
self,
payload: Dict,
accept: str = 'application/json',
token: Optional[str] = None,
) -> str:
"""Register a tool.
Arguments:
payload: Tool data.
accept: Requested content type.
token: Bearer token for authentication. Set if required by TRS
implementation and if not provided when instatiating client or
if expired.
Returns:
ID of registered TRS tool in case of a `200` response, or an
instance of `Error` for all other responses.
Raises:
requests.exceptions.ConnectionError: A connection to the provided
TRS instance could not be established.
pydantic.ValidationError: The object data payload could not
be validated against the API schema.
trs_cli.errors.InvalidResponseError: The response could not | |
False
# The date format used by logging formatters for %(asctime)s
# c.IPClusterEngines.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Deprecated, use engine_launcher_class
# c.IPClusterEngines.engine_launcher = None
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.IPClusterEngines.verbose_crash = False
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.IPClusterEngines.ipython_dir = u''
# The Logging format template
# c.IPClusterEngines.log_format = '[%(name)s]%(highlevel)s %(message)s'
# Whether to overwrite existing config files when copying
# c.IPClusterEngines.overwrite = False
#------------------------------------------------------------------------------
# LocalControllerLauncher configuration
#------------------------------------------------------------------------------
# Launch a controller as a regular external process.
# command-line args to pass to ipcontroller
# c.LocalControllerLauncher.controller_args = ['--log-to-file', '--log-level=20']
# Popen command to launch ipcontroller.
# c.LocalControllerLauncher.controller_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.controller']
#------------------------------------------------------------------------------
# LocalEngineLauncher configuration
#------------------------------------------------------------------------------
# Launch a single engine as a regular external process.
# command-line arguments to pass to ipengine
# c.LocalEngineLauncher.engine_args = ['--log-to-file', '--log-level=20']
# command to launch the Engine.
# c.LocalEngineLauncher.engine_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.engine']
#------------------------------------------------------------------------------
# LocalEngineSetLauncher configuration
#------------------------------------------------------------------------------
# Launch a set of engines as regular external processes.
# LocalEngineSetLauncher will inherit config from: LocalEngineLauncher
# delay (in seconds) between starting each engine after the first. This can help
# force the engines to get their ids in order, or limit process flood when
# starting many engines.
# c.LocalEngineSetLauncher.delay = 0.1
# command-line arguments to pass to ipengine
# c.LocalEngineSetLauncher.engine_args = ['--log-to-file', '--log-level=20']
# command to launch the Engine.
# c.LocalEngineSetLauncher.engine_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.engine']
#------------------------------------------------------------------------------
# MPILauncher configuration
#------------------------------------------------------------------------------
# Launch an external process using mpiexec.
# The command line arguments to pass to mpiexec.
# c.MPILauncher.mpi_args = []
# The mpiexec command to use in starting the process.
# c.MPILauncher.mpi_cmd = ['mpiexec']
#------------------------------------------------------------------------------
# MPIControllerLauncher configuration
#------------------------------------------------------------------------------
# Launch a controller using mpiexec.
# MPIControllerLauncher will inherit config from: MPILauncher
# The command line arguments to pass to mpiexec.
# c.MPIControllerLauncher.mpi_args = []
# The mpiexec command to use in starting the process.
# c.MPIControllerLauncher.mpi_cmd = ['mpiexec']
# command-line args to pass to ipcontroller
# c.MPIControllerLauncher.controller_args = ['--log-to-file', '--log-level=20']
# Popen command to launch ipcontroller.
# c.MPIControllerLauncher.controller_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.controller']
#------------------------------------------------------------------------------
# MPIEngineSetLauncher configuration
#------------------------------------------------------------------------------
# Launch engines using mpiexec
# MPIEngineSetLauncher will inherit config from: MPILauncher
# The command line arguments to pass to mpiexec.
# c.MPIEngineSetLauncher.mpi_args = []
# The mpiexec command to use in starting the process.
# c.MPIEngineSetLauncher.mpi_cmd = ['mpiexec']
# command-line arguments to pass to ipengine
# c.MPIEngineSetLauncher.engine_args = ['--log-to-file', '--log-level=20']
# command to launch the Engine.
# c.MPIEngineSetLauncher.engine_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.engine']
#------------------------------------------------------------------------------
# SSHLauncher configuration
#------------------------------------------------------------------------------
# A minimal launcher for ssh.
#
# To be useful this will probably have to be extended to use the ``sshx`` idea
# for environment variables. There could be other things this needs as well.
# hostname on which to launch the program
# c.SSHLauncher.hostname = ''
# command for starting ssh
# c.SSHLauncher.ssh_cmd = ['ssh']
# user@hostname location for ssh in one setting
# c.SSHLauncher.location = ''
# List of (local, remote) files to send before starting
# c.SSHLauncher.to_send = []
# command for sending files
# c.SSHLauncher.scp_cmd = ['scp']
# List of (remote, local) files to fetch after starting
# c.SSHLauncher.to_fetch = []
# args to pass to ssh
# c.SSHLauncher.ssh_args = ['-tt']
# username for ssh
# c.SSHLauncher.user = ''
#------------------------------------------------------------------------------
# SSHControllerLauncher configuration
#------------------------------------------------------------------------------
# SSHControllerLauncher will inherit config from: SSHClusterLauncher,
# SSHLauncher
# hostname on which to launch the program
# c.SSHControllerLauncher.hostname = ''
# Popen command to launch ipcontroller.
# c.SSHControllerLauncher.controller_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.controller']
# command-line args to pass to ipcontroller
# c.SSHControllerLauncher.controller_args = ['--log-to-file', '--log-level=20']
# command for starting ssh
# c.SSHControllerLauncher.ssh_cmd = ['ssh']
# The remote profile_dir to use.
#
# If not specified, use calling profile, stripping out possible leading homedir.
# c.SSHControllerLauncher.remote_profile_dir = ''
# user@hostname location for ssh in one setting
# c.SSHControllerLauncher.location = ''
# List of (local, remote) files to send before starting
# c.SSHControllerLauncher.to_send = []
# command for sending files
# c.SSHControllerLauncher.scp_cmd = ['scp']
# List of (remote, local) files to fetch after starting
# c.SSHControllerLauncher.to_fetch = []
# args to pass to ssh
# c.SSHControllerLauncher.ssh_args = ['-tt']
# username for ssh
# c.SSHControllerLauncher.user = ''
#------------------------------------------------------------------------------
# SSHEngineLauncher configuration
#------------------------------------------------------------------------------
# SSHEngineLauncher will inherit config from: SSHClusterLauncher, SSHLauncher
# hostname on which to launch the program
# c.SSHEngineLauncher.hostname = ''
# command to launch the Engine.
# c.SSHEngineLauncher.engine_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.engine']
# command-line arguments to pass to ipengine
# c.SSHEngineLauncher.engine_args = ['--log-to-file', '--log-level=20']
# command for starting ssh
# c.SSHEngineLauncher.ssh_cmd = ['ssh']
# The remote profile_dir to use.
#
# If not specified, use calling profile, stripping out possible leading homedir.
# c.SSHEngineLauncher.remote_profile_dir = ''
# user@hostname location for ssh in one setting
# c.SSHEngineLauncher.location = ''
# List of (local, remote) files to send before starting
# c.SSHEngineLauncher.to_send = []
# command for sending files
# c.SSHEngineLauncher.scp_cmd = ['scp']
# List of (remote, local) files to fetch after starting
# c.SSHEngineLauncher.to_fetch = []
# args to pass to ssh
# c.SSHEngineLauncher.ssh_args = ['-tt']
# username for ssh
# c.SSHEngineLauncher.user = ''
#------------------------------------------------------------------------------
# SSHEngineSetLauncher configuration
#------------------------------------------------------------------------------
# SSHEngineSetLauncher will inherit config from: LocalEngineSetLauncher,
# LocalEngineLauncher
# delay (in seconds) between starting each engine after the first. This can help
# force the engines to get their ids in order, or limit process flood when
# starting many engines.
# c.SSHEngineSetLauncher.delay = 0.1
# command-line arguments to pass to ipengine
# c.SSHEngineSetLauncher.engine_args = ['--log-to-file', '--log-level=20']
# dict of engines to launch. This is a dict by hostname of ints, corresponding
# to the number of engines to start on that host.
# c.SSHEngineSetLauncher.engines = {}
# command to launch the Engine.
# c.SSHEngineSetLauncher.engine_cmd = ['/homes/scollis/anaconda/bin/python', '-m', 'IPython.parallel.engine']
#------------------------------------------------------------------------------
# SSHProxyEngineSetLauncher configuration
#------------------------------------------------------------------------------
# Launcher for calling `ipcluster engines` on a remote machine.
#
# Requires that remote profile is already configured.
# SSHProxyEngineSetLauncher will inherit config from: SSHClusterLauncher,
# SSHLauncher
#
# c.SSHProxyEngineSetLauncher.ipcluster_cmd = ['ipcluster']
# hostname on which to launch the program
# c.SSHProxyEngineSetLauncher.hostname = ''
# command for starting ssh
# c.SSHProxyEngineSetLauncher.ssh_cmd = ['ssh']
# The remote profile_dir to use.
#
# If not specified, use calling profile, stripping out possible leading homedir.
# c.SSHProxyEngineSetLauncher.remote_profile_dir = ''
# user@hostname location for ssh in one setting
# c.SSHProxyEngineSetLauncher.location = ''
# List of (local, remote) files to send before starting
# c.SSHProxyEngineSetLauncher.to_send = []
# command for sending files
# c.SSHProxyEngineSetLauncher.scp_cmd = ['scp']
# List of (remote, local) files to fetch after starting
# c.SSHProxyEngineSetLauncher.to_fetch = []
# args to pass to ssh
# c.SSHProxyEngineSetLauncher.ssh_args = ['-tt']
# username for ssh
# c.SSHProxyEngineSetLauncher.user = ''
#------------------------------------------------------------------------------
# WindowsHPCLauncher configuration
#------------------------------------------------------------------------------
# A regular expression used to get the job id from the output of the
# submit_command.
# c.WindowsHPCLauncher.job_id_regexp = '\\d+'
# The filename of the instantiated job script.
# c.WindowsHPCLauncher.job_file_name = u'ipython_job.xml'
# The command for submitting jobs.
# c.WindowsHPCLauncher.job_cmd = 'job'
# The hostname of the scheduler to submit the job to.
# c.WindowsHPCLauncher.scheduler = ''
#------------------------------------------------------------------------------
# WindowsHPCControllerLauncher configuration
#------------------------------------------------------------------------------
# WindowsHPCControllerLauncher will inherit config from: WindowsHPCLauncher
# A regular expression used to get the job id from the output of the
# submit_command.
# c.WindowsHPCControllerLauncher.job_id_regexp = '\\d+'
# WinHPC xml job file.
# c.WindowsHPCControllerLauncher.job_file_name = u'ipcontroller_job.xml'
# The command for submitting jobs.
# c.WindowsHPCControllerLauncher.job_cmd = 'job'
# The hostname of the scheduler to submit the job to.
# c.WindowsHPCControllerLauncher.scheduler = ''
#------------------------------------------------------------------------------
# WindowsHPCEngineSetLauncher configuration
#------------------------------------------------------------------------------
# WindowsHPCEngineSetLauncher will inherit config from: WindowsHPCLauncher
# A regular expression used to get the job id from the output of the
# submit_command.
# c.WindowsHPCEngineSetLauncher.job_id_regexp = '\\d+'
# jobfile for ipengines job
# c.WindowsHPCEngineSetLauncher.job_file_name = u'ipengineset_job.xml'
# The command for submitting jobs.
# c.WindowsHPCEngineSetLauncher.job_cmd = 'job'
# The hostname of the scheduler to submit the job to.
# c.WindowsHPCEngineSetLauncher.scheduler = ''
#------------------------------------------------------------------------------
# PBSLauncher configuration
#------------------------------------------------------------------------------
# A BatchSystemLauncher subclass for PBS.
# PBSLauncher will inherit config from: BatchSystemLauncher
# Regular expression for identifying the job ID [r'\d+']
# c.PBSLauncher.job_id_regexp = '\\d+'
# The group we wish to match in job_id_regexp (0 to match all)
# c.PBSLauncher.job_id_regexp_group = 0
# The PBS Queue.
# c.PBSLauncher.queue = u''
# The string that is the batch script template itself.
# c.PBSLauncher.batch_template = ''
# The PBS submit command ['qsub']
# c.PBSLauncher.submit_command = ['qsub']
# The PBS delete command ['qdel']
# c.PBSLauncher.delete_command = ['qdel']
# The filename of the instantiated batch script.
# c.PBSLauncher.batch_file_name = u'batch_script'
# The file that contains the batch template.
# c.PBSLauncher.batch_template_file = u''
#------------------------------------------------------------------------------
# PBSControllerLauncher configuration
#------------------------------------------------------------------------------
# Launch a controller using PBS.
# PBSControllerLauncher will inherit config from: PBSLauncher,
# BatchSystemLauncher
# Regular expression for identifying the job ID [r'\d+']
# c.PBSControllerLauncher.job_id_regexp = '\\d+'
# The group we wish to match in job_id_regexp (0 to match all)
# c.PBSControllerLauncher.job_id_regexp_group = 0
# The PBS Queue.
# c.PBSControllerLauncher.queue = u''
# The string that is the batch script template itself.
# c.PBSControllerLauncher.batch_template = ''
# The PBS submit command ['qsub']
# c.PBSControllerLauncher.submit_command = ['qsub']
# The PBS delete command ['qdel']
# c.PBSControllerLauncher.delete_command = ['qdel']
# batch file name for the controller job.
# c.PBSControllerLauncher.batch_file_name = u'pbs_controller'
# The file that contains the batch template.
# c.PBSControllerLauncher.batch_template_file = u''
#------------------------------------------------------------------------------
# PBSEngineSetLauncher configuration
#------------------------------------------------------------------------------
# | |
# Source repository: jochenater/catboost
from __future__ import unicode_literals
from ctypes import windll, byref, ArgumentError, c_char, c_long, c_ulong, c_uint, pointer
from ctypes.wintypes import DWORD, HANDLE
from prompt_toolkit.renderer import Output
from prompt_toolkit.styles import ANSI_COLOR_NAMES
from prompt_toolkit.win32_types import CONSOLE_SCREEN_BUFFER_INFO, STD_OUTPUT_HANDLE, STD_INPUT_HANDLE, COORD, SMALL_RECT
import os
import six
__all__ = (
'Win32Output',
)
def _coord_byval(coord):
"""
Turns a COORD object into a c_long.
This will cause it to be passed by value instead of by reference. (That is what I think at least.)
When runing ``ptipython`` is run (only with IPython), we often got the following error::
Error in 'SetConsoleCursorPosition'.
ArgumentError("argument 2: <class 'TypeError'>: wrong type",)
argument 2: <class 'TypeError'>: wrong type
It was solved by turning ``COORD`` parameters into a ``c_long`` like this.
More info: http://msdn.microsoft.com/en-us/library/windows/desktop/ms686025(v=vs.85).aspx
"""
return c_long(coord.Y * 0x10000 | coord.X & 0xFFFF)
#: If True: write the output of the renderer also to the following file. This
#: is very useful for debugging. (e.g.: to see that we don't write more bytes
#: than required.)
_DEBUG_RENDER_OUTPUT = False
#: Path of the debug log written when `_DEBUG_RENDER_OUTPUT` is enabled.
_DEBUG_RENDER_OUTPUT_FILENAME = r'prompt-toolkit-windows-output.log'
class NoConsoleScreenBufferError(Exception):
    """
    Raised when the application is not running inside a Windows Console, but
    the user tries to instantiate Win32Output.
    """
    def __init__(self):
        # Detect 'xterm'-style terminals on Windows (git-bash, Cygwin, ...),
        # where a more specific hint is helpful.
        term = os.environ.get('TERM', '')
        if 'xterm' in term:
            message = ('Found %s, while expecting a Windows console. '
                       'Maybe try to run this program using "winpty" '
                       'or run it in cmd.exe instead. Or otherwise, '
                       'in case of Cygwin, use the Python executable '
                       'that is compiled for Cygwin.' % os.environ['TERM'])
        else:
            message = 'No Windows console found. Are you running cmd.exe?'
        super(NoConsoleScreenBufferError, self).__init__(message)
class Win32Output(Output):
"""
I/O abstraction for rendering to Windows consoles.
(cmd.exe and similar.)
"""
    def __init__(self, stdout, use_complete_width=False):
        """
        :param stdout: Text stream that `fileno()`/`encoding()` delegate to
            and that is flushed when the buffer is empty.
        :param use_complete_width: When True, `get_size()` reports the full
            screen-buffer width instead of only the visible window width.
        """
        self.use_complete_width = use_complete_width
        # Pending output fragments; written to the console on `flush()`.
        self._buffer = []
        self.stdout = stdout
        self.hconsole = HANDLE(windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE))
        self._in_alternate_screen = False
        self.color_lookup_table = ColorLookupTable()
        # Remember the default console colors.
        info = self.get_win32_screen_buffer_info()
        self.default_attrs = info.wAttributes if info else 15
        if _DEBUG_RENDER_OUTPUT:
            self.LOG = open(_DEBUG_RENDER_OUTPUT_FILENAME, 'ab')
    def fileno(self):
        " Return file descriptor of the wrapped stdout stream. "
        return self.stdout.fileno()
    def encoding(self):
        " Return the encoding used by the wrapped stdout stream. "
        return self.stdout.encoding
    def write(self, data):
        # Buffered only; the actual console write happens in `flush()`.
        self._buffer.append(data)
    def write_raw(self, data):
        " For win32, there is no difference between write and write_raw. "
        # Delegate to the buffered write.
        self.write(data)
def get_size(self):
from prompt_toolkit.layout.screen import Size
info = self.get_win32_screen_buffer_info()
# We take the width of the *visible* region as the size. Not the width
# of the complete screen buffer. (Unless use_complete_width has been
# set.)
if self.use_complete_width:
width = info.dwSize.X
else:
width = info.srWindow.Right - info.srWindow.Left
height = info.srWindow.Bottom - info.srWindow.Top + 1
# We avoid the right margin, windows will wrap otherwise.
maxwidth = info.dwSize.X - 1
width = min(maxwidth, width)
# Create `Size` object.
return Size(rows=height, columns=width)
    def _winapi(self, func, *a, **kw):
        """
        Flush and call win API function.

        Returns the function's result, or ``None`` when the call fails with
        a ctypes ``ArgumentError`` (logged in debug mode, otherwise ignored).
        """
        # Flush first so buffered text and attribute/cursor changes are
        # applied to the console in order.
        self.flush()
        if _DEBUG_RENDER_OUTPUT:
            self.LOG.write(('%r' % func.__name__).encode('utf-8') + b'\n')
            self.LOG.write(b'    ' + ', '.join(['%r' % i for i in a]).encode('utf-8') + b'\n')
            self.LOG.write(b'    ' + ', '.join(['%r' % type(i) for i in a]).encode('utf-8') + b'\n')
            self.LOG.flush()
        try:
            return func(*a, **kw)
        except ArgumentError as e:
            if _DEBUG_RENDER_OUTPUT:
                self.LOG.write(('    Error in %r %r %s\n' % (func.__name__, e, e)).encode('utf-8'))
def get_win32_screen_buffer_info(self):
"""
Return Screen buffer info.
"""
# NOTE: We don't call the `GetConsoleScreenBufferInfo` API through
# `self._winapi`. Doing so causes Python to crash on certain 64bit
# Python versions. (Reproduced with 64bit Python 2.7.6, on Windows
# 10). It is not clear why. Possibly, it has to do with passing
# these objects as an argument, or through *args.
# The Python documentation contains the following - possibly related - warning:
# ctypes does not support passing unions or structures with
# bit-fields to functions by value. While this may work on 32-bit
# x86, it's not guaranteed by the library to work in the general
# case. Unions and structures with bit-fields should always be
# passed to functions by pointer.
# Also see:
# - https://github.com/ipython/ipython/issues/10070
# - https://github.com/jonathanslenders/python-prompt-toolkit/issues/406
# - https://github.com/jonathanslenders/python-prompt-toolkit/issues/86
self.flush()
sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
success = windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
# success = self._winapi(windll.kernel32.GetConsoleScreenBufferInfo,
# self.hconsole, byref(sbinfo))
if success:
return sbinfo
else:
raise NoConsoleScreenBufferError
    def set_title(self, title):
        """
        Set terminal title.
        """
        # SetConsoleTitleW is the wide-char variant; it requires text input.
        assert isinstance(title, six.text_type)
        self._winapi(windll.kernel32.SetConsoleTitleW, title)
    def clear_title(self):
        # Clear by setting an empty title.
        self._winapi(windll.kernel32.SetConsoleTitleW, '')
def erase_screen(self):
start = COORD(0, 0)
sbinfo = self.get_win32_screen_buffer_info()
length = sbinfo.dwSize.X * sbinfo.dwSize.Y
self.cursor_goto(row=0, column=0)
self._erase(start, length)
def erase_down(self):
sbinfo = self.get_win32_screen_buffer_info()
size = sbinfo.dwSize
start = sbinfo.dwCursorPosition
length = ((size.X - size.X) + size.X * (size.Y - sbinfo.dwCursorPosition.Y))
self._erase(start, length)
    def erase_end_of_line(self):
        """
        Erase from the cursor position to the end of the current line.
        """
        sbinfo = self.get_win32_screen_buffer_info()
        start = sbinfo.dwCursorPosition
        length = sbinfo.dwSize.X - sbinfo.dwCursorPosition.X
        self._erase(start, length)
    def _erase(self, start, length):
        """
        Overwrite `length` cells starting at coordinate `start` with spaces,
        then reset their attributes to the current screen-buffer attributes.
        """
        chars_written = c_ulong()
        self._winapi(windll.kernel32.FillConsoleOutputCharacterA,
                     self.hconsole, c_char(b' '), DWORD(length), _coord_byval(start),
                     byref(chars_written))
        # Reset attributes.
        sbinfo = self.get_win32_screen_buffer_info()
        self._winapi(windll.kernel32.FillConsoleOutputAttribute,
                     self.hconsole, sbinfo.wAttributes, length, _coord_byval(start),
                     byref(chars_written))
    def reset_attributes(self):
        " Reset the console foreground/background color. "
        # Restore the attributes captured at construction time.
        self._winapi(windll.kernel32.SetConsoleTextAttribute, self.hconsole,
                     self.default_attrs)
    def set_attributes(self, attrs):
        """
        Apply a (fgcolor, bgcolor, bold, underline, italic, blink, reverse)
        attribute tuple to the console.

        Note: bold/underline/italic/blink are unpacked but not used here —
        only colors and reverse are mapped onto console attributes.
        """
        fgcolor, bgcolor, bold, underline, italic, blink, reverse = attrs
        # Start from the default attributes.
        attrs = self.default_attrs
        # Override the last four bits: foreground color.
        if fgcolor is not None:
            attrs = attrs & ~0xf
            attrs |= self.color_lookup_table.lookup_fg_color(fgcolor)
        # Override the next four bits: background color.
        if bgcolor is not None:
            attrs = attrs & ~0xf0
            attrs |= self.color_lookup_table.lookup_bg_color(bgcolor)
        # Reverse: swap these four bits groups.
        if reverse:
            attrs = (attrs & ~0xff) | ((attrs & 0xf) << 4) | ((attrs & 0xf0) >> 4)
        self._winapi(windll.kernel32.SetConsoleTextAttribute, self.hconsole, attrs)
    def disable_autowrap(self):
        " No-op: not supported by Windows consoles. "
        # Not supported by Windows.
        pass
    def enable_autowrap(self):
        " No-op: not supported by Windows consoles. "
        # Not supported by Windows.
        pass
    def cursor_goto(self, row=0, column=0):
        # COORD takes (x, y): column first, then row.
        pos = COORD(x=column, y=row)
        self._winapi(windll.kernel32.SetConsoleCursorPosition, self.hconsole, _coord_byval(pos))
    def cursor_up(self, amount):
        # Read the current cursor position and move `amount` rows up.
        sr = self.get_win32_screen_buffer_info().dwCursorPosition
        pos = COORD(sr.X, sr.Y - amount)
        self._winapi(windll.kernel32.SetConsoleCursorPosition, self.hconsole, _coord_byval(pos))
    def cursor_down(self, amount):
        # Moving down is moving up by a negative amount.
        self.cursor_up(-amount)
    def cursor_forward(self, amount):
        sr = self.get_win32_screen_buffer_info().dwCursorPosition
        # assert sr.X + amount >= 0, 'Negative cursor position: x=%r amount=%r' % (sr.X, amount)
        # Clamp at column 0 so a large backward move cannot go negative.
        pos = COORD(max(0, sr.X + amount), sr.Y)
        self._winapi(windll.kernel32.SetConsoleCursorPosition, self.hconsole, _coord_byval(pos))
    def cursor_backward(self, amount):
        # Backward is forward by a negative amount (clamped at column 0).
        self.cursor_forward(-amount)
    def flush(self):
        """
        Write to output stream and flush.
        """
        if not self._buffer:
            # Only flush stdout buffer. (It could be that Python still has
            # something in its buffer. -- We want to be sure to print that in
            # the correct color.)
            self.stdout.flush()
            return
        data = ''.join(self._buffer)
        if _DEBUG_RENDER_OUTPUT:
            self.LOG.write(('%r' % data).encode('utf-8') + b'\n')
            self.LOG.flush()
        # Print characters one by one. This appears to be the best solution
        # in order to avoid traces of vertical lines when the completion
        # menu disappears.
        for b in data:
            written = DWORD()
            retval = windll.kernel32.WriteConsoleW(self.hconsole, b, 1, byref(written), None)
            assert retval != 0
        self._buffer = []
    def get_rows_below_cursor_position(self):
        # Rows between the cursor and the bottom of the visible window,
        # including the cursor's own row.
        info = self.get_win32_screen_buffer_info()
        return info.srWindow.Bottom - info.dwCursorPosition.Y + 1
    def scroll_buffer_to_prompt(self):
        """
        To be called before drawing the prompt. This should scroll the console
        to left, with the cursor at the bottom (if possible).
        """
        # Get current window size
        info = self.get_win32_screen_buffer_info()
        sr = info.srWindow
        cursor_pos = info.dwCursorPosition
        result = SMALL_RECT()
        # Scroll to the left.
        result.Left = 0
        result.Right = sr.Right - sr.Left
        # Scroll vertical
        win_height = sr.Bottom - sr.Top
        if 0 < sr.Bottom - cursor_pos.Y < win_height - 1:
            # no vertical scroll if cursor already on the screen
            result.Bottom = sr.Bottom
        else:
            # Otherwise place the cursor near the bottom of the new window.
            result.Bottom = max(win_height, cursor_pos.Y)
        result.Top = result.Bottom - win_height
        # Scroll API
        self._winapi(windll.kernel32.SetConsoleWindowInfo, self.hconsole, True, byref(result))
    def enter_alternate_screen(self):
        """
        Go to alternate screen buffer.
        """
        if not self._in_alternate_screen:
            GENERIC_READ = 0x80000000
            GENERIC_WRITE = 0x40000000
            # Create a new console buffer and activate that one.
            handle = HANDLE(self._winapi(windll.kernel32.CreateConsoleScreenBuffer, GENERIC_READ|GENERIC_WRITE,
                                         DWORD(0), None, DWORD(1), None))
            self._winapi(windll.kernel32.SetConsoleActiveScreenBuffer, handle)
            # All subsequent output goes to the new buffer.
            self.hconsole = handle
            self._in_alternate_screen = True
    def quit_alternate_screen(self):
        """
        Make stdout again the active buffer.
        """
        if self._in_alternate_screen:
            stdout = HANDLE(self._winapi(windll.kernel32.GetStdHandle, STD_OUTPUT_HANDLE))
            self._winapi(windll.kernel32.SetConsoleActiveScreenBuffer, stdout)
            # Close the buffer created by `enter_alternate_screen`.
            self._winapi(windll.kernel32.CloseHandle, self.hconsole)
            self.hconsole = stdout
            self._in_alternate_screen = False
    def enable_mouse_support(self):
        """
        Turn on mouse event reporting on the console input handle.
        """
        ENABLE_MOUSE_INPUT = 0x10
        handle = HANDLE(windll.kernel32.GetStdHandle(STD_INPUT_HANDLE))
        # Add the mouse-input bit to the current console mode.
        original_mode = DWORD()
        self._winapi(windll.kernel32.GetConsoleMode, handle, pointer(original_mode))
        self._winapi(windll.kernel32.SetConsoleMode, handle, original_mode.value | ENABLE_MOUSE_INPUT)
    def disable_mouse_support(self):
        """
        Turn off mouse event reporting on the console input handle.
        """
        ENABLE_MOUSE_INPUT = 0x10
        handle = HANDLE(windll.kernel32.GetStdHandle(STD_INPUT_HANDLE))
        # Clear the mouse-input bit from the current console mode.
        original_mode = DWORD()
        self._winapi(windll.kernel32.GetConsoleMode, handle, pointer(original_mode))
        self._winapi(windll.kernel32.SetConsoleMode, handle, original_mode.value & ~ ENABLE_MOUSE_INPUT)
    def hide_cursor(self):
        # NOTE(review): intentionally a no-op on this Win32 backend --
        # cursor visibility is presumably managed elsewhere (e.g. via
        # SetConsoleCursorInfo); confirm before implementing.
        pass
    def show_cursor(self):
        # NOTE(review): no-op counterpart of hide_cursor() -- see note there.
        pass
@classmethod
def win32_refresh_window(cls):
| |
##
## Name: dnd.py
## Purpose: Interface to Dartmouth Name Directory services
##
## Copyright (C) 2004-2007 <NAME>, All Rights Reserved.
##
## Permission is hereby granted, free of charge, to any person
## obtaining a copy of this software and associated documentation
## files (the "Software"), to deal in the Software without
## restriction, including without limitation the rights to use, copy,
## modify, merge, publish, distribute, sublicense, and/or sell copies
## of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be
## included in all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
## HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
## WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
## DEALINGS IN THE SOFTWARE.
##
import errno, os, socket, re, sys, weakref
from Crypto.Cipher import DES
__version__ = "1.8"
# Characters that are legal in name fields apart from letters/digits.
# NOTE(review): written with regex escapes inside a plain string; the
# backslashes are passed straight through into the character class below.
nf_extras = "- ._\(\)&\+\*\"'"
# A regex matching a legal "name" query string
name_query_re = re.compile(r"^([A-Za-z0-9]|[%s])+$" % nf_extras)
# A regex matching a legal UID query (e.g. "#12345")
uid_query_re = re.compile(r'#\d+$')
# A regex matching a legal DCTSNUM field query (Dartmouth only)
dcts_query_re = re.compile(r'\*(?:hd)?\d{5}[A-Z]$', re.IGNORECASE)
def check_query_key(q):
    """Return True if q is syntactically valid as a DND query key.

    A key is valid when it matches one of the three recognized query
    forms: a name query, a '#<uid>' query, or a DCTSNUM query.
    """
    patterns = (name_query_re, uid_query_re, dcts_query_re)
    return any(p.match(q) is not None for p in patterns)
def encrypt_challenge(rnd, key):
    """Encrypt a random challenge from the DND using the user's key.

    rnd -- Octal-encoded challenge from the DND (str)
    key -- User's cleartext password (str)

    Returns the DES-ECB encryption of the decoded challenge, re-encoded
    as a string of ASCII octal digits.

    Note: Due to the limitations of the DND protocol, the user's key
    may be at most DES.key_size in length, i.e., 8 characters.
    """
    rnd = decode_octal(rnd)
    # Null-pad short passwords up to the full 8-byte DES key size, as the
    # protocol requires.  Longer keys are NOT truncated here; DES.new()
    # would reject them, so callers must enforce the 8-character limit.
    if len(key) < DES.key_size:
        pad = chr(0) * (DES.key_size - len(key))
        key += pad
    dkey = DES.new(key, DES.MODE_ECB)
    result = dkey.encrypt(rnd)
    del (dkey)  # drop the key schedule promptly (hygiene for key material)
    return encode_octal(result)
def encrypt_change(old, new):
    """Encrypt old and new passwords for a DND change password
    request. Returns a tuple consisting of the old password encrypted
    using the new one as a key, and the new password encrypted using
    the old one as a key, both encoded as a string of ASCII octal
    digits as required by the DND protocol.

    old -- Old cleartext password (str)
    new -- New cleartext password (str)
    """
    # Null-pad both passwords to the 8-byte DES key/block size; same
    # convention as encrypt_challenge().
    if len(old) < DES.key_size:
        pad = chr(0) * (DES.key_size - len(old))
        old += pad
    if len(new) < DES.key_size:
        pad = chr(0) * (DES.key_size - len(new))
        new += pad
    okey = DES.new(old, DES.MODE_ECB)
    nkey = DES.new(new, DES.MODE_ECB)
    # Each password serves as both key and plaintext, crossed over.
    old_w_new = nkey.encrypt(old)
    del (nkey)  # discard key schedules promptly (key hygiene)
    new_w_old = okey.encrypt(new)
    del (okey)
    return (encode_octal(old_w_new), encode_octal(new_w_old))
def encode_octal(s):
    """Encode an ASCII string as a run of octal digits.

    Each character becomes exactly three octal digits giving its
    ordinal value, so the result is always 3 * len(s) characters long.
    """
    digits = [format(ord(ch), "03o") for ch in s]
    return "".join(digits)
def decode_octal(s):
    """Decode a string of octal digits back into characters.

    Each block of three octal digits conveys the value of a single
    ASCII character.  The input is left-padded with zeroes if its
    length is not a multiple of three.
    """
    # Left-pad in one step instead of looping; behavior is identical to
    # repeatedly prepending '0'.
    if len(s) % 3 != 0:
        s = s.rjust(len(s) + (3 - len(s) % 3), '0')
    # Use `range` (not the Python-2-only `xrange`) so this works on both
    # Python 2 and Python 3; also avoid shadowing the same loop name twice.
    return ''.join(
        chr(int(s[i:i + 3], 8)) for i in range(0, len(s), 3))
def enquote_string(s):
    """Quote a value per the DND protocol rules: every interior '"' is
    doubled and the whole result is wrapped in double quotes.
    """
    doubled = s.replace('"', '""')
    return '"%s"' % doubled
def dequote_string(s):
    """Strip DND-protocol quoting from a string.

    A leading and a trailing quotation mark (if present) are removed,
    and each internal doubled quote ("") collapses to a single '"'.

    See also: enquote_string()
    """
    # Slice-based checks behave like startswith/endswith and are safe on
    # the empty string left after stripping a lone quote.
    if s[:1] == '"':
        s = s[1:]
    if s[-1:] == '"':
        s = s[:-1]
    return s.replace('""', '"')
def lookup(query, fields=(), **config):
    """Convenience wrapper: open a DNDSession, issue a single lookup,
    and always close the session afterwards.

    query -- the query to send to the DND (str).
    fields -- a sequence of field names to look up.

    Additional keyword arguments are passed to the DNDSession
    constructor.
    """
    session = DNDSession(**config)
    try:
        return session.lookup(query, *fields)
    finally:
        session.close()
def lookup_unique(query, fields=(), **config):
    """Convenience wrapper: open a DNDSession, issue a single
    lookup_unique, and always close the session afterwards.

    query -- the query to send to the DND (str).
    fields -- a sequence of field names to look up.

    Additional keyword arguments are passed to the DNDSession
    constructor.
    """
    session = DNDSession(**config)
    try:
        return session.lookup_unique(query, *fields)
    finally:
        session.close()
class DNDError(Exception):
    """The root class for DND errors."""

class DNDProtocolError(DNDError):
    """An exception representing protocol errors encountered during
    interaction with a DND server.  The `key' field gives the numeric
    error code, the `value' field gives the descriptive text returned
    by the server.
    """

    def __init__(self, key, value=''):
        self.key = key      # numeric protocol error code
        self.value = value  # descriptive text from the server

    def __str__(self):
        # Bug fix: the original used Python 2 backtick-repr syntax
        # (`self.value`), which is a SyntaxError on Python 3.  repr()
        # is the exact equivalent on both versions.
        return repr(self.value)
class DNDLostConnection(DNDError):
    # Presumably raised when the connection to the DND server drops
    # mid-session (name-based inference; confirm against the session code).
    pass
class DNDNotConnected(DNDError):
    # Presumably raised when an operation is attempted before a
    # connection is established (name-based inference; confirm).
    pass
class DNDField(object):
    """Represents a field key in the DND. Fields have permissions
    associated with them, determining who can read and write the
    contents of the field. The general permission scheme is:

      A -- anyone may do this operation (unauthenticated)
      U -- the user whose record this is may do this operation
      N -- nobody may perform this operation (administrator only)
      T -- trusted users may perform this operation
    """
    # Maps user-friendly permission-category names (and the bare
    # letters themselves) to the DND's single-letter permission codes.
    _ptypes = {
        'all': 'A',
        'any': 'A',
        'everyone': 'A',
        'user': 'U',
        'owner': 'U',
        'self': 'U',
        'none': 'N',
        'nobody': 'N',
        'root': 'N',
        'trust': 'T',
        'trusted': 'T',
        'admin': 'T',
        'a': 'A',
        'u': 'U',
        'n': 'N',
        't': 'T'
    }

    def __init__(self, name, rd, wr):
        """Initialize a new DNDField instance:

        name -- the name of the field (str)
        rd   -- who has read access to the field (str)
        wr   -- who has write access to the field (str)
        """
        self._name = name
        self._read = self.permtype(rd)
        self._write = self.permtype(wr)

    name = property(lambda self: self._name, doc="The name of the field")
    read = property(lambda self: self._read, doc="Who has read permission")
    write = property(lambda self: self._write, doc="Who has write permission")

    def is_readable(self, bywhom='any'):
        """Returns True if the field is readable by the specified
        category.  This may either be a string, or a sequence of
        strings; in the latter case, True is returned if at least one
        of the categories listed can read the field.  If the field is
        not readable, False is returned.
        """
        # Duck-type the "single category name" case via .lower so this
        # works on Python 2 (str/unicode) and Python 3 (str) alike; the
        # original used the Python-2-only `basestring`.
        if hasattr(bywhom, 'lower'):
            pt = set((self.permtype(bywhom), ))
        else:
            pt = set(self.permtype(x) for x in bywhom)
        return self.read == 'A' or self.read in pt

    def is_writable(self, bywhom='user'):
        """Returns True if the field is writable by the specified
        category.  This may either be a string, or a sequence of
        strings; in the latter case, True is returned if at least one
        of the categories listed can write the field.  If the field is
        not writable, False is returned.
        """
        if hasattr(bywhom, 'lower'):
            pt = set((self.permtype(bywhom), ))
        else:
            pt = set(self.permtype(x) for x in bywhom)
        return self.write == 'A' or self.write in pt

    @staticmethod
    def permtype(key):
        """Map a string describing a category of permissions to the
        DND's corresponding permission letter.  Raises ValueError for
        unknown categories.
        """
        try:
            return DNDField._ptypes[key.lower()]
        except KeyError:
            raise ValueError("Unknown permission category: %s" % key)

    def __repr__(self):
        return '#<%s %s read=%s write=%s>' % \
               (type(self).__name__,
                self._name, self._read, self._write)

    def __eq__(self, other):
        """Two DNDFields are equal if their names are equal without
        respect to case.  A DNDField is equal to a string if the
        string is equal to the name of the field.
        """
        try:
            # Bug fix: the original evaluated this comparison but fell
            # through without returning it, so DNDField == DNDField
            # always yielded None (falsy).
            return self._name.lower() == other._name.lower()
        except AttributeError:
            # `other` is not a DNDField; compare against it as a string.
            return self._name.lower() == other.lower()

    def __hash__(self):
        # Consistent with __eq__: case-insensitive name hash.
        return hash(self._name.lower())
class DNDRecord(dict):
"""This class represents a record in the DND. It inherits from a
dictionary, so you can use ordinary dictionary | |
<filename>physt/special.py<gh_stars>0
"""Transformed histograms.
These histograms use a transformation from input values to bins
in a different coordinate system.
There are three basic classes:
* PolarHistogram
* CylindricalHistogram
* SphericalHistogram
Apart from these, there are their projections into lower dimensions.
And of course, it is possible to re-use the general transforming functionality
by adding `TransformedHistogramMixin` among the custom histogram
class superclasses.
"""
from __future__ import absolute_import, division
from functools import reduce
import numpy as np
from .histogram_nd import HistogramND
from .histogram1d import Histogram1D
from . import binnings, histogram_nd
class TransformedHistogramMixin(object):
    """Histogram with non-cartesian (or otherwise transformed) axes.

    This is a mixin, providing transform-aware find_bin, fill and fill_n.
    When implementing, you are required to provide the following:

    - `transform` method to convert rectangular (suggested to make it classmethod)
    - `bin_sizes` property

    In certain cases, you may want to have default axis names + projections.
    Look at PolarHistogram / SphericalHistogram / CylindricalHistogram as
    an example.
    """
    @classmethod
    def transform(cls, value):
        """Convert cartesian (general) coordinates into internal ones.

        Parameters
        ----------
        value : array_like
            This method should accept both scalars and numpy arrays.
            If multiple values are to be transformed, it should be of
            (nvalues, ndim) shape.

        Returns
        -------
        float or array_like
        """
        raise NotImplementedError("TransformedHistogramMixin descendant must implement transform method.")
    def find_bin(self, value, axis=None, transformed=False):
        """Index of the bin that contains `value`.

        Parameters
        ----------
        value : array_like
            Value with dimensionality equal to histogram.
        transformed : bool
            If true, the value is already transformed and has same axes as the bins.
        """
        # Only transform when locating a full-dimensional point; a
        # single-axis lookup receives an already-internal coordinate.
        if axis is None and not transformed:
            value = self.transform(value)
        return HistogramND.find_bin(self, value, axis=axis)
    @property
    def bin_sizes(self):
        # Area/volume of each bin measured in the transformed coordinates.
        raise NotImplementedError("TransformedHistogramMixin descendant must implement bin_sizes property.")
    def fill(self, value, weight=1, transformed=False):
        # NOTE(review): unlike fill_n, this forwards `transformed` to
        # HistogramND.fill without calling self.transform here -- confirm
        # that HistogramND.fill actually accepts/handles that keyword.
        return HistogramND.fill(self, value=value, weight=weight, transformed=transformed)
    def fill_n(self, values, weights=None, dropna=True, transformed=False):
        # Transform raw cartesian values once, then delegate to the ND base.
        if not transformed:
            values = self.transform(values)
        HistogramND.fill_n(self, values=values, weights=weights, dropna=dropna)
    # Subclasses map sorted axis index tuples to specialized projection classes.
    _projection_class_map = {}
    def projection(self, *axes, **kwargs):
        """Projection to lower-dimensional histogram.

        The inheriting class should implement the _projection_class_map
        class attribute to suggest class for the projection. If the
        arguments don't match any of the map keys, HistogramND is used.
        """
        axes, _ = self._get_projection_axes(*axes)
        # Sort so the tuple matches the canonical keys of the class map.
        axes = tuple(sorted(axes))
        if axes in self._projection_class_map:
            klass = self._projection_class_map[axes]
            return HistogramND.projection(self, *axes, type=klass, **kwargs)
        else:
            return HistogramND.projection(self, *axes, **kwargs)
class RadialHistogram(Histogram1D):
    """Projection of a polar histogram onto the radius axis.

    A 1D histogram whose bins live in a transformed (radial) coordinate,
    so each bin's "size" is the area of the corresponding annulus.
    """
    @property
    def bin_sizes(self):
        # Annulus area between the two bin edges: pi * (r2^2 - r1^2).
        inner = self.bin_left_edges
        outer = self.bin_right_edges
        return np.pi * (outer ** 2 - inner ** 2)

    def fill(self, value, weight=1):
        # TODO: Implement?
        raise NotImplementedError("Radial histogram is not (yet) modifiable")

    def fill_n(self, values, weights=None, dropna=True):
        # TODO: Implement?
        raise NotImplementedError("Radial histogram is not (yet) modifiable")
class AzimuthalHistogram(Histogram1D):
    """Projection of a polar histogram onto the angular (phi) axis.

    A 1D histogram in a transformed angular coordinate.
    """
    # TODO: What about fill(_n)? Should it be 1D or 2D?
    # TODO: Add special plotting (polar bar, polar ring)

    def fill(self, value, weight=1):
        raise NotImplementedError("Azimuthal histogram is not (yet) modifiable")

    def fill_n(self, values, weights=None, dropna=True):
        raise NotImplementedError("Azimuthal histogram is not (yet) modifiable")
class PolarHistogram(TransformedHistogramMixin, HistogramND):
    """2D histogram in polar coordinates.

    This is a special case of a 2D histogram with transformed coordinates:
    - r as radius in the (0, +inf) range
    - phi as azimuthal angle in the (0, 2*pi) range
    """
    def __init__(self, binnings, frequencies=None, **kwargs):
        if "axis_names" not in kwargs:
            kwargs["axis_names"] = ("r", "phi")
        # Dimension is fixed at 2; silently drop any caller-supplied "dim"
        # (same pop-with-default idiom as SphericalHistogram).
        kwargs.pop("dim", None)
        super(PolarHistogram, self).__init__(2, binnings=binnings, frequencies=frequencies, **kwargs)

    @property
    def bin_sizes(self):
        # Area of each (r, phi) bin: 0.5 * (r2^2 - r1^2) * dphi.
        sizes = 0.5 * (self.get_bin_right_edges(0) ** 2 - self.get_bin_left_edges(0) ** 2)
        sizes = np.outer(sizes, self.get_bin_widths(1))
        return sizes

    @classmethod
    def transform(cls, value):
        """Convert cartesian (x, y) coordinates to (r, phi)."""
        value = np.asarray(value, dtype=np.float64)
        assert value.shape[-1] == 2  # last axis must hold (x, y)
        result = np.empty_like(value)
        result[..., 0] = np.hypot(value[..., 1], value[..., 0])
        # arctan2 yields (-pi, pi]; fold into the (0, 2*pi) convention.
        result[..., 1] = np.arctan2(value[..., 1], value[..., 0]) % (2 * np.pi)
        return result

    _projection_class_map = {
        (0,): RadialHistogram,
        (1,): AzimuthalHistogram
    }
class DirectionalHistogram(TransformedHistogramMixin, HistogramND):
    """2D histogram over directions on a sphere.

    This is a special case of a 2D histogram with transformed coordinates:
    - theta as the polar angle between the z axis and the vector; as
      produced by SphericalHistogram.transform this lies in (0, pi)
    - phi as azimuthal angle (in the xy projection) in the (0, 2*pi) range
    """
    @property
    def bin_sizes(self):
        # Solid-angle element per bin: (cos(theta1) - cos(theta2)) * dphi.
        sizes1 = np.cos(self.get_bin_left_edges(0)) - np.cos(self.get_bin_right_edges(0))
        sizes2 = self.get_bin_widths(1)
        return reduce(np.multiply, np.ix_(sizes1, sizes2))
    def __init__(self, binnings, frequencies=None, radius=1, **kwargs):
        # Dimension is fixed at 2; drop any caller-supplied "dim".
        if "axis_names" not in kwargs:
            kwargs["axis_names"] = ("theta", "phi")
        if "dim" in kwargs:
            kwargs.pop("dim")
        super(DirectionalHistogram, self).__init__(2, binnings=binnings, frequencies=frequencies, **kwargs)
        self.radius = radius
    @property
    def radius(self):
        """Radius of the surface.

        Useful for calculating densities.
        """
        return self._meta_data.get("radius", 1)
    @radius.setter
    def radius(self, value):
        # NOTE(review): unlike CylinderSurfaceHistogram, the value is stored
        # without float() coercion -- confirm whether that is intentional.
        self._meta_data["radius"] = value
class SphericalHistogram(TransformedHistogramMixin, HistogramND):
    """3D histogram in spherical coordinates (r, theta, phi).

    This is a special case of a 3D histogram with transformed coordinates:
    - r: radius, in the (0, +inf) range
    - theta: polar angle measured from the z axis; transform() yields
      values in (0, pi)
    - phi: azimuthal angle in the xy projection, in the (0, 2*pi) range
    """
    def __init__(self, binnings, frequencies=None, **kwargs):
        kwargs.setdefault("axis_names", ("r", "theta", "phi"))
        kwargs.pop("dim", False)
        super(SphericalHistogram, self).__init__(3, binnings=binnings, frequencies=frequencies, **kwargs)

    @classmethod
    def transform(cls, value):
        """Map cartesian (x, y, z) to (r, theta, phi)."""
        value = np.asarray(value, dtype=np.float64)
        out = np.empty_like(value)
        x, y, z = value.T
        rho = np.hypot(x, y)                             # distance in the xy plane
        out[..., 0] = np.hypot(rho, z)                   # full radius
        out[..., 1] = np.arctan2(rho, z) % (2 * np.pi)   # polar angle; rho >= 0 keeps this in [0, pi]
        out[..., 2] = np.arctan2(y, x) % (2 * np.pi)     # azimuth folded into [0, 2*pi)
        return out

    @property
    def bin_sizes(self):
        # Spherical volume element integrated over each bin:
        # radial part (r2^3 - r1^3)/3, polar part cos(t1) - cos(t2),
        # azimuthal part the plain bin width.
        radial = (self.get_bin_right_edges(0) ** 3 - self.get_bin_left_edges(0) ** 3) / 3
        polar = np.cos(self.get_bin_left_edges(1)) - np.cos(self.get_bin_right_edges(1))
        azimuthal = self.get_bin_widths(2)
        return reduce(np.multiply, np.ix_(radial, polar, azimuthal))

    _projection_class_map = {
        (1, 2): DirectionalHistogram,
    }
class CylinderSurfaceHistogram(TransformedHistogramMixin, HistogramND):
    """2D histogram over the surface of a cylinder.

    This is a special case of a 2D histogram with transformed coordinates:
    - phi: azimuthal angle (in the xy projection), in the (0, 2*pi) range
    - z: the cartesian z coordinate, unmodified, in (-inf, +inf)

    Attributes
    ----------
    radius: float
        The radius of the surface. Useful for plotting
    """
    def __init__(self, binnings, frequencies=None, radius=1, **kwargs):
        kwargs.setdefault("axis_names", ("phi", "z"))
        # Dimension is fixed at 2; drop any caller-supplied "dim".
        kwargs.pop("dim", None)
        super(CylinderSurfaceHistogram, self).__init__(
            2, binnings=binnings, frequencies=frequencies, **kwargs)
        self.radius = radius

    @property
    def radius(self):
        """Radius of the cylindrical surface.

        Useful for calculating densities.

        Returns
        -------
        float
        """
        return self._meta_data.get("radius", 1)

    @radius.setter
    def radius(self, value):
        # Kept in the metadata dict, coerced to float.
        self._meta_data["radius"] = float(value)

    _projection_class_map = {
        (0,): AzimuthalHistogram
    }
class CylindricalHistogram(TransformedHistogramMixin, HistogramND):
    """3D histogram in cylindrical coordinates (rho, phi, z).

    This is a special case of a 3D histogram with transformed coordinates:
    - rho: radial distance in the xy plane, in the (0, +inf) range
    - phi: azimuthal angle (in the xy projection), in the (0, 2*pi) range
    - z: the cartesian z coordinate, unmodified, in (-inf, +inf)
    """
    def __init__(self, binnings, frequencies=None, **kwargs):
        kwargs.setdefault("axis_names", ("rho", "phi", "z"))
        kwargs.pop("dim", False)
        super(CylindricalHistogram, self).__init__(
            3, binnings=binnings, frequencies=frequencies, **kwargs)

    @classmethod
    def transform(cls, value):
        """Map cartesian (x, y, z) to (rho, phi, z)."""
        value = np.asarray(value, dtype=np.float64)
        out = np.empty_like(value)
        x, y, z = value.T
        out[..., 0] = np.hypot(x, y)                    # rho
        out[..., 1] = np.arctan2(y, x) % (2 * np.pi)    # phi in [0, 2*pi)
        out[..., 2] = z                                 # z passes through untouched
        return out

    @property
    def bin_sizes(self):
        # Volume of each bin: 0.5 * (rho2^2 - rho1^2) * dphi * dz.
        radial = 0.5 * (self.get_bin_right_edges(0) ** 2 - self.get_bin_left_edges(0) ** 2)
        angular = self.get_bin_widths(1)
        axial = self.get_bin_widths(2)
        return reduce(np.multiply, np.ix_(radial, angular, axial))

    _projection_class_map = {
        (0, 1): PolarHistogram,
        (1, 2): CylinderSurfaceHistogram
    }

    def projection(self, *args, **kwargs):
        """Like the mixin projection, but propagates the outermost radial
        edge as the radius of a CylinderSurfaceHistogram result."""
        result = TransformedHistogramMixin.projection(self, *args, **kwargs)
        if isinstance(result, CylinderSurfaceHistogram):
            result.radius = self.get_bin_right_edges(0)[-1]
        return result
def _prepare_data(data, transformed, klass, *args, **kwargs):
"""Transform data for binning.
Returns
-------
np.ndarray
"""
# TODO: Maybe include in the class itself?
data = np.asarray(data)
if not transformed:
data = klass.transform(data)
dropna = kwargs.get("dropna", False)
if dropna:
data = data[~np.isnan(data).any(axis=1)]
return data
def polar_histogram(xdata, ydata, radial_bins="numpy", phi_bins=16,
transformed=False, *args, **kwargs):
"""Facade construction function for the PolarHistogram.
Parameters
----------
transformed : bool
phi_range : Optional[tuple]
range
"""
dropna = kwargs.pop("dropna", True)
data = np.concatenate([xdata[:, np.newaxis], ydata[:, np.newaxis]], axis=1)
data = _prepare_data(data, transformed=transformed, klass=PolarHistogram, dropna=dropna)
if isinstance(phi_bins, int):
phi_range = (0, 2 * np.pi)
if "phi_range" in "kwargs":
phi_range = kwargs["phi_range"]
elif "range" in "kwargs":
phi_range = kwargs["range"][1]
phi_range = list(phi_range) + [phi_bins + | |
<reponame>tlapusan/dtreeviz
import numpy as np
import pandas as pd
import matplotlib.patches as patches
import matplotlib.pyplot as plt
from matplotlib.collections import PatchCollection
from colour import Color
from PIL import ImageColor
from dtreeviz.colors import adjust_colors, GREY
from dtreeviz.trees import add_classifier_legend
from dtreeviz import utils
def clfviz(model, X: np.ndarray, y: np.ndarray,
           ntiles=50, tile_fraction=.9,
           binary_threshold=0.5,
           show=['instances', 'boundaries', 'probabilities', 'misclassified', 'legend'],
           feature_names=None, target_name=None, class_names=None,
           markers=None,
           boundary_marker='o', boundary_markersize=.8,
           fontsize=9, fontname="Arial",
           dot_w=25,
           yshift=.08,
           sigma=.013,
           colors: dict = None,
           ax=None) -> None:
    """
    Two-variable case:

        Draw a tiled grid over a 2D classifier feature space where each tile is colored by
        the coordinate probabilities or coordinate predicted class. The X,y instances
        are drawn on top of the tiling. The decision boundaries are indicated
        by dots in between the classes. You can specify a threshold for the binary
        classification case. Misclassified instances are highlighted.

    One-variable case:

        Draw a strip plot over a 1D feature space, one strip per class. A narrow rectangle
        along the bottom indicates a color combined probabilities from all classes. The
        color associated with the most likely class will dominate the probabilities rectangle.
        Misclassified instances are highlighted. Decision boundaries, where the predicted
        class shifts from one to another, are indicated by vertical dashed lines.

    TODO: assumes classes are contiguous and 0..k-1

    NOTE(review): `show` uses a mutable list as default argument; safe only
    as long as neither this function nor callers mutate it.

    :param model: an sklearn classifier model or any other model that can answer
                  method predict_proba(X)
    :param X: A 1- or 2-column dataframe or numpy array with the one or two features to plot
    :param y: The target column with integers indicating the true instance classes;
              currently these must be contiguous 0..k-1 for k classes.
    :param ntiles: How many tiles to draw across the x1, x2 feature space
    :param tile_fraction: A value between 0..1 indicating how much of a tile
                          should be colored; e.g., .9 indicates the tile should leave
                          10% whitespace around the colored portion.
    :param boundary_marker: The marker symbol from matplotlib to use for the boundary;
                            default is a circle 'o'.
    :param boundary_markersize: The boundary marker size; default is .8
    :param feature_names: A list of strings indicating the one or two X variable names.
                          If None, no axes labels are showing
    :param target_name: If showing legend, this is the title of the legend box.
    :param class_names: If showing legend, these are the class names in the legend box
    :param show: Which elements to show, includes elements from
                 ['instances','boundaries','probabilities','misclassified','legend']
    :param markers: By default, just small circles are shown for each X instance, but
                    if not None, this is a list of matplotlib marker strings like ['X','s'].
    :param fontsize: Font size for tick labels and axis labels
    :param fontname: The font name for tick labels and axis labels
    :param colors: A dictionary with adjustments to the colors
    :param dot_w: How wide should the circles be when drawing the instances
    :param yshift: For univariate case. If you'd like to play around with the strip plot,
                   this variable shifts the class clusters; a shifted zero puts them on
                   top of each other.
    :param sigma: For univariate case. The standard deviation of the noise added to make
                  the strip plot.
    :param ax: An optional matplotlib "axes" upon which this method should draw. If you
               send in your own figure, it should be wide but not tall like shape 4,1
    """
    # Accept pandas containers but work with raw numpy arrays internally.
    if isinstance(X, pd.DataFrame):
        X = X.values
    if isinstance(y, pd.Series):
        y = y.values
    # Dispatch on the number of feature columns: strip plot for one
    # feature, tiled 2D plot for exactly two.
    if len(X.shape) == 1 or (len(X.shape)==2 and X.shape[1] == 1):
        clfviz_univar(model=model, x=X, y=y,
                      ntiles=ntiles,
                      binary_threshold=binary_threshold,
                      show=show,
                      feature_name=feature_names[0] if feature_names is not None else None,
                      target_name=target_name,
                      class_names=class_names,
                      markers=markers,
                      fontsize=fontsize, fontname=fontname,
                      dot_w=dot_w,
                      sigma=sigma,
                      yshift=yshift,
                      colors=colors,
                      ax=ax)
    elif len(X.shape) == 2 and X.shape[1] == 2:
        clfviz_bivar(model=model, X=X, y=y,
                     ntiles=ntiles, tile_fraction=tile_fraction,
                     binary_threshold=binary_threshold,
                     show=show,
                     feature_names=feature_names, target_name=target_name,
                     class_names=class_names,
                     markers=markers,
                     boundary_marker=boundary_marker,
                     boundary_markersize=boundary_markersize,
                     fontsize=fontsize, fontname=fontname,
                     dot_w=dot_w, colors=colors,
                     ax=ax)
    else:
        raise ValueError(f"Expecting 2D data not {X.shape}")
def clfviz_bivar(model, X:np.ndarray, y:np.ndarray,
                 ntiles=50, tile_fraction=.9,
                 binary_threshold=0.5,
                 show=['instances','boundaries','probabilities','misclassified','legend'],
                 feature_names=None, target_name=None, class_names=None,
                 markers=None,
                 boundary_marker='o', boundary_markersize=.8,
                 fontsize=9, fontname="Arial",
                 dot_w=25, colors:dict=None,
                 ax=None) -> None:
    """
    Draw the two-feature classifier visualization: a colored tile grid of
    probabilities/predictions with instances, boundaries and legend on top.

    See comment and parameter descriptions for clfviz() above.
    """
    # Accept pandas containers but work with raw numpy arrays internally.
    if isinstance(X, pd.DataFrame):
        X = X.values
    if isinstance(y, pd.Series):
        y = y.values
    if len(X.shape)==1 or (len(X.shape)==2 and X.shape[1]!=2) or len(X.shape)>2:
        raise ValueError(f"Expecting 2D data not {X.shape}")
    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=(5, 3.5))
    # Created grid over the range of x1 and x2 variables, get probabilities, predictions
    # NOTE(review): `x_` returned here is later shadowed by the instance-loop
    # variable below -- works, but confusing; confirm both uses are intended.
    grid_points, grid_proba, grid_pred_as_matrix, w, x_, class_X, class_values = \
        _compute_tiling(model, X, y, binary_threshold, ntiles, tile_fraction)
    x_proba = _predict_proba(model, X)
    if len(np.unique(y)) == 2: # is k=2 binary?
        # Binary case honors the caller's threshold on class-1 probability.
        X_pred = np.where(x_proba[:, 1] >= binary_threshold, 1, 0)
    else:
        X_pred = np.argmax(x_proba, axis=1) # TODO: assumes classes are 0..k-1
    # Per-class slices of the predictions, aligned with class_X.
    class_X_pred = [X_pred[y == cl] for cl in class_values]
    if markers is None:
        markers = ['o']*len(class_X)
    # From here on `colors` is the adjusted color dictionary, not the argument.
    colors = adjust_colors(colors)
    class_values = np.unique(y) # returns sorted
    # Get class to color map for probabilities and predictions
    color_map, grid_pred_colors, grid_proba_colors = \
        _get_grid_colors(grid_proba, grid_pred_as_matrix, class_values, colors)
    # Draw probabilities or class prediction grid
    facecolors = grid_proba_colors if 'probabilities' in show else grid_pred_colors
    _draw_tiles(ax, grid_points, facecolors, colors['tile_alpha'], x_, w)
    # Get grid with class predictions with coordinates (x,y)
    # e.g., y_pred[0,0] is lower left pixel and y_pred[5,5] is top-right pixel
    # for npoints=5
    grid_pred_as_matrix = grid_pred_as_matrix.reshape(ntiles, ntiles)
    if 'boundaries' in show:
        _draw_boundary_edges(ax, grid_points, grid_pred_as_matrix,
                             boundary_marker, boundary_markersize,
                             colors, w, x_)
    # Draw the X instances circles
    if 'instances' in show:
        for i, x_ in enumerate(class_X):
            if 'misclassified' in show:
                # Show correctly classified markers
                good_x = x_[class_X_pred[i] == class_values[i],:]
                ax.scatter(good_x[:, 0], good_x[:, 1],
                           s=dot_w, c=color_map[i],
                           marker=markers[i],
                           alpha=colors['scatter_marker_alpha'],
                           edgecolors=colors['scatter_edge'],
                           lw=.5)
                # Show misclassified markers (can't have alpha per marker so do in 2 calls)
                bad_x = x_[class_X_pred[i] != class_values[i],:]
                ax.scatter(bad_x[:, 0], bad_x[:, 1],
                           s=dot_w, c=color_map[i],
                           marker=markers[i],
                           alpha=1.0,
                           edgecolors=colors['warning'],
                           lw=.5)
            else:
                ax.scatter(x_[:, 0], x_[:, 1],
                           s=dot_w, c=color_map[i],
                           marker=markers[i],
                           alpha=colors['scatter_marker_alpha'],
                           edgecolors=colors['scatter_edge'],
                           lw=.5)
    if feature_names is not None:
        ax.set_xlabel(f"{feature_names[0]}", fontsize=fontsize, fontname=fontname, color=colors['axis_label'])
        ax.set_ylabel(f"{feature_names[1]}", fontsize=fontsize, fontname=fontname, color=colors['axis_label'])
    if 'legend' in show:
        class_names = utils._normalize_class_names(class_names, nclasses=len(class_values))
        add_classifier_legend(ax, class_names, class_values, color_map, target_name, colors,
                              fontsize=fontsize, fontname=fontname)
    # Cosmetic axis styling: thin ticks/spines, consistent fonts.
    ax.tick_params(axis='both', which='major', width=.3, labelcolor=colors['tick_label'],
                   labelsize=fontsize)
    for tick in ax.get_xticklabels():
        tick.set_fontname(fontname)
    for tick in ax.get_yticklabels():
        tick.set_fontname(fontname)
    ax.spines['top'].set_visible(False)  # turns off the top "spine" completely
    ax.spines['right'].set_visible(False)
    ax.spines['left'].set_linewidth(.5)
    ax.spines['bottom'].set_linewidth(.5)
def _compute_tiling(model, X:np.ndarray, y:np.ndarray, binary_threshold,
ntiles, tile_fraction):
"""
Create grid over the range of x1 and x2 variables; use the model to
compute the probabilities with model.predict_proba(), which will work with sklearn
and, I think, XGBoost. Later we will have to figure out how to get probabilities
out of the other models we support.
The predictions are computed simply by picking the argmax of probabilities, which
assumes classes are 0..k-1. TODO: update to allow disjoint integer class values
For k=2 binary classifications, there is no way to set the threshold and so
a threshold of 0.5 is implicitly chosen by argmax.
This returns all of the details needed to plot the tiles. The coordinates of
the grid are a linear space from min to max of each variable, inclusively.
So if the range is 1..5 and we want 5 tiles, then the width of each tile is 1.
We get a tile at each position. When we are drawing, the position is taken as
the center of the tile. In this case, the grid points would be centered over
1,2,3,4, and 5.
"""
if isinstance(X, pd.DataFrame):
X = X.values
if isinstance(y, pd.Series):
y = y.values
X1 = X[:, 0]
X2 = X[:, 1]
x1r = max(X1) - min(X1)
x2r = max(X2) - min(X2)
border1 = x1r*0.05 # make a 5% border
border2 = x2r*0.05
x1range = (min(X1)-border1, max(X1)+border1)
x2range = (min(X2)-border2, max(X2)+border2)
w = (x1r+2*border1) / (ntiles-1)
h = (x2r+2*border2) / (ntiles-1)
w *= tile_fraction
h *= tile_fraction
grid_points = [] # a list of coordinate pairs for the grid
# Iterate through v1 (x-axis) most quickly then v2 (y-axis)
for iv2, v2 in enumerate(np.linspace(*x2range, num=ntiles, endpoint=True)):
for iv1, v1 in enumerate(np.linspace(*x1range, num=ntiles, endpoint=True)):
grid_points.append([v1, v2])
grid_points = np.array(grid_points)
class_values = np.unique(y)
class_X = [X[y == cl] for cl in class_values]
grid_proba = _predict_proba(model, grid_points)
if len(np.unique(y))==2: # is k=2 binary?
grid_pred = np.where(grid_proba[:,1]>=binary_threshold,1,0)
else:
| |
<reponame>Miracle2333/BSPPN
"""
Mask R-CNN
Configurations and data loading code for MS COCO.
Copyright (c) 2017 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by <NAME>
------------------------------------------------------------
Usage: import the module (see Jupyter notebooks for examples), or run from
the command line as such:
# Train a new model starting from pre-trained COCO weights
python3 coco.py train --dataset=/path/to/coco/ --model=coco
# Train a new model starting from ImageNet weights
python3 coco.py train --dataset=/path/to/coco/ --model=imagenet
# Continue training a model that you had trained earlier
python3 coco.py train --dataset=/path/to/coco/ --model=/path/to/weights.h5
# Continue training the last model you trained
python3 coco.py train --dataset=/path/to/coco/ --model=last
# Run COCO evaluatoin on the last model you trained
python3 coco.py evaluate --dataset=/path/to/coco/ --model=last
"""
import os
import time
import sys
import json
import datetime
import numpy as np
import skimage.io
from imgaug import augmenters as iaa
import re
import tqdm
import timeit
import logging
import cv2
import csv
from skimage.measure import find_contours
import skimage.draw
import matplotlib.pyplot as plt
from matplotlib import patches, lines
from matplotlib.patches import Polygon
import IPython.display
from keras.utils import plot_model
# Download and install the Python COCO tools from https://github.com/waleedka/coco
# That's a fork from the original https://github.com/pdollar/coco with a bug
# fix for Python 3.
# I submitted a pull request https://github.com/cocodataset/cocoapi/pull/50
# If the PR is merged then use the original repo.
# Note: Edit PythonAPI/Makefile and replace "python" with "python3".
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval
from pycocotools import mask as maskUtils
import zipfile
import urllib.request
import shutil
from config import Config
import utils
import model as modellib
import visualize
import torch
# Root directory of the project
ROOT_DIR = os.getcwd()
# Path to trained weights file
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.pth")
# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")
DEFAULT_DATASET_YEAR = "2014"
############################################################
# Configurations
############################################################
class CocoConfig(Config):
    """Configuration for training on the building-footprint dataset.

    Derives from the base Config class and overrides values specific
    to this dataset. Despite the "coco" name, the dataset loaded below
    (see CocoDataset.load_building) has a single foreground class.
    """
    # Give the configuration a recognizable name
    NAME = "coco"
    # We use one GPU with 8GB memory, which can fit one image.
    # Adjust down if you use a smaller GPU.
    IMAGES_PER_GPU = 1
    # Number of GPUs to train on (increase for multi-GPU training).
    GPU_COUNT = 1
    # Number of classes (including background): background + building.
    NUM_CLASSES = 1+1
    # Training batches per epoch / validation batches per epoch.
    STEPS_PER_EPOCH = 500
    VALIDATION_STEPS = 30
############################################################
# Dataset
############################################################
class CocoDataset(utils.Dataset):
    """Dataset of SpaceNet Vegas building footprints (single class: building).

    Annotations come as one JSON row per building; rows are grouped per
    image via 'ImageId', and 'BuildingId' == '1' marks the first building
    of a new image. All tiles are assumed to be 650x650 pixels, so the
    images themselves are never opened while loading.
    """

    # Fixed tile side length of the SpaceNet Vegas imagery.
    _IMAGE_SIZE = 650

    @staticmethod
    def _parse_polygon(annotation):
        """Extract one building outline from an annotation row.

        Returns {'x': [...], 'y': [...]} with float vertex coordinates, or
        None when the polygon is degenerate: no vertices, fewer than two
        vertices, or a bounding box thinner than 1.6 px in either axis.

        Bug fix: the original checks combined comparisons with bitwise '|'
        (e.g. ``len(x) == 0|len(y) == 0``), which due to operator
        precedence never rejected what was intended; logical 'or' restores
        the intended filtering.
        """
        xs = [float(s) for s in re.findall(r'-?\d+\.?\d*', annotation['X'])]
        ys = [float(s) for s in re.findall(r'-?\d+\.?\d*', annotation['Y'])]
        if len(xs) == 0 or len(ys) == 0:
            return None
        if len(xs) < 2 or len(ys) < 2:
            return None
        if abs(max(xs) - min(xs)) < 1.6 or abs(max(ys) - min(ys)) < 1.6:
            return None
        return {'x': xs, 'y': ys}

    def load_building(self, dataset_dir, subset):
        """Load a subset of the building dataset.

        dataset_dir: Root directory of the dataset.
        subset: Subset to load: "train", "val" or "test".
        """
        # Add classes. We have only one foreground class to add.
        self.add_class("building", 1, "building")
        assert subset in ["train", "val", "test"]
        dataset_dir = os.path.join(dataset_dir, subset)
        size = self._IMAGE_SIZE

        if subset == "test":
            # Test images carry no annotations: register them by name only.
            for filename in os.listdir(dataset_dir):
                self.add_image(
                    "building",
                    image_id=filename,  # use file name as a unique image id
                    path=os.path.join(dataset_dir, filename),
                    width=size, height=size)
            return

        annotations = json.load(open(os.path.join(
            dataset_dir, "AOI_2_Vegas_Train_Building_Solutions_modified.json")))

        # Accumulate polygons per image; 'BuildingId' == '1' signals the
        # first building of a new image, at which point the previous
        # image's polygons are flushed via add_image().
        polygons = []
        filename = None
        image_path = None
        for a in annotations:
            if a['BuildingId'] == '1':
                # A new image starts: register the previous image, if any.
                if polygons:
                    self.add_image(
                        "building",
                        image_id=filename,  # use file name as a unique image id
                        path=image_path,
                        width=size, height=size,
                        polygons=polygons)
                polygons = []
            poly = self._parse_polygon(a)
            if poly is not None:
                polygons.append(poly)
            # Remember which image this row belongs to; used when flushing.
            filename = 'RGB-PanSharpen_' + a['ImageId'] + '.tif'
            image_path = os.path.join(dataset_dir, filename)
        # Bug fix: the loop above only flushes when the *next* image begins,
        # so the very last image must be registered explicitly.
        if polygons:
            self.add_image(
                "building",
                image_id=filename,
                path=image_path,
                width=size, height=size,
                polygons=polygons)

    def load_mask(self, image_id):
        """Load instance masks for the given image.

        Returns:
            masks: A bool array of shape [height, width, instance count]
                with one mask per building instance.
            class_ids: a 1D int32 array of class IDs (all 1: "building").
        """
        image_info = self.image_info[image_id]
        if image_info["source"] != "building":
            # Not one of our images: delegate to the parent class.
            # (Bug fix: super(self.__class__, self) recurses forever if this
            # class is ever subclassed; name the class explicitly.)
            return super(CocoDataset, self).load_mask(image_id)
        # Convert polygons to a bitmap mask of shape
        # [height, width, instance_count], one channel per instance.
        mask = np.zeros([image_info['height'], image_info['width'],
                         len(image_info['polygons'])], dtype=np.uint8)
        for i, poly in enumerate(image_info["polygons"]):
            # Passing the image shape clips vertices that fall outside the
            # bounds (out-of-range coordinates previously raised IndexError).
            rr, cc = skimage.draw.polygon(poly['y'], poly['x'], mask.shape[:2])
            mask[rr, cc, i] = 1
        # One class only, so every instance gets class id 1.
        # (np.bool was removed in NumPy 1.24; the builtin bool is correct.)
        return mask.astype(bool), np.ones([mask.shape[-1]], dtype=np.int32)

    def image_reference(self, image_id):
        """Return a link to the image in the COCO Website, or delegate.

        Bug fix: the parent-class result was previously computed but not
        returned, so non-COCO images always yielded None.
        """
        info = self.image_info[image_id]
        if info["source"] == "coco":
            return "http://cocodataset.org/#explore?id={}".format(info["id"])
        return super(CocoDataset, self).image_reference(image_id)

    # The following two functions are from pycocotools with a few changes.
    def annToRLE(self, ann, height, width):
        """Convert an annotation (polygons or uncompressed RLE) to RLE.

        :return: run-length encoding usable by pycocotools.mask
        """
        segm = ann['segmentation']
        if isinstance(segm, list):
            # polygon -- a single object might consist of multiple parts;
            # we merge all parts into one mask rle code
            rles = maskUtils.frPyObjects(segm, height, width)
            rle = maskUtils.merge(rles)
        elif isinstance(segm['counts'], list):
            # uncompressed RLE: compress it
            rle = maskUtils.frPyObjects(segm, height, width)
        else:
            # already compressed rle
            rle = ann['segmentation']
        return rle

    def annToMask(self, ann, height, width):
        """Convert an annotation (polygons, uncompressed RLE, or RLE) to a
        binary mask.

        :return: binary mask (numpy 2D array)
        """
        rle = self.annToRLE(ann, height, width)
        return maskUtils.decode(rle)
############################################################
# COCO Evaluation
############################################################
def test_building(model, dataset, output, limit=0):
"""Runs official COCO evaluation.
dataset: A Dataset object with valiadtion data
eval_type: "bbox" or "segm" for bounding box or segmentation evaluation
limit: if not 0, it's the number of images to use for evaluation
"""
# Pick COCO images from the dataset
image_ids = dataset.image_ids
# Limit to a subset
if limit:
image_ids = image_ids[:limit]
times = []
count = 0
a = enumerate(image_ids)
for i, image_id in a:
image_id = 100
start = timeit.default_timer()
image = dataset.load_image(image_id)
source_id_temp = dataset.image_info[image_id]["id"] # source ID = original image name
source_id = source_id_temp.split('.')[0]
print(source_id)
# image_name = source_id.split('_', 1)[1]
r = model.detect([image], source_id)[0]
stop = timeit.default_timer()
if count > 0:
times.append(stop - start)
# | |
<reponame>vishalbelsare/pyplan-ide<filename>pyplan/pyplan/dashboard/service.py
import uuid
from types import SimpleNamespace
from django.db.models import Q
from rest_framework import exceptions
from pyplan.pyplan.common.baseService import BaseService
from pyplan.pyplan.common.calcEngine import CalcEngine
from pyplan.pyplan.common.classes.eNodeProperty import eNodeProperty
from pyplan.pyplan.common.engineManager import EngineManager
from pyplan.pyplan.dashboardstyle.models import DashboardStyle
from pyplan.pyplan.department.models import Department
from pyplan.pyplan.report.models import Report
from pyplan.pyplan.usercompanies.models import UserCompany
from .classes.nodeDimension import NodeDimension
from .classes.nodeDimensionValue import NodeDimensionValue
from .classes.nodeEvalProperties import NodeEvalProperties, NodeEvalProperty
from .classes.nodeFullData import NodeFullData
from .classes.nodeProperties import NodeProperties
from .classes.nodeResult import (NodeResult, NodeResultColumns,
NodeResultPageInfo, NodeResultSerie)
from .classes.pivot import PivotQuery
from .models import Dashboard
from .serializers.nodeDimension import NodeDimensionSerializer
from .serializers.pivot import (PivotNodeValueChangesSerializer,
PivotQuerySerializer)
class DashboardManagerService(BaseService):
def getDashboard(self, dashboard_id):
return Dashboard.objects.get(pk=int(dashboard_id)) if dashboard_id.isnumeric() else Dashboard.objects.get(uuid=dashboard_id)
def companyDashboards(self):
company_id = self.client_session.companyId
model_id = self.client_session.modelInfo.modelId
dashboards = Dashboard.objects.filter(
model=model_id,
node__isnull=True,
owner__company__pk=company_id
)
return dashboards.order_by('name').distinct()
def myDashboards(self, report_id=None, favs=None):
usercompany_id = self.client_session.userCompanyId
model_id = self.client_session.modelInfo.modelId
dashboards = Dashboard.objects.filter(
model=model_id,
)
if type(favs) is bool and favs:
dashboards = dashboards.filter(
owner_id=usercompany_id,
is_fav=True,
)
else:
dashboards = dashboards.filter(
node__isnull=True,
)
if report_id and report_id.isnumeric():
dashboards = dashboards.filter(
report__pk=int(report_id),
)
else:
dashboards = dashboards.filter(
owner_id=usercompany_id,
report__pk__isnull=True,
)
return dashboards.order_by('order').distinct()
    def sharedWithMe(self, report_id):
        """List dashboards from my company that are shared with me.

        A dashboard qualifies when it is shared directly with me, shared
        with one of my departments, or public but owned by someone else.
        With a numeric report_id only that report's dashboards are kept;
        otherwise dashboards living inside a shared report are excluded
        (they are reachable through the report itself).
        """
        company_id = self.client_session.companyId
        usercompany_id = self.client_session.userCompanyId
        model_id = self.client_session.modelInfo.modelId
        dashboards = Dashboard.objects.filter(
            # shared to specific users, me included
            Q(usercompanies__pk=usercompany_id) |
            # shared to departments where I belong
            Q(departments__usercompanies__pk=usercompany_id) |
            # public but not mine
            (Q(is_public=True) & ~Q(owner_id=usercompany_id)),
            # from the same company
            owner__company__pk=company_id,
            model=model_id,
        )
        if report_id and isinstance(report_id, int):
            dashboards = dashboards.filter(
                report__pk=int(report_id),
            )
        else:
            # exclude dashboards that are inside a shared report
            # (same sharing predicate, applied to reports instead)
            reports = Report.objects.filter(
                # shared to specific users, me included
                Q(usercompanies__pk=usercompany_id) |
                # shared to departments where I belong
                Q(departments__usercompanies__pk=usercompany_id) |
                # public but not mine
                (Q(is_public=True) & ~Q(owner_id=usercompany_id)),
                # from the same company
                owner__company__pk=company_id,
                model=model_id,
            )
            dashboards = dashboards.exclude(report__in=reports)
        return dashboards.order_by('order').distinct()
    def allMyDashboards(self):
        """List every dashboard I can reach for the current model.

        Union of: (a) dashboards shared with me directly, via my
        departments, or publicly within my company; (b) my own
        model-level dashboards; and (c) dashboards that live inside
        reports shared with me.
        """
        company_id = self.client_session.companyId
        usercompany_id = self.client_session.userCompanyId
        model_id = self.client_session.modelInfo.modelId
        dashboards = Dashboard.objects.filter(
            Q(
                # shared to specific users, me included
                Q(usercompanies__pk=usercompany_id) |
                # shared to departments where I belong
                Q(departments__usercompanies__pk=usercompany_id) |
                # public but not mine
                (Q(is_public=True) & ~Q(owner_id=usercompany_id)),
                # from the same company
                owner__company__pk=company_id,
                model=model_id
            ) |
            # my dashboards
            Q(model=model_id,
                node__isnull=True,
                owner__id=usercompany_id)
        )
        # include dashboards that are inside a shared report
        reports = Report.objects.filter(
            # shared to specific users, me included
            Q(usercompanies__pk=usercompany_id) |
            # shared to departments where I belong
            Q(departments__usercompanies__pk=usercompany_id) |
            # public but not mine
            (Q(is_public=True) & ~Q(owner_id=usercompany_id)),
            # from the same company
            owner__company__pk=company_id,
            model=model_id,
        )
        dashboards_from_shared_reports = Dashboard.objects.filter(
            report__in=reports)
        dashboards_response = dashboards | dashboards_from_shared_reports
        return dashboards_response.order_by('name').distinct()
def mySharedDashboards(self, report_id):
dashboards = Dashboard.objects.filter(
Q(departments__isnull=False) | Q(usercompanies__isnull=False) | Q(is_public=True),
owner__pk=self.client_session.userCompanyId,
model=self.client_session.modelInfo.modelId
)
return dashboards.order_by('order').distinct()
def getNodeFullData(self, nodeQuery):
calcEngine = CalcEngine.factory(self.client_session)
result = NodeFullData()
result.itemType = "linechart"
item_properties = NodeProperties()
value_suffix = calcEngine.getNodeProperty(
nodeQuery.node, eNodeProperty.UNITS.value)
if value_suffix:
value_suffix = f" {value_suffix}"
item_properties.unit = f"({value_suffix})"
item_properties.tooltip.valueSuffix = value_suffix
# Main
original_id = calcEngine.getNodeProperty(
nodeQuery.node, eNodeProperty.ORIGINAL_ID.value)
if not original_id is None:
node_id = original_id
else:
node_id = nodeQuery.node
node_title = calcEngine.getNodeProperty(
nodeQuery.node, eNodeProperty.TITLE.value)
node_class = calcEngine.getNodeProperty(
node_id, eNodeProperty.CLASS.value)
alias_class = calcEngine.getNodeProperty(
nodeQuery.node, eNodeProperty.CLASS.value)
item_properties.title.text = node_title
item_type = None
object_type = None
node_result = None
edit_mode = False
if node_class == "module":
item_type = "objectItem"
object_type = "diagramviewer"
elif node_class == "decision" and (calcEngine.isChoice(node_id) or calcEngine.isSelector(node_id)):
item_type = "selector"
node_result = self._evaluateNode(
node_id, result.dims, result.rows, result.columns)
elif alias_class == "formnode":
item_type = "formnode"
node_result = self._evaluateNode(
nodeQuery.node, result.dims, result.rows, result.columns)
if calcEngine.isTable(node_id):
item_type = "nodetable"
edit_mode = True
elif node_class == "button":
item_type = "button"
item_properties.title.enabled = False
item_properties.title.text = ""
node_result = self._evaluateNode(
node_id, result.dims, result.rows, result.columns)
elif calcEngine.isIndex(node_id):
item_type = "indexlist"
if calcEngine.isTime(node_id):
item_properties.index = type("obj", (object,), {"ui": "range"})
else:
# get dimensions
result.dims = self.getNodeIndexes(nodeQuery.node)
dim_list = list(result.dims).copy()
for dim in dim_list:
if calcEngine.isTime(dim.name) or calcEngine.isTime(dim.field):
dim.isTime = True
if len(result.columns) is 0 and (dim.isTime or str(dim.field).startswith("Measures")):
result.dims.remove(dim)
result.columns.append(dim)
continue
if len(result.rows) is 0:
result.dims.remove(dim)
result.rows.append(dim)
if len(result.columns) > 0 and len(result.rows) > 0:
break
# ' evaluate node
node_result = self._evaluateNode(
node_id,
result.dims,
result.rows,
result.columns,
"sum",
nodeQuery.fromRow,
nodeQuery.toRow
)
if calcEngine.isTable(node_id):
if not result.columns is None and len(result.columns) == 0 and not result.dims is None and len(result.dims) > 0:
dim = result.dims[0]
result.dims.remove(dim)
result.columns.append(dim)
item_type = "nodetable"
item_properties.cubeOptions = dict(editMode=True)
if result.rows and len(result.rows) > 0:
item_properties.cubeOptions["rows"] = [
str(xx.field).split(".")[0] for xx in result.rows]
if result.columns and len(result.columns) > 0:
item_properties.cubeOptions["cols"] = [
str(xx.field).split(".")[0] for xx in result.columns]
else:
item_type = "indicator"
if len(result.dims) > 0 or len(result.columns) > 0 or len(result.rows) > 0 or (not node_result is None and not node_result.series is None and len(node_result.series) > 1):
item_type = "table"
result.nodeId = nodeQuery.node
result.nodeName = node_title
result.itemProperties = item_properties
result.itemType = item_type
result.objectType = object_type
result.nodeResult = node_result
return result
def existNode(self, nodeId):
calcEngine = CalcEngine.factory(self.client_session)
return calcEngine.existNode(nodeId)
def evaluateNode(self, nodeQuery):
calcEngine = CalcEngine.factory(self.client_session)
return self._evaluateNode(
nodeQuery.node, nodeQuery.dims, nodeQuery.rows,
nodeQuery.columns, nodeQuery.summaryBy,
nodeQuery.fromRow, nodeQuery.toRow, nodeQuery.bottomTotal, nodeQuery.rightTotal,
nodeQuery.timeFormat, nodeQuery.timeFormatType, nodeQuery.calendarType, nodeQuery.resultType)
def getOrCreate(self, node_id):
"""
- If the user has a dashboard for this model and node, return it.
- Otherwise, create it and return it.
"""
# Retrieve user dashboards for that node
user_company = UserCompany(id=self.client_session.userCompanyId)
dashboard = Dashboard.objects.filter(
node=node_id,
model=self.client_session.modelInfo.modelId,
owner=user_company,
)
if dashboard:
return dashboard[0]
else:
# Create
calcEngine = CalcEngine.factory(self.client_session)
node_name = calcEngine.getNodeProperty(
node_id, eNodeProperty.TITLE.value)
return Dashboard.objects.create(
model=self.client_session.modelInfo.modelId,
name=node_name,
node=node_id,
owner=user_company,
)
def createDashboard(self, data):
user_company = UserCompany(id=self.client_session.userCompanyId)
report = None
if "reportId" in data and not data["reportId"] is None:
report = Report(id=data["reportId"])
return Dashboard.objects.create(
model=self.client_session.modelInfo.modelId,
name=data["name"],
node=data["node"] if "node" in data else None,
owner=user_company,
report=report,
)
def updateDashboard(self, dashboard, data):
if "name" in data:
dashboard.name = data["name"]
if "definition" in data:
dashboard.definition = data["definition"]
if "report_id" in data:
dashboard.report = Report(id=data["report_id"])
if "styles" in data:
dashboard.styles.set(
DashboardStyle.objects.filter(pk__in=data['styles']))
return dashboard.save()
def bulkDelete(self, ids):
return Dashboard.objects.filter(pk__in=ids).delete()
def changeOrder(self, ids):
for index, val in enumerate(ids):
if val.isnumeric():
dash = Dashboard.objects.get(pk=int(val))
dash.order = index + 1
dash.save()
def getIndexValues(self, data):
calcEngine = CalcEngine.factory(self.client_session)
index_type = calcEngine.getIndexType(data['id'])
response = calcEngine.getIndexValues(data)
newResponse = {}
index_values = []
if response:
for index_value in response:
index_values.append(NodeDimensionValue(
type=index_type, value=index_value))
newResponse['results'] = index_values
return newResponse
def isResultComputed(self, nodes):
calcEngine = CalcEngine.factory(self.client_session)
is_result = calcEngine.isResultComputed(nodes)
res = []
if len(is_result) == len(nodes):
for nn, node in enumerate(nodes):
if not is_result[nn]:
res.append(node)
return res
def getShares(self, dashboard_id):
dashboard = Dashboard.objects.get(pk=dashboard_id)
is_shared = dashboard.usercompanies.count() > 0 or dashboard.departments.count() > 0
return {
"departments": Department.objects.filter(company_id=self.client_session.companyId).all(),
"usercompanies_shares": dashboard.usercompanies,
"departments_shares": dashboard.departments,
"sharedToEveryone": dashboard.is_public,
"sharedTo": is_shared,
"noShared": not is_shared,
}
def setShares(self, dashboard_id, data):
dashboard = Dashboard.objects.get(pk=dashboard_id)
dashboard.is_public = data["sharedToEveryone"]
dashboard.usercompanies.clear()
dashboard.departments.clear()
if not data["noShared"]:
for usercompany_id in data["usercompanies_ids"]:
usercompany = UserCompany.objects.get(pk=usercompany_id)
dashboard.usercompanies.add(usercompany)
for department_id in data["departments_ids"]:
department = Department.objects.get(pk=department_id)
dashboard.departments.add(department)
dashboard.save()
return {
"departments": Department.objects.filter(company_id=self.client_session.companyId).all(),
"usercompanies_shares": dashboard.usercompanies,
"departments_shares": dashboard.departments,
"sharedToEveryone": dashboard.is_public,
"sharedTo": dashboard.usercompanies.count() > 0 or dashboard.departments.count() > 0,
}
def copy(self, dashboard_id, name=None):
dashboard = Dashboard.objects.get(pk=dashboard_id)
if name:
dashboard.name = name
dashboard.pk = None
dashboard.uuid = uuid.uuid4()
if dashboard.owner_id != self.client_session.userCompanyId:
dashboard.report = None
dashboard.owner_id = self.client_session.userCompanyId
dashboard.is_public = False
dashboard.save()
return dashboard.pk
# Pivot
    def getCubeMetadata(self, query):
        """Return pivot-cube metadata for query.cube from the calc engine.

        NOTE(review): 'original' (the node's identifier) is currently
        unused; it is kept only for the commented-out wiki/workflow
        enrichment below.
        """
        calcEngine = CalcEngine.factory(self.client_session)
        result = calcEngine.getCubeMetadata(PivotQuerySerializer(query).data)
        original = calcEngine.getNodeProperty(
            query.cube, eNodeProperty.IDENTIFIER.value)
        # if not res is None and not res.nodeProperties is None:
        # Dim srvWiki As New KnowledgeBaseService(token)
        # res.nodeProperties.hasDescription = srvWiki.hasDescription(original)
        # Dim srvWorkflow As New Workflow(token)
        # res.nodeProperties.hasWorkflowTask = srvWorkflow.hasTask(original)
        return result
def getCubeValues(self, pivotQuery: PivotQuery):
calcEngine = CalcEngine.factory(self.client_session)
return calcEngine.getCubeValues(PivotQuerySerializer(pivotQuery).data)
def setCubeChanges(self, changes):
calcEngine = CalcEngine.factory(self.client_session)
return calcEngine.setNodeValueChanges(PivotNodeValueChangesSerializer(changes).data)
def getCubeDimensionValues(self, query):
calcEngine = CalcEngine.factory(self.client_session)
return calcEngine.getCubeDimensionValues(PivotQuerySerializer(query).data)
# Private Methods
def _evaluateNode(
self, node: str, dims: list, rows: list, columns: list, summary_by: str = "sum",
from_row: int = 0, to_row: int = 0, bottom_total: bool = False, right_total: bool = False,
time_format: str = "A", time_format_type: str = "FLO", calendar_type: str = "CAL", resultType: str = ""):
calcEngine = CalcEngine.factory(self.client_session)
node_result = NodeResult()
props_to_get = [{"name": "numberFormat", "value": ""},
{"name": eNodeProperty.CLASS.value, "value": ""}]
node_properties = calcEngine.getNodeProperties(node, props_to_get)
for prop in node_properties['properties']:
if prop['name'] == "numberFormat" and prop['value']:
node_result.nodeProperties["numberFormat"] = str(prop['value'])
elif prop['name'] == eNodeProperty.CLASS.value:
node_class = str(prop['value'])
if calcEngine.isIndex(node):
node_result.indexValues = self.getIndexValues({'id': node})[
'results']
elif node_class == "formnode":
original_id = calcEngine.getNodeProperty(
node, eNodeProperty.ORIGINAL_ID.value)
if original_id:
node_result = self._evaluateNode(
original_id, dims, rows, columns)
props_to_get = [
| |
G.nodes():
info = nodeInfoDictPerturbed[node]
if 'argsIndex' in info:
argsIndex = info['argsIndex']
pressure = info['simulationData']['pressure']
velocityPressurePerturbed[argsIndex] = pressure
for edgeIndex in edgeIndexList:
info = edgeInfoDictPerturbed[edgeIndex]
if 'argsIndex' in info:
argsIndex = info['argsIndex']
velocity = info['simulationData']['velocity']
velocityPressurePerturbed[argsIndex] = velocity
return nodeInfoDictPerturbed, edgeInfoDictPerturbed, velocityPressurePerturbed
    def GBMTest(self, saveResult=False):
        """
        Create a GBM network with radius following the BraVa distribution, generate a ground truth solution, then perturb the network
        in a particular way while keeping the terminating pressures unchanged, then try to solve the network.

        saveResult: when True, pickle reference/perturbed results to the
        'fluidSimulationResult' folder -- NOTE(review): only reachable if
        the early `return` after the first plot (marked below) is removed.
        """
        start_time = timeit.default_timer()
        functionName = inspect.currentframe().f_code.co_name
        resultDict = {'referenceYear': {}, 'perturbedYear': {}}
        # Build the network and compute a ground-truth flow/pressure solution.
        self.loadNetwork(version=4, year='BraVa')
        self.convertNetowrk()
        self.adjustNetwork()
        self.setNetwork(option=2)
        # extraInfo = {'perturbedYear': 2013, 'excludedEdgeIndex': [0,1,2,3,4,7,12]}
        # self.perturbNetwork(option=2, extraInfo=extraInfo)
        # self.setNetwork(option=2)
        success = self.createGroundTruth()
        self.showFlowInfo()
        if not success:
            return
        G = self.G
        nodeInfoDict = self.nodeInfoDict
        edgeInfoDict = self.edgeInfoDict
        spacing = self.spacing
        # Human-readable names for the major branches, keyed by edge index.
        edgeNameDict = {0: 'LICA', 3: 'LICA', 2: 'RICA', 7: 'RICA', 1: 'VA', 4: 'RPCA\nComm', 8: 'LMCA', 9: 'LM', 11: 'RM', 10: 'RMCA', 5: 'LPCA', 6: 'RPCA', 20: 'ACA'}
        # nodeLabelDict = {node: G.node[node]['nodeIndex'] for node in G.nodes()} # nodeIndex
        # nodeLabelDict = {node: G.node[node]['depth'] for node in G.nodes()} # nodeDepth
        nodeLabelDict = {} # None
        # nodeValueList = [G.node[node]['nodeIndex'] for node in G.nodes()] # nodeIndex
        # nodeValueList = [G.node[node]['depth'] for node in G.nodes()] # nodeDepth
        nodeValueList = [0 for node in G.nodes()] # None
        # edgeLabelDict = {edge: G[edge[0]][edge[1]]['edgeIndex'] for edge in G.edges()} # edgeIndex
        # edgeLabelDict = {edge: G[edge[0]][edge[1]]['depth'] for edge in G.edges()} # edgeDepth
        # edgeLabelDict = {edge: np.round(edgeInfoDict[G[edge[0]][edge[1]]['edgeIndex']]['meanRadius']*spacing*1000, 2) for edge in G.edges()} # edge radius
        edgeLabelDict = {edge: edgeNameDict[G[edge[0]][edge[1]]['edgeIndex']] if G[edge[0]][edge[1]]['edgeIndex'] in edgeNameDict else '' for edge in G.edges()} # edge name
        # edgeValueList = [G[edge[0]][edge[1]]['edgeIndex'] for edge in G.edges()] # edgeIndex
        # edgeValueList = [G[edge[0]][edge[1]]['depth'] for edge in G.edges()] # edgeDepth
        # edgeValueList = [np.round(edgeInfoDict[G[edge[0]][edge[1]]['edgeIndex']]['meanRadius']*spacing*1000, 2) for edge in G.edges()] # edgeIndex
        edgeValueList = [0 for edge in G.edges()] # None
        infoDict = {'nodeLabelDict': nodeLabelDict, 'nodeValueList': nodeValueList, 'nodeColorbarLabel': [],
                    'edgeLabelDict': edgeLabelDict, 'edgeValueList': edgeValueList, 'edgeColorbarLabel': [],
                    'figTitle': 'Major branch name'}
        self.plotNetwork(infoDict, figIndex=2, isLastFigure=True)
        # NOTE(review): this early return makes everything below
        # (perturbation, optimization, plotting, saving) unreachable.
        # It looks like a debugging toggle -- remove it to run the
        # full experiment.
        return
        # print(G.edges(data=True))
        # nodeLabelDict = {node: G.node[node]['depth'] for node in G.nodes()} # nodeLevel
        # nodeLabelDict = {node: G.node[node]['nodeIndex'] for node in G.nodes()} # nodeIndex
        nodeLabelDict = {node: np.round(nodeInfoDict[node]['simulationData']['pressure'] / 13560 / 9.8 * 1000, 1) for node in G.nodes()} # ground truth pressure in mmHg
        # nodeValueList = [G.node[node]['depth'] for node in G.nodes()] # nodeLevel
        # nodeValueList = [G.node[node]['nodeIndex'] for node in G.nodes()] # nodeIndex
        nodeValueList = [np.round(nodeInfoDict[node]['simulationData']['pressure'] / 13560 / 9.8 * 1000, 1) for node in G.nodes()] # ground truth pressure in mmHg
        # edgeLabelDict = {edge: G[edge[0]][edge[1]]['depth'] for edge in G.edges()} # edgeLevel
        # edgeLabelDict = {edge: G[edge[0]][edge[1]]['edgeIndex'] for edge in G.edges()} # edgeIndex
        edgeLabelDict = {edge: np.round(edgeInfoDict[G[edge[0]][edge[1]]['edgeIndex']]['simulationData']['flow']*10**6, 2) for edge in G.edges()} # ground truth flow in cm^3/s
        # edgeValueList = [G[edge[0]][edge[1]]['depth'] for edge in G.edges()] # edgeLevel
        # edgeValueList = [G[edge[0]][edge[1]]['edgeIndex'] for edge in G.edges()] # edgeIndex
        # edgeValueList = [edgeInfoDict[edgeIndex]['meanRadius'] for edgeIndex in edgeIndexList] # meanRadius
        edgeValueList = [np.round(edgeInfoDict[G[edge[0]][edge[1]]['edgeIndex']]['simulationData']['flow']*10**6, 2) for edge in G.edges()] # ground truth flow in cm^3/s
        infoDict = {'nodeLabelDict': nodeLabelDict, 'nodeValueList': nodeValueList, 'nodeColorbarLabel': 'Node depth',
                    'edgeLabelDict': edgeLabelDict, 'edgeValueList': edgeValueList, 'edgeColorbarLabel': 'Edge depth',
                    'figTitle': 'GBM Reference'}
        self.plotNetwork(infoDict, figIndex=1, isLastFigure=False)
        resultDict['referenceYear'] = {'year': 'BraVa', 'nodeInfoDict': nodeInfoDict, 'edgeInfoDict': edgeInfoDict, 'G': G}
        ## Solve the system with perturbed network properties
        edgeIndexList = self.edgeIndexList
        # Manually perturb the network #
        extraInfo = {'perturbedYear': 2013, 'excludedEdgeIndex': [0,1,2,3,4,7,12]}
        # self.perturbNetwork(option=2, extraInfo=extraInfo)
        # self.setNetwork(option=2)
        # self.showFlowInfo()
        # computeNetworkDetailExtraInfo = None
        # Load previous optimization result #
        loadFileName = 'fluidSimulationResult3(referenceYear=BraVa, perturbedYear=2013).pkl'
        nodeInfoDictPerturbed, edgeInfoDictPerturbed, velocityPressurePerturbed = self.loadFluidResult(loadFileName)
        velocityPressureInit = velocityPressurePerturbed
        self.nodeInfoDict = nodeInfoDictPerturbed
        self.edgeInfoDict = edgeInfoDictPerturbed
        computeNetworkDetailExtraInfo = {'excludedEdgeIndex': [0,1,2,3,4,5,6,7,10,11,12,13]}
        numOfNodes = len([node for node in nodeInfoDict if 'argsIndex' in nodeInfoDict[node]])
        numOfEdges = len([edgeIndex for edgeIndex in edgeIndexList if 'argsIndex' in edgeInfoDict[edgeIndex]])
        pressureIn = 13560 * 9.8 * 0.12 # Pascal # check if this number is consistent with that was used in the reference case!
        # NOTE(review): this overwrites the velocityPressureInit loaded
        # from the pickle three lines above with a synthetic initial guess.
        velocityPressureInit = np.hstack((np.full((numOfEdges,), 0.4), np.linspace(pressureIn*0.8, pressureIn*0.5, numOfNodes)))
        velocityPressureInit = [float(p) for p in velocityPressureInit]
        # bounds in the form of ((min, min...), (max, max...)) #
        # boundsVelocityPressure = [[], []] # first sublist contains lower bound and the second sublist contains upper bound
        # boundsVelocityPressure[0] = [0] * numOfEdges + [13560*9.8*0.00] * numOfNodes # min velocity = 0 m/s, min pressure = 0 mmHg
        # boundsVelocityPressure[1] = [5] * numOfEdges + [13560*9.8*0.12] * numOfNodes # max velocity = 5 m/s, max pressure = 120 mmHg
        # boundsVelocityPressure = tuple(map(tuple, boundsVelocityPressure))
        # bounds in the form of ((min, max), (min, max)...) #
        boundsVelocityPressure = [[0, 5]] * numOfEdges + [[13560*9.8*0.00, 13560*9.8*0.12]] * numOfNodes
        # Improve the lower bound of pressures at each node
        self.calculateVariableBounds()
        for node in G.nodes():
            if 'argsIndex' in nodeInfoDict[node]:
                argsIndex = self.nodeInfoDict[node]['argsIndex']
                minPressure = self.nodeInfoDict[node]['simulationData']['minPressure']
                boundsVelocityPressure[argsIndex][0] = minPressure
        boundsVelocityPressure = tuple(map(tuple, boundsVelocityPressure))
        fluidMethod = 'HW'
        # least square optimization #
        # self.setupFluidEquations()
        # eqnInfoDictList = self.eqnInfoDictList
        # optResult = least_squares(computeNetworkDetail, velocityPressureInit, args=(eqnInfoDictList, fluidMethod), bounds=boundsVelocityPressure, ftol=1e-9, xtol=1e-9)
        # velocityPressure = np.abs(optResult.x)
        # cost = optResult.cost
        # message = optResult.message
        # differential evolution, bounds in (min, max) pair form #
        # self.setupFluidEquations()
        # eqnInfoDictList = self.eqnInfoDictList
        # errorNorm = 2
        # optResult = differential_evolution(computeNetworkDetail, args=(eqnInfoDictList, fluidMethod, errorNorm), bounds=boundsVelocityPressure, maxiter=2000, polish=True, disp=True)
        # velocityPressure = np.abs(optResult.x)
        # cost = optResult.fun
        # message = optResult.message
        # basinhopping, bounds in (min, max) pair form #
        self.setupFluidEquations()
        eqnInfoDictList = self.eqnInfoDictList
        errorNorm = 2
        minimizer_kwargs = {'method': 'BFGS', 'args': (eqnInfoDictList, fluidMethod, errorNorm, computeNetworkDetailExtraInfo), 'options': {'norm': np.inf, 'maxiter': 40000}}
        # minimizer_kwargs = {'method': 'L-BFGS-B', 'args': (eqnInfoDictList, fluidMethod, errorNorm), 'bounds': boundsVelocityPressure, 'options': {'maxiter': 40000, 'maxfun': 40000}}
        optResult = basinhopping(computeNetworkDetail, velocityPressureInit, minimizer_kwargs=minimizer_kwargs, niter=100, T=100, stepsize=50, interval=5, niter_success=10, disp=True)
        velocityPressure = np.abs(optResult.x)
        cost = optResult.fun
        message = optResult.message
        print('cost={}, message={}'.format(cost, message))
        pressures = velocityPressure[numOfEdges:]
        print('Minimum pressure is {} mmHg and maximum pressure is {} mmHg'.format((np.amin(pressures))/13560/9.8*1000, (np.amax(pressures))/13560/9.8*1000))
        velocities = velocityPressure[:numOfEdges]
        print('Minimum velocity is {} m/s and maximum velocity is {} m/s'.format(np.amin(velocities), np.amax(velocities)))
        velocityPressureGroundTruth = self.velocityPressureGroundTruth
        self.velocityPressure = velocityPressure
        self.validateFluidEquations(velocityPressure=velocityPressure)
        print(list(zip(velocityPressureGroundTruth, velocityPressure)))
        self.updateNetworkWithSimulationResult(velocityPressure)
        elapsed = timeit.default_timer() - start_time
        print('Elapsed time for function {}: {} sec'.format(functionName, elapsed))
        G = self.G
        nodeInfoDict = self.nodeInfoDict
        edgeInfoDict = self.edgeInfoDict
        nodeLabelDict = {node: np.round(nodeInfoDict[node]['simulationData']['pressure'] / 13560 / 9.8 * 1000, 1) for node in G.nodes()} # ground truth pressure in mmHg
        nodeValueList = [np.round(nodeInfoDict[node]['simulationData']['pressure'] / 13560 / 9.8 * 1000, 1) for node in G.nodes()] # ground truth pressure in mmHg
        edgeLabelDict = {edge: np.round(edgeInfoDict[G[edge[0]][edge[1]]['edgeIndex']]['simulationData']['flow']*10**6, 2) for edge in G.edges()} # ground truth flow in cm^3/s
        edgeValueList = [np.round(edgeInfoDict[G[edge[0]][edge[1]]['edgeIndex']]['simulationData']['flow']*10**6, 2) for edge in G.edges()] # ground truth flow in cm^3/s
        infoDict = {'nodeLabelDict': nodeLabelDict, 'nodeValueList': nodeValueList, 'nodeColorbarLabel': 'Node',
                    'edgeLabelDict': edgeLabelDict, 'edgeValueList': edgeValueList, 'edgeColorbarLabel': 'Edge',
                    'figTitle': 'GBM {}'.format(extraInfo['perturbedYear'])}
        self.plotNetwork(infoDict, figIndex=3, isLastFigure=True)
        resultDict['perturbedYear'] = {'year': 2013, 'nodeInfoDict': nodeInfoDict, 'edgeInfoDict': edgeInfoDict, 'G': G}
        if saveResult:
            directory = self.directory
            saveFolderPath = join(directory, 'fluidSimulationResult')
            saveFileName = 'fluidSimulationResult(referenceYear={}, perturbedYear={}).pkl'.format(resultDict['referenceYear']['year'], resultDict['perturbedYear']['year'])
            with open(join(saveFolderPath, saveFileName), 'wb') as f:
                pickle.dump(resultDict, f, 2)
            print('{} saved to {}'.format(saveFileName, saveFolderPath))
def GBMTest2(self, perturbTerminatingPressureOption=1, saveResult=False):
"""
Perturb the terminating pressure in a specific way and check if the new system could be solved.
"""
start_time = timeit.default_timer()
functionName = inspect.currentframe().f_code.co_name
resultDict = {'referenceYear': {}, 'perturbedYear': {}}
self.loadNetwork(version=4, year='BraVa')
self.convertNetowrk()
self.adjustNetwork()
self.setNetwork(option=2)
success = self.createGroundTruth(option=2)
self.printTerminatingPressurePerPartition()
# self.showFlowInfo()
if not success:
return
G = self.G
nodeInfoDict = self.nodeInfoDict
edgeInfoDict = self.edgeInfoDict
spacing = self.spacing
# nodeLabelDict = {node: G.node[node]['depth'] for node in G.nodes()} # nodeLevel
# nodeLabelDict = {node: G.node[node]['nodeIndex'] for node in G.nodes()} # nodeIndex
nodeLabelDict = {node: np.round(nodeInfoDict[node]['simulationData']['pressure'] / 13560 / 9.8 * 1000, 1) for node in G.nodes()} # ground truth pressure in mmHg
# nodeValueList = [G.node[node]['depth'] for node in G.nodes()] # nodeLevel
# nodeValueList = [G.node[node]['nodeIndex'] for node in G.nodes()] # nodeIndex
nodeValueList = [np.round(nodeInfoDict[node]['simulationData']['pressure'] / 13560 / 9.8 * 1000, 1) for node in G.nodes()] # ground truth pressure in mmHg
# edgeLabelDict = {edge: G[edge[0]][edge[1]]['depth'] for edge in | |
# from dsgn.utils.torch_utils import generate_coord
from __future__ import print_function
import argparse
import os
import time
import numpy as np
import torch
from torch._C import device
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.parallel
import torch.optim as optim
from torch.serialization import save
import torch.utils.data
torch.backends.cudnn.benchmark = True
from dsgn.dataloader import KITTILoader3D as ls
from dsgn.dataloader import KITTILoader_dataset3d as DA
# from dsgn.models import *
from dsgn.build_model import build_model
from dsgn.utils.numpy_utils import generate_depth_map_from_rect_points
from env_utils import *
from dsgn.models.loss3d import RPN3DLoss
from fvcore.nn import sigmoid_focal_loss_jit
import skimage.io
# multiprocessing distributed training
import torch.distributed as dist
import torch.utils.data.distributed
import torch.multiprocessing as mp
g_loss_normalizer = 1000 # avoid the value is too small at the begining
def get_parser():
    """Parse command-line options for training and post-process device settings.

    Side effects: resolves the GPU list (auto-picking a GPU via mem_info()
    when --devices is omitted, or expanding an "a-b" range into a comma list),
    fills in a random loopback dist URL when none was given, and exports
    CUDA_VISIBLE_DEVICES.

    Returns:
        argparse.Namespace: parsed and normalised arguments.
    """
    parser = argparse.ArgumentParser(description='PSMNet')
    parser.add_argument('-cfg', '--cfg', '--config', default='./configs/default/config_car.py', help='config path')
    parser.add_argument('--data_path', default='./data/kitti/training/', help='data_path')
    parser.add_argument('--epochs', type=int, default=60, help='number of epochs to train')
    parser.add_argument('--loadmodel', default=None, help='load model')
    parser.add_argument('--savemodel', default=None, help='save model')
    parser.add_argument('--debug', action='store_true', default=False, help='debug mode')
    parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)')
    parser.add_argument('--devices', '-d', type=str, default=None)
    parser.add_argument('--lr_scale', type=int, default=50, metavar='S', help='lr scale')
    parser.add_argument('--split_file', default='./data/kitti/train.txt', help='split file')
    parser.add_argument('--btrain', '-btrain', type=int, default=None)
    parser.add_argument('--start_epoch', type=int, default=1)
    parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
                        help='number of data loading workers (default: 4)')
    ## for distributed training
    parser.add_argument('--world-size', default=1, type=int,
                        help='number of nodes for distributed training')
    parser.add_argument('--rank', default=0, type=int,
                        help='node rank for distributed training')
    parser.add_argument('--dist-url', type=str,
                        help='url used to set up distributed training')
    parser.add_argument('--dist-backend', default='nccl', type=str,
                        help='distributed backend')
    parser.add_argument('--multiprocessing-distributed', action='store_true',
                        help='Use multi-processing distributed training to launch '
                             'N processes per node, which has N GPUs. This is the '
                             'fastest way to use PyTorch for either single node or '
                             'multi node data parallel training')
    args = parser.parse_args()
    # No device given: pick one automatically from mem_info() (presumably the
    # GPU with the least used memory — confirm against env_utils).
    if not args.devices:
        args.devices = str(np.argmin(mem_info()))
    # "a-b" range syntax: expand to a comma-separated list of device ids.
    # A missing bound defaults to 0 (start) or the number of GPUs (end).
    if args.devices is not None and '-' in args.devices:
        gpus = args.devices.split('-')
        gpus[0] = 0 if not gpus[0].isdigit() else int(gpus[0])
        gpus[1] = len(mem_info()) if not gpus[1].isdigit() else int(gpus[1]) + 1
        args.devices = ','.join(map(lambda x: str(x), list(range(*gpus))))
    # No dist URL given: default to a random loopback port (single-node case).
    if not args.dist_url:
        args.dist_url = "tcp://127.0.0.1:{}".format(random_int() % 30000)
    print('Using GPU:{}'.format(args.devices))
    os.environ['CUDA_VISIBLE_DEVICES'] = args.devices
    return args
def main():
    """Entry point: parse args, load the experiment config, then launch
    one training worker per GPU (spawned for distributed runs)."""
    args = get_parser()
    if args.debug:
        # Debug runs write to a fixed dir with batch size 1 and no loader workers.
        args.savemodel = './outputs/debug/'
        args.btrain = 1
        args.workers = 0
    # cfg is shared at module level so train() and others can read it.
    global cfg
    exp = Experimenter(args.savemodel, cfg_path=args.cfg)
    cfg = exp.config
    reset_seed(args.seed)
    cfg.debug = args.debug
    # Warmup defaults to on unless the config overrides it; always off in debug.
    cfg.warmup = getattr(cfg, 'warmup', True) if not args.debug else False
    ### distributed training ###
    if args.dist_url == "env://" and args.world_size == -1:
        args.world_size = int(os.environ["WORLD_SIZE"])
    ngpus_per_node = torch.cuda.device_count()
    print('ngpus_per_node: {}'.format(ngpus_per_node))
    args.ngpus_per_node = ngpus_per_node
    args.distributed = ngpus_per_node > 0 and (args.world_size > 1 or args.multiprocessing_distributed)
    # NOTE(review): any distributed run is forced to multiprocessing mode here.
    args.multiprocessing_distributed = args.distributed
    if args.distributed and args.multiprocessing_distributed:
        # Since we have ngpus_per_node processes per node, the total world_size
        # needs to be adjusted accordingly
        args.world_size = ngpus_per_node * args.world_size
        # Use torch.multiprocessing.spawn to launch distributed processes: the
        # main_worker process function
        mp.spawn(main_worker, nprocs=ngpus_per_node, args=(ngpus_per_node, args, cfg, exp))
    else:
        # Simply call main_worker function
        main_worker(0, ngpus_per_node, args, cfg, exp)
def is_main_process(args):
    """Return True when this process should do logging/checkpointing.

    Without multiprocessing-distributed training the single process is always
    "main"; otherwise only the local rank-0 process of each node qualifies.
    """
    if not args.multiprocessing_distributed:
        return True
    return args.rank % args.ngpus_per_node == 0
def main_worker(gpu, ngpus_per_node, args, cfg, exp):
    """Per-process training worker.

    Initialises the (optional) process group, builds the model/optimizer,
    wraps the model for the chosen parallelism mode, constructs the data
    loader, optionally resumes from a checkpoint, then runs the epoch loop.
    Only the main process (see is_main_process) logs and saves checkpoints.
    """
    print("Using GPU: {} for training".format(gpu))
    if args.distributed:
        if args.dist_url == "env://" and args.rank == -1:
            args.rank = int(os.environ["RANK"])
        if args.multiprocessing_distributed:
            # For multiprocessing distributed training, rank needs to be the
            # global rank among all the processes
            args.rank = args.rank * ngpus_per_node + gpu
        dist.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
                                world_size=args.world_size, rank=args.rank)
    #------------------- Model -----------------------
    model = build_model(cfg)
    # NOTE(review): lr=0.1 is a placeholder — adjust_learning_rate() overrides
    # it before every epoch (and per-step during warmup) below.
    optimizer = optim.Adam(model.parameters(), lr=0.1, betas=(0.9, 0.999))
    if args.distributed:
        # For multiprocessing distributed, DistributedDataParallel constructor
        # should always set the single device scope, otherwise,
        # DistributedDataParallel will use all available devices.
        torch.cuda.set_device(gpu)
        model.cuda(gpu)
        # When using a single GPU per process and per
        # DistributedDataParallel, we need to divide the batch size
        # ourselves based on the total number of GPUs we have
        args.btrain = int(args.btrain / ngpus_per_node)
        args.workers = int((args.workers + ngpus_per_node - 1) / ngpus_per_node)
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[gpu], find_unused_parameters=True)
    elif ngpus_per_node > 1:
        # Single-process multi-GPU fallback.
        model = torch.nn.DataParallel(model).cuda()
    else:
        torch.cuda.set_device(gpu)
        model = model.cuda(gpu)
    #------------------- Data Loader -----------------------
    all_left_img, all_right_img, all_left_disp, = ls.dataloader(args.data_path,
                                                                args.split_file,
                                                                depth_disp=True,
                                                                cfg=cfg,
                                                                is_train=True)
    ImageFloader = DA.myImageFloder(all_left_img, all_right_img, all_left_disp, True, split=args.split_file, cfg=cfg)
    if args.distributed:
        train_sampler = torch.utils.data.distributed.DistributedSampler(ImageFloader)
    else:
        train_sampler = None
    TrainImgLoader = torch.utils.data.DataLoader(
        ImageFloader,
        batch_size=args.btrain, shuffle=(train_sampler is None), num_workers=args.workers, drop_last=True,
        collate_fn=BatchCollator(cfg),
        sampler=train_sampler)
    # Warmup runs for at most 500 steps (or one epoch, whichever is shorter).
    args.max_warmup_step = min(len(TrainImgLoader), 500)
    #------------------ Logger -------------------------------------
    if is_main_process(args):
        logger = exp.logger
        logger.info('Number of model parameters: {}'.format(sum([p.data.nelement() for p in model.parameters()])))
        writer = exp.writer
    # ------------------------ Resume ------------------------------
    if args.loadmodel is not None:
        if is_main_process(args):
            logger.info('load model ' + args.loadmodel)
        state_dict = torch.load(args.loadmodel)
        # strict=False tolerates architecture differences between checkpoints.
        model.load_state_dict(state_dict['state_dict'], strict=False)
        if 'optimizer' in state_dict:
            try:
                optimizer.load_state_dict(state_dict['optimizer'])
                if is_main_process(args):
                    logger.info('Optimizer Restored.')
            except Exception as e:
                # Best-effort restore: keep training with a fresh optimizer.
                if is_main_process(args):
                    logger.error(str(e))
                    logger.info('Failed to restore Optimizer')
        else:
            if is_main_process(args):
                logger.info('No saved optimizer.')
        if args.start_epoch is None:
            args.start_epoch = state_dict['epoch'] + 1
    if args.start_epoch is None:
        args.start_epoch = 1
    # ------------------------ Training ------------------------------
    for epoch in range(args.start_epoch, args.epochs + 1):
        if args.distributed:
            # Reshuffle shards deterministically per epoch.
            train_sampler.set_epoch(epoch)
        total_train_loss = 0
        adjust_learning_rate(optimizer, epoch, args=args)
        for batch_idx, data_batch in enumerate(TrainImgLoader):
            start_time = time.time()
            # Per-step LR warmup during the first epoch only.
            if epoch == 1 and cfg.warmup and batch_idx < args.max_warmup_step:
                adjust_learning_rate(optimizer, epoch, batch_idx, args=args)
            losses = train(model, cfg, args, optimizer, **data_batch)
            loss = losses.pop('loss')
            if is_main_process(args):
                logger.info('%s: %s' % (args.savemodel.strip('/').split('/')[-1], args.devices))
                logger.info('Epoch %d Iter %d/%d training loss = %.3f , time = %.2f; Epoch time: %.3fs, Left time: %.3fs, lr: %.6f' % (
                    epoch,
                    batch_idx, len(TrainImgLoader), loss, time.time() - start_time, (time.time() - start_time) * len(TrainImgLoader),
                    (time.time() - start_time) * (len(TrainImgLoader) * (args.epochs - epoch) - batch_idx), optimizer.param_groups[0]["lr"]) )
                logger.info('losses: {}'.format(list(losses.items())))
                for lk, lv in losses.items():
                    writer.add_scalar(lk, lv, epoch * len(TrainImgLoader) + batch_idx)
            total_train_loss += loss
            # Shorten epochs drastically when debugging.
            if batch_idx == 100 and cfg.debug:
                break
        if is_main_process(args):
            logger.info('epoch %d total training loss = %.3f' % (epoch, total_train_loss / len(TrainImgLoader)))
            savefilename = args.savemodel + '/finetune_' + str(epoch) + '.tar'
            torch.save({
                'epoch': epoch,
                'state_dict': model.state_dict(),
                'train_loss': total_train_loss / len(TrainImgLoader),
                'optimizer': optimizer.state_dict()
            }, savefilename)
            logger.info('Snapshot {} epoch in {}'.format(epoch, args.savemodel))
def train(model, cfg, args, optimizer, imgL, imgR, disp_L, calib=None, calib_R=None,
image_indexes=None, targets=None, ious=None, labels_map=None, depth_points=None,
flip_infos=None, image_sizes=None):
global g_loss_normalizer
model.train()
batch = imgL.size(0)
imgL = torch.FloatTensor(imgL).cuda()
imgR = torch.FloatTensor(imgR).cuda() if imgR is not None else None
disp_true = torch.FloatTensor(disp_L).cuda()
# max_pool2d = nn.MaxPool2d(3, stride=2, padding=1)
# depth_s2 = max_pool2d(disp_true)
# # depth_s4 = max_pool2d(depth_s2)
# depth_gt = F.interpolate(depth_s2.unsqueeze(1), scale_factor=2, mode='nearest')
# # depth_gt = F.interpolate(depth_gt, scale_factor=2, mode='nearest')
# imgL, imgR, disp_true = imgL.cuda(), imgR.cuda(), disp_L.cuda()
if targets is not None:
for i in range(len(targets)):
targets[i].bbox = targets[i].bbox.cuda()
targets[i].box3d = targets[i].box3d.cuda()
calibs_fu = torch.as_tensor([c.f_u for c in calib])
calibs_Proj = torch.as_tensor([c.P for c in calib])
if calib_R is not None:
calibs_baseline = torch.abs(torch.as_tensor([(c.P[0,3]-c_R.P[0,3])/c.P[0,0] for c, c_R in zip(calib, calib_R)]))
calibs_Proj_R = torch.as_tensor([c.P for c in calib_R])
else:
calibs_baseline = None
calibs_Proj_R = None
# ---------
mask = (disp_true >= cfg.min_depth) & (disp_true < cfg.max_depth)
mask.detach_()
# ---------
loss_dict = dict()
outputs = model(imgL, imgR, calibs_fu, calibs_baseline, calibs_Proj, calibs_Proj_R=calibs_Proj_R)
# loss = 0.
if getattr(cfg, 'PlaneSweepVolume', True) and cfg.loss_disp:
depth_preds = [torch.squeeze(o, 1) for o in outputs['depth_preds']]
disp_loss = 0.
weight = [0.5, 0.7, 1.0]
for i, o in enumerate(depth_preds):
disp_loss += weight[3 - len(depth_preds) + i] * F.smooth_l1_loss(o[mask], disp_true[mask], size_average=True)
loss_dict.update(disp_loss=disp_loss)
# loss += disp_loss
if getattr(cfg, 'mono', False):
if getattr(cfg, 'depth_map', False):
depth_preds = outputs['depth_preds']
depth_loss = F.smooth_l1_loss(depth_preds[mask], disp_true[mask], size_average=True)
loss_dict.update(depth_loss=depth_loss)
else:
# OCCUPANCY PRED
occupancy_preds = outputs['occupancy_preds'] # (N, 192, 20, 304)
norm_coord_imgs = outputs['norm_coord_imgs']
coord_rect = outputs['coord_rect'] # z axis is 40.3 -> 2.1
# upper_coord_rect = coord_rect.clone().detach()
# upper_coord_rect[..., 2] -= cfg.VOXEL_Z_SIZE / 2 # z axis is 40.4 -> 2.2
# lower_coord_rect = coord_rect.clone().detach()
# lower_coord_rect[..., 2] += cfg.VOXEL_Z_SIZE / 2 # z axis is 40.2 -> 2.0
occupancy_loss = 0.
# Project the depth points to rect coord, if the point locate in the voxel, set mask to true
positive_masks = []
merged_depth_maps = []
for i, depth_points_i in enumerate(depth_points):
depth_points_i = depth_points_i.cuda()
z_idxs = ((depth_points_i[:, 2] - cfg.Z_MIN) / cfg.VOXEL_Z_SIZE).to(torch.long)
y_idxs = ((depth_points_i[:, 1] - cfg.Y_MIN) / cfg.VOXEL_Y_SIZE).to(torch.long)
x_idxs = ((depth_points_i[:, 0] - cfg.X_MIN) / cfg.VOXEL_X_SIZE).to(torch.long)
mask_i = torch.zeros(coord_rect.size()[:3], device=coord_rect.device)
mask_i[z_idxs, y_idxs, x_idxs] = 1
positive_masks.append(mask_i)
# reproject the positive voxels to a new depth map. Because the depth points downsampled as voxels center shift the position.
# And choose the max value with the original depth map, | |
' ' + settings['None']['output_path']
print("Generating normalised RPKM/FPKMs: "+cmd_fpkm)
subprocess.call(cmd_fpkm, shell=True)
def de_analysis (settings, group1, group2, output_prefix, bin_path=''):
    """ Calculate DEG analysis between two groups (column numbers 1-indexed)

    Builds and runs the de_analysis.r Rscript with the count matrix, the two
    group column specifications, the mapped-reads matrix and the output
    path/prefix for results.
    """
    cmd_deg = 'Rscript '+bin_path+'de_analysis.r ' + \
              ' ' + count_matrix_filename (settings) + \
              ' ' + group1 + \
              ' ' + group2 + \
              ' ' + mapped_reads_matrix_filename(settings) + \
              ' ' + settings['None']['output_path'] + output_prefix
    # Bug fix: the progress message was copy-pasted from the FPKM step
    # ("Generating normalised RPKM/FPKMs") — this step runs DE analysis.
    print("Performing DE analysis: "+cmd_deg)
    # NOTE(review): shell=True with string-concatenated arguments is
    # shell-injection prone if any argument is untrusted; consider
    # subprocess.call([...]) with an argument list.
    subprocess.call(cmd_deg, shell=True)
def load_gff (settings, sample):
    """ Load all annotation data from a GFF file

    Returns a dict keyed by chromosome, then by part name:
        gff[chrom][part_name] = [part_type, part_dir, start_bp, end_bp, part_attribs]
    Rows with fewer than 9 columns, or without a Name attribute, are skipped.
    """
    gff = {}
    # Fix: the 'rU' open mode was removed in Python 3.11; csv recommends
    # newline=''. The with-statement also guarantees the handle is closed.
    with open(settings[sample]['gff_file'], newline='') as gff_handle:
        data_reader = csv.reader(gff_handle, delimiter='\t')
        # Process each line
        for row in data_reader:
            if len(row) == 9:
                chromo = row[0]
                part_type = row[2]
                start_bp = int(row[3])
                end_bp = int(row[4])
                part_dir = row[6]
                part_attribs = {}
                split_attribs = row[8].split(';')
                part_name = None
                for attrib in split_attribs:
                    key_value = attrib.split('=')
                    if len(key_value) == 2:
                        # The Name attribute becomes the dict key; everything
                        # else is kept in the attribute map.
                        if key_value[0] == 'Name':
                            part_name = key_value[1]
                        else:
                            part_attribs[key_value[0]] = key_value[1]
                if part_name is not None:
                    if chromo not in gff:
                        gff[chromo] = {}
                    gff[chromo][part_name] = [part_type, part_dir, start_bp, end_bp, part_attribs]
    return gff
def load_profiles (settings, sample, normed=False):
    """ Profiles have the form of a list chr: [start_bp, end_bp, [profile_fwd],[profile_rev]]

    Reads the forward-strand profile file first (creating profile records),
    then fills reverse-strand values into the matching records. When normed
    is True the normalised profile files are used instead.
    """
    profiles = {}
    fwd_profile_filename = profile_fwd_filename(settings, sample)
    if normed == True:
        fwd_profile_filename = profile_norm_fwd_filename(settings, sample)
    # Fix: the 'rU' open mode was removed in Python 3.11; csv recommends
    # newline=''. with-statements also close the handles deterministically.
    with open(fwd_profile_filename, newline='') as f_fwd:
        data_reader = csv.reader(f_fwd, delimiter='\t')
        # Process each line in fwd profile
        for row in data_reader:
            if len(row) == 5:
                cur_chrom = row[0]
                if cur_chrom not in profiles.keys():
                    profiles[cur_chrom] = []
                cur_start_bp = int(row[1])
                cur_end_bp = int(row[2])
                cur_profile = find_profile(profiles, cur_chrom, cur_start_bp, cur_end_bp)
                if cur_profile == None:
                    new_profile = [cur_start_bp, cur_end_bp, np.zeros(cur_end_bp-cur_start_bp), np.zeros(cur_end_bp-cur_start_bp)]
                    new_profile[2][int(row[3])-1] = float(row[4])
                    profiles[cur_chrom].append(new_profile)
                else:
                    # find_profile returns [fwd, rev]; index 0 is the fwd array.
                    cur_profile[0][int(row[3])-1] = float(row[4])
    rev_profile_filename = profile_rev_filename(settings, sample)
    if normed == True:
        rev_profile_filename = profile_norm_rev_filename(settings, sample)
    with open(rev_profile_filename, newline='') as f_rev:
        data_reader = csv.reader(f_rev, delimiter='\t')
        # Process each line in rev profile (records must already exist from
        # the fwd pass; unmatched rows are ignored).
        for row in data_reader:
            if len(row) == 5:
                cur_chrom = row[0]
                if cur_chrom not in profiles.keys():
                    profiles[cur_chrom] = []
                cur_start_bp = int(row[1])
                cur_end_bp = int(row[2])
                cur_profile = find_profile(profiles, cur_chrom, cur_start_bp, cur_end_bp)
                if cur_profile != None:
                    cur_profile[1][int(row[3])-1] = float(row[4])
    return profiles
def find_profile (profiles, chrom, start_bp, end_bp):
    """ Find a profile for a given chromosome that spans a given range of bp positions

    Returns [fwd_profile, rev_profile] for an exact (start_bp, end_bp) match,
    or None when the chromosome is unknown or no record matches.
    """
    for candidate in profiles.get(chrom, []):
        if candidate[0] == start_bp and candidate[1] == end_bp:
            return [candidate[2], candidate[3]]
    return None
def extract_profile_region (profiles, chrom, start_bp, end_bp):
    """ Extract a region of a transcription profile for a given chromosome and region

    Returns [fwd_slice, rev_slice] for the requested bp range, or None when no
    stored profile covers it. For a profile spanning the whole chromosome the
    request may run off either end; the profile is then extended by wrapping
    the opposite end around (presumably to support circular sequences such as
    plasmids — TODO confirm).
    """
    region = None
    if chrom in profiles.keys():
        for profile in profiles[chrom]:
            # A profile covering bp 0..len(fwd) is treated as the full chromosome.
            full_chrom = False
            if profile[0] == 0 and profile[1] == len(profile[2]):
                full_chrom = True
            if full_chrom == True:
                fwd_profile = list(profile[2])
                rev_profile = list(profile[3])
                profile_len = len(fwd_profile)
                ext_start_fwd = []
                ext_end_fwd = []
                ext_start_rev = []
                ext_end_rev = []
                # The region will exist
                if start_bp < 0:
                    # extend the profile at start (wrap the tail around)
                    ext_start_fwd = fwd_profile[start_bp:]
                    ext_start_rev = rev_profile[start_bp:]
                if end_bp > profile_len:
                    # extend the profile at end (wrap the head around)
                    ext_end_fwd = fwd_profile[:(end_bp-profile_len)]
                    ext_end_rev = rev_profile[:(end_bp-profile_len)]
                new_start_bp = start_bp
                new_end_bp = end_bp
                # If we prepended a wrapped chunk, shift both indices so they
                # address the extended profile.
                if ext_start_fwd != []:
                    new_start_bp = 0
                    new_end_bp = end_bp+len(ext_start_fwd)
                new_fwd_profile = ext_start_fwd+fwd_profile+ext_end_fwd
                new_rev_profile = ext_start_rev+rev_profile+ext_end_rev
                region = [new_fwd_profile[new_start_bp:new_end_bp],
                          new_rev_profile[new_start_bp:new_end_bp]]
                break
            else:
                # Partial profile: only serve requests fully inside its span.
                if start_bp >= profile[0] and end_bp <= profile[1]:
                    fwd_profile = list(profile[2])
                    rev_profile = list(profile[3])
                    profile_len = len(fwd_profile)
                    region = [fwd_profile[start_bp-profile[0]:end_bp-profile[0]],
                              rev_profile[start_bp-profile[0]:end_bp-profile[0]]]
                    break
    return region
def reverse_region (region):
    """ Reverse a given region

    Swaps the strands and reverses each profile, i.e. views the region from
    the opposite direction.
    """
    fwd, rev = region
    return [rev[::-1], fwd[::-1]]
def avg_fn (data):
    """ The average function to use

    Central place to swap the averaging strategy (mean/median/...) used by
    all the characterization functions.
    """
    return np.asarray(data).mean()
def characterize_promoter_units (settings, sample, upstream_bp=10, downstream_skip_bp=0, downstream_bp=10, normed=False):
    """ Characterize all promoter units for a given sample

    For each promoter_unit part in the sample's GFF, compares average read
    depth upstream vs downstream of the part. Returns a list of
    [chrom, part_name, avg_us, avg_ds, perf] records.
    """
    profiles = load_profiles(settings, sample, normed=normed)
    char_data = []
    gff = load_gff (settings, sample)
    for chrom in gff.keys():
        for part_name in gff[chrom].keys():
            part_data = gff[chrom][part_name]
            if part_data[0] == 'promoter_unit':
                if part_data[1] == '+':
                    raw_region = extract_profile_region(profiles, chrom,
                        (part_data[2]-1)-upstream_bp, part_data[3]+downstream_skip_bp+downstream_bp)
                else:
                    rev_raw = extract_profile_region(profiles, chrom,
                        (part_data[2]-1)-downstream_skip_bp-downstream_bp, part_data[3]+upstream_bp)
                    # Flip reverse-strand parts so "upstream" is always index 0.
                    raw_region = reverse_region(rev_raw) if rev_raw is not None else None
                # Fix: skip parts whose region could not be extracted instead of
                # crashing (consistent with characterize_terminators).
                if raw_region is None:
                    continue
                # Calculate performance
                avg_us = avg_fn(raw_region[0][0:upstream_bp])
                avg_ds = avg_fn(raw_region[0][-downstream_bp:])
                perf = avg_ds-avg_us
                char_data.append([chrom, part_name, avg_us, avg_ds, perf])
    return char_data
def characterize_promoters (settings, sample, upstream_bp=10, downstream_skip_bp=0, downstream_bp=10, normed=False):
    """ Characterize all promoters for a given sample

    For each promoter part in the sample's GFF, compares average read depth
    upstream vs downstream of the part. Returns a list of
    [chrom, part_name, avg_us, avg_ds, perf] records.
    """
    profiles = load_profiles(settings, sample, normed=normed)
    char_data = []
    gff = load_gff (settings, sample)
    for chrom in gff.keys():
        for part_name in gff[chrom].keys():
            part_data = gff[chrom][part_name]
            if part_data[0] == 'promoter':
                if part_data[1] == '+':
                    raw_region = extract_profile_region(profiles, chrom,
                        (part_data[2]-1)-upstream_bp, part_data[3]+downstream_skip_bp+downstream_bp)
                else:
                    rev_raw = extract_profile_region(profiles, chrom,
                        (part_data[2]-1)-downstream_skip_bp-downstream_bp, part_data[3]+upstream_bp)
                    # Flip reverse-strand parts so "upstream" is always index 0.
                    raw_region = reverse_region(rev_raw) if rev_raw is not None else None
                # Fix: skip parts whose region could not be extracted instead of
                # crashing (consistent with characterize_terminators).
                if raw_region is None:
                    continue
                # Calculate performance
                avg_us = avg_fn(raw_region[0][0:upstream_bp])
                avg_ds = avg_fn(raw_region[0][-downstream_bp:])
                perf = avg_ds-avg_us
                char_data.append([chrom, part_name, avg_us, avg_ds, perf])
    return char_data
def characterize_terminators (settings, sample, upstream_bp=10, upstream_skip_bp=0, downstream_bp=10, normed=False):
    """ Characterize all terminators for a given sample

    For each terminator part, computes termination efficiency (t_e) and
    strength (t_s) from average read depth up- and downstream. Returns a list
    of [chrom, part_name, avg_us, avg_ds, t_e, t_s, max_term] records.
    """
    profiles = load_profiles(settings, sample, normed=normed)
    gff = load_gff(settings, sample)
    char_data = []
    for chrom in gff.keys():
        for part_name in gff[chrom].keys():
            info = gff[chrom][part_name]
            if info[0] != 'terminator':
                continue
            if info[1] == '+':
                region = extract_profile_region(profiles, chrom,
                    (info[2]-1)-upstream_skip_bp-upstream_bp, info[3]+downstream_bp)
            else:
                # Reverse-strand parts are flipped so "upstream" is index 0.
                region = reverse_region(extract_profile_region(profiles, chrom,
                    (info[2]-1)-downstream_bp, info[3]+upstream_skip_bp+upstream_bp))
            if region is None:
                continue
            # Calculate performance
            avg_us = avg_fn(region[0][0:upstream_bp])
            avg_ds = avg_fn(region[0][-downstream_bp:])
            if avg_us == 0.0:
                # No incoming transcription: treat as maximal termination.
                t_e, max_term = 0.0, 'Y'
            elif avg_ds < 1.0:
                # Downstream effectively silent: maximal termination.
                t_e, max_term = 1.0/float(avg_us), 'Y'
            else:
                t_e, max_term = float(avg_ds)/float(avg_us), 'N'
            t_s = 1.0/t_e if t_e != 0.0 else -1.0
            char_data.append([chrom, part_name, avg_us, avg_ds, t_e, t_s, max_term])
    return char_data
def characterize_ribozymes (settings, sample, upstream_promoter_bp=10, upstream_bp=10, downstream_skip_bp=0, downstream_bp=10, normed=False):
    """ Characterize all ribozymes for a given sample

    For each ribozyme part, estimates cutting efficiency (c_e) from average
    read depth around the cut site, corrected for transcription entering from
    the upstream promoter. Returns a list of
    [chrom, part_name, avg_us, avg_ds, c_e, max_cut, cut_site] records.
    """
    profiles = load_profiles(settings, sample, normed=normed)
    char_data = []
    gff = load_gff (settings, sample)
    for chrom in gff.keys():
        for part_name in gff[chrom].keys():
            part_data = gff[chrom][part_name]
            if part_data[0] == 'ribozyme':
                cut_site = 0
                promoter_start = int(part_data[4]['upstream_promoter_start'])
                if 'cut_site' in part_data[4].keys():
                    cut_site = int(part_data[4]['cut_site'])
                if part_data[1] == '+':
                    cur_site_bp = (part_data[2]-1)+cut_site
                    raw_region = extract_profile_region(profiles, chrom,
                        cur_site_bp-upstream_bp, cur_site_bp+downstream_skip_bp+downstream_bp)
                    promoter_region = extract_profile_region(profiles, chrom,
                        promoter_start-upstream_promoter_bp, promoter_start)
                else:
                    cur_site_bp = (part_data[3])-cut_site
                    rev_raw = extract_profile_region(profiles, chrom,
                        cur_site_bp-downstream_skip_bp-downstream_bp, cur_site_bp+upstream_bp)
                    raw_region = reverse_region(rev_raw) if rev_raw is not None else None
                    rev_prom = extract_profile_region(profiles, chrom,
                        promoter_start, promoter_start+upstream_promoter_bp)
                    promoter_region = reverse_region(rev_prom) if rev_prom is not None else None
                # Fix: skip parts whose regions could not be extracted instead
                # of crashing (consistent with characterize_terminators).
                if raw_region is None or promoter_region is None:
                    continue
                # Calculate performance
                avg_promoter = avg_fn(promoter_region[0])
                avg_us = avg_fn(raw_region[0][0:upstream_bp])
                avg_ds = avg_fn(raw_region[0][-downstream_bp:])
                # Correct for input transcription to promoter
                avg_us = avg_us - avg_promoter
                if avg_us < 0.0:
                    avg_us = 0.0
                avg_ds = avg_ds - avg_promoter
                if avg_ds < 0.0:
                    avg_ds = 0.0
                max_cut = 'N'
                c_e = 0.0
                if avg_ds <= 0.0:
                    c_e = 0.0
                    max_cut = 'Y'
                else:
                    if avg_us <= 0:
                        if avg_ds < avg_us:
                            c_e = 0.0
                        else:
                            c_e = 1.0-(1.0/float(avg_ds))
                        max_cut = 'Y'
                    else:
                        if avg_ds < avg_us:
                            c_e = 0.0
                        else:
                            c_e = 1.0-(float(avg_us)/float(avg_ds))
                char_data.append([chrom, part_name, avg_us, avg_ds, c_e, max_cut, cut_site])
    return char_data
def save_characterization_data (settings, sample, data, part_type=None):
    """ Save all characterisation data (promoters, terminators, ribozymes) for a given sample

    Writes one tab-separated file per part type; unknown part_type values are
    a no-op (as before).
    """
    # Table of per-part-type output filename helpers and headers replaces the
    # three copy-pasted write blocks.
    writers = {
        'promoter': (promoter_profile_perf_filename,
                     'sample\tchromosome\tpart_name\treads_us\treads_ds\treads_strength\n'),
        'terminator': (terminator_profile_perf_filename,
                       'sample\tchromosome\tpart_name\treads_us\treads_ds\tt_e\tt_s\tmax_term\n'),
        'ribozyme': (ribozyme_profile_perf_filename,
                     'sample\tchromosome\tpart_name\treads_us\treads_ds\tc_e\tmax_cut\tcut_site\n'),
    }
    if part_type not in writers:
        return
    filename_fn, header = writers[part_type]
    # Fix: use a with-statement so the file is closed even if a write fails.
    with open(filename_fn(settings, sample), 'w') as f_out:
        f_out.write( header )
        for d in data:
            f_out.write( sample+'\t'+'\t'.join([str(x) for x in d])+'\n' )
def combine_promoter_characterizations (settings, samples):
    """ Combine all promoter characterization data across a set of samples

    Groups rows by chromosome then part name, and writes one combined
    tab-separated file sorted by both keys.
    """
    data = {}
    for s in samples:
        filename = promoter_profile_perf_filename(settings, s)
        # Fix: 'rU' open mode was removed in Python 3.11; csv recommends newline=''.
        with open(filename, newline='') as f_in:
            data_reader = csv.reader(f_in, delimiter='\t')
            header = next(data_reader)
            for row in data_reader:
                if row[1] not in data.keys():
                    data[row[1]] = {}
                chrom_data = data[row[1]]
                if row[2] not in chrom_data.keys():
                    chrom_data[row[2]] = []
                chrom_part_data = chrom_data[row[2]]
                chrom_part_data.append([row[0]]+row[3:])
    with open(combined_promoter_profile_perf_filename(settings), 'w') as f_out:
        f_out.write('chromosome\tpart_name\tsample\treads_us\treads_ds\treads_strength\n')
        for chrom in sorted(data.keys()):
            chrom_data = data[chrom]
            for part in sorted(chrom_data.keys()):
                chrom_part_data = chrom_data[part]
                for data_rec in chrom_part_data:
                    f_out.write( '\t'.join([chrom, part]+data_rec)+'\n' )
def combine_terminator_characterizations (settings, samples):
    """ Combine all terminator characterization data across a set of samples

    Groups rows by chromosome then part name, and writes one combined
    tab-separated file sorted by both keys.
    """
    data = {}
    for s in samples:
        filename = terminator_profile_perf_filename(settings, s)
        # Fix: 'rU' open mode was removed in Python 3.11; csv recommends newline=''.
        with open(filename, newline='') as f_in:
            data_reader = csv.reader(f_in, delimiter='\t')
            header = next(data_reader)
            for row in data_reader:
                if row[1] not in data.keys():
                    data[row[1]] = {}
                chrom_data = data[row[1]]
                if row[2] not in chrom_data.keys():
                    chrom_data[row[2]] = []
                chrom_part_data = chrom_data[row[2]]
                chrom_part_data.append([row[0]]+row[3:])
    with open(combined_terminator_profile_perf_filename(settings), 'w') as f_out:
        f_out.write('chromosome\tpart_name\tsample\treads_us\treads_ds\tt_e\tt_s\tmax_term\n')
        for chrom in sorted(data.keys()):
            chrom_data = data[chrom]
            for part in sorted(chrom_data.keys()):
                chrom_part_data = chrom_data[part]
                for data_rec in chrom_part_data:
                    f_out.write( '\t'.join([chrom, part]+data_rec)+'\n' )
def combine_ribozyme_characterizations (settings, samples):
    """ Combine all ribozyme characterization data across a set of samples

    Groups rows by chromosome then part name, and writes one combined
    tab-separated file sorted by both keys.
    """
    data = {}
    for s in samples:
        filename = ribozyme_profile_perf_filename(settings, s)
        # Fix: 'rU' open mode was removed in Python 3.11; csv recommends newline=''.
        with open(filename, newline='') as f_in:
            data_reader = csv.reader(f_in, delimiter='\t')
            header = next(data_reader)
            for row in data_reader:
                if row[1] not in data.keys():
                    data[row[1]] = {}
                chrom_data = data[row[1]]
                if row[2] not in chrom_data.keys():
                    chrom_data[row[2]] = []
                chrom_part_data = chrom_data[row[2]]
                chrom_part_data.append([row[0]]+row[3:])
    with open(combined_ribozyme_profile_perf_filename(settings), 'w') as f_out:
        f_out.write('chromosome\tpart_name\tsample\treads_us\treads_ds\tc_e\tmax_cut\tcut_site\n')
        for chrom in sorted(data.keys()):
            chrom_data = data[chrom]
            for part in sorted(chrom_data.keys()):
                chrom_part_data = chrom_data[part]
                for data_rec in chrom_part_data:
                    f_out.write( '\t'.join([chrom, part]+data_rec)+'\n' )
def fragment_length_dists (settings, sample, reads_to_sample=1000000):
    """ Generate the fragment length distribution for a sample (adapted from get_insert_size.py (<NAME>))

    Samples up to reads_to_sample lines of the SAM file, keeping only perfect
    (all-M CIGAR) properly-paired reads with a positive insert size, and
    writes the span histogram (span\tcount) to the fragment-dist file.
    """
    frag_file = fragment_dist_filename(settings, sample)
    sam_file = sam_filename(settings, sample)
    # Fixes vs. original: removed an unused, never-closed duplicate
    # open(sam_file, 'rU') handle; 'rU' mode itself was removed in Python 3.11;
    # regexes use raw strings; output handle is closed via with.
    plrdlen={}
    plrdspan={}
    objmrl=re.compile(r'([0-9]+)M$')
    nline=0
    with open(sam_file) as ins:
        for lines in ins:
            field=lines.strip().split()
            nline=nline+1
            if nline >= reads_to_sample:
                break
            if len(field)<12:
                continue
            try:
                mrl=objmrl.match(field[5])
                if mrl==None: # ignore non-perfect reads
                    continue
                readlen=int(mrl.group(1))
                # Read-length histogram is collected but (as before) not written out.
                if readlen in plrdlen.keys():
                    plrdlen[readlen]=plrdlen[readlen]+1
                else:
                    plrdlen[readlen]=1
                if field[6]!='=': # mate must be on the same reference
                    continue
                dist=int(field[8])
                if dist<=0: # ignore neg dist
                    continue
                if dist in plrdspan.keys():
                    plrdspan[dist]=plrdspan[dist]+1
                else:
                    plrdspan[dist]=1
            except ValueError:
                continue
    with open(frag_file, 'w') as f_out:
        for k in sorted(plrdspan.keys()):
            f_out.write(str(k)+'\t'+str(plrdspan[k])+'\n')
def load_norm_factor (settings, sample):
    """ Load edgeR normalization factors from file

    Reads <output_path>norm.factors.matrix.txt (tab-separated, one header
    line, rows of sample\tfactor1\tfactor2) and returns the product of the
    two factors for the requested sample.
    """
    norm_facs = {}
    norm_fac_file = settings['None']['output_path']+'norm.factors.matrix.txt'
    # Fix: 'rU' open mode was removed in Python 3.11; csv recommends newline=''.
    with open(norm_fac_file, newline='') as f_in:
        data_reader = csv.reader(f_in, delimiter='\t')
        # Ignore the header
        next(data_reader)
        # Process each line
        for row in data_reader:
            if len(row) == 3:
                norm_facs[row[0]] = [float(row[1]), float(row[2])]
    return (norm_facs[sample][0]*norm_facs[sample][1])
def load_fragmentation_dist (settings, sample, max_frag_len=1000):
    """ Load the fragmentation distribution from file

    Returns a numpy array of length max_frag_len+1 indexed by fragment
    length; lengths beyond max_frag_len are ignored.
    """
    frag_dist = np.zeros(max_frag_len+1)
    frag_file = fragment_dist_filename(settings, sample)
    # Fix: 'rU' open mode was removed in Python 3.11; csv recommends newline=''.
    with open(frag_file, newline='') as f_in:
        data_reader = csv.reader(f_in, delimiter='\t')
        # Process each line
        for row in data_reader:
            frag_len = int(row[0])
            frag_count = int(row[1])
            if frag_len <= max_frag_len:
                frag_dist[frag_len] = frag_count
    return frag_dist
def identify_internal_and_overlapped_fragments(read_dictionary,transcript_unit,plasmid_length,scale):
""" Identify fragments that are internal to a transcription unit and those that overlap with the edges
"""
internal_profile = np.ones(plasmid_length)*scale
overlapped_profile = np.ones(plasmid_length)*scale
internal_read = {}
for i in range(plasmid_length):
internal_read[i] = []
overlapped_read = {}
for i in range(plasmid_length):
overlapped_read[i] = []
for rr in range(plasmid_length):
temp_locations = [0]+[item[0] for item in transcript_unit]+[item[1] for item in transcript_unit]+[plasmid_length-1]
left_pos = [item-rr for item in temp_locations if item-rr <= 0]
right_pos = [item-rr for item in temp_locations if item-rr >= 0]
closest_left = max(left_pos) + rr
closest_right = min(right_pos) + rr
if | |
import torch
import torch.nn as nn
import torch.nn.functional as F
from distributions import Categorical2D
from utils import init, init_normc_
class Flatten(nn.Module):
    """Collapse every dimension after the batch axis: (N, *) -> (N, -1)."""

    def forward(self, x):
        batch = x.size(0)
        return x.view(batch, -1)
class Policy(nn.Module):
    """Actor-critic policy wrapper.

    Picks a base network from the observation shape (and ``args``) and an
    action-distribution head from the action space, then exposes the usual
    act / get_value / evaluate_actions API plus ICM helpers.
    """

    def __init__(self, obs_shape, action_space, base_kwargs=None, curiosity=False, algo='A2C', model='MicropolisBase', args=None):
        super(Policy, self).__init__()
        self.curiosity = curiosity
        self.args = args
        if base_kwargs is None:
            base_kwargs = {}
        if len(obs_shape) == 3:
            # Image-like (C, H, W) observations -> convolutional bases.
            if curiosity:
                self.base = MicropolisBase_ICM(obs_shape[0], **base_kwargs)
            elif args.model == 'squeeze':
                # NOTE(review): the `model` parameter is never read here;
                # the choice is driven by `args.model` instead.
                self.base = MicropolisBase(obs_shape[0], **base_kwargs, map_width=args.map_width)
            else:
                self.base = MicropolisBase_fixedmap(obs_shape[0], **base_kwargs, map_width=args.map_width)
        elif len(obs_shape) == 1:
            # Flat observations -> MLP base.
            self.base = MLPBase(obs_shape[0], **base_kwargs)
        else:
            raise NotImplementedError
        if action_space.__class__.__name__ == "Discrete":
            if True:
                # NOTE(review): both branches of this `if True` are
                # identical -- looks like a leftover experiment toggle.
                num_outputs = action_space.n
                self.dist = Categorical2D(self.base.output_size, num_outputs)
            else:
                num_outputs = action_space.n
                self.dist = Categorical2D(self.base.output_size, num_outputs)
        elif action_space.__class__.__name__ == "Box":
            num_outputs = action_space.shape[0]
            if self.args.env_name == 'MicropolisPaintEnv-v0':
                # Paint env builds a Binomial distribution in act() instead
                # of using a fixed distribution head.
                self.dist = None
            else:
                # self.dist = DiagGaussian(self.base.output_size, num_outputs)
                self.dist = Categorical2D(self.base.output_size, num_outputs)
        else:
            raise NotImplementedError

    @property
    def is_recurrent(self):
        # Delegated to the base network.
        return self.base.is_recurrent

    @property
    def recurrent_hidden_state_size(self):
        """Size of rnn_hx."""
        return self.base.recurrent_hidden_state_size

    def forward(self, inputs, rnn_hxs, masks):
        # Use act() / get_value() / evaluate_actions() instead.
        raise NotImplementedError

    def act(self, inputs, rnn_hxs, masks, deterministic=False,
            player_act=None, icm_enabled=False):
        ''' assumes player actions can only occur on env rank 0'''
        value, actor_features, rnn_hxs = self.base(inputs, rnn_hxs, masks)
        action_bin = None
        if 'paint' in self.args.env_name.lower():
            # Per-pixel Bernoulli actions sampled from the feature map.
            dist = torch.distributions.binomial.Binomial(1, actor_features)
            action = dist.sample()
            action_log_probs = dist.log_prob(action)
        else:
            dist = self.dist(actor_features)
            if player_act:
                # force the model to sample the player-selected action
                play_features = actor_features
                play_features = play_features.view(actor_features.size(0), -1)
                play_features.fill_(-99999)
                play_features[:1, player_act] = 99999
                play_features = play_features.view(actor_features.shape)
                play_dist = self.dist(play_features)
                action = play_dist.sample()
                # backprop is sent through the original distribution
                action_log_probs = dist.log_probs(action)
            else:
                if deterministic:
                    action = dist.mode()
                else:
                    action = dist.sample()
                action_log_probs = dist.log_probs(action)
            if icm_enabled:
                # One-hot encode the sampled action for the ICM module.
                # NOTE(review): assumes CUDA is available (.cuda() calls).
                action_bin = torch.zeros(dist.probs.shape)
                action_ixs = torch.Tensor(list(range(dist.probs.size(0)))).unsqueeze(1).long()
                action_i = torch.cat((action_ixs.cuda(), action.cuda()), 1)
                action_bin[action_i[:,0], action_i[:,1]] = 1
                if torch.cuda.current_device() > 0:
                    action_bin = action_bin.cuda()
        return value, action, action_log_probs, action_bin, rnn_hxs

    def icm_act(self, inputs):
        # inputs = (s0, s1, a1); returns encoded next state, predicted next
        # state, and the predicted-action distribution's probabilities.
        s1, pred_s1, pred_a = self.base(inputs, None, None, icm=True)
        return s1, pred_s1, self.dist(pred_a).probs

    def get_value(self, inputs, rnn_hxs, masks):
        # Critic-only pass.
        value, _, _ = self.base(inputs, rnn_hxs, masks)
        return value

    def evaluate_icm(self, inputs):
        # Same computation as icm_act (kept separate for call-site clarity).
        s1, pred_s1, pred_a = self.base(inputs, None, None, icm=True)
        return s1, pred_s1, self.dist(pred_a).probs

    def evaluate_actions(self, inputs, rnn_hxs, masks, action):
        value, actor_features, rnn_hxs = self.base(inputs, rnn_hxs, masks)
        if 'paint' in self.args.env_name.lower():
            dist = torch.distributions.binomial.Binomial(1, actor_features)
            action_log_probs = dist.log_prob(action)
            # Entropy intentionally skipped for the Binomial paint case.
            dist_entropy = None
            #dist_entropy = (dist.logits * dist.probs).mean()
        else:
            dist = self.dist(actor_features)
            action_log_probs = dist.log_probs(action)
            dist_entropy = dist.entropy().mean()
        return value, action_log_probs, dist_entropy, rnn_hxs
class NNBase(nn.Module):
    """Common plumbing for policy bases: hidden-size bookkeeping plus an
    optional GRU cell for recurrent policies."""

    def __init__(self, recurrent, recurrent_input_size, hidden_size):
        super(NNBase, self).__init__()
        self._hidden_size = hidden_size
        self._recurrent = recurrent
        if recurrent:
            gru = nn.GRUCell(recurrent_input_size, hidden_size)
            # Orthogonal weights, zero biases.
            nn.init.orthogonal_(gru.weight_ih.data)
            nn.init.orthogonal_(gru.weight_hh.data)
            gru.bias_ih.data.fill_(0)
            gru.bias_hh.data.fill_(0)
            self.gru = gru

    @property
    def is_recurrent(self):
        return self._recurrent

    @property
    def recurrent_hidden_state_size(self):
        """Size of rnn_hx (1 when the base is not recurrent)."""
        return self._hidden_size if self._recurrent else 1

    @property
    def output_size(self):
        return self._hidden_size

    def _forward_gru(self, x, hxs, masks):
        if x.size(0) == hxs.size(0):
            # One step per environment: a single GRU call suffices.
            hxs = self.gru(x, hxs * masks)
            return hxs, hxs
        # Rollout case: x is a (T, N, -1) tensor flattened to (T * N, -1).
        num_envs = hxs.size(0)
        num_steps = int(x.size(0) / num_envs)
        steps = x.view(num_steps, num_envs, x.size(1))
        step_masks = masks.view(num_steps, num_envs, 1)
        outputs = []
        for t in range(num_steps):
            # Masks zero out the hidden state at episode boundaries.
            hxs = self.gru(steps[t], hxs * step_masks[t])
            outputs.append(hxs)
        stacked = torch.stack(outputs, dim=0)
        # Re-flatten to (T * N, -1) for downstream layers.
        return stacked.view(num_steps * num_envs, -1), hxs
class MicropolisBase_fixedmap(NNBase):
    """Convolutional actor-critic base for a fixed-width map.

    A stack of 3x3 convs (one per map cell of width) plus a skip
    connection from the raw input; separate 1-channel critic head and
    19-channel actor head.
    """

    def __init__(self, num_inputs, recurrent=False, hidden_size=512, map_width=20):
        super(MicropolisBase_fixedmap, self).__init__(recurrent, hidden_size, hidden_size)
        self.map_width = map_width
        # Dirac weights + 0.1 bias, relu gain.
        relu_init = lambda m: init(m,
            nn.init.dirac_,
            lambda x: nn.init.constant_(x, 0.1),
            nn.init.calculate_gain('relu'))
        self.skip_compress = relu_init(nn.Conv2d(num_inputs, 15, 1, stride=1))
        self.conv_0 = relu_init(nn.Conv2d(num_inputs, 64, 1, 1, 0))
        self.conv_1 = relu_init(nn.Conv2d(64, 64, 5, 1, 2))
        self.conv_2 = relu_init(nn.Conv2d(64, 64, 3, 1, 1))
        self.critic_compress = relu_init(nn.Conv2d(79, 8, 1, 1, 1))
        # Dirac weights + zero bias for the output heads.
        zero_init = lambda m: init(m,
            nn.init.dirac_,
            lambda x: nn.init.constant_(x, 0))
        self.actor_compress = zero_init(nn.Conv2d(79, 19, 3, 1, 1))
        self.critic_conv_1 = zero_init(nn.Conv2d(8, 1, self.map_width, self.map_width, 0))
        # self.critic_conv_2 = init_(nn.Conv2d(1, 1, 2, 1, 0)) # for 40x40 map
        self.train()

    def forward(self, inputs, rnn_hxs, masks):
        skip = F.relu(self.skip_compress(inputs))
        feat = F.relu(self.conv_0(inputs))
        feat = F.relu(self.conv_1(feat))
        # Repeated 3x3 convs widen the receptive field across the map.
        for _ in range(self.map_width):
            feat = self.conv_2(feat)
            feat = F.relu(feat)
        feat = torch.cat((feat, skip), 1)
        values = F.relu(self.critic_compress(feat))
        values = self.critic_conv_1(values)
        values = values.view(values.size(0), -1)
        actions = self.actor_compress(feat)
        return values, actions, rnn_hxs
class MicropolisBase(NNBase):
    """Squeeze-style actor-critic base: downsample with strided convs,
    then reconstruct the action map with transposed convs fed by skip
    connections from each compression level."""

    def __init__(self, num_inputs, recurrent=False, hidden_size=512, map_width=20):
        super(MicropolisBase, self).__init__(recurrent, hidden_size, hidden_size)
        self.map_width = map_width
        # Dirac weights + 0.1 bias, relu gain.
        relu_init = lambda m: init(m,
            nn.init.dirac_,
            lambda x: nn.init.constant_(x, 0.1),
            nn.init.calculate_gain('relu'))
        self.conv_00 = relu_init(nn.Conv2d(num_inputs, 64, 1, 1, 0))
        self.conv_0 = relu_init(nn.Conv2d(64, 64, 3, 3, 0))
        self.conv_1 = relu_init(nn.Conv2d(64, 64, 3, 1, 1))
        self.val_cmprs = relu_init(nn.Conv2d(64, 64, 9, 1, 4))
        self.val_conv_0 = relu_init(nn.Conv2d(64, 64, 3, 1, 1))
        self.num_maps = 3  # how many different sizes
        for i in range(self.num_maps):
            setattr(self, 'upsample_{}'.format(i), nn.Upsample(size=(1+2*(i+1))))
        self.act_convt = relu_init(nn.ConvTranspose2d(64 + 64, 64, 3, 3, 0))
        # Dirac weights + zero bias for the output heads.
        zero_init = lambda m: init(m,
            nn.init.dirac_,
            lambda x: nn.init.constant_(x, 0))
        self.act_tomap = zero_init(nn.Conv2d(64, 19, 5, 1, 2))
        self.val_conv = zero_init(nn.Conv2d(64, 1, 1, 1, 0))
        self.train()

    def forward(self, inputs, rnn_hxs, masks):
        h = F.relu(self.conv_00(inputs))
        # Compress num_maps times, remembering each level for skips.
        skips = []
        for _ in range(self.num_maps):
            h = F.relu(self.conv_0(h))
            skips.append(h)
        h = F.relu(self.conv_1(h))
        # Critic head on the most compressed representation.
        vals = F.relu(self.val_cmprs(h))
        vals = F.relu(self.val_conv_0(vals))
        vals = self.val_conv(vals)
        # Actor head: expand back up, concatenating skips deepest-first.
        acts = h
        for skip in reversed(skips):
            acts = torch.cat((acts, skip), 1)
            acts = F.relu(self.act_convt(acts))
        acts = F.relu(self.act_tomap(acts))
        return vals.view(vals.size(0), -1), acts, rnn_hxs
class MicropolisBase_ICM(MicropolisBase_fixedmap):
    def __init__(self, num_inputs, recurrent=False, hidden_size=512):
        """Build ICM (intrinsic curiosity) heads on top of the fixed-map base:
        a forward model (state + action -> next state) and an inverse model
        (state pair -> action).

        NOTE(review): the parent is constructed without map_width, so it
        falls back to the parent's default -- confirm that is intended.
        """
        super(MicropolisBase_ICM, self).__init__(num_inputs, recurrent, hidden_size)
        ### ICM feature encoder
        # Dirac weights + zero bias, relu gain.
        init_ = lambda m: init(m,
            nn.init.dirac_,
            lambda x: nn.init.constant_(x, 0),
            nn.init.calculate_gain('relu'))
        num_skip_inputs=15  # NOTE(review): defined but not used below
        self.num_action_channels=19
        # State encoder layers (their use is commented out in forward()).
        self.icm_state_in = init_(nn.Conv2d(num_inputs, 64, 3, 1, 1))
        self.icm_state_conv_0 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_state_out = init_(nn.Conv2d(64, 64, 3, 1, 1))
        # Inverse model: concatenated (s0, s1) -> action logits.
        self.icm_pred_a_in = init_(nn.Conv2d((num_inputs) * 2, 128, 3, 1, 1))
        self.icm_pred_a_conv_0 = init_(nn.Conv2d(128, 128, 3, 1, 1))
        # Forward model: concatenated (s0, action) -> predicted s1.
        self.icm_pred_s_in = init_(nn.Conv2d((num_inputs) + self.num_action_channels, 64, 1, 1, 0))
        self.icm_pred_s_conv_0 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_1 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_2 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_3 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_4 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_5 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_6 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_7 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_8 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_9 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        self.icm_pred_s_conv_10 = init_(nn.Conv2d(64, 64, 3, 1, 1))
        #self.icm_skip_compress = init_(nn.Conv2d(num_inputs, 15, 1, stride=1))
        # Output heads use zero bias and no gain.
        init_ = lambda m: init(m,
            nn.init.dirac_,
            lambda x: nn.init.constant_(x, 0))
        self.icm_pred_a_out = init_(nn.Conv2d(128, self.num_action_channels, 7, 1, 3))
        self.icm_pred_s_out = init_(nn.Conv2d(64 + 64, num_inputs, 1, 1, 0))
        self.train()
def forward(self, inputs, rnn_hxs, masks, icm=False):
if icm == False:
return super().forward(inputs, rnn_hxs, masks)
else:
# Encode state feature-maps
s0_in, s1_in, a1 = inputs
a1 = a1.view(a1.size(0), self.num_action_channels, 20, 20)
s0 = s0_in
# s0 = F.relu(self.icm_state_in(s0))
# for i in range(1):
# s0 = F.relu(self.icm_state_conv_0(s0))
# s0 = F.relu(self.icm_state_out(s0))
##s0_skip = F.relu(self.icm_skip_compress(s0))
s1 = s1_in
# s1 = F.relu(self.icm_state_in(s1))
# for i in range(1):
# s1 = F.relu(self.icm_state_conv_0(s1))
# s1 = F.relu(self.icm_state_out(s1))
##s1_skip = F.relu(self.icm_skip_compress(s1_in))
# Predict outcome state feature-map and action dist.
if True:
a1 = a1.cuda()
s0 = s0.cuda()
#print(a1.is_cuda, s0.is_cuda)
pred_s1 = pred_s1_0 = F.relu(self.icm_pred_s_in(torch.cat((s0, a1), 1)))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_0(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_1(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_2(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_3(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_4(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_5(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_6(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_7(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_8(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_9(pred_s1))
for i in range(2):
pred_s1 = F.relu(self.icm_pred_s_conv_10(pred_s1))
pred_s1 = torch.cat((pred_s1, pred_s1_0), 1)
pred_s1 = self.icm_pred_s_out(pred_s1)
pred_a = F.relu(self.icm_pred_a_in(torch.cat((s0, s1), 1)))
for i in range(1):
pred_a = F.relu(self.icm_pred_a_conv_0(pred_a))
pred_a = self.icm_pred_a_out(pred_a)
pred_a | |
90.00 P 1
SCALE1 0.084731 0.000000 0.000000 0.00000
SCALE2 0.000000 0.060321 0.000000 0.00000
SCALE3 0.000000 0.000000 0.071551 0.00000
ATOM 1 N PHE A 63 6.412 9.770 7.572 1.00 10.00 N
ATOM 2 CA PHE A 63 6.289 9.157 6.198 1.00 10.00 C
ATOM 3 C PHE A 63 6.292 10.223 5.110 1.00 10.00 C
ATOM 4 O PHE A 63 6.451 11.437 5.241 1.00 10.00 O
ATOM 5 CB PHE A 63 5.580 8.319 6.063 1.00 10.00 C
ATOM 6 CG PHE A 63 5.743 6.958 6.849 1.00 10.00 C
ATOM 7 CD1 PHE A 63 6.569 6.117 6.768 1.00 10.00 C
ATOM 8 CD2 PHE A 63 5.050 7.055 8.045 1.00 10.00 C
ATOM 9 CE1 PHE A 63 6.802 5.054 7.620 1.00 10.00 C
ATOM 10 CE2 PHE A 63 5.331 6.153 8.840 1.00 10.00 C
ATOM 11 CZ PHE A 63 6.270 5.103 8.840 1.00 10.00 C
ATOM 12 HA PHE A 63 7.224 9.004 5.959 1.00 10.00 H
ATOM 13 HB2 PHE A 63 4.619 8.277 5.936 1.00 10.00 H
ATOM 14 HB3 PHE A 63 5.713 7.612 5.105 1.00 10.00 H
ATOM 15 HD1 PHE A 63 7.301 6.004 5.919 1.00 10.00 H
ATOM 16 HD2 PHE A 63 4.436 7.641 8.314 1.00 10.00 H
ATOM 17 HE1 PHE A 63 7.224 4.307 7.397 1.00 10.00 H
ATOM 18 HE2 PHE A 63 4.727 6.172 9.862 1.00 10.00 H
ATOM 19 HZ PHE A 63 6.045 4.444 9.418 1.00 10.00 H
"""
# Proline
pdb_str_15 = """
CRYST1 12.293 14.006 12.486 90.00 90.00 90.00 P 1
SCALE1 0.081347 0.000000 0.000000 0.00000
SCALE2 0.000000 0.071398 0.000000 0.00000
SCALE3 0.000000 0.000000 0.080090 0.00000
ATOM 1 N PRO A 4 4.920 7.325 6.411 1.00 10.00 N
ATOM 2 CA PRO A 4 6.162 6.834 5.666 1.00 10.00 C
ATOM 3 C PRO A 4 6.405 5.341 5.894 1.00 10.00 C
ATOM 4 O PRO A 4 7.098 4.853 4.957 1.00 10.00 O
ATOM 5 CB PRO A 4 7.380 7.894 6.618 1.00 10.00 C
ATOM 6 CG PRO A 4 6.640 8.868 7.204 1.00 10.00 C
ATOM 7 CD PRO A 4 5.415 8.476 7.560 1.00 10.00 C
ATOM 8 HA PRO A 4 6.250 7.067 4.776 1.00 10.00 H
ATOM 9 HB2 PRO A 4 7.921 7.337 7.346 1.00 10.00 H
ATOM 10 HB3 PRO A 4 8.016 8.080 5.830 1.00 10.00 H
ATOM 11 HG2 PRO A 4 7.167 9.579 7.723 1.00 10.00 H
ATOM 12 HG3 PRO A 4 6.492 9.649 6.528 1.00 10.00 H
ATOM 13 HD2 PRO A 4 5.306 8.082 8.255 1.00 10.00 H
ATOM 14 HD3 PRO A 4 4.496 9.094 7.222 1.00 10.00 H
"""
# Serine
pdb_str_16 = """
CRYST1 12.893 12.708 12.721 90.00 90.00 90.00 P 1
SCALE1 0.077561 0.000000 0.000000 0.00000
SCALE2 0.000000 0.078691 0.000000 0.00000
SCALE3 0.000000 0.000000 0.078610 0.00000
ATOM 1 N SER A 73 5.347 5.750 5.075 1.00 10.00 N
ATOM 2 CA SER A 73 5.894 5.594 6.421 1.00 10.00 C
ATOM 3 C SER A 73 5.946 7.282 6.996 1.00 10.00 C
ATOM 4 O SER A 73 5.026 7.627 7.452 1.00 10.00 O
ATOM 5 CB SER A 73 7.501 5.019 6.235 1.00 10.00 C
ATOM 6 OG SER A 73 8.063 4.929 7.844 1.00 10.00 O
ATOM 7 HA SER A 73 5.300 5.295 7.021 1.00 10.00 H
ATOM 8 HB2 SER A 73 7.399 4.067 6.115 1.00 10.00 H
ATOM 9 HB3 SER A 73 8.072 5.776 5.734 1.00 10.00 H
ATOM 10 HG SER A 73 8.499 4.647 7.768 1.00 10.00 H
"""
# Threonine
pdb_str_17 = """
CRYST1 11.909 12.199 14.459 90.00 90.00 90.00 P 1
SCALE1 0.083970 0.000000 0.000000 0.00000
SCALE2 0.000000 0.081974 0.000000 0.00000
SCALE3 0.000000 0.000000 0.069161 0.00000
ATOM 1 N THR A 68 6.967 7.035 7.598 1.00 10.00 N
ATOM 2 CA THR A 68 5.580 6.835 7.409 1.00 10.00 C
ATOM 3 C THR A 68 4.981 6.180 8.674 1.00 10.00 C
ATOM 4 O THR A 68 6.079 5.829 9.469 1.00 10.00 O
ATOM 5 CB THR A 68 5.071 6.452 6.355 1.00 10.00 C
ATOM 6 OG1 THR A 68 5.819 5.087 6.249 1.00 10.00 O
ATOM 7 CG2 THR A 68 5.178 7.219 5.045 1.00 10.00 C
ATOM 8 HA THR A 68 4.978 7.946 7.649 1.00 10.00 H
ATOM 9 HB THR A 68 3.853 6.156 6.182 1.00 10.00 H
ATOM 10 HG1 THR A 68 6.272 5.006 6.437 1.00 10.00 H
ATOM 11 HG21 THR A 68 5.183 6.722 4.241 1.00 10.00 H
ATOM 12 HG22 THR A 68 4.943 8.119 5.225 1.00 10.00 H
ATOM 13 HG23 THR A 68 6.305 7.117 4.778 1.00 10.00 H
"""
# Tryptophan
pdb_str_18 = """
CRYST1 12.502 12.982 18.312 90.00 90.00 90.00 P 1
SCALE1 0.079987 0.000000 0.000000 0.00000
SCALE2 0.000000 0.077030 0.000000 0.00000
SCALE3 0.000000 0.000000 0.054609 0.00000
ATOM 1 N TRP A 49 7.019 6.036 6.855 1.00 10.00 N
ATOM 2 CA TRP A 49 6.009 5.654 7.261 1.00 10.00 C
ATOM 3 C TRP A 49 5.132 5.145 5.877 1.00 10.00 C
ATOM 4 O TRP A 49 5.267 5.983 5.027 1.00 10.00 O
ATOM 5 CB TRP A 49 5.058 6.563 8.316 1.00 10.00 C
ATOM 6 CG TRP A 49 6.065 6.967 9.207 1.00 10.00 C
ATOM 7 CD1 TRP A 49 6.863 7.994 9.455 1.00 10.00 C
ATOM 8 CD2 TRP A 49 5.959 6.505 10.743 1.00 10.00 C
ATOM 9 NE1 TRP A 49 7.435 8.004 10.831 1.00 10.00 N
ATOM 10 CE2 TRP A 49 6.803 7.200 11.335 1.00 10.00 C
ATOM 11 CE3 TRP A 49 5.119 5.187 11.318 1.00 10.00 C
ATOM 12 CZ2 TRP A 49 7.129 6.620 12.976 1.00 10.00 C
ATOM 13 CZ3 TRP A 49 5.299 5.155 12.457 1.00 10.00 C
ATOM 14 CH2 TRP A 49 6.374 5.658 13.211 1.00 10.00 C
ATOM 15 HA TRP A 49 6.139 4.842 7.608 1.00 10.00 H
ATOM 16 HB2 TRP A 49 5.070 7.319 7.481 1.00 10.00 H
ATOM 17 HB3 TRP A 49 4.305 6.336 8.251 1.00 10.00 H
ATOM 18 HD1 TRP A 49 7.226 8.582 8.796 1.00 10.00 H
ATOM 19 HE1 TRP A 49 8.120 8.555 10.929 1.00 10.00 H
ATOM 20 HE3 TRP A 49 4.311 5.074 10.564 1.00 10.00 H
ATOM 21 HZ2 TRP A 49 7.806 7.092 13.214 1.00 10.00 H
ATOM 22 HZ3 TRP A 49 4.914 4.129 12.933 1.00 10.00 H
ATOM 23 HH2 TRP A 49 6.353 5.513 14.064 1.00 10.00 H
"""
# Tyrosine
pdb_str_19 = """
CRYST1 17.955 13.272 13.095 90.00 90.00 90.00 P 1
SCALE1 0.055695 0.000000 0.000000 0.00000
SCALE2 0.000000 0.075347 0.000000 0.00000
SCALE3 0.000000 0.000000 0.076365 0.00000
ATOM 1 N TYR A 139 10.057 7.968 5.049 1.00 10.00 N
ATOM 2 CA TYR A 139 10.657 7.531 6.379 1.00 10.00 C
ATOM 3 C TYR A 139 12.203 7.725 6.416 1.00 10.00 C
ATOM 4 O TYR A 139 12.999 8.272 7.373 1.00 10.00 O
ATOM 5 CB TYR A 139 10.644 6.145 6.711 1.00 10.00 C
ATOM 6 CG TYR A 139 9.159 5.899 6.690 1.00 10.00 C
ATOM 7 CD1 TYR A 139 8.503 5.230 5.513 1.00 10.00 C
ATOM 8 CD2 TYR A 139 8.317 6.046 8.121 1.00 10.00 C
ATOM 9 CE1 TYR A 139 6.876 4.938 5.643 1.00 10.00 C
ATOM 10 CE2 TYR A 139 7.209 5.706 8.077 1.00 10.00 C
ATOM 11 CZ TYR A 139 6.420 5.365 6.855 1.00 10.00 C
ATOM 12 OH TYR A 139 5.027 4.949 7.088 1.00 10.00 O
ATOM 13 HA TYR A 139 10.303 8.174 6.785 1.00 10.00 H
ATOM 14 HB2 TYR A 139 10.989 5.647 5.882 1.00 10.00 H
ATOM 15 HB3 TYR A 139 10.828 5.883 7.586 1.00 10.00 H
ATOM 16 HD1 TYR A 139 8.618 5.356 4.741 1.00 10.00 H
ATOM 17 HD2 TYR A 139 8.841 6.121 8.546 1.00 10.00 H
ATOM 18 HE1 TYR A 139 6.432 5.030 4.892 1.00 10.00 H
ATOM 19 HE2 TYR A 139 6.780 5.619 9.066 1.00 10.00 H
ATOM 20 HH TYR A 139 4.693 5.141 7.840 1.00 10.00 H
"""
# Valine
pdb_str_20 = """
CRYST1 12.396 13.122 13.130 90.00 90.00 90.00 P 1
SCALE1 0.080671 0.000000 0.000000 0.00000
SCALE2 0.000000 0.076208 0.000000 0.00000
SCALE3 0.000000 0.000000 0.076161 0.00000
ATOM 1 N VAL B 78 4.820 5.520 7.634 1.00 10.00 N
ATOM 2 CA VAL B 78 6.105 5.869 7.138 1.00 10.00 C
ATOM 3 C VAL B 78 6.717 7.176 8.084 1.00 10.00 C
ATOM 4 O VAL B 78 | |
"""Functional tests for all the routes."""
from book_api.models.book import Book
from book_api.models.user import User
from book_api.tests.conftest import FAKE
def test_signup_other_methods_gets_404_status_code(testapp):
    """Non-POST requests to /signup should get a 404 status code."""
    for method in ('get', 'put', 'delete'):
        response = getattr(testapp, method)('/signup', status=404)
        assert response.status_code == 404


def test_signup_post_no_data_gets_400_status_code(testapp):
    """POST to /signup with an empty body should get a 400 status code."""
    response = testapp.post('/signup', status=400)
    assert response.status_code == 400
def test_signup_post_incomplete_data_gets_400_status_code(testapp):
    """Test that POST to signup route gets 400 status code for bad data."""
    # 'email' is deliberately omitted, so signup must be rejected.
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        # FIX: the source contained a broken "<PASSWORD>()" redaction
        # placeholder (a syntax error); restore the Faker provider call.
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data, status=400)
    assert res.status_code == 400


def test_signup_post_complete_data_adds_user_to_database(testapp, testapp_session):
    """Test that POST to signup route creates a new User."""
    assert len(testapp_session.query(User).all()) == 0
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    testapp.post('/signup', data)
    assert len(testapp_session.query(User).all()) == 1


def test_signup_post_complete_data_gets_201_status_code(testapp):
    """Test that POST to signup route gets 201 status code."""
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    assert res.status_code == 201


def test_signup_post_complete_data_returns_json_with_new_user_info(testapp):
    """Test that POST to signup route gets JSON with details for new User."""
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    # The password must never be echoed back; check only the public fields.
    for prop in ['first_name', 'last_name', 'email']:
        assert res.json[prop] == data[prop]
    assert res.json['id'] is not None


def test_signup_post_data_without_names_sets_names_to_none(testapp):
    """Test that POST to signup route sets first and last names to None."""
    data = {
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    assert res.json['first_name'] is None
    assert res.json['last_name'] is None
def test_book_list_other_methods_gets_404_status_code(testapp):
    """Unsupported HTTP methods on /books should get a 404 status code."""
    for method in ('put', 'delete'):
        response = getattr(testapp, method)('/books', status=404)
        assert response.status_code == 404


def test_book_list_get_missing_auth_gets_400_status_code(testapp, testapp_session, one_user, fill_the_db):
    """GET /books without credentials should get a 400 status code."""
    testapp_session.add(one_user)
    testapp_session.commit()
    response = testapp.get('/books', status=400)
    assert response.status_code == 400


def test_book_list_get_incorrect_auth_gets_403_status_code(testapp, one_user):
    """GET /books with a wrong password should get a 403 status code."""
    credentials = {
        'email': one_user.email,
        'password': '<PASSWORD>',
    }
    response = testapp.get('/books', credentials, status=403)
    assert response.status_code == 403


def test_book_list_get_correct_auth_has_200_response_code(testapp, one_user):
    """GET /books with valid credentials should get a 200 status code."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books', credentials)
    assert response.status_code == 200


def test_book_list_get_correct_auth_empty_for_user_with_no_books(testapp, one_user):
    """GET /books returns an empty list for a user who owns no books."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books', credentials)
    assert response.json == []
def test_book_list_post_no_data_gets_400_status_code(testapp):
    """POST /books with no body should get a 400 status code."""
    response = testapp.post('/books', status=400)
    assert response.status_code == 400


def test_book_list_post_missing_auth_gets_400_status_code(testapp):
    """POST /books without credentials should get a 400 status code."""
    payload = {
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    response = testapp.post('/books', payload, status=400)
    assert response.status_code == 400


def test_book_list_post_incorrect_auth_gets_403_status_code(testapp, one_user):
    """POST /books with a wrong password should get a 403 status code."""
    payload = {
        'email': one_user.email,
        'password': '<PASSWORD>',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    response = testapp.post('/books', payload, status=403)
    assert response.status_code == 403


def test_book_list_post_incomplete_data_gets_400_status_code(testapp, one_user):
    """POST /books without a title should get a 400 status code."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    response = testapp.post('/books', payload, status=400)
    assert response.status_code == 400


def test_book_list_post_incorrect_date_gets_400_status_code(testapp, one_user):
    """POST /books with a date not in MM/DD/YYYY format should get 400."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%Y-%m-%d')
    }
    response = testapp.post('/books', payload, status=400)
    assert response.status_code == 400
def test_book_list_post_complete_data_gets_201_status_code(testapp, one_user):
    """POST /books with full, valid data should get a 201 status code."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    response = testapp.post('/books', payload)
    assert response.status_code == 201


def test_book_list_post_complete_data_adds_book_to_database(testapp, testapp_session, one_user):
    """POST /books should persist exactly one new Book row."""
    book_count = len(testapp_session.query(Book).all())
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    testapp.post('/books', payload)
    assert len(testapp_session.query(Book).all()) == book_count + 1


def test_book_list_post_sets_email_user_as_book_owner(testapp, testapp_session, one_user):
    """POST /books should attach the new book to the authenticated user."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    response = testapp.post('/books', payload)
    created = testapp_session.query(Book).get(response.json['id'])
    assert created.user.email == one_user.email


def test_book_list_post_complete_data_returns_json_with_new_book_info(testapp, one_user):
    """POST /books should echo the new book's details as JSON."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    response = testapp.post('/books', payload)
    for field in ['title', 'author', 'isbn', 'pub_date']:
        assert response.json[field] == payload[field]
    assert response.json['id'] is not None


def test_book_list_post_data_without_values_sets_values_to_none(testapp, one_user):
    """POST /books fills omitted optional fields with None."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
    }
    response = testapp.post('/books', payload)
    assert response.json['author'] is None
    assert response.json['isbn'] is None
    assert response.json['pub_date'] is None
def test_book_list_get_correct_auth_all_books_for_user(testapp, testapp_session, one_user):
    """GET /books lists every book belonging to the authenticated user."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books', credentials)
    owned_books = testapp_session.query(User).get(one_user.id).books
    assert len(response.json) == len(owned_books)


def test_book_list_get_correct_auth_all_book_details(testapp, one_user):
    """Every listed book carries the full set of detail fields."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books', credentials)
    for book in response.json:
        assert all(field in book for field in
                   ['id', 'title', 'author', 'isbn', 'pub_date'])
def test_book_id_other_methods_gets_404_status_code(testapp):
    """Unsupported HTTP methods on /books/{id} should get a 404 status code."""
    for method in ('post',):
        response = getattr(testapp, method)('/books/1', status=404)
        assert response.status_code == 404


def test_book_id_get_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """GET /books/{id} without credentials should get a 400 status code."""
    response = testapp.get('/books/1', status=400)
    assert response.status_code == 400


def test_book_id_get_incorrect_auth_gets_403_status_code(testapp, one_user):
    """GET /books/{id} with a wrong password should get a 403 status code."""
    credentials = {
        'email': one_user.email,
        'password': '<PASSWORD>',
    }
    response = testapp.get('/books/1', credentials, status=403)
    assert response.status_code == 403


def test_book_id_get_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """GET /books/{id} should 404 for a book that does not belong to the user."""
    other_book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books/{}'.format(other_book.id), credentials, status=404)
    assert response.status_code == 404


def test_book_id_get_correct_auth_has_200_response_code(testapp, testapp_session, one_user):
    """GET /books/{id} for an owned book with valid credentials should 200."""
    owned_book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books/{}'.format(owned_book.id), credentials)
    assert response.status_code == 200
def test_book_id_get_correct_auth_returns_json_with_book_info(testapp, testapp_session, one_user):
    """GET /books/{id} should return the book's details as JSON."""
    owned_book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books/{}'.format(owned_book.id), credentials)
    for field in ['id', 'title', 'author', 'isbn']:
        assert response.json[field] == getattr(owned_book, field)
    # Dates are serialized in MM/DD/YYYY format.
    assert response.json['pub_date'] == owned_book.pub_date.strftime('%m/%d/%Y')


def test_book_id_put_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """PUT /books/{id} without credentials should get a 400 status code."""
    response = testapp.put('/books/1', status=400)
    assert response.status_code == 400


def test_book_id_put_incorrect_auth_gets_403_status_code(testapp, one_user):
    """PUT /books/{id} with a wrong password should get a 403 status code."""
    credentials = {
        'email': one_user.email,
        'password': '<PASSWORD>',
    }
    response = testapp.put('/books/1', credentials, status=403)
    assert response.status_code == 403


def test_book_id_put_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """PUT /books/{id} should 404 for a book that does not belong to the user."""
    other_book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.put('/books/{}'.format(other_book.id), credentials, status=404)
    assert response.status_code == 404


def test_book_id_put_correct_auth_incorrect_date_gets_400_status_code(testapp, testapp_session, one_user):
    """PUT /books/{id} with a date not in MM/DD/YYYY format should get 400."""
    owned_book = testapp_session.query(User).get(one_user.id).books[0]
    payload = {
        'email': one_user.email,
        'password': 'password',
        'pub_date': FAKE.date(pattern='%Y-%m-%d')
    }
    response = testapp.put('/books/{}'.format(owned_book.id), payload, status=400)
    assert response.status_code == 400
def test_book_id_put_correct_auth_has_200_response_code(testapp, testapp_session, one_user):
    """PUT /books/{id} with valid credentials should get a 200 status code."""
    owned_book = testapp_session.query(User).get(one_user.id).books[0]
    payload = {
        'email': one_user.email,
        'password': 'password',
        'author': FAKE.name()
    }
    response = testapp.put('/books/{}'.format(owned_book.id), payload)
    assert response.status_code == 200


def test_book_id_put_correct_auth_does_not_add_book_to_database(testapp, testapp_session, one_user):
    """PUT /books/{id} updates in place without creating a new Book."""
    owned_book = testapp_session.query(User).get(one_user.id).books[0]
    book_count = len(testapp_session.query(Book).all())
    payload = {
        'email': one_user.email,
        'password': 'password',
        'isbn': FAKE.isbn13(separator="-")
    }
    testapp.put('/books/{}'.format(owned_book.id), payload)
    assert len(testapp_session.query(Book).all()) == book_count
def test_book_id_put_correct_auth_updates_book_in_database(testapp, testapp_session, one_user):
"""Test that PUT to book-id route updates the | |
<gh_stars>1-10
"""Tools (notably `xpSpace`) for processing and presenting experiment data."""
import collections
import copy
import logging
import os
import shutil
import warnings
from pathlib import Path
import colorama
import dill
import matplotlib as mpl
import numpy as np
import struct_tools
from matplotlib import cm, ticker
from patlib.std import set_tmp
from tabulate import tabulate
from tqdm import tqdm
import dapper.tools.remote.uplink as uplink
from dapper.stats import align_col, unpack_uqs
from dapper.tools.colors import color_text
from dapper.tools.series import UncertainQtty
from dapper.tools.viz import axis_scale_by_array, freshfig
from dapper.xp_launch import collapse_str, xpList
mpl_logger = logging.getLogger('matplotlib')
NO_KEY = ("da_method", "Const", "upd_a")


def make_label(coord, no_key=NO_KEY, exclude=()):
    """Render an experiment coordinate as a short legend label.

    Attributes whose value is None, or whose name is listed in `exclude`,
    are skipped. Attributes matching one of `no_key` show only their
    value; all others render as ``key:value`` with the key abbreviated.
    """
    parts = []
    for key, val in coord._asdict().items():
        # FIX: use `is None` instead of `!= None` (identity check; also
        # avoids ambiguous truth values for array-like attributes).
        if val is None or key in exclude:
            continue
        if any(tag in key for tag in no_key):
            parts.append(f'{val}')
        else:
            parts.append(f'{collapse_str(key, 7)}:{val}')
    return ' '.join(parts)
def default_styles(coord, baseline_legends=False):
    """Quick and dirty (but somewhat robust) styling keyed on `da_method`."""
    style = struct_tools.DotDict(ms=8)
    style.label = make_label(coord)
    try:
        method = coord.da_method  # may raise AttributeError -> keep defaults
        if method == "Climatology":
            style.ls = ":"
            style.c = "k"
            if not baseline_legends:
                style.label = None
        elif method == "OptInterp":
            style.ls = ":"
            style.c = .7*np.ones(3)
            # Baseline legend entries are suppressed unless requested.
            style.label = "Opt. Interp." if baseline_legends else None
        elif method == "Var3D":
            style.ls = ":"
            style.c = .5*np.ones(3)
            style.label = "3D-Var" if baseline_legends else None
        elif method == "EnKF":
            style.marker = "*"
            style.c = "C1"
        elif method == "PartFilt":
            style.marker = "X"
            style.c = "C2"
        else:
            style.marker = "."
    except AttributeError:
        # coord has no da_method: leave the generic label/defaults alone.
        pass
    return style
def rel_index(elem, lst, default=None):
    """`lst.index(elem) / len(lst)` with fallback.

    Raises
    ------
    ValueError
        If `elem` is not in `lst` and no `default` was given.
    """
    try:
        return lst.index(elem) / len(lst)
    except ValueError:
        # `is None` (identity), not `== None`: PEP 8, and `==` may be
        # overridden by the default's type.
        if default is None:
            raise
        return default
def discretize_cmap(cmap, N, val0=0, val1=1, name=None):
    """Discretize `cmap` so that it partitions `[0,1]` into `N` segments.

    I.e. `cmap(k/N) == cmap(k/N + eps)`.

    Also provide the ScalarMappable `sm` that maps range(N) to the segment
    centers, as will be reflected by `cb = fig.colorbar(sm)`.
    You can then re-label the ticks using
    `cb.set_ticks(np.arange(N)); cb.set_ticklabels(["A","B","C",...])`."""
    # Sample N colours from [val0, val1] and rebuild a segmented colormap.
    sampled = cmap(np.linspace(val0, val1, N))
    cmap = mpl.colors.LinearSegmentedColormap.from_list(name, sampled, N)
    # Normalize (-.5, N-.5) so that integer k lands on segment k's center.
    norm = mpl.colors.Normalize(-.5, -.5+N)
    sm = mpl.cm.ScalarMappable(norm, cmap)
    return cmap, sm
def cm_bond(cmap, xp_dict, axis, vmin=0, vmax=0):
    """Map cmap for `coord.axis ∈ [0, len(ticks)]`."""
    def link(coord):
        """Essentially: `cmap(ticks.index(coord.axis))`"""
        # Guard clause: coordinate lacks the axis entirely.
        if not hasattr(coord, axis):
            return cmap(0.5)
        ticks = xp_dict.ticks[axis]
        norm = mpl.colors.Normalize(vmin, vmax + len(ticks))
        to_rgba = cm.ScalarMappable(norm, cmap).to_rgba
        return to_rgba(ticks.index(getattr(coord, axis)))
    return link
def in_idx(coord, indices, xp_dict, axis):
    """Essentially: `coord.axis in ticks[indices]`."""
    # Coordinates without this axis are never filtered out.
    if not hasattr(coord, axis):
        return True
    selected = np.array(xp_dict.ticks[axis])[indices]
    return getattr(coord, axis) in selected
def load_HMM(save_as):
    """Load the `HMM` stored in experiment dir `save_as` (file "xp.com").

    Parameters
    ----------
    save_as: str or Path
        Experiment directory.
    """
    save_as = Path(save_as).expanduser()
    # Use a context manager so the file handle is closed deterministically
    # (the original `open(...)` relied on garbage collection to close it).
    with open(save_as/"xp.com", "rb") as f:
        return dill.load(f)["HMM"]
def load_xps(save_as):
    """Load `xps` (as a simple list) from dir."""
    save_as = Path(save_as).expanduser()
    files = [d/"xp" for d in uplink.list_job_dirs(save_as)]

    def load_any(filepath):
        """Load any/all `xp's` from `filepath`."""
        with open(filepath, "rb") as F:
            try:
                data = dill.load(F)
            except EOFError:
                # Crashed experiments leave an empty file behind.
                return []
        # Normalise to a list, whether the file held one xp or many.
        if "xps" in data:
            return data["xps"]
        return [data["xp"]]

    print("Loading %d files from %s" % (len(files), save_as))
    xps = []  # NB: progbar wont clean up properly w/ list compr.
    for f in tqdm(files, desc="Loading"):
        xps.extend(load_any(f))

    if len(xps) < len(files):
        print(f"{len(files)-len(xps)} files could not be loaded.")
    return xps
def save_xps(xps, save_as, nDir=100):
    """Split `xps` into chunks and save each in `save_as/i` for `i in range(nDir)`.

    NB: `save_as` must not already exist (``mkdir(exist_ok=False)``).

    The following example — which belongs to the `overwrite_xps` workflow
    (the original docstring misattributed it to this function) — shows a
    load/modify/overwrite round-trip, here renaming attr `n_iter` to `nIter`:

    >>> proj_name = "Stein"
    >>> dd = dpr.rc.dirs.data / proj_name
    >>> save_as = dd / "run_2020-09-22__19:36:13"
    >>>
    >>> for save_as in os.listdir(dd):
    >>>     save_as = dd / save_as
    >>>
    >>>     xps = load_xps(save_as)
    >>>     HMM = load_HMM(save_as)
    >>>
    >>>     for xp in xps:
    >>>         if hasattr(xp,"n_iter"):
    >>>             xp.nIter = xp.n_iter
    >>>             del xp.n_iter
    >>>
    >>>     overwrite_xps(xps, save_as)
    """
    save_as = Path(save_as).expanduser()
    save_as.mkdir(parents=False, exist_ok=False)

    splitting = np.array_split(xps, nDir)
    for i, sub_xps in enumerate(tqdm(splitting, desc="Saving")):
        if len(sub_xps):
            iDir = save_as / str(i)
            os.mkdir(iDir)
            with open(iDir/"xp", "wb") as F:
                dill.dump({'xps': sub_xps}, F)
def overwrite_xps(xps, save_as, nDir=100):
    """Save `xps` in `save_as`, but safely (by first saving to tmp)."""
    tmp = save_as/"tmp"
    save_xps(xps, tmp, nDir)

    # Only once the new data is safely on disk: delete the old job dirs.
    for old in tqdm(uplink.list_job_dirs(save_as),
                    desc="Deleting old"):
        shutil.rmtree(old)

    # Promote the freshly saved dirs out of tmp/ -- quick, since few of them.
    for name in os.listdir(tmp):
        shutil.move(tmp/name, save_as/name)
    shutil.rmtree(tmp)
def reduce_inodes(save_as, nDir=100):
    """Pack multiple `xp`s into lists (`xps`) to reduce the number of `xp` dirs.

    This reduces the **number** of files (inodes) on the system,
    which limits storage capacity (along with **size**).

    It also deletes files "xp.var" and "out"
    (which tends to be relatively large coz of the progbar).
    This is probably also the reason that the loading time is sometimes reduced."""
    xps = load_xps(save_as)
    overwrite_xps(xps, save_as, nDir)
class SparseSpace(dict):
"""Subclass of `dict` that enforces key conformity to a `namedtuple`.
Like a normal `dict`, it can hold any type of objects.
But, since keys must conform, this effectively defines a coordinate system,
i.e. vector **space**.
The coordinate system is specified by its "axes",
which is used to produce `self.Coord` (a `namedtuple` class).
In normal use, this space is highly sparse,
coz there are many coordinates with no matching experiment,
eg. `coord(da_method=Climatology, rot=True, ...)`.
Indeed, operations across (potentially multiple) axes,
such as optimization or averaging, should be carried out by iterating
-- not over the axes -- but over the the list of items.
The most important method is `nest`,
which is used (by `xpSpace.table_tree`) to separate tables/columns,
and also to carry out the mean/optim operations.
In addition, `__getitem__` is very flexible, allowing accessing by:
- The actual key, a `self.Coord` object. Returns single item.
- A `dict` to match against (part of) the coordinates. Returns subspace.
- An `int`. Returns `list(self)[key]`.
- A list of any of the above. Returns list.
This flexibility can cause bugs, but it's probably still worth it.
Also see `__call__`, `get_for`, and `coords`,
for further convenience.
Inspired by
- https://stackoverflow.com/a/7728830
- https://stackoverflow.com/q/3387691
"""
    @property
    def axes(self):
        """The coordinate-system axes, i.e. the fields of `self.Coord`."""
        return self.Coord._fields
    def __init__(self, axes, *args, **kwargs):
        """Usually initialized through `xpSpace`.

        Parameters
        ----------
        axes: list
            The attributes defining the coordinate system.
        args: entries
            Nothing, or a list of `xp`s.
        """
        # Define coordinate system. Must happen BEFORE self.update(), whose
        # custom __setitem__ validates every key against self.Coord.
        self.Coord = collections.namedtuple('Coord', axes)
        # Write dict
        self.update(*args, **kwargs)
        # Add repr/str
        self.Coord.__repr__ = lambda c: ",".join(
            f"{k}={v!r}" for k, v in zip(c._fields, c))
        self.Coord.__str__ = lambda c: ",".join(str(v) for v in c)
def update(self, *args, **kwargs):
"""Update using custom `__setitem__`."""
# See https://stackoverflow.com/a/2588648
# and https://stackoverflow.com/a/2390997
for k, v in dict(*args, **kwargs).items():
self[k] = v
def __setitem__(self, key, val):
"""Setitem ensuring coordinate conforms."""
try:
key = self.Coord(*key)
except TypeError:
raise TypeError(
f"The key {key!r} did not fit the coord. system "
f"which has axes {self.axes}")
super().__setitem__(key, val)
    def __getitem__(self, key):
        """Flexible indexing.

        Dispatches on the key's type:
        list -> list of items; int/slice -> positional into the value list;
        dict -> subspace; anything else -> plain dict lookup by coordinate.
        """
        # List of items (by a list of indices).
        # Also see get_for().
        if isinstance(key, list):
            return [self[k] for k in key]

        # Single (by integer) or list (by Slice)
        # Note: NOT validating np.int64 here catches quite a few bugs.
        elif isinstance(key, int) or isinstance(key, slice):
            return [*self.values()][key]

        # Subspace (by dict, ie. an informal, partial coordinate)
        elif isinstance(key, dict):
            outer = self.nest(outer_axes=list(key))  # nest
            coord = outer.Coord(*key.values())       # create coord
            inner = outer[coord]                     # chose subspace
            return inner

        # Single item (by Coord object, coz an integer (eg)
        # gets interpreted (above) as a list index)
        else:
            # NB: Dont't use isinstance(key, self.Coord)
            # coz it fails when the namedtuple (Coord) has been
            # instantiated in different places (but with equal params).
            # Also see bugs.python.org/issue7796
            return super().__getitem__(key)
def __getkey__(self, entry):
"""Inverse of `dict.__getitem__`, but also works on coords.
Note: This dunder method is not a "builtin" naming convention."""
coord = (getattr(entry, a, None) for a in self.axes)
return self.Coord(*coord)
def __call__(self, **kwargs):
"""Convenience, that enables, eg.:
>>> xp_dict(da_method="EnKF", infl=1, seed=3)
"""
return self.__getitem__(kwargs)
def get_for(self, ticks, default=None):
"""Almost `[self.get(Coord(x)) for x in ticks]`.
NB: using the "naive" thing: `[self[x] for x in ticks]`
would probably be a BUG coz x gets interpreted as indices
for the internal list."""
singleton = not hasattr(ticks[0], "__iter__")
def coord(xyz): return self.Coord(xyz if singleton else xyz)
return [self.get(coord(x), default) for x in ticks]
def coords(self, **kwargs):
"""Get all `coord`s matching kwargs.
Unlike | |
"""
This module defines the generic base class and the functionality.
All browsers from :py:mod:`browser_history.browsers` inherit this class.
"""
import abc
import csv
import datetime
import json
import os
import shutil
import sqlite3
import tempfile
import typing
from collections import defaultdict
from functools import partial
from io import StringIO
from pathlib import Path
from typing import Any, Callable, Dict, List, Tuple
from urllib.parse import urlparse
import browser_history.utils as utils
HistoryVar = List[Tuple[datetime.datetime, str]]
BookmarkVar = List[Tuple[datetime.datetime, str, str, str]]
class Browser(abc.ABC):
    """A generic class to support all major browsers with minimal
    configuration.

    Currently, only browsers which save the history in SQLite files are
    supported.

    To create a new browser type, the following class variables must be set.

    * :py:class:`name`
    * **paths**: A path string, relative to the home directory, where the
      browsers data is saved.
      At least one of the following must be set:
      :py:class:`windows_path`, :py:class:`mac_path`, :py:class:`linux_path`
    * :py:class:`history_file`
    * :py:class:`history_SQL`

    These following class variable can optionally be set:

    * :py:class:`bookmarks_file`
    * :py:class:`bookmarks_parser`
    * :py:class:`profile_support`
    * :py:class:`profile_dir_prefixes`
    * :py:class:`_local_tz`
    * :py:class:`aliases`: A tuple containing other names for the browser in lowercase

    :param plat: the current platform. A value of :py:class:`None` means the platform
        will be inferred from the system.

    Examples:

    >>> class CustomBrowser(Browser):
    ...     name = 'custom browser'
    ...     aliases = ('custom-browser', 'customhtm')
    ...     history_file = 'history_file'
    ...     history_SQL = \"\"\"
    ...         SELECT
    ...             url
    ...         FROM
    ...             history_visits
    ...     \"\"\"
    ...     linux_path = 'browser'
    ...
    ...     vars(CustomBrowser())
    {'profile_dir_prefixes': [], 'history_dir': PosixPath('/home/username/browser')}
    """

    windows_path: typing.Optional[str] = None  #: browser path on Windows.
    mac_path: typing.Optional[str] = None  #: browser path on Mac OS.
    linux_path: typing.Optional[str] = None  #: browser path on Linux.
    profile_support: bool = False  #: see attribute docstring below.
    """Boolean indicating whether the browser supports multiple profiles."""
    profile_dir_prefixes: typing.Optional[typing.List[typing.Any]] = None
    """List of possible prefixes for the profile directories.

    Keep empty to check all subdirectories in the browser path.
    """
    bookmarks_file: typing.Optional[str] = None
    """Name of the (SQLite, JSON or PLIST) file which stores the bookmarks."""
    _local_tz: typing.Optional[datetime.tzinfo] = (
        datetime.datetime.now().astimezone().tzinfo
    )
    """Gets a datetime object of the current time as per the users timezone."""
    history_dir: Path
    """History directory."""
    aliases: tuple = ()
    """Gets possible names (lower-cased) used to refer to the browser type.

    Useful for making the browser detectable as a default browser which may be
    named in various forms on different platforms. Do not include :py:class:`name`
    in this list"""

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """A name for the browser. Not used anywhere except for logging and errors."""

    @property
    @abc.abstractmethod
    def history_file(self) -> str:
        """Name of the (SQLite) file which stores the history."""

    @property
    @abc.abstractmethod
    def history_SQL(self) -> str:
        """SQL query required to extract history from the ``history_file``.

        The query must return two columns: ``visit_time`` and ``url``.
        The ``visit_time`` must be processed using the `datetime`_
        function with the modifier ``localtime``.

        .. _datetime: https://www.sqlitetutorial.net/sqlite-date-functions/sqlite-datetime-function/
        """  # pylint: disable=line-too-long # noqa: E501

    def __init__(self, plat: typing.Optional[utils.Platform] = None):
        self.profile_dir_prefixes = []
        if plat is None:
            plat = utils.get_platform()
        homedir = Path.home()
        error_string = self.name + " browser is not supported on {}"
        # Resolve the platform-specific data directory; unset per-platform
        # paths mean "unsupported on this platform".
        if plat == utils.Platform.WINDOWS:
            assert self.windows_path is not None, error_string.format("windows")
            self.history_dir = homedir / self.windows_path
        elif plat == utils.Platform.MAC:
            assert self.mac_path is not None, error_string.format("Mac OS")
            self.history_dir = homedir / self.mac_path
        elif plat == utils.Platform.LINUX:
            assert self.linux_path is not None, error_string.format("Linux")
            self.history_dir = homedir / self.linux_path
        else:
            raise NotImplementedError()
        # With profile support but no declared prefixes, match everything.
        if self.profile_support and not self.profile_dir_prefixes:
            self.profile_dir_prefixes.append("*")

    def bookmarks_parser(
        self, bookmark_path
    ):  # pylint: disable=assignment-from-no-return
        """A function to parse bookmarks and convert to readable format."""
        # Base implementation intentionally returns None; subclasses that
        # support bookmarks are expected to override this.

    def profiles(self, profile_file) -> typing.List[str]:
        """Returns a list of profile directories. If the browser is supported
        on the current
        platform but is not installed an empty list will be returned

        :param profile_file: file to search for in the profile directories.
            This should be either ``history_file`` or ``bookmarks_file``.
        :type profile_file: str
        :rtype: list(str)
        """
        if not os.path.exists(self.history_dir):
            utils.logger.info("%s browser is not installed", self.name)
            return []
        if not self.profile_support:
            return ["."]
        profile_dirs = []
        # Walk the whole browser dir; any directory containing profile_file
        # (relative to history_dir) counts as a profile.
        for files in os.walk(str(self.history_dir)):
            for item in files[2]:
                if os.path.split(os.path.join(files[0], item))[-1] == profile_file:
                    path = str(files[0]).split(str(self.history_dir), maxsplit=1)[-1]
                    # Strip leading/trailing path separators.
                    if path.startswith(os.sep):
                        path = path[1:]
                    if path.endswith(os.sep):
                        path = path[:-1]
                    profile_dirs.append(path)
        return profile_dirs

    def history_path_profile(self, profile_dir: Path) -> typing.Optional[Path]:
        """Returns path of the history file for the given ``profile_dir``

        The ``profile_dir`` should be one of the outputs from
        :py:meth:`profiles`

        :param profile_dir: Profile directory (should be a single name,
            relative to ``history_dir``)
        :type profile_dir: :py:class:`pathlib.Path`
        :return: path to history file of the profile
        """
        if self.history_file is None:
            return None
        return self.history_dir / profile_dir / self.history_file

    def bookmarks_path_profile(self, profile_dir: Path) -> typing.Optional[Path]:
        """Returns path of the bookmark file for the given ``profile_dir``

        The ``profile_dir`` should be one of the outputs from
        :py:meth:`profiles`

        :param profile_dir: Profile directory (should be a single name,
            relative to ``history_dir``)
        :type profile_dir: :py:class:`pathlib.Path`
        :return: path to bookmark file of the profile
        """
        if self.bookmarks_file is None:
            return None
        return self.history_dir / profile_dir / self.bookmarks_file

    def paths(self, profile_file):
        """Returns a list of file paths, for all profiles.

        :rtype: list(:py:class:`pathlib.Path`)
        """
        return [
            self.history_dir / profile_dir / profile_file
            for profile_dir in self.profiles(profile_file=profile_file)
        ]

    def history_profiles(self, profile_dirs):
        """Returns history of profiles given by `profile_dirs`.

        :param profile_dirs: List or iterable of profile directories. Can be
            obtained from :py:meth:`profiles`
        :type profile_dirs: list(str)
        :return: Object of class :py:class:`browser_history.generic.Outputs`
            with the data member histories set to
            list(tuple(:py:class:`datetime.datetime`, str))
        :rtype: :py:class:`browser_history.generic.Outputs`
        """
        history_paths = [
            self.history_path_profile(profile_dir) for profile_dir in profile_dirs
        ]
        return self.fetch_history(history_paths)

    def fetch_history(self, history_paths=None, sort=True, desc=False):
        """Returns history of all available profiles stored in SQL.

        The returned datetimes are timezone-aware with the local timezone set
        by default.

        The history files are first copied to a temporary location and then
        queried, this might lead to some additional overhead and results
        returned might not be the latest if the browser is in use. This is
        done because the SQlite files are locked by the browser when in use.

        :param history_paths: (optional) a list of history files.
        :type history_paths: list(:py:class:`pathlib.Path`)
        :param sort: (optional) flag to specify if the output should be
            sorted. Default value set to True.
        :type sort: boolean
        :param desc: (optional) flag to specify asc/desc
            (Applicable if sort is True) Default value set to False.
        :type asc: boolean
        :return: Object of class :py:class:`browser_history.generic.Outputs`
            with the data member histories set to
            list(tuple(:py:class:`datetime.datetime`, str)).
            If the browser is not installed, this object will be empty.
        :rtype: :py:class:`browser_history.generic.Outputs`
        """
        if history_paths is None:
            history_paths = self.paths(profile_file=self.history_file)
        output_object = Outputs(fetch_type="history")
        with tempfile.TemporaryDirectory() as tmpdirname:
            for history_path in history_paths:
                # Copy to tmp first: the live DB may be locked by the browser.
                copied_history_path = shutil.copy2(history_path.absolute(), tmpdirname)
                # mode=ro: open the copy read-only via SQLite URI filename.
                conn = sqlite3.connect(f"file:{copied_history_path}?mode=ro", uri=True)
                cursor = conn.cursor()
                cursor.execute(self.history_SQL)
                # Attach the local timezone to the naive timestamps returned
                # by the SQL query ("%Y-%m-%d %H:%M:%S").
                date_histories = [
                    (
                        datetime.datetime.strptime(d, "%Y-%m-%d %H:%M:%S").replace(
                            tzinfo=self._local_tz
                        ),
                        url,
                    )
                    for d, url in cursor.fetchall()
                ]
                output_object.histories.extend(date_histories)
                # NOTE(review): sorting inside the per-path loop re-sorts the
                # accumulated list after every file — result is equivalent to
                # one final sort, just with repeated work.
                if sort:
                    output_object.histories.sort(reverse=desc)
                conn.close()
        return output_object

    def fetch_bookmarks(self, bookmarks_paths=None, sort=True, desc=False):
        """Returns bookmarks of all available profiles stored in SQL or JSON
        or plist.

        The returned datetimes are timezone-aware with the local timezone set
        by default.

        The bookmark files are first copied to a temporary location and then
        queried, this might lead to some additional overhead and results
        returned might not be the latest if the browser is in use. This is
        done because the SQlite files are locked by the browser when in use.

        :param bookmarks_paths: (optional) a list of bookmark files.
        :type bookmarks_paths: list(:py:class:`pathlib.Path`)
        :param sort: (optional) flag to specify if the output should be
            sorted. Default value set to True.
        :type sort: boolean
        :param desc: (optional) flag to specify asc/desc
            (Applicable if sort is True) Default value set to False.
        :type asc: boolean
        :return: Object of class :py:class:`browser_history.generic.Outputs`
            with the attribute bookmarks set to a list of
            (timestamp, url, title, folder) tuples
        :rtype: :py:class:`browser_history.generic.Outputs`
        """
        assert (
            self.bookmarks_file is not None
        ), "Bookmarks are not supported for {} browser".format(self.name)
        if bookmarks_paths is None:
            bookmarks_paths = self.paths(profile_file=self.bookmarks_file)
        output_object = Outputs(fetch_type="bookmarks")
        with tempfile.TemporaryDirectory() as tmpdirname:
            for bookmarks_path in bookmarks_paths:
                # Unlike fetch_history, missing bookmark files are skipped.
                if not os.path.exists(bookmarks_path):
                    continue
                copied_bookmark_path = shutil.copy2(
                    bookmarks_path.absolute(), tmpdirname
                )
                date_bookmarks = self.bookmarks_parser(copied_bookmark_path)
                output_object.bookmarks.extend(date_bookmarks)
        if sort:
            output_object.bookmarks.sort(reverse=desc)
        return output_object
class Outputs:
"""
A generic class to encapsulate history and | |
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for pyvo.dal.query
"""
from __future__ import print_function, division
import os, sys, shutil, re, imp, glob, tempfile, random, time
import unittest, pdb
from urllib2 import URLError, HTTPError
import pyvo.dal.query as dalq
import pyvo.dal.dbapi2 as daldbapi
# from astropy.io.vo import parse as votableparse
from astropy.io.votable.tree import VOTableFile
from pyvo.dal.query import _votableparse as votableparse
from astropy.utils.data import get_pkg_data_filename
from . import aTestSIAServer as testserve
# Sample service responses bundled with the test data.
siaresultfile = "data/neat-sia.xml"
ssaresultfile = "data/jhu-ssa.xml"
# Randomize the test-server port (8094-8103), presumably to dodge collisions
# with servers lingering from earlier test runs — TODO confirm.
testserverport = 8084
testserverport += 10
testserverport += random.randint(0,9)
# Populated by the (currently commented-out) module-level setup below.
testserver = None
# def setup_module(module):
# """
# module level setup: start test server
# """
# testserver = testserve.TestServer(testserverport)
# testserver.start()
# def teardown_module(module):
# """
# shutdown the test server
# """
# if testserver and testserver.isAlive():
# testserver.shutdown()
class DALAccessErrorTest(unittest.TestCase):
    """Tests for DALAccessError's reason/url properties.

    Uses assertEqual/assertTrue: assertEquals and assert_ are deprecated
    unittest aliases (removed in Python 3.12).
    """

    msg = "nya-nya"
    url = "http://localhost"

    def testProperties2(self):
        e = dalq.DALAccessError(self.msg, self.url)
        self.assertEqual(self.msg, e.reason)
        self.assertEqual(self.url, e.url)

        e.reason = "poof"
        self.assertEqual("poof", e.reason)
        # Deleting the reason should restore the class default.
        del e.reason
        self.assertEqual(dalq.DALAccessError._defreason, e.reason)

    def testProperties1(self):
        e = dalq.DALAccessError(self.msg)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.url is None)

    def testPropertiesDef(self):
        e = dalq.DALAccessError()
        self.assertEqual(dalq.DALAccessError._defreason, e.reason)
        self.assertTrue(e.url is None)
class DALServiceErrorTest(unittest.TestCase):
    """Tests for DALServiceError properties and from_except().

    Uses assertEqual/assertTrue: assertEquals and assert_ are deprecated
    unittest aliases (removed in Python 3.12).
    """

    msg = "nya-nya"
    code = 404
    url = "http://localhost/"

    def testProperties4(self):
        c = HTTPError("http://localhost/", self.code, self.msg, None, None)
        e = dalq.DALServiceError(self.msg, self.code, c, self.url)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.cause is c)
        self.assertEqual(self.code, e.code)
        self.assertEqual(self.url, e.url)

        # cause and code are writable/deletable.
        del e.cause
        self.assertTrue(e.cause is None)
        e.cause = c
        self.assertTrue(e.cause is c)

        e.code = 505
        self.assertEqual(505, e.code)
        del e.code
        self.assertTrue(e.code is None)

    def testProperties3(self):
        c = HTTPError("http://localhost/", self.code, self.msg, None, None)
        e = dalq.DALServiceError(self.msg, self.code, c)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.cause is c)
        self.assertEqual(self.code, e.code)
        self.assertTrue(e.url is None)

    def testProperties2(self):
        e = dalq.DALServiceError(self.msg, self.code)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.cause is None)
        self.assertEqual(self.code, e.code)
        self.assertTrue(e.url is None)

    def testProperties1(self):
        e = dalq.DALServiceError(self.msg)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.cause is None)
        self.assertTrue(e.code is None)
        self.assertTrue(e.url is None)

    def testPropertiesDef(self):
        e = dalq.DALServiceError()
        self.assertTrue(e.reason and e.reason.startswith("Unknown service "))
        self.assertTrue(e.cause is None)
        self.assertTrue(e.code is None)
        self.assertTrue(e.url is None)

    def testFromExceptHTTP(self):
        url = "http://localhost/"
        c = HTTPError(url, self.code, self.msg, None, None)
        e = dalq.DALServiceError.from_except(c)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.cause is c)
        self.assertEqual(self.code, e.code)
        self.assertEqual(url, e.url)

    def testFromExceptURL(self):
        url = "http://localhost/"
        c = URLError(self.msg)
        e = dalq.DALServiceError.from_except(c, url)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.cause is c)
        self.assertTrue(e.code is None)
        self.assertEqual(url, e.url)

    def testFromExcept(self):
        c = RuntimeError(self.msg)
        e = dalq.DALServiceError.from_except(c)
        self.assertEqual(e.reason, "RuntimeError: " + self.msg)
        self.assertTrue(e.cause is c)
        self.assertTrue(e.code is None)
        self.assertTrue(e.url is None)
class DALQueryErrorTest(unittest.TestCase):
    """Tests for DALQueryError's reason/label properties.

    Uses assertEqual/assertTrue: assertEquals and assert_ are deprecated
    unittest aliases (removed in Python 3.12).
    """

    msg = "nya-nya"
    label = "goofed"

    def testProperties2(self):
        e = dalq.DALQueryError(self.msg, self.label)
        self.assertEqual(self.msg, e.reason)
        self.assertEqual(self.label, e.label)

        e.reason = "poof"
        self.assertEqual("poof", e.reason)

        e.label = "OVERFLOW"
        self.assertEqual("OVERFLOW", e.label)
        del e.label
        self.assertTrue(e.label is None)

    def testProperties1(self):
        e = dalq.DALQueryError(self.msg)
        self.assertEqual(self.msg, e.reason)
        self.assertTrue(e.label is None)

    def testPropertiesDef(self):
        e = dalq.DALQueryError()
        self.assertTrue(e.reason and e.reason.startswith("Unknown DAL Query "))
        self.assertTrue(e.label is None)
class DALResultsTest(unittest.TestCase):
    """Tests for DALResults built from a sample SIA votable.

    Uses assertEqual/assertTrue/assertAlmostEqual: assertEquals, assert_ and
    assertAlmostEquals are deprecated unittest aliases (removed in Python 3.12).
    """

    def setUp(self):
        resultfile = get_pkg_data_filename(siaresultfile)
        self.tbl = votableparse(resultfile)

    def testCtor(self):
        self.result = dalq.DALResults(self.tbl)
        self.assertTrue(isinstance(self.result._fldnames, list))
        self.assertTrue(self.result.votable is not None)

    def testProps(self):
        self.testCtor()
        self.assertEqual(self.result.nrecs, 2)
        # nrecs must be read-only.
        try:
            self.result.nrecs = 4
            self.fail("size is not read-only")
        except AttributeError:
            pass

        names = self.result.fieldnames()
        self.assertTrue(isinstance(names, list))
        self.assertEqual(len(names), 10)
        # NOTE(review): `unicode` is a Python-2-only builtin; this module also
        # imports urllib2, so it targets Python 2 throughout.
        for i in range(len(names)):
            self.assertTrue(isinstance(names[i], str) or
                            isinstance(names[i], unicode),
                            "field name #{0} not a string: {1}".format(i,type(names[i])))
            self.assertTrue(len(names[i]) > 0, "field name #{0} is empty".format(i))

        fd = self.result.fielddesc()
        self.assertTrue(isinstance(fd, list))
        self.assertEqual(len(fd), 10)
        for fld in fd:
            self.assertTrue(hasattr(fld,'name'))
            self.assertTrue(hasattr(fld,'ID'))
            self.assertTrue(hasattr(fld,'ucd'))
            self.assertTrue(hasattr(fld,'datatype'))

        for i in range(len(names)):
            fld = self.result.getdesc(names[i])
            self.assertTrue(fld is fd[i])

        fld = self.result.getdesc("Format")
        self.assertEqual(fld.name, "Format")
        # self.assertEqual(fld.ID, "Format")
        self.assertEqual(fld.ucd, "VOX:Image_Format")
        self.assertEqual(fld.datatype, "char")
        self.assertEqual(fld.arraysize, "*")
        self.assertTrue(fld.utype is None)

    def testValue(self):
        self.testCtor()
        self.assertEqual(self.result.getvalue("Format", 0), b"image/fits")
        self.assertEqual(self.result.getvalue("Format", 1), b"image/jpeg")
        self.assertEqual(self.result.getvalue("Dim", 0), 2)
        val = self.result.getvalue("Size", 0)
        self.assertEqual(len(val), 2)
        self.assertEqual(val[0], 300)
        self.assertEqual(val[1], 300)
        self.assertRaises(ValueError, self.result.getvalue, "Goober", 0)

    def testGetRecord(self):
        self.testCtor()
        rec = self.result.getrecord(0)
        self.assertTrue(rec is not None)
        self.assertTrue(isinstance(rec, dalq.Record))
        rec = self.result.getrecord(1)
        self.assertTrue(rec is not None)
        self.assertTrue(isinstance(rec, dalq.Record))
        self.assertRaises(IndexError, self.result.getrecord, 2)

    def testGetColumn(self):
        self.testCtor()
        col = self.result.getcolumn('Ra')
        shifted = col + 0.05
        self.assertAlmostEqual(0.05, shifted[0]-col[0])
        self.assertRaises(ValueError, self.result.getcolumn, 'goob')

    def testIter(self):
        self.testCtor()
        i = 0
        for rec in self.result:
            self.assertTrue(rec is not None)
            self.assertTrue(isinstance(rec, dalq.Record))
            i += 1
        self.assertEqual(i, 2)

    def testCursor(self):
        self.testCtor()
        c = self.result.cursor()
        self.assertTrue(c is not None)
        self.assertTrue(isinstance(c, daldbapi.Cursor))

    def testByUcd(self):
        self.testCtor()
        self.assertEqual(self.result.fieldname_with_ucd("POS_EQ_RA_MAIN"),"Ra")
        self.assertEqual(self.result.fieldname_with_ucd("VOX:Image_AccessReference"),"URL")
class RecordTest(unittest.TestCase):
    """Tests for Record, the per-row view of a DALResults.

    Uses assertEqual/assertTrue: assertEquals and assert_ are deprecated
    unittest aliases (removed in Python 3.12).
    """

    def setUp(self):
        resultfile = get_pkg_data_filename(siaresultfile)
        self.tbl = votableparse(resultfile)
        self.result = dalq.DALResults(self.tbl)
        self.rec = self.result.getrecord(0)

    def testFields(self):
        fnames = self.result.fieldnames()
        reckeys = self.rec.keys()
        for name in fnames:
            self.assertTrue(name in reckeys, "Missing fieldname: "+name)

    def testValues(self):
        self.assertEqual(self.rec["Format"], b"image/fits")
        self.assertEqual(self.rec["Dim"], 2)
        val = self.rec["Size"]
        self.assertEqual(len(val), 2)
        self.assertEqual(val[0], 300)
        self.assertEqual(val[1], 300)
        try:
            self.rec["Goober"]
            self.fail("Failed to raise KeyError on bad key")
        except KeyError:
            pass

    def testSuggestExtension(self):
        self.assertEqual(self.rec.suggest_extension("goob"), "goob")
        self.assertTrue(self.rec.suggest_extension() is None)

    def testHasKey(self):
        self.assertEqual(self.rec["Format"], b"image/fits")
        # has_key here is the project Record's own API, not dict.has_key.
        self.assertTrue(self.rec.has_key('Format'))
        self.assertTrue('Format' in self.rec)
        self.assertFalse(self.rec.has_key('Goober'))
        self.assertFalse('Goober' in self.rec)
class EnsureBaseURLTest(unittest.TestCase):
    """Tests for dalq.ensure_baseurl's '?'/'&' suffix handling.

    Uses assertEqual: assertEquals is a deprecated unittest alias (removed in
    Python 3.12).
    """

    def testFix(self):
        # No query string -> append '?'; existing params -> append '&'.
        self.assertEqual(dalq.ensure_baseurl("http://localhost")[-1], '?')
        self.assertEqual(dalq.ensure_baseurl("http://localhost/sia")[-1], '?')
        self.assertEqual(dalq.ensure_baseurl("http://localhost/sia?cat=neat")[-1], '&')
        self.assertEqual(dalq.ensure_baseurl("http://localhost/sia?cat=neat&usecache=yes")[-1], '&')

        # Already-terminated URLs must pass through unchanged.
        self.assertEqual(dalq.ensure_baseurl("http://localhost?"),
                         "http://localhost?")
        self.assertEqual(dalq.ensure_baseurl("http://localhost/sia?"),
                         "http://localhost/sia?")
        self.assertEqual(dalq.ensure_baseurl("http://localhost/sia?cat=neat&"),
                         "http://localhost/sia?cat=neat&")
        self.assertEqual(dalq.ensure_baseurl("http://localhost/sia?cat=neat&usecache=yes&"),
                         "http://localhost/sia?cat=neat&usecache=yes&")
class MimeCheckTestCase(unittest.TestCase):
    """Tests for dalq.is_mime_type()."""

    def testGood(self):
        # All of these are well-formed MIME types and must be accepted.
        for mimetype in ("image/jpeg",
                         "application/fits",
                         "application/x-fits",
                         "application/fits",
                         "application/votable+xml",
                         "application/fits;convention=STScI-STIS"):
            self.assertTrue(dalq.is_mime_type(mimetype))

    def testBad(self):
        # Missing subtype / too many parts: must be rejected.
        for mimetype in ("image", "image/votable/xml"):
            self.assertFalse(dalq.is_mime_type(mimetype))
class DALServiceTest(unittest.TestCase):
    """Tests for DALService construction, properties and query creation.

    Uses assertEqual/assertTrue/assertAlmostEqual: assertEquals, assert_ and
    assertAlmostEquals are deprecated unittest aliases (removed in Python 3.12).
    """

    def setUp(self):
        self.baseurl = "http://localhost/sia"

    def testCtor(self):
        self.res = {"title": "Archive", "shortName": "arch"}
        self.srv = dalq.DALService(self.baseurl, "sga", "2.0", self.res)

    def testCtorSimpleResource(self):
        import pyvo.registry.vao as reg
        regresultfile = \
            get_pkg_data_filename("../../registry/tests/data/reg-short.xml")
        res = dalq.DALResults(votableparse(regresultfile))
        # import pytest; pytest.set_trace()
        srv = dalq.DALService(self.baseurl, "sga", "3.0",
                              reg.SimpleResource(res, 0))
        self.assertTrue(len(srv.info.keys()) > 0)
        self.assertTrue(srv.info.get("title") is not None)

    def testProps(self):
        self.testCtor()
        self.assertEqual(self.srv.baseurl, self.baseurl)
        self.assertEqual(self.srv.protocol, "sga")
        self.assertEqual(self.srv.version, "2.0")
        # All three core properties must be read-only.
        try:
            self.srv.baseurl = "goober"
            self.fail("baseurl not read-only")
        except AttributeError:
            pass
        try:
            self.srv.protocol = "sia"
            self.fail("protocol not read-only")
        except AttributeError:
            pass
        try:
            self.srv.version = "1.0"
            self.fail("version not read-only")
        except AttributeError:
            pass

        self.assertEqual(self.srv.info["title"], "Archive")
        self.assertEqual(self.srv.info["shortName"], "arch")
        # info should be a copy: mutating it must not affect the original.
        self.srv.info["title"] = "Sir"
        self.assertEqual(self.res["title"], "Archive")

    def testNoResmeta(self):
        srv = dalq.DALService(self.baseurl)
        self.assertEqual(srv.baseurl, self.baseurl)
        self.assertTrue(srv.info is not None)
        self.assertTrue(hasattr(srv.info, "get"))
        self.assertEqual(len(srv.info.keys()), 0)

    def testCreateQuery(self):
        self.testCtor()
        q = self.srv.create_query()
        self.assertTrue(isinstance(q, dalq.DALQuery))
        self.assertEqual(q.baseurl, self.baseurl)
        self.assertEqual(q.protocol, self.srv.protocol)
        self.assertEqual(q.version, self.srv.version)

    def testCreateQueryWithKws(self):
        self.testCtor()
        q = self.srv.create_query(RA=12.045, DEC=-13.08, SR=0.01)
        self.assertTrue(isinstance(q, dalq.DALQuery))
        self.assertEqual(q.baseurl, self.baseurl)
        self.assertEqual(q.protocol, self.srv.protocol)
        self.assertEqual(q.version, self.srv.version)
        self.assertAlmostEqual(q.getparam('RA'), 12.045)
        self.assertAlmostEqual(q.getparam('DEC'), -13.08)
        self.assertAlmostEqual(q.getparam('SR'), 0.01)
class DALQueryTest(unittest.TestCase):
    def setUp(self):
        # Base URL shared by every query constructed in this test case.
        self.baseurl = "http://localhost/sia"
def testCtor(self):
self.query = dalq.DALQuery(self.baseurl, "sga", "2.0")
self.assert_(self.query.getparam("format") is None)
def testProps(self):
self.testCtor()
self.assertEquals(self.query.baseurl, self.baseurl)
self.assertEquals(self.query.protocol, "sga")
self.assertEquals(self.query.version, "2.0")
self.query.baseurl = "http://gomer.net/infinite/loop?"
self.assertEquals(self.query.baseurl,
"http://gomer.net/infinite/loop?");
def testParam(self):
self.testCtor()
self.assertEquals(len(self.query.paramnames()), 0,
"param set should be empty: " +
str(self.query.paramnames()))
self.assert_(self.query.getparam("RA") is None)
self.query.setparam("RA", 51.235)
self.assertEquals(len(self.query.paramnames()), 1)
self.assertEquals(self.query.getparam("RA"), 51.235)
self.query.setparam("RA", 127.235)
self.assertEquals(len(self.query.paramnames()), 1)
self.assertEquals(self.query.getparam("RA"), 127.235)
self.query.setparam("DEC", -13.49677)
self.assertEquals(len(self.query.paramnames()), 2)
self.assertEquals(self.query.getparam("DEC"), -13.49677)
self.query.unsetparam("FORMAT")
self.assertEquals(len(self.query.paramnames()), 2)
self.query.unsetparam("RA")
self.assertEquals(len(self.query.paramnames()), 1)
self.assertEquals(self.query.getparam("DEC"), -13.49677)
self.assert_(self.query.getparam("RA") is None)
def testQueryURL(self):
self.testCtor()
self.query.setparam("RA", 51.235)
qurl = self.query.getqueryurl()
self.assertEquals(qurl, self.baseurl+'?RA=51.235')
self.query.setparam("DEC", -13.49677)
qurl = self.query.getqueryurl()
self.assert_(qurl == self.baseurl+'?RA=51.235&DEC=-13.49677' or
qurl == self.baseurl+'?DEC=-13.49677&RA=51.235')
self.query.setparam("SR", "1.0")
qurl = self.query.getqueryurl()
self.assert_(qurl == self.baseurl+'?RA=51.235&SR=1.0&DEC=-13.49677' or
qurl == self.baseurl+'?DEC=-13.49677&SR=1.0&RA=51.235' or
qurl == self.baseurl+'?RA=51.235&DEC=-13.49677&SR=1.0' or
qurl == self.baseurl+'?DEC=-13.49677&RA=51.235&SR=1.0' or
qurl == self.baseurl+'?SR=1.0&DEC=-13.49677&RA=51.235' or
qurl == self.baseurl+'?SR=1.0&RA=51.235&DEC=-13.49677')
def testEncode(self):
self.testCtor()
self.query.setparam("NaMe", "a val")
qurl = self.query.getqueryurl()
self.assertEquals(qurl, self.baseurl+'?NaMe=a+val')
self.testCtor()
self.query.setparam("NaMe", "a+val")
qurl = self.query.getqueryurl()
self.assertEquals(qurl, self.baseurl+'?NaMe=a%2Bval')
def testEncodeList(self):
self.testCtor()
self.query.setparam("POS", (5.231, -13.441))
qurl = self.query.getqueryurl()
self.assertEquals(qurl, self.baseurl+'?POS=5.231,-13.441')
class QueryExecuteTest(unittest.TestCase):
srvr = None
    @classmethod
    def setup_class(cls):
        # Spin up the local HTTP test server once for the whole class and
        # give it a moment to start listening before tests hit it.
        cls.srvr = testserve.get_server(testserverport)
        cls.srvr.start()
        time.sleep(0.5)
@classmethod
def teardown_class(cls):
if cls.srvr.isAlive():
cls.srvr.shutdown()
if cls.srvr.isAlive():
print("prob")
def testExecute(self):
q = dalq.DALQuery("http://localhost:{0}/sia".format(self.srvr.port))
q.setparam("foo", "bar")
# pdb.set_trace()
results = q.execute()
self.assert_(isinstance(results, dalq.DALResults))
self.assertEquals(results.nrecs, 2)
def testExecuteStream(self):
q = dalq.DALQuery("http://localhost:{0}/sia".format(self.srvr.port))
q.setparam("foo", "bar")
# pdb.set_trace()
strm = q.execute_stream()
self.assert_(strm is not None)
self.assert_(hasattr(strm, "read"))
results = strm.read()
strm.close()
self.assert_(results.startswith(b"<?xml version="))
def testExecuteRaw(self):
q = dalq.DALQuery("http://localhost:{0}/sia".format(self.srvr.port))
q.setparam("foo", "bar")
# pdb.set_trace()
data = q.execute_raw()
self.assert_(data is not None)
if sys.version_info[0] >= 3:
self.assert_(isinstance(data, str) or isinstance(data, bytes))
else:
self.assert_(isinstance(data, unicode) or isinstance(data, str))
self.assert_(data.startswith(b"<?xml version="))
def testExecuteVotable(self):
q = dalq.DALQuery("http://localhost:{0}/sia".format(self.srvr.port))
q.setparam("foo", "bar")
# pdb.set_trace()
results = q.execute_votable()
self.assert_(isinstance(results, VOTableFile))
    def testExecuteServiceErr(self):
        # A bad endpoint (HTTP 404) must surface as DALServiceError from execute().
        q = dalq.DALQuery("http://localhost:{0}/goob".format(self.srvr.port))
        q.setparam("foo", "bar")
        # pdb.set_trace()
        self.assertRaises(dalq.DALServiceError, q.execute)
    def testExecuteRawServiceErr(self):
        # Same 404 behavior for the raw-response path.
        q = dalq.DALQuery("http://localhost:{0}/goob".format(self.srvr.port))
        q.setparam("foo", "bar")
        # pdb.set_trace()
        self.assertRaises(dalq.DALServiceError, q.execute_raw)
def testExecuteStreamServiceErr(self):
q = dalq.DALQuery("http://localhost:{0}/goob".format(self.srvr.port))
q.setparam("foo", "bar")
# pdb.set_trace()
try:
q.execute_stream()
self.fail("failed to raise exception on bad url")
except dalq.DALServiceError as e:
self.assertEquals(e.code, 404)
self.assertEquals(e.reason, "Not Found")
self.assert_(isinstance(e.cause, HTTPError))
except Exception as e:
self.fail("wrong exception raised: " + str(type(e)))
    def testExecuteVotableServiceErr(self):
        # Same 404 behavior for the votable-parsing path.
        q = dalq.DALQuery("http://localhost:{0}/goob".format(self.srvr.port))
        q.setparam("foo", "bar")
        # pdb.set_trace()
        self.assertRaises(dalq.DALServiceError, q.execute_votable)
def testExecuteRawQueryErr(self):
q = dalq.DALQuery("http://localhost:{0}/err".format(self.srvr.port))
q.setparam("foo", "bar")
# pdb.set_trace()
data = q.execute_raw()
self.assert_(data is not None)
if sys.version_info[0] >= 3:
self.assert_(isinstance(data, str) or isinstance(data, bytes))
else:
self.assert_(isinstance(data, unicode) or isinstance(data, str))
self.assert_(data.startswith(b"<?xml version="))
self.assert_(b'<INFO name="QUERY_STATUS" value="ERR' in data)
def testExecuteQueryErr(self):
q = dalq.DALQuery("http://localhost:{0}/err".format(self.srvr.port))
q.setparam("foo", "bar")
# pdb.set_trace()
try:
q.execute()
self.fail("failed to raise exception | |
this job
`('kill', jobno)` : immediately kill this job
`('available_cores')` : put available core count in outqueue
job_info must be in the form:
`(int(jobno), str(command), int(threads), list(dependencies))`
outqueue : multiprocessing.Queue
job information available_cores if argument was available_cores
max_jobs : int
The maximum number of concurrently running jobs, will be adjusted to
be 4 <= max_jobs <= cpu_count. The minimum of 4 jobs is a hard limit
and is enforced, so a machine with only 2 cores will still end up with
4 jobs running. This is required to avoid hangs on some kinds of fyrd
jobs, where a split job is created from a child process.
Returns
-------
bool
If 'stop' is sent, will return `True` if there are no running or
pending jobs and `False` if there are still running or pending jobs.
Raises
------
QueueError
If invalid argument put into inqueue
"""
if not _WE_ARE_A_SERVER:
return
tries = 5
while tries:
qserver = get_server()
if qserver:
break
_sleep(1)
tries -= 1
continue
if not qserver:
qserver = get_server(raise_on_error=True)
max_jobs = int(max_jobs)
if max_jobs < mp.cpu_count():
max_jobs = mp.cpu_count()
if max_jobs < 4:
max_jobs = 4
available_cores = max_jobs
running = {} # {jobno: Process}
queued = _OD() # {jobno: {'command': command, 'depends': depends, ...}
done = {} # {jobno: Process}
jobs = [] # [jobno, ...]
put_core_info = False
while True:
# Get everything from the input queue first, queue everything
while True:
if inqueue.empty():
break
info = inqueue.get() # Will block if input queue empty
if info == 'stop' or info[0] == 'stop':
good = True
pids = []
if running:
good = False
for jobno, job in running.items():
qserver.update_job(jobno, state='killed')
pids.append(job.pid)
job.terminate()
if queued:
good = False
for jobno, job in queued.items():
qserver.update_job(jobno, state='killed')
for pid in pids:
if _pid_exists(pid):
_os.kill(pid, _signal.SIGKILL)
outqueue.put(good)
return good
if info == 'available_cores' or info[0] == 'available_cores':
put_core_info = True
continue
if info[0] == 'kill':
jobno = int(info[1])
if jobno in running:
running[jobno].terminate()
qserver.update_job(jobno, state='killed')
running.pop(jobno)
if jobno in queued:
queued.pop(jobno)
qserver.update_job(jobno, state='killed')
continue
if info[0] != 'queue':
raise QueueError('Invalid argument: {0}'.format(info[0]))
jobno, command, threads, depends, stdout, stderr, runpath = info[1]
if not command:
raise QueueError('Job command is {0}, cannot continue'
.format(type(command)))
jobno = int(jobno)
threads = int(threads)
# Run anyway
if threads >= max_jobs:
threads = max_jobs-1
# Add to queue
if jobno in jobs:
# This should never happen
raise QueueError('Job already submitted!')
jobs.append(jobno)
queued[jobno] = {'command': command, 'threads': threads,
'depends': depends, 'stdout': stdout,
'stderr': stderr, 'runpath': runpath}
qserver.update_job(jobno, state='pending')
# Update running and done queues
for jobno, process in running.items():
if process.is_alive():
continue
# Completed
process.join()
code = process.exitcode
state = 'completed' if code == 0 else 'failed'
qserver.update_job(jobno, state=state, exitcode=code)
done[jobno] = process
# Remove completed jobs from running
for jobno in done:
if jobno in running:
p = running.pop(jobno)
available_cores += p.cores
# Start jobs if can run
if available_cores > max_jobs:
available_cores = max_jobs
if available_cores < 0: # Shouldn't happen
available_cores = 0
if put_core_info:
outqueue.put(available_cores)
put_core_info = False
for jobno, info in queued.items():
if info['depends']:
not_done = []
for dep_id in info['depends']:
if dep_id not in done:
not_done.append(dep_id)
if not_done:
continue
if info['threads'] <= available_cores:
if info['runpath']:
curpath = _os.path.abspath('.')
_os.chdir(info['runpath'])
p = mp.Process(
target=_run.cmd,
args=(info['command'],),
kwargs={
'stdout': info['stdout'],
'stderr': info['stderr'],
}
)
p.daemon = True
p.start()
running[jobno] = p
available_cores -= info['threads']
p.cores = info['threads']
if info['runpath']:
_os.chdir(curpath)
qserver.update_job(jobno, state='running', pid=p.pid)
# Clear running jobs from queue
for jobno in running:
if jobno in queued:
queued.pop(jobno)
# Block for a moment to avoid running at 100% cpu
_sleep(SLEEP_LEN)
###############################################################################
# Daemon Creation and Management Functions #
###############################################################################
def get_uri():
    """Get the URI from the config or file.

    Tests if URI is active before returning.

    Returns
    -------
    uri : str or None
        If file does not exist or URI is inactive, returns None and deletes
        URI_FILE, else returns the URI as a string.
    """
    configured = _conf.get_option('local', 'server_uri')
    if configured:
        status = _test_uri(configured)
        if status == 'connected':
            # Cache the working URI to disk so other processes can find it
            with open(URI_FILE, 'w') as handle:
                handle.write(str(configured))
            return configured
        if status == 'invalid':
            # Bad URI in config: clear it and give up
            _conf.set_option('local', 'server_uri', None)
            return None
    # Fall back to the URI file on disk
    if not _os.path.isfile(URI_FILE):
        return None
    with open(URI_FILE) as handle:
        file_uri = handle.read().strip()
    if _test_uri(file_uri) == 'connected':
        return file_uri
    # Stale file: remove it so we don't retry a dead URI
    _os.remove(URI_FILE)
    return None
def _test_uri(uri):
    """Test if a URI refers to an accessible Pyro4 object.

    Parameters
    ----------
    uri : str
        A Pyro4 URI string.

    Returns
    -------
    str
        'connected' if the object responds, 'disconnect' if the URI parses
        but the object is unreachable, 'invalid' if the URI cannot be parsed.
    """
    try:
        p = Pyro4.Proxy(uri)
    except Pyro4.errors.PyroError:
        # Fixed log message: previously read "URI {0} in an invalid URI"
        _logme.log('URI {0} is an invalid URI'.format(uri), 'error')
        return 'invalid'
    try:
        if p._pyroBind():
            out = 'connected'
        elif p.available_cores:
            # attribute access goes over the wire, so this also proves liveness
            out = 'connected'
        else:
            out = 'disconnect'
        if out == 'connected':
            p._pyroRelease()
        return out
    except Pyro4.errors.CommunicationError:
        _logme.log('URI {0} is not connected'.format(uri), 'warn')
        return 'disconnect'
def daemonizer():
    """Create the server daemon.

    Refuses to start if another daemon is already reachable. Re-uses the
    host/port/object id from a configured-but-disconnected URI when one is
    present; otherwise lets Pyro4 pick defaults. Writes PID_FILE and
    URI_FILE before entering the request loop (blocks until shutdown).
    """
    # Get pre-configured URI if available
    curi = _conf.get_option('local', 'server_uri')
    utest = _test_uri(curi) if curi else None
    # Test if there is already another daemon running
    crun = True if utest == 'connected' else False
    if crun or server_running():
        raise QueueError('Daemon already running, cannot start')
    # Set port and host if present in URI
    if utest == 'disconnect':
        # URI parses but nothing is listening: reclaim its address
        uri = Pyro4.URI(curi)
        args = {'host': uri.host, 'port': uri.port}
        objId = uri.object
    else:
        args = {}
        objId = "QueueManager"
    # Create the daemon
    with Pyro4.Daemon(**args) as daemon:
        queue_manager = QueueManager(daemon)
        uri = daemon.register(queue_manager, objectId=objId)
        # daemon.housekeeping = queue_manager._housekeeping
        # Record pid and uri so other processes can find/kill this daemon
        with open(PID_FILE, 'w') as fout:
            fout.write(str(_os.getpid()))
        with open(URI_FILE, 'w') as fout:
            fout.write(str(uri))
        print("Ready. Object uri =", uri)
        daemon.requestLoop()
def shutdown_queue():
    """Kill the server and queue gracefully.

    Asks the server to wind down its jobs first, then force-kills whatever
    remains via kill_queue().

    Returns
    -------
    int
        0 if all jobs completed cleanly, 1 otherwise.
    """
    success = True
    server = get_server(start=False)
    if not server:
        _logme.log('Server appears already stopped', 'info')
    else:
        try:
            result = server.shutdown_jobs()
        except (OSError, Pyro4.errors.CommunicationError):
            # Server died mid-request; completion state is unknowable
            result = None
        _logme.log('Local queue runner terminated.', 'debug')
        if result is None:
            _logme.log('Could not determine process completion state',
                       'warn')
            success = False
        elif result:
            _logme.log('All jobs completed', 'debug')
        else:
            _logme.log('Some jobs failed!', 'error', also_write='stderr')
            success = False
    kill_queue()
    _logme.log('Local queue terminated', 'info')
    return 0 if success else 1
def kill_queue():
    """Kill the server and queue without trying to clean jobs.

    Terminates the recorded server process tree (SIGKILL as a last resort)
    and removes the pid and uri files.
    """
    if _os.path.isfile(PID_FILE):
        with open(PID_FILE) as handle:
            server_pid = int(handle.read().strip())
        _os.remove(PID_FILE)
        _kill_proc_tree(server_pid, including_parent=True)
        if _pid_exists(server_pid):
            # Tree kill missed the parent; force it
            _os.kill(server_pid, _signal.SIGKILL)
    if _os.path.isfile(URI_FILE):
        _os.remove(URI_FILE)
def daemon_manager(mode):
    """Manage the daemon process

    Parameters
    ----------
    mode : {'start', 'stop', 'restart', 'status'}

    Returns
    -------
    status : int
        0 on success, 1 on failure
    """
    global _WE_ARE_A_SERVER
    _WE_ARE_A_SERVER = True
    check_conf()
    if mode == 'start':
        return _start()
    if mode == 'stop':
        return _stop()
    if mode == 'restart':
        _stop()
        return _start()
    if mode == 'status':
        is_up = server_running()
        message = ('Local queue server is running' if is_up
                   else 'Local queue server is not running')
        _logme.log(message, 'info', also_write='stderr')
        return 0 if is_up else 1
    # Unrecognized mode
    _logme.log('Invalid mode {0}'.format(mode), 'error')
    return 1
def _start():
    """Start the daemon process as a fork.

    Returns
    -------
    int
        0 if the server came up, 1 if it was already running or failed to
        start. The forked child never returns (it runs the daemon loop).
    """
    if _os.path.isfile(PID_FILE):
        with open(PID_FILE) as fin:
            # Convert to int for consistency with kill_queue(); previously
            # the raw string was passed to _pid_exists().
            pid = int(fin.read().strip())
        if _pid_exists(pid):
            _logme.log('Local queue already running with pid {0}'
                       .format(pid), 'info')
            return 1
        # Stale pid file from a dead server
        _os.remove(PID_FILE)
    pid = _os.fork()
    if pid == 0:  # The first child becomes the daemon and never returns.
        daemonizer()
    else:
        _logme.log('Local queue starting', 'info')
        _sleep(1)
        if server_running():
            return 0
        _logme.log('Server failed to start', 'critical')
        return 1
def _stop():
    """Stop the daemon process.

    Returns
    -------
    int
        shutdown_queue()'s status, or 1 if no pid file exists.
    """
    if _os.path.isfile(PID_FILE):
        return shutdown_queue()
    _logme.log('Queue does not appear to be running, cannot stop',
               'info')
    return 1
def _kill_proc_tree(pid, including_parent=True):
    """Kill an entire process tree.

    Parameters
    ----------
    pid : int or str
        PID of the root process; coerced to int for psutil.
    including_parent : bool
        If True, kill the root process as well as its descendants.
    """
    parent = _psutil.Process(int(pid))
    if hasattr(parent, 'get_children'):
        # Compatibility shim: old psutil releases named the method
        # get_children(); alias it so the call below works either way.
        parent.children = parent.get_children
    for child in parent.children(recursive=True):
        child.kill()
    if including_parent:
        parent.kill()
###############################################################################
# Fyrd Functions #
###############################################################################
###############################################################################
# Functionality Test #
###############################################################################
def queue_test(warn=True):
    """Check that this batch system can be used.

    Parameters
    ----------
    warn : bool
        log a warning on fail

    Returns
    -------
    batch_system_functional : bool
    """
    # Check for a remote server_uri is running
    _logme.log('Checking for a remote queue server_uri (Pyro4)', 'debug')
    uri = get_uri()
    if uri is not None:
        _logme.log('Remote queue server is running at {}'.format(uri), 'debug')
        return True
    log_level = 'error' if warn else 'debug'
    try:
        if not server_running():
            start_server()
        return server_running()
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate; also fixed "sever" typo in the message.
        _logme.log('Cannot get local queue server address', log_level)
        return False
###############################################################################
# Normalization Functions #
###############################################################################
def normalize_job_id(job_id):
    """Convert the job id into job_id, array_id.

    The local queue has no array jobs, so array_id is always None.
    """
    numeric_id = int(job_id)
    return str(numeric_id), None
def normalize_state(state):
"""Convert state into standardized | |
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Test suite for aws_encryption_sdk.key_providers.base.MasterKeyProvider"""
import attr
import pytest
from mock import MagicMock, PropertyMock, call, patch, sentinel
from aws_encryption_sdk.exceptions import (
DecryptKeyError,
IncorrectMasterKeyError,
InvalidKeyIdError,
MasterKeyProviderError,
)
from aws_encryption_sdk.key_providers.base import MasterKey, MasterKeyProvider, MasterKeyProviderConfig
from .test_values import VALUES
pytestmark = [pytest.mark.unit, pytest.mark.local]
@attr.s(hash=True)
class MockMasterKeyProviderConfig(MasterKeyProviderConfig):
    # Configuration for MockMasterKeyProvider: the provider id it should
    # report and the object its _new_master_key() should hand back.
    provider_id = attr.ib(hash=True)
    mock_new_master_key = attr.ib(hash=True, default=None)
class MockMasterKeyProvider(MasterKeyProvider):
    """Minimal concrete MasterKeyProvider whose vended master key is injected
    through its config object."""

    provider_id = None
    _config_class = MockMasterKeyProviderConfig

    def __init__(self, **kwargs):
        # NOTE(review): super().__init__ is not called; presumably the base
        # class populates self.config before __init__ runs — confirm against
        # MasterKeyProvider's construction logic.
        self.provider_id = self.config.provider_id

    def _new_master_key(self, key_id):
        # Always return the preconfigured key, ignoring key_id.
        return self.config.mock_new_master_key
class MockMasterKeyProviderNoVendOnDecrypt(MockMasterKeyProvider):
    """Variant that does not create new master keys on decrypt; only
    explicitly configured members may be used."""

    vend_masterkey_on_decrypt = False

    def _new_master_key(self, key_id):
        pass
def test_repr():
    """repr() is generated from the attrs config fields, alphabetically ordered."""
    test = MockMasterKeyProvider(provider_id="ex_provider_id", mock_new_master_key="ex_new_master_key")
    assert repr(test) == (
        "MockMasterKeyProvider(" "mock_new_master_key=ex_new_master_key, " "provider_id=ex_provider_id" ")"
    )
class TestBaseMasterKeyProvider(object):
    def test_provider_id_enforcement(self):
        """A subclass that omits provider_id stays abstract and cannot be built."""
        class TestProvider(MasterKeyProvider):
            def _new_master_key(self, key_id):
                pass

        with pytest.raises(TypeError) as excinfo:
            TestProvider()
        excinfo.match("Can't instantiate abstract class TestProvider *")
    def test_new_master_key_enforcement(self):
        """A subclass that omits _new_master_key stays abstract and cannot be built."""
        class TestProvider(MasterKeyProvider):
            provider_id = None

        with pytest.raises(TypeError) as excinfo:
            TestProvider()
        excinfo.match("Can't instantiate abstract class TestProvider *")
    def test_master_keys_for_encryption(self):
        """Keys from all members are aggregated; the first member's primary wins."""
        mock_master_key_a = MagicMock()
        mock_master_key_a.master_keys_for_encryption.return_value = (
            sentinel.master_key_a,
            (sentinel.master_key_i, sentinel.master_key_a),
        )
        mock_master_key_b = MagicMock()
        mock_master_key_b.master_keys_for_encryption.return_value = (sentinel.master_key_b, (sentinel.master_key_b,))
        mock_key_provider_c = MagicMock()
        mock_key_provider_c.master_keys_for_encryption.return_value = (
            sentinel.master_key_c,
            [sentinel.master_key_c, sentinel.master_key_d, sentinel.master_key_e],
        )
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._members = [mock_master_key_a, mock_master_key_b, mock_key_provider_c]
        test_primary, test = mock_master_key_provider.master_keys_for_encryption(
            encryption_context=sentinel.encryption_context,
            plaintext_rostream=sentinel.plaintext_rostream,
            plaintext_length=sentinel.plaintext_length,
        )
        # every member is consulted with the same arguments
        mock_master_key_a.master_keys_for_encryption.assert_called_once_with(
            sentinel.encryption_context, sentinel.plaintext_rostream, sentinel.plaintext_length
        )
        mock_master_key_b.master_keys_for_encryption.assert_called_once_with(
            sentinel.encryption_context, sentinel.plaintext_rostream, sentinel.plaintext_length
        )
        mock_key_provider_c.master_keys_for_encryption.assert_called_once_with(
            sentinel.encryption_context, sentinel.plaintext_rostream, sentinel.plaintext_length
        )
        assert test_primary is sentinel.master_key_a
        # aggregate preserves member order and member-internal order
        assert test == [
            sentinel.master_key_i,
            sentinel.master_key_a,
            sentinel.master_key_b,
            sentinel.master_key_c,
            sentinel.master_key_d,
            sentinel.master_key_e,
        ]
    def test_master_keys_for_encryption_no_master_keys(self):
        """With no members configured, encryption key collection raises."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        with pytest.raises(MasterKeyProviderError) as excinfo:
            mock_master_key_provider.master_keys_for_encryption(
                encryption_context=sentinel.encryption_context,
                plaintext_rostream=sentinel.plaintext_rostream,
                plaintext_length=sentinel.plaintext_length,
            )
        excinfo.match("No Master Keys available from Master Key Provider")
    def test_add_master_keys_from_list(self):
        """add_master_keys_from_list delegates to add_master_key per entry."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider.add_master_key = MagicMock()
        mock_master_key_provider.add_master_keys_from_list([sentinel.key_a, sentinel.key_b, sentinel.key_c])
        mock_master_key_provider.add_master_key.assert_has_calls(
            (call(sentinel.key_a), call(sentinel.key_b), call(sentinel.key_c))
        )
    def test_add_master_key_new(self):
        """An unknown key id is vended via _new_master_key and indexed for encrypt."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._new_master_key = MagicMock()
        mock_master_key_provider._new_master_key.return_value = sentinel.new_master_key
        mock_master_key_provider.add_master_key(VALUES["key_info"])
        mock_master_key_provider._new_master_key.assert_called_once_with(VALUES["key_info"])
        assert sentinel.new_master_key in mock_master_key_provider._members
        assert mock_master_key_provider._encrypt_key_index[VALUES["key_info"]] is sentinel.new_master_key
    def test_add_master_key_exists(self):
        """An already-indexed key id does not trigger _new_master_key."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._new_master_key = MagicMock()
        mock_master_key_provider._encrypt_key_index = {VALUES["key_info"]: sentinel.existing_master_key}
        mock_master_key_provider.add_master_key(VALUES["key_info"])
        assert not mock_master_key_provider._new_master_key.called
    def test_add_master_key_to_bytes_exists(self):
        """A str key id is matched against its bytes form in the encrypt index."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._new_master_key = MagicMock()
        mock_master_key_provider._encrypt_key_index = {b"ex_key_info": sentinel.existing_master_key}
        mock_master_key_provider.add_master_key("ex_key_info")
        assert not mock_master_key_provider._new_master_key.called
    def test_add_master_key_providers_from_list(self):
        """add_master_key_providers_from_list delegates to add_master_key_provider per entry."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider.add_master_key_provider = MagicMock()
        mock_master_key_provider.add_master_key_providers_from_list(
            [sentinel.key_provider_a, sentinel.key_provider_b, sentinel.key_provider_c]
        )
        mock_master_key_provider.add_master_key_provider.assert_has_calls(
            (call(sentinel.key_provider_a), call(sentinel.key_provider_b), call(sentinel.key_provider_c))
        )
    def test_master_key_provider(self):
        """add_master_key_provider appends the provider to _members."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider.add_master_key_provider(sentinel.new_key_provider)
        assert sentinel.new_key_provider in mock_master_key_provider._members
    def test_master_key_to_bytes(self):
        """A str key id is converted to bytes before add_master_key is called."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider.add_master_key = MagicMock()
        mock_master_key_provider._encrypt_key_index[b"ex_key_info"] = sentinel.new_master_key
        mock_master_key_provider.master_key_for_encrypt("ex_key_info")
        mock_master_key_provider.add_master_key.assert_called_once_with(b"ex_key_info")
    def test_master_key_for_encrypt(self):
        """master_key_for_encrypt registers the id and returns the indexed key."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider.add_master_key = MagicMock()
        mock_master_key_provider._encrypt_key_index[VALUES["key_info"]] = sentinel.new_master_key
        test = mock_master_key_provider.master_key_for_encrypt(VALUES["key_info"])
        mock_master_key_provider.add_master_key.assert_called_once_with(VALUES["key_info"])
        assert test is sentinel.new_master_key
    def test_master_key_for_decrypt_in_encrypt_key_index(self):
        """The encrypt index is checked first during decrypt lookup."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._new_master_key = MagicMock()
        mock_master_key_provider._encrypt_key_index[sentinel.key_info] = sentinel.known_encrypt_master_key
        mock_master_key_provider._decrypt_key_index[sentinel.key_info] = sentinel.known_decrypt_master_key
        test = mock_master_key_provider.master_key_for_decrypt(sentinel.key_info)
        # encrypt index entry wins even when the decrypt index also has one
        assert test is sentinel.known_encrypt_master_key
        assert not mock_master_key_provider._new_master_key.called
    def test_master_key_for_decrypt_in_decrypt_key_index(self):
        """The decrypt index is used when the encrypt index has no entry."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._new_master_key = MagicMock()
        mock_master_key_provider._encrypt_key_index = {}
        mock_master_key_provider._decrypt_key_index[sentinel.key_info] = sentinel.known_decrypt_master_key
        test = mock_master_key_provider.master_key_for_decrypt(sentinel.key_info)
        assert test is sentinel.known_decrypt_master_key
        assert not mock_master_key_provider._new_master_key.called
    def test_master_key_for_decrypt(self):
        """An unknown key id is vended via _new_master_key and cached for decrypt."""
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._new_master_key = MagicMock(return_value=sentinel.new_master_key)
        # //= compliance/framework/aws-kms/aws-kms-mrk-aware-master-key-provider.txt#2.9
        # //= type=test
        # //# For each encrypted data key in the filtered set, one at a time, the
        # //# master key provider MUST call Get Master Key (aws-kms-mrk-aware-
        # //# master-key-provider.md#get-master-key) with the encrypted data key's
        # //# provider info as the AWS KMS key ARN.
        test = mock_master_key_provider.master_key_for_decrypt(sentinel.key_info)
        mock_master_key_provider._new_master_key.assert_called_once_with(sentinel.key_info)
        assert mock_master_key_provider._decrypt_key_index[sentinel.key_info] is sentinel.new_master_key
        assert test is sentinel.new_master_key
    def test_decrypt_data_key_successful(self):
        """A member whose provider_id matches the EDK vends a key that decrypts it."""
        mock_member = MagicMock()
        mock_member.provider_id = sentinel.provider_id
        mock_master_key = MagicMock()
        mock_master_key.decrypt_data_key.return_value = sentinel.data_key
        mock_member.master_key_for_decrypt.return_value = mock_master_key
        mock_encrypted_data_key = MagicMock()
        mock_encrypted_data_key.key_provider.provider_id = sentinel.provider_id
        mock_encrypted_data_key.key_provider.key_info = sentinel.key_info
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id_2, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._members = [mock_member]
        test = mock_master_key_provider.decrypt_data_key(
            encrypted_data_key=mock_encrypted_data_key,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
        mock_member.master_key_for_decrypt.assert_called_once_with(sentinel.key_info)
        # //= compliance/framework/aws-kms/aws-kms-mrk-aware-master-key-provider.txt#2.9
        # //= type=test
        # //# It MUST call Decrypt Data Key
        # //# (aws-kms-mrk-aware-master-key.md#decrypt-data-key) on this master key
        # //# with the input algorithm, this single encrypted data key, and the
        # //# input encryption context.
        mock_master_key.decrypt_data_key.assert_called_once_with(
            mock_encrypted_data_key, sentinel.algorithm, sentinel.encryption_context
        )
        assert test is sentinel.data_key
    def test_decrypt_data_key_successful_no_key_ids(self):
        """Test that a Master Key Provider configured with vend_masterkey_on_decrypt = True
        without any key ids can successfully decrypt an EDK.
        """
        mock_master_key = MagicMock()
        mock_master_key.decrypt_data_key.return_value = sentinel.data_key
        mock_encrypted_data_key = MagicMock()
        mock_encrypted_data_key.key_provider.provider_id = sentinel.provider_id
        mock_encrypted_data_key.key_provider.key_info = sentinel.key_info
        # provider_id matches the EDK, so the provider vends a key itself
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id, mock_new_master_key=mock_master_key
        )
        mock_master_key_provider.vend_masterkey_on_decrypt = True
        mock_master_key_provider._members = []
        test = mock_master_key_provider.decrypt_data_key(
            encrypted_data_key=mock_encrypted_data_key,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
        mock_master_key.decrypt_data_key.assert_called_once_with(
            mock_encrypted_data_key, sentinel.algorithm, sentinel.encryption_context
        )
        assert test is sentinel.data_key
    def test_decrypt_data_key_successful_second_try_provider_id(self):
        """A member with a non-matching provider_id is skipped without lookup."""
        mock_first_member = MagicMock()
        mock_first_member.provider_id = sentinel.another_provider_id
        mock_member = MagicMock()
        mock_member.provider_id = sentinel.provider_id
        mock_master_key = MagicMock()
        mock_master_key.decrypt_data_key.return_value = sentinel.data_key
        mock_member.master_key_for_decrypt.return_value = mock_master_key
        mock_encrypted_data_key = MagicMock()
        mock_encrypted_data_key.key_provider.provider_id = sentinel.provider_id
        mock_encrypted_data_key.key_provider.key_info = sentinel.key_info
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id_2, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._members = [mock_first_member, mock_member]
        test = mock_master_key_provider.decrypt_data_key(
            encrypted_data_key=mock_encrypted_data_key,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
        assert not mock_first_member.master_key_for_decrypt.called
        assert test is sentinel.data_key
    def test_decrypt_data_key_successful_multiple_members(self):
        """Test that a Master Key Provider with multiple members which are able
        to decrypt a given EDK will successfully use the first key to decrypt
        and will not try the others.
        """
        mock_member1 = MagicMock()
        mock_member1.provider_id = sentinel.provider_id
        mock_member1.key_id = sentinel.key_info1
        mock_member2 = MagicMock()
        mock_member2.provider_id = sentinel.provider_id
        mock_member2.key_id = sentinel.key_info2
        mock_master_key = MagicMock()
        mock_master_key.decrypt_data_key.return_value = sentinel.data_key
        mock_member1.master_key_for_decrypt.return_value = mock_master_key
        mock_encrypted_data_key = MagicMock()
        mock_encrypted_data_key.key_provider.provider_id = sentinel.provider_id
        mock_encrypted_data_key.key_provider.key_info = sentinel.key_info
        mock_master_key_provider = MockMasterKeyProvider(
            provider_id=sentinel.provider_id_2, mock_new_master_key=sentinel.new_master_key
        )
        mock_master_key_provider._members = [mock_member1, mock_member2]
        test = mock_master_key_provider.decrypt_data_key(
            encrypted_data_key=mock_encrypted_data_key,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
        # first successful member short-circuits the search
        assert mock_member1.master_key_for_decrypt.called
        assert not mock_member2.master_key_for_decrypt.called
        assert test is sentinel.data_key
    def test_decrypt_data_key_successful_one_matching_member_no_vend(self):
        """Test that a Master Key Provider configured to not vend keys
        can successfully decrypt an EDK when it was configured with a
        key that is able to decrypt the EDK.
        """
        mock_member = MagicMock()
        # spoof isinstance(member, MasterKey) so the member is used directly
        mock_member.__class__ = MasterKey
        mock_member.provider_id = sentinel.provider_id
        mock_encrypted_data_key = MagicMock()
        mock_encrypted_data_key.key_provider.provider_id = sentinel.provider_id
        mock_encrypted_data_key.key_provider.key_info = sentinel.key_info
        mock_master_key_provider = MockMasterKeyProviderNoVendOnDecrypt(provider_id=sentinel.provider_id)
        mock_master_key_provider._members = [mock_member]
        mock_master_key_provider.master_key_for_decrypt = MagicMock()
        mock_master_key_provider.decrypt_data_key(
            encrypted_data_key=mock_encrypted_data_key,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
        mock_member.decrypt_data_key.assert_called_once_with(
            mock_encrypted_data_key, sentinel.algorithm, sentinel.encryption_context
        )
def test_decrypt_data_key_unsuccessful_no_matching_members(self):
    """Verify that a Master Key Provider raises DecryptKeyError when none of
    its members is able to decrypt the EDK.
    """
    member = MagicMock()
    member.provider_id = sentinel.another_provider_id  # deliberately different
    edk = MagicMock()
    edk.key_provider.provider_id = sentinel.provider_id
    edk.key_provider.key_info = sentinel.key_info
    # even a vended key fails to decrypt
    failing_key = MagicMock()
    failing_key.decrypt_data_key.side_effect = DecryptKeyError()
    provider = MockMasterKeyProvider(
        provider_id=sentinel.provider_id, mock_new_master_key=failing_key
    )
    provider._members = [member]
    with pytest.raises(DecryptKeyError) as excinfo:
        provider.decrypt_data_key(
            encrypted_data_key=edk,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
    excinfo.match("Unable to decrypt data key")
def test_decrypt_data_key_unsuccessful_matching_provider_invalid_key_id(self):
    """The provider id matches but the key id is rejected: the
    InvalidKeyIdError raised by master_key_for_decrypt must be converted
    into a DecryptKeyError.
    """
    mock_encrypted_data_key = MagicMock()
    mock_encrypted_data_key.key_provider.provider_id = sentinel.provider_id
    mock_encrypted_data_key.key_provider.key_info = sentinel.key_info
    mock_master_key_provider = MockMasterKeyProvider(
        provider_id=sentinel.provider_id, mock_new_master_key=sentinel.master_key
    )
    # patch the key lookup so any attempt to resolve the key id raises
    with patch.object(
        mock_master_key_provider, "master_key_for_decrypt", new_callable=PropertyMock, side_effect=InvalidKeyIdError
    ) as mock_master_key:
        with pytest.raises(DecryptKeyError) as excinfo:
            mock_master_key_provider.decrypt_data_key(
                encrypted_data_key=mock_encrypted_data_key,
                algorithm=sentinel.algorithm,
                encryption_context=sentinel.encryption_context,
            )
    excinfo.match("Unable to decrypt data key")
    # the lookup must have been attempted exactly once, with the EDK's key info
    mock_master_key.assert_called_once_with(sentinel.key_info)
def test_decrypt_data_key_unsuccessful_no_matching_members_no_vend(self):
    """Verify that a no-vend Master Key Provider refuses to decrypt an EDK
    when none of its configured members match, and never falls back to
    vending a new key.
    """
    member = MagicMock()
    member.provider_id = sentinel.another_provider_id  # does not match the EDK
    edk = MagicMock()
    edk.key_provider.provider_id = sentinel.provider_id
    edk.key_provider.key_info = sentinel.key_info
    provider = MockMasterKeyProviderNoVendOnDecrypt(provider_id=sentinel.provider_id)
    provider._members = [member]
    provider.master_key_for_decrypt = MagicMock()
    with pytest.raises(DecryptKeyError) as excinfo:
        provider.decrypt_data_key(
            encrypted_data_key=edk,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
    excinfo.match("Unable to decrypt data key")
    # a no-vend provider must never try to vend a key during decryption
    assert not provider.master_key_for_decrypt.called
def test_decrypt_data_key_unsuccessful_invalid_key_info(self):
    """Verify that DecryptKeyError is raised when the matching member
    rejects the EDK's key info with InvalidKeyIdError.
    """
    member = MagicMock()
    member.provider_id = sentinel.provider_id
    member.master_key_for_decrypt.side_effect = (InvalidKeyIdError,)
    edk = MagicMock()
    edk.key_provider.provider_id = sentinel.provider_id
    edk.key_provider.key_info = sentinel.key_info
    provider = MockMasterKeyProvider(
        provider_id=sentinel.provider_id_2, mock_new_master_key=sentinel.new_master_key
    )
    provider._members = [member]
    with pytest.raises(DecryptKeyError) as excinfo:
        provider.decrypt_data_key(
            encrypted_data_key=edk,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
    excinfo.match("Unable to decrypt data key")
def test_decrypt_data_key_unsuccessful_incorrect_master_key(self):
    """Verify that DecryptKeyError is raised when the member's resolved
    master key turns out to be the wrong one (IncorrectMasterKeyError).
    """
    wrong_key = MagicMock()
    wrong_key.decrypt_data_key.side_effect = (IncorrectMasterKeyError,)
    member = MagicMock()
    member.provider_id = sentinel.provider_id
    member.master_key_for_decrypt.return_value = wrong_key
    edk = MagicMock()
    edk.key_provider.provider_id = sentinel.provider_id
    edk.key_provider.key_info = sentinel.key_info
    provider = MockMasterKeyProvider(
        provider_id=sentinel.provider_id_2, mock_new_master_key=sentinel.new_master_key
    )
    provider._members = [member]
    with pytest.raises(DecryptKeyError) as excinfo:
        provider.decrypt_data_key(
            encrypted_data_key=edk,
            algorithm=sentinel.algorithm,
            encryption_context=sentinel.encryption_context,
        )
    excinfo.match("Unable to decrypt data key")
def test_decrypt_data_key_unsuccessful_master_key_decrypt_error(self):
mock_member = MagicMock()
mock_member.provider_id = sentinel.provider_id
mock_master_key = MagicMock()
mock_master_key.decrypt_data_key.side_effect = (DecryptKeyError,)
mock_member.master_key_for_decrypt.return_value = mock_master_key
mock_encrypted_data_key = MagicMock()
mock_encrypted_data_key.key_provider.provider_id = sentinel.provider_id
mock_encrypted_data_key.key_provider.key_info = sentinel.key_info
mock_master_key = MagicMock()
mock_master_key.decrypt_data_key.side_effect = DecryptKeyError()
mock_master_key_provider = MockMasterKeyProvider(
provider_id=sentinel.provider_id, mock_new_master_key=mock_master_key
)
mock_master_key_provider._members = [mock_member]
with pytest.raises(DecryptKeyError) as | |
# -*- coding: utf-8 -*-
#
# <NAME>
# orthologue
# (c) 1998-2021 all rights reserved
#
# externals
import numbers
from . import gsl # the extension
# the class declaration
class Vector:
"""
A wrapper over a gsl vector
"""
# types
from .Permutation import Permutation as permutation
# constants
defaultFormat = "+16.7"
# class methods
# mpi support
@classmethod
def bcast(cls, vector=None, communicator=None, source=0):
    """
    Broadcast {vector} from the {source} task to every task in {communicator}
    """
    # fall back to the global communicator when none was given
    if communicator is None:
        import mpi
        communicator = mpi.world
    # only {source} needs to supply a vector; extract its capsule when present
    payload = vector.data if vector is not None else None
    # let the extension perform the broadcast
    capsule, shape = gsl.bcastVector(communicator.capsule, source, payload)
    # wrap the received data in a vector and hand it back
    return cls(shape=shape, data=capsule)
@classmethod
def collect(cls, vector, communicator=None, destination=0):
    """
    Gather {vector} from every task in {communicator} into one big vector
    owned by the {destination} task; the other tasks receive {None}
    """
    # fall back to the global communicator when none was given
    if communicator is None:
        import mpi
        communicator = mpi.world
    # perform the gather
    harvest = gsl.gatherVector(communicator.capsule, destination, vector.data)
    # only the destination task has anything further to do
    if communicator.rank != destination:
        return
    # unpack the collected data
    capsule, shape = harvest
    # wrap it up as a vector and return it
    return cls(shape=shape, data=capsule)
def excerpt(self, communicator=None, source=0, vector=None):
    """
    Scatter {vector}, held by task {source}, among the tasks in {communicator}
    and load my share of the values. Only {source} must supply a {vector};
    everybody else may rely on the default
    """
    # fall back to the global communicator when none was given
    if communicator is None:
        import mpi
        communicator = mpi.world
    # extract the data capsule, if a vector was supplied
    payload = vector.data if vector is not None else None
    # fill me with my portion of the scattered data
    gsl.scatterVector(communicator.capsule, source, self.data, payload)
    # enable chaining
    return self
# public data
@property
def elements(self):
    """
    Produce an iterator over my elements
    """
    # vectors are directly iterable, so simply delegate
    yield from self
# initialization
def zero(self):
    """
    Reset every one of my elements to zero
    """
    # delegate to the extension
    gsl.vector_zero(self.data)
    # enable chaining
    return self
def fill(self, value):
    """
    Set all my elements to {value}.

    {value} may be a single number, which is replicated into every slot, or an
    iterable of numbers, which is consumed element by element.
    """
    # grab my capsule
    data = self.data
    # first, attempt to
    try:
        # convert {value} into a float
        value = float(value)
    # if this fails, {value} is presumably an iterable of per-element values
    # NOTE(review): only TypeError is caught; a non-numeric string would raise
    # ValueError and propagate to the caller -- confirm this is intended
    except TypeError:
        # go through the input values, stopping at my shape
        for idx, elem in zip(range(self.shape), value):
            # set the value
            gsl.vector_set(data, idx, float(elem))
    # if the conversion to float were successful
    else:
        # fill me with {value}
        gsl.vector_fill(data, value)
    # all done
    return self
def basis(self, index):
    """
    Turn me into the {index}-th basis vector: one at {index}, zero elsewhere
    """
    # delegate to the extension
    gsl.vector_basis(self.data, index)
    # enable chaining
    return self
def random(self, pdf):
    """
    Populate me with random values drawn from the distribution {pdf}
    """
    # distributions know how to fill vectors; delegate
    return pdf.vector(vector=self)
def clone(self):
    """
    Make an independent copy of me
    """
    # make a new vector of my shape
    duplicate = type(self)(shape=self.shape)
    # transfer my values into it
    gsl.vector_copy(duplicate.data, self.data)
    # and hand it off
    return duplicate
def copy(self, other):
    """
    Overwrite my contents with the values of {other}, assumed shape-compatible
    """
    # transfer the values
    gsl.vector_copy(self.data, other.data)
    # enable chaining
    return self
def tuple(self):
    """
    Return my contents as a tuple of values
    """
    # the extension knows how to build the representation
    return gsl.vector_tuple(self.data)
def view(self, start, shape):
    """
    Create a window onto my data covering [{start}, {start}+{shape})
    """
    # get the view factory
    from .VectorView import VectorView
    # instantiate and return the view
    return VectorView(vector=self, start=start, shape=shape)
def load(self, filename, binary=None):
    """
    Read my values from {filename}.

    When {binary} is {True} the file is treated as a raw binary dump; when it
    is {False}, as text. When {binary} is {None}, the decision is based on the
    {filename} extension: ".bin" selects binary, anything else selects text.
    """
    # if the caller asked for binary mode
    if binary is True:
        # pick the binary representation
        return self.read(filename)
    # if the caller asked for ascii mode
    if binary is False:
        # pick ascii
        return self.scanf(filename)
    # otherwise, look at the file extension
    suffix = filename.suffix
    # BUG FIX: the sibling {save} compares {suffix} against ".bin" (with the
    # leading dot); this method compared against "bin", which could never
    # match, so binary files were always mis-read as text. Use ".bin" here so
    # a vector saved with {save} round-trips through {load}.
    if suffix == ".bin":
        # go binary
        return self.read(filename)
    # otherwise
    return self.scanf(filename)
def save(self, filename, binary=None, format=defaultFormat):
    """
    Write my values to {filename}.

    {binary} selects raw binary ({True}) or text ({False}) output; when it is
    {None}, the choice is made from the {filename} extension, with ".bin"
    selecting binary
    """
    # explicit binary request
    if binary is True:
        return self.write(filename)
    # explicit text request
    if binary is False:
        return self.printf(filename=filename, format=format)
    # no explicit request: decide based on the file extension
    if filename.suffix == ".bin":
        # binary dump
        return self.write(filename)
    # default to text
    return self.printf(filename=filename, format=format)
def read(self, filename):
    """
    Load my values from the binary file {filename}
    """
    # delegate to the extension
    gsl.vector_read(self.data, filename.path)
    # enable chaining
    return self
def write(self, filename):
    """
    Dump my values into the binary file {filename}
    """
    # delegate to the extension
    gsl.vector_write(self.data, filename.path)
    # enable chaining
    return self
def scanf(self, filename):
    """
    Load my values from the text file {filename}
    """
    # delegate to the extension
    gsl.vector_scanf(self.data, filename.path)
    # enable chaining
    return self
def printf(self, filename, format=defaultFormat):
    """
    Write my values to the text file {filename} using the C-style {format}
    """
    # assemble the printf conversion specification
    spec = '%' + format + 'e'
    # delegate to the extension
    gsl.vector_printf(self.data, filename.path, spec)
    # enable chaining
    return self
def print(self, format='{:+13.4e}', indent='', interactive=True):
    """
    Render my values on one line using {format}; when {interactive} is true,
    also write the line to stdout. Returns the rendered line
    """
    # render each element and bracket the result
    fields = ['{}['.format(indent)]
    fields.extend(format.format(value) for value in self)
    fields.append(']')
    line = ' '.join(fields)
    # show it, if requested
    if interactive:
        print(line)
    # hand back the rendered line
    return line
# maxima and minima
def max(self):
    """
    Return my largest value
    """
    # delegate to the extension
    return gsl.vector_max(self.data)
def min(self):
    """
    Compute my minimum value
    """
    # easy enough
    return gsl.vector_min(self.data)
def minmax(self):
    """
    Return my minimum and maximum values in a single pass
    """
    # delegate to the extension
    return gsl.vector_minmax(self.data)
# statistics
def sort(self):
    """
    Sort my elements in place, in ascending order
    """
    # delegate to the extension
    gsl.vector_sort(self.data)
    # enable chaining
    return self
def sortIndirect(self):
    """
    Return the permutation that would arrange my elements in ascending order,
    without modifying me
    """
    # ask the extension for the index permutation capsule
    capsule = gsl.vector_sortIndex(self.data)
    # wrap it up as a permutation object
    return self.permutation(shape=self.shape, data=capsule)
def mean(self, weights=None):
    """
    Return the mean of my elements, optionally weighted by {weights}
    """
    # extract the weight capsule, if weights were supplied
    wdata = None if weights is None else weights.data
    # delegate to the extension
    return gsl.vector_mean(self.data, wdata)
def median(self):
    """
    Return the median of my elements; I must have been sorted beforehand
    """
    # delegate to the extension
    return gsl.vector_median(self.data)
def variance(self, mean=None):
    """
    Return the variance of my elements about {mean}; when {mean} is {None}, it
    is computed on the fly
    """
    # normalize the center
    center = None if mean is None else float(mean)
    # delegate to the extension
    return gsl.vector_variance(self.data, center)
def sdev(self, mean=None):
    """
    Compute the standard deviation of my elements with respect to {mean}. If
    {mean} is {None}, it is computed on the fly
    """
    # easy enough
    return gsl.vector_sdev(self.data, float(mean) if mean is not None else None)
def ndarray(self, copy=False):
    """
    Expose my data as a numpy array: a shared-memory reference when {copy} is
    {False}, an independent copy otherwise
    """
    # build a numpy array that aliases my storage
    shared = gsl.vector_ndarray(self.data)
    # detach it from my storage if the caller asked for a copy
    return shared.copy() if copy else shared
# meta methods
def __init__(self, shape, data=None, **kwds):
# | |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helps nanoemoji build svg fonts."""
import dataclasses
from io import BytesIO
from fontTools import ttLib
from lxml import etree # pytype: disable=import-error
from nanoemoji.color_glyph import ColorGlyph, PaintedLayer
from nanoemoji.disjoint_set import DisjointSet
from nanoemoji.paint import (
Extend,
Paint,
PaintSolid,
PaintLinearGradient,
PaintRadialGradient,
PaintGlyph,
PaintColrGlyph,
PaintTransform,
PaintComposite,
PaintColrLayers,
)
from picosvg.geometric_types import Rect
from picosvg.svg import to_element, SVG
from picosvg import svg_meta
from picosvg.svg_transform import Affine2D
from picosvg.svg_types import SVGPath
from typing import MutableMapping, NamedTuple, Optional, Sequence, Tuple, Union
# topicosvg()'s default
_DEFAULT_ROUND_NDIGITS = 3
class InterGlyphReuseKey(NamedTuple):
    """Key identifying a painted shape that can be shared across glyphs.

    Two layers with equal keys render identically, so a single SVG element can
    serve both.
    """

    view_box: Rect
    paint: Paint
    path: str
    # variable-length tuple of intra-glyph reuse transforms
    reuses: Tuple[Affine2D, ...]
class GradientReuseKey(NamedTuple):
    """Key identifying a gradient that can be shared within one OT-SVG doc.

    Gradients are reusable only when both the paint and the effective
    transform match.
    """

    paint: Paint
    transform: Affine2D = Affine2D.identity()
# either flavor of gradient paint; used where linear and radial are handled alike
_GradientPaint = Union[PaintLinearGradient, PaintRadialGradient]
@dataclasses.dataclass
class ReuseCache:
    """Mutable state tracking elements already emitted into the SVG document."""

    # shape element previously emitted for a given inter-glyph reuse key
    shapes: MutableMapping[InterGlyphReuseKey, etree.Element] = dataclasses.field(
        default_factory=dict
    )
    # id of the gradient element previously defined for a given reuse key
    gradient_ids: MutableMapping[GradientReuseKey, str] = dataclasses.field(
        default_factory=dict
    )
def _ensure_has_id(el: etree.Element):
    """Assign {el} a deterministic id (parent id + child index) if it has none."""
    if "id" in el.attrib:
        return
    # count preceding siblings to find {el}'s position under its parent
    position = 0
    sibling = el.getprevious()
    while sibling is not None:
        position += 1
        sibling = sibling.getprevious()
    el.attrib["id"] = f'{el.getparent().attrib["id"]}::{position}'
def _glyph_groups(color_glyphs: Sequence[ColorGlyph]) -> Tuple[Tuple[str, ...]]:
    """Partition glyphs into groups that must live in the same SVG document.

    Glyphs that share a painted layer (same reuse key) are unioned together so
    the shared shape can be referenced within a single doc.
    """
    # first glyph seen for each reuse key
    first_seen = {}
    groups = DisjointSet()
    for color_glyph in color_glyphs:
        groups.make_set(color_glyph.glyph_name)
        for layer in color_glyph.painted_layers:
            key = _inter_glyph_reuse_key(color_glyph.svg.view_box(), layer)
            if key in first_seen:
                # another glyph already owns this shape; keep the two together
                groups.union(color_glyph.glyph_name, first_seen[key])
            else:
                first_seen[key] = color_glyph.glyph_name
    return groups.sorted()
def _ntos(n: float) -> str:
    """Format {n} as a compact SVG number, rounded to the default precision."""
    rounded = round(n, _DEFAULT_ROUND_NDIGITS)
    return svg_meta.ntos(rounded)
# https://docs.microsoft.com/en-us/typography/opentype/spec/svg#coordinate-systems-and-glyph-metrics
def _svg_matrix(transform: Affine2D) -> str:
    """Serialize {transform} for use in an SVG transform attribute."""
    rounded = transform.round(_DEFAULT_ROUND_NDIGITS)
    return rounded.tostring()
def _inter_glyph_reuse_key(
    view_box: Rect, painted_layer: PaintedLayer
) -> InterGlyphReuseKey:
    """Build the key under which {painted_layer} may be shared across glyphs.

    SVG reuses shapes together with their paint, so paint is part of the key.
    """
    # TODO we could recycle shapes that differ only in paint, would just need to
    # transfer the paint attributes onto the use element if they differ
    return InterGlyphReuseKey(
        view_box=view_box,
        paint=painted_layer.paint,
        path=painted_layer.path,
        reuses=painted_layer.reuses,
    )
def _apply_solid_paint(svg_path: etree.Element, paint: PaintSolid):
    """Set fill (plus opacity, when not fully opaque) on {svg_path} from {paint}."""
    svg_path.attrib["fill"] = paint.color.opaque().to_string()
    alpha = paint.color.alpha
    if alpha != 1.0:
        svg_path.attrib["opacity"] = _ntos(alpha)
def _apply_gradient_paint(
    svg_defs: etree.Element,
    svg_path: etree.Element,
    paint: _GradientPaint,
    reuse_cache: Optional[ReuseCache] = None,
    transform: Affine2D = Affine2D.identity(),
):
    """Point {svg_path}'s fill at a gradient for {paint}, defining it if needed.

    When a {reuse_cache} is given, gradients with identical paint and
    transform are defined once per OT-SVG document and referenced thereafter.
    """
    if reuse_cache is not None:
        key = GradientReuseKey(paint, transform)
        grad_id = reuse_cache.gradient_ids.get(key)
        if grad_id is None:
            # first sighting: define the gradient and remember its id
            grad_id = _define_gradient(svg_defs, paint, transform)
            reuse_cache.gradient_ids[key] = grad_id
    else:
        # no cache: always emit a fresh definition
        grad_id = _define_gradient(svg_defs, paint, transform)
    svg_path.attrib["fill"] = f"url(#{grad_id})"
def _define_gradient(
    svg_defs: etree.Element,
    paint: _GradientPaint,
    transform: Affine2D = Affine2D.identity(),
) -> str:
    """Emit a gradient element for {paint} into {svg_defs}; return its id.

    Raises TypeError for paints that are not linear or radial gradients.
    """
    # dispatch on the concrete gradient type, linear first
    for paint_type, define in (
        (PaintLinearGradient, _define_linear_gradient),
        (PaintRadialGradient, _define_radial_gradient),
    ):
        if isinstance(paint, paint_type):
            return define(svg_defs, paint, transform)
    raise TypeError(type(paint))
def _apply_gradient_common_parts(
    gradient: etree.Element,
    paint: _GradientPaint,
    transform: Affine2D = Affine2D.identity(),
):
    """Write the attributes shared by linear and radial gradients onto
    {gradient}: coordinate units, color stops, spread method, and
    gradientTransform.
    """
    gradient.attrib["gradientUnits"] = "userSpaceOnUse"
    # one <stop> per color stop; stop-opacity only when not fully opaque
    for stop in paint.stops:
        stop_el = etree.SubElement(gradient, "stop")
        stop_el.attrib["offset"] = _ntos(stop.stopOffset)
        stop_el.attrib["stop-color"] = stop.color.opaque().to_string()
        if stop.color.alpha != 1.0:
            stop_el.attrib["stop-opacity"] = _ntos(stop.color.alpha)
    # SVG's default spread is "pad"; only emit the attribute otherwise
    if paint.extend != Extend.PAD:
        gradient.attrib["spreadMethod"] = paint.extend.name.lower()
    transform = transform.round(_DEFAULT_ROUND_NDIGITS)
    if transform != Affine2D.identity():
        # Safari has a bug which makes it reject a gradient if gradientTransform
        # contains an 'involutory matrix' (i.e. matrix whose inverse equals itself,
        # such that M @ M == Identity, e.g. reflection), hence the following hack:
        # https://github.com/googlefonts/nanoemoji/issues/268
        # https://en.wikipedia.org/wiki/Involutory_matrix
        # TODO: Remove once the bug gets fixed
        if Affine2D.product(transform, transform) == Affine2D.identity():
            # perturb one coefficient just enough to break the involution
            transform = transform._replace(a=transform.a + 0.00001)
            assert transform.inverse() != transform
        gradient.attrib["gradientTransform"] = transform.tostring()
def _define_linear_gradient(
    svg_defs: etree.Element,
    paint: PaintLinearGradient,
    transform: Affine2D = Affine2D.identity(),
) -> str:
    """Emit a <linearGradient> element for {paint} into {svg_defs}.

    Returns the generated element id.
    """
    gradient = etree.SubElement(svg_defs, "linearGradient")
    # ids are simply "g<N>"; unique because each definition grows len(svg_defs)
    gradient_id = gradient.attrib["id"] = f"g{len(svg_defs)}"
    p0, p1, p2 = paint.p0, paint.p1, paint.p2
    # P2 allows to rotate the linear gradient independently of the end points P0 and P1.
    # Below we compute P3 which is the orthogonal projection of P1 onto a line passing
    # through P0 and perpendicular to the "normal" or "rotation vector" from P0 and P2.
    # The vector P3-P0 is the "effective" linear gradient vector after this rotation.
    # When vector P2-P0 is perpendicular to the gradient vector P1-P0, then P3
    # (projection of P1 onto perpendicular to normal) is == P1 itself thus no rotation.
    # When P2 is collinear to the P1-P0 gradient vector, then this projected P3 == P0
    # and the gradient degenerates to a solid paint (the last color stop).
    p3 = p0 + (p1 - p0).projection((p2 - p0).perpendicular())
    x1, y1 = p0
    x2, y2 = p3
    gradient.attrib["x1"] = _ntos(x1)
    gradient.attrib["y1"] = _ntos(y1)
    gradient.attrib["x2"] = _ntos(x2)
    gradient.attrib["y2"] = _ntos(y2)
    _apply_gradient_common_parts(gradient, paint, transform)
    return gradient_id
def _define_radial_gradient(
    svg_defs: etree.Element,
    paint: PaintRadialGradient,
    transform: Affine2D = Affine2D.identity(),
) -> str:
    """Emit a <radialGradient> element for {paint} into {svg_defs}.

    Returns the generated element id.
    """
    gradient = etree.SubElement(svg_defs, "radialGradient")
    # ids are simply "g<N>"; unique because each definition grows len(svg_defs)
    gradient_id = gradient.attrib["id"] = f"g{len(svg_defs)}"
    # focal attributes are only written when the focal center differs from c1
    # NOTE(review): fr is emitted only alongside a distinct focal point here;
    # confirm upstream never produces c0 == c1 with r0 != 0
    if paint.c0 != paint.c1:
        fx, fy = paint.c0
        gradient.attrib["fx"] = _ntos(fx)
        gradient.attrib["fy"] = _ntos(fy)
        if paint.r0 != 0:
            gradient.attrib["fr"] = _ntos(paint.r0)
    cx, cy = paint.c1
    gradient.attrib["cx"] = _ntos(cx)
    gradient.attrib["cy"] = _ntos(cy)
    gradient.attrib["r"] = _ntos(paint.r1)
    _apply_gradient_common_parts(gradient, paint, transform)
    return gradient_id
def _map_gradient_coordinates(paint: Paint, affine: Affine2D) -> Paint:
    """Return a copy of {paint} with its geometry pushed through {affine}.

    Radial gradients require {affine} to be a uniform scale plus translation,
    since their radii can only be scaled isotropically. Raises ValueError
    otherwise, and TypeError for non-gradient paints.
    """
    if isinstance(paint, PaintLinearGradient):
        return dataclasses.replace(
            paint,
            p0=affine.map_point(paint.p0),
            p1=affine.map_point(paint.p1),
            p2=affine.map_point(paint.p2),
        )
    if isinstance(paint, PaintRadialGradient):
        scalex, scaley = affine.getscale()
        # radii only survive a uniform, non-degenerate scale
        if not scalex or abs(scalex) != abs(scaley):
            raise ValueError(
                f"Expected uniform scale and/or translate, found: {affine}"
            )
        return dataclasses.replace(
            paint,
            c0=affine.map_point(paint.c0),
            c1=affine.map_point(paint.c1),
            r0=affine.map_vector((paint.r0, 0)).x,
            r1=affine.map_vector((paint.r1, 0)).x,
        )
    raise TypeError(type(paint))
def _apply_paint(
    svg_defs: etree.Element,
    svg_path: etree.Element,
    paint: Paint,
    upem_to_vbox: Affine2D,
    reuse_cache: ReuseCache,
    transform: Affine2D = Affine2D.identity(),
):
    """Translate {paint} into fill attributes on {svg_path}.

    Gradient definitions are appended to {svg_defs}. {transform} accumulates
    PaintTransforms encountered while recursing and is expressed in UPEM
    space. Raises NotImplementedError for paint types not handled here.
    """
    if isinstance(paint, PaintSolid):
        _apply_solid_paint(svg_path, paint)
    elif isinstance(paint, (PaintLinearGradient, PaintRadialGradient)):
        # Gradient paint coordinates are in UPEM space, we want them in SVG viewBox
        # so that they match the SVGPath.d coordinates (that we copy unmodified).
        paint = _map_gradient_coordinates(paint, upem_to_vbox)
        # Likewise PaintTransforms refer to UPEM so they must be adjusted for SVG
        if transform != Affine2D.identity():
            # conjugate by upem_to_vbox: viewBox -> UPEM, transform, UPEM -> viewBox
            transform = Affine2D.product(
                upem_to_vbox.inverse(), Affine2D.product(transform, upem_to_vbox)
            )
        _apply_gradient_paint(svg_defs, svg_path, paint, reuse_cache, transform)
    elif isinstance(paint, PaintTransform):
        # accumulate the transform and recurse into the wrapped paint
        transform = Affine2D.product(paint.transform, transform)
        _apply_paint(
            svg_defs, svg_path, paint.paint, upem_to_vbox, reuse_cache, transform
        )
    else:
        raise NotImplementedError(type(paint))
# fully-qualified lxml attribute name for xlink:href, i.e. "{<xlink ns>}href"
_XLINK_HREF_ATTR_NAME = f"{{{svg_meta.xlinkns()}}}href"
def _add_glyph(svg: SVG, color_glyph: ColorGlyph, reuse_cache: ReuseCache):
    """Append {color_glyph} to the OT-SVG document {svg} as a <g> of paths.

    Shapes seen before (tracked in {reuse_cache}) are emitted as <use>
    references; the first occurrence of a cross-glyph shape is moved into the
    document's <defs>. Raises ValueError if the glyph's SVG lacks a viewBox.
    """
    svg_defs = svg.xpath_one("//svg:defs")
    # each glyph gets a group of its very own
    svg_g = svg.append_to("/svg:svg", etree.Element("g"))
    svg_g.attrib["id"] = f"glyph{color_glyph.glyph_id}"
    view_box = color_glyph.svg.view_box()
    if view_box is None:
        raise ValueError(f"{color_glyph.filename} must declare view box")
    # https://github.com/googlefonts/nanoemoji/issues/58: group needs transform
    svg_g.attrib["transform"] = _svg_matrix(color_glyph.transform_for_otsvg_space())
    # gradient coordinates must be mapped from UPEM back into the viewBox
    vbox_to_upem = color_glyph.transform_for_font_space()
    upem_to_vbox = vbox_to_upem.inverse()
    # copy the shapes into our svg
    for painted_layer in color_glyph.painted_layers:
        reuse_key = _inter_glyph_reuse_key(view_box, painted_layer)
        if reuse_key not in reuse_cache.shapes:
            # first time we see this shape: emit the real path element
            el = to_element(SVGPath(d=painted_layer.path))
            _apply_paint(svg_defs, el, painted_layer.paint, upem_to_vbox, reuse_cache)
            svg_g.append(el)
            reuse_cache.shapes[reuse_key] = el
        else:
            el = reuse_cache.shapes[reuse_key]
            _ensure_has_id(el)
            # we have an inter-glyph shape reuse: move the reused element to the outer
            # <defs> and replace its first occurrence with a <use>. Adobe Illustrator
            # doesn't support direct references between glyphs:
            # https://github.com/googlefonts/nanoemoji/issues/264#issuecomment-820518808
            if el not in svg_defs:
                svg_use = etree.Element("use", nsmap=svg.svg_root.nsmap)
                svg_use.attrib[_XLINK_HREF_ATTR_NAME] = f'#{el.attrib["id"]}'
                el.addnext(svg_use)
                svg_defs.append(el)  # append moves
            svg_use = etree.SubElement(svg_g, "use", nsmap=svg.svg_root.nsmap)
            svg_use.attrib[_XLINK_HREF_ATTR_NAME] = f'#{el.attrib["id"]}'
        for reuse in painted_layer.reuses:
            # intra-glyph shape reuse
            _ensure_has_id(el)
            svg_use = etree.SubElement(svg_g, "use", nsmap=svg.svg_root.nsmap)
            svg_use.attrib[_XLINK_HREF_ATTR_NAME] = f'#{el.attrib["id"]}'
            # pure translation goes on x/y; any remainder becomes a transform
            tx, ty = reuse.gettranslate()
            if tx:
                svg_use.attrib["x"] = _ntos(tx)
            if ty:
                svg_use.attrib["y"] = _ntos(ty)
            transform = reuse.translate(-tx, -ty)
            if transform != Affine2D.identity():
                svg_use.attrib["transform"] = _svg_matrix(transform)
def _ensure_ttfont_fully_decompiled(ttfont: ttLib.TTFont):
    """Force every table of {ttfont} to be fully decompiled, in place.

    Lazily-opened fonts keep tables partially decompiled, so round-trip the
    font through an in-memory stream and reload with lazy=False.
    """
    buffer = BytesIO()
    ttfont.save(buffer)
    buffer.seek(0)
    eager = ttLib.TTFont(buffer, lazy=False)
    for tag in eager.keys():
        table = eager[tag]
        # cmap is exceptional in that it always loads subtables lazily upon getting
        # their attributes, no matter the value of TTFont.lazy option.
        # TODO: remove this hack once fixed in fonttools upstream
        if tag == "cmap":
            _ = [st.cmap for st in table.tables]
        ttfont[tag] = table
def _ensure_groups_grouped_in_glyph_order(
color_glyphs: MutableMapping[str, ColorGlyph],
color_glyph_order: Sequence[str],
ttfont: ttLib.TTFont,
reuse_groups: Tuple[Tuple[str, ...]],
):
# svg requires glyphs in same doc have sequential gids; reshuffle to make this true.
# Changing the | |
numbytes)
if signext:
assign(f, "spec_rd_wdata", "spec_rd_addr ? $signed(result) : 0")
else:
assign(f, "spec_rd_wdata", "spec_rd_addr ? result : 0")
assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
assign(f, "spec_trap", "!misa_ok")
print("`endif", file=f)
footer(f)
def s(instruction, funct3, numbytes, misa=0):
    """Generate the formal spec module for a store instruction (opcode 0100011).

    instruction: mnemonic, also used as the output file name
    funct3: 3-bit funct3 field selecting the store width
    numbytes: number of bytes the store writes to memory
    misa: required MISA bits, if any
    """
    path = os.path.join(output_prefix, f"{instruction}.v")
    with open(path, "w") as f:
        header(f, instruction)
        format_s(f)
        misa_check(f, misa)
        print("", file=f)
        print(f" // {instruction.upper()} instruction", file=f)
        # aligned-memory variant: word-align the address, shift data/mask into place
        print("`ifdef RISCV_FORMAL_ALIGNED_MEM", file=f)
        print(" wire [`RISCV_FORMAL_XLEN-1:0] addr = rvfi_rs1_rdata + instruction_imm;", file=f)
        assign(f, "spec_valid", f"rvfi_valid && !instruction_padding && instruction_funct3 == 3'b {funct3} && instruction_opcode == 7'b 0100011")
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rs2_addr", "instruction_rs2")
        assign(f, "spec_mem_addr", "addr & ~(`RISCV_FORMAL_XLEN/8-1)")
        assign(f, "spec_mem_wmask", f"((1 << {numbytes})-1) << (addr-spec_mem_addr)")
        assign(f, "spec_mem_wdata", "rvfi_rs2_rdata << (8*(addr-spec_mem_addr))")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
        assign(f, "spec_trap", f"((addr & ({numbytes}-1)) != 0) || !misa_ok")
        # unaligned-memory variant: use the raw address directly
        print("`else", file=f)
        print(" wire [`RISCV_FORMAL_XLEN-1:0] addr = rvfi_rs1_rdata + instruction_imm;", file=f)
        assign(f, "spec_valid", f"rvfi_valid && !instruction_padding && instruction_funct3 == 3'b {funct3} && instruction_opcode == 7'b 0100011")
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rs2_addr", "instruction_rs2")
        assign(f, "spec_mem_addr", "addr")
        assign(f, "spec_mem_wmask", f"((1 << {numbytes})-1)")
        assign(f, "spec_mem_wdata", "rvfi_rs2_rdata")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
        assign(f, "spec_trap", "!misa_ok")
        print("`endif", file=f)
        footer(f)
def imm(instruction, funct3, expr, wmode=False, misa=0):
    """Generate the formal spec module for a register-immediate ALU instruction.

    instruction: mnemonic, also used as the output file name
    funct3: 3-bit funct3 field
    expr: Verilog expression computing the result
    wmode: True for the RV64 *W (32-bit) variants, which use opcode 0011011
    misa: required MISA bits, if any
    """
    path = os.path.join(output_prefix, f"{instruction}.v")
    with open(path, "w") as f:
        header(f, instruction)
        format_i(f)
        misa_check(f, misa)
        if wmode:
            result_range, opcode = "31:0", "0011011"
        else:
            result_range, opcode = "`RISCV_FORMAL_XLEN-1:0", "0010011"
        print("", file=f)
        print(f" // {instruction.upper()} instruction", file=f)
        print(f" wire [{result_range}] result = {expr};", file=f)
        assign(f, "spec_valid", f"rvfi_valid && !instruction_padding && instruction_funct3 == 3'b {funct3} && instruction_opcode == 7'b {opcode}")
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rd_addr", "instruction_rd")
        if wmode:
            # W-form results are sign-extended from bit 31
            assign(f, "spec_rd_wdata", "spec_rd_addr ? {{`RISCV_FORMAL_XLEN-32{result[31]}}, result} : 0")
        else:
            assign(f, "spec_rd_wdata", "spec_rd_addr ? result : 0")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
        footer(f)
def shimm(instruction, funct6, funct3, expr, wmode=False, misa=0):
    """Generate the formal spec module for a shift-immediate instruction.

    instruction: mnemonic, also used as the output file name
    funct6: 6-bit funct6 field (top of the immediate) identifying the shift
    funct3: 3-bit funct3 field
    expr: Verilog expression computing the result from rs1 and the shamt
    wmode: True for the RV64 *W (32-bit) variants, which use opcode 0011011
    misa: required MISA bits, if any
    """
    with open(os.path.join(output_prefix, "%s.v" % instruction), "w") as f:
        header(f, instruction)
        format_i_shift(f)
        misa_check(f, misa)
        if wmode:
            # W-form shifts only permit shamt < 32
            xtra_shamt_check = "!instruction_shamt[5]"
            result_range = "31:0"
            opcode = "0011011"
        else:
            # shamt[5] is only legal when XLEN is 64
            xtra_shamt_check = "(!instruction_shamt[5] || `RISCV_FORMAL_XLEN == 64)"
            result_range = "`RISCV_FORMAL_XLEN-1:0"
            opcode = "0010011"
        print("", file=f)
        print(" // %s instruction" % instruction.upper(), file=f)
        print(" wire [%s] result = %s;" % (result_range, expr), file=f)
        assign(f, "spec_valid", "rvfi_valid && !instruction_padding && instruction_funct6 == 6'b %s && instruction_funct3 == 3'b %s && instruction_opcode == 7'b %s && %s" % (funct6, funct3, opcode, xtra_shamt_check))
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rd_addr", "instruction_rd")
        if wmode:
            # W-form results are sign-extended from bit 31
            assign(f, "spec_rd_wdata", "spec_rd_addr ? {{`RISCV_FORMAL_XLEN-32{result[31]}}, result} : 0")
        else:
            assign(f, "spec_rd_wdata", "spec_rd_addr ? result : 0")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
        footer(f)
def alu(instruction, funct7, funct3, expr, alt_add=None, alt_sub=None, shamt=False, wmode=False, misa=0):
    """Generate the formal spec module for a register-register ALU instruction.

    instruction: mnemonic, also used as the output file name
    funct7/funct3: encoding fields identifying the operation
    expr: Verilog expression computing the result
    alt_add/alt_sub: when given, the RISCV_FORMAL_ALTOPS build replaces the
        operation with (rs1 +/- rs2) XOR'ed with this 64-bit mask, to verify
        hard-to-prove ops (e.g. MUL/DIV) against a simpler substitute
    shamt: emit the shift-amount wire derived from rs2
    wmode: True for the RV64 *W (32-bit) variants, which use opcode 0111011
    misa: required MISA bits, if any
    """
    with open(os.path.join(output_prefix, "%s.v" % instruction), "w") as f:
        header(f, instruction)
        format_r(f)
        misa_check(f, misa)
        if wmode:
            result_range = "31:0"
            opcode = "0111011"
        else:
            result_range = "`RISCV_FORMAL_XLEN-1:0"
            opcode = "0110011"
        print("", file=f)
        print(" // %s instruction" % instruction.upper(), file=f)
        if shamt:
            if wmode:
                # W-form shifts take a 5-bit shift amount
                print(" wire [4:0] shamt = rvfi_rs2_rdata[4:0];", file=f)
            else:
                # full-width shifts take 6 bits on RV64, 5 bits on RV32
                print(" wire [5:0] shamt = `RISCV_FORMAL_XLEN == 64 ? rvfi_rs2_rdata[5:0] : rvfi_rs2_rdata[4:0];", file=f)
        if alt_add is not None or alt_sub is not None:
            # ALTOPS build: substitute a masked add/sub for the real operation
            print("`ifdef RISCV_FORMAL_ALTOPS", file=f)
            if alt_add is not None:
                print(" wire [%s] altops_bitmask = 64'h%016x;" % (result_range, alt_add), file=f)
                print(" wire [%s] result = (rvfi_rs1_rdata + rvfi_rs2_rdata) ^ altops_bitmask;" % result_range, file=f)
            else:
                print(" wire [%s] altops_bitmask = 64'h%016x;" % (result_range, alt_sub), file=f)
                print(" wire [%s] result = (rvfi_rs1_rdata - rvfi_rs2_rdata) ^ altops_bitmask;" % result_range, file=f)
            print("`else", file=f)
            print(" wire [%s] result = %s;" % (result_range, expr), file=f)
            print("`endif", file=f)
        else:
            print(" wire [%s] result = %s;" % (result_range, expr), file=f)
        assign(f, "spec_valid", "rvfi_valid && !instruction_padding && instruction_funct7 == 7'b %s && instruction_funct3 == 3'b %s && instruction_opcode == 7'b %s" % (funct7, funct3, opcode))
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rs2_addr", "instruction_rs2")
        assign(f, "spec_rd_addr", "instruction_rd")
        if wmode:
            # W-form results are sign-extended from bit 31
            assign(f, "spec_rd_wdata", "spec_rd_addr ? {{`RISCV_FORMAL_XLEN-32{result[31]}}, result} : 0")
        else:
            assign(f, "spec_rd_wdata", "spec_rd_addr ? result : 0")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
        footer(f)
def amo(instruction, funct5, funct3, expr, misa=MISA_A):
    # Generate <instruction>.v: a formal checker module for an AMO (atomic) op.
    #   funct5/funct3: encoding fields; funct3 "010" selects the 32-bit (.W) form
    #   expr: Verilog expression for the new value written back to memory
    #   misa: required extension bits (defaults to the A extension)
    with open(os.path.join(output_prefix, "%s.v" % instruction), "w") as f:
        header(f, instruction)
        format_ra(f)
        misa_check(f, misa)
        if funct3 == "010":
            # 32-bit AMO (.W)
            oprange = "31:0"
            numbytes = 4
        else:
            # 64-bit AMO (.D)
            oprange = "63:0"
            numbytes = 8
        print("", file=f)
        print(" // %s instruction" % instruction.upper(), file=f)
        # mem_result: new memory value; reg_result: old memory value returned in rd
        print(" wire [%s] mem_result = %s;" % (oprange, expr), file=f)
        print(" wire [%s] reg_result = rvfi_mem_rdata[%s];" % (oprange, oprange), file=f)
        print(" wire [`RISCV_FORMAL_XLEN-1:0] addr = rvfi_rs1_rdata;", file=f)
        print("`ifdef RISCV_FORMAL_ALIGNED_MEM", file=f)
        # Aligned-memory mode: round the address down to the bus width and shift
        # write mask/data into position within the aligned word.
        assign(f, "spec_valid", "rvfi_valid && !instruction_padding && instruction_funct5 == 5'b %s && instruction_funct3 == 3'b %s && instruction_opcode == 7'b 0101111" % (funct5, funct3))
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rs2_addr", "instruction_rs2")
        assign(f, "spec_rd_addr", "instruction_rd")
        assign(f, "spec_rd_wdata", "spec_rd_addr ? $signed(reg_result) : 0")
        assign(f, "spec_mem_addr", "addr & ~(`RISCV_FORMAL_XLEN/8-1)")
        assign(f, "spec_mem_wmask", "((1 << %d)-1) << (addr-spec_mem_addr)" % numbytes)
        assign(f, "spec_mem_wdata", "mem_result << (8*(addr-spec_mem_addr))")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
        # misaligned AMOs trap; misa mismatch also traps
        assign(f, "spec_trap", "((addr & (%d-1)) != 0) || !misa_ok" % numbytes)
        print("`else", file=f)
        # Byte-exact memory interface: address and data are used unshifted.
        assign(f, "spec_valid", "rvfi_valid && !instruction_padding && instruction_funct5 == 5'b %s && instruction_funct3 == 3'b %s && instruction_opcode == 7'b 0101111" % (funct5, funct3))
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rs2_addr", "instruction_rs2")
        assign(f, "spec_rd_addr", "instruction_rd")
        assign(f, "spec_rd_wdata", "spec_rd_addr ? $signed(reg_result) : 0")
        assign(f, "spec_mem_addr", "addr")
        assign(f, "spec_mem_wmask", "((1 << %d)-1)" % numbytes)
        assign(f, "spec_mem_wdata", "mem_result")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 4")
        assign(f, "spec_trap", "((addr & (%d-1)) != 0) || !misa_ok" % numbytes)
        print("`endif", file=f)
        footer(f)
def c_addi4spn(instruction="c_addi4spn", misa=MISA_C):
    # Emit the checker module for C.ADDI4SPN (rd' = sp + non-zero immediate).
    outfile = os.path.join(output_prefix, "{}.v".format(instruction))
    with open(outfile, "w") as f:
        header(f, instruction)
        format_ciw(f)
        misa_check(f, misa)
        print("", file=f)
        print(" // {} instruction".format(instruction.upper()), file=f)
        print(" wire [`RISCV_FORMAL_XLEN-1:0] result = rvfi_rs1_rdata + instruction_imm;", file=f)
        spec_pairs = (
            # valid only with a non-zero immediate (imm == 0 is reserved)
            ("spec_valid", "rvfi_valid && !instruction_padding && instruction_funct3 == 3'b 000 && instruction_opcode == 2'b 00 && instruction_imm"),
            # rs1 is hard-wired to the stack pointer (x2)
            ("spec_rs1_addr", "2"),
            ("spec_rd_addr", "instruction_rd"),
            ("spec_rd_wdata", "spec_rd_addr ? result : 0"),
            ("spec_pc_wdata", "rvfi_pc_rdata + 2"),
        )
        for spec_name, spec_expr in spec_pairs:
            assign(f, spec_name, spec_expr)
        footer(f)
def c_l(instruction, funct3, numbytes, signext, misa=MISA_C):
    # Generate <instruction>.v for a compressed load (e.g. C.LW / C.LD).
    #   numbytes: access width in bytes
    #   signext: sign-extend the loaded value into rd
    with open(os.path.join(output_prefix, "%s.v" % instruction), "w") as f:
        header(f, instruction)
        format_cl(f, numbytes)
        misa_check(f, misa)
        print("", file=f)
        print(" // %s instruction" % instruction.upper(), file=f)
        print("`ifdef RISCV_FORMAL_ALIGNED_MEM", file=f)
        # Aligned-memory mode: shift the loaded bytes out of the aligned bus word.
        print(" wire [`RISCV_FORMAL_XLEN-1:0] addr = rvfi_rs1_rdata + instruction_imm;", file=f)
        print(" wire [%d:0] result = rvfi_mem_rdata >> (8*(addr-spec_mem_addr));" % (8*numbytes-1), file=f)
        assign(f, "spec_valid", "rvfi_valid && !instruction_padding && instruction_funct3 == 3'b %s && instruction_opcode == 2'b 00" % funct3)
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rd_addr", "instruction_rd")
        assign(f, "spec_mem_addr", "addr & ~(`RISCV_FORMAL_XLEN/8-1)")
        assign(f, "spec_mem_rmask", "((1 << %d)-1) << (addr-spec_mem_addr)" % numbytes)
        if signext:
            assign(f, "spec_rd_wdata", "spec_rd_addr ? $signed(result) : 0")
        else:
            assign(f, "spec_rd_wdata", "spec_rd_addr ? result : 0")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 2")
        # misaligned loads trap in aligned-memory mode
        assign(f, "spec_trap", "((addr & (%d-1)) != 0) || !misa_ok" % numbytes)
        print("`else", file=f)
        # Byte-exact memory interface: no alignment shifting, no alignment trap.
        print(" wire [`RISCV_FORMAL_XLEN-1:0] addr = rvfi_rs1_rdata + instruction_imm;", file=f)
        print(" wire [%d:0] result = rvfi_mem_rdata;" % (8*numbytes-1), file=f)
        assign(f, "spec_valid", "rvfi_valid && !instruction_padding && instruction_funct3 == 3'b %s && instruction_opcode == 2'b 00" % funct3)
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rd_addr", "instruction_rd")
        assign(f, "spec_mem_addr", "addr")
        assign(f, "spec_mem_rmask", "((1 << %d)-1)" % numbytes)
        if signext:
            assign(f, "spec_rd_wdata", "spec_rd_addr ? $signed(result) : 0")
        else:
            assign(f, "spec_rd_wdata", "spec_rd_addr ? result : 0")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 2")
        assign(f, "spec_trap", "!misa_ok")
        print("`endif", file=f)
        footer(f)
def c_s(instruction, funct3, numbytes, misa=MISA_C):
    # Emit the checker module for a compressed store (e.g. C.SW / C.SD).
    outfile = os.path.join(output_prefix, "{}.v".format(instruction))
    with open(outfile, "w") as f:
        header(f, instruction)
        format_cs(f, numbytes)
        misa_check(f, misa)
        print("", file=f)
        print(" // {} instruction".format(instruction.upper()), file=f)
        print(" wire [`RISCV_FORMAL_XLEN-1:0] addr = rvfi_rs1_rdata + instruction_imm;", file=f)
        # The validity check is identical in both memory-interface modes.
        valid_expr = "rvfi_valid && !instruction_padding && instruction_funct3 == 3'b {} && instruction_opcode == 2'b 00".format(funct3)
        print("`ifdef RISCV_FORMAL_ALIGNED_MEM", file=f)
        # Aligned-memory mode: align the address, shift mask and data into place.
        assign(f, "spec_valid", valid_expr)
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rs2_addr", "instruction_rs2")
        assign(f, "spec_mem_addr", "addr & ~(`RISCV_FORMAL_XLEN/8-1)")
        assign(f, "spec_mem_wmask", "((1 << {})-1) << (addr-spec_mem_addr)".format(numbytes))
        assign(f, "spec_mem_wdata", "rvfi_rs2_rdata << (8*(addr-spec_mem_addr))")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 2")
        assign(f, "spec_trap", "((addr & ({}-1)) != 0) || !misa_ok".format(numbytes))
        print("`else", file=f)
        # Byte-exact memory interface: unshifted address/data, no alignment trap.
        assign(f, "spec_valid", valid_expr)
        assign(f, "spec_rs1_addr", "instruction_rs1")
        assign(f, "spec_rs2_addr", "instruction_rs2")
        assign(f, "spec_mem_addr", "addr")
        assign(f, "spec_mem_wmask", "((1 << {})-1)".format(numbytes))
        assign(f, "spec_mem_wdata", "rvfi_rs2_rdata")
        assign(f, "spec_pc_wdata", "rvfi_pc_rdata + 2")
        assign(f, "spec_trap", "!misa_ok")
        print("`endif", file=f)
        footer(f)
def c_addi(instruction="c_addi", wmode=False, misa=MISA_C):
with open(os.path.join(output_prefix, "%s.v" % instruction), "w") as f:
header(f, instruction)
format_ci(f)
misa_check(f, misa)
print("", file=f)
print(" // %s | |
error, re-authenticating..."
)
== 5
)
self.assertIn(
"INFO iCloud re-authentication failed! Please try again later.",
self._caplog.text,
)
# Make sure we only call sleep 4 times (skip the first retry)
self.assertEqual(sleep_mock.call_count, 4)
assert result.exit_code == -1
def test_handle_connection_error(self):
    """ConnectionError during download is retried and reported without aborting."""
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
        # Pass fixed client ID via environment variable
        os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"

        def mock_raise_response_error(arg):
            # Every download attempt fails with a connection error.
            raise ConnectionError("Connection Error")

        with mock.patch.object(PhotoAsset, "download") as pa_download:
            pa_download.side_effect = mock_raise_response_error

            # Let the initial authenticate() call succeed,
            # but do nothing on the second try.
            orig_authenticate = PyiCloudService.authenticate

            def mocked_authenticate(self):
                if not hasattr(self, "already_authenticated"):
                    orig_authenticate(self)
                    setattr(self, "already_authenticated", True)

            # WAIT_SECONDS = 0 keeps the retry backoff from slowing the test.
            with mock.patch("icloudpd.constants.WAIT_SECONDS", 0):
                with mock.patch.object(
                    PyiCloudService, "authenticate", new=mocked_authenticate
                ):
                    runner = CliRunner()
                    result = runner.invoke(
                        main,
                        [
                            "--username",
                            "<EMAIL>",
                            "--password",
                            "<PASSWORD>",
                            "--recent",
                            "1",
                            "--skip-videos",
                            "--skip-live-photos",
                            "--no-progress-bar",
                            "--threads-num",
                            1,
                            "-d",
                            base_dir,
                        ],
                    )
                    print_result_exception(result)

                    # Error msg should be repeated 5 times (one per retry)
                    assert (
                        self._caplog.text.count(
                            "Error downloading IMG_7409.JPG, retrying after 0 seconds..."
                        )
                        == 5
                    )
                    self.assertIn(
                        "INFO Could not download IMG_7409.JPG! Please try again later.",
                        self._caplog.text,
                    )
                    # Download errors are non-fatal: the run still exits cleanly.
                    assert result.exit_code == 0
def test_handle_albums_error(self):
    """An API error while fetching album folders aborts the run with exit code 1."""
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
        # Pass fixed client ID via environment variable
        os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"

        def mock_raise_response_error():
            # Folder listing fails with an API-level error.
            raise PyiCloudAPIResponseError("Api Error", 100)

        with mock.patch.object(PhotosService, "_fetch_folders") as pa_photos_request:
            pa_photos_request.side_effect = mock_raise_response_error

            # Let the initial authenticate() call succeed,
            # but do nothing on the second try.
            orig_authenticate = PyiCloudService.authenticate

            def mocked_authenticate(self):
                if not hasattr(self, "already_authenticated"):
                    orig_authenticate(self)
                    setattr(self, "already_authenticated", True)

            with mock.patch("icloudpd.constants.WAIT_SECONDS", 0):
                with mock.patch.object(
                    PyiCloudService, "authenticate", new=mocked_authenticate
                ):
                    runner = CliRunner()
                    result = runner.invoke(
                        main,
                        [
                            "--username",
                            "<EMAIL>",
                            "--password",
                            "<PASSWORD>",
                            "--recent",
                            "1",
                            "--skip-videos",
                            "--skip-live-photos",
                            "--no-progress-bar",
                            "--threads-num",
                            1,
                            "-d",
                            base_dir,
                        ],
                    )
                    print_result_exception(result)

                    # Album listing failure is fatal.
                    assert result.exit_code == 1
def test_missing_size(self):
    """Missing download URLs are logged once per asset and do not abort the run."""
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    # download() returning False simulates "no URL available for this size".
    with mock.patch.object(PhotoAsset, "download") as pa_download:
        pa_download.return_value = False
        with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
            # Pass fixed client ID via environment variable
            os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
            runner = CliRunner()
            result = runner.invoke(
                main,
                [
                    "--username",
                    "<EMAIL>",
                    "--password",
                    "<PASSWORD>",
                    "--recent",
                    "3",
                    "--no-progress-bar",
                    "--threads-num",
                    1,
                    "-d",
                    base_dir,
                ],
            )
            print_result_exception(result)

            self.assertIn(
                "DEBUG Looking up all photos and videos from album All Photos...", self._caplog.text
            )
            self.assertIn(
                f"INFO Downloading 3 original photos and videos to {base_dir} ...",
                self._caplog.text,
            )
            # These error messages should not be repeated more than once
            assert (
                self._caplog.text.count(
                    "ERROR Could not find URL to download IMG_7409.JPG for size original!"
                )
                == 1
            )
            assert (
                self._caplog.text.count(
                    "ERROR Could not find URL to download IMG_7408.JPG for size original!"
                )
                == 1
            )
            assert (
                self._caplog.text.count(
                    "ERROR Could not find URL to download IMG_7407.JPG for size original!"
                )
                == 1
            )
            self.assertIn(
                "INFO All photos have been downloaded!", self._caplog.text
            )
            assert result.exit_code == 0
def test_size_fallback_to_original(self):
    """When the requested size is unavailable, the original size is downloaded."""
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    with mock.patch("icloudpd.download.download_media") as dp_patched:
        dp_patched.return_value = True
        with mock.patch("icloudpd.download.os.utime") as ut_patched:
            ut_patched.return_value = None
            # Only "original" and "medium" exist; the requested "thumb" does not.
            with mock.patch.object(PhotoAsset, "versions") as pa:
                pa.return_value = ["original", "medium"]

                with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
                    # Pass fixed client ID via environment variable
                    os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
                    runner = CliRunner()
                    result = runner.invoke(
                        main,
                        [
                            "--username",
                            "<EMAIL>",
                            "--password",
                            "<PASSWORD>",
                            "--recent",
                            "1",
                            "--size",
                            "thumb",
                            "--no-progress-bar",
                            "--threads-num",
                            1,
                            "-d",
                            base_dir,
                        ],
                    )
                    print_result_exception(result)

                    self.assertIn(
                        "DEBUG Looking up all photos and videos from album All Photos...",
                        self._caplog.text,
                    )
                    self.assertIn(
                        f"INFO Downloading the first thumb photo or video to {base_dir} ...",
                        self._caplog.text,
                    )
                    self.assertIn(
                        f"INFO Downloading {os.path.join(base_dir, os.path.normpath('2018/07/31/IMG_7409.JPG'))}",
                        self._caplog.text,
                    )
                    self.assertIn(
                        "INFO All photos have been downloaded!", self._caplog.text
                    )
                    # The fallback must request "original", not the missing "thumb".
                    dp_patched.assert_called_once_with(
                        ANY,
                        ANY,
                        f"{os.path.join(base_dir, os.path.normpath('2018/07/31/IMG_7409.JPG'))}",
                        "original",
                    )
                    assert result.exit_code == 0
def test_force_size(self):
    """--force-size skips assets missing the requested size instead of falling back.

    Bug fix: `dp_patched.assert_not_called` was missing parentheses, so the
    mock assertion was never executed (the attribute lookup is a no-op).
    """
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    with mock.patch("icloudpd.download.download_media") as dp_patched:
        dp_patched.return_value = True
        # Only "original" and "medium" exist; the requested "thumb" does not.
        with mock.patch.object(PhotoAsset, "versions") as pa:
            pa.return_value = ["original", "medium"]

            with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
                # Pass fixed client ID via environment variable
                os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
                runner = CliRunner()
                result = runner.invoke(
                    main,
                    [
                        "--username",
                        "<EMAIL>",
                        "--password",
                        "<PASSWORD>",
                        "--recent",
                        "1",
                        "--size",
                        "thumb",
                        "--force-size",
                        "--no-progress-bar",
                        "--threads-num",
                        1,
                        "-d",
                        base_dir,
                    ],
                )
                print_result_exception(result)

                self.assertIn(
                    "DEBUG Looking up all photos and videos from album All Photos...",
                    self._caplog.text,
                )
                self.assertIn(
                    f"INFO Downloading the first thumb photo or video to {base_dir} ...",
                    self._caplog.text,
                )
                self.assertIn(
                    "ERROR thumb size does not exist for IMG_7409.JPG. Skipping...",
                    self._caplog.text,
                )
                self.assertIn(
                    "INFO All photos have been downloaded!", self._caplog.text
                )
                # With --force-size nothing must be downloaded.
                dp_patched.assert_not_called()
                assert result.exit_code == 0
def test_invalid_creation_date(self):
    """A creation date that cannot be localized still downloads, using the raw date."""
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    with mock.patch.object(PhotoAsset, "created", new_callable=mock.PropertyMock) as dt_mock:
        # Can't mock `astimezone` because it's a readonly property, so have to
        # create a new class that inherits from datetime.datetime
        class NewDateTime(datetime.datetime):
            def astimezone(self, tz=None):
                raise ValueError('Invalid date')

        dt_mock.return_value = NewDateTime(2018, 1, 1, 0, 0, 0)

        with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
            # Pass fixed client ID via environment variable
            os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
            runner = CliRunner()
            result = runner.invoke(
                main,
                [
                    "--username",
                    "<EMAIL>",
                    "--password",
                    "<PASSWORD>",
                    "--recent",
                    "1",
                    "--skip-live-photos",
                    "--no-progress-bar",
                    "--threads-num",
                    1,
                    "-d",
                    base_dir,
                ],
            )
            print_result_exception(result)

            self.assertIn(
                "DEBUG Looking up all photos and videos from album All Photos...",
                self._caplog.text,
            )
            self.assertIn(
                f"INFO Downloading the first original photo or video to {base_dir} ...",
                self._caplog.text,
            )
            # Localization failure is logged ...
            self.assertIn(
                "ERROR Could not convert photo created date to local timezone (2018-01-01 00:00:00)",
                self._caplog.text,
            )
            # ... but the photo is still downloaded into the naive-date folder.
            self.assertIn(
                f"INFO Downloading {os.path.join(base_dir, os.path.normpath('2018/01/01/IMG_7409.JPG'))}",
                self._caplog.text,
            )
            self.assertIn(
                "INFO All photos have been downloaded!", self._caplog.text
            )
            assert result.exit_code == 0
@pytest.mark.skipif(sys.platform == 'win32',
                    reason="does not run on windows")
def test_invalid_creation_year(self):
    """A pre-epoch creation year still downloads, using the raw year as folder name."""
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    with mock.patch.object(PhotoAsset, "created", new_callable=mock.PropertyMock) as dt_mock:
        # Can't mock `astimezone` because it's a readonly property, so have to
        # create a new class that inherits from datetime.datetime
        class NewDateTime(datetime.datetime):
            def astimezone(self, tz=None):
                raise ValueError('Invalid date')

        # Year 5 is far outside any sane photo timestamp.
        dt_mock.return_value = NewDateTime(5, 1, 1, 0, 0, 0)

        with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
            # Pass fixed client ID via environment variable
            os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
            runner = CliRunner()
            result = runner.invoke(
                main,
                [
                    "--username",
                    "<EMAIL>",
                    "--password",
                    "<PASSWORD>",
                    "--recent",
                    "1",
                    "--skip-live-photos",
                    "--no-progress-bar",
                    "--threads-num",
                    1,
                    "-d",
                    base_dir,
                ],
            )
            print_result_exception(result)

            self.assertIn(
                "DEBUG Looking up all photos and videos from album All Photos...",
                self._caplog.text,
            )
            self.assertIn(
                f"INFO Downloading the first original photo or video to {base_dir} ...",
                self._caplog.text,
            )
            self.assertIn(
                "ERROR Could not convert photo created date to local timezone (0005-01-01 00:00:00)",
                self._caplog.text,
            )
            # Note the unpadded year in the output path.
            self.assertIn(
                f"INFO Downloading {os.path.join(base_dir, os.path.normpath('5/01/01/IMG_7409.JPG'))}",
                self._caplog.text,
            )
            self.assertIn(
                "INFO All photos have been downloaded!", self._caplog.text
            )
            assert result.exit_code == 0
def test_unknown_item_type(self):
    """Assets that are neither photo nor video are skipped, not downloaded.

    Bug fix: `dp_patched.assert_not_called` was missing parentheses, so the
    mock assertion was never executed (the attribute lookup is a no-op).
    """
    base_dir = os.path.normpath("tests/fixtures/Photos")
    if os.path.exists(base_dir):
        shutil.rmtree(base_dir)
    os.makedirs(base_dir)
    with mock.patch("icloudpd.download.download_media") as dp_patched:
        dp_patched.return_value = True
        with mock.patch.object(PhotoAsset, "item_type", new_callable=mock.PropertyMock) as it_mock:
            it_mock.return_value = 'unknown'

            with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
                # Pass fixed client ID via environment variable
                os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
                runner = CliRunner()
                result = runner.invoke(
                    main,
                    [
                        "--username",
                        "<EMAIL>",
                        "--password",
                        "<PASSWORD>",
                        "--recent",
                        "1",
                        "--no-progress-bar",
                        "--threads-num",
                        1,
                        "-d",
                        base_dir,
                    ],
                )
                print_result_exception(result)

                self.assertIn(
                    "DEBUG Looking up all photos and videos from album All Photos...",
                    self._caplog.text,
                )
                self.assertIn(
                    f"INFO Downloading the first original photo or video to {base_dir} ...",
                    self._caplog.text,
                )
                self.assertIn(
                    "INFO Skipping IMG_7409.JPG, only downloading photos and videos. (Item type was: unknown)",
                    self._caplog.text,
                )
                self.assertIn(
                    "INFO All photos have been downloaded!", self._caplog.text
                )
                # Unknown item types must never reach the downloader.
                dp_patched.assert_not_called()
                assert result.exit_code == 0
def test_download_and_dedupe_existing_photos(self):
base_dir = os.path.normpath("tests/fixtures/Photos")
if os.path.exists(base_dir):
shutil.rmtree(base_dir)
os.makedirs(base_dir)
os.makedirs(os.path.join(base_dir, os.path.normpath("2018/07/31/")))
with open(os.path.join(base_dir, os.path.normpath("2018/07/31/IMG_7409.JPG")), "a") as f:
f.truncate(1)
with open(os.path.join(base_dir, os.path.normpath("2018/07/31/IMG_7409.MOV")), "a") as f:
f.truncate(1)
os.makedirs(os.path.join(base_dir, os.path.normpath("2018/07/30/")))
with open(os.path.join(base_dir, os.path.normpath("2018/07/30/IMG_7408.JPG")), "a") as f:
f.truncate(1151066)
with open(os.path.join(base_dir, os.path.normpath("2018/07/30/IMG_7408.MOV")), "a") as f:
f.truncate(1606512)
# Download the first photo, but mock the video download
orig_download = PhotoAsset.download
def mocked_download(self, size):
if not hasattr(PhotoAsset, "already_downloaded"):
response = orig_download(self, size)
setattr(PhotoAsset, "already_downloaded", True)
return response
return mock.MagicMock()
with mock.patch.object(PhotoAsset, "download", new=mocked_download):
with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
# Pass fixed client ID via environment variable
os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
runner = CliRunner()
result = runner.invoke(
main,
[
"--username",
"<EMAIL>",
"--password",
"<PASSWORD>",
"--recent",
"5",
"--skip-videos",
# "--set-exif-datetime",
"--no-progress-bar",
"-d",
base_dir,
"--threads-num",
"1"
],
)
print_result_exception(result)
self.assertIn("DEBUG Looking up all photos from album All Photos...", self._caplog.text)
self.assertIn(
f"INFO Downloading 5 original photos to {base_dir} ...",
self._caplog.text,
)
self.assertIn(
f"INFO {os.path.join(base_dir, os.path.normpath('2018/07/31/IMG_7409-1884695.JPG'))} deduplicated.",
self._caplog.text,
)
self.assertIn(
f"INFO Downloading {os.path.join(base_dir, os.path.normpath('2018/07/31/IMG_7409-1884695.JPG'))}",
self._caplog.text,
)
self.assertIn(
f"INFO {os.path.join(base_dir, os.path.normpath('2018/07/31/IMG_7409-3294075.MOV'))} deduplicated.",
self._caplog.text,
)
self.assertIn(
f"INFO Downloading {os.path.join(base_dir, os.path.normpath('2018/07/31/IMG_7409-3294075.MOV'))}",
self._caplog.text,
)
self.assertIn(
f"INFO {os.path.join(base_dir, os.path.normpath('2018/07/30/IMG_7408.JPG'))} already exists.",
self._caplog.text,
)
self.assertIn(
f"INFO {os.path.join(base_dir, os.path.normpath('2018/07/30/IMG_7408.MOV'))} already exists.",
self._caplog.text,
)
self.assertIn(
"INFO Skipping IMG_7405.MOV, only downloading photos.", self._caplog.text
)
self.assertIn(
"INFO Skipping IMG_7404.MOV, only downloading | |
# catch and set all unexpected exceptions
except Exception as e:
self.set_exception(e)
self.complete_task()
return False
def check(self):
    """Verify the view was deleted.

    A successful query means the view still exists (failure); a
    QueryViewException means the deletion took effect (success).
    """
    try:
        rest = RestConnection(self.server)
        # make sure view was deleted
        query = {"stale": "ok"}
        content = rest.query_view(self.design_doc_name, self.view.name,
                                  self.bucket, query)
        # Query succeeded -> the view is still present, so deletion failed.
        return False
    except QueryViewException as e:
        self.test_log.debug("View: {0} was successfully deleted in ddoc: {1}"
                            .format(self.view.name, self.design_doc_name))
        return True
    # catch and set all unexpected exceptions
    except Exception as e:
        self.set_exception(e)
        return False
class ViewQueryTask(Task):
    """Query a view and (optionally) verify the expected number of returned rows.

    Bug fix: in check(), the recursive retry `self.check()` discarded its
    return value, so a retry that eventually succeeded still returned None
    (falsy) to the caller. The result is now propagated with `return`.
    """

    def __init__(self, server, design_doc_name, view_name,
                 query, expected_rows=None,
                 bucket="default", retry_time=2):
        Task.__init__(self, "query_view_task_{}_{}".format(design_doc_name,
                                                           view_name))
        self.server = server
        self.bucket = bucket
        self.view_name = view_name
        self.design_doc_name = design_doc_name
        self.query = query
        # expected_rows=None means "no verification, just return the content".
        self.expected_rows = expected_rows
        self.retry_time = retry_time
        self.timeout = 900

    def call(self):
        """Run the query, retrying on QueryViewException up to retry_time times."""
        self.start_task()
        retries = 0
        while retries < self.retry_time:
            try:
                rest = RestConnection(self.server)
                # make sure view can be queried
                content = \
                    rest.query_view(self.design_doc_name, self.view_name,
                                    self.bucket, self.query, self.timeout)

                if self.expected_rows is None:
                    # no verification requested; hand back the raw content
                    self.result = True
                    self.complete_task()
                    return content
                else:
                    return_value = self.check()
                    self.result = return_value
                    self.complete_task()
                    return return_value
            except QueryViewException as e:
                # initial query failed, wait and try again
                time.sleep(self.retry_time)
                retries += 1
            # catch and set all unexpected exceptions
            except Exception as e:
                self.set_exception(e)
                self.complete_task()
                # NOTE(review): result is left True on unexpected failure while
                # False is returned — looks inconsistent, preserved as-is; confirm
                # against callers before changing.
                self.result = True
                return False

    def check(self):
        """Verify the row count, retrying (recursively) until it matches."""
        try:
            rest = RestConnection(self.server)
            # query and verify expected num of rows returned
            content = \
                rest.query_view(self.design_doc_name, self.view_name,
                                self.bucket, self.query, self.timeout)

            self.test_log.debug("Server: %s, Design Doc: %s, View: %s, (%d rows) expected, (%d rows) returned"
                                % (self.server.ip, self.design_doc_name,
                                   self.view_name, self.expected_rows,
                                   len(content['rows'])))

            raised_error = content.get(u'error', '') or \
                ''.join([str(item) for item in content.get(u'errors', [])])
            if raised_error:
                raise QueryViewException(self.view_name, raised_error)

            if len(content['rows']) == self.expected_rows:
                self.test_log.debug("Expected rows: '{0}' was found for view query"
                                    .format(self.expected_rows))
                return True
            else:
                if len(content['rows']) > self.expected_rows:
                    raise QueryViewException(self.view_name,
                                             "Server: {0}, Design Doc: {1}, actual returned rows: '{2}' are greater than expected {3}"
                                             .format(self.server.ip,
                                                     self.design_doc_name,
                                                     len(content['rows']),
                                                     self.expected_rows, ))
                if "stale" in self.query:
                    if self.query["stale"].lower() == "false":
                        return False

                # retry until expected results or task times out
                time.sleep(self.retry_time)
                # FIX: propagate the retry's result (was discarded before)
                return self.check()
        except QueryViewException as e:
            # subsequent query failed! exit
            self.set_exception(e)
            return False
        # catch and set all unexpected exceptions
        except Exception as e:
            self.set_exception(e)
            return False
class N1QLQueryTask(Task):
    """Execute an N1QL query and (optionally) verify its result.

    Bug fixes:
    - `retry_time` parameter was accepted but ignored (`self.retry_time = 2`);
      the passed value is now honored (default unchanged, so existing callers
      see identical behavior).
    - The retried `self.call()` result was discarded, making a successful
      retry return None; it is now returned.
    """

    def __init__(self, server, bucket, query, n1ql_helper=None,
                 expected_result=None, verify_results=True,
                 is_explain_query=False, index_name=None, retry_time=2,
                 scan_consistency=None, scan_vector=None):
        super(N1QLQueryTask, self).__init__("query_n1ql_task")
        self.server = server
        self.bucket = bucket
        self.query = query
        self.expected_result = expected_result
        self.n1ql_helper = n1ql_helper
        self.timeout = 900
        self.verify_results = verify_results
        self.is_explain_query = is_explain_query
        self.index_name = index_name
        self.retry_time = retry_time  # FIX: was hard-coded to 2
        self.retried = 0
        self.scan_consistency = scan_consistency
        self.scan_vector = scan_vector

    def call(self):
        self.start_task()
        try:
            # Query and get results
            self.test_log.debug(" <<<<< START Executing Query {0} >>>>>>"
                                .format(self.query))
            if not self.is_explain_query:
                self.msg, self.isSuccess = self.n1ql_helper.run_query_and_verify_result(
                    query=self.query, server=self.server,
                    expected_result=self.expected_result,
                    scan_consistency=self.scan_consistency,
                    scan_vector=self.scan_vector,
                    verify_results=self.verify_results)
            else:
                self.actual_result = self.n1ql_helper.run_cbq_query(
                    query=self.query, server=self.server,
                    scan_consistency=self.scan_consistency,
                    scan_vector=self.scan_vector)
                self.test_log.debug(self.actual_result)
            self.test_log.debug(" <<<<< Done Executing Query {0} >>>>>>"
                                .format(self.query))
            return_value = self.check()
            self.complete_task()
            return return_value
        except N1QLQueryException as e:
            # initial query failed, try again
            if self.retried < self.retry_time:
                self.retried += 1
                time.sleep(self.retry_time)
                return self.call()  # FIX: propagate the retry's result
        # catch and set all unexpected exceptions
        except Exception as e:
            self.complete_task()
            self.set_exception(e)

    def check(self):
        try:
            # Verify correctness of result set
            if self.verify_results:
                if not self.is_explain_query:
                    if not self.isSuccess:
                        self.test_log.debug("Query {0} results leads to INCORRECT RESULT"
                                            .format(self.query))
                        raise N1QLQueryException(self.msg)
                else:
                    check = self.n1ql_helper.verify_index_with_explain(self.actual_result, self.index_name)
                    if not check:
                        actual_result = self.n1ql_helper.run_cbq_query(
                            query="select * from system:indexes",
                            server=self.server)
                        self.test_log.debug(actual_result)
                        raise Exception(
                            " INDEX usage in Query {0} :: NOT FOUND {1} :: as observed in result {2}"
                            .format(self.query, self.index_name, self.actual_result))
            self.test_log.debug(" <<<<< Done VERIFYING Query {0} >>>>>>"
                                .format(self.query))
            return True
        except N1QLQueryException as e:
            # subsequent query failed! exit
            self.set_exception(e)
        # catch and set all unexpected exceptions
        except Exception as e:
            self.set_exception(e)
class CreateIndexTask(Task):
    """Create an index via N1QL and (unless deferred) wait for it to come online.

    Bug fixes:
    - Base class was initialized twice (`super().__init__` AND `Task.__init__`);
      the redundant second call is removed.
    - `retry_time` parameter was accepted but ignored (`self.retry_time = 2`).
    - The retried `self.call()` result was discarded; it is now returned.
    """

    def __init__(self, server, bucket, index_name, query, n1ql_helper=None,
                 retry_time=2, defer_build=False, timeout=240):
        super(CreateIndexTask, self).__init__("create_index_task")
        self.server = server
        self.bucket = bucket
        self.defer_build = defer_build
        self.query = query
        self.index_name = index_name
        self.n1ql_helper = n1ql_helper
        self.retry_time = retry_time  # FIX: was hard-coded to 2
        self.retried = 0
        self.timeout = timeout

    def call(self):
        self.start_task()
        try:
            # Query and get results
            self.n1ql_helper.run_cbq_query(query=self.query, server=self.server)
            return_value = self.check()
            self.complete_task()
            return return_value
        except CreateIndexException as e:
            # initial query failed, try again
            if self.retried < self.retry_time:
                self.retried += 1
                time.sleep(self.retry_time)
                return self.call()  # FIX: propagate the retry's result
        # catch and set all unexpected exceptions
        except Exception as e:
            self.test_log.error(e)
            self.set_exception(e)

    def check(self):
        try:
            # Verify correctness of result set
            check = True
            if not self.defer_build:
                # deferred builds are not expected online yet; skip the wait
                check = self.n1ql_helper.is_index_online_and_in_list(
                    self.bucket, self.index_name, server=self.server,
                    timeout=self.timeout)
            if not check:
                raise CreateIndexException("Index {0} not created as expected"
                                           .format(self.index_name))
            return True
        except CreateIndexException as e:
            # subsequent query failed! exit
            self.test_log.error(e)
            self.set_exception(e)
        # catch and set all unexpected exceptions
        except Exception as e:
            self.test_log.error(e)
            self.set_exception(e)
class BuildIndexTask(Task):
    """Issue a BUILD INDEX statement via N1QL.

    Bug fixes:
    - `retry_time` parameter was accepted but ignored (`self.retry_time = 2`).
    - The retried `self.call()` result was discarded; it is now returned.
    """

    def __init__(self, server, bucket, query, n1ql_helper=None,
                 retry_time=2):
        super(BuildIndexTask, self).__init__("build_index_task")
        self.server = server
        self.bucket = bucket
        self.query = query
        self.n1ql_helper = n1ql_helper
        self.retry_time = retry_time  # FIX: was hard-coded to 2
        self.retried = 0

    def call(self):
        self.start_task()
        try:
            # Query and get results
            self.n1ql_helper.run_cbq_query(query=self.query, server=self.server)
            return_value = self.check()
            self.complete_task()
            return return_value
        except CreateIndexException as e:
            # initial query failed, try again
            if self.retried < self.retry_time:
                self.retried += 1
                time.sleep(self.retry_time)
                return self.call()  # FIX: propagate the retry's result
        # catch and set all unexpected exceptions
        except Exception as e:
            self.set_exception(e)

    def check(self):
        # No post-build verification is implemented; success is assumed once
        # the query itself did not raise.
        try:
            return True
        except CreateIndexException as e:
            # subsequent query failed! exit
            self.set_exception(e)
        # catch and set all unexpected exceptions
        except Exception as e:
            self.set_exception(e)
class MonitorIndexTask(Task):
    """Wait until a named index is online and listed.

    Bug fixes:
    - `retry_time` parameter was accepted but ignored (`self.retry_time = 2`).
    - Task name was a copy-paste of "build_index_task"; renamed to
      "monitor_index_task" for accurate logging/identification.
    """

    def __init__(self, server, bucket, index_name, n1ql_helper=None,
                 retry_time=2, timeout=240):
        super(MonitorIndexTask, self).__init__("monitor_index_task")
        self.server = server
        self.bucket = bucket
        self.index_name = index_name
        self.n1ql_helper = n1ql_helper
        self.retry_time = retry_time  # FIX: was hard-coded to 2
        self.timeout = timeout

    def call(self):
        self.start_task()
        try:
            check = self.n1ql_helper.is_index_online_and_in_list(
                self.bucket, self.index_name, server=self.server,
                timeout=self.timeout)
            if not check:
                raise CreateIndexException("Index {0} not created as expected"
                                           .format(self.index_name))
            return_value = self.check()
            self.complete_task()
            return return_value
        except CreateIndexException as e:
            # initial query failed, try again
            self.set_exception(e)
        # catch and set all unexpected exceptions
        except Exception as e:
            self.set_exception(e)

    def check(self):
        # The real verification happens in call(); this only reports success.
        try:
            return True
        except CreateIndexException as e:
            # subsequent query failed! exit
            self.set_exception(e)
        # catch and set all unexpected exceptions
        except Exception as e:
            self.set_exception(e)
class DropIndexTask(Task):
    """Drop an index via N1QL and verify it disappears from the index list.

    Bug fixes:
    - `self.setexception(e)` was a typo for `self.set_exception(e)` and raised
      AttributeError whenever a DropIndexException occurred.
    - call() computed `return_value = self.check()` but never completed the
      task nor returned the value; it now does both (matching the sibling
      tasks in this file).
    - `retry_time` parameter was accepted but ignored (`self.retry_time = 2`).
    """

    def __init__(self, server, bucket, index_name, query, n1ql_helper=None,
                 retry_time=2):
        super(DropIndexTask, self).__init__("drop_index_task")
        self.server = server
        self.bucket = bucket
        self.query = query
        self.index_name = index_name
        self.n1ql_helper = n1ql_helper
        self.timeout = 900
        self.retry_time = retry_time  # FIX: was hard-coded to 2
        self.retried = 0

    def call(self):
        self.start_task()
        try:
            # Only attempt the drop if the index actually exists.
            check = self.n1ql_helper._is_index_in_list(
                self.bucket, self.index_name, server=self.server)
            if not check:
                raise DropIndexException("index {0} does not exist will not drop"
                                         .format(self.index_name))
            self.n1ql_helper.run_cbq_query(query=self.query, server=self.server)
            return_value = self.check()
            self.complete_task()  # FIX: was missing
            return return_value   # FIX: was missing
        except N1QLQueryException as e:
            # initial query failed, try again
            if self.retried < self.retry_time:
                self.retried += 1
                time.sleep(self.retry_time)
                return self.call()
        # catch and set all unexpected exceptions
        except DropIndexException as e:
            self.set_exception(e)  # FIX: was self.setexception(e)

    def check(self):
        try:
            # Verify correctness of result set: the index must be gone.
            check = self.n1ql_helper._is_index_in_list(
                self.bucket, self.index_name, server=self.server)
            if check:
                raise Exception("Index {0} not dropped as expected"
                                .format(self.index_name))
            return True
        except DropIndexException as e:
            # subsequent query failed! exit
            self.set_exception(e)
        # catch and set all unexpected exceptions
        except Exception as e:
            self.set_exception(e)
class PrintOpsRate(Task):
    """Continuously sample a bucket's ops/sec until end_task() is called,
    then plot the collected trend.

    Fix: the bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    narrowed to `except Exception` (the intent — surviving a rebalanced-out
    master node — is unchanged).
    """

    def __init__(self, cluster, bucket, sleep=1):
        super(PrintOpsRate, self).__init__("print_ops_rate_{}"
                                           .format(bucket.name))
        self.cluster = cluster
        self.bucket = bucket
        self.bucket_helper = BucketHelper(self.cluster.master)
        self.sleep = sleep          # seconds between samples
        self.stop_task = False      # set by end_task() to stop the loop

    def call(self):
        ops_rate_trend = list()   # list of monotonic ops-rate segments
        t_ops_rate = list()       # current (rising) segment
        self.start_task()
        while not self.stop_task:
            try:
                bucket_stats = self.bucket_helper.fetch_bucket_stats(self.bucket)
                if 'op' in bucket_stats and \
                        'samples' in bucket_stats['op'] and \
                        'ops' in bucket_stats['op']['samples']:
                    ops = bucket_stats['op']['samples']['ops'][-1]
                    self.test_log.debug("Ops rate for '%s': %f"
                                        % (self.bucket.name, ops))
                    # A drop in the rate starts a new trend segment.
                    if t_ops_rate and t_ops_rate[-1] > ops:
                        ops_rate_trend.append(t_ops_rate)
                        t_ops_rate = list()
                    t_ops_rate.append(ops)
                    time.sleep(self.sleep)
            except Exception:
                # Case when cluster.master is rebalanced out of the cluster:
                # re-create the helper against the (updated) master and back off.
                self.bucket_helper = BucketHelper(self.cluster.master)
                time.sleep(20)
        if t_ops_rate:
            ops_rate_trend.append(t_ops_rate)
        plot_graph(self.test_log, self.bucket.name, ops_rate_trend)
        self.complete_task()

    def end_task(self):
        """Signal the sampling loop to stop after the current iteration."""
        self.stop_task = True
class BucketCreateTask(Task):
def __init__(self, server, bucket):
super(BucketCreateTask, self).__init__("bucket_create_task")
self.server = server
self.bucket = bucket
if self.bucket.priority is None or self.bucket.priority.lower() is 'low':
self.bucket_priority = 3
else:
self.bucket_priority = 8
self.retries = 0
def call(self):
try:
rest = RestConnection(self.server)
except ServerUnavailableException as error:
self.log.error("RestConnection failed for {0}: {1}"
.format(self.server.ip, error))
self.result = False
return
info = rest.get_nodes_self()
if self.bucket.ramQuotaMB <= 0:
self.size = info.memoryQuota * 2 / 3
if int(info.port) in xrange(9091, 9991):
try:
self.port = info.port
BucketHelper(self.server).create_bucket(self.bucket.__dict__)
# return_value = self.check()
self.complete_task()
self.result = True
return
except Exception as e:
| |
<gh_stars>1-10
""" Node.
Do not edit this file by hand.
This is generated by parsing api.html service doc.
"""
from ambra_sdk.exceptions.service import AccountNotFound
from ambra_sdk.exceptions.service import AlreadyConnected
from ambra_sdk.exceptions.service import AlreadyDone
from ambra_sdk.exceptions.service import AlreadyPending
from ambra_sdk.exceptions.service import ConflictingStatus
from ambra_sdk.exceptions.service import FilterNotFound
from ambra_sdk.exceptions.service import Full
from ambra_sdk.exceptions.service import HasDestinations
from ambra_sdk.exceptions.service import InvalidCondition
from ambra_sdk.exceptions.service import InvalidConfiguration
from ambra_sdk.exceptions.service import InvalidDateTime
from ambra_sdk.exceptions.service import InvalidEvent
from ambra_sdk.exceptions.service import InvalidField
from ambra_sdk.exceptions.service import InvalidFilter
from ambra_sdk.exceptions.service import InvalidJson
from ambra_sdk.exceptions.service import InvalidLinkage
from ambra_sdk.exceptions.service import InvalidMetric
from ambra_sdk.exceptions.service import InvalidOsType
from ambra_sdk.exceptions.service import InvalidRange
from ambra_sdk.exceptions.service import InvalidSortField
from ambra_sdk.exceptions.service import InvalidSortOrder
from ambra_sdk.exceptions.service import InvalidStatus
from ambra_sdk.exceptions.service import InvalidType
from ambra_sdk.exceptions.service import InvalidUuid
from ambra_sdk.exceptions.service import InvalidXml
from ambra_sdk.exceptions.service import MissingFields
from ambra_sdk.exceptions.service import NoNodeOverride
from ambra_sdk.exceptions.service import NotFound
from ambra_sdk.exceptions.service import NotPermitted
from ambra_sdk.exceptions.service import ScheduleIsOff
from ambra_sdk.exceptions.service import TryLater
from ambra_sdk.service.query import QueryO
from ambra_sdk.service.query import AsyncQueryO
from ambra_sdk.service.query import QueryOPSF
from ambra_sdk.service.query import AsyncQueryOPSF
class Node:
"""Node."""
    def __init__(self, api):
        """
        :param api: service API object; passed through to each Query as 'api'
        """
        self._api = api
def list(
self,
account_id=None,
):
"""List.
:param account_id: uuid of the account (optional)
"""
request_data = {
'account_id': account_id,
}
errors_mapping = {}
errors_mapping[('FILTER_NOT_FOUND', None)] = FilterNotFound('The filter can not be found. The error_subtype will hold the filter UUID')
errors_mapping[('INVALID_CONDITION', None)] = InvalidCondition('The condition is not support. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_FIELD', None)] = InvalidField('The field is not valid for this object. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_SORT_FIELD', None)] = InvalidSortField('The field is not valid for this object. The error_subtype will hold the field name this applies to')
errors_mapping[('INVALID_SORT_ORDER', None)] = InvalidSortOrder('The sort order for the field is invalid. The error_subtype will hold the field name this applies to')
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The account can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view this list')
query_data = {
'api': self._api,
'url': '/node/list',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
query_data['paginated_field'] = 'nodes'
return QueryOPSF(**query_data)
def public(
self,
account_id,
):
"""Public.
:param account_id: The account the user is in
"""
request_data = {
'account_id': account_id,
}
errors_mapping = {}
errors_mapping[('FILTER_NOT_FOUND', None)] = FilterNotFound('The filter can not be found. The error_subtype will hold the filter UUID')
errors_mapping[('INVALID_CONDITION', None)] = InvalidCondition('The condition is not support. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_FIELD', None)] = InvalidField('The field is not valid for this object. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_FILTER', None)] = InvalidFilter('Invalid filter field')
errors_mapping[('INVALID_SORT_FIELD', None)] = InvalidSortField('The field is not valid for this object. The error_subtype will hold the field name this applies to')
errors_mapping[('INVALID_SORT_ORDER', None)] = InvalidSortOrder('The sort order for the field is invalid. The error_subtype will hold the field name this applies to')
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The node or account can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to do this')
query_data = {
'api': self._api,
'url': '/node/public',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
query_data['paginated_field'] = 'nodes'
return QueryOPSF(**query_data)
def connect(
self,
account_id,
uuid,
message=None,
):
"""Connect.
:param account_id: The account the user is in
:param uuid: The node id
:param message: Message (optional)
"""
request_data = {
'account_id': account_id,
'message': message,
'uuid': uuid,
}
errors_mapping = {}
errors_mapping[('ALREADY_CONNECTED', None)] = AlreadyConnected('The node is already connected to the account')
errors_mapping[('ALREADY_PENDING', None)] = AlreadyPending('The node is pending connection already')
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The node or account can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to do this')
query_data = {
'api': self._api,
'url': '/node/connect',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return QueryO(**query_data)
def approve(
self,
account_id,
uuid,
):
"""Approve.
:param account_id: The account which requested the connection
:param uuid: The node id
"""
request_data = {
'account_id': account_id,
'uuid': uuid,
}
errors_mapping = {}
errors_mapping[('ALREADY_CONNECTED', None)] = AlreadyConnected('The node is already connected to the account')
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The node, node connection request or account can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to do this')
query_data = {
'api': self._api,
'url': '/node/approve',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return QueryO(**query_data)
def add(
self,
accelerator_id,
name,
type,
account_id=None,
category=None,
ctc_bucket=None,
facility_contact=None,
facility_contact_title=None,
facility_email=None,
facility_name=None,
facility_notes=None,
facility_zip=None,
group_id=None,
is_public=None,
location_id=None,
os_type=None,
uuid=None,
):
"""Add.
:param accelerator_id: uuid of the accelerator if this is an accelerator node
:param name: Description of the node
:param type: Type of node (STORAGE|HARVESTER|ACCELERATOR|CLEARINGHOUSE|VIRTUAL|UTILITY|XDS)
:param account_id: account_id
:param category: Node category (ACTIVE|INACTIVE|MIGRATION|TEST|DUPLICATE|INTEGRATED|ACCELERATOR) (optional)
:param ctc_bucket: Name of the S3 bucket to use for a cloud to cloud gateway (optional)
:param facility_contact: Name of the facility contact (optional)
:param facility_contact_title: Title of the facility contact (optional)
:param facility_email: Email of the facility contact (optional)
:param facility_name: Name of the facility it is installed at (optional)
:param facility_notes: Notes about the facility (optional)
:param facility_zip: Zip code of the facility it is installed at (optional)
:param group_id: group_id
:param is_public: Flag if the node is public (optional)
:param location_id: location_id
:param os_type: Node OS type, used with HARVESTER node type only (WINDOWS|MACOS) (optional)
:param uuid: uuid of the node (optional, you can use this to explicitly set the UUID)
"""
request_data = {
'accelerator_id': accelerator_id,
'account_id': account_id,
'category': category,
'ctc_bucket': ctc_bucket,
'facility_contact': facility_contact,
'facility_contact_title': facility_contact_title,
'facility_email': facility_email,
'facility_name': facility_name,
'facility_notes': facility_notes,
'facility_zip': facility_zip,
'group_id': group_id,
'is_public': is_public,
'location_id': location_id,
'name': name,
'os_type': os_type,
'type': type,
'uuid': uuid,
}
errors_mapping = {}
errors_mapping[('ACCOUNT_NOT_FOUND', None)] = AccountNotFound('The account was not found')
errors_mapping[('INVALID_LINKAGE', None)] = InvalidLinkage('The linkage is invalid')
errors_mapping[('INVALID_OS_TYPE', None)] = InvalidOsType('Invalid OS type of node')
errors_mapping[('INVALID_TYPE', None)] = InvalidType('Invalid type of node')
errors_mapping[('INVALID_UUID', None)] = InvalidUuid('Invalid uuid format or this uuid is already in use')
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to add a node to this account')
query_data = {
'api': self._api,
'url': '/node/add',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return QueryO(**query_data)
def set(
self,
uuid,
category=None,
configuration=None,
ctc_bucket=None,
facility_contact=None,
facility_contact_title=None,
facility_email=None,
facility_name=None,
facility_notes=None,
facility_zip=None,
is_public=None,
monitor_email=None,
monitor_node_last_send=None,
monitor_node_last_send_threshold=None,
monitor_node_ping=None,
monitor_node_slow_push=None,
monitor_node_slow_push_threshold=None,
monitor_study_create=None,
monitor_study_create_threshold=None,
name=None,
reload_configuration=None,
serial_no=None,
setting_param=None,
settings=None,
storage_namespace=None,
warning_email=None,
):
"""Set.
:param uuid: The node id
:param category: Node category (ACTIVE|INACTIVE|MIGRATION|TEST|DUPLICATE|INTEGRATED) (optional)
:param configuration: The configuration as a JSON hash of key values pairs (optional)
:param ctc_bucket: Name of the S3 bucket to use for a cloud to cloud gateway (optional)
:param facility_contact: Name of the facility contact (optional)
:param facility_contact_title: Title of the facility contact (optional)
:param facility_email: Email of the facility contact (optional)
:param facility_name: Name of the facility it is installed at (optional)
:param facility_notes: Notes about the facility (optional)
:param facility_zip: Zip code of the facility it is installed at (optional)
:param is_public: Flag if the node is public (optional)
:param monitor_email: Email address(es) to send monitor failure notices (optional)
:param monitor_node_last_send: Check if the node has sent a study recently (optional)
:param monitor_node_last_send_threshold: Threshold in minutes for triggering the monitor_node_last_send notification (optional)
:param monitor_node_ping: Check if the node is pinging (optional)
:param monitor_node_slow_push: Check if the node is pushing slowly (optional)
:param monitor_node_slow_push_threshold: Threshold in minutes for triggering the monitor_node_slow_push notification (optional)
:param monitor_study_create: Check if the node is sending studies normally (optional)
:param monitor_study_create_threshold: Threshold in minutes for triggering the monitor_study_create notification (optional)
:param name: Description of the node (optional)
:param reload_configuration: If this flag is | |
"source_id": "SMILES: C[C@@H]1C[C@H]2[C@@H]3C[C@@H](C4=CC(=O)C=C[C@@]4([C@]3([C@H](C[C@@]2([C@]1(C(=O)SCF)O)C)O)F)C)F",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "CAS: 90566-53-3",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "MESH:D000068298",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "CHEMBL:CHEMBL1473",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "PUBCHEM:5311101",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "PUBCHEM:62924",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "PUBCHEM:24847768",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "PUBCHEM:122130768",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "PUBCHEM:134687786",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1945044",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1945048",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1872967",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895697",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895989",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895991",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1148646",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895996",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896001",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896006",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896019",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896023",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896027",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896031",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896165",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896185",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896190",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896212",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896222",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896229",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896235",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896237",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896243",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896245",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896271",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896273",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797892",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797933",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1539891",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1539893",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1648785",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1648789",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547660",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547664",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547668",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547672",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:2045382",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797919",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797909",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797935",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797849",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1941536",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1941607",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1941613",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1946589",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1918199",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1918205",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1918211",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1869712",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1996212",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1148643",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1148645",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895487",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895987",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895990",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895994",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:895999",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896004",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896018",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896021",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896025",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896030",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896161",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896184",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896186",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896209",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896218",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896228",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896231",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896236",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896239",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896244",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896267",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896272",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896294",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:896300",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797847",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797890",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1797907",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1945039",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1945047",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1424889",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1424899",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547658",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547663",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547666",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1547671",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1648783",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:1648788",
"target_id": "MONDO:0004979",
"attributes": {
"p_val": 0.17901741601915602
}
},
{
"type": "associated_with",
"source_id": "rxcui:2045377",
"target_id": "MONDO:0004979",
| |
<reponame>Sillte/fairypptx
"""
Policy:
-----------
`converter.config` will contain the required extensions for expansion.
Memorandum
----------
When you want to `newline` in markdown,
please consider the usage of ` `.
Reference
------
# https://hackage.haskell.org/package/pandoc-types-1.22.1/docs/Text-Pandoc-Definition.html
"""
import subprocess
from pathlib import Path
from fairypptx import Shape, TextRange
from fairypptx import Table as PPTXTable
from fairypptx import constants
from fairypptx import text
from contextlib import contextmanager
import json
# [NOTE]
# This is temporary solution.
# I feel it is better to configure
# these fontsizes...
DEFAULT_FONTSIZE = 16
class Formatter:
    """Default formatter: resets a TextRange to the baseline appearance.

    A formatter is invoked whenever a `TextRange`'s format should be
    (re)applied.
    """
    def __call__(self, textrange):
        # Baseline defaults: plain weight, no underline, default size,
        # bullets hidden.
        textrange.font.bold = False
        textrange.font.underline = False
        textrange.font.size = DEFAULT_FONTSIZE
        textrange.api.ParagraphFormat.Bullet.Visible = False
class HeaderFormatter:
    """Formatter for markdown `Header` elements.

    The font size is derived from the header level; levels 1-3 are also
    underlined. (`converter.config` is the place to customize behavior.)
    """
    def __init__(self, level, converter):
        self.level = level
        self.converter = converter
        # Header level -> font size; other levels fall back to DEFAULT_FONTSIZE.
        self.level_to_fontsize = {1: 36, 2: 24, 3: 20, 4: 18, 5: 16}
    def __call__(self, textrange):
        fontsize = self.level_to_fontsize.get(self.level, DEFAULT_FONTSIZE)
        if self.level <= 3:
            textrange.font.underline = True
        textrange.font.bold = True
        textrange.font.size = fontsize
        textrange.api.ParagraphFormat.Bullet.Visible = False
class Converter:
    """Convert a pandoc JSON AST (`JsonAst`) to `fairypptx.Markdown`.

    Element classes register themselves in `Converter.elements` and expose
    a `from_tag` interface; `parse` walks the AST blocks and delegates each
    tag to its registered class.
    """
    # Registry of Element subclasses, keyed by pandoc tag name ("t").
    elements = dict()
    def __init__(self, config=None):
        # Formatter stack; the innermost scope's formatter is applied on insert.
        self._formatters = [Formatter()]
        self.markdown = None
        if config is None:
            config = {}
        self.config = config
        # Raw indent counter; exposed via `indent_level` (minimum 1).
        self._indent_level = 0
    # Register the `Element`.
    @staticmethod
    def element(cls):
        # Registered classes must implement the `from_tag` interface.
        assert hasattr(cls, "from_tag")
        name = cls.__name__
        Converter.elements[name] = cls
    def to_cls(self, tag):
        """Return the Element class registered for this tag's "t" name."""
        name = tag["t"]
        element = Converter.elements[name]
        return element
    @property
    def formatter(self):
        # The most recently pushed formatter (innermost `formatter_scope`).
        return self._formatters[-1]
    @property
    def indent_level(self):
        """Return the level of indent (PowerPoint indent levels start at 1)."""
        if self._indent_level == 0:
            return 1
        return self._indent_level
    def insert(self, text):
        """Insert `text` with the current formatter and indent level.

        Counteracts miscellaneous TextRange specifications observed (not
        fully understood) while inserting:
        * empty text appears illegal for `InsertAfter`, so it is skipped;
        * changing `IndentLevel` may also change previous paragraphs'
          indent, so the previous level is restored afterwards.

        [TODO]: Accessing the last paragraph this way is not efficient yet.
        Take it into consideration later.
        """
        # I do not know why, but empty text seems illegal, which may differ from MSDN...?
        # https://docs.microsoft.com/ja-jp/office/vba/api/powerpoint.textrange.insertafter
        if text == "":
            return None
        last_textrange = self.markdown.shape.textrange
        # I do not know why, but when the `IndentLevel` is changed,
        # the previous paragraphs' indent also may change, unintentionally.
        if not last_textrange.text:
            last_indent_level = None
        else:
            last_indent_level = last_textrange.paragraphs[-1].api.IndentLevel
        last_n_paragraph = len(last_textrange.paragraphs)
        is_prev_paragraph = bool(last_textrange.paragraphs)
        textrange_api = last_textrange.api
        nt_api = textrange_api.InsertAfter(text)
        textrange = TextRange(nt_api)
        textrange.api.IndentLevel = self.indent_level
        self.formatter(textrange)
        # Re-fetch: the paragraph list is reset after the insertion.
        paragraphs = self.markdown.shape.textrange.paragraphs
        # If a new paragraph appeared, restore the indent level of the
        # second-to-last paragraph (the one that existed before the insert).
        is_inc_paragraph = (last_n_paragraph < len(paragraphs))
        if is_inc_paragraph and is_prev_paragraph:
            assert 2 <= len(paragraphs)
            paragraphs[-2].api.IndentLevel = last_indent_level
        return textrange
    def set_tail_cr(self, n_cr=1):
        """Ensure the text ends with exactly `n_cr` carriage returns."""
        paragraphs = self.markdown.shape.textrange.paragraphs
        if not paragraphs:
            self.insert("\r" * n_cr)
            return
        from itertools import takewhile
        text = paragraphs[-1].text
        # Count the trailing "\r" characters of the last paragraph.
        n_tail_cr = len(list(takewhile(lambda t: t == "\r", reversed(text))))
        # I do not know why, but `len(text) -` seems required here.
        stem = text[:len(text) - n_tail_cr]
        if n_tail_cr == n_cr:
            return
        # Setting `paragraphs[-1].text` directly may change the format;
        # we would like to prevent that as much as possible, hence
        # `insert` is used for the growing case.
        if n_tail_cr < n_cr:
            self.insert("\r" * (n_cr - n_tail_cr))
        else:
            # [TODO]: `Delete` may be more appropriate?
            # https://docs.microsoft.com/ja-jp/office/vba/api/powerpoint.textrange.delete
            paragraphs[-1].text = stem + "\r" * n_cr
    @contextmanager
    def formatter_scope(self, formatter):
        """Temporarily push `formatter` as the active formatter."""
        self._formatters.append(formatter)
        yield formatter
        self._formatters.pop()
    @contextmanager
    def inc_indent(self):
        """Increase `indent_level` by one inside the scope.
        """
        self._indent_level += 1
        yield
        self._indent_level -= 1
    def parse(self, json_ast):
        """Build a `Markdown` shape from `json_ast`.

        `json_ast` may be a pandoc-JSON string, a path or markdown text
        (converted via pandoc), or an already-parsed dict.
        """
        if self._is_json(json_ast):
            json_ast = json.loads(json_ast)
        elif isinstance(json_ast, (str, Path)):
            json_ast = self._from_str_or_path(json_ast)
        assert isinstance(json_ast, dict)
        blocks = json_ast["blocks"]
        # NOTE(review): debug output below (`print`/`pprint`) looks like a
        # leftover; consider removing or replacing with logging.
        from pprint import pprint
        print("INPUT")
        pprint(blocks)
        shape = Shape.make(1)  # Temporary.
        from fairypptx import Markdown  # For dependency hierarchy
        markdown = Markdown(shape)
        self.markdown = markdown  # Set `self.markdown`.
        from pprint import pprint
        for block in blocks:
            pprint(block)
            # Delegate each top-level block to its registered Element class.
            cls = self.elements[block["t"]]
            cls.from_tag(block, markdown, self)
        markdown.shape.tighten()
        markdown.shape.textrange.paragraphformat.api.Alignment = constants.ppAlignLeft
        return markdown
    @classmethod
    def _from_str_or_path(self, content):
        """Convert a markdown string or file path to a pandoc JSON AST dict.

        NOTE(review): classmethod first argument is named `self`; it works,
        but `cls` is conventional.
        """
        if self._is_existent_path(content):
            content = Path(content).read_text("utf8")
        ret = subprocess.run("pandoc -t json",
                             universal_newlines=True,
                             stdout=subprocess.PIPE,
                             input=content, encoding="utf8")
        assert ret.returncode == 0
        return json.loads(ret.stdout)
    @classmethod
    def _is_json(self, json_ast):
        """Return True if `json_ast` can be parsed as JSON."""
        try:
            json.loads(json_ast)
        except Exception as e:
            return False
        return True
    @classmethod
    def _is_existent_path(self, content):
        """Return True if `content` denotes an existing filesystem path."""
        try:
            return Path(content).exists()
        except OSError:
            # e.g. `content` is too long or malformed to be a path.
            return False
class Element:
    """Base class for pandoc AST node handlers.

    Every subclass is automatically registered with the Converter's dispatch
    table via `__init_subclass__`.
    """
    def __init_subclass__(cls, **kwargs):
        # Cooperate with other __init_subclass__ hooks before registering.
        super().__init_subclass__(**kwargs)
        Converter.element(cls)
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        """Render this pandoc tag into `markdown`. Subclasses must override."""
        raise NotImplementedError("")
    @classmethod
    def delegate_inlines(cls, inlines, markdown, converter):
        """Dispatch each inline tag to its registered Element class."""
        for inline in inlines:
            # Use a distinct name: the original rebound `cls`, shadowing the
            # classmethod argument.
            element_cls = converter.to_cls(inline)
            element_cls.from_tag(inline, markdown, converter)
class Para(Element):
    """A pandoc `Para` block: renders its inlines, then ends the paragraph."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        inlines = tag["c"]
        cls.delegate_inlines(inlines, markdown, converter)
        # "\r" terminates the paragraph in PowerPoint text.
        converter.insert("\r")
class Plain(Element):
    """A pandoc `Plain` block: renders its inlines without a trailing paragraph break."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        inlines = tag["c"]
        cls.delegate_inlines(inlines, markdown, converter)
class Str(Element):
    """A pandoc `Str` inline: inserts the literal text."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        string = tag["c"]
        converter.insert(string)
class Space(Element):
    """A pandoc `Space` inline: inserts a single space."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        converter.insert(" ")
class LineBreak(Element):
    """A pandoc `LineBreak` inline: inserts a soft line break."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        # PowerPoint uses a vertical tab for an in-paragraph line break.
        converter.insert("\013")  # vertical tab.
class SoftBreak(Element):
    """A pandoc `SoftBreak` inline: deliberately ignored (no text inserted)."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        pass
        # converter.insert(" ")
class Header(Element):
    """A pandoc `Header` block: renders inlines with a level-dependent format."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        level, attrs, inlines = tag["c"]
        formatter = HeaderFormatter(level, converter)
        with converter.formatter_scope(formatter):
            # Performs setting of `Format`.
            cls.delegate_inlines(inlines, markdown, converter)
            # Headers are followed by an empty paragraph.
            converter.set_tail_cr(2)
class Strong(Element):
    """A pandoc `Strong` inline: renders its inlines in bold."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        inlines = tag["c"]
        def emphasize(textrange):
            # Applied to each range inserted while this scope is active.
            textrange.font.bold = True
        with converter.formatter_scope(emphasize):
            # Performs setting of `Format`.
            cls.delegate_inlines(inlines, markdown, converter)
class CodeBlock(Element):
    """A pandoc `CodeBlock`: inserts the raw code text (attributes are ignored)."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        attrs, string = tag["c"]
        converter.insert(string)
class Code(Element):
    """A pandoc inline `Code` span: inserts the raw text (attributes are ignored)."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        attrs, string = tag["c"]
        converter.insert(string)
def _change_bullet_type(paragraph, bullet_type):
    """Set `paragraph`'s bullet type, working around a PowerPoint quirk.

    [BUG] / [UNSOLVED] For unknown reasons, a change to
    `ParagraphFormat.Bullet.Type` is sometimes not applied when `IndentLevel`
    equals the previous paragraph's. Empirically, bouncing `IndentLevel` to a
    different value, setting `Bullet.Type`, and restoring the level avoids
    the problem.
    """
    indent_level = paragraph.api.IndentLevel
    assert 1 <= indent_level <= 5, "BUG."
    # Temporarily move to level 5 so the bullet-type change registers.
    paragraph.api.IndentLevel = 5
    paragraph.api.ParagraphFormat.Bullet.Type = bullet_type
    paragraph.api.IndentLevel = indent_level
class BulletList(Element):
    """A pandoc `BulletList`: renders each item as an unnumbered bullet."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        blocks = tag["c"]
        def bullet_list(textrange):
            # Formatter applied to every range inserted while the list renders.
            textrange.api.ParagraphFormat.Bullet.Visible = True
            # Indent-bounce workaround; see `_change_bullet_type`.
            _change_bullet_type(textrange, constants.ppBulletUnnumbered)
            textrange.font.bold = False
            textrange.font.underline = False
            textrange.font.size = DEFAULT_FONTSIZE
        with converter.formatter_scope(bullet_list), converter.inc_indent():
            for block in blocks:
                for inlines in block:
                    cls = converter.to_cls(inlines)
                    cls.from_tag(inlines, markdown, converter)
                # Each item ends with exactly one paragraph break.
                converter.set_tail_cr(1)
        # For survey.
        #n_length = len(markdown.shape.textrange.text)
        #sub_api = markdown.shape.textrange.api.Characters(n_length - 1, 1)
        #print("sub_api", sub_api.Text)
        #TextRange(sub_api).font.bold = False
        #print(TextRange(sub_api).font.bold)
class OrderedList(Element):
    """A pandoc `OrderedList`: renders each item as a numbered bullet."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        # `list_attributes` (start number, style, delimiter) is currently unused.
        list_attributes, blocks = tag["c"]
        def bullet_list(textrange):
            # Formatter applied to every range inserted while the list renders.
            textrange.api.ParagraphFormat.Bullet.Visible = constants.msoTrue
            textrange.api.ParagraphFormat.Bullet.Type = constants.ppBulletNumbered
            textrange.font.size = DEFAULT_FONTSIZE
        with converter.formatter_scope(bullet_list), converter.inc_indent():
            for block in blocks:
                for inlines in block:
                    cls = converter.to_cls(inlines)
                    cls.from_tag(inlines, markdown, converter)
                # Each item ends with exactly one paragraph break.
                converter.set_tail_cr(1)
class Link(Element):
    """A pandoc `Link` inline: inserts the link text and attaches a hyperlink."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        #print("Link", tag); assert False
        attrs, inlines, targets = tag["c"]
        # Flatten the inline contents into plain text for the link label.
        string = "".join([str(inline.get("c", "")) for inline in inlines])
        path = targets[0]
        textrange = converter.insert(string)
        # NOTE(review): `insert` returns the TextRange wrapper; this appears to
        # rely on attribute delegation to reach the COM `ActionSettings` —
        # confirm it should not be `textrange.api.ActionSettings`.
        hyperlink = textrange.ActionSettings(constants.ppMouseClick)
        hyperlink.Action = constants.ppActionHyperlink
        hyperlink.Hyperlink.Address = path
class HorizontalRule(Element):
    """A pandoc `HorizontalRule`: intentionally a no-op (no PowerPoint equivalent here)."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        pass
# Below elements are incomplete.
#
class Table(Element):
    """A pandoc `Table` block. Incomplete: creates an empty placeholder table."""
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        #print("Link", tag); assert False
        #inlines, alignment, number, columns, rows = tag["c"]
        # Pandoc shape: Table Attr Caption [ColSpec] TableHead [TableBody] TableFoot
        attr, caption, colspec, table_head, table_body, table_foot = tag["c"]
        print("Currently `Table` cannot be handled")
        import numpy as np
        # Placeholder 2x2 empty table; the real cell contents are not rendered.
        values = np.array([["" for _ in range(2)] for _ in range(2)])
        table = PPTXTable.make(values)
        # Currently (2020/01/06), `markdown` cannot handle
        # multiple shapes, so the generated Table is left orphaned.
        # markdown._shapes.append(table)
class RawInline(Element):
    """Handling of raw inline content (e.g. HTML).

    Incomplete: the tag is unpacked and then discarded — nothing is rendered.
    """
    @classmethod
    def from_tag(cls, tag, markdown, converter):
        format_, string = tag["c"]
def from_jsonast(content, config=None):
    """Build a PowerPoint Markdown shape from `content` (a pandoc JSON AST,
    JSON string, markdown text, or path); `config` is forwarded to Converter.
    """
    converter = Converter(config)
    return converter.parse(content)
if __name__ == "__main__":
pass
"""
target = Shape().textrange.paragraphs[-1]
print(target.text)
target.api.IndentLevel = 2
s = target.api.InsertAfter("\r\nHOGEHOIGE")
s.IndentLevel = 1
print(target.api.Text, "check")
print(Shape().textrange.paragraphs[-1].text)
print(Shape().textrange.api.IndentLevel); exit(0)
"""
#add(textrange, "\r")
#exit(0)
#print(Shape().textrange.text); exit(0)
#TextRange().api.IndentLevel = 2; exit(0)
#print(Shape().textrange.text); exit(0)
sample = """
{"blocks": [{"t": "Para", "c": [{"t": "Str", "c": "Three"}]}]}
"""
SCRIPT = """
* ITEM1
1. ITEM1-1
2. ITEM1-2
* ITEM1-3
* ITEM1-4
| |
<gh_stars>1-10
import logging
import pyfftw
import dask
import warnings
import numpy as np
import time
import traceback
from multiprocessing import cpu_count
from dask.diagnostics import ProgressBar
from ghostipy.utils import (hz_to_normalized_rad, normalized_rad_to_hz)
from ghostipy.spectral.wavelets import (Wavelet, MorseWavelet, AmorWavelet, BumpWavelet)
__all__ = ['cwt']
def cwt(data, *, fs=1, timestamps=None, wavelet=MorseWavelet(gamma=3, beta=20),
freq_limits=None, freqs=None, voices_per_octave=10,
n_workers=cpu_count(), verbose=False, method='full',
derivative=False, remove_mean=False, boundary='mirror',
coi_threshold=1/(np.e**2), describe_dims=False,
cwt_out=None):
"""Computes the continuous wavelet transform.
Parameters
----------
data : np.ndarray, with shape (n_timepoints, )
Data with which to compute the CWT
fs : float
Sampling rate of the data in Hz.
timestamps : np.ndarray, with shape (n_timepoints, ) optional
Timestamps corresponding to the data, in seconds.
If None, they will be computed automatically based on the
assumption that all the data are one contiguous block, and
the units will be in seconds.
wavelet : ghostipy.wavelet
Type of wavelet to use for the transform.
Default is a Morse wavelet with beta=3 and gamma=20.
freq_limits : list, optional
List of [lower_bound, upper_bound] for frequencies to use,
in units of Hz. Note that a reference set of frequencies
is generated on the shortest segment of data since that
determines the lowest frequency that can be used. If the
bounds specified by 'freq_limits' are outside the bounds
determined by the reference set, 'freq_limits' will be
adjusted to be within the bounds of the reference set.
freqs : array-like, optional
Frequencies to analyze, in units of Hz.
Note that a reference set of frequencies is computed on the
shortest segment of data since that determines the lowest
frequency that can be used. If any frequencies specified in
'freqs' are outside the bounds determined by the reference
set, 'freqs' will be adjusted such that all frequencies in
'freqs' will be within those bounds.
voices_per_octave : int, optional
Number of wavelet frequencies per octave. Note that this
parameter is not used if frequencies were already specified
by the 'freqs' option.
Default is 10.
n_workers : integer, optional
Number of parallel jobs to use.
Default is the total number of CPUs (which may be virtual).
verbose : boolean, optional
Whether to print messages displaying this function's progress.
Default is False.
method: {'full', 'ola'}, optional
Which algorithm to use for computing the CWT. 'ola' will give
superior performance for long lengths of data.
derivative: boolean, optional
Whether to compute the derivative of the CWT.
Default is False.
remove_mean: boolean, optional
Whether to remove the mean of the data before computing the CWT.
Default is False.
boundary: {'mirror', 'zeros', 'periodic'}, optional
To handle boundaries, the data are extended before computing the CWT
according to the following options:
'mirror' : Mirror/reflect the data at each end
'zeros': Add zeros at each end
'periodic': Treat the data as periodic
Note that regardless of the boundary method, the CWT should not be
treated as reliable within the cone of influence.
Default is 'mirror'.
coi_threshold : float, optional
The value C that determines the wavelet's cone of influence. The
maximum value P of the wavelet's power autocorrelation is taken
in the time domain. Then the cone of influence is given by the
region where the power autocorrelation is above C*P. Default value
for C is e^(-2).
describe_dims : boolean, optional
Whether to return the expected shape and dtype of the output and
return immediately (no CWT is computed). This option is
useful for out-of-core computation. While the expected shape
should not be changed, the dtype is only suggested, e.g. it is
acceptable to use a lower precision dtype (such as complex64 instead
of complex128 to save space)
Default is False.
cwt_out: array-like, optional
If specified, the CWT output coefficients will be stored here.
Useful if the output is too large to fit into memory and must instead
be saved to an array stored on disk.
Returns
-------
If 'describe_dims' is True:
shape, dtype : tuple
Expected output array shape and dtype
Otherwise:
coefs : np.ndarray, with shape (n_scales, n_timepoints)
Calculated continuous wavelet coefficients. Note that the scale
dimension is ordered by increasing wavelet scale, which corresponds
to decreasing frequency.
scales : np.ndarray, with shape (n_scales, )
Wavelet scales for which CWT was calculated.
frequencies : np.ndarray, with shape (n_frequencies, )
If a sampling rate is given, these will be the frequency
corresponding to each wavelet scale.
timestamps : np.array, with shape (n_timepoints, )
If timestamps were not specified, these are the timestamps
calculated for data samples. Pass through of input
timestamps otherwise.
cois : np.ndarray, with shape (n_cois, )
Cones of influence for each wavelet scale.
"""
t0 = time.time()
if verbose:
print("Using maximum of {} workers".format(n_workers))
if not isinstance(wavelet, Wavelet):
raise TypeError("Supplied wavelet must inherit from a ghostipy.Wavelet type")
if freqs is not None and freq_limits is not None:
raise ValueError("'freqs' and 'freq_limits' cannot both be used at the"
" same time. Either specify one or the other, or"
" leave both as unspecified")
if freqs is not None and voices_per_octave is not None:
raise ValueError("'freqs' and 'voices_per_octave' cannot both be used"
" at the same time. Either specify one or the other,"
" or leave both as unspecified")
if method not in ('full', 'ola'):
raise ValueError(f"Invalid method {method}")
N = data.shape[0]
if verbose:
print("Determining smallest scale...")
ref_scale, ref_coi = wavelet.reference_coi(threshold=coi_threshold)
max_scale = N / ref_coi * ref_scale
w_ref_low = wavelet.scale_to_freq(max_scale).squeeze()
w_ref_high = np.pi
if verbose:
print(
f"Smallest reference frequency: {normalized_rad_to_hz(w_ref_low, fs):0.4f} Hz")
if freqs is not None:
# just in case user didn't pass in sorted
# frequencies after all
freqs = np.sort(freqs)
ws = hz_to_normalized_rad(freqs, fs)
w_low = ws[0]
w_high = ws[-1]
if w_low < w_ref_low:
warnings.warn(
f"Warning: Lower frequency limit of {freq_limits[0]} is less than the smallest "
f"recommended frequency of {normalized_rad_to_hz(w_ref_low, fs):0.4f} Hz")
if w_high > w_ref_high:
warnings.warn(
f"Warning: Upper frequency limit of {freq_limits[1]} is greater than the largest "
f"recommended frequency of {normalized_rad_to_hz(w_ref_high, fs):0.4f} Hz")
elif freq_limits is not None:
# just in case user didn't pass in limits as [lower_bound, upper_bound]
freq_limits = np.sort(freq_limits)
w_low = hz_to_normalized_rad(freq_limits[0], fs)
w_high = hz_to_normalized_rad(freq_limits[1], fs)
if w_low < w_ref_low:
warnings.warn(
f"Lower frequency limit of {freq_limits[0]} is less than the smallest "
f"recommended frequency of {normalized_rad_to_hz(w_ref_low, fs):0.4f} Hz")
if w_high > w_ref_high:
warnings.warn(
f"Upper frequency limit of {freq_limits[1]} is greater than the largest "
f"recommended frequency of {normalized_rad_to_hz(w_ref_high, fs):0.4f} Hz")
else:
w_low = w_ref_low
w_high = w_ref_high
if freqs is None:
n_octaves = np.log2(w_high / w_low)
j = np.arange(n_octaves * voices_per_octave)
ws = w_high / 2**(j/voices_per_octave)
scales = wavelet.freq_to_scale(ws)
cois = wavelet.coi(scales, ref_scale, ref_coi)
if remove_mean:
# Don't do in place here, even though it saves memory,
# as that would mutate the original data
data = data - data.mean()
extend_len = int(np.ceil(np.max(cois)))
if extend_len > N:
warnings.warn(f"Cannot add {extend_len} points to satisfy requested"
f" boundary policy. Shorting this value to data length {N}")
extend_len = N
if boundary == 'mirror':
data = np.hstack((np.flip(data[:extend_len]), data, np.flip(data[-extend_len:])))
elif boundary == 'zeros':
data = np.hstack((np.zeros(extend_len), data, np.zeros(extend_len)))
elif boundary == 'periodic':
data = np.hstack((data[-extend_len:], data, data[:extend_len]))
else:
extend_len = 0
# Set up array as C contiguous since we will be iterating row-by-row
n_bits = len(scales) * data.shape[0] * 16
if verbose:
print(f"Output space requirement: {n_bits/1e9} GB = {n_bits/(1024**3)} GiB")
output_shape = (scales.shape[0], N)
dtype = '<c16'
if describe_dims:
if verbose:
print("Calculating output array sizes. Skipping transform computation")
print(f"Output array with 'derivative' {derivative}"
f" should have shape {output_shape} with dtype {dtype}")
return output_shape, dtype
if cwt_out is not None:
if verbose:
print("Using passed-in output array")
if cwt_out.shape != output_shape:
raise ValueError(
f"Provided output array has shape {coefs.shape}"
f" but needs shape {output_shape}")
coefs = cwt_out
else:
if verbose:
print("Allocating output array")
coefs = pyfftw.zeros_aligned(output_shape, dtype='complex128')
######################################################################
# Set up CWT parallel computation
task_list = []
if method == 'ola':
for ii in range(scales.shape[0]):
task = dask.delayed(_cwt_ola_fftw)(
data,
wavelet,
scales[ii],
derivative,
coefs,
ii,
extend_len,
1,
cois[-1])
task_list.append(task)
elif method == 'full':
if verbose:
| |
(List, Set, Tuple)
), "Please specify a list of agent ids to watch"
assert isinstance(
agents_list_alive.minimum_agents_alive_in_list, int
), "Please specify an int for minimum number of alive agents in the list"
assert (
agents_list_alive.minimum_agents_alive_in_list >= 0
), "minimum_agents_alive_in_list should not be negative"
agents_alive_check = [
1 if id in agent_manager.agent_ids else 0
for id in agents_list_alive.agents_list
]
if (
agents_alive_check.count(1)
< agents_list_alive.minimum_agents_alive_in_list
):
return True
return False
    @classmethod
    def _is_done_with_events(cls, sim, agent_id, vehicle, sensor_state):
        """Evaluate the done-criteria for `vehicle` and build the matching Events.

        Returns:
            (done, events): `done` reflects only the criteria enabled in the
            agent interface's `done_criteria`; `events` records every raw
            condition regardless of configuration.
        """
        interface = sim.agent_manager.agent_interface_for_agent_id(agent_id)
        done_criteria = interface.done_criteria
        # TODO: the following calls nearest_lanes (expensive) 6 times
        reached_goal = cls._agent_reached_goal(sim, vehicle)
        collided = sim.vehicle_did_collide(vehicle.id)
        is_off_road = cls._vehicle_is_off_road(sim, vehicle)
        is_on_shoulder = cls._vehicle_is_on_shoulder(sim, vehicle)
        is_not_moving = cls._vehicle_is_not_moving(sim, vehicle)
        reached_max_episode_steps = sensor_state.reached_max_episode_steps
        is_off_route, is_wrong_way = cls._vehicle_is_off_route_and_wrong_way(
            sim, vehicle
        )
        agents_alive_done = cls._agents_alive_done_check(
            sim.agent_manager, done_criteria.agents_alive
        )
        # Goal / max-steps / agents-alive terminate unconditionally; the rest
        # are gated by the corresponding done_criteria flags.
        done = (
            (is_off_road and done_criteria.off_road)
            or reached_goal
            or reached_max_episode_steps
            or (is_on_shoulder and done_criteria.on_shoulder)
            or (collided and done_criteria.collision)
            or (is_not_moving and done_criteria.not_moving)
            or (is_off_route and done_criteria.off_route)
            or (is_wrong_way and done_criteria.wrong_way)
            or agents_alive_done
        )
        events = Events(
            collisions=sim.vehicle_collisions(vehicle.id),
            off_road=is_off_road,
            reached_goal=reached_goal,
            reached_max_episode_steps=reached_max_episode_steps,
            off_route=is_off_route,
            on_shoulder=is_on_shoulder,
            wrong_way=is_wrong_way,
            not_moving=is_not_moving,
            agents_alive_done=agents_alive_done,
        )
        return done, events
    @classmethod
    def _agent_reached_goal(cls, sim, vehicle):
        """Return True when the vehicle's mission reports completion."""
        sensor_state = sim.vehicle_index.sensor_state_for_vehicle_id(vehicle.id)
        distance_travelled = vehicle.trip_meter_sensor()
        mission = sensor_state.plan.mission
        return mission.is_complete(vehicle, distance_travelled)
    @classmethod
    def _vehicle_is_off_road(cls, sim, vehicle):
        """Return True when no road exists under the vehicle's center position."""
        return not sim.scenario.road_map.road_with_point(Point(*vehicle.position))
@classmethod
def _vehicle_is_on_shoulder(cls, sim, vehicle):
# XXX: this isn't technically right as this would also return True
# for vehicles that are completely off road.
for corner_coordinate in vehicle.bounding_box:
if not sim.scenario.road_map.road_with_point(Point(*corner_coordinate)):
return True
return False
    @classmethod
    def _vehicle_is_not_moving(cls, sim, vehicle):
        """Return True when the vehicle has been (nearly) immobile for 60 seconds."""
        last_n_seconds_considered = 60
        # Flag if the vehicle has been immobile for the past 60 seconds
        if sim.elapsed_sim_time < last_n_seconds_considered:
            # Not enough elapsed simulation time to make the judgement.
            return False
        distance = vehicle.driven_path_sensor.distance_travelled(
            sim, last_n_seconds=last_n_seconds_considered
        )
        # Due to controller instabilities there may be some movement even when a
        # vehicle is "stopped". Here we allow 1m of total distance in 60 seconds.
        return distance < 1
    @classmethod
    def _vehicle_is_off_route_and_wrong_way(cls, sim, vehicle):
        """Determines if the vehicle is on route and on the correct side of the road.
        Args:
            sim: An instance of the simulator.
            vehicle: The vehicle to check.
        Returns:
            A tuple (is_off_route, is_wrong_way)
            is_off_route:
                Actor's vehicle is not on its route or an oncoming traffic lane.
            is_wrong_way:
                Actor's vehicle is going against the lane travel direction.
        """
        sensor_state = sim.vehicle_index.sensor_state_for_vehicle_id(vehicle.id)
        route_roads = sensor_state.plan.route.roads
        vehicle_pos = Point(*vehicle.position)
        # Half the diagonal of the vehicle's length/width footprint.
        vehicle_minimum_radius_bounds = (
            np.linalg.norm(vehicle.chassis.dimensions.as_lwh[:2]) * 0.5
        )
        # Check that center of vehicle is still close to route
        radius = vehicle_minimum_radius_bounds + 5
        nearest_lane = sim.scenario.road_map.nearest_lane(vehicle_pos, radius=radius)
        # No road nearby, so we're not on route!
        if not nearest_lane:
            return (True, False)
        # Check whether vehicle is in wrong-way
        is_wrong_way = cls._check_wrong_way_event(nearest_lane, vehicle)
        # Check whether vehicle has no-route or is on-route
        if (
            not route_roads  # Vehicle has no-route. E.g., endless mission with a random route
            or nearest_lane.road in route_roads  # Vehicle is on-route
            or nearest_lane.in_junction
        ):
            return (False, is_wrong_way)
        veh_offset = nearest_lane.offset_along_lane(vehicle_pos)
        # so we're obviously not on the route, but we might have just gone
        # over the center line into an oncoming lane...
        for on_lane in nearest_lane.oncoming_lanes_at_offset(veh_offset):
            if on_lane.road in route_roads:
                return (False, is_wrong_way)
        # Vehicle is completely off-route
        return (True, is_wrong_way)
    @staticmethod
    def _vehicle_is_wrong_way(vehicle, closest_lane):
        """Return True when the vehicle's heading opposes the lane's direction."""
        target_pose = closest_lane.center_pose_at_point(Point(*vehicle.pose.position))
        # Check if the vehicle heading is oriented away from the lane heading,
        # i.e. the relative heading exceeds 90 degrees.
        return (
            np.fabs(vehicle.pose.heading.relative_to(target_pose.heading)) > 0.5 * np.pi
        )
    @classmethod
    def _check_wrong_way_event(cls, lane_to_check, vehicle):
        """Return True for a wrong-way event, suppressing it inside junctions."""
        # When the vehicle is in an intersection, turn off the `wrong way` check to avoid
        # false positive `wrong way` events.
        if lane_to_check.in_junction:
            return False
        return cls._vehicle_is_wrong_way(vehicle, lane_to_check)
class Sensor:
    """The sensor base class."""
    def step(self):
        """Update sensor state. Default is a no-op."""
        pass
    def teardown(self):
        """Clean up internal resources. Subclasses must implement this."""
        raise NotImplementedError
class SensorState:
    """Per-actor sensor bookkeeping: step count, episode-step limit, and plan."""
    def __init__(self, max_episode_steps, plan):
        # `max_episode_steps` of None means "no episode step limit".
        self._max_episode_steps = max_episode_steps
        self._plan = plan
        self._step = 0
    def step(self):
        """Advance the internal step counter by one."""
        self._step += 1
    @property
    def reached_max_episode_steps(self):
        """Whether the step count has hit the configured limit (never True when unlimited)."""
        limit = self._max_episode_steps
        return limit is not None and self._step >= limit
    @property
    def plan(self):
        """The actor's current plan."""
        return self._plan
    @property
    def steps_completed(self):
        """How many times `step()` has been called."""
        return self._step
class CameraSensor(Sensor):
    """The base for a sensor that renders images."""
    def __init__(
        self,
        vehicle,
        renderer,  # type Renderer or None
        name: str,
        mask: int,
        width: int,
        height: int,
        resolution: float,
    ):
        # A renderer is required here despite the "or None" note above.
        assert renderer
        self._log = logging.getLogger(self.__class__.__name__)
        self._vehicle = vehicle
        # Off-screen camera that renders this sensor's view into a texture.
        self._camera = renderer.build_offscreen_camera(
            name,
            mask,
            width,
            height,
            resolution,
        )
    def teardown(self):
        """Release the off-screen camera resources."""
        self._camera.teardown()
    def step(self):
        # Keep the camera tracking the vehicle every simulation step.
        self._follow_vehicle()
    def _follow_vehicle(self):
        """Reposition the camera relative to the vehicle's current pose."""
        largest_dim = max(self._vehicle._chassis.dimensions.as_lwh)
        # Camera distance scales with the vehicle's largest dimension.
        self._camera.update(self._vehicle.pose, 20 * largest_dim)
class DrivableAreaGridMapSensor(CameraSensor):
    """A sensor that renders drivable area from around its target actor."""
    def __init__(
        self,
        vehicle,
        width: int,
        height: int,
        resolution: float,
        renderer,  # type Renderer or None
    ):
        super().__init__(
            vehicle,
            renderer,
            "drivable_area_grid_map",
            RenderMasks.DRIVABLE_AREA_HIDE,
            width,
            height,
            resolution,
        )
        self._resolution = resolution
    def __call__(self) -> DrivableAreaGridMap:
        """Render and return the current drivable-area grid map."""
        assert (
            self._camera is not None
        ), "Drivable area grid map has not been initialized"
        # Single-channel ("A") texture read from the off-screen camera.
        ram_image = self._camera.wait_for_ram_image(img_format="A")
        mem_view = memoryview(ram_image)
        image = np.frombuffer(mem_view, np.uint8)
        image.shape = (self._camera.tex.getYSize(), self._camera.tex.getXSize(), 1)
        # Flip vertically: texture row order is bottom-up.
        image = np.flipud(image)
        metadata = GridMapMetadata(
            created_at=int(time.time()),
            resolution=self._resolution,
            height=image.shape[0],
            width=image.shape[1],
            camera_pos=self._camera.camera_np.getPos(),
            camera_heading_in_degrees=self._camera.camera_np.getH(),
        )
        return DrivableAreaGridMap(data=image, metadata=metadata)
class OGMSensor(CameraSensor):
    """A sensor that renders occupancy information from around its target actor."""
    def __init__(
        self,
        vehicle,
        width: int,
        height: int,
        resolution: float,
        renderer,  # type Renderer or None
    ):
        super().__init__(
            vehicle,
            renderer,
            "ogm",
            RenderMasks.OCCUPANCY_HIDE,
            width,
            height,
            resolution,
        )
        self._resolution = resolution
    def __call__(self) -> OccupancyGridMap:
        """Render and return the current occupancy grid map."""
        assert self._camera is not None, "OGM has not been initialized"
        # Single-channel ("A") texture read from the off-screen camera.
        ram_image = self._camera.wait_for_ram_image(img_format="A")
        mem_view = memoryview(ram_image)
        grid = np.frombuffer(mem_view, np.uint8)
        grid.shape = (self._camera.tex.getYSize(), self._camera.tex.getXSize(), 1)
        # Flip vertically: texture row order is bottom-up.
        grid = np.flipud(grid)
        # Binarize occupancy, then scale to the 0-100 confidence convention.
        grid = grid.clip(min=0, max=1).astype(np.int8)
        grid *= 100  # full confidence on known cells
        metadata = GridMapMetadata(
            created_at=int(time.time()),
            resolution=self._resolution,
            height=grid.shape[0],
            width=grid.shape[1],
            camera_pos=self._camera.camera_np.getPos(),
            camera_heading_in_degrees=self._camera.camera_np.getH(),
        )
        return OccupancyGridMap(data=grid, metadata=metadata)
class RGBSensor(CameraSensor):
    """A sensor that renders color values from around its target actor."""
    def __init__(
        self,
        vehicle,
        width: int,
        height: int,
        resolution: float,
        renderer,  # type Renderer or None
    ):
        super().__init__(
            vehicle, renderer, "rgb", RenderMasks.RGB_HIDE, width, height, resolution
        )
        self._resolution = resolution
    def __call__(self) -> TopDownRGB:
        """Render and return the current top-down RGB image."""
        assert self._camera is not None, "RGB has not been initialized"
        # Three-channel RGB texture read from the off-screen camera.
        ram_image = self._camera.wait_for_ram_image(img_format="RGB")
        mem_view = memoryview(ram_image)
        image = np.frombuffer(mem_view, np.uint8)
        image.shape = (self._camera.tex.getYSize(), self._camera.tex.getXSize(), 3)
        # Flip vertically: texture row order is bottom-up.
        image = np.flipud(image)
        metadata = GridMapMetadata(
            created_at=int(time.time()),
            resolution=self._resolution,
            height=image.shape[0],
            width=image.shape[1],
            camera_pos=self._camera.camera_np.getPos(),
            camera_heading_in_degrees=self._camera.camera_np.getH(),
        )
        return TopDownRGB(data=image, metadata=metadata)
class LidarSensor(Sensor):
    """A lidar sensor."""
    def __init__(
        self,
        vehicle,
        bullet_client,
        sensor_params: Optional[SensorParams] = None,
        lidar_offset=(0, 0, 1),
    ):
        self._vehicle = vehicle
        self._bullet_client = bullet_client
        # Offset of the lidar origin relative to the vehicle position.
        self._lidar_offset = np.array(lidar_offset)
        self._lidar = Lidar(
            self._vehicle.position + self._lidar_offset,
            sensor_params,
            self._bullet_client,
        )
    def step(self):
        # Keep the lidar origin tracking the vehicle each simulation step.
        self._follow_vehicle()
    def _follow_vehicle(self):
        """Move the lidar origin to the vehicle position plus the offset."""
        self._lidar.origin = self._vehicle.position + self._lidar_offset
    def __call__(self):
        """Compute and return the current lidar point cloud."""
        return self._lidar.compute_point_cloud()
    def teardown(self):
        pass
class DrivenPathSensor(Sensor):
    """Tracks the driven path as a series of positions (regardless if the vehicle is
    following the route or not). For performance reasons it only keeps the last
    N=max_path_length path segments.
    """
    # (timestamp, position): simulation time in seconds and an (x, y) position.
    Entry = namedtuple("TimeAndPos", ["timestamp", "position"])
    def __init__(self, vehicle, max_path_length: int = 500):
        # Annotation fixed from `float` to `int`: deque(maxlen=...) requires an int.
        self._vehicle = vehicle
        # The deque silently evicts the oldest entries once full.
        self._driven_path = deque(maxlen=max_path_length)
    def track_latest_driven_path(self, sim):
        """Records the current location of the tracked vehicle."""
        pos = self._vehicle.position[:2]
        self._driven_path.append(
            DrivenPathSensor.Entry(timestamp=sim.elapsed_sim_time, position=pos)
        )
    def __call__(self):
        return [x.position for x in self._driven_path]  # only return the positions
    def teardown(self):
        pass
    def distance_travelled(
        self,
        sim,
        last_n_seconds: Optional[float] = None,
        last_n_steps: Optional[int] = None,
    ):
        """Find the amount of distance travelled over the last # of seconds XOR steps.

        Raises:
            ValueError: when neither window argument is provided.
        """
        if last_n_seconds is None and last_n_steps is None:
            raise ValueError("Either last N seconds or last N steps must be provided")
        if last_n_steps is not None:
            n = last_n_steps + 1  # to factor in the current step we're on
            filtered_pos = [x.position for x in self._driven_path][-n:]
        else:  # last_n_seconds
            threshold = sim.elapsed_sim_time - last_n_seconds
            filtered_pos = [
                x.position for x in self._driven_path if x.timestamp >= threshold
            ]
        xs = np.array([p[0] for p in filtered_pos])
        ys = np.array([p[1] for p in filtered_pos])
        # Sum of Euclidean distances between consecutive samples.
        dist_array = (xs[:-1] - xs[1:]) ** 2 + (ys[:-1] - ys[1:]) ** 2
        return np.sum(np.sqrt(dist_array))
class TripMeterSensor(Sensor):
"""Tracks distance travelled along the route (in KM). Kilometeres driven while
off-route are not counted as | |
extstructs=self.registry.validextensionstructs[typeName] if name == 'pNext' else None,
condition=conditions[name] if conditions and name in conditions else None,
cdecl=cdecl))
# If this struct extends another, keep its name in list for further processing
if typeinfo.elem.attrib.get('structextends') is not None:
self.structextends_list.append(typeName)
# Returnedonly structs should have most of their members ignored -- on entry, we only care about validating the sType and
# pNext members. Everything else will be overwritten by the callee.
if typeinfo.elem.attrib.get('returnedonly') is not None:
self.returnedonly_structs.append(typeName)
membersInfo = [m for m in membersInfo if m.name in ('sType', 'pNext')]
self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo))
#
# Capture group (e.g. C "enum" type) info to be used for param check code generation.
# These are concatenated together with other types.
    def genGroup(self, groupinfo, groupName, alias):
        """Capture enum/bitmask group info used for param-check code generation."""
        if not self.source_file:
            return
        # record the name/alias pair
        if alias is not None:
            self.alias_dict[groupName]=alias
        OutputGenerator.genGroup(self, groupinfo, groupName, alias)
        groupElem = groupinfo.elem
        # Store the sType values
        if groupName == 'VkStructureType':
            for elem in groupElem.findall('enum'):
                self.stypes.append(elem.get('name'))
        elif 'FlagBits' in groupName:
            # Collect the enabled bit names for this FlagBits group.
            bits = []
            for elem in groupElem.findall('enum'):
                if elem.get('supported') != 'disabled':
                    bits.append(elem.get('name'))
            if bits:
                self.flagBits[groupName] = bits
        else:
            # Determine if begin/end ranges are needed (we don't do this for VkStructureType, which has a more finely grained check)
            # e.g. "VkSamplerAddressMode" -> "VK_SAMPLER_ADDRESS_MODE"
            expandName = re.sub(r'([0-9a-z_])([A-Z0-9][^A-Z0-9]?)',r'\1_\2',groupName).upper()
            expandPrefix = expandName
            expandSuffix = ''
            # Trailing all-caps run (e.g. a vendor tag like "KHR") becomes the suffix.
            expandSuffixMatch = re.search(r'[A-Z][A-Z]+$',groupName)
            if expandSuffixMatch:
                expandSuffix = '_' + expandSuffixMatch.group()
                # Strip off the suffix from the prefix
                expandPrefix = expandName.rsplit(expandSuffix, 1)[0]
            isEnum = ('FLAG_BITS' not in expandPrefix)
            if isEnum:
                self.enumRanges[groupName] = (expandPrefix + '_BEGIN_RANGE' + expandSuffix, expandPrefix + '_END_RANGE' + expandSuffix)
            # Create definition for a list containing valid enum values for this enumerated type
            if self.featureExtraProtect is not None:
                enum_entry = '\n#ifdef %s\n' % self.featureExtraProtect
            else:
                enum_entry = ''
            enum_entry += 'const std::vector<%s> All%sEnums = {' % (groupName, groupName)
            for enum in groupElem:
                name = enum.get('name')
                if name is not None and enum.get('supported') != 'disabled':
                    enum_entry += '%s, ' % name
            enum_entry += '};\n'
            if self.featureExtraProtect is not None:
                enum_entry += '#endif // %s\n' % self.featureExtraProtect
            self.enumValueLists += enum_entry
#
# Capture command parameter info to be used for param check code generation.
    def genCmd(self, cmdinfo, name, alias):
        """Capture command parameter info to be used for param check code generation.

        Header pass: appends a 'bool PreCallValidate<Cmd>' declaration.
        Source pass: records per-parameter metadata (pointer depth, static
        array, bool/enum/const flags, optionality, count relationships) and
        the non-void return type into self.commands.
        """
        # record the name/alias pair
        if alias is not None:
            self.alias_dict[name]=alias
        OutputGenerator.genCmd(self, cmdinfo, name, alias)
        decls = self.makeCDecls(cmdinfo.elem)
        # NOTE(review): 'typedef' is computed but never used below.
        typedef = decls[1]
        typedef = typedef.split(')',1)[1]
        if self.header_file:
            if name not in self.blacklist:
                if (self.featureExtraProtect is not None):
                    self.declarations += [ '#ifdef %s' % self.featureExtraProtect ]
                # Strip off 'vk' from API name
                self.declarations += [ '%s%s' % ('bool PreCallValidate', decls[0].split("VKAPI_CALL vk")[1])]
                if (self.featureExtraProtect is not None):
                    self.declarations += [ '#endif' ]
        if self.source_file:
            if name not in self.blacklist:
                params = cmdinfo.elem.findall('param')
                # Get list of array lengths
                lens = set()
                for param in params:
                    len = self.getLen(param)
                    if len:
                        lens.add(len)
                # Get param info
                paramsInfo = []
                for param in params:
                    paramInfo = self.getTypeNameTuple(param)
                    cdecl = self.makeCParamDecl(param, 0)
                    # Check for parameter name in lens set
                    # (a parameter is a count if some other parameter's len refers to it)
                    iscount = False
                    if paramInfo[1] in lens:
                        iscount = True
                    paramsInfo.append(self.CommandParam(type=paramInfo[0], name=paramInfo[1],
                                                        ispointer=self.paramIsPointer(param),
                                                        isstaticarray=self.paramIsStaticArray(param),
                                                        isbool=True if paramInfo[0] == 'VkBool32' else False,
                                                        israngedenum=True if paramInfo[0] in self.enumRanges else False,
                                                        isconst=True if 'const' in cdecl else False,
                                                        isoptional=self.paramIsOptional(param),
                                                        iscount=iscount,
                                                        noautovalidity=True if param.attrib.get('noautovalidity') is not None else False,
                                                        len=self.getLen(param),
                                                        extstructs=None,
                                                        condition=None,
                                                        cdecl=cdecl))
                # Save return value information, if any
                result_type = ''
                resultinfo = cmdinfo.elem.find('proto/type')
                if (resultinfo is not None and resultinfo.text != 'void'):
                    result_type = resultinfo.text
                self.commands.append(self.CommandData(name=name, params=paramsInfo, cdecl=self.makeCDecls(cmdinfo.elem)[0], extension_type=self.extension_type, result=result_type))
#
# Check if the parameter passed in is a pointer
def paramIsPointer(self, param):
ispointer = 0
paramtype = param.find('type')
if (paramtype.tail is not None) and ('*' in paramtype.tail):
ispointer = paramtype.tail.count('*')
elif paramtype.text[:4] == 'PFN_':
# Treat function pointer typedefs as a pointer to a single value
ispointer = 1
return ispointer
#
# Check if the parameter passed in is a static array
def paramIsStaticArray(self, param):
isstaticarray = 0
paramname = param.find('name')
if (paramname.tail is not None) and ('[' in paramname.tail):
isstaticarray = paramname.tail.count('[')
return isstaticarray
#
# Check if the parameter passed in is optional
# Returns a list of Boolean values for comma separated len attributes (len='false,true')
def paramIsOptional(self, param):
# See if the handle is optional
isoptional = False
# Simple, if it's optional, return true
optString = param.attrib.get('optional')
if optString:
if optString == 'true':
isoptional = True
elif ',' in optString:
opts = []
for opt in optString.split(','):
val = opt.strip()
if val == 'true':
opts.append(True)
elif val == 'false':
opts.append(False)
else:
print('Unrecognized len attribute value',val)
isoptional = opts
return isoptional
#
# Check if the handle passed in is optional
# Uses the same logic as ValidityOutputGenerator.isHandleOptional
def isHandleOptional(self, param, lenParam):
# Simple, if it's optional, return true
if param.isoptional:
return True
# If no validity is being generated, it usually means that validity is complex and not absolute, so let's say yes.
if param.noautovalidity:
return True
# If the parameter is an array and we haven't already returned, find out if any of the len parameters are optional
if lenParam and lenParam.isoptional:
return True
return False
#
# Retrieve the value of the len tag
def getLen(self, param):
result = None
len = param.attrib.get('len')
if len and len != 'null-terminated':
# For string arrays, 'len' can look like 'count,null-terminated', indicating that we have a null terminated array of
# strings. We strip the null-terminated from the 'len' field and only return the parameter specifying the string count
if 'null-terminated' in len:
result = len.split(',')[0]
else:
result = len
result = str(result).replace('::', '->')
return result
#
# Retrieve the type and name for a parameter
def getTypeNameTuple(self, param):
type = ''
name = ''
for elem in param:
if elem.tag == 'type':
type = noneStr(elem.text)
elif elem.tag == 'name':
name = noneStr(elem.text)
return (type, name)
#
# Find a named parameter in a parameter list
def getParamByName(self, params, name):
for param in params:
if param.name == name:
return param
return None
#
# Extract length values from latexmath. Currently an inflexible solution that looks for specific
# patterns that are found in vk.xml. Will need to be updated when new patterns are introduced.
def parseLateXMath(self, source):
name = 'ERROR'
decoratedName = 'ERROR'
if 'mathit' in source:
# Matches expressions similar to 'latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]'
match = re.match(r'latexmath\s*\:\s*\[\s*\\l(\w+)\s*\{\s*\\mathit\s*\{\s*(\w+)\s*\}\s*\\over\s*(\d+)\s*\}\s*\\r(\w+)\s*\]', source)
if not match or match.group(1) != match.group(4):
raise 'Unrecognized latexmath expression'
name = match.group(2)
decoratedName = '{}({}/{})'.format(*match.group(1, 2, 3))
else:
# Matches expressions similar to 'latexmath : [dataSize \over 4]'
match = re.match(r'latexmath\s*\:\s*\[\s*(\\textrm\{)?(\w+)\}?\s*\\over\s*(\d+)\s*\]', source)
name = match.group(2)
decoratedName = '{}/{}'.format(*match.group(2, 3))
return name, decoratedName
#
# Get the length paramater record for the specified parameter name
def getLenParam(self, params, name):
lenParam = None
if name:
if '->' in name:
# The count is obtained by dereferencing a member of a struct parameter
lenParam = self.CommandParam(name=name, iscount=True, ispointer=False, isbool=False, israngedenum=False, isconst=False,
isstaticarray=None, isoptional=False, type=None, noautovalidity=False,
len=None, extstructs=None, condition=None, cdecl=None)
elif 'latexmath' in name:
lenName, decoratedName = self.parseLateXMath(name)
lenParam = self.getParamByName(params, lenName)
else:
lenParam = self.getParamByName(params, name)
return lenParam
#
# Convert a vulkan.h command declaration into a parameter_validation.h definition
def getCmdDef(self, cmd):
# Strip the trailing ';' and split into individual lines
lines = cmd.cdecl[:-1].split('\n')
cmd_hdr = '\n'.join(lines)
return cmd_hdr
#
# Generate the code to check for a NULL dereference before calling the
# validation function
def genCheckedLengthCall(self, name, exprs):
count = name.count('->')
if count:
checkedExpr = []
localIndent = ''
elements = name.split('->')
# Open the if expression blocks
for i in range(0, count):
checkedExpr.append(localIndent + 'if ({} != NULL) {{\n'.format('->'.join(elements[0:i+1])))
localIndent = self.incIndent(localIndent)
# Add the validation expression
for expr in exprs:
checkedExpr.append(localIndent + expr)
# Close the if blocks
for i in range(0, count):
localIndent = self.decIndent(localIndent)
checkedExpr.append(localIndent + '}\n')
return [checkedExpr]
# No if statements were required
return exprs
#
# Generate code to check for a specific condition before executing validation code
def genConditionalCall(self, prefix, condition, exprs):
checkedExpr = []
| |
# <gh_stars>1-10
# -*- coding: utf-8 -*-
import PIL
import numpy as np
import weakref
from math import floor
from .helper import make_slice_list
class Pixel:
    """A single pixel of an Image, holding only a weak reference to it.

    The pixel value is sampled once at construction time; the weak
    reference keeps a Pixel from prolonging its parent Image's lifetime.
    """
    def __init__(self, parent, row, col):
        self._parent = weakref.ref(parent)
        self._row = row
        self._col = col
        # Sample eagerly while we still hold a strong reference to parent.
        self._value = parent.getpixel(row, col)
    @property
    def row(self):
        return self._row
    @property
    def col(self):
        return self._col
    @property
    def value(self):
        return self._value
    def patch(self, size):
        """Return the size-square patch of the parent image centered here."""
        return self._parent().patch(self._row, self._col, size)
class Line:
    """One image row plus *margin* rows of context above and below.

    A full-width, (2*margin + 1)-row strip is cropped from the parent image
    at construction; the parent itself is held only by weak reference.
    """
    def __init__(self, parent, lineno, margin):
        self._parent = weakref.ref(parent)
        self._lineno = lineno
        self._margin = margin
        # PIL crop box is (left, upper, right, lower); parent.shape[0] is the
        # width (PIL size order), so this strip covers full-width rows
        # [lineno - margin, lineno + margin].
        rect = (0, lineno - margin, parent.shape[0], lineno + margin + 1)
        self._image = parent._image.crop(rect)
    @property
    def parent(self):
        # Dereferenced weakref: None once the parent Image is collected.
        return self._parent()
    @property
    def lineno(self):
        return self._lineno
    @property
    def margin(self):
        return self._margin
    def to_array(self, margin = 0):
        """Return the strip as a numpy array trimmed to *margin* context rows.

        Also trims (self.margin - margin) pixels from each horizontal end, so
        the result covers only positions with full context available.

        Raises:
            ValueError: if *margin* exceeds the margin stored at construction.
        """
        self_margin = self.margin
        if margin > self_margin:
            raise ValueError("Margin {0} too large for line height {1}".format(margin, 2*self_margin + 1))
        rect = (self_margin - margin, self_margin - margin,
                self._image.size[0] - (self_margin - margin), self_margin + margin + 1)
        return np.array(self._image.crop(rect))
    def pixeltype(self, ratio):
        """Return a class index in [0, ratio**2) for each interior pixel.

        Combines (lineno % ratio) and (column % ratio) — presumably the
        pixel's phase within a ratio x ratio sampling grid; TODO confirm the
        intended semantics against the caller.
        """
        pixel_numbers = np.arange(self.margin, self._image.size[0] - self.margin)
        return ((self.lineno) % ratio) * ratio + ((pixel_numbers) % ratio)
    def census_transform(self, operator = np.greater, fuzzyness = 0.0):
        """Return the 8-bit census transform for every interior pixel.

        A bit is set when the center compares true (via *operator*) against
        the neighbor at both +fuzzyness and -fuzzyness offsets. Bit weights
        are tl=128, cl=64, bl=32, tc=16, bc=8, tr=4, cr=2, br=1 — the same
        assignment Image.census_transform uses.
        """
        block = self.to_array(margin = 1)
        # Eight shifted slices: each row below is the given neighbor of the
        # center pixel cc for every interior column at once.
        tl = block[0, 0:-2]
        tc = block[0, 1:-1]
        tr = block[0, 2:]
        cl = block[1, 0:-2]
        cc = block[1, 1:-1]
        cr = block[1, 2:]
        bl = block[2, 0:-2]
        bc = block[2, 1:-1]
        br = block[2, 2:]
        pixel_stack = np.vstack((tl, cl, bl, tc, bc, tr, cr, br))
        comp_hi = operator(cc[None,:] + fuzzyness, pixel_stack)
        comp_lo = operator(cc[None,:] - fuzzyness, pixel_stack)
        bools = np.all((comp_hi, comp_lo), axis = 0)
        byte_vector = np.array((128, 64, 32, 16, 8, 4, 2, 1))
        return np.sum(bools * byte_vector[:, None], axis = 0)
    def hashkey(self, margin, gradient_weight, angle_bins, strength_thresholds, coherence_thresholds):
        """Compute quantized (angle, strength, coherence) keys per patch.

        RAISR-style hashing: for each quadratic patch along the line, the
        eigen-analysis of the weighted gradient structure tensor G^T * W * G
        yields a dominant gradient angle, an edge strength (largest
        eigenvalue) and a coherence value, each quantized into bins.

        Args:
            margin: patch half-size; patches are (2*margin - 1) squares.
            gradient_weight: 1-D weight vector; assumed length
                (2*margin - 1)**2 to broadcast against G — TODO confirm.
            angle_bins: number of angle buckets over [0, pi).
            strength_thresholds, coherence_thresholds: ascending cut points.

        Returns:
            Tuple (angle_list, strength_list, coherence_list) of uint arrays,
            one entry per patch.
        """
        # Calculate gradient of input block
        block = self.to_array(margin = margin).astype('float')
        gy, gx = np.gradient(block)
        gradientsize = 2 * margin - 1
        # Decompose gradient into list of quadratic pieces
        start = 1
        stop = block.shape[1] - gradientsize
        # TODO: Do not compute this anew every time
        slice_list = make_slice_list(start, stop, gradientsize)
        gy_list = np.array([gy[..., 1:-1, sl] for sl in slice_list])
        gx_list = np.array([gx[..., 1:-1, sl] for sl in slice_list])
        gy_lines = gy_list.reshape((gy_list.shape[0], gy_list.shape[1] * gy_list.shape[2]))
        gx_lines = gx_list.reshape((gx_list.shape[0], gx_list.shape[1] * gx_list.shape[2]))
        # Get list of corresponding matrices G, G^T and W
        G_list = np.copy(np.array([gx_lines, gy_lines]).transpose((1,2,0)))
        GT_list = np.copy(G_list.transpose((0,2,1)))
        # Calculate list of G^T * W * G matrix products
        GTWG_list = np.einsum('ijk,ikl->ijl', GT_list,
                              gradient_weight[None, :, None] * G_list,
                              optimize = True)
        # Extract lists of individual matrix entries by writing
        #                / a  b \
        # G^T * W * G = |        |
        #                \ c  d /
        a_list = GTWG_list[:, 0, 0]
        b_list = GTWG_list[:, 0, 1]
        c_list = GTWG_list[:, 1, 0]
        d_list = GTWG_list[:, 1, 1]
        # Calculate lists of determinants and traces using general formula
        # for 2-by-2 matrices
        det_list = a_list * d_list - b_list * c_list
        tr_list = a_list + d_list
        # Calculate maximum and minimum eigenvalue using general formula
        # for 2-by-2 matrices
        sqrt_list = np.sqrt(tr_list**2 / 4 - det_list)
        # NaNs arise from tiny negative radicands (floating-point error);
        # treat them as a zero discriminant.
        sqrt_list[np.isnan(sqrt_list)] = 0
        eig_max_list = tr_list / 2 + sqrt_list
        eig_min_list = tr_list / 2 - sqrt_list
        # There exists no general closed form for the corresponding eigenvector.
        # Depending on whether c != 0 (case 1) or b != 0 (case 2) there are two
        # equivalent results
        v_list_1 = np.vstack((eig_max_list - d_list, c_list))
        v_list_2 = np.vstack((b_list, eig_max_list - a_list))
        # The results from the two cases are always correct, but it can happen
        # that the resulting vectors are zero, if c == 0 or b == 0, respectively.
        # Since G^T * W * G is symmetric, b == c holds true. So the two vectors
        # are of similar magnitude and adding them can help to reduce numerical
        # noise. The following lines produce v_1 + v_2 or v_1 - v_2, respectively,
        # depending on which of the two sums has larger norm.
        # More importantly, this also resolves the not explicitly covered case
        # b == c == 0: If b*c is much smaller than a*d, then eig_max will be
        # approximately equal to max(a, d). This results in either v_1 or v_2
        # being approximately zero, while the respective other vector has length
        # of approximately abs(a - d). The only unhandled remaining case is
        # b == c == 0 and a == d, but then the corresponding eigenvector is
        # not well-defined anyway. Therefore, using the result v = v_1 ± v_2 is
        # sufficient.
        v_list_p = v_list_1 + v_list_2
        v_list_m = v_list_1 - v_list_2
        norm_list_p = v_list_p[0,:]**2 + v_list_p[1,:]**2
        norm_list_m = v_list_m[0,:]**2 + v_list_m[1,:]**2
        v_list = v_list_p * (norm_list_p > norm_list_m) + v_list_m * (norm_list_p <= norm_list_m)
        # Calculate theta (dominant angle, folded into [0, pi))
        theta_list = np.arctan2(v_list[1,:], v_list[0,:])
        theta_list[theta_list < 0] += np.pi
        # Calculate u (coherence in [0, 1])
        sqrt_eig_max_list = np.sqrt(eig_max_list)
        sqrt_eig_min_list = np.sqrt(eig_min_list)
        u_list = (sqrt_eig_max_list - sqrt_eig_min_list) / (sqrt_eig_max_list + sqrt_eig_min_list)
        # 0/0 (flat patches) yields NaN/inf; define coherence as 0 there.
        u_list[np.logical_not(np.isfinite(u_list))] = 0
        # Quantize
        # TODO: Find optimal threshold values
        angle_list = (theta_list * angle_bins / np.pi).astype('uint')
        angle_list[angle_list == angle_bins] = 0
        strength_list = np.zeros(eig_max_list.shape, dtype = 'uint')
        for threshold in strength_thresholds:
            strength_list += (eig_max_list > threshold).astype('uint')
        coherence_list = np.zeros(eig_max_list.shape, dtype = 'uint')
        for threshold in coherence_thresholds:
            coherence_list += (u_list > threshold).astype('uint')
        return angle_list, strength_list, coherence_list
class Image:
    def __init__(self, image):
        # Wraps a PIL image; all operations delegate to it.
        self._image = image
    @classmethod
    def from_file(cls, fname):
        """Load an image from file *fname* via PIL."""
        # TODO: Error if file does not exist
        return cls(PIL.Image.open(fname))
    @classmethod
    def from_array(cls, arr):
        """Build a grayscale ('L' mode) image from numpy array *arr*."""
        return cls(PIL.Image.fromarray(arr, mode = 'L'))
    @classmethod
    def from_channels(cls, mode, channels):
        """Merge single-channel Image objects into one image of *mode*."""
        channel_list = [ch._image for ch in channels]
        return cls(PIL.Image.merge(mode, channel_list))
def patch(self, row, col, size):
margin = size // 2
box = (col - margin, row - margin, col + margin + 1, row + margin + 1)
return np.array(self._image.crop(box))
    def census_transform(self, row, col, operator = np.greater, fuzzyness = 0.0):
        """Return the 8-bit census transform of the pixel at (row, col).

        Each bit is set when the center compares true (via *operator*) to one
        8-neighbor at both +fuzzyness and -fuzzyness offsets. The weight
        vector maps the raveled 3x3 patch (row-major) to tl=128, tc=16,
        tr=4, cl=64, center=0, cr=2, bl=32, bc=8, br=1 — the same bit
        assignment as Line.census_transform.
        """
        patch = np.array(self.patch(row, col, 3))
        comp_hi = operator(patch[1,1] + fuzzyness, patch)
        comp_lo = operator(patch[1,1] - fuzzyness, patch)
        bools = np.all((comp_hi, comp_lo), axis = 0).astype(int).ravel()
        return np.dot(bools, np.array((128, 16, 4, 64, 0, 2, 32, 8, 1)))
    def pixels(self, *, margin = 0):
        """Yield a Pixel for every position at least *margin* from the border."""
        width, height = self.shape
        for row in range(margin, height - margin):
            for col in range(margin, width - margin):
                yield Pixel(self, row, col)
    def lines(self, *, margin = 0):
        """Yield a Line for every row at least *margin* from top and bottom."""
        # shape is PIL size order (width, height), so shape[1] is the height.
        for lineno in range(margin, self.shape[1] - margin):
            yield Line(self, lineno, margin)
    def getpixel(self, row, col):
        """Return the pixel value at (row, col); PIL itself uses (col, row)."""
        return self._image.getpixel((col, row))
    def getchannel(self, identifier):
        """Return a new single-channel Image for channel *identifier*."""
        return self.__class__(self._image.getchannel(identifier))
    def number_of_pixels(self, *, margin = 0):
        """Return how many pixels pixels() would yield for this *margin*."""
        width, height = self.shape
        return (height - 2*margin) * (width - 2*margin)
    @property
    def shape(self):
        # NOTE(review): PIL size order (width, height) — not numpy's
        # (rows, cols), despite the name.
        return self._image.size
    @property
    def mode(self):
        # PIL mode string, e.g. 'L', 'RGB', 'YCbCr'.
        return self._image.mode
def to_grayscale(self):
if self.mode == 'RGB':
return self.to_ycbcr().to_grayscale()
elif self.mode == 'YCbCr':
return self.__class__(self._image.getchannel('Y'))
else:
raise ValueError('Expected RGB or YCbCr mode image.')
def to_ycbcr(self):
if self.mode == 'YCbCr':
return self
elif self.mode == 'RGB' or self.mode == 'RGBA':
return self.__class__(self._image.convert('YCbCr'))
else:
raise ValueError('Expected RGB mode image.')
def to_rgb(self):
if self.mode == 'RGB':
return self
elif self.mode == 'YCbCr':
return self.__class__(self._image.convert('RGB'))
else:
raise ValueError('Expected YCbCr mode image.')
    def crop(self, box):
        """Return a new Image cropped to *box* = (left, upper, right, lower)."""
        return self.__class__(self._image.crop(box))
def downscale(self, ratio, method = 'bicubic'):
if method == 'bicubic':
resample = PIL.Image.BICUBIC
elif method == 'bilinear':
resample = PIL.Image.BILINEAR
elif method == 'lanczos':
resample = PIL.Image.LANCZOS
elif method == 'nearest':
resample = PIL.Image.NEAREST
else:
raise ValueError('Unknown resampling method "{0}"'.format(method))
width, height = self.shape
downscaled_height = floor((height - 1) / ratio) + 1
downscaled_width = floor((width - 1) / ratio) + 1
return self.__class__(self._image.resize((downscaled_width, downscaled_height), resample = resample))
def upscale(self, ratio, method = 'bilinear'):
if method == 'bicubic':
resample = PIL.Image.BICUBIC
elif method == 'bilinear':
resample = PIL.Image.BILINEAR
elif method == 'lanczos':
resample = PIL.Image.LANCZOS
elif method == 'nearest':
resample = PIL.Image.NEAREST
else:
raise ValueError('Unknown resampling method "{0}"'.format(method))
width, height | |
# sdk/python/pulumi_azure_native/labservices/v20181015/environment_setting.py  <gh_stars>0
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['EnvironmentSettingArgs', 'EnvironmentSetting']
@pulumi.input_type
class EnvironmentSettingArgs:
    """Input properties accepted when constructing an EnvironmentSetting.

    NOTE: generated by the Pulumi SDK Generator (see file header) — prefer
    regenerating over hand-editing.
    """
    def __init__(__self__, *,
                 lab_account_name: pulumi.Input[str],
                 lab_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 resource_settings: pulumi.Input['ResourceSettingsArgs'],
                 configuration_state: Optional[pulumi.Input[Union[str, 'ConfigurationState']]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 environment_setting_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 provisioning_state: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 title: Optional[pulumi.Input[str]] = None,
                 unique_identifier: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a EnvironmentSetting resource.
        :param pulumi.Input[str] lab_account_name: The name of the lab Account.
        :param pulumi.Input[str] lab_name: The name of the lab.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input['ResourceSettingsArgs'] resource_settings: The resource specific settings
        :param pulumi.Input[Union[str, 'ConfigurationState']] configuration_state: Describes the user's progress in configuring their environment setting
        :param pulumi.Input[str] description: Describes the environment and its resource settings
        :param pulumi.Input[str] environment_setting_name: The name of the environment Setting.
        :param pulumi.Input[str] location: The location of the resource.
        :param pulumi.Input[str] provisioning_state: The provisioning status of the resource.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The tags of the resource.
        :param pulumi.Input[str] title: Brief title describing the environment and its resource settings
        :param pulumi.Input[str] unique_identifier: The unique immutable identifier of a resource (Guid).
        """
        pulumi.set(__self__, "lab_account_name", lab_account_name)
        pulumi.set(__self__, "lab_name", lab_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "resource_settings", resource_settings)
        # Optional inputs are only set when provided, so absent values stay unset.
        if configuration_state is not None:
            pulumi.set(__self__, "configuration_state", configuration_state)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if environment_setting_name is not None:
            pulumi.set(__self__, "environment_setting_name", environment_setting_name)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if provisioning_state is not None:
            pulumi.set(__self__, "provisioning_state", provisioning_state)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if title is not None:
            pulumi.set(__self__, "title", title)
        if unique_identifier is not None:
            pulumi.set(__self__, "unique_identifier", unique_identifier)
    @property
    @pulumi.getter(name="labAccountName")
    def lab_account_name(self) -> pulumi.Input[str]:
        """
        The name of the lab Account.
        """
        return pulumi.get(self, "lab_account_name")
    @lab_account_name.setter
    def lab_account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "lab_account_name", value)
    @property
    @pulumi.getter(name="labName")
    def lab_name(self) -> pulumi.Input[str]:
        """
        The name of the lab.
        """
        return pulumi.get(self, "lab_name")
    @lab_name.setter
    def lab_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "lab_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="resourceSettings")
    def resource_settings(self) -> pulumi.Input['ResourceSettingsArgs']:
        """
        The resource specific settings
        """
        return pulumi.get(self, "resource_settings")
    @resource_settings.setter
    def resource_settings(self, value: pulumi.Input['ResourceSettingsArgs']):
        pulumi.set(self, "resource_settings", value)
    @property
    @pulumi.getter(name="configurationState")
    def configuration_state(self) -> Optional[pulumi.Input[Union[str, 'ConfigurationState']]]:
        """
        Describes the user's progress in configuring their environment setting
        """
        return pulumi.get(self, "configuration_state")
    @configuration_state.setter
    def configuration_state(self, value: Optional[pulumi.Input[Union[str, 'ConfigurationState']]]):
        pulumi.set(self, "configuration_state", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Describes the environment and its resource settings
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="environmentSettingName")
    def environment_setting_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the environment Setting.
        """
        return pulumi.get(self, "environment_setting_name")
    @environment_setting_name.setter
    def environment_setting_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "environment_setting_name", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The location of the resource.
        """
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> Optional[pulumi.Input[str]]:
        """
        The provisioning status of the resource.
        """
        return pulumi.get(self, "provisioning_state")
    @provisioning_state.setter
    def provisioning_state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "provisioning_state", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        The tags of the resource.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter
    def title(self) -> Optional[pulumi.Input[str]]:
        """
        Brief title describing the environment and its resource settings
        """
        return pulumi.get(self, "title")
    @title.setter
    def title(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "title", value)
    @property
    @pulumi.getter(name="uniqueIdentifier")
    def unique_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        The unique immutable identifier of a resource (Guid).
        """
        return pulumi.get(self, "unique_identifier")
    @unique_identifier.setter
    def unique_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "unique_identifier", value)
class EnvironmentSetting(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
configuration_state: Optional[pulumi.Input[Union[str, 'ConfigurationState']]] = None,
description: Optional[pulumi.Input[str]] = None,
environment_setting_name: Optional[pulumi.Input[str]] = None,
lab_account_name: Optional[pulumi.Input[str]] = None,
lab_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_settings: Optional[pulumi.Input[pulumi.InputType['ResourceSettingsArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
title: Optional[pulumi.Input[str]] = None,
unique_identifier: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Represents settings of an environment, from which environment instances would be created
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Union[str, 'ConfigurationState']] configuration_state: Describes the user's progress in configuring their environment setting
:param pulumi.Input[str] description: Describes the environment and its resource settings
:param pulumi.Input[str] environment_setting_name: The name of the environment Setting.
:param pulumi.Input[str] lab_account_name: The name of the lab Account.
:param pulumi.Input[str] lab_name: The name of the lab.
:param pulumi.Input[str] location: The location of the resource.
:param pulumi.Input[str] provisioning_state: The provisioning status of the resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[pulumi.InputType['ResourceSettingsArgs']] resource_settings: The resource specific settings
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The tags of the resource.
:param pulumi.Input[str] title: Brief title describing the environment and its resource settings
:param pulumi.Input[str] unique_identifier: The unique immutable identifier of a resource (Guid).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: EnvironmentSettingArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents settings of an environment, from which environment instances would be created
:param str resource_name: The name of the resource.
:param EnvironmentSettingArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(EnvironmentSettingArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
configuration_state: Optional[pulumi.Input[Union[str, 'ConfigurationState']]] = None,
description: Optional[pulumi.Input[str]] = None,
environment_setting_name: Optional[pulumi.Input[str]] = None,
lab_account_name: Optional[pulumi.Input[str]] = None,
lab_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_settings: Optional[pulumi.Input[pulumi.InputType['ResourceSettingsArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
title: Optional[pulumi.Input[str]] = None,
unique_identifier: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = EnvironmentSettingArgs.__new__(EnvironmentSettingArgs)
__props__.__dict__["configuration_state"] = configuration_state
__props__.__dict__["description"] = description
__props__.__dict__["environment_setting_name"] = environment_setting_name
if lab_account_name is None and not opts.urn:
raise TypeError("Missing required property 'lab_account_name'")
__props__.__dict__["lab_account_name"] = lab_account_name
if lab_name is None and not opts.urn:
raise TypeError("Missing required property 'lab_name'")
__props__.__dict__["lab_name"] = lab_name
__props__.__dict__["location"] = location
__props__.__dict__["provisioning_state"] = provisioning_state
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if resource_settings is None and not opts.urn:
raise TypeError("Missing required property 'resource_settings'")
__props__.__dict__["resource_settings"] = resource_settings
__props__.__dict__["tags"] = tags
__props__.__dict__["title"] = title
__props__.__dict__["unique_identifier"] = unique_identifier
__props__.__dict__["last_changed"] = None
__props__.__dict__["last_published"] = None
__props__.__dict__["latest_operation_result"] = None
__props__.__dict__["name"] = None
__props__.__dict__["publishing_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:labservices/v20181015:EnvironmentSetting"), pulumi.Alias(type_="azure-native:labservices:EnvironmentSetting"), pulumi.Alias(type_="azure-nextgen:labservices:EnvironmentSetting")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(EnvironmentSetting, __self__).__init__(
'azure-native:labservices/v20181015:EnvironmentSetting',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'EnvironmentSetting':
"""
Get an existing EnvironmentSetting resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = EnvironmentSettingArgs.__new__(EnvironmentSettingArgs)
__props__.__dict__["configuration_state"] = None
__props__.__dict__["description"] = None
__props__.__dict__["last_changed"] = None
__props__.__dict__["last_published"] = None
__props__.__dict__["latest_operation_result"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["publishing_state"] = None
__props__.__dict__["resource_settings"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["title"] = None
__props__.__dict__["type"] = None
__props__.__dict__["unique_identifier"] = None
return EnvironmentSetting(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="configurationState")
def configuration_state(self) -> pulumi.Output[Optional[str]]:
"""
Describes the user's progress in configuring their environment setting
"""
return pulumi.get(self, "configuration_state")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Describes the environment and its resource settings
"""
return pulumi.get(self, | |
from bs4 import BeautifulSoup
import sys
import time
from random import randint
import re
import requests
import pandas as pd
from psycopg2.extras import execute_batch
import traceback
import queue
from datetime import datetime
from BaseScraper import BaseScraper
class LetterboxScraper(BaseScraper):
def scrape(self):
"""
Scrapes letterboxd.com for review pages.
Works very similarly to the basic scrape function. Takes in ids
from imdb and checks to see if they exist on letterboxd. If
not, it will hit an exception and move on. Movies with no
reviews also hit an exception. This is much slower than imbd
due to differing website design.
"""
id_list = self.get_ids()
t = time.perf_counter()
movie_id = []
rating = []
reviews = []
username = []
likes = []
date = []
review_id = []
iteration_counter = 0
broken = []
page_count = 0
query = """
SELECT movie_id, primary_title FROM movies
"""
curs, conn = self.connect_to_database()
curs.execute(query)
fetched = curs.fetchall()
movie_ids = set([row[0] for row in fetched])
movie_titles = {row[0]: row[1] for row in fetched}
for count, id in enumerate(id for id in id_list if id in movie_ids):
print("----------------------------------------")
try:
t1 = time.perf_counter()
review_count = 0
# self.locate(id)
url_initial = f"https://www.letterboxd.com/imdb/{id}"
time.sleep(randint(3, 6))
initial_response = requests.get(url_initial)
title = ""
try:
soup = BeautifulSoup(initial_response.text, 'html.parser')
title = soup.find(class_="headline-1 js-widont prettify").get_text()
title = title.replace(" ", "-").lower()
print("Found: ", title, movie_titles[id])
except Exception as e:
print(f"Unable to find a title for this movie at index: {id}")
print("This is normal and expected behavior")
raise Exception(e)
url_reviews = initial_response.url + 'reviews/by/activity/'
print(url_reviews)
# initially, I wanted to make this sorted by recency, but if
# there are fewer than 12 reviews only sorting by popular is
# available
time.sleep(randint(3, 6))
response = requests.get(url_reviews)
if response.status_code != 200:
time.sleep(randint(3, 6))
response = requests.get(url_reviews)
if response.status_code != 200:
print(f"call to {url_reviews} failed with status \
code {response.status_code}!")
continue
soup = BeautifulSoup(response.text, 'html.parser')
items = soup.find_all(class_='film-detail')
if len(items) == 0:
print(f"No reviews for {id} {title}")
continue
print(f"ID: {id} at index {self.all_ids.index(id)}")
while True:
if iteration_counter >= self.max_iter_count:
df = self.letterboxd_dataframe(movie_id, review_id,
rating, reviews, date,
username)
self.letterboxd_insert(df)
movie_id.clear()
rating.clear()
reviews.clear()
username.clear()
likes.clear()
date.clear()
review_id.clear()
df = df.iloc[0:0]
iteration_counter = 0
iteration_counter += 1
for item in items:
body = item.find(class_="body-text -prose collapsible-text")
append = body['data-full-text-url']
if item.find(class_="reveal js-reveal") or item.find(class_="collapsed-text"):
text_url = 'https://www.letterboxd.com' + append
time.sleep(randint(3, 4))
fulltext = requests.get(text_url)
if fulltext.status_code != 200:
time.sleep(randint(3, 6))
fulltext = requests.get(text_url)
if fulltext.status_code != 200:
print(f"call to {text_url} failed with \
status code {fulltext.status_code}!")
continue
fulltext = re.sub(r'\<[^>]*\>', "", fulltext.text)
reviews.append(fulltext)
else:
reviews.append(body.get_text())
review_count += 1
movie_id.append(id.replace("tt", ""))
append = append.split(":", 1)[1].replace("/", "")
review_id.append(append)
try:
rating1 = str(item.find(class_="attribution"))
found = re.search(r'rating -green rated-\d+', rating1)
found = found.group()
text = found.split("-")
rate = int(text[-1])
rating.append(rate)
except Exception:
rating.append(11)
username.append(item.find(class_="name").get_text())
if item.find('span', '_nobr').get_text():
dates = item.find('span', '_nobr').get_text()
date.append(dates)
else:
datetime = str(item.find('time', class_="localtime-dd-mmm-yyyy"))
extract = datetime.split('"')
dates = str(extract[3])
date.append(dates[:10])
if soup.find('a', class_="next"):
page_count += 1
url_more = url_reviews + 'page/' + str(page_count+1) + '/'
print(url_more)
time.sleep(randint(3, 6))
response = requests.get(url_more)
if response.status_code != 200:
time.sleep(randint(3, 6))
response = requests.get(url_more)
if response.status_code != 200:
print(f"call to {url_more} failed with status \
code {response.status_code}!")
continue
soup = BeautifulSoup(response.text, 'html.parser')
items = soup.find_all(class_='film-detail')
else:
print('end of this movie')
page_count = 0
break
# While loop ends here
t2 = time.perf_counter()
finish = t2-t1
# if count == 0 and os.path.exists(f"Logfile{self.scraper_instance}.txt"):
# os.remove(f"Logfile{self.scraper_instance}.txt")
# print("Logging")
# self.create_log(title,review_count,None,finish)
except Exception as e:
broken.append(id)
print("Broken!", id)
err1, err2, tb = sys.exc_info()
print(err1, err2)
print(traceback.print_tb(tb))
continue
try:
df = self.letterboxd_dataframe(movie_id, review_id, rating,
reviews, date, username)
self.letterboxd_insert(df)
except Exception as e:
print("error while creating dataframe or inserting into database")
raise Exception(e)
t3 = time.perf_counter()
total = t3 - t
print(f"Scraped {count + 1} movies in {round(total,2)} seconds")
print('All done!\n')
print("The following IDs were not scraped succcessfully:")
self.show(broken)
if len(broken) == 0:
print("none")
def scrape_by_users(self, first_user="flyingindie"):
"""
Scrapes all movies rated or reviewed by `first_user`,
then moves to users connected to `first_user` and scrapes
movies rated by them, and so on.
"""
print("scrape_by_users()")
base_url = "https://letterboxd.com"
# high priority is a list of users with no reviews currently in the db
high_priority = []
# low priority is a list of users with existing reviews in the db
low_priority = [first_user]
curs, conn = self.connect_to_database()
curs.execute("SELECT movie_id, primary_title, original_title, start_year FROM movies;")
fetched = curs.fetchall()
print("Got movie ids")
title_to_id = {
(row[1], row[3]): row[0]
for row in fetched
}
title_to_id.update({
(row[2], row[3]): row[0]
for row in fetched
})
curs.execute("SELECT COUNT(DISTINCT user_name) FROM movie_reviews;")
num_users = curs.fetchone()[0]
print(f"Found {num_users} distinct users")
step = 100_000
existing_users = set()
for ix in range(0, num_users+1, step):
curs.execute("SELECT DISTINCT user_name FROM movie_reviews LIMIT %s OFFSET %s;", (ix, step))
fetched = curs.fetchall()
existing_users.update(set(row[0] for row in fetched))
print(f"retrieved {len(existing_users)} existing users")
counter = 0
visited = set()
while high_priority or low_priority:
if counter >= self.max_iter_count:
print("Reached max iterations")
break
try:
curs, conn = self.connect_to_database()
if high_priority:
print(f"Getting user from high_priority out of {len(high_priority)} total users")
ix = randint(0, len(high_priority)-1)
username = high_priority.pop(ix)
elif low_priority:
print(f"Getting user from low_priority out of {len(low_priority)} total users")
ix = randint(0, len(low_priority)-1)
username = low_priority.pop(ix)
if username in visited:
continue
if username in existing_users:
query = """
SELECT movie_id
FROM movie_reviews
WHERE user_name=%s;
"""
curs.execute(query, [username])
existing_reviews = set(row[0] for row in curs.fetchall())
print(f"Got {len(existing_reviews)} existing reviews for user {username}")
else:
existing_reviews = set()
visited.add(username)
counter += 1
if counter % 50 == 0:
high_priority = list(set(high_priority))
low_priority = list(set(low_priority))
print(f"scraping user: {username}")
url = base_url + "/" + username + "/films/reviews/"
reviews = []
except Exception as e:
print(f"Exception initializing scraping for this page")
print(e)
continue
while url is not None:
try:
res = requests.get(url)
if res.status_code != 200:
print(f"Failed to scrape user: {url}")
continue
print(f"Scraping {url}")
soup = BeautifulSoup(res.text, "html.parser")
films = soup.find_all(class_="film-detail")
except Exception as e:
print("Error in requests or beautifulsoup")
print(e)
url = None
continue
for film in films:
try:
headline = film.find("h2")
title, year = headline.find_all("a")
movie_id = title_to_id.get((title.text, int(year.text)))
if not movie_id:
continue
if movie_id in existing_reviews:
continue
rating_el = film.find(class_="rating")
if not rating_el:
print("No rating")
rating = None
else:
rating = len(rating_el.text)
text_el = film.find(class_="body-text")
if not rating_el:
print("No review text")
text = None
else:
text = text_el.text
except Exception as e:
print(f"scrape_by_users(): unhandled exception getting rating or text")
print(e)
continue
try:
date_text = film.find(class_="date").text
date = date_text.strip("Watched ").strip("Rewatched ").strip("Added ")
if date:
dt = datetime.strptime(date, "%d %b, %Y")
else:
dt = pd.to_datetime(film.find(class_="date").find("time").get("datetime"))
except Exception as e:
print(f"scrape_by_users(): unhandled exception getting date: {date_text}")
print(e)
dt = None
reviews.append((
movie_id,
dt,
rating,
text,
username,
"letterboxd",
))
# end for
try:
next_button = soup.find("a", class_="next")
if next_button is not None:
url = base_url + next_button.get("href")
else:
url = None
time.sleep(1.5)
except Exception as e:
print(f"scrape_by_users(): unhandled exception getting next button")
print(e)
url = None
# end while
inner_counter = 0
while True:
try:
query = """
INSERT INTO movie_reviews (
movie_id,
review_date,
user_rating,
review_text,
user_name,
source
)
VALUES (%s, %s, %s, %s, %s, %s);
"""
execute_batch(curs, query, reviews)
conn.commit()
existing_users.add(username)
print(f"Inserted {len(reviews)} reviews in db\n")
break
except Exception as e:
if inner_counter < 3:
print("Error inserting rows, trying again")
curs, conn = self.connect_to_database()
inner_counter += 1
continue
else:
print("Giving up")
break
try:
# networking: add users that the current user is following
url = base_url + "/" + username + "/following"
while url is not None:
print(f"Getting network on page {url}")
res = requests.get(url)
if res.status_code != 200:
break
soup = BeautifulSoup(res.text, "html.parser")
for person in soup.find_all(class_="table-person"):
title = person.find(class_="title-3")
href = title.find("a").get("href")
username = href.strip("/")
if username in visited:
continue
if username in existing_users:
low_priority.append(username)
else:
high_priority.append(username)
next_button = soup.find(class_="next")
if not next_button or not next_button.get("href"):
print("Finish networking")
break
url = base_url + next_button.get("href")
time.sleep(1)
except Exception as e:
print("Failed networking")
print(e)
print("\n\n")
# end while
def letterboxd_dataframe(self, movie_id, review_id,
ratings, reviews, date, username):
"""
Used in scrape_letterboxd to make and return a dataframe.
"""
df | |
import os
import re
import subprocess
import random
import shutil
import logging
import time
from anchore import anchore_utils
from anchore.util import scripting, contexts
class Navigator(object):
_logger = logging.getLogger(__name__)
    def __init__(self, anchore_config, imagelist, allimages):
        """Build a navigator over a set of analyzed images.

        :param anchore_config: dict-like anchore configuration
        :param imagelist: image identifiers to operate on
        :param allimages: registry of all known image objects, keyed by id
        """
        self.config = anchore_config
        self.allimages = allimages
        #self.anchore_datadir = self.config['image_data_store']
        self.images = list()
        # Resolve the requested images against the docker/anchore contexts;
        # every image must already be analyzed and fully loadable.
        self.images = anchore_utils.image_context_add(imagelist, allimages, docker_cli=contexts['docker_cli'], tmproot=self.config['tmpdir'], anchore_db=contexts['anchore_db'], docker_images=contexts['docker_images'], must_be_analyzed=True, must_load_all=True)
        self.anchoreDB = contexts['anchore_db']
def add_images(self, imagelist):
newimages = anchore_utils.image_context_add(imagelist, self.allimages, docker_cli=contexts['docker_cli'], tmproot=self.config['tmpdir'], anchore_db=contexts['anchore_db'], docker_images=contexts['docker_images'], must_be_analyzed=True, must_load_all=True)
self.images = list(set(self.images) | set(newimages))
def generate_reports(self):
ret = {}
for imageId in self.images:
image = self.allimages[imageId]
baseId = image.get_earliest_base()
bimage = self.allimages[baseId]
sizebytes = image.meta['sizebytes']
image_report = image.get_image_report()
analysis_report = image.get_analysis_report()
gates_report = image.get_gates_report()
gates_eval_report = image.get_gates_eval_report()
record = {
'image_report': image_report,
'analysis_report': analysis_report,
'gates_report': gates_report,
'gates_eval_report': gates_eval_report,
'result': {
'header':['Image_Id', 'Type', 'Current_Tags', 'All_Tags', 'Gate_Status', 'Size(bytes)', 'Counts', 'Base_Diffs'],
'rows': list()
}
}
shortId = image.meta['shortId']
usertype = str(image.get_usertype())
currtags = ','.join(image.get_alltags_current())
alltags = ','.join(image.get_alltags_ever())
gateaction = 'UNKNOWN'
for g in gates_eval_report:
if g['trigger'] == 'FINAL':
gateaction = g['action']
break
try:
pnum = str(len(anchore_utils.load_analysis_output(image.meta['imageId'], 'package_list', 'pkgs.all').keys()))
except:
pnum = "N/A"
try:
fnum = str(len(anchore_utils.load_analysis_output(image.meta['imageId'], 'file_list', 'files.all').keys()))
except:
fnum = "N/A"
try:
snum = str(len(anchore_utils.load_analysis_output(image.meta['imageId'], 'file_suids', 'files.suids').keys()))
except:
fnum = "N/A"
analysis_str = ' '.join(["PKGS="+pnum, "FILES="+fnum, "SUIDFILES="+snum])
compare_str = "N/A"
if image.meta['imageId'] != baseId:
diffdata = anchore_utils.diff_images(image.meta['imageId'], baseId)
record['base_compare_data'] = diffdata
pnum = "N/A"
if 'package_list' in diffdata and 'pkgs.all' in diffdata['package_list']:
for module_type in diffdata['package_list']['pkgs.all']:
pnum = str(len(diffdata['package_list']['pkgs.all'][module_type]))
break
fnum = "N/A"
if 'file_list' in diffdata and 'files.all' in diffdata['file_list']:
for module_type in diffdata['file_list']['files.all']:
fnum = str(len(diffdata['file_list']['files.all'][module_type]))
snum = "N/A"
if 'file_suids' in diffdata and 'files.suids' in diffdata['file_suids']:
for module_type in diffdata['file_suids']['files.suids']:
snum = str(len(diffdata['file_suids']['files.suids'][module_type]))
compare_str = ' '.join(["PKGS="+pnum, "FILES="+fnum, "SUIDFILES="+snum])
row = [ shortId, usertype, currtags, alltags, gateaction, sizebytes, analysis_str, compare_str ]
record['result']['rows'].append(row)
ret[imageId] = record
return ret
def get_images(self):
return(self.images)
def get_dockerfile_contents(self):
result = {}
for imageId in self.images:
image = self.allimages[imageId]
#result[imageId] = image.get_dockerfile_contents()
record = {'result':{}}
record['result']['header'] = ['Image_Id', 'Mode', 'Dockerfile_Contents']
record['result']['rows'] = list()
(dbuf, mode) = image.get_dockerfile_contents()
record['result']['rows'].append([image.meta['shortId'], mode, dbuf])
result[imageId] = record
return(result)
def get_familytree(self):
result = {}
for imageId in self.images:
image = self.allimages[imageId]
record = {'result':{}}
record['result']['header'] = ['Image_Id', 'Current_Repo_Tags', 'Past_Repo_Tags', 'Image_Type']
record['result']['rows'] = list()
for fid in image.get_familytree():
fimage = self.allimages[fid]
fidstr = fimage.meta['shortId']
curr_tags = ','.join(fimage.get_alltags_current())
past_tags = ','.join(fimage.get_alltags_past())
usertype = fimage.get_usertype()
if usertype == "anchorebase":
userstr = "Anchore Base Image"
elif usertype == "base":
userstr = "Base"
elif usertype == "oldanchorebase":
userstr = "Previous Anchore Base Image"
elif usertype == "user":
userstr = "User Image"
else:
userstr = "Intermediate"
record['result']['rows'].append([fidstr, curr_tags, past_tags, userstr])
result[imageId] = record
return(result)
def get_layers(self):
result = {}
for imageId in self.images:
image = self.allimages[imageId]
record = {'result':{}}
record['result']['header'] = ['Layer_Id']
record['result']['rows'] = list()
for fid in image.get_layers() + [imageId]:
record['result']['rows'].append([fid])
result[imageId] = record
return(result)
def get_taghistory(self):
result = {}
for imageId in self.images:
image = self.allimages[imageId]
record = {'result':{}}
record['result']['header'] = ['Image_Id', 'Date', 'Known_Tags']
record['result']['rows'] = list()
for tagtup in image.get_tag_history():
tstr = time.ctime(int(tagtup[0]))
tagstr = ','.join(tagtup[1])
record['result']['rows'].append([image.meta['shortId'], tstr, tagstr])
try:
currtags = ','.join(image.get_alltags_current())
record['result']['rows'].append([image.meta['shortId'], time.ctime(), currtags])
except:
pass
result[imageId] = record
return(result)
def unpack(self, destdir='/tmp'):
ret = {}
for imageId in self.images:
image = self.allimages[imageId]
outdir = image.unpack(docleanup=False, destdir=destdir)
if not outdir:
self._logger.warn("failed to unpack image ("+str(imageId)+")")
else:
self._logger.debug("Unpacked image " + image.meta['shortId'] + " in " + outdir)
ret[imageId] = outdir
return(ret)
def run(self):
return(True)
    def find_query_command(self, action):
        """Locate the executable script backing a query name.

        Looks in the per-image 'queries' script directories first and falls
        back to 'multi-queries'. User and extra script directories override
        the stock scripts directory. Returns (rc, mode, cmdobj) where mode
        is 'query' or 'multi-query'. ValueError from the script loader is
        propagated unchanged.
        """
        cmdobj = None
        mode = None
        cmd = None
        rc = False
        try:
            # Per-image queries: user/extra dirs take precedence.
            path_overrides = ['/'.join([self.config['user_scripts_dir'], 'queries'])]
            if self.config['extra_scripts_dir']:
                path_overrides = path_overrides + ['/'.join([self.config['extra_scripts_dir'], 'queries'])]
            cmdobj = scripting.ScriptExecutor(path='/'.join([self.config['scripts_dir'], 'queries']), script_name=action, path_overrides=path_overrides)
            cmd = cmdobj.thecmd
            mode = 'query'
            rc = True
        except ValueError as err:
            raise err
        except Exception as err:
            errstr = str(err)
            # Not found as a per-image query; retry as a multi-image query.
            try:
                path_overrides = ['/'.join([self.config['user_scripts_dir'], 'multi-queries'])]
                if self.config['extra_scripts_dir']:
                    path_overrides = path_overrides + ['/'.join([self.config['extra_scripts_dir'], 'multi-queries'])]
                cmdobj = scripting.ScriptExecutor(path='/'.join([self.config['scripts_dir'], 'multi-queries']), script_name=action, path_overrides=path_overrides)
                cmd = cmdobj.thecmd
                mode = 'multi-query'
                rc = True
            except ValueError as err:
                raise err
            except Exception as err:
                raise err
        ret = (rc, mode, cmdobj)
        return(ret)
    def execute_query(self, imglist, se, params):
        """Run a prepared query script against *imglist* and parse its output.

        Writes the image list to a temp file, invokes the ScriptExecutor
        *se* with "<imgfile> <datadir> <outputdir> [params...]", then reads
        the produced key/value table (header + rows) and any *.WARNS file.
        Temp artifacts are always cleaned up. Returns [success, cmd, meta];
        raises if the query itself fails to run or exits non-zero.
        """
        success = True
        datadir = self.config['image_data_store']
        # Unique scratch locations for this invocation.
        outputdir = '/'.join([self.config['anchore_data_dir'], "querytmp", "query." + str(random.randint(0, 99999999))])
        if not os.path.exists(outputdir):
            os.makedirs(outputdir)
        imgfile = '/'.join([self.config['anchore_data_dir'], "querytmp", "queryimages." + str(random.randint(0, 99999999))])
        anchore_utils.write_plainfile_fromlist(imgfile, imglist)
        cmdline = ' '.join([imgfile, datadir, outputdir])
        if params:
            cmdline = cmdline + ' ' + ' '.join(params)
        meta = {}
        try:
            (cmd, rc, sout) = se.execute(capture_output=True, cmdline=cmdline)
            if rc:
                self._logger.error("Query command ran but execution failed" )
                self._logger.error("Query command: (" + ' '.join([se.thecmd, cmdline])+")")
                self._logger.error("Query output: (" + str(sout) + ")")
                self._logger.error("Exit code: (" + str(rc)+")")
                raise Exception("Query ran but exited non-zero.")
        except Exception as err:
            raise Exception("Query execution failed: " + str(err))
        else:
            # Only parse output when the query itself succeeded.
            try:
                #outputs = os.listdir(outputdir)
                warnfile = False
                found = False
                for f in os.listdir(outputdir):
                    if re.match(".*\.WARNS", f):
                        warnfile = '/'.join([outputdir, f])
                    else:
                        # Last non-WARNS file wins as the result table.
                        ofile = '/'.join([outputdir, f])
                        found=True
                if not found:
                    raise Exception("No output files found after executing query command\n\tCommand Output:\n"+sout+"\n\tInfo: Query command should have produced an output file in: " + outputdir)
                orows = list()
                try:
                    # First row is the header; every data row must match its width.
                    frows = anchore_utils.read_kvfile_tolist(ofile)
                    header = frows[0]
                    rowlen = len(header)
                    for row in frows[1:]:
                        if len(row) != rowlen:
                            raise Exception("Number of columns in data row ("+str(len(row))+") is not equal to number of columns in header ("+str(rowlen)+")\n\tHeader: "+str(header)+"\n\tOffending Row: "+str(row))
                        orows.append(row)
                except Exception as err:
                    raise err
                if warnfile:
                    try:
                        meta['warns'] = anchore_utils.read_plainfile_tolist(warnfile)
                    except:
                        pass
                meta['queryparams'] = ','.join(params)
                meta['querycommand'] = cmd
                try:
                    # Remember which column holds URLs, if any, for renderers.
                    i = header.index('URL')
                    meta['url_column_index'] = i
                except:
                    pass
                meta['result'] = {}
                meta['result']['header'] = header
                meta['result']['rowcount'] = len(orows)
                try:
                    #meta['result']['colcount'] = len(orows[0])
                    meta['result']['colcount'] = len(header)
                except:
                    meta['result']['colcount'] = 0
                meta['result']['rows'] = orows
            except Exception as err:
                self._logger.error("Query output handling failed: ")
                self._logger.error("\tCommand: " + str(cmd))
                self._logger.error("\tException: " + str(err))
                success = False
        finally:
            # Always remove the scratch image list and output directory.
            if imgfile and os.path.exists(imgfile):
                os.remove(imgfile)
            if outputdir and os.path.exists(outputdir):
                shutil.rmtree(outputdir)
        ret = [success, cmd, meta]
        return(ret)
def format_query_manifest_record(self, command, status, returncode, timestamp, qtype, output, csum):
ret = {
"status": status,
"returncode": returncode,
"timestamp": timestamp,
"type": qtype,
"command": command,
"output": output,
"csum": csum
}
return(ret)
    def list_query_commands(self, command=None):
        """List available query commands with their help text.

        With *command*, refreshes help for that one query; otherwise scans
        the stock/user/extra 'queries' and 'multi-queries' directories. Help
        output is cached in the query manifest keyed by script path and only
        re-run when the script checksum changes. Returns
        {'list_query_commands': record} with (Query, Help_String) rows.
        """
        result = {}
        record = {'result':{}}
        record['result']['header'] = ["Query", "Help_String"]
        record['result']['rows'] = list()
        query_manifest = self.anchoreDB.load_query_manifest()
        if command:
            try:
                (rc, command_type, se) = self.find_query_command(command)
                if rc:
                    commandpath = se.get_script()
                    csum = se.csum()
                    if commandpath in query_manifest and csum != "N/A" and csum == query_manifest[commandpath]['csum']:
                        # Cached help is still valid for this checksum.
                        self._logger.debug("skipping query help refresh run ("+str(command)+"): no change in query")
                        cmd = query_manifest[commandpath]['command']
                        rc = query_manifest[commandpath]['returncode']
                        sout = query_manifest[commandpath]['output']
                    else:
                        (cmd, rc, sout) = se.execute(capture_output=True, cmdline='help')
                    if rc == 0:
                        query_manifest[commandpath] = self.format_query_manifest_record(cmd, "SUCCESS", rc, time.time(), "N/A", sout, csum)
                        record['result']['rows'].append([command, sout])
                    else:
                        query_manifest[commandpath] = self.format_query_manifest_record(cmd, "FAIL", rc, time.time(), "N/A", "N/A", csum)
            except Exception as err:
                self._logger.debug("cannot execute ("+str(command)+"): skipping in list")
        else:
            # Scan every known script directory for candidate queries.
            paths = list()
            paths.append('/'.join([self.config['scripts_dir'], "queries"]))
            paths.append('/'.join([self.config['scripts_dir'], "multi-queries"]))
            if self.config['user_scripts_dir']:
                paths.append('/'.join([self.config['user_scripts_dir'], 'queries']))
                paths.append('/'.join([self.config['user_scripts_dir'], 'multi-queries']))
            if self.config['extra_scripts_dir']:
                paths.append('/'.join([self.config['extra_scripts_dir'], 'queries']))
                paths.append('/'.join([self.config['extra_scripts_dir'], 'multi-queries']))
            for dd in paths:
                if not os.path.exists(dd):
                    continue
                for d in os.listdir(dd):
                    # Query name = filename minus .py/.sh suffix.
                    command = re.sub("(\.py|\.sh)$", "", d)
                    commandpath = os.path.join(dd, d)
                    if re.match(".*\.pyc$", d):
                        continue
                    try:
                        (rc, command_type, se) = self.find_query_command(command)
                        if rc:
                            commandpath = se.get_script()
                            csum = se.csum()
                            if commandpath in query_manifest and csum != "N/A" and csum == query_manifest[commandpath]['csum']:
                                # Cached help is still valid for this checksum.
                                self._logger.debug("skipping query help refresh run ("+str(command)+"): no change in query")
                                cmd = query_manifest[commandpath]['command']
                                rc = query_manifest[commandpath]['returncode']
                                sout = query_manifest[commandpath]['output']
                            else:
                                (cmd, rc, sout) = se.execute(capture_output=True, cmdline='help')
                            if rc == 0:
                                query_manifest[commandpath] = self.format_query_manifest_record(cmd, "SUCCESS", rc, time.time(), command_type, sout, csum)
                                record['result']['rows'].append([command, sout])
                            else:
                                query_manifest[commandpath] = self.format_query_manifest_record(cmd, "FAIL", rc, time.time(), command_type, "N/A", csum)
                    except Exception as err:
                        self._logger.debug("cannot execute ("+str(command)+"): skipping in list")
        self.anchoreDB.save_query_manifest(query_manifest)
        result['list_query_commands'] = record
        return(result)
def check_for_warnings(self, result):
for k in result.keys():
if 'warns' in result[k]:
return(True)
return(False)
def run_query(self, query):
result = {}
if len(query) == 0:
return(self.list_query_commands())
elif len(query) == 1:
action = query[0]
params = ['help']
else:
action = query[0]
params = query[1:]
if re.match(r".*(\.\.|~|/).*", action) or re.match(r"^\..*", action):
self._logger.error("invalid query string (bad characters in string)")
return(False)
try:
(rc, command_type, se) = self.find_query_command(action)
except Exception as err:
raise err
if not rc:
| |
<filename>src/sparse_alignments/evaluation.py
import argparse
import os
import numpy as np
import pickle
import json
import sys
sys.path.append('../')
import src.utils as utils
from scipy.stats import hypergeom
import scipy.sparse as sp
from scipy.spatial.distance import cosine
from collections import defaultdict, OrderedDict
from sklearn.preprocessing import normalize
class Evaluator(object):
def __init__(self, alignment_path, out_path, dense):
self.dense = True
self.out_path = out_path
self.index_name, self.sparse_name, self.concept_name, self.full_name, self.thd, self.binary, self.random = self.load_names(alignment_path)
self.i2c, self.c2i, self.i2w, self.w2i, self.word_concept_dict = self.load_indexing()
self.E, self.C, self.alignment = self.load_files(alignment_path)
def load_names(self, alingment_path):
path, base = os.path.split(alingment_path)
path = path.replace("\\", "/")
random = False
if alingment_path.find("random1") != -1:
random = True
print(path, base)
sparse_name = (path.strip().split("/"))[-2]
index_name = (sparse_name.strip().split(".npz"))[0]
concept_name = (path.strip().split("/"))[-1]
full_name = (os.path.basename(alingment_path).strip().split("max_concepts_of_base_"))[-1]
full_name = (full_name.split(".p"))[0]
binary = True
if full_name.find("binary") != -1:
binary = True
thd = (concept_name.split("_t"))[-1]
print(index_name, sparse_name, concept_name, full_name, thd, binary, random)
return index_name, sparse_name, concept_name, full_name, thd, binary, random
def random2str(self):
text = ""
if self.random:
text = "random1/"
return text
def load_indexing(self):
i2c = pickle.load(open(('../data/indexing/concept/' + self.concept_name + "_i2c.p"), 'rb'))
c2i = pickle.load(open('../data/indexing/concept/' + self.concept_name + "_c2i.p", 'rb'))
i2w = pickle.load(open('../data/indexing/words/embeddings/' + self.index_name + "_i2w.p", 'rb'))
w2i = pickle.load(open('../data/indexing/words/embeddings/' + self.index_name + "_w2i.p", 'rb'))
word_concept_dict = pickle.load(open(('../data/word_concept_dict/' + self.concept_name + "_word_concept_dict.p"), 'rb'))
return i2c, c2i, i2w, w2i, word_concept_dict
    def load_files(self, alignment_path):
        """Load the word/base matrix E, word/concept matrix C and the alignment.

        E comes from a dense pickle or a sparse .npz depending on self.dense;
        both matrices are L2-row-normalized (rows become unit vectors, so dot
        products are cosine similarities) and returned in CSC form for fast
        column slicing. Returns (E, C, alignment).
        """
        if self.dense:
            sparse_path = "../data/sparse_matrices/word_base/embeddings/filtered/" + self.random2str() + self.sparse_name + ".p"
            E = pickle.load(open(sparse_path, 'rb'))
        else:
            if alignment_path.find("random") != -1:
                sparse_path = "../data/sparse_matrices/word_base/" + self.random2str() + self.sparse_name + ".npz"
            # NOTE(review): the assignment below unconditionally overwrites
            # the 'random' branch above, so that branch is dead code —
            # confirm whether an 'else' was intended here.
            sparse_path = "../data/sparse_matrices/word_base/embeddings/filtered/" + self.random2str() + self.sparse_name + ".npz"
            E = sp.load_npz(sparse_path)
        print("alignment path: ", alignment_path)
        alignment = pickle.load(open(alignment_path, 'rb'), encoding='utf-8')
        # The concept matrix lives under the non-randomized sparse name.
        no_random_sparse_name = (self.sparse_name.strip().split(".npz_random"))[0]
        concept_path = os.path.join("../data/sparse_matrices/word_concept/", no_random_sparse_name,
                                    self.concept_name,
                                    "word_concept_mtx.npz")
        C = sp.load_npz(concept_path)
        E = normalize(E, norm='l2', axis=1)
        E = sp.csc_matrix(E)
        C = normalize(C, norm='l2', axis=1)
        C = sp.csc_matrix(C)
        return E, C, alignment
def arange_concepts(self, concepts):
if type(concepts) != type(list()):
tmp = concepts
concepts = set()
if tmp != "NONE":
concepts.add(tmp)
return set(concepts)
    def mrr(self):
        """Mean reciprocal rank (and mean rank) over bases with a gold concept.

        For each base (column of E) the positively-weighted words are ranked
        by weight; the first word sharing a concept with the alignment fixes
        the reciprocal rank. Bases with no relevant word score 0 with rank =
        vocabulary size. Per-base (rr, rank) pairs are also pickled under
        ../results/evaluation/cummulated/. Returns (mean RR, mean rank).
        """
        out_rr_list = []
        rr_list = []
        r_list = []
        denom = 0.0
        for i in range(self.E.shape[1]):
            concepts = self.alignment[i]
            concepts = self.arange_concepts(concepts)
            if len(concepts) > 0:
                denom += 1.0
                # Column i of E as a flat array of word weights.
                col = (self.E.getcol(i).toarray().T)[0, :]
                sorted_col = sorted(enumerate(col), key=lambda e: float(e[1]), reverse=True)
                # 1-based ranks of words with positive weight.
                words_rank = [(self.i2w[original_id], rank) for rank, (original_id, value) in
                              enumerate(sorted_col, 1) if value > 0]
                # Keep only words that share at least one gold concept.
                words_rank_cn = [(word, rank) for (word, rank) in words_rank if
                                 len(concepts.intersection(self.word_concept_dict[word])) > 0]
                # assert len(words_rank_cn) != 0
                # assert len(words_rank) != 0
                if len(words_rank_cn)!=0:
                    sorted_relevant = sorted(words_rank_cn, key=lambda e: e[1])
                    first_rr= float(1 / sorted_relevant[0][1])
                    rr_list.append(first_rr)
                    r_list.append(sorted_relevant[0][1])
                    out_rr_list.append((first_rr, sorted_relevant[0][1]))
                else:
                    rr_list.append(0.0)
                    r_list.append(self.E.shape[0])
                    out_rr_list.append((0.0, self.E.shape[0]))
            else:
                out_rr_list.append((0.0, self.E.shape[0]))
        utils.pickler("../results/evaluation/cummulated/" + self.sparse_name + "_mrr_r_cummulated.p", out_rr_list)
        return sum(rr_list)/denom, sum(r_list)/denom
    def map(self, k=50):
        """Mean average precision at *k* over bases with a gold concept.

        Per-base AP values are pickled under ../results/evaluation/cummulated/.
        NOTE: the method name shadows the builtin ``map``; kept for callers.
        """
        ap_list = []
        out_ap_list = []
        denom = 0.0
        for i in range(self.E.shape[1]):
            concepts = self.alignment[i]
            concepts = self.arange_concepts(concepts)
            if len(concepts) > 0:
                denom += 1.0
                col = (self.E.getcol(i).toarray().T)[0, :]
                sorted_col = sorted(enumerate(col), key=lambda e: float(e[1]), reverse=True)
                # Words with positive weight, ordered by descending weight.
                nonzero = [(self.i2w[original_id]) for original_id, value in
                           sorted_col if value > 0]
                relevant = []
                # NOTE(review): when len(nonzero) < k every position is marked
                # irrelevant, and nonzero[j] is never guarded individually —
                # confirm the intended handling of short columns.
                for j in range(k): #range(len(nonzero)):
                    if len(nonzero) >= k and len(concepts.intersection(self.word_concept_dict[nonzero[j]])) > 0: #concepts in self.word_concept_dict[nonzero[j]]:
                        relevant.append(1)
                    else:
                        relevant.append(0)
                avg_list = []
                for j in range(k): #range(len(nonzero)):
                    # NOTE(review): relevant[0:j] excludes position j itself;
                    # precision@j usually uses relevant[0:j+1] — confirm intent.
                    avg_list.append(np.sum(relevant[0:j])/(j+1))
                ap_list.append(np.sum(avg_list)/len(avg_list))
                out_ap_list.append(np.sum(avg_list) / len(avg_list))
            else:
                out_ap_list.append(0.0)
        utils.pickler("../results/evaluation/cummulated/" + self.sparse_name + "_map_cummulated.p", out_ap_list)
        return np.sum(ap_list)/denom
    def mp(self, k=50):
        """Mean precision@k over bases with a gold concept.

        For each base the top-k positively-weighted words are checked for
        concept overlap; precision is relevant/retrieved. Per-base values
        are pickled under ../results/evaluation/cummulated/.
        """
        p_list = []
        out_p_list = []
        denom = 0.0
        for i in range(self.E.shape[1]):
            concepts = self.alignment[i]
            concepts = self.arange_concepts(concepts)
            if len(concepts) > 0:
                denom += 1.0
                col = enumerate( (self.E.getcol(i).toarray().T)[0, :] )
                # Top-k words by weight among the positive entries.
                nonzero = [(ind, val) for ind, val in col if val > 0]
                nonzero = sorted(nonzero, reverse=True, key=lambda e: float(e[1]))
                nonzero = nonzero[0:k]
                all = [self.i2w[original_index] for (original_index, value) in nonzero]
                relevant = [word for word in all if
                            len(concepts.intersection(self.word_concept_dict[word])) > 0] #concepts in self.word_concept_dict[word]]
                # assert len(relevant)!= 0
                # assert len(all) != 0
                if len(all) != 0:
                    p_list.append(len(relevant)/len(all))
                    out_p_list.append(len(relevant)/len(all))
                else:
                    p_list.append(0.0)
                    out_p_list.append(0.0)
            else:
                out_p_list.append(0.0)
        utils.pickler("../results/evaluation/cummulated/" + self.sparse_name + "_mp_cummulated.p", out_p_list)
        return np.sum(p_list)/denom
def takeSecond(self, elem):
return elem[1]
    def distance_ratio(self, k=5): # from rotated word embeddings paper
        """Inter/intra distance ratio over all bases.

        For each base, the top-k positively-weighted words feed the intra-
        and inter-distance accumulators; the ratio of the summed inter to
        summed intra distances is returned.
        """
        intra = 0.0
        inter = 0.0
        for i in range(self.E.shape[1]):
            concept = self.alignment[i]
            col = enumerate( (self.E.getcol(i).toarray().T)[0, :])
            nonzero = [(original_index, value) for (original_index, value) in col if value>0]
            sorted_nonzero = sorted(nonzero, reverse=True, key=self.takeSecond)
            # Never ask for more words than the column actually has.
            k2 = np.min([k, len(sorted_nonzero)])
            top_k_words = [index for (index, value) in sorted_nonzero[0:k2]]
            intra += self.intra_distance_ratio(top_k_words)
            inter += self.inter_distance_ratio(top_k_words, concept)
        print("inter: ", inter)
        print("intra: ", intra)
        overall_distance_ratio = float(inter)/float(intra) #(1.0/float(self.E.shape[1])) * (float(inter)/float(intra))
        return overall_distance_ratio
    def intra_distance_ratio(self, words):
        """Average pairwise cosine distance among the given word rows of E.

        Returns 0.0 for fewer than two words. The double loop visits every
        ordered pair (including i==j, which contributes 0), hence the
        k*(k-1) normalizer.
        """
        k = float(len(words))
        if k <= 1.0:
            return 0.0
        word_vectors = [self.E.getrow(ind).todense() for ind in words]
        nom = 0
        for i in range(len(word_vectors)):
            for j in range(len(word_vectors)):
                nom += cosine(word_vectors[i], word_vectors[j])
        ret = nom/(k*(k-1))
        return ret
    def avg_concept_vector(self, concept_words):
        """Mean of the E rows for the given word indices (shape (1, n_cols))."""
        word_vectors = [self.E.getrow(ind).todense() for ind in concept_words]
        nom = np.zeros((1,self.E.shape[1]))
        denom = len(word_vectors)
        for i in range(len(word_vectors)):
            nom += word_vectors[i]
        return nom/float(denom)
def inter_distance_ratio(self, words, concept):
k = float(len(words))
word_vectors = [self.E.getrow(ind).todense() for ind in words]
concept_ind = self.w2i.get(concept, None)
# if concept_ind != None:
# concept_vector = self.E.getrow(concept_ind).todense()
# else:
concept_ind = (self.c2i[concept])
col = enumerate( (self.C.getcol(concept_ind).toarray().T)[0, :])
nonzero = [(original_index, value) for (original_index, value) in col if value > 0.0]
sorted_nonzero = sorted(nonzero, reverse=True, key=self.takeSecond)
concept_words = [ind for (ind, value) in sorted_nonzero]
cw = [self.i2w[ind] for ind in concept_words]
# print("concept: ", concept, " words: ", cw)
assert len(concept_words) != 0
# avg_concept_vector = self.avg_concept_vector(concept_words)# self.intra_distance_ratio(concept_words)
ret = self.intra_distance_ratio(concept_words)
# nom = 0.0
# for i in range(len(word_vectors)):
# nom += cosine(word_vectors[i], avg_concept_vector)
# ret = nom/k
return ret
def avg_pairwise_product(self, values):
sum = 0.0
for i in values:
for j in values:
sum += i*j
return sum/(2*len(values))
def list_product(self, values):
return np.prod(values)
    def intersection_ranking(self):
        """
        Rank each aligned base against all other bases by how strongly it
        activates its own concept's intersection words.

        For every base i whose alignment is not "NONE": collect the entries
        that are positive AND whose word belongs to the aligned concept
        (the "intersection"), score them with avg_pairwise_product, then score
        the same word positions in every other column and rank the focus
        base's score among all of them (rank 1 = highest).

        :return: (mean_rank, mean_reciprocal_rank) over all aligned bases
        """
        reciprocal_ranks = []
        ranks = []
        for i in range(self.E.shape[1]):
            values_to_rank = []
            concept = self.alignment[i]
            if concept != "NONE":
                col = enumerate( (self.E.getcol(i).toarray().T)[0, :])
                intersection_pairs = [(original_index, value) for (original_index, value) in col
                                      if (value > 0 and concept in self.word_concept_dict[self.i2w[original_index]])]
                intersection_ind = [ind for (ind, value) in intersection_pairs]
                intersection_value = [value for (ind, value) in intersection_pairs]
                # score of the focus base itself; appended first, looked up by
                # equality after sorting (see values_to_rank.index below)
                focus_value = self.avg_pairwise_product(intersection_value)
                print("base: ", i, "\tintersection size: ", len(intersection_ind), "\tvalue: ", focus_value, end="")
                values_to_rank.append(focus_value)
                for j in range(self.E.shape[1]):
                    comaparsion_col = enumerate((self.E.getcol(j).toarray().T)[0, :])
                    # NOTE(review): membership test against a list is O(n) per
                    # entry; a set would be faster — left as-is to preserve order.
                    comparison_values = [value for (ind, value) in comaparsion_col if ind in intersection_ind]
                    assert len(intersection_ind) == len(comparison_values)
                    value = self.avg_pairwise_product(comparison_values)
                    values_to_rank.append(value)
                # j == i contributes a duplicate of focus_value; index() finds
                # the first (best) occurrence, so the rank is still correct
                values_to_rank = sorted(values_to_rank, reverse=True)
                rank = values_to_rank.index(focus_value) + 1 # indexing from 0
                print("\trank: ", rank)
                reciprocal_ranks.append((1.0/rank))
                ranks.append(rank)
        mean_reciprocal_rank = np.mean(reciprocal_ranks)
        mean_rank = np.mean(ranks)
        return mean_rank, mean_reciprocal_rank
def get_test_ind(self, concepts, test_size):
test_inds = set()
for concept in concepts:
tmp_set = set(test_size[self.c2i[concept]][1])
test_inds = test_inds.union(tmp_set)
return test_inds
    def test_set_evaluation_by_base(self, k=5):
        """
        Evaluate, per base (column of self.E), how many held-out test words of
        the base's aligned concepts appear among the base's nonzero entries.

        Two accuracies per base: ``acc`` over all nonzero entries, and
        ``k_acc`` restricted to the k*len(test) candidates ranked by their
        position in the word-frequency list after removing known training
        words. Per-base (acc, k_acc) pairs are pickled; averages returned.

        :param k: multiplier for the size of the restricted candidate pool
        :return: (avg_acc, avg_k_acc) averaged over bases with >= 1 concept
        """
        # Derive the frequency-list filename from the sparse matrix name;
        # "animals" datasets carry an extra prefix that must be stripped.
        if self.sparse_name.find("animals") != -1:
            sparse_name = (self.sparse_name.strip().split("emb"))[0]
            sparse_name = (sparse_name.split("animals_"))[-1]
            frequency_name = os.path.join("../data/word_frequency",
                                          (sparse_name + "emb.gz_frequency.p"))
        else:
            frequency_name = os.path.join("../data/word_frequency",
                                          ((self.sparse_name.strip().split("emb"))[0] + "emb.gz_frequency.p"))
        frequency = pickle.load(open(frequency_name, "rb"))
        test_set_name = os.path.join("../data/sparse_matrices/word_concept/splitted/test_size/", self.sparse_name, self.concept_name, "test_size.npz")
        test_size = pickle.load(open(test_set_name, "rb"))
        avg_acc = 0.0
        avg_k_acc = 0.0
        # NOTE(review): ``sum`` shadows the builtin; it counts evaluated bases.
        sum = 0.0
        out_acc = []
        for i in range(self.E.shape[1]):
            concepts = self.alignment[i]
            concepts = self.arange_concepts(concepts)
            if len(concepts) > 0:
                test_inds = self.get_test_ind(concepts, test_size)
                col = enumerate( (self.E.getcol(i).toarray().T)[0, :])
                nonzero = {(ind,value) for (ind, value) in col if value > 0}
                # entries whose word belongs to at least one aligned concept
                words_connected_to_concept = {(ind, value) for (ind, value) in nonzero
                                              if len(concepts.intersection(self.word_concept_dict[self.i2w[ind]])) > 0}
                # concept words NOT held out — these are "known" and excluded
                # from the restricted candidate pool below
                train = {(ind, value) for (ind, value) in words_connected_to_concept if ind not in test_inds}
                # print(words_connected_to_concept)
                # print(train, "\n")
                k_nonzero = nonzero.difference(train)
                # k_nonzero = [(ind, value) for (ind, value) in nonzero if ind not in train]
                # rank remaining candidates by position in the frequency list
                # (ascending) — presumably most-frequent-first; TODO confirm
                k_nonzero = sorted(k_nonzero, key=lambda t: float(frequency.index(self.i2w[t[0]])), reverse=False)
                set_k_nonzero = set([ind for (ind, val) in k_nonzero][0: (k*len(test_inds))])
                set_nonzero = {ind for ind, val in nonzero}
                set_test = set(test_inds)
                intersection = set_test.intersection(set_nonzero)
                k_intersection = set_test.intersection(set_k_nonzero)
                acc = 0.0
                k_acc = 0.0
                if len(test_inds) != 0:
                    acc = len(intersection)/len(set_test)
                    k_acc = len(k_intersection)/len(set_test)
                avg_acc += acc
                avg_k_acc += k_acc
                sum += 1.0
                out_acc.append((acc, k_acc))
            else:
                # base has no aligned concepts — recorded but not averaged
                out_acc.append((0.0, 0.0))
        utils.pickler("../results/evaluation/cummulated/" + self.sparse_name + "_tsa_base_k_acc_cummulated.p", out_acc)
        # NOTE(review): ZeroDivisionError if no base had any concept.
        avg_acc = avg_acc/sum
        avg_k_acc = avg_k_acc/sum
        return avg_acc, avg_k_acc
def test_set_evaluation_by_concept(self, k=5):
test_set_name = os.path.join("../data/sparse_matrices/word_concept/splitted/test_size/", self.sparse_name,
self.concept_name, "test_size.npz")
test_size = pickle.load(open(test_set_name, "rb"))
avg_acc = 0.0
avg_k_acc = 0
denom = 0.0
out_acc = []
aligned_concepts = | |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .brokerage_base import BrokerageBase
from ..event.event import LogEvent
from ..account import AccountEvent
from ..data import TickEvent, TickType, BarEvent
from ..order.order_type import OrderType
from ..order.fill_event import FillEvent
from ..order.order_event import OrderEvent
from ..order.order_status import OrderStatus
from ..position.position_event import PositionEvent
from datetime import datetime
from copy import copy
from threading import Thread
import logging
from ibapi.wrapper import EWrapper
from ibapi.client import EClient
from ibapi import utils
# types
from ibapi.common import * # @UnusedWildImport
from ibapi.order_condition import * # @UnusedWildImport
from ibapi.contract import * # @UnusedWildImport
from ibapi.order import * # @UnusedWildImport
from ibapi.order_state import * # @UnusedWildImport
from ibapi.execution import Execution
from ibapi.execution import ExecutionFilter
from ibapi.commission_report import CommissionReport
from ibapi.ticktype import TickTypeEnum
from ibapi.tag_value import TagValue
from ibapi.account_summary_tags import *
_logger = logging.getLogger(__name__)
class InteractiveBrokers(BrokerageBase):
    def __init__(self, msg_event_engine, tick_event_engine, account: str):
        """
        Initialize InteractiveBrokers brokerage.
        Currently, the client is strongly coupled to broker without an incoming queue,
        e.g. client calls broker.place_order to place order directly.
        :param msg_event_engine: used to broadcast messages the broker generates back to client
        :param tick_event_engine: used to broadcast market data back to client
        :param account: the IB account
        """
        self.event_engine = msg_event_engine  # save events to event queue
        self.tick_event_engine = tick_event_engine  # market data queue
        self.api = IBApi(self)  # EWrapper/EClient implementation (defined elsewhere in this module)
        self.account = account
        # --- contract bookkeeping ---
        self.contract_detail_request_contract_dict = {}  # reqid ==> contract
        self.contract_detail_request_symbol_dict = {}  # reqid ==> symbol
        self.sym_contract_dict = {}  # sym ==> contract
        self.contract_symbol_dict = {}  # conId ==> symbol
        # --- L1 market data subscriptions ---
        self.market_data_subscription_dict = {}  # reqId ==> sym
        self.market_data_subscription_reverse_dict = {}  # sym ==> reqId
        self.market_data_tick_dict = {}  # reqid ==> tick_event; to combine tickprice and ticksize
        # --- L2 market depth subscriptions (same layout as the L1 dicts) ---
        self.market_depth_subscription_dict = {}
        self.market_depth_subscription_reverse_dict = {}
        self.market_depth_tick_dict = {}  # to combine tickprice and ticksize
        self.hist_data_request_dict = {}  # reqid ==> symbol of a pending historical request
        self.order_dict = {}  # order id ==> order_event
        self.account_summary_reqid = -1  # -1 means no account-summary subscription is active
        self.account_summary = AccountEvent()
        self.account_summary.brokerage = 'IB'
        self.clientid = 0  # set for real in connect()
        self.reqid = 0  # next/available reqid
        self.orderid = 0  # next/available orderid
    def connect(self, host='127.0.0.1', port=7497, clientId=0):
        """
        Connect to IB. Request open orders under clientid upon successful connection.
        :param host: host address
        :param port: socket port
        :param clientId: client id
        """
        # the client id is recorded even when already connected
        self.clientid = clientId
        if self.api.connected:
            return
        self.api.connect(host, port, clientId=clientId)
        self.api.thread.start()  # start the API's message-processing thread
        self.reqCurrentTime()  # initial round-trip to the server
        if clientId == 0:
            # associate TWS with the client
            self.api.reqAutoOpenOrders(True)
    def disconnect(self):
        """
        Disconnect from IB.

        Clears the connected flag, nulls the underlying socket, then calls the
        API's disconnect and logs the resulting connection state.
        """
        if not self.api.isConnected():
            return
        self.api.connected = False
        # self.api.conn.disconnect()
        # NOTE(review): the socket is nulled directly instead of calling
        # conn.disconnect() (commented line above) — presumably a deliberate
        # workaround; confirm before changing.
        self.api.conn.socket = None
        self.api.disconnect()
        _logger.info(f'connected {self.api.isConnected()}')
    def _calculate_commission(self, full_symbol, fill_price, fill_size):
        """Placeholder for commission calculation; not implemented (returns None)."""
        pass
def next_order_id(self):
"""
Return next available order id
:return: next order id available for next orders
"""
return self.orderid
def place_order(self, order_event):
"""
Place order to IB
:param order_event: client order to be placed
:return: no return. An order event is pushed to message queue with order status Acknowledged
"""
if not self.api.connected:
return
ib_contract = InteractiveBrokers.symbol_to_contract(order_event.full_symbol)
if not ib_contract:
_logger.error(f'Failed to find contract to place order {order_event.full_symbol}')
return
ib_order = InteractiveBrokers.order_to_ib_order(order_event)
if not ib_order:
_logger.error(f'Failed to create order to place {order_event.full_symbol}')
return
ib_order.eTradeOnly = False # The EtradeOnly IBApi.Order attribute is no longer supported. Error received with TWS versions 983+
ib_order.firmQuoteOnly = False # The firmQuoteOnly IBApi.Order attribute is no longer supported. Error received with TWS versions 983+
if order_event.order_id < 0:
order_event.order_id = self.orderid
self.orderid += 1
order_event.account = self.account
order_event.timestamp = datetime.now().strftime("%H:%M:%S.%f")
order_event.order_status = OrderStatus.ACKNOWLEDGED # acknowledged
self.order_dict[order_event.order_id] = order_event
_logger.info(f'Order acknowledged {order_event.order_id}, {order_event.full_symbol}')
self.event_engine.put(copy(order_event))
self.api.placeOrder(order_event.order_id, ib_contract, ib_order)
def cancel_order(self, order_id):
"""
Cancel client order.
:param order_id: order id of the order to be canceled
:return: no return. If order is successfully canceled, IB will return an orderstatus message.
"""
if not self.api.connected:
return
if not order_id in self.order_dict.keys():
_logger.error(f'Order to cancel not found. order id {order_id}')
return
self.order_dict[order_id].cancel_time = datetime.now().strftime("%H:%M:%S.%f")
self.api.cancelOrder(order_id)
    def cancel_all_orders(self):
        """
        Cancel all standing orders, for example, before one wants to shut down completely for some reasons.
        """
        # global cancel: applies to every open order visible to this client
        self.api.reqGlobalCancel()
    def subscribe_market_data(self, sym):
        """
        Subscribe market L1 data. Market data for this symbol will then be streamed to client.

        Two consecutive request ids are consumed: the first for the contract
        details request, the second for the market data stream itself. The
        bookkeeping dicts are keyed accordingly.
        :param sym: the symbol to be subscribed.
        """
        if not self.api.connected:
            return
        # it's not going to re-subscribe, because we only call subscribe_market_datas
        # if sym in self.market_data_subscription_reverse_dict.keys():
        #     return
        contract = InteractiveBrokers.symbol_to_contract(sym)
        if not contract:
            _logger.error(f'Failed to find contract to subscribe market data: {sym}')
            return
        # first reqid: contract details
        self.api.reqContractDetails(self.reqid, contract)
        _logger.info(f'Requesting market data {self.reqid} {sym}')
        self.contract_detail_request_contract_dict[self.reqid] = contract
        self.contract_detail_request_symbol_dict[self.reqid] = sym
        self.reqid +=1
        # second reqid: the streaming market data request
        self.api.reqMktData(self.reqid, contract, '', False, False, [])
        # pre-create the tick event so price and size callbacks can be merged into it
        tick_event = TickEvent()
        tick_event.full_symbol = sym
        self.market_data_subscription_dict[self.reqid] = sym
        self.market_data_subscription_reverse_dict[sym] = self.reqid
        self.market_data_tick_dict[self.reqid] = tick_event
        self.reqid += 1
def subscribe_market_datas(self):
"""
Subscribe market L1 data for all symbols used in strategies. Market data for this symbol will then be streamed to client.
"""
syms = list(self.market_data_subscription_reverse_dict.keys())
for sym in syms:
self.subscribe_market_data(sym)
def unsubscribe_market_data(self, sym):
"""
Unsubscribe market L1 data. Market data for this symbol will stop streaming to client.
:param sym: the symbol to be subscribed.
"""
if not self.api.connected:
return
if not sym in self.market_data_subscription_reverse_dict.keys():
return
self.api.cancelMktData(self.market_data_subscription_reverse_dict[sym])
def subscribe_market_depth(self, sym):
"""
Subscribe market L2 data. Market data for this symbol will then be streamed to client.
:param sym: the symbol to be subscribed.
"""
if not self.api.connected:
return
if sym in self.market_depth_subscription_reverse_dict.keys():
return
contract = InteractiveBrokers.symbol_to_contract(sym)
if not contract:
_logger.error(f'Failed to find contract to subscribe market depth: {sym}')
return
self.api.reqMktDepth(self.reqid, contract, 5, True, [])
self.reqid += 1
self.market_depth_subscription_dict[self.reqid] = sym
self.market_depth_subscription_reverse_dict[sym] = self.reqid
def unsubscribe_market_depth(self, sym):
"""
Unsubscribe market L2 data. Market data for this symbol will stop streaming to client.
:param sym: the symbol to be subscribed.
"""
if not self.api.connected:
return
if not sym in self.market_depth_subscription_reverse_dict.keys():
return
self.api.cancelMktDepth(self.market_depth_subscription_reverse_dict[sym], True)
def subscribe_account_summary(self):
"""
Request account summary from broker
"""
if not self.api.connected:
return
if self.account_summary_reqid > 0: # subscribed
return
self.account_summary_reqid = self.reqid
self.api.reqAccountSummary(self.account_summary_reqid, "All", "$LEDGER")
self.reqid += 1
def unsubscribe_account_summary(self):
"""
Stop receiving account summary from broker
"""
if not self.api.connected:
return
if self.account_summary_reqid == -1:
return
self.api.cancelAccountSummary(self.account_summary_reqid)
self.account_summary_reqid = -1
    def subscribe_positions(self):
        """
        Request existing positions from broker.

        Positions arrive asynchronously via the API's position callbacks.
        """
        self.api.reqPositions()
    def unsubscribe_positions(self):
        """
        Stop receiving existing position message from broker.
        """
        self.api.cancelPositions()
def request_historical_data(self, symbol, end=None):
"""
Request 1800 S (30 mins) historical bar data from Interactive Brokers.
:param symbol: the contract whose historical data is requested
:param end: the end time of the historical data
:return: no returns; data is broadcasted through message queue
"""
ib_contract = InteractiveBrokers.symbol_to_contract(symbol)
if end:
end_str = end.strftime("%Y%m%d %H:%M:%S")
else:
end_str = ''
self.hist_data_request_dict[self.reqid] = symbol
self.api.reqHistoricalData(self.reqid, ib_contract, end_str, '1800 S', '1 secs', 'TRADES', 1, 1, False, []) # first 1 is useRTH
self.reqid += 1
    def cancel_historical_data(self, reqid):
        """
        Cancel historical data request. Usually not necessary.
        :param reqid: the historical data request id
        """
        self.api.cancelHistoricalData(reqid)
def request_historical_ticks(self, symbol, start_time, reqtype='TICKS'):
"""
Request historical time and sales data from Interactive Brokers.
See here https://interactivebrokers.github.io/tws-api/historical_time_and_sales.html
:param symbol: the contract whose historical data is requested
:param start_time: i.e. "20170701 12:01:00". Uses TWS timezone specified at login
:param reqtype: TRADES, BID_ASK, or MIDPOINT
:return: no returns; data is broadcasted through message queue
"""
ib_contract = InteractiveBrokers.symbol_to_contract(symbol)
useRth = 1
self.hist_data_request_dict[self.reqid] = symbol
self.api.reqHistoricalTicks(self.reqid, ib_contract, start_time, "", 1000, reqtype, useRth, True, [])
self.reqid += 1
    def reqCurrentTime(self):
        """
        Request server time on broker side.

        The reply is delivered asynchronously (EWrapper currentTime callback,
        per the note in heartbeat()).
        """
        self.api.reqCurrentTime()
    def setServerLogLevel(self, level=1):
        """
        Set server side log level or the log messages received from server.
        See the IB API documentation for the meaning of each level value.
        :param level: log level
        """
        self.api.setServerLogLevel(level)
def heartbeat(self):
"""
Request server time as heartbeat
"""
if self.api.isConnected():
_logger.info('reqPositions')
# self.api.reqPositions()
self.reqCurrentTime() # EWrapper::currentTime
def log(self, msg):
"""
Broadcast server log message through message queue
:param msg: message to be broadcast
:return: no return; log meesage is placed into message queue
"""
timestamp = datetime.now().strftime("%H:%M:%S.%f")
log_event = LogEvent()
log_event.timestamp = timestamp
log_event.content = msg
self.event_engine.put(log_event)
@staticmethod
def symbol_to_contract(symbol):
"""
Convert fulll symbol string to IB contract
TODO
CL.HO BAG 174230608 1 NYMEX 257430162 1 NYMEX NYMEX # Inter-comdty
ES.NQ BAG 371749798 1 GLOBEX 371749745 1 GLOBEX GLOBEX # Inter-comdty
CL.HO BAG 257430162 1 NYMEX 174230608 1 NYMEX NYMEX
:param symbol: full symbol, e.g. AMZN STK SMART
:return: IB contract
"""
symbol_fields = symbol.split(' ')
ib_contract = Contract()
if symbol_fields[1] == 'STK':
ib_contract.localSymbol = symbol_fields[0]
ib_contract.secType = symbol_fields[1]
ib_contract.currency = 'USD'
| |
right')
raw_pred_right = CCL_check_1ststg(raw_pred_right)
else:
raw_ct_right_shape = (raw_ct_shape[0], raw_ct_shape[1], int(raw_ct_shape[2] * 3 / 5))
raw_pred_right_shape = [raw_ct_shape[0], 200, 200, num_labels_1ststg]
raw_pred_right_tmp = np.zeros(shape=raw_pred_right_shape) # raw_ct_shape[0], 200, 200, 3
raw_pred_right_tmp_cnt = np.zeros(shape=raw_pred_right_shape) # raw_ct_shape[0], 200, 200, 3
z_list = list(np.arange(0, raw_ct_shape[0] - 200, 100)) + [raw_ct_shape[0] - 200]
x_start_src = 0
x_end_src = int(raw_ct_shape[2] * 3 / 5)
for z_start in z_list:
raw_ct_right_frame_shape = (200, raw_ct_shape[1], int(raw_ct_shape[2] * 3 / 5))
raw_ct_right_frame = np.ones(raw_ct_right_frame_shape, dtype=np.float32) * -1024
raw_ct_right_frame[:, :, :] = raw_ct[z_start:z_start + 200, :, x_start_src:x_end_src]
img_ct_right = sitk.GetImageFromArray(raw_ct_right_frame)
img_ct_right_rs = resample_img_asdim(img_ct_right, net_input_dim, c_val=-1024)
raw_ct_right_rs = sitk.GetArrayFromImage(img_ct_right_rs)
raw_ct_right_rs_normed = normalize_vol(raw_ct_right_rs, norm_wind_lower=norm_wind_lower,
norm_wind_upper=norm_wind_upper)
fn_raw_ct_right_rs_normed = os.path.join(dir_output_final, 'raw_right_ct_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_ct_right_rs_normed))))
np.array(raw_ct_right_rs_normed).astype(np.float32).tofile(fn_raw_ct_right_rs_normed)
raw_ct_right_rs_normed = np.expand_dims(raw_ct_right_rs_normed, axis=0)
raw_ct_right_rs_normed = np.expand_dims(raw_ct_right_rs_normed, axis=-1)
prediction = np.squeeze(model_1st.predict(x=raw_ct_right_rs_normed), axis=0)
raw_pred_right_tmp[z_start:z_start + 200, :, :, :] += prediction
raw_pred_right_tmp_cnt[z_start:z_start + 200, :, :, :] += 1
raw_pred_right_tmp[np.where(raw_pred_right_tmp_cnt > 0)] /= raw_pred_right_tmp_cnt[
np.where(raw_pred_right_tmp_cnt > 0)]
fn_pred = os.path.join(dir_output_final, 'pred_right.raw')
np.transpose(raw_pred_right_tmp, [3, 0, 1, 2]).astype(np.float32).tofile(fn_pred)
if num_labels_1ststg != 1:
prediction = np.argmax(raw_pred_right_tmp, axis=-1)
else:
prediction = np.squeeze(raw_pred_right_tmp)
prediction[np.where(prediction > 0.5)] = 1
raw_pred_right = sitk.GetArrayFromImage(
resample_img_asdim(sitk.GetImageFromArray(prediction), tuple(reversed(raw_ct_right_shape)),
interp=sitk.sitkNearestNeighbor))
raw_pred_right[np.where(raw_pred_right > 0.5)] = 1
print('1st right')
raw_pred_right = CCL_check_1ststg(raw_pred_right)
# left kidney
if not is_large_z:
z_start_dst = int((200 - raw_ct_shape[0]) / 2)
z_end_dst = z_start_dst + raw_ct_shape[0]
x_start_src = int(raw_ct_shape[2] * 2 / 5)
x_end_src = raw_ct_shape[2]
raw_ct_left_shape = (raw_ct_shape[0], raw_ct_shape[1], x_end_src - x_start_src)
raw_ct_left_frame_shape = (200, raw_ct_shape[1], x_end_src - x_start_src)
raw_ct_left_frame = np.ones(raw_ct_left_frame_shape, dtype=np.float32) * -1024
raw_ct_left_frame[z_start_dst:z_end_dst, :, :] = raw_ct[:, :, x_start_src:x_end_src]
raw_ct_left_frame = raw_ct_left_frame[:, :, -1::-1]
img_ct_left = sitk.GetImageFromArray(raw_ct_left_frame)
img_ct_left_rs = resample_img_asdim(img_ct_left, net_input_dim, c_val=-1024)
raw_ct_left_rs = sitk.GetArrayFromImage(img_ct_left_rs)
raw_ct_left_rs_normed = normalize_vol(raw_ct_left_rs, norm_wind_lower=norm_wind_lower,
norm_wind_upper=norm_wind_upper)
fn_raw_ct_left_rs_normed = os.path.join(dir_output_final, 'raw_left_ct_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_ct_left_rs_normed))))
np.array(raw_ct_left_rs_normed).astype(np.float32).tofile(fn_raw_ct_left_rs_normed)
raw_ct_left_rs_normed = np.expand_dims(raw_ct_left_rs_normed, axis=0)
raw_ct_left_rs_normed = np.expand_dims(raw_ct_left_rs_normed, axis=-1)
prediction = model_1st.predict(x=raw_ct_left_rs_normed)
fn_pred = os.path.join(dir_output_final, 'pred_left.raw')
np.transpose(prediction, [0, 4, 1, 2, 3]).astype(np.float32).tofile(fn_pred)
np.array(raw_ct_left_rs_normed).astype(np.float32).tofile(fn_raw_ct_left_rs_normed)
if np.shape(prediction)[-1] == 1:
prediction = np.squeeze(prediction)
else:
prediction = np.squeeze(np.argmax(prediction, axis=-1))
prediction = prediction[z_start_dst:z_end_dst, :, :]
raw_pred_left = sitk.GetArrayFromImage(
resample_img_asdim(sitk.GetImageFromArray(prediction), tuple(reversed(raw_ct_left_shape)),
interp=sitk.sitkNearestNeighbor))
raw_pred_left[np.where(raw_pred_left > 0.5)] = 1
print('1st left')
raw_pred_left = CCL_check_1ststg(raw_pred_left)
else:
raw_ct_left_shape = (raw_ct_shape[0], raw_ct_shape[1], int(raw_ct_shape[2] * 3 / 5))
raw_pred_left_shape = [raw_ct_shape[0], 200, 200, num_labels_1ststg]
raw_pred_left_tmp = np.zeros(shape=raw_pred_left_shape) # raw_ct_shape[0], 200, 200, 3
raw_pred_left_tmp_cnt = np.zeros(shape=raw_pred_left_shape) # raw_ct_shape[0], 200, 200, 3
z_list = list(np.arange(0, raw_ct_shape[0] - 200, 100)) + [raw_ct_shape[0] - 200]
x_start_src = 0
x_end_src = int(raw_ct_shape[2] * 3 / 5)
for z_start in z_list:
raw_ct_left_frame_shape = (200, raw_ct_shape[1], int(raw_ct_shape[2] * 3 / 5))
raw_ct_left_frame = np.ones(raw_ct_left_frame_shape, dtype=np.float32) * -1024
raw_ct_left_frame[:, :, :] = raw_ct[z_start:z_start + 200, :, -raw_ct_left_frame_shape[2]:]
raw_ct_left_frame = raw_ct_left_frame[:, :, -1::-1]
img_ct_left = sitk.GetImageFromArray(raw_ct_left_frame)
img_ct_left_rs = resample_img_asdim(img_ct_left, net_input_dim, c_val=-1024)
raw_ct_left_rs = sitk.GetArrayFromImage(img_ct_left_rs)
raw_ct_left_rs_normed = normalize_vol(raw_ct_left_rs, norm_wind_lower=norm_wind_lower,
norm_wind_upper=norm_wind_upper)
fn_raw_ct_left_rs_normed = os.path.join(dir_output_final, 'raw_left_ct_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_ct_left_rs_normed))))
np.array(raw_ct_left_rs_normed).astype(np.float32).tofile(fn_raw_ct_left_rs_normed)
raw_ct_left_rs_normed = np.expand_dims(raw_ct_left_rs_normed, axis=0)
raw_ct_left_rs_normed = np.expand_dims(raw_ct_left_rs_normed, axis=-1)
prediction = np.squeeze(model_1st.predict(x=raw_ct_left_rs_normed), axis=0)
raw_pred_left_tmp[z_start:z_start + 200, :, :, :] += prediction
raw_pred_left_tmp_cnt[z_start:z_start + 200, :, :, :] += 1
raw_pred_left_tmp[np.where(raw_pred_left_tmp_cnt > 0)] /= raw_pred_left_tmp_cnt[
np.where(raw_pred_left_tmp_cnt > 0)]
fn_pred = os.path.join(dir_output_final, 'pred_left.raw')
np.transpose(raw_pred_left_tmp, [3, 0, 1, 2]).astype(np.float32).tofile(fn_pred)
if num_labels_1ststg != 1:
prediction = np.argmax(raw_pred_left_tmp, axis=-1)
else:
prediction = np.squeeze(raw_pred_left_tmp)
prediction[np.where(prediction > 0.5)] = 1
raw_pred_left = sitk.GetArrayFromImage(
resample_img_asdim(sitk.GetImageFromArray(prediction), tuple(reversed(raw_ct_left_shape)),
interp=sitk.sitkNearestNeighbor))
raw_pred_left[np.where(raw_pred_left > 0.5)] = 1
print('1st left')
# raw_pred_left = CCL(raw_pred_left, num_labels=2)
raw_pred_left = CCL_check_1ststg(raw_pred_left)
# check if both kidneys are valid
raw_pred_whole = np.zeros(np.shape(raw_ct), dtype=np.uint8)
raw_pred_right_shape = np.shape(raw_pred_right)
raw_pred_whole[:, :, :raw_pred_right_shape[2]] = raw_pred_right
raw_pred_left_shape = np.shape(raw_pred_left)
raw_pred_left[:, :, :] = raw_pred_left[:, :, -1::-1]
raw_pred_whole_left_tmp = raw_pred_whole[:, :, -raw_pred_left_shape[2]:]
raw_pred_whole_left_tmp[np.where(raw_pred_left > 0)] = raw_pred_left[np.where(raw_pred_left > 0)]
raw_pred_whole[:, :, -raw_pred_left_shape[2]:] = raw_pred_whole_left_tmp
fn_raw_pred_whole = os.path.join(dir_output_final,
'raw_whole_pred_1st_beforeCCL_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_pred_whole))))
raw_pred_whole.astype(np.uint8).tofile(fn_raw_pred_whole)
raw_pred_whole = CCL_1ststg_post(raw_pred_whole)
idcs_label_1 = np.where(raw_pred_whole == 1)
label_1_x_pos = np.mean(idcs_label_1[2])
idcs_label_2 = np.where(raw_pred_whole == 2)
print(' # of voxels 1 : %d' % len(idcs_label_1[0]))
print(' # of voxels 2 : %d' % len(idcs_label_2[0]))
if len(idcs_label_2[0]) > len(idcs_label_1[0]) * 0.2:
is_both_kidney = True
label_2_x_pos = np.mean(idcs_label_2[2])
print('both kidney')
else:
is_both_kidney = False
print('one kidney')
if is_both_kidney:
if label_1_x_pos > label_2_x_pos:
# swap label btw. 1 and 2
raw_pred_whole[idcs_label_1] = 2
raw_pred_whole[idcs_label_2] = 1
is_left_kidney = True
is_right_kidney = True
print('swap position')
else:
is_left_kidney = True
is_right_kidney = True
else:
if np.min(idcs_label_1[2]) < raw_ct_shape[2] / 2:
raw_pred_whole[idcs_label_1] = 1
raw_pred_whole[idcs_label_2] = 0
is_right_kidney = True
is_left_kidney = False
print('right kidney only')
else:
raw_pred_whole[idcs_label_1] = 2
raw_pred_whole[idcs_label_2] = 0
is_right_kidney = False
is_left_kidney = True
print('left kidney only')
fn_raw_pred_whole = os.path.join(dir_output_final,
'raw_whole_pred_1st_%dx%dx%d.raw' % tuple(reversed(np.shape(raw_pred_whole))))
raw_pred_whole.astype(np.uint8).tofile(fn_raw_pred_whole)
fn_raw_ct_whole = os.path.join(dir_output_final,
'raw_whole_ct_%dx%dx%d.raw' % tuple(reversed(np.shape(raw_pred_whole))))
raw_ct.astype(np.int16).tofile(fn_raw_ct_whole)
# extract kidney coordinate
if is_right_kidney:
idcs_label_1 = np.where(raw_pred_whole == 1)
kidney_right_start = (np.max((np.min(idcs_label_1[0] - 16), 0)),
np.max((np.min(idcs_label_1[1] - 16), 0)),
np.max((np.min(idcs_label_1[2] - 16), 0)))
kidney_right_end = (np.min((np.max(idcs_label_1[0] + 16), raw_ct_shape[0])),
np.min((np.max(idcs_label_1[1] + 16), raw_ct_shape[1])),
np.min((np.max(idcs_label_1[2] + 16), raw_ct_shape[2])))
print('kidney_right_start: ' + str(kidney_right_start))
print('kidney_right_end: ' + str(kidney_right_end))
if is_left_kidney:
idcs_label_2 = np.where(raw_pred_whole == 2)
kidney_left_start = (np.max((np.min(idcs_label_2[0] - 16), 0)),
np.max((np.min(idcs_label_2[1] - 16), 0)),
np.max((np.min(idcs_label_2[2] - 16), 0)))
kidney_left_end = (np.min((np.max(idcs_label_2[0] + 16), raw_ct_shape[0])),
np.min((np.max(idcs_label_2[1] + 16), raw_ct_shape[1])),
np.min((np.max(idcs_label_2[2] + 16), raw_ct_shape[2])))
print('kidney_left_start: ' + str(kidney_left_start))
print('kidney_left_end: ' + str(kidney_left_end))
''' 2nd stage '''
# Seg right kidney if it is valid
if is_right_kidney:
# right kidney
raw_ct_right_2nd_shape = (
int(kidney_right_end[0] - kidney_right_start[0]),
int(kidney_right_end[1] - kidney_right_start[1]),
int(kidney_right_end[2] - kidney_right_start[2]))
raw_ct_right_frame = np.ones(raw_ct_right_2nd_shape, dtype=np.float32) * -1024
raw_ct_right_frame[:, :, :] = raw_ct[kidney_right_start[0]:kidney_right_end[0],
kidney_right_start[1]:kidney_right_end[1],
kidney_right_start[2]:kidney_right_end[2]]
img_ct_right = sitk.GetImageFromArray(raw_ct_right_frame)
img_ct_right_rs = resample_img_asdim(img_ct_right, net_input_dim, c_val=-1024)
raw_ct_right_rs = sitk.GetArrayFromImage(img_ct_right_rs)
raw_ct_right_rs_normed = normalize_vol(raw_ct_right_rs, norm_wind_lower=norm_wind_lower,
norm_wind_upper=norm_wind_upper)
fn_raw_ct_right_rs_normed = os.path.join(dir_output_final, 'raw_right_ct_2nd_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_ct_right_rs_normed))))
np.array(raw_ct_right_rs_normed).astype(np.float32).tofile(fn_raw_ct_right_rs_normed)
raw_ct_right_rs_normed = np.expand_dims(raw_ct_right_rs_normed, axis=0)
raw_ct_right_rs_normed = np.expand_dims(raw_ct_right_rs_normed, axis=-1)
prediction = model_2nd.predict(x=raw_ct_right_rs_normed)
# fn_pred = os.path.join(dir_output_final, 'pred_right.raw')
# np.transpose(prediction, [0, 4, 1, 2, 3]).astype(np.float32).tofile(fn_pred)
np.array(raw_ct_right_rs_normed).astype(np.float32).tofile(fn_raw_ct_right_rs_normed)
if np.shape(prediction)[-1] == 1:
prediction = np.squeeze(prediction)
else:
prediction = np.squeeze(np.argmax(prediction, axis=-1))
raw_pred_right = sitk.GetArrayFromImage(
resample_img_asdim(sitk.GetImageFromArray(prediction), tuple(reversed(raw_ct_right_2nd_shape)),
interp=sitk.sitkNearestNeighbor))
raw_pred_right_tmp = np.array(raw_pred_right)
raw_pred_right_tmp[np.where(raw_pred_right_tmp > 0)] = 1
fn_raw_pred_right_tmp = os.path.join(dir_output_final, 'raw_right_pred_beforeCCL_2nd_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_pred_right))))
raw_pred_right_tmp.astype(np.uint8).tofile(fn_raw_pred_right_tmp)
raw_pred_right_tmp = CCL(raw_pred_right_tmp, num_labels=2)
raw_pred_right[np.where(raw_pred_right_tmp == 0)] = 0
fn_raw_pred_right = os.path.join(dir_output_final, 'raw_right_pred_2nd_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_pred_right))))
raw_pred_right.astype(np.uint8).tofile(fn_raw_pred_right)
fn_raw_ct_right = os.path.join(dir_output_final,
'raw_right_ct_2nd_%dx%dx%d.raw' % tuple(reversed(np.shape(raw_pred_right))))
raw_ct_right = np.array(raw_ct[kidney_right_start[0]:kidney_right_end[0],
kidney_right_start[1]:kidney_right_end[1],
kidney_right_start[2]:kidney_right_end[2]])
raw_ct_right.astype(np.int16).tofile(fn_raw_ct_right)
if is_left_kidney:
# left kidney
raw_ct_left_2nd_shape = (
int(kidney_left_end[0] - kidney_left_start[0]),
int(kidney_left_end[1] - kidney_left_start[1]),
int(kidney_left_end[2] - kidney_left_start[2]))
raw_ct_left_frame = np.ones(raw_ct_left_2nd_shape, dtype=np.float32) * -1024
raw_ct_left_frame[:, :, :] = raw_ct[kidney_left_start[0]:kidney_left_end[0],
kidney_left_start[1]:kidney_left_end[1],
kidney_left_start[2]:kidney_left_end[2]]
raw_ct_left_frame = raw_ct_left_frame[:, :, -1::-1]
img_ct_left = sitk.GetImageFromArray(raw_ct_left_frame)
img_ct_left_rs = resample_img_asdim(img_ct_left, net_input_dim, c_val=-1024)
raw_ct_left_rs = sitk.GetArrayFromImage(img_ct_left_rs)
raw_ct_left_rs_normed = normalize_vol(raw_ct_left_rs, norm_wind_lower=norm_wind_lower,
norm_wind_upper=norm_wind_upper)
fn_raw_ct_left_rs_normed = os.path.join(dir_output_final, 'raw_left_ct_2nd_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_ct_left_rs_normed))))
np.array(raw_ct_left_rs_normed).astype(np.float32).tofile(fn_raw_ct_left_rs_normed)
raw_ct_left_rs_normed = np.expand_dims(raw_ct_left_rs_normed, axis=0)
raw_ct_left_rs_normed = np.expand_dims(raw_ct_left_rs_normed, axis=-1)
prediction = model_2nd.predict(x=raw_ct_left_rs_normed)
# fn_pred = os.path.join(dir_output_final, 'pred_left.raw')
# np.transpose(prediction, [0, 4, 1, 2, 3]).astype(np.float32).tofile(fn_pred)
np.array(raw_ct_left_rs_normed).astype(np.float32).tofile(fn_raw_ct_left_rs_normed)
if np.shape(prediction)[-1] == 1:
prediction = np.squeeze(prediction)
else:
prediction = np.squeeze(np.argmax(prediction, axis=-1))
raw_pred_left = sitk.GetArrayFromImage(
resample_img_asdim(sitk.GetImageFromArray(prediction), tuple(reversed(raw_ct_left_2nd_shape)),
interp=sitk.sitkNearestNeighbor))
raw_pred_left = raw_pred_left[:, :, -1::-1]
raw_pred_left_tmp = np.array(raw_pred_left)
raw_pred_left_tmp[np.where(raw_pred_left_tmp > 0)] = 1
fn_raw_pred_left_tmp = os.path.join(dir_output_final, 'raw_left_pred_beforeCCL_2nd_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_pred_left))))
raw_pred_left_tmp.astype(np.uint8).tofile(fn_raw_pred_left_tmp)
raw_pred_left_tmp = CCL(raw_pred_left_tmp, num_labels=2)
raw_pred_left[np.where(raw_pred_left_tmp == 0)] = 0
fn_raw_pred_left = os.path.join(dir_output_final, 'raw_left_pred_2nd_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_pred_left))))
raw_pred_left.astype(np.uint8).tofile(fn_raw_pred_left)
fn_raw_ct_left = os.path.join(dir_output_final,
'raw_left_ct_2nd_%dx%dx%d.raw' % tuple(reversed(np.shape(raw_pred_left))))
raw_ct_left = np.array(raw_ct[kidney_left_start[0]:kidney_left_end[0],
kidney_left_start[1]:kidney_left_end[1],
kidney_left_start[2]:kidney_left_end[2]])
raw_ct_left.astype(np.int16).tofile(fn_raw_ct_left)
raw_pred_whole = np.zeros(np.shape(raw_ct), dtype=np.uint8)
if is_right_kidney:
raw_pred_whole[kidney_right_start[0]:kidney_right_end[0], kidney_right_start[1]:kidney_right_end[1],
kidney_right_start[2]:kidney_right_end[2]] = raw_pred_right
if is_left_kidney:
raw_pred_whole_left_tmp = raw_pred_whole[kidney_left_start[0]:kidney_left_end[0],
kidney_left_start[1]:kidney_left_end[1], kidney_left_start[2]:kidney_left_end[2]]
raw_pred_whole_left_tmp[np.where(raw_pred_left > 0)] = raw_pred_left[np.where(raw_pred_left > 0)]
raw_pred_whole[kidney_left_start[0]:kidney_left_end[0], kidney_left_start[1]:kidney_left_end[1],
kidney_left_start[2]:kidney_left_end[2]] = raw_pred_whole_left_tmp
fn_raw_pred_whole = os.path.join(dir_output_final,
'raw_whole_pred_2nd_%dx%dx%d.raw' % tuple(reversed(np.shape(raw_pred_whole))))
raw_pred_whole.astype(np.uint8).tofile(fn_raw_pred_whole)
fn_raw_pred_whole_color = os.path.join(dir_output_final, 'color_raw_whole_pred_2nd_%dx%dx%d.raw' % tuple(
reversed(np.shape(raw_pred_whole))))
# colorize_boundary_and_fill_vol(raw_ct, raw_pred_whole, norm_wind_lower=-200, norm_wind_upper=300,
# fn_out_fullpath=fn_raw_pred_whole_color)
if pid == 223:
raw_pred_whole_tmp = np.zeros(np.shape(raw_ct_original), dtype=np.uint8)
raw_pred_whole_tmp[-180:, :, :] = raw_pred_whole
raw_pred_whole = raw_pred_whole_tmp
''' Save final prediction as Nifti '''
fn_final_pred_nifti = os.path.join(dir_output_base, dir_output, 'prediction_%05d.nii.gz' % pid)
x_nib = nib.load(fn_img_ct)
p_nib = nib.Nifti1Image(raw_pred_whole[-1::-1], x_nib.affine)
nib.save(p_nib, fn_final_pred_nifti)
# sitk.WriteImage(img_final_pred, fn_final_pred_nifti, True)
if raw_gt is not None:
fn_csv = os.path.join(dir_output_final, 'Dice.csv')
GetDice(raw_gt, raw_pred_whole, fn_csv, num_label=3)
fn_raw_gt = os.path.join(dir_output_final, 'raw_whole_gt_%dx%dx%d.raw' % tuple(reversed(np.shape(raw_gt))))
raw_gt.astype(np.uint8).tofile(fn_raw_gt)
fn_raw_gt_whole_color = os.path.join(dir_output_final,
'color_raw_whole_gt_%dx%dx%d.raw' % tuple(reversed(np.shape(raw_gt))))
# colorize_boundary_and_fill_vol(raw_ct, raw_gt, norm_wind_lower=-200, norm_wind_upper=300,
# fn_out_fullpath=fn_raw_gt_whole_color)
summary_dice(os.path.join(dir_output_base, dir_output))
def TransAxis(img, dtype):
    """Transpose a SimpleITK image from array order (z, y, x) to (x, y, z).

    The array axes are reversed, the (new) first axis is flipped, the voxel
    spacing is reversed to match the new axis order, and the direction
    cosine matrix is reset to identity.

    Parameters
    ----------
    img : sitk.Image
        Input image.
    dtype : numpy dtype
        Pixel type of the returned image.

    Returns
    -------
    sitk.Image
        Axis-transposed image with identity direction.
    """
    spacing = img.GetSpacing()
    # NOTE: the unused ``direction = img.GetDirection()`` local was removed;
    # the direction is deliberately overwritten with identity below.
    img_raw = sitk.GetArrayFromImage(img)  # GetArrayFromImage yields (z, y, x)
    img_raw = np.transpose(img_raw, axes=[2, 1, 0])
    # Flip the first axis. NOTE(review): presumably an orientation
    # (e.g. feet-first vs head-first) correction -- confirm against callers.
    img_raw = img_raw[-1::-1, :, :]
    img_new = sitk.GetImageFromArray(img_raw.astype(dtype))
    img_new.SetSpacing(tuple(reversed(spacing)))
    img_new.SetDirection((1, 0, 0, 0, 1, 0, 0, 0, 1))
    return img_new
def TransAxisReverse(raw_data, ref_img, | |
import torch
from allennlp.common.util import END_SYMBOL, START_SYMBOL
from allennlp.data.vocabulary import DEFAULT_PADDING_TOKEN, Vocabulary
from allennlp.models import Model
from allennlp.modules import FeedForward, MatrixAttention, TextFieldEmbedder, TokenEmbedder
from allennlp.nn import InitializerApplicator, RegularizerApplicator
from allennlp.nn.util import get_text_field_mask, masked_softmax, weighted_sum
from allennlp.training.metrics import Metric
from overrides import overrides
from typing import Any, Dict, List, Optional, Tuple, Union
from summarize.common.util import SENT_START_SYMBOL, SENT_END_SYMBOL
from summarize.modules.bridge import Bridge
from summarize.modules.rnns import RNN
from summarize.nn.beam_search import BeamSearch
from summarize.nn.util import normalize_losses
from summarize.training.metrics import CrossEntropyMetric
@Model.register('sds-seq2seq')
class Seq2SeqModel(Model):
"""
An implementation of a standard encoder-decoder network with attention
built on RNNs.
Parameters
----------
document_token_embedder: ``TextFieldEmbedder``
The ``TextFieldEmbedder`` that will embed the document tokens.
encoder: ``RNN``
The RNN that will encode the sequence of document tokens.
attention: ``MatrixAttention``
The attention function that will be computed between the encoder and
decoder hidden states.
attention_layer: ``FeedForward``
The ``attention_layer`` will be applied after the decoder hidden state
and attention context are concatenated. The output should be the size
of the decoder hidden state. This abstraction was created because sometimes
a ``tanh`` unit is used after the projection and other times it is not.
In our experience, this decision can make a big difference in terms of
performance and training speed.
decoder: ``RNN``
The RNN that will produce the sequence of summary tokens.
bridge: ``Bridge``, optional (default = ``None``)
The bridge layer to use in between the encoder final state and the
initial decoder hidden state. If ``None``, no layer will be used.
beam_search: ``BeamSearch``
The ``BeamSearch`` object to use for prediction and validation.
run_beam_search: ``bool``
Indicates whether or not beam search should be run during prediction. This
is useful to turn off during training and on during testing if the
beam search procedure is expensive.
summary_token_embedder: ``TokenEmbedder``, optional (default = ``None``)
The ``TokenEmbedder`` that will embed the summary tokens. If ``None``, the
``document_token_embedder``'s embedder for the ``"tokens"`` will be used.
summary_namespace: ``str``, optional (default = ``"tokens"``)
The namespace of the summary tokens which is used to map from the integer
token representation to the string token representation.
use_input_feeding: ``bool``, optional (default = ``False``)
Indicates if input feeding should be used. See https://arxiv.org/pdf/1508.04025.pdf
for details.
input_feeding_projection_layer: ``FeedForward``, optional (default = ``None``)
If input feeding is used, the ``input_feeding_projection_layer`` will optionally
run on the concatenated input embedding and context vector. The output will
be passed as input to the decoder. This is not specified in Luong et al. (2015),
but it is used in See et al. (2017).
instance_loss_normalization: ``str``
The method for normalizing the loss per-instance. See `summarize.nn.util.normalize_losses`
for more information.
batch_loss_normalization: ``str``
The method for normalizing the loss for the batch. See `summarize.nn.util.normalize_losses`
for more information.
"""
def __init__(self,
             vocab: Vocabulary,
             document_token_embedder: TextFieldEmbedder,
             encoder: RNN,
             attention: MatrixAttention,
             attention_layer: FeedForward,
             decoder: RNN,
             bridge: Bridge,
             beam_search: BeamSearch,
             run_beam_search: bool = True,
             summary_token_embedder: Optional[TokenEmbedder] = None,
             summary_namespace: str = 'tokens',
             use_input_feeding: bool = False,
             input_feeding_projection_layer: Optional[FeedForward] = None,
             instance_loss_normalization: str = 'sum',
             batch_loss_normalization: str = 'average',
             metrics: Optional[List[Metric]] = None,
             initializer: InitializerApplicator = InitializerApplicator(),
             regularizer: RegularizerApplicator = None) -> None:
    """Wire up the encoder-decoder; see the class docstring for parameter docs."""
    super().__init__(vocab, regularizer)
    self.document_token_embedder = document_token_embedder
    self.encoder = encoder
    self.attention = attention
    self.attention_layer = attention_layer
    self.decoder = decoder
    self.bridge = bridge
    self.beam_search = beam_search
    self.run_beam_search = run_beam_search
    # Fall back to the document embedder's "tokens" embedder when no dedicated
    # summary embedder is given (shared embeddings between document and summary).
    self.summary_token_embedder = summary_token_embedder or document_token_embedder._token_embedders['tokens']
    self.summary_namespace = summary_namespace
    self.use_input_feeding = use_input_feeding
    self.input_feeding_projection_layer = input_feeding_projection_layer
    self.instance_loss_normalization = instance_loss_normalization
    self.batch_loss_normalization = batch_loss_normalization
    # The ``output_layer`` is applied after the attention context and decoder
    # hidden state are combined. It is used to calculate the softmax over the
    # summary vocabulary
    self.output_layer = torch.nn.Linear(decoder.get_output_dim(), vocab.get_vocab_size(summary_namespace))
    # Retrieve some special vocabulary token indices. Some of them are
    # required to exist.
    token_to_index = vocab.get_token_to_index_vocabulary(summary_namespace)
    assert START_SYMBOL in token_to_index
    self.start_index = token_to_index[START_SYMBOL]
    assert END_SYMBOL in token_to_index
    self.end_index = token_to_index[END_SYMBOL]
    assert DEFAULT_PADDING_TOKEN in token_to_index
    self.pad_index = token_to_index[DEFAULT_PADDING_TOKEN]
    # Sentence-delimiter symbols are optional; the indices remain ``None``
    # when the namespace does not contain them.
    self.sent_start_index = None
    if SENT_START_SYMBOL in token_to_index:
        self.sent_start_index = token_to_index[SENT_START_SYMBOL]
    self.sent_end_index = None
    if SENT_END_SYMBOL in token_to_index:
        self.sent_end_index = token_to_index[SENT_END_SYMBOL]
    # ``reduction='none'`` keeps per-token losses so they can later be
    # normalized per ``instance_loss_normalization``/``batch_loss_normalization``;
    # padding positions are excluded via ``ignore_index``.
    self.loss = torch.nn.CrossEntropyLoss(ignore_index=self.pad_index, reduction='none')
    # Define the metrics that will be computed
    self.metrics = metrics
    self.cross_entropy_metric = CrossEntropyMetric()
    initializer(self)
def _run_encoder(self, document: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, ...]:
    """
    Runs the encoder RNN over the document tokens and prepares the encoder's
    hidden states to be ready to initialize the decoder.

    Parameters
    ----------
    document: ``Dict[str, torch.Tensor]``
        The document tokens.

    Returns
    -------
    encoder_outputs: ``torch.Tensor``, ``(batch_size, num_document_tokens, encoder_hidden_size)``
        The hidden state outputs from the encoder.
    document_mask: ``torch.Tensor``, ``(batch_size, num_document_tokens)``
        The document tokens mask.
    hidden: ``torch.Tensor``, ``(batch_size, decoder_hidden_size)``
        The hidden state that should be used to initialize the decoder
    memory: ``torch.Tensor``, ``(batch_size, decoder_hidden_size)``
        The memory state that should be used to initialize the decoder
    """
    # Encode the document tokens
    # shape: (batch_size, num_document_tokens)
    document_mask = get_text_field_mask(document)
    # shape: (batch_size, num_document_tokens, embedding_size)
    document_token_embeddings = self.document_token_embedder(document)
    # shape: (batch_size, num_document_tokens, encoder_hidden_size)
    # shape: (num_layers * num_directions, batch_size, encoder_hidden_size)
    encoder_outputs, hidden = self.encoder(document_token_embeddings, document_mask)
    # Reshape the encoder's hidden state(s) for decoding
    # shape: (num_layers, batch_size, encoder_hidden_size * num_directions)
    hidden = self.encoder.reshape_hidden_for_decoder(hidden)
    # For now, we only support ``num_layers = 1``. The beam search logic
    # requires handling tensors where the first dimension is the batch size.
    # Implementing a larger number of layers would require messing with the
    # dimensions, and we haven't put the effort in to do that.
    # (Plain string: the original used an f-string with no placeholders.)
    message = 'Currently, only ``num_layers = 1`` is supported.'
    if isinstance(hidden, tuple):
        if hidden[0].size(0) != 1:
            raise Exception(message)
        # shape: (batch_size, encoder_hidden_size * num_directions)
        hidden = hidden[0].squeeze(0), hidden[1].squeeze(0)
    else:
        if hidden.size(0) != 1:
            raise Exception(message)
        # shape: (batch_size, encoder_hidden_size * num_directions)
        hidden = hidden.squeeze(0)
    # Apply the bridge layer
    if self.bridge is not None:
        # shape: (batch_size, decoder_hidden_size)
        hidden = self.bridge(hidden)
    # Split the hidden state's tuple items for decoding purposes. The generic
    # beam search code expects tensors as values in the state dictionary, so
    # we can't use the default tuple-based implementation. This means we have
    # to create a ``memory`` tensor even if it's not used (e.g., by a GRU) or
    # else the reshaping logic of the decoding will fail. However, it will not
    # be used.
    if self.encoder.has_memory():
        # shape: (batch_size, encoder_hidden_size * num_directions)
        hidden, memory = hidden
    else:
        memory = hidden.new_zeros(hidden.size())
    return encoder_outputs, document_mask, hidden, memory
def _run_teacher_forcing(self,
                         initial_decoding_state: Dict[str, torch.Tensor],
                         summary: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
    """
    Runs teacher forcing and computes the scores over the vocabulary for
    every decoding timestep.

    Parameters
    ----------
    initial_decoding_state: ``Dict[str, torch.Tensor]``
        The dictionary with the tensors used to initialize decoding.
    summary: ``Dict[str, torch.Tensor]``
        The summary tokens.

    Returns
    -------
    logits: ``torch.Tensor``, ``(batch_size, num_summary_tokens - 1, vocab_size)``
        The unnormalized scores over the vocabulary for each time step.
    targets: ``torch.Tensor``, ``(batch_size, num_summary_tokens - 1)``
        The ground-truth target summary tokens that should be used to compute the loss.
    """
    # Get the summary tokens from the dictionary
    # shape: (batch_size, num_summary_tokens)
    summary_tokens = summary['tokens']
    # The tokens that we feed into the decoder are all but the last time
    # step, which has the <eos> token.
    # shape: (batch_size, num_summary_tokens - 1)
    summary_input_tokens = summary_tokens[:, :-1]
    # The target tokens are from the first time step onward
    # shape: (batch_size, num_summary_tokens - 1)
    summary_target_tokens = summary_tokens[:, 1:].contiguous()
    # Pass the input tokens through the decoding step
    # shape: (batch_size, num_summary_tokens - 1, summary_vocab_size)
    logits, _ = self._decoder_step(summary_input_tokens, initial_decoding_state)
    return logits, summary_target_tokens
def _decoder_step(self,
summary_tokens: torch.Tensor,
state: Dict[str, torch.Tensor],
token_mask: torch.Tensor = None) -> Tuple[torch.Tensor, Dict[str, torch.Tensor]]:
"""
Runs the decoder one step for every input token. This function implements
the interface for AllenNLP's generic beam search code. Instead of a ``batch_size``,
this function uses a ``group_size`` since the beam search will run in
parallel for each batch.
Parameters
----------
summary_tokens: ``torch.Tensor``, ``(group_size, num_summary_tokens)`` or ``(group_size,)
The tokens which should be input to the next step. The decoder will
run one time for each token time step. If there is only one dimension, the function
is being called during inference and will only run 1 decoder step.
state: ``Dict[str, torch.Tensor]``
The current decoder state.
token_mask: ``torch.Tensor``, (group_size, num_summary_tokens)
An optional mask to apply during | |
<gh_stars>1-10
import csv
import json
import math
import os
import h5py
import numpy as np
# import staintools
from matplotlib.colors import LinearSegmentedColormap
from numba import jit
from scipy import ndimage
import cv2
import random
from skimage.feature import peak_local_max
import skimage.segmentation
from skimage.morphology import watershed as ws, remove_small_objects
import matplotlib.pyplot as plt
def remove_small_objects_from_image(red_channel, min_size=100):
    """Zero out connected components of ``red_channel`` smaller than ``min_size`` pixels.

    The channel is mutated in place and also returned for convenience.
    """
    # Binarize to a boolean mask. Use the builtin ``bool`` dtype:
    # ``np.bool`` was deprecated and removed in NumPy >= 1.24.
    component_mask = (red_channel > 0).astype(bool)
    kept = remove_small_objects(component_mask, min_size=min_size).astype(np.uint8)
    # Erase every pixel whose component was dropped.
    red_channel[kept == 0] = 0
    return red_channel
def read_BC_detection_mask(img_name, data_type='test'):
    """Load BCData point annotations for ``img_name`` and rasterize them.

    Returns a pair of 640x640 uint8 masks ``(positive_mask, negative_mask)``
    with value 255 at every annotated cell coordinate for the given split.
    """
    img_type = img_name.split('.')[-1]
    base_dir = '/home/parmida/Downloads/BCData'
    annotations_dir = os.path.join(base_dir, 'annotations')
    negative_dir = os.path.join(annotations_dir, data_type, 'negative')
    positive_dir = os.path.join(annotations_dir, data_type, 'positive')
    fn_h5 = img_name.replace('.' + img_type, '.h5')
    # Debug print now shows the path actually opened (the original
    # hard-coded '.png' and printed a wrong path for other extensions).
    print(os.path.join(negative_dir, fn_h5))
    # Context managers close the HDF5 handles; the original leaked them.
    with h5py.File(os.path.join(negative_dir, fn_h5)) as gt_file_negative:
        coordinates_negative = np.asarray(gt_file_negative['coordinates'])
    with h5py.File(os.path.join(positive_dir, fn_h5)) as gt_file_positive:
        coordinates_positive = np.asarray(gt_file_positive['coordinates'])
    positive_mask = np.zeros((640, 640), dtype=np.uint8)
    negative_mask = np.zeros((640, 640), dtype=np.uint8)
    # Annotations are stored as (x, y); the masks are indexed (row, col).
    for coord in coordinates_positive:
        positive_mask[coord[1], coord[0]] = 255
    for coord in coordinates_negative:
        negative_mask[coord[1], coord[0]] = 255
    return positive_mask, negative_mask
def read_BC_detection_point(img_name, data_type='test'):
    """Load raw BCData point annotations for ``img_name``.

    Returns ``(coordinates_positive, coordinates_negative)`` arrays of
    annotated (x, y) coordinates for the given split.
    """
    img_type = img_name.split('.')[-1]
    base_dir = '/home/parmida/Downloads/BCData'
    annotations_dir = os.path.join(base_dir, 'annotations')
    negative_dir = os.path.join(annotations_dir, data_type, 'negative')
    positive_dir = os.path.join(annotations_dir, data_type, 'positive')
    fn_h5 = img_name.replace('.' + img_type, '.h5')
    # Debug print of the path actually opened (original printed a
    # '.png'-derived path even for other extensions).
    print(os.path.join(negative_dir, fn_h5))
    # Context managers close the HDF5 handles; the original leaked them.
    with h5py.File(os.path.join(negative_dir, fn_h5)) as gt_file_negative:
        coordinates_negative = np.asarray(gt_file_negative['coordinates'])
    with h5py.File(os.path.join(positive_dir, fn_h5)) as gt_file_positive:
        coordinates_positive = np.asarray(gt_file_positive['coordinates'])
    return coordinates_positive, coordinates_negative
def compute_TP_FP_of_each_class(image, marked_class):
    """Count detection true/false positives for one prediction channel.

    Each connected component of ``image`` is slightly dilated and compared
    against the ground-truth point mask ``marked_class``: every ground-truth
    pixel covered by the component counts as a TP, and a component covering
    no ground-truth pixel counts as one FP (see compute_component_TP_FP).
    """
    labeled, nr_objects = ndimage.label(image > 0)
    TP = 0
    FP = 0
    # ndimage.label numbers components 1..nr_objects inclusive; the original
    # ``range(1, nr_objects)`` silently skipped the last component.
    for c in range(1, nr_objects + 1):
        component = np.zeros_like(image)
        component[labeled == c] = image[labeled == c]
        # Dilate so near-miss detections still overlap their annotation.
        component = cv2.morphologyEx(component, cv2.MORPH_DILATE, kernel=np.ones((5, 5)), iterations=1)
        TP, FP = compute_component_TP_FP(component, marked_class, TP, FP)
    return TP, FP
@jit(nopython=True)
def compute_component_TP_FP(component, marked_class, TP, FP):
    # Accumulate detection counts for a single connected component.
    # Every nonzero ground-truth pixel of ``marked_class`` covered by the
    # component adds one TP (a merged component may contain several annotated
    # cells); a component covering no ground-truth pixel adds exactly one FP.
    # Kept numba nopython-friendly: plain loops and integer indexing only.
    indices = np.nonzero(component)
    cell_flag = False
    for i in range(len(indices[0])):
        if marked_class[indices[0][i], indices[1][i]] > 0:
            TP += 1
            cell_flag = True
    if not cell_flag:
        FP += 1
    return TP, FP
def compute_precision_recall_f1(TP, FP, FN):
    """Derive precision, recall and F1 from raw detection counts.

    By convention an empty denominator yields a precision/recall of 1,
    and a zero precision+recall sum yields an F1 of 0.
    """
    predicted = TP + FP
    actual = TP + FN
    precision = TP / predicted if predicted > 0 else 1
    recall = TP / actual if actual > 0 else 1
    denom = precision + recall
    F1 = 2 * precision * recall / denom if denom > 0 else 0
    return precision, recall, F1
def mark_Shiraz_image_with_markers(image, immunopositive, immunonegative, immunoTIL):
    """Overlay dilated marker masks on a copy of ``image``.

    Positive markers are painted (0, 0, 255), negative (255, 0, 0) and TIL
    (0, 255, 0) -- presumably BGR red/blue/green. Later overlays win where
    the dilated masks overlap; the input image itself is not modified.
    """
    annotated = image.copy()
    kernel = np.ones((5, 5))
    # Dilate each point mask a little so the markers are visible.
    overlays = (
        (immunopositive, (0, 0, 255)),
        (immunonegative, (255, 0, 0)),
        (immunoTIL, (0, 255, 0)),
    )
    for mask, color in overlays:
        dilated = cv2.morphologyEx(mask, cv2.MORPH_DILATE, kernel=kernel)
        annotated[dilated > 0] = color
    return annotated
def read_NuClick_mask(img_name, dir_type='Train'):
    """Render a NuClick instance-label mask as a red fill with white contours.

    Loads the ``.npy`` label image matching ``img_name`` (label 0 is
    background), paints every labelled pixel red and draws 2px white
    instance boundaries on top. Returns the rendered BGR uint8 image.
    """
    mask_dir = '/home/parmida/Pathology/IHC_Nuclick/masks/' + dir_type
    mask = np.load(os.path.join(mask_dir, img_name.replace('.png', '.npy')))
    labels_no = np.max(mask) + 1
    # One random colour per instance label (label 0 stays black). The
    # vectorized palette lookup replaces the original per-pixel double loop;
    # random.randint is drawn in the same order, so output is unchanged.
    palette = np.zeros((labels_no, 3), dtype=np.uint8)
    for i in range(1, labels_no):
        palette[i] = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))
    labeled_mask = palette[mask]
    final_mask = np.zeros((mask.shape[0], mask.shape[1], 3), dtype=np.uint8)
    # Adjacent instances get different random colours, so Canny fires on
    # the boundaries between touching instances as well.
    boundaries = cv2.Canny(labeled_mask, 100, 200)
    labeled_mask_bw = cv2.cvtColor(labeled_mask, cv2.COLOR_RGB2GRAY)
    final_mask[labeled_mask_bw > 0] = (0, 0, 255)
    contours, hierarchy = cv2.findContours(boundaries,
                                           cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    cv2.drawContours(final_mask, contours, -1, (255, 255, 255), 2)
    # (The original's trailing ``boundaries[boundaries > 0] = 255`` was a
    # dead write to a local and has been removed.)
    return final_mask
def get_detection_points(seg_img):
    """Convert a colour segmentation image into per-cell detection points.

    Thresholds the channels of ``seg_img`` (resized to 640x640) into
    candidate masks, cleans them up, splits touching cells with the
    per-channel watershed() helper and greedily merges detections that lie
    within a 20x20 window of each other.

    Returns ``(final_cells, positive_points, negative_points)`` where
    ``final_cells`` rows are [x, y, channel] from watershed() and the point
    lists hold (row, col) tuples.
    """
    seg_img = cv2.resize(seg_img, (640, 640))
    det_img = np.zeros_like(seg_img)
    # NOTE(review): 50 and the +50 dominance margin are hand-tuned
    # thresholds -- confirm against the segmentation colour scheme.
    thresh = 50
    det_img[np.logical_and(seg_img[:, :, 2] > thresh, seg_img[:, :, 2] > seg_img[:, :, 0] + 50)] = (0, 0, 255)
    det_img[np.logical_and(seg_img[:, :, 0] > thresh, seg_img[:, :, 0] >= seg_img[:, :, 2])] = (255, 0, 0)
    # A strong green channel vetoes both classes at that pixel.
    det_img[seg_img[:, :, 1] > thresh] = 0
    # Remove specks and fill holes in each class channel before splitting.
    det_img[:, :, 0] = remove_small_objects_from_image(det_img[:, :, 0], 80)
    det_img[:, :, 2] = remove_small_objects_from_image(det_img[:, :, 2], 80)
    det_img[:, :, 0] = ndimage.binary_fill_holes(det_img[:, :, 0]).astype(np.uint8) * 255
    det_img[:, :, 2] = ndimage.binary_fill_holes(det_img[:, :, 2]).astype(np.uint8) * 255
    # det_img[:, :, 0] = cv2.morphologyEx(det_img[:, :, 0], cv2.MORPH_ERODE, kernel=np.ones((3, 3)), iterations=2)
    # det_img[:, :, 2] = cv2.morphologyEx(det_img[:, :, 2], cv2.MORPH_ERODE, kernel=np.ones((3, 3)), iterations=2)
    # cv2.imshow('det_img', det_img)
    det_img = np.squeeze(det_img).astype(np.uint8)
    cells = watershed(det_img)
    final_cells = []
    positive_points = []
    negative_points = []
    # Greedy de-duplication: keep a detection only if no other not-yet-seen
    # detection lies within 20px in both axes. Result is order dependent.
    seen = np.zeros((seg_img.shape[0], seg_img.shape[1]), dtype=np.uint8)
    for i in range(len(cells)):
        p1 = cells[i]
        x1, y1, c1 = int(p1[1]), int(p1[0]), int(p1[2])
        flag = False
        seen[x1][y1] = 1
        for j in range(len(cells)):
            p2 = cells[j]
            x2, y2, c2 = int(p2[1]), int(p2[0]), int(p2[2])
            if seen[x2][y2] == 0:
                if abs(x1 - x2) < 20 and abs(y1 - y2) < 20:
                    flag = True
                    # new_cell = int((x1 + x2) / 2), int((y1 + y2) / 2), int((c1 + c2)/2)
                    # final_cells.append(new_cell)
        if not flag:
            final_cells.append(p1)
            # channel 2 = positive, channel 0 = negative -- presumably BGR
            # red/blue from the threshold assignments above.
            if c1 == 2:
                positive_points.append((x1, y1))
            elif c1 == 0:
                negative_points.append((x1, y1))
    return final_cells, positive_points, negative_points
def detect_circles(component, output):
    """Detect circles in ``component`` with the Hough transform and draw them.

    Draws a green 2px circumference on ``output`` for every detected circle;
    ``output`` is modified in place and nothing is returned.
    """
    # Light blur suppresses noise before the Hough transform.
    blurred = cv2.blur(component, (3, 3))
    hough = cv2.HoughCircles(blurred,
                             cv2.HOUGH_GRADIENT, 1, 20, param1=100,
                             param2=20, minRadius=1, maxRadius=40)
    if hough is None:
        return
    # Round the (x, y, r) parameters to integer pixel values before drawing.
    for centre_x, centre_y, radius in np.uint16(np.around(hough))[0, :]:
        cv2.circle(output, (centre_x, centre_y), radius, (0, 255, 0), 2)
def watershed(pred):
    # Split each of the three colour channels of ``pred`` into individual
    # cell detections: distance-transform the channel, seed markers at the
    # local maxima and run the watershed to separate touching blobs.
    # Returns an (N, 3) array of [x, y, channel] rows.
    cells=[]
    for ch in range(3):
        gray=pred[:,:,ch]
        # Distance to the nearest background pixel -- peaks sit at blob centres.
        D = ndimage.distance_transform_edt(gray)
        # NOTE(review): ``indices=False`` for peak_local_max is deprecated and
        # removed in newer scikit-image releases -- pin the version or port to
        # the coordinate-returning API.
        localMax = peak_local_max(D, indices=False, min_distance=10,exclude_border=False,labels=gray)
        markers = ndimage.label(localMax, structure=np.ones((3, 3)))[0]
        # Watershed on the negated distance map, constrained to the channel mask.
        labels = ws(-D, markers, mask=gray)
        for label in np.unique(labels):
            if label == 0:
                # Label 0 is background.
                continue
            mask = np.zeros(gray.shape, dtype="uint8")
            mask[labels == label] = 255
            # ``[-2]`` keeps compatibility with both the 2- and 3-tuple return
            # signatures of cv2.findContours across OpenCV versions.
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)[-2]
            c = max(cnts, key=cv2.contourArea)
            # Represent the segment by its minimum-enclosing-circle centre.
            ((x, y), _) = cv2.minEnclosingCircle(c)
            cells.append([x,y,ch])
    return np.array(cells)
def read_PathoNet_data(img_addr):
    """Load the PathoNet point annotations accompanying the image at ``img_addr``.

    Reads ``<name>_points.txt`` next to the image; each row holds (x, y, class).
    Returns ``(positive_points, negative_points)`` as lists of (row, col) tuples.

    NOTE(review): rows with class == 1 are treated as negative and everything
    else as positive -- confirm against the PathoNet label convention. Also,
    np.loadtxt returns a 1-D array for a single-row file, which would break the
    per-row iteration below.
    """
    print(img_addr)
    points = np.loadtxt(img_addr.replace('.jpg', '_points.txt'))
    image = cv2.imread(img_addr)
    # positive_mask = np.zeros((640, 640), dtype=np.uint8)
    # negative_mask = np.zeros((640, 640), dtype=np.uint8)
    positive_points = []
    negative_points = []
    for p in points:
        if int(p[2]) == 1:
            # Mark negatives on the (debug-only) image copy.
            image[int(p[1]), int(p[0])] = (255, 0, 255)
            negative_points.append((int(p[1]), int(p[0])))
        else:
            image[int(p[1]), int(p[0])] = (0, 255, 255)
            positive_points.append((int(p[1]), int(p[0])))
    # cv2.imshow('image', image)
    # cv2.waitKey(0)
    return positive_points, negative_points
def crop_modalities(input_dir, img_name, img_types, location, size, output_dir):
    """Crop the same window out of several co-registered modality images.

    For each suffix in ``img_types`` reads ``<img_name><img_type>.png`` from
    ``input_dir`` and writes the ``size``-sized crop anchored at ``location``
    (row, col) into ``output_dir``.

    NOTE(review): the output name is hard-coded to ``'MYC_' + img_type`` and
    ignores ``img_name``, so crops from different source images overwrite one
    another -- confirm this single-image usage is intended.
    """
    for img_type in img_types:
        image = cv2.imread(os.path.join(input_dir, img_name + img_type + '.png'))
        crop = image[location[0]:location[0] + size[0], location[1]: location[1] + size[1]]
        cv2.imwrite(os.path.join(output_dir, 'MYC_' + img_type + '.png'), crop)
def read_mask_rcnn_segmentation_masks(input_dir, image_size):
    """Aggregate per-instance Mask R-CNN output images into per-image masks.

    File names are expected to look like
    ``<image_name>_<idx>_<class>_<suffix>.png`` where ``<class>`` is ``'1'``
    for positive cells. Returns a dict mapping each image name to uint8
    masks under the keys 'positive', 'negative' and 'binary'.
    """
    images = os.listdir(input_dir)
    masks = {}
    for img in images:
        if '.png' in img and len(img.split('_')) > 5:
            print(img)
            splitted = img.split('_')
            image_name = ''
            for i in range(0, len(splitted) - 3):
                image_name += splitted[i] + '_'
            image_name += splitted[-3]
            # BUG FIX: ``splitted[-2]`` is a string, but the original compared
            # it to the int 1, so every instance was classified 'negative'.
            # The detection variant of this reader already compares to '1'.
            cell_type = 'positive' if splitted[-2] == '1' else 'negative'
            image = cv2.imread(os.path.join(input_dir, img))
            image = cv2.resize(image, (image_size, image_size))
            # Any channel saturated (> 250) marks a foreground pixel.
            image_bw = np.zeros((image.shape[0], image.shape[1]), dtype=np.uint8)
            image_bw[image[:, :, 0] > 250] = 1
            image_bw[image[:, :, 1] > 250] = 1
            image_bw[image[:, :, 2] > 250] = 1
            if image_name not in masks:
                masks[image_name] = {'positive': np.zeros((image.shape[0], image.shape[1]), dtype=np.uint8),
                                     'negative': np.zeros((image.shape[0], image.shape[1]), dtype=np.uint8),
                                     'binary': np.zeros((image.shape[0], image.shape[1]), dtype=np.uint8)}
            masks[image_name][cell_type][image_bw > 0] = 1
            masks[image_name]['binary'][image_bw > 0] = 1
    return masks
def read_mask_rcnn_detection_masks(input_dir, image_size):
    """Collect per-instance Mask R-CNN masks into per-image centre points.

    File names look like ``<image_name>_<idx>_<class>_<suffix>.png`` with
    ``<class> == '1'`` for positive cells. For every instance mask the
    bounding-box centre of its near-white pixels is appended to the image's
    'positive'/'negative' list and to 'binary'.
    """
    images = os.listdir(input_dir)
    masks = {}
    for img in images:
        if '_' in img and '.png' in img:
            splitted = img.split('_')
            image_name = ''
            for i in range(0, len(splitted) - 3):
                image_name += splitted[i] + '_'
            image_name += splitted[-3]
            cell_type = 'positive' if splitted[-2] == '1' else 'negative'
            image = cv2.imread(os.path.join(input_dir, img))
            image = cv2.resize(image, (image_size, image_size))
            # Any channel saturated (> 250) marks a foreground pixel.
            image_bw = np.zeros((image.shape[0], image.shape[1]), dtype=np.uint8)
            image_bw[image[:, :, 0] > 250] = 1
            image_bw[image[:, :, 1] > 250] = 1
            image_bw[image[:, :, 2] > 250] = 1
            points = np.nonzero(image_bw)
            x = points[0]
            y = points[1]
            # Robustness fix: an all-dark instance mask has no foreground
            # pixels and np.min on an empty array raises -- skip it instead.
            if x.size == 0:
                continue
            bounding_box = [np.min(x), np.min(y), np.max(x), np.max(y)]
            center = (int((bounding_box[0] + bounding_box[2]) / 2), int((bounding_box[1] + bounding_box[3]) / 2))
            if image_name not in masks:
                masks[image_name] = {'positive': [], 'negative': [], 'binary': []}
            masks[image_name][cell_type].append(center)
            masks[image_name]['binary'].append(center)
    return masks
def read_unetplusplus_unet(input_npy, npy_path):
    """Dump every prediction in a UNet++/UNet result ``.npy`` stack to a PNG.

    ``npy_path + 'dict_name.txt'`` must hold a JSON dict mapping the array
    index (as a string) to the output image name. PNGs are written next to
    ``input_npy``.
    """
    # results = np.load(input_dir)
    imgs = np.load(input_npy)
    with open(npy_path + 'dict_name.txt', 'r') as f:
        names = json.load(f)
    res_path = os.path.dirname(input_npy)
    for i in range(imgs.shape[0]):
        img = imgs[i]
        print(img.shape)
        # assumes predictions are floats in [0, 1] so * 255 fills uint8 -- TODO confirm
        plt.imsave(os.path.join(res_path, names[str(i)] + '.png'), (img * 255).astype(np.uint8))
def | |
from medium.powlib import pluralize
import datetime
from cerberus import Validator
import xmltodict
import simplejson as json
import datetime, decimal
from medium.config import myapp
from medium.powlib import merge_two_dicts
from medium.encoders import pow_json_serializer
from medium.decoders import pow_json_deserializer
import medium.config as cfg
from medium.decoders import pow_init_from_dict_deserializer
class ModelObject():
"""
The BaseClass for all PoW Model Classes
"""
# if you need a basic schema in the class override this (see tinyDB BaseModel)
basic_schema={}
observers_initialized = False
observers = []
autocommit = True
def init_on_load(self, *args, **kwargs):
    """
    should be called from instances or BaseModels __init__
    will be called by sqlalchemy automatically on model creation
    """
    # Table name is the pluralized, lower-cased class name.
    self.tablename = pluralize(self.__class__.__name__.lower())
    self.setup_instance_schema()
    if "format" in kwargs:
        # BUG FIX: the original called ``setup_from_format( args, kwargs)``,
        # passing the tuple and the dict as two *positional* arguments, so
        # setup_from_format never saw a "format" keyword and silently did
        # nothing. Forward the call arguments properly unpacked.
        self.setup_from_format(*args, **kwargs)
def setup_dirty_model(self):
    """
    Initialise dirty-attribute tracking relative to the last save to DB.
    Rails equivalent: https://apidock.com/rails/ActiveRecord/Dirty
    """
    # Maps attribute name -> {"value": <pre-change value>, "dirty": True}.
    self.dirty = {}
    # True as soon as any schema attribute changed since the last save.
    self.is_dirty = False
def rollback_dirty(self, name=None):
    """
    Roll back the in-memory changes made since the last save-to-DB
    operation (see https://apidock.com/rails/ActiveRecord/Dirty).
    This is NOT a DB rollback -- see session.rollback() for SQL backends.

    If ``name`` is given, only that attribute is restored; otherwise every
    tracked attribute is restored and the dirty state is cleared.
    """
    if not self.is_dirty:
        return
    if name is not None and name in self.dirty:
        # Roll back just this one attribute.
        try:
            setattr(self, name, self.dirty[name]["value"])
            self.dirty.pop(name, None)
            # check if still elements in dirty
            if not self.dirty:
                self.is_dirty = False
        except Exception as e:
            print("ERROR Dirty rollback : {}".format(str(e)))
        # BUG FIX: the original fell through after the single-name rollback
        # and rolled back *all* remaining attributes as well.
        return
    # Roll back every tracked attribute. Iterate over a snapshot because
    # setattr() feeds back into the dirty-tracking __setattr__.
    for elem in list(self.dirty):
        try:
            setattr(self, elem, self.dirty[elem]["value"])
        except Exception as e:
            print("ERROR Dirty rollback : {}".format(str(e)))
    # After a full rollback nothing is dirty anymore (the original left the
    # flags set, contradicting the single-name path).
    self.dirty = {}
    self.is_dirty = False
def was(self,name):
"""
returns the value that attribute name had before the last save to DB operation (dirty object)
see: https://apidock.com/rails/ActiveRecord/Dirty
"""
try:
return self.dirty[name]["value"]
except Exception as e:
raise e
def changed(self,name):
"""
returns the value and changed value that attribute name had before the last save to
DB operation (dirty object)
see: https://apidock.com/rails/ActiveRecord/Dirty
"""
try:
return [self.dirty[name]["value"], getattr(self,name)]
except Exception as e:
raise e
def __setattr__(self, name, value):
    """
    Intercept every attribute assignment: coerce ``value`` to its schema
    type and record dirty state for schema attributes.
    Assumes ``self.schema`` is already initialised (setup_instance_schema);
    it is read unconditionally below.
    """
    #print("trying to set attribute: {} -> to {}".format(str(name), str(value)))
    #
    # try to convert the value to the schema type
    #
    d={}
    d[name]=value
    # Project deserializer converts the raw value to the schema's type.
    d=pow_init_from_dict_deserializer(d, self.schema, simple_conversion=myapp["simple_conversion"])
    # check if dirty mark has to be set.
    if name in self.schema:
        try:
            current_value = getattr(self, name)
            if value != current_value and not name in self.dirty:
                #has changed, so save the old val and mark as dirty:
                self.dirty[name] = { "value" : current_value, "dirty" : True }
                self.is_dirty = True
        except:
            # NOTE(review): the bare except silently skips dirty tracking
            # when the attribute or ``self.dirty`` does not exist yet
            # (e.g. the very first assignment).
            pass
    # set the value
    super().__setattr__(name, d[name])
#
# These Methods can normally be inherited
#
def setup_instance_schema(self):
    """
    If the concrete model class defines a (cerberus) schema, merge it with
    the class-level ``basic_schema`` and store the result on the instance.
    """
    if "schema" in self.__class__.__dict__:
        print(" .. found a schema for: " + str(self.__class__.__name__) + " in class dict")
        # BUG FIX: ``basic_schema`` was referenced as a bare name, which
        # raises NameError at runtime -- it is a class attribute and must
        # be reached through ``self``.
        self.schema = merge_two_dicts(
            self.__class__.__dict__["schema"],
            self.basic_schema)
        print(" .. Schema is now: " + str(self.schema))
def setup_instance_values(self):
    """ fills the instance with defined default values"""
    # For every schema key: use the schema's own "default" when present,
    # otherwise fall back to the type-wide default from cfg.database,
    # and finally to None when no default is configured for the type.
    for key in self.schema.keys():
        if self.schema[key].get("default", None) != None:
            setattr(self,key,self.schema[key].get("default"))
            # NOTE(review): popping mutates the schema dict itself, so the
            # "default" entry is gone for any later call (and for other
            # instances sharing this schema) -- confirm this one-shot
            # behaviour is intended.
            self.schema[key].pop("default", None)
        else:
            #print("no default for: " + str(self.schema[key]))
            #print("trying: " + str(cfg.database["default_values"][self.schema[key]["type"]]))
            try:
                #print("trying: " + config.database["default_values"][self.schema[key]["type"]])
                setattr(self,key,cfg.database["default_values"][self.schema[key]["type"]])
            except Exception as e:
                # No configured default for this type: log and use None.
                print(str(e))
                setattr(self, key, None)
def setup_from_format(self, *args, **kwargs):
    """
    setup values from kwargs or from init_from_<format> if format="someformat"
    example: m = Model( data = { 'test' : 1 }, format="json")
    will call m.init_from_json(data)
    """
    if "format" in kwargs:
        # set the format and call the according init_from_<format> method
        # which initializes the instance with the given vaules (from data)
        # e.g. Model(format=json, data={data})
        f = getattr(self, "init_from_" + kwargs["format"], None)
        if f:
            # NOTE(review): this passes the *whole* kwargs dict (including
            # the "format" key) as the single positional ``data`` argument
            # of init_from_<format>; init_from_json expects a JSON string.
            # f(kwargs["data"]) looks intended -- confirm against callers.
            f(kwargs)
    else:
        # initializes the instanmce with the given kwargs values:
        # e.g.: Model(test="sometext", title="sometitle")
        for key in kwargs.keys():
            if key in self.schema:
                setattr(self, key, kwargs[key])
def init_observers(self):
    """
    Locate and instantiate the observer class for this model, once per
    class: looks for ``<module>_observer.<ClassName>Observer`` via
    pydoc.locate and appends an instance to the class-level ``observers``
    list. Failures are treated as "no observer defined".
    """
    #
    # Try to find Observers.
    #
    if self.__class__.observers_initialized:
        # Already resolved (or attempted) for this class -- do nothing.
        return
    obs = getattr(self,"observers", False)
    if obs:
        # try to load the classes and fire their action on the corresponding model actions.
        # rails: (remark: obervers are a separate module since 3.2)
        # https://api.rubyonrails.org/v3.2.13/classes/ActiveRecord/Callbacks.html
        # https://api.rubyonrails.org/v3.2.13/classes/ActiveRecord/Observer.html#method-i-define_callbacks
        # pow:
        # before & after: save, create, commit, validation, delete.
        pass
    from pydoc import locate
    print("trying to find possible observer in {}".format(
        str(self.__class__.__module__)+"_observer."+ str(self.__class__.__name__)+ "Observer"
        )
    )
    try:
        # locate() returns None when the module/class is missing; calling
        # None then raises, which lands us in the except branch below.
        obs = locate(str(self.__class__.__module__) +"_observer." + str(self.__class__.__name__) + "Observer")
        o=obs()
        print(" ... Found: {}".format(str(o.__class__)))
        self.__class__.observers_initialized = True
        self.__class__.observers.append(o)
    except Exception as e:
        # No observer found: mark as initialized so we do not search again.
        self.__class__.observers_initialized = True
        #print (" ... Found None: {}".format(str(e) ))
def api(self):
""" just for conveniance """
return self.show_api()
def show_api(self):
"""
prints the "external API of the class.
No under or dunder methods
And methods only.
Uses inspect module.
"""
import inspect
print(50*"-")
print(" external API for " + self.__class__.__name__)
print(50*"-")
for elem in inspect.getmembers(self, predicate=inspect.ismethod):
meth = elem[0]
if not meth.startswith("_"):
print("{0:30s}".format(" .. " +str.strip(meth)), end="")
# print(" method: " + str.strip(meth) , end="")
func=getattr(self,meth)
if func:
if func.__doc__:
print( " --> " + str.strip(func.__doc__[0:50]))
else:
#print( " No docstring ")
print()
else:
print()
def validate(self):
"""
checks the instance against a schema.
validatees the current values
"""
if getattr(self,"schema", False):
# if instance has a schema. (also see init_on_load)
#v = cerberus.Validator(self.schema)
v = Validator(self.schema)
if self.observers_initialized:
for observer in self.observers:
try:
ret = observer.before_validate(self, v)
except:
pass
res = v.validate(self.to_dict(lazy=False))
if self.observers_initialized:
for observer in self.observers:
try:
ret = observer.after_validate(self, res)
except:
pass
if v.validate(self.to_dict(lazy=False)):
return (True, None)
else:
return (False,v)
def init_from_dict(self, d, ignore=True, simple_conversion=False):
    """
    creates a Model from the given data dictionary
    simple_conversion = True tries to use simple logic to create
    a little bit more advanced python data types.
    for example "a b c" will be model.attribute = "a b c".split(myapp["list_separator"])
    Mainly used for handling request from simple html form scaffolding

    With ignore=False a key missing from the schema raises an Exception.
    """
    # Local import mirrors the module-level one -- presumably guards
    # against a circular import at load time; confirm before removing.
    from medium.decoders import pow_init_from_dict_deserializer
    #print("init from dict")
    #print(d)
    # Convert raw values (e.g. HTML-form strings) to schema-typed values.
    d=pow_init_from_dict_deserializer(d,self.schema, simple_conversion)
    #print("after conversion: ")
    #for elem in d:
    #    print(str(elem) + "->" + str(type(elem)))
    for key in d:
        if ignore:
            # ignore=True: set every key, even ones missing from the schema.
            setattr(self, key, d[key])
        else:
            if key in self.schema:
                setattr(self, key, d[key])
            else:
                raise Exception(" Key: " + str(key) + " is not in schema for: " + self.__class__.__name__)
    def init_from_xml(self, data, root="root", ignore=True):
        """
        Parse the given XML string into a python dict (via xmltodict) and
        set the resulting key/value pairs as attributes on this instance.

        root selects the XML root node to read from.
        When ignore is False, keys missing from the schema raise.
        """
        d=xmltodict.parse(data)
        d=d[root]
        for key in d:
            #print("key: " + key + " : " + str(d[key]) )
            if isinstance(d[key],dict):
                # NOTE(review): looks like a leftover debug print -- confirm
                # before removing.
                print(d[key])
                for elem in d[key]:
                    # xmltodict stores node content under keys starting
                    # with "#" (e.g. "#text").
                    if elem.startswith("#"):
                        # Only set the value when the class declares the
                        # attribute; other nested keys are silently skipped.
                        if key in self.__class__.__dict__:
                            setattr(self, key, d[key][elem])
            else:
                #if key in self.__class__.__dict__:
                if ignore:
                    # Accept any key, whether it is in the schema or not.
                    setattr(self, key, d[key])
                else:
                    if key in self.schema:
                        setattr(self, key, d[key])
                    else:
                        raise Exception(" Key: " + str(key) + " is not in schema for: " + self.__class__.__name__)
def init_from_json_file(self, json_file=None, ignore=True, simple_conversion=False):
"""
returns a generator that yields models instances per row
of the json file.
"""
with open(json_file) as f:
data = json.load(f)
for d in data:
m = self.__class__()
m.init_from_dict(d, ignore, simple_conversion=simple_conversion)
yield m
def init_from_json(self, data, ignore=True, simple_conversion=False):
"""
makes a py dict from input json and
sets the instance attributes
sets the attributes on self if len(data) == 1
returns a generator if len(data)>1
"""
d=json.loads(data,object_hook=pow_json_deserializer)
return self.init_from_dict(d, ignore, simple_conversion=simple_conversion)
#else:
# for d in data:
# m = self.__class__()
# m.init_from_dict(d, ignore, simple_conversion=simple_conversion)
# yield m
# def init_from_json(self, data, ignore=True, simple_conversion=False):
# """
# makes a py dict from input json and
# sets the instance attributes
# """
# d=json.loads(data,object_hook=pow_json_deserializer)
# return self.init_from_dict(d, ignore, simple_conversion=simple_conversion)
def init_from_csv_file(self, csv_file=None, newline='', ignore=True):
"""
inits instances of this model from the given csv
returns a generator that yields models instances per row
of the csv file.
"""
import csv
with open(csv_file, newline=newline) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
#print(row)
m = self.__class__()
for key,value in row.items():
if ignore:
setattr(m, key, value)
else:
if key in self.schema:
setattr(m, key, value)
else:
| |
<reponame>mike-n-7/tsr<gh_stars>10-100
#!/usr/bin/env python
# Copyright (c) 2013, Carnegie Mellon University
# All rights reserved.
# Authors: <NAME> <<EMAIL>>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of Carnegie Mellon University nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## @package libherb.kin Helper functions for creating and converting transforms and rotations and all their representations.
import numpy
# Python implementation of functions from libcd
# quat = numpy.array([qx,qy,qz,qw]) # 4 element quaternion list with qw last
# H = numpy.eye(4) #4x4 transformation matrix
# R = numpy.eye(3) #3x3 rotation matrix
# pose = [tx,ty,tz, qx,qy,qz,qw] # 7 element pose list with 3 element translation first followed by 4 element quaternion with qw last
# Ideas:
# TODO: rewrite the quat functions to match the OpenRAVE quat format ([qw,qx,qy,qz]).
# TODO: rewrite the pose functions to match the OpenRAVE pose format ([qw,qx,qy,qz,tx,ty,tz]).
def pose_normalize(pose):
    """Normalize the quaternion part (elements 3..6) of a 7-element pose, in place."""
    quat = pose[3:7]
    pose[3:7] = quat / numpy.linalg.norm(quat)
def R_to_quat(R):
    """
    Convert a 3x3 rotation matrix R into a quaternion [qx, qy, qz, qw].

    Branches for numerical stability: when 1 + trace(R) is comfortably
    positive the direct trace formula is used; otherwise the formula keyed
    off the largest diagonal element is used, avoiding 0.5/sqrt(tiny).
    """
    q = numpy.zeros(4)
    # t == 1 + trace(R); equals 4*qw^2 for a proper rotation matrix.
    t = 1 + R[0,0] + R[1,1] + R[2,2]
    # Index of the largest diagonal element; only consulted when t is small.
    if R[0,0] > R[1,1] and R[0,0] > R[2,2]:
        imax = 0
    elif R[1,1] > R[2,2]:
        imax = 1
    else:
        imax = 2
    if t > 0.000001:
        r = numpy.sqrt(t)
        s = 0.5 / r
        q[0] = (R[2,1]-R[1,2])*s # x
        q[1] = (R[0,2]-R[2,0])*s # y
        q[2] = (R[1,0]-R[0,1])*s # z
        q[3] = 0.5 * r # w
    elif imax == 0: # Rxx largest
        r = numpy.sqrt(1 + R[0,0] - R[1,1] - R[2,2])
        s = 0.5 / r
        q[0] = 0.5 * r # x
        q[1] = (R[0,1]+R[1,0])*s # y
        q[2] = (R[0,2]+R[2,0])*s # z
        q[3] = (R[2,1]-R[1,2])*s # w
    elif imax == 1: # Ryy largest
        r = numpy.sqrt(1 - R[0,0] + R[1,1] - R[2,2])
        s = 0.5 / r
        q[0] = (R[1,0]+R[0,1])*s # x
        q[1] = 0.5 * r # y
        # NOTE(review): original author marked this term "???" -- the
        # symmetric yz form matches the other branches, but verify.
        q[2] = (R[1,2]+R[2,1])*s # z
        q[3] = (R[0,2]-R[2,0])*s # w
    else: # Rzz largest
        r = numpy.sqrt(1 - R[0,0] - R[1,1] + R[2,2])
        s = 0.5 / r
        q[0] = (R[2,0]+R[0,2])*s # x
        q[1] = (R[2,1]+R[1,2])*s # y
        q[2] = 0.5 * r # z
        q[3] = (R[1,0]-R[0,1])*s # w
    return q
def R_from_quat(quat):
    """Build the 3x3 rotation matrix for quaternion quat = [qx, qy, qz, qw]."""
    qx, qy, qz, qw = quat[0], quat[1], quat[2], quat[3]
    # Precompute the distinct quadratic terms once.
    xx, yy, zz = qx * qx, qy * qy, qz * qz
    xy, xz, yz = qx * qy, qx * qz, qy * qz
    xw, yw, zw = qx * qw, qy * qw, qz * qw
    return numpy.array(
        [
            [1 - 2 * (yy + zz), 2 * (xy - zw), 2 * (xz + yw)],
            [2 * (xy + zw), 1 - 2 * (xx + zz), 2 * (yz - xw)],
            [2 * (xz - yw), 2 * (yz + xw), 1 - 2 * (xx + yy)],
        ],
        dtype=float,
    )
def pose_to_H(pose):
    """Convert a 7-element pose [tx,ty,tz,qx,qy,qz,qw] into a 4x4 transform."""
    transform = numpy.eye(4)
    transform[0:3, 0:3] = R_from_quat(pose[3:7])
    transform[0:3, 3] = pose[0:3]
    return transform
def pose_from_H(H):
    """Convert a 4x4 transform H into a 7-element pose [tx,ty,tz,qx,qy,qz,qw]."""
    result = numpy.zeros(7)
    result[0:3] = H[0:3, 3]
    result[3:7] = R_to_quat(H[0:3, 0:3])
    return result
def quat_to_ypr(quat):
    """
    Convert quaternion [qx,qy,qz,qw] to [yaw, pitch, roll] Euler angles.

    The two threshold branches guard the singularities where the pitch
    approaches +/- pi/2 (|qw*qy - qz*qx| near 0.5).
    """
    qx, qy, qz, qw = quat[0], quat[1], quat[2], quat[3]
    sinp2 = qw * qy - qz * qx
    if sinp2 > 0.49999:
        # Pitch pinned at +90 degrees.
        return numpy.array([-2.0 * numpy.arctan2(qx, qw), 0.5 * numpy.pi, 0.0])
    if sinp2 < -0.49999:
        # Pitch pinned at -90 degrees.
        return numpy.array([2.0 * numpy.arctan2(qx, qw), -0.5 * numpy.pi, 0.0])
    yaw = numpy.arctan2(2 * (qw * qz + qx * qy), 1 - 2 * (qy * qy + qz * qz))
    pitch = numpy.arcsin(2 * sinp2)
    roll = numpy.arctan2(2 * (qw * qx + qy * qz), 1 - 2 * (qx * qx + qy * qy))
    return numpy.array([yaw, pitch, roll])
def quat_from_ypr(ypr):
    """Build quaternion [qx,qy,qz,qw] from [yaw, pitch, roll] Euler angles."""
    cy, sy = numpy.cos(0.5 * ypr[0]), numpy.sin(0.5 * ypr[0])
    cp, sp = numpy.cos(0.5 * ypr[1]), numpy.sin(0.5 * ypr[1])
    cr, sr = numpy.cos(0.5 * ypr[2]), numpy.sin(0.5 * ypr[2])
    return numpy.array(
        [
            -sy * sp * cr + cy * cp * sr,  # qx
            cy * sp * cr + sy * cp * sr,   # qy
            -cy * sp * sr + sy * cp * cr,  # qz
            sy * sp * sr + cy * cp * cr,   # qw
        ]
    )
def pose_from_xyzypr(xyzypr):
    """Convert [x,y,z,yaw,pitch,roll] into a 7-element pose [tx,ty,tz,qx,qy,qz,qw]."""
    pose = numpy.zeros(7)
    pose[0:3] = xyzypr[0:3]
    cy, sy = numpy.cos(0.5 * xyzypr[3]), numpy.sin(0.5 * xyzypr[3])
    cp, sp = numpy.cos(0.5 * xyzypr[4]), numpy.sin(0.5 * xyzypr[4])
    cr, sr = numpy.cos(0.5 * xyzypr[5]), numpy.sin(0.5 * xyzypr[5])
    pose[3] = -sy * sp * cr + cy * cp * sr  # qx
    pose[4] = cy * sp * cr + sy * cp * sr   # qy
    pose[5] = -cy * sp * sr + sy * cp * cr  # qz
    pose[6] = sy * sp * sr + cy * cp * cr   # qw
    return pose
def pose_to_xyzypr(pose):
    """Convert a 7-element pose [tx,ty,tz,qx,qy,qz,qw] into [x,y,z,yaw,pitch,roll]."""
    out = numpy.zeros(6)
    out[0:3] = pose[0:3]
    qx, qy, qz, qw = pose[3], pose[4], pose[5], pose[6]
    sinp2 = qw * qy - qz * qx
    if sinp2 > 0.49999:
        # Pitch pinned at +90 degrees.
        out[3:6] = [-2.0 * numpy.arctan2(qx, qw), 0.5 * numpy.pi, 0.0]
    elif sinp2 < -0.49999:
        # Pitch pinned at -90 degrees.
        out[3:6] = [2.0 * numpy.arctan2(qx, qw), -0.5 * numpy.pi, 0.0]
    else:
        out[3] = numpy.arctan2(2 * (qw * qz + qx * qy), 1 - 2 * (qy * qy + qz * qz))
        out[4] = numpy.arcsin(2 * sinp2)
        out[5] = numpy.arctan2(2 * (qw * qx + qy * qz), 1 - 2 * (qx * qx + qy * qy))
    return out
def H_from_op_diff(pos_from, pos_to_diff):
    '''
    Produce a transform H rooted at location pos_from
    with Z axis pointed in direction pos_to_diff
    Taken from libcds kin.c
    2011-08-01 cdellin
    (Fixed the inconsistent chained H[i][j] indexing to H[i,j].)
    '''
    H = numpy.eye(4)
    # Translation: root the frame at pos_from.
    H[0,3] = pos_from[0]
    H[1,3] = pos_from[1]
    H[2,3] = pos_from[2]
    # Z axis points along the normalized direction vector.
    zlen = numpy.sqrt(numpy.dot(pos_to_diff, pos_to_diff))
    H[0,2] = pos_to_diff[0]/zlen
    H[1,2] = pos_to_diff[1]/zlen
    H[2,2] = pos_to_diff[2]/zlen
    # Complete the frame with X and Y axes orthogonal to Z.
    if abs(H[0,2]) > 0.9:
        # Z is too close to e1, but sufficiently far from e2:
        # cross e2 with Z to get X (and normalize).
        vlen = numpy.sqrt(H[2,2]*H[2,2] + H[0,2]*H[0,2])
        H[0,0] = H[2,2] / vlen
        H[1,0] = 0.0
        H[2,0] = -H[0,2] / vlen
        # Then Y = Z x X
        H[0,1] = H[1,2] * H[2,0] - H[2,2] * H[1,0]
        H[1,1] = H[2,2] * H[0,0] - H[0,2] * H[2,0]
        H[2,1] = H[0,2] * H[1,0] - H[1,2] * H[0,0]
    else:
        # Z is sufficiently far from e1:
        # cross Z with e1 to get Y (and normalize).
        vlen = numpy.sqrt(H[2,2]*H[2,2] + H[1,2]*H[1,2])
        H[0,1] = 0.0
        H[1,1] = H[2,2] / vlen
        H[2,1] = -H[1,2] / vlen
        # Then X = Y x Z
        H[0,0] = H[1,1] * H[2,2] - H[2,1] * H[1,2]
        H[1,0] = H[2,1] * H[0,2] - H[0,1] * H[2,2]
        H[2,0] = H[0,1] * H[1,2] - H[1,1] * H[0,2]
    return H
def invert_H(H):
    '''
    Invert transform H using the rigid-transform identity
    inv([R d; 0 1]) == [R.T  -R.T*d; 0 1].
    '''
    rotation = H[0:3, 0:3]
    translation = H[0:3, 3]
    inverse = numpy.eye(4)
    inverse[0:3, 0:3] = rotation.T
    inverse[0:3, 3] = -numpy.dot(rotation.T, translation)
    return inverse
def xyzt_to_H(xyzt):
    '''
    Convert [x,y,z,theta] to 4x4 transform H
    theta is rotation about z-axis
    '''
    quat = quat_from_ypr([xyzt[3], 0.0, 0.0])
    pose = [xyzt[0], xyzt[1], xyzt[2]] + [quat[0], quat[1], quat[2], quat[3]]
    return pose_to_H(pose)
def xyzypr_to_H(xyzypr):
    '''
    Convert [x,y,z,yaw,pitch,roll] to 4x4 transform H
    '''
    quat = quat_from_ypr(xyzypr[3:6])
    pose = [xyzypr[0], xyzypr[1], xyzypr[2]] + [quat[0], quat[1], quat[2], quat[3]]
    return pose_to_H(pose)
def quat_to_axisangle(quat):
    """
    Convert quaternion [qx,qy,qz,qw] into an (axis, angle) pair.

    Returns a tuple (3-element unit axis array, angle in radians).
    For the (near-)identity quaternion the rotation angle is ~0 and the
    axis is mathematically arbitrary; the original code divided by
    sin(0) there, so we now return the +Z axis instead of NaN/inf.
    """
    a2 = numpy.arccos(quat[3])
    angle = 2.0 * a2
    sin_a2 = numpy.sin(a2)
    axis = numpy.zeros(3)
    if sin_a2 < 1e-12:
        # No meaningful rotation axis; pick +Z by convention.
        axis[2] = 1.0
        return (axis, angle)
    axis[0] = quat[0] / sin_a2
    axis[1] = quat[1] / sin_a2
    axis[2] = quat[2] / sin_a2
    return (axis, angle)
def transform_comparison(H1, H2):
'''
Compare two 4x4 transforms H1 and H2.
Return the differnce in position and rotation.
'''
T_difference = numpy.dot( invert_H(H1), H2 )
quat_difference = R_to_quat(T_difference[0:3,0:3]) #[x,y,z,w]
rotation_difference = numpy.abs(2.0* numpy.arccos(quat_difference[3])) # 2*acos(qw)
position_difference = numpy.sqrt( numpy.dot( numpy.array(T_difference[0:3,3]), numpy.array(T_difference[0:3,3]) ) )
| |
= value
# Then update the entry
new_release_json[release_entry] = _stringify_config(new_version_config)
return new_release_json
##
## Main functions
##
def _update_release_json(ctx, release_json, release_entry, new_version: Version, github_token):
    """
    Updates the provided release.json object by fetching compatible versions for all dependencies
    of the provided Agent version, constructing the new entry, adding it to the release.json object
    and returning it.
    """
    allowed_major_versions = COMPATIBLE_MAJOR_VERSIONS[new_version.major]
    # Part 1: repositories which follow the Agent version scheme.
    # Restrict candidate tags to the same minor version, so a patch release
    # started while a minor release is ongoing doesn't pick up the wrong tags.
    compatible_version_re = build_compatible_version_re(allowed_major_versions, new_version.minor)
    # For a final version, warn if any dependency still points at an RC.
    check_for_rc = not new_version.is_rc()
    agent_scheme_repos = ("integrations-core", "omnibus-software", "omnibus-ruby", "datadog-agent-macos-build")
    (
        integrations_version,
        omnibus_software_version,
        omnibus_ruby_version,
        macos_build_version,
    ) = [
        _fetch_dependency_repo_version(
            ctx, repo, new_version, allowed_major_versions, compatible_version_re, github_token, check_for_rc
        )
        for repo in agent_scheme_repos
    ]
    # Part 2: repositories which have their own version scheme.
    jmxfetch_version = _fetch_independent_dependency_repo_version(
        "jmxfetch", release_json, new_version.major, github_token, "JMXFETCH_VERSION"
    )
    security_agent_policies_version = _fetch_independent_dependency_repo_version(
        "security-agent-policies", release_json, new_version.major, github_token, "SECURITY_AGENT_POLICIES_VERSION"
    )
    windows_ddnpm_driver, windows_ddnpm_version, windows_ddnpm_shasum = _get_windows_ddnpm_release_json_info(
        release_json, new_version.major, is_first_rc=(new_version.rc == 1)
    )
    # Build the new entry, splice it into the release.json object, return it.
    return _update_release_json_entry(
        release_json,
        release_entry,
        integrations_version,
        omnibus_software_version,
        omnibus_ruby_version,
        jmxfetch_version,
        security_agent_policies_version,
        macos_build_version,
        windows_ddnpm_driver,
        windows_ddnpm_version,
        windows_ddnpm_shasum,
    )
def update_release_json(ctx, github_token, new_version: Version):
    """
    Updates the release entries in release.json to prepare the next RC or final build.
    """
    current = _load_release_json()
    entry = release_entry_for(new_version.major)
    print(f"Updating {entry} for {new_version}")
    # Compute the new entry for this version and persist the result.
    updated = _update_release_json(ctx, current, entry, new_version, github_token)
    _save_release_json(updated)
def check_version(agent_version):
    """Check Agent version to see if it is valid; raise Exit otherwise."""
    pattern = re.compile(r'7[.](\d+)[.](\d+)(-rc\.(\d+))?')
    if pattern.match(agent_version) is None:
        raise Exit(message="Version should be of the form 7.Y.Z or 7.Y.Z-rc.t")
@task
def update_modules(ctx, agent_version, verify=True):
    """
    Update internal dependencies between the different Agent modules.
    * --verify checks for correctness on the Agent Version (on by default).
    Examples:
    inv -e release.update-modules 7.27.0
    """
    if verify:
        check_version(agent_version)
    # Rewrite each module's go.mod requirements for its internal deps.
    for module in DEFAULT_MODULES.values():
        for dep_name in module.dependencies:
            dep = DEFAULT_MODULES[dep_name]
            ctx.run(f"go mod edit -require={dep.dependency_path(agent_version)} {module.go_mod_path()}")
@task
def tag_version(ctx, agent_version, commit="HEAD", verify=True, tag_modules=True, push=True, force=False):
    """
    Create tags for a given Datadog Agent version.
    The version should be given as an Agent 7 version.
    * --commit COMMIT will tag COMMIT with the tags (default HEAD)
    * --verify checks for correctness on the Agent version (on by default).
    * --tag_modules tags Go modules in addition to the agent repository
    * --push will push the tags to the origin remote (on by default).
    * --force will allow the task to overwrite existing tags. Needed to move existing tags (off by default).
    Examples:
    inv -e release.tag-version 7.27.0 # Create tags and push them to origin
    inv -e release.tag-version 7.27.0-rc.3 --no-push # Create tags locally; don't push them
    inv -e release.tag-version 7.29.0-rc.3 --force # Create tags (overwriting existing tags with the same name), force-push them to origin
    """
    if verify:
        check_version(agent_version)
    # Overwriting tags is destructive: require explicit user confirmation.
    force_option = ""
    if force:
        print(color_message("--force option enabled. This will allow the task to overwrite existing tags.", "orange"))
        if yes_no_question("Please confirm the use of the --force option.", color="orange", default=False):
            print("Continuing with the --force option.")
            force_option = " --force"
        else:
            print("Continuing without the --force option.")
    for module in DEFAULT_MODULES.values():
        # Tag the main repository always; other modules only with --tag_modules.
        if not (tag_modules or module.path == "."):
            continue
        if not module.should_tag:
            continue
        for tag in module.tag(agent_version):
            created = try_git_command(
                ctx,
                f"git tag -m {tag} {tag} {commit}{force_option}",
            )
            if not created:
                message = f"Could not create tag {tag}. Please rerun the task to retry creating the tags (you may need the --force option)"
                raise Exit(color_message(message, "red"), code=1)
            print(f"Created tag {tag}")
            if push:
                ctx.run(f"git push origin {tag}{force_option}")
                print(f"Pushed tag {tag}")
    print(f"Created all tags for version {agent_version}")
def current_version(ctx, major_version) -> Version:
    """Return the current Version parsed from the most recent matching tag."""
    raw = get_version(ctx, major_version=major_version)
    return _create_version_from_match(VERSION_RE.search(raw))
def next_final_version(ctx, major_version) -> Version:
    """Compute the next final (non-RC) version from the most recent tag."""
    previous = current_version(ctx, major_version)
    if previous.is_devel():
        # A devel previous version just drops the devel marker
        # (should never happen during regular releases, we always do at least one RC).
        return previous.non_devel_version()
    return previous.next_version(rc=False)
def next_rc_version(ctx, major_version, patch_version=False) -> Version:
    """Compute the next RC version from the most recent tag on the branch."""
    previous = current_version(ctx, major_version)
    if previous.is_rc():
        # Already on an RC: only bump the RC number.
        return previous.next_version(rc=True)
    if patch_version:
        return previous.next_version(bump_patch=True, rc=True)
    # Minor version bump, we're doing a standard release:
    # - if the previous tag is a devel tag, use it without the devel tag
    # - otherwise (should not happen during regular release cycles), bump the minor version
    if previous.is_devel():
        return previous.non_devel_version().next_version(rc=True)
    return previous.next_version(bump_minor=True, rc=True)
def check_base_branch(branch, release_version):
    """
    Checks if the given branch is either the default branch or the release branch associated
    with the given release version.
    """
    if branch == DEFAULT_BRANCH:
        return True
    return branch == release_version.branch()
def check_uncommitted_changes(ctx):
    """
    Checks if there are uncommitted changes in the local git repository.
    """
    changed_count = ctx.run("git --no-pager diff --name-only HEAD | wc -l", hide=True).stdout.strip()
    # True when git reports at least one file with uncommitted changes.
    return changed_count != "0"
def check_local_branch(ctx, branch):
    """
    Checks if the given branch exists locally
    """
    match_count = ctx.run(f"git --no-pager branch --list {branch} | wc -l", hide=True).stdout.strip()
    # git branch --list prints one line per matching branch.
    return match_count != "0"
def check_upstream_branch(github, branch):
    """
    Checks if the given branch already exists in the upstream repository
    """
    found = github.get_branch(branch)
    # Truthy only when the API returned a branch payload carrying a name.
    return found and found.get('name', False)
def parse_major_versions(major_versions):
    """Parse a comma-separated string of major versions into a sorted list of ints."""
    return sorted(map(int, major_versions.split(",")))
def try_git_command(ctx, git_command):
    """
    Try a git command that should be retried (after user confirmation) if it fails.
    Primarily useful for commands which can fail if commit signing fails: we don't want the
    whole workflow to fail if that happens, we want to retry.
    """
    while True:
        res = ctx.run(git_command, warn=True)
        # A known, zero-ish exit status means the command succeeded.
        if res.exited is not None and res.exited <= 0:
            return True
        print(
            color_message(
                f"Failed to run \"{git_command}\" (did the commit/tag signing operation fail?)",
                "orange",
            )
        )
        if not yes_no_question("Do you want to retry this operation?", color="orange", default=True):
            return False
@task
def finish(ctx, major_versions="6,7"):
    """
    Updates the release entry in the release.json file for the new version.
    Updates internal module dependencies with the new version.
    """
    if sys.version_info[0] < 3:
        return Exit(message="Must use Python 3 for this task", code=1)
    majors = parse_major_versions(major_versions)
    print(f"Finishing release for major version(s) {majors}")
    github_token = get_github_token()
    for major in majors:
        version = next_final_version(ctx, major)
        update_release_json(ctx, github_token, version)
        # Keep internal Go module requirements in sync with the new version.
        update_modules(ctx, str(version))
@task(help={'upstream': "Remote repository name (default 'origin')"})
def create_rc(ctx, major_versions="6,7", patch_version=False, upstream="origin"):
"""
Updates the release entries in release.json to prepare the next RC build.
If the previous version of the Agent (determined as the latest tag on the
current branch) is not an RC:
- by default, updates the release entries for the next minor version of
the Agent.
- if --patch-version is specified, updates the release entries for the next
patch version of the Agent.
This changes which tags will be considered on the dependency repositories (only
tags that match the same major and minor version as the Agent).
If the previous version of the Agent was an RC, updates the release entries for RC + 1.
Examples:
If the latest tag on the branch is 7.31.0, and invoke release.create-rc --patch-version
is run, then the task will prepare the release entries for 7.31.1-rc.1, and therefore
will only use 7.31.X tags on the dependency repositories that follow the Agent version scheme.
If the latest tag on the branch is 7.32.0-devel or 7.31.0, and invoke release.create-rc
is run, then the task will prepare the release entries for 7.32.0-rc.1, and therefore
will only use 7.32.X tags on | |
<filename>app/main.py
import math
from tkinter import *
from tkinter import messagebox, filedialog
from tkinter.font import Font
from tkinter.ttk import Progressbar
import geopandas as gpd
import numpy as np
import shapefile
from descartes import PolygonPatch
from geopandas import GeoDataFrame, points_from_xy
from matplotlib.backends._backend_tk import NavigationToolbar2Tk
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
from scipy.spatial.qhull import Delaunay, ConvexHull
from sklearn.neighbors._ball_tree import BallTree
import pandas as pd
from shapely.geometry import Point
import geopy.distance
def about():
    """
    Handler for the about_btn click: shows a dialog with basic
    information about the application.
    """
    info_title = 'About the program'
    info_text = "This program is a geographic information system created by <NAME>."
    messagebox.showinfo(title=info_title, message=info_text)
class GeographicInformationSystem:
"""
Main class of the application. It contains variables and methods that enables the functionalities:
1. Selecting and loading vector and raster files from the computer
2. Zooming in, zooming out, pan, zoom extent, saving the figure inside the canvas etc.
3. Selecting and loading CSV file that contains coordinates for Points
4. Delaunay's triangulation for the selected CSV file or the shapefile that contains only points or multipoints
5. Convex Hull Polygon generating for the selected CSV file or the shapefile that contains only points or multipoints
6. Nearest Neighbor search for the points inside of the CSV file or the shapefile for the entered coordinates
7. Measuring distance between points entered via mouse click, both on raster or the vector file.
"""
# Initialization of the main Tkinter window
root = Tk()
# Variables that store the links to the selected raster and vector file
raster_file = None
vector_file = None
# Variables used for checking if the raster file or shapefile is loaded onto the canvas
vector_loaded = False
raster_loaded = False
# Figures added to the FigureCanvasTk of the vector and raster Frame
fig1 = Figure(figsize=(6, 5), dpi=100)
fig2 = Figure(figsize=(6, 5), dpi=100)
# Variables that store figures to which the subplot was added
a = fig2.add_subplot(111)
ax = fig1.add_subplot(111)
# Variable that stores the uploaded shapefile
vector = None
# Variable that stores the link to the selected CSV file
csv_destination = ''
# Variables that store the number of clicks inside the canvases when the measuring distance has started
num_of_clicks_raster = 0
num_of_clicks_vector = 0
# Variables that store bindings of the click event to the raster_can and vector_can
connection_raster = None
connection_vector = None
# Variables that store the distance between two points selected on the raster_can or vector_can
distance_vec = distance_ras = 0
# Variables that store data of the click on the vector and raster canvas during the measurement between two points
point1_vec = point2_vec = point1_ras = point2_ras = None
# Font added to some widgets
helv12 = Font(family='Helvetica', size=12, weight='bold')
# Customization of the buttons inside the NavigationToolbar2Tk
NavigationToolbar2Tk.toolitems = (
('Home', 'Reset view', 'home', 'home'),
(None, None, None, None),
('Pan', 'Pan', 'move', 'pan'),
('Zoom', 'Zoom In/Out', 'zoom_to_rect', 'zoom'),
(None, None, None, None),
('Subplots', 'Adjust subplot', 'subplots', 'configure_subplots'),
('Save', 'Save figure', 'filesave', 'save_figure'),
)
def __init__(self):
"""
Initialization method that is called when the class is instantiated. It creates and adds all the necessary
widgets to the main (root) window.
"""
# Setting the title and the favicon for the application
self.root.title('Geographic information system')
self.root.iconbitmap(r".\resources\flaticon.ico")
# Setting up the size of the main window
self.root.geometry("1536x764+0+0")
# Making the main window not resizable
self.root.resizable(0, 0)
# Configuring the grid layout for the main window
self.root.columnconfigure(0, weight=4)
self.root.columnconfigure(1, weight=4)
self.root.rowconfigure(0, weight=1)
# Creating two frames inside the main window (one for the raster files, and the other for the vector files)
self.raster_side = Frame(self.root, bg='lightgoldenrod1')
self.vector_side = Frame(self.root, bg='lightgoldenrod1')
# Adding the two frames to the grid layout of the main window
self.raster_side.grid(row=0, column=0, sticky="nsew")
self.vector_side.grid(row=0, column=1, sticky="nsew")
# Adding the label that marks the Frame that is meant for the vector files
self.vector_lab = Label(self.vector_side, text='Shapefile', bg='lightgoldenrod1')
self.vector_lab.place(relx=0.1, rely=0.05)
# Adding the label that marks the Frame that is meant for the raster files
self.raster_lab = Label(self.raster_side, text='Raster', bg='lightgoldenrod1')
self.raster_lab.place(relx=0.1, rely=0.05)
# Creating and adding the button that enables the 'Load CSV' functionality
load_csv_image = PhotoImage(file=r'.\resources\load_csv.gif')
self.load_csv_btn = Button(self.raster_side, image=load_csv_image, command=self.load_csv_data)
self.load_csv_btn.place(relx=0.0, rely=0.0, width=32, height=32)
# Creating and adding the button that enables the 'Delaunay's Triangulation' functionality
delaunay_triangulation_image = PhotoImage(file=r'.\resources\delaunay_triangulation_icon.gif')
self.delaunay_triangulation_btn = Button(self.raster_side, image=delaunay_triangulation_image,
command=self.delaunay_triangulation)
self.delaunay_triangulation_btn.place(relx=0.045, rely=0.0, width=32, height=32)
# Creating and adding the button that enables the 'Convex Hull Polygon' functionality
polygon_image = PhotoImage(file=r'.\resources\polygon.gif')
self.convex_hull_polygon_btn = Button(self.raster_side, image=polygon_image,
command=self.convex_hull_polygon)
self.convex_hull_polygon_btn.place(relx=0.09, rely=0.0, width=32, height=32)
# Creating and adding the button that enables the 'Nearest neighbor search' functionality
nearest_neighbor_search_image = PhotoImage(file=r'.\resources\nearest_neighbor.gif')
self.nearest_neighbor_search_btn = Button(self.raster_side, image=nearest_neighbor_search_image,
command=self.nearest_neighbor_input)
self.nearest_neighbor_search_btn.place(relx=0.135, rely=0.0, width=32, height=32)
# Creating and adding of the 'About' button
about_image = PhotoImage(file=r'.\resources\about.gif')
self.about_btn = Button(self.raster_side, image=about_image, command=about)
self.about_btn.place(relx=0.18, rely=0.0, width=32, height=32)
# Creating and adding of the 'Exit' button
exit_image = PhotoImage(file=r'.\resources\exit.gif')
self.exit_btn = Button(self.raster_side, image=exit_image, command=self.exit)
self.exit_btn.place(relx=0.225, rely=0.0, width=32, height=32)
# Creating and adding the button that enables 'Measure distance between two points'
# functionality for the raster canvas
ruler_image = PhotoImage(file=r'.\resources\ruler.gif')
self.calculate_distance_btn = Button(self.raster_side, image=ruler_image,
command=self.calculate_distance_raster)
self.calculate_distance_btn.place(relx=0.03, rely=0.25, width=32, height=32)
# Creating and adding the button that enables 'Measure distance between two points'
# functionality for the vector canvas (vector_can)
self.calculate_distance_vector_btn = Button(self.vector_side, image=ruler_image,
command=self.calculate_distance_vector)
self.calculate_distance_vector_btn.place(relx=0.6, rely=0.093, width=32, height=32)
# Creating and adding the label in which the information regarding the measuring distance are shown
# (for the raster canvas)
self.raster_distance_lbl = Label(self.raster_side, text='', bg='lightgoldenrod1')
self.raster_distance_lbl.place(relx=0.1, rely=0.255)
# Creating and adding the label in which the information regarding the measuring distance are shown
# (for the vector canvas (vector_can))
self.vector_distance_lbl = Label(self.vector_side, text='', bg='lightgoldenrod1')
self.vector_distance_lbl.place(relx=0.6, rely=0.15)
# Creating and adding the button that enables raster file upload to the application
self.select_raster_btn = Button(self.raster_side, command=self.select_raster, text='Select a raster file',
bg='lightgoldenrod2',
activebackground='lightgoldenrod3')
self.select_raster_btn.place(relx=0.10, rely=0.1)
# Creating and adding the button that enables shapefile upload to the application
self.select_vector_btn = Button(self.vector_side, command=self.select_vector, text='Select a vector file',
bg='lightgoldenrod2',
activebackground='lightgoldenrod3')
self.select_vector_btn.place(relx=0.10, rely=0.1)
# Creating and adding the button that enables the loading od the raster file to the canvas
self.load_raster_btn = Button(self.raster_side, command=self.load_raster, text='Load a raster file',
bg='lightgoldenrod2',
activebackground='lightgoldenrod3')
self.load_raster_btn.place(relx=0.10, rely=0.16)
# Creating and adding the button that enables the loading of the vector file to the canvas
self.load_vector_btn = Button(self.vector_side, command=self.load_vector, text='Load a vector file',
bg='lightgoldenrod2',
activebackground='lightgoldenrod3')
self.load_vector_btn.place(relx=0.10, rely=0.16)
# Creating and adding the text field in which the link to the selected raster file will be shown
self.raster_path = Text(self.raster_side, state=DISABLED)
self.raster_path.place(relx=0.25, rely=0.1, height=25, width=250)
# Creating and adding the text field in which the link to the selected vector file will be shown
self.vector_path = Text(self.vector_side, state=DISABLED)
self.vector_path.place(relx=0.25, rely=0.1, height=25, width=250)
# Creating and adding the matplotlib canvas in which the raster files will be displayed
self.raster_can = FigureCanvasTkAgg(self.fig1, master=self.raster_side)
self.raster_can.get_tk_widget().place(relx=0.1, rely=0.22, height=500, width=600)
# Creating and adding the matplotlib canvas in which the raster files will be displayed
self.vector_can = FigureCanvasTkAgg(self.fig2, master=self.vector_side)
self.vector_can.get_tk_widget().place(relx=0.005, rely=0.3, height=535, width=740)
# Creating and adding of the progressbar of the uploading of the file to the canvas for displaying the raster
# files
self.progress_raster = Progressbar(self.raster_side, orient=HORIZONTAL, length=250, mode='determinate')
self.progress_raster.place(relx=0.25, rely=0.165)
# Creating and adding of the progressbar of the uploading of the file to the canvas for displaying the
# shapefiles
self.progress_vector = Progressbar(self.vector_side, orient=HORIZONTAL, length=250, mode='determinate')
self.progress_vector.place(relx=0.25, rely=0.165)
# Creating and adding the navigation toolbar for the canvas in which the raster files are displayed
# It contains elements within the NavigationToolbar2Tk.toolitems
self.toolbar = NavigationToolbar2Tk(self.raster_can, self.raster_side)
self.raster_can._tkcanvas.pack(expand=False, side=BOTTOM, fill=BOTH)
self.toolbar.update()
# Creating and adding the navigation toolbar for the canvas in which the vector files are displayed
# It contains elements within the NavigationToolbar2Tk.toolitems
self.toolbar_vec = NavigationToolbar2Tk(self.vector_can, self.vector_side)
self.vector_can._tkcanvas.pack(padx=2, expand=False, side=BOTTOM, fill='x')
self.toolbar_vec.update()
# Creating and adding the text box in which the attributes of the uploaded shapefile will
# be shown on the click of a button
self.vector_attributes_text = Text(self.vector_side, state=DISABLED, width=72, height=3)
self.vector_attributes_text.place(relx=0.11, rely=0.22)
# Creating and adding the button that enables the functionality of displaying vector data in the above-mentioned
# text box
| |
<reponame>dmitryvinn/cinder
# Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
from __future__ import annotations
import ast
from ast import (
AST,
And,
AnnAssign,
Assign,
AsyncFunctionDef,
Attribute,
AugAssign,
Await,
BinOp,
BoolOp,
Call,
ClassDef,
Compare,
Constant,
DictComp,
For,
FormattedValue,
FunctionDef,
GeneratorExp,
If,
IfExp,
ImportFrom,
Index,
Is,
IsNot,
JoinedStr,
Lambda,
ListComp,
Module,
Name,
NameConstant,
Return,
SetComp,
Slice,
Starred,
Subscript,
Try,
UnaryOp,
While,
Yield,
YieldFrom,
expr,
)
from enum import IntEnum
from typing import (
Dict,
List,
Optional,
Sequence,
TYPE_CHECKING,
Type,
Union,
cast,
)
from ..consts import SC_GLOBAL_EXPLICIT, SC_GLOBAL_IMPLICIT, SC_LOCAL
from ..errors import CollectingErrorSink, TypedSyntaxError
from ..symbols import SymbolVisitor
from .declaration_visitor import GenericVisitor
from .effects import NarrowingEffect, NO_EFFECT
from .module_table import ModuleTable, ModuleFlag
from .types import (
AwaitableType,
CInstance,
CType,
CheckedDictInstance,
CheckedListInstance,
Class,
ClassVar,
FinalClass,
Function,
FunctionContainer,
GenericClass,
IsInstanceEffect,
ModuleInstance,
TypeDescr,
Slot,
TType,
TypeEnvironment,
UnionInstance,
UnknownDecoratedMethod,
Value,
OptionalInstance,
TransparentDecoratedMethod,
)
if TYPE_CHECKING:
from .compiler import Compiler
class BindingScope:
    """Per-scope mapping of names to their declared and current (narrowed) types."""

    def __init__(
        self,
        node: AST,
        type_env: TypeEnvironment,
    ) -> None:
        # AST node owning this scope (module, function, comprehension, ...).
        self.node = node
        # Current, possibly-narrowed type of each local name.
        self.local_types: Dict[str, Value] = {}
        # Declared type (plus Final-ness) of each local name.
        self.decl_types: Dict[str, TypeDeclaration] = {}
        self.type_env: TypeEnvironment = type_env

    def branch(self) -> LocalsBranch:
        """Snapshot the current locals ahead of a control-flow branch."""
        return LocalsBranch(self)

    def declare(
        self, name: str, typ: Value, is_final: bool = False, is_inferred: bool = False
    ) -> TypeDeclaration:
        """Declare *name* in this scope and return its TypeDeclaration.

        The current (local) type is recorded as *typ* even when the declared
        type is widened to dynamic for inferred assignments.
        """
        # For an unannotated assignment (is_inferred=True), we declare dynamic
        # type; this disallows later re-declaration, but allows any type to be
        # assigned later, so `x = None; if flag: x = "foo"` works.
        decl = TypeDeclaration(self.type_env.DYNAMIC if is_inferred else typ, is_final)
        self.decl_types[name] = decl
        self.local_types[name] = typ
        return decl
class ModuleBindingScope(BindingScope):
    """Binding scope for module (top-level) code; mirrors names into the ModuleTable."""

    def __init__(
        self,
        node: ast.Module,
        module: ModuleTable,
        type_env: TypeEnvironment,
    ) -> None:
        super().__init__(node, type_env)
        self.module = module

    def declare(
        self, name: str, typ: Value, is_final: bool = False, is_inferred: bool = False
    ) -> TypeDeclaration:
        """Declare *name* at module scope and publish it as a module child."""
        # at module scope we will go ahead and set a declared type even without
        # an annotation, but we don't want to infer the exact type; should be
        # able to reassign to a subtype
        if is_inferred:
            typ = typ.nonliteral().inexact()
            is_inferred = False
        self.module.children[name] = typ
        return super().declare(name, typ, is_final=is_final, is_inferred=is_inferred)
class LocalsBranch:
    """Handles branching and merging local variable types."""

    def __init__(self, scope: BindingScope) -> None:
        self.scope = scope
        self.type_env: TypeEnvironment = scope.type_env
        # Snapshot of the locals at branch-entry time.
        self.entry_locals: Dict[str, Value] = dict(scope.local_types)

    def copy(self) -> Dict[str, Value]:
        """Return a snapshot of the current local state."""
        return dict(self.scope.local_types)

    def restore(self, state: Optional[Dict[str, Value]] = None) -> None:
        """Reset the locals to *state*, or to the entry-time snapshot."""
        target = state if state else self.entry_locals
        current = self.scope.local_types
        current.clear()
        current.update(target)

    def merge(self, entry_locals: Optional[Dict[str, Value]] = None) -> None:
        """Merge the entry snapshot (or an explicit copy) into the current locals."""
        # TODO: What about del's?
        source = self.entry_locals if entry_locals is None else entry_locals
        current = self.scope.local_types
        for name, incoming in source.items():
            # Names absent from the current locals are left untouched.
            if name not in current:
                continue
            if current[name] != incoming:
                current[name] = self._join(incoming, current[name])

    def changed(self) -> bool:
        """True when the locals differ from the entry-time snapshot."""
        return self.entry_locals != self.scope.local_types

    def _join(self, *types: Value) -> Value:
        """Union the given value types into a single instance type."""
        if len(types) == 1:
            return types[0]
        klasses = tuple(t.inexact().klass for t in types)
        return self.type_env.get_union(klasses).instance
class TypeDeclaration:
    """Record of a declared name: its declared type and whether it is Final."""

    def __init__(self, typ: Value, is_final: bool = False) -> None:
        # The declared (never narrowed) type for the name.
        self.type = typ
        # True when the declaration is Final; reassignment is then an error.
        self.is_final = is_final
class TerminalKind(IntEnum):
    """How a statement terminates control flow, ordered by increasing strength."""

    NonTerminal = 0  # execution continues normally
    BreakOrContinue = 1  # exits only the current loop / iteration
    RaiseOrReturn = 2  # exits the whole function
class TypeBinder(GenericVisitor):
"""Walks an AST and produces an optionally strongly typed AST, reporting errors when
operations are occuring that are not sound. Strong types are based upon places where
annotations occur which opt-in the strong typing"""
def __init__(
    self,
    symbols: SymbolVisitor,
    filename: str,
    compiler: Compiler,
    module_name: str,
    optimize: int,
    enable_patching: bool = False,
) -> None:
    """Create a binder for *module_name* resolved through *compiler*."""
    module = compiler[module_name]
    super().__init__(module)
    self.symbols = symbols
    # Stack of binding scopes; the innermost scope is last.
    self.scopes: List[BindingScope] = []
    self.modules: Dict[str, ModuleTable] = compiler.modules
    # Optimization level passed through from compilation — TODO confirm semantics.
    self.optimize = optimize
    # Maps AST nodes to how they terminate control flow.
    self.terminals: Dict[AST, TerminalKind] = {}
    self.type_env: TypeEnvironment = compiler.type_env
    # Bookkeeping for call inlining (depth of current inline, total count).
    self.inline_depth = 0
    self.inline_calls = 0
    self.enable_patching = enable_patching
@property
def nodes_default_dynamic(self) -> bool:
    """Whether untyped AST nodes should silently default to dynamic."""
    # If we have a non-throwing ErrorSink, then we may miss typing some
    # nodes on error, so default them to dynamic silently.
    return not self.error_sink.throwing
@property
def local_types(self) -> Dict[str, Value]:
    """Current (possibly narrowed) local types of the innermost scope."""
    return self.binding_scope.local_types
@property
def decl_types(self) -> Dict[str, TypeDeclaration]:
    """Declared types of the innermost scope."""
    return self.binding_scope.decl_types
@property
def binding_scope(self) -> BindingScope:
    """The innermost (top-of-stack) binding scope."""
    return self.scopes[-1]
@property
def scope(self) -> AST:
    """The AST node owning the innermost binding scope."""
    return self.binding_scope.node
def maybe_set_local_type(self, name: str, local_type: Value) -> Value:
    """Narrow *name* to *local_type* where its declaration permits.

    Returns the type actually recorded: the declared type when the new type
    is dynamic or the declared class cannot be narrowed, otherwise
    *local_type* itself.
    """
    decl = self.get_target_decl(name)
    assert decl is not None
    decl_type = decl.type
    if local_type is self.type_env.DYNAMIC or not decl_type.klass.can_be_narrowed:
        local_type = decl_type
    self.local_types[name] = local_type
    return local_type
def maybe_get_current_class(self) -> Optional[Class]:
    """Return the Class being bound when the current scope is a class body, else None."""
    node = self.scope
    if isinstance(node, ClassDef):
        res = self.get_type(node)
        assert isinstance(res, Class)
        return res
def visit(
    self, node: Union[AST, Sequence[AST]], *args: object
) -> Optional[NarrowingEffect]:
    """This override is only here to give Pyre the return type information."""
    ret = super().visit(node, *args)
    if ret is not None:
        # Expression visitors may return a NarrowingEffect describing how the
        # expression narrows local types; anything else is a bug.
        assert isinstance(ret, NarrowingEffect)
        return ret
    return None
def get_final_literal(self, node: AST) -> Optional[ast.Constant]:
    """Return the Constant *node* resolves to when it names a Final literal, else None."""
    return self.module.get_final_literal(node, self.symbols.scopes[self.scope])
def declare_local(
    self,
    name: str,
    typ: Value,
    is_final: bool = False,
    is_inferred: bool = False,
) -> None:
    """Declare *name* with *typ* in the current binding scope.

    Raises TypedSyntaxError when *name* is already declared. Primitive
    (CInstance) values additionally go through a primitive-scope check
    before being declared.
    """
    if name in self.decl_types:
        raise TypedSyntaxError(f"Cannot redefine local variable {name}")
    if isinstance(typ, CInstance):
        self.check_primitive_scope(name)
    self.binding_scope.declare(
        name, typ, is_final=is_final, is_inferred=is_inferred
    )
def check_static_import_flags(self, node: Module) -> None:
    """Record module flags from `__static__.compiler_flags` imports.

    Scans the module body: at most one leading string constant (the
    docstring) is tolerated — a second bare string, or any other bare
    expression, stops the scan. Recognized names imported from
    `__static__.compiler_flags` set the corresponding ModuleFlag.
    """
    saw_doc_str = False
    for stmt in node.body:
        if isinstance(stmt, ast.Expr):
            val = stmt.value
            if isinstance(val, ast.Constant) and isinstance(val.value, str):
                if saw_doc_str:
                    break
                saw_doc_str = True
            else:
                break
        elif isinstance(stmt, ast.Import):
            continue
        elif isinstance(stmt, ast.ImportFrom):
            if stmt.module == "__static__.compiler_flags":
                for name in stmt.names:
                    if name.name == "checked_dicts":
                        self.module.flags.add(ModuleFlag.CHECKED_DICTS)
                    elif name.name == "checked_lists":
                        self.module.flags.add(ModuleFlag.CHECKED_LISTS)
                    elif name.name in ("noframe", "shadow_frame"):
                        self.module.flags.add(ModuleFlag.SHADOW_FRAME)
def visitModule(self, node: Module) -> None:
    """Bind a whole module: push a ModuleBindingScope, process compiler
    flags, visit every top-level statement, then pop the scope."""
    self.scopes.append(
        ModuleBindingScope(
            node,
            self.module,
            type_env=self.type_env,
        )
    )
    self.check_static_import_flags(node)
    for stmt in node.body:
        self.visit(stmt)
    self.scopes.pop()
def set_param(
    self,
    arg: ast.arg,
    arg_type: Value,
    scope: BindingScope,
) -> None:
    """Declare a function parameter in *scope* and record its type on the arg node."""
    scope.declare(arg.arg, arg_type)
    self.set_type(arg, arg_type)
def _visitParameters(self, args: ast.arguments, scope: BindingScope) -> None:
    """Declare and type-check every parameter of a function signature.

    Handles positional-only, positional, ``*args``, keyword-only and
    ``**kwargs`` parameters: visits annotations, visits any default value,
    and checks that each default is assignable to the annotated type.

    The previously duplicated positional-only/positional loops are now a
    single local helper; behavior is unchanged.
    """
    # Index into args.defaults for the current positional parameter; it is
    # negative while we are still on parameters that have no default, and
    # reaches 0 exactly when the defaulted parameters begin.
    default_index = len(args.defaults or []) - (
        len(args.posonlyargs) + len(args.args)
    )

    def visit_positional(arg: ast.arg, default_index: int) -> int:
        """Bind one positional parameter; return the updated default index."""
        ann = arg.annotation
        if ann:
            self.visitExpectedType(
                ann,
                self.type_env.DYNAMIC,
                "argument annotation cannot be a primitive",
            )
            arg_type = self.module.resolve_annotation(ann) or self.type_env.dynamic
        elif arg.arg in scope.decl_types:
            # Already handled self
            return default_index + 1
        else:
            arg_type = self.type_env.dynamic
        arg_type = arg_type.unwrap()
        if default_index >= 0:
            default = args.defaults[default_index]
            self.visit(default, arg_type.instance)
            self.check_can_assign_from(
                arg_type,
                self.get_type(default).klass,
                default,
            )
        self.set_param(arg, arg_type.instance, scope)
        return default_index + 1

    for arg in args.posonlyargs:
        default_index = visit_positional(arg, default_index)
    for arg in args.args:
        default_index = visit_positional(arg, default_index)

    vararg = args.vararg
    if vararg:
        ann = vararg.annotation
        if ann:
            self.visitExpectedType(
                ann,
                self.type_env.DYNAMIC,
                "argument annotation cannot be a primitive",
            )
        # *args is always an exact tuple, regardless of annotation.
        self.set_param(vararg, self.type_env.tuple.exact_type().instance, scope)

    default_index = len(args.kw_defaults or []) - len(args.kwonlyargs)
    for arg in args.kwonlyargs:
        ann = arg.annotation
        if ann:
            self.visitExpectedType(
                ann,
                self.type_env.DYNAMIC,
                "argument annotation cannot be a primitive",
            )
            arg_type = self.module.resolve_annotation(ann) or self.type_env.dynamic
        else:
            arg_type = self.type_env.dynamic
        arg_type = arg_type.unwrap()
        if default_index >= 0:
            # Keyword-only defaults may be None (no default for that arg).
            default = args.kw_defaults[default_index]
            if default is not None:
                self.visit(default, arg_type.instance)
                self.check_can_assign_from(
                    arg_type,
                    self.get_type(default).klass,
                    default,
                )
        default_index += 1
        self.set_param(arg, arg_type.instance, scope)

    kwarg = args.kwarg
    if kwarg:
        ann = kwarg.annotation
        if ann:
            self.visitExpectedType(
                ann,
                self.type_env.DYNAMIC,
                "argument annotation cannot be a primitive",
            )
        # **kwargs is always an exact dict.
        self.set_param(kwarg, self.type_env.dict.exact_type().instance, scope)
def new_scope(self, node: AST) -> BindingScope:
    """Create a fresh (non-module) binding scope for *node*."""
    return BindingScope(
        node,
        type_env=self.type_env,
    )
def get_func_container(
    self, node: Union[ast.FunctionDef, ast.AsyncFunctionDef]
) -> FunctionContainer:
    """Return the FunctionContainer previously recorded for *node*.

    Raises RuntimeError when declaration visiting did not produce one.
    """
    function = self.get_type(node)
    if not isinstance(function, FunctionContainer):
        raise RuntimeError("bad value for function")
    return function
def _visitFunc(self, node: Union[FunctionDef, AsyncFunctionDef]) -> None:
    """Bind a (sync or async) function definition and declare its name locally."""
    func = self.get_func_container(node)
    func.bind_function(node, self)
    typ = self.get_type(node)
    # avoid declaring unknown-decorateds as locals in order to support
    # @overload and @property.setter
    if not isinstance(typ, UnknownDecoratedMethod):
        if isinstance(self.scope, (FunctionDef, AsyncFunctionDef)):
            # nested functions can't be invoked against; to ensure we
            # don't, declare them as dynamic type
            typ = self.type_env.DYNAMIC
        self.declare_local(node.name, typ)
def visitFunctionDef(self, node: FunctionDef) -> None:
    """Bind a `def` statement."""
    self._visitFunc(node)
def visitAsyncFunctionDef(self, node: AsyncFunctionDef) -> None:
    """Bind an `async def` statement."""
    self._visitFunc(node)
def visitClassDef(self, node: ClassDef) -> None:
for decorator in node.decorator_list:
self.visitExpectedType(
decorator, self.type_env.DYNAMIC, "decorator cannot be a primitive"
)
for kwarg in node.keywords:
self.visitExpectedType(
kwarg.value, self.type_env.DYNAMIC, "class kwarg cannot be a primitive"
)
is_protocol = False
for base in node.bases:
self.visitExpectedType(
base, self.type_env.DYNAMIC, "class base cannot be a primitive"
| |
<gh_stars>0
# -*- coding: utf-8 -*-
"""
General functions supporting other modules.
"""
import datetime as dt
import fiona
import numpy as np
import os
import pandas as pd
import pyproj
import sqlite3
from functools import partial
from shapely import geometry as geo
from shapely import ops
def boxcox_backtransform(xt, lmbda):
    """Back transform box-cox transformed data. Assumes data was transformed
    using the equation from scipy.stats.boxcox.

    Parameters
    ----------
    xt : array_like
        Array of box-cox transformed data.
    lmbda : scalar
        Lambda used during box-cox transformation.

    Returns
    -------
    x : ndarray
        Array of back transformed data.

    Notes
    -----
    Fixed to always return a numpy array (previously the lmbda != 0 branch
    returned a Python list while the lmbda == 0 branch returned an array),
    and to use vectorized arithmetic instead of a per-element nditer loop.
    Also accepts plain lists now, via np.asarray.
    """
    xt = np.asarray(xt)
    if lmbda == 0:
        # Inverse of the log transform: x = exp(xt).
        x = np.exp(xt)
    else:
        # Inverse of (x**lmbda - 1) / lmbda.
        x = (lmbda * xt + 1) ** (1 / lmbda)
    return x
def check_expected_list(df, col_name, expected_values, verbose=0):
    """Checks that a dataframe column only contains expected values.

    Parameters
    ----------
    df : dataframe
        Dataframe to check.
    col_name : str
        Name of column to check.
    expected_values : list
        Expected values for column.
    verbose : int
        Defines verbosity for output statements.

    Returns
    -------
    match : bool
        True if all values match expected; otherwise, False.
    """
    unique_vals = pd.Series(pd.unique(df[col_name]))
    is_expected = unique_vals.isin(expected_values)
    if verbose >= 3:
        output('Unique ' + col_name + ' values: ')
        print(unique_vals)
        print('')
    if all(is_expected):
        return True
    # Report only the unexpected values.
    output('Error : Unexpected ' + col_name + ' value(s).',
           'check_expected')
    print(unique_vals[[not ok for ok in is_expected]])
    return False
def connect_db(db_path, verbose=0):
    """Connects to a sqlite database, creating it if it does not exist.

    Parameters
    ----------
    db_path : str
        Path to sqlite database to create or connect to.
    verbose : int
        Defines verbosity for output statements.

    Returns
    -------
    conn : sqlite3.Connection
        Open database connection.
    """
    connection = sqlite3.connect(db_path)
    if verbose >= 1:
        output('Connected to (or created if not exists) sqlite database.')
    return connection
def create_table(db_path, table, create_sql, indexes=None, overwrite=False,
                 verbose=0):
    """Creates a sqlite table, optionally dropping any existing one first.

    Parameters
    ----------
    db_path : str
        Path to sqlite database to create or connect to.
    table : str
        Name of table to be created. NOTE: interpolated directly into the
        DROP statement — callers must pass a trusted identifier.
    create_sql : str
        Sql query, defined as a string.
        E.g. 'CREATE TABLE IF NOT EXISTS table (col1 TEXT, col2 INTEGER);'
    indexes : list or None
        List of create index sql statements, each defined as a string.
        E.g. ['CREATE INDEX IF NOT EXISTS temp ON table col;']
    overwrite : bool
        Defines whether or not to overwrite existing table.
    verbose : int
        Defines verbosity for output statements.
    """
    conn = connect_db(db_path)
    cursor = conn.cursor()

    # Drop any existing table when overwriting.
    if overwrite:
        cursor.execute('DROP TABLE IF EXISTS {table};'.format(table=table))
        conn.commit()
        if verbose >= 1:
            output('Dropped {table} table (if exists).'.format(table=table))

    # Create the table (if it does not already exist).
    cursor.execute(create_sql)
    conn.commit()

    # Create any requested indexes.
    if indexes:
        for index_sql in indexes:
            cursor.execute(index_sql)
        conn.commit()

    conn.close()
    if verbose >= 1:
        output('Created new (if not exists) {table} table.'.format(table=table))
def cross_corr(a, b, normalized=True):
    """Calculate the cross-correlation between two equal-length datasets.

    Parameters
    ----------
    a : array
        1-d array of the first dataset.
    b : array
        1-d array of the second dataset.
    normalized : bool
        If True, then normalize each dataset.

    Returns
    -------
    rho : float
        Cross-correlation value.
    """
    if normalized:
        # Standard normalization: remove the means, divide by the standard
        # deviations, with the single 1/len(a) factor folded into `a`.
        a = (a - np.mean(a)) / (np.std(a) * len(a))
        b = (b - np.mean(b)) / np.std(b)
    return np.correlate(a, b, mode='valid')[0]
def df_to_table(db_path, df, table, dtype=None, overwrite=False, verbose=0):
    """Writes a dataframe to a table in a database.

    Parameters
    ----------
    db_path : str
        Path to sqlite database.
    df : dataframe
        Dataframe to write from.
    table : str
        Name of table in database.
    dtype : dict or None
        Column type mapping passed through to pandas DataFrame.to_sql.
        Defaults to None (let pandas infer). Previously this was a mutable
        ``{}`` default, which is shared across calls and a classic Python
        pitfall; None is backward compatible.
    overwrite : bool
        Boolean data type defining whether or not to overwrite existing table.
    verbose : int
        Defines verbosity for output statements.
    """
    # connect to database
    conn = connect_db(db_path)
    try:
        # 'replace' drops and recreates the table; 'append' adds rows.
        if overwrite:
            df.to_sql(table, conn, if_exists='replace', index=False, dtype=dtype)
            if verbose >= 1:
                output('Wrote dataframe to new {table} table.'.format(table=table))
        else:
            df.to_sql(table, conn, if_exists='append', index=False, dtype=dtype)
            if verbose >= 1:
                output('Wrote dataframe to new (if not exists) or existing {table} '
                       'table.'.format(table=table))
    finally:
        # Close the connection even if to_sql raises (was leaked previously).
        conn.close()
def dump(items, func_name='unknown', tostr=True, overwrite=True):
    """Dumps a list of items to a text file, where each item is written to a
    new line. Dump file is written to the 'data/dump/' directory with a
    [yearmonthday_hourminuteseconds] timestamp.

    Parameters
    ----------
    items : list
        List of items to dump.
    func_name : str
        Function name to associate with the dump file.
    tostr : bool
        Defines whether or not to convert items to strings.
    overwrite : bool
        Defines whether or not to overwrite existing file.

    Returns
    -------
    written : bool
        Defines whether or not dump file was written.

    Notes
    -----
    Bug fix: the original tested ``if not tostr``, converting items to
    strings only when tostr was False — the inverse of the documented
    behavior. The check is now ``if tostr``.
    """
    written = False
    if items:
        path = 'data/dump/dump-{func_name}-{date:%Y%m%d_%H%M%S}.txt'.format(
            func_name=func_name, date=dt.datetime.now())
        if tostr:
            items = [str(item) for item in items]
        mode = 'w' if overwrite else 'a'
        with open(path, mode) as file:
            for item in items:
                file.write('{}\n'.format(item))
        written = True
    return written
def get_regex_files(files_dir, pattern, verbose=0):
    """Get sorted file names matching a regex pattern from a directory.

    Parameters
    ----------
    files_dir : str
        Path of directory to get file names from.
    pattern : regex
        Compiled regex pattern.
    verbose : int
        Defines verbosity for output statements.

    Returns
    -------
    files : list
        Sorted list of matching file names in the directory.
    """
    matches = sorted(f for f in os.listdir(files_dir) if pattern.match(f))
    if verbose >= 1:
        output(str(len(matches)) + ' matching files in \"' + files_dir + '\".')
    return matches
def haversine(lat1, lon1, lat2, lon2):
    """Great-circle distance (miles) between points given in decimal degrees.

    All arguments must be of equal shape.

    Parameters
    ----------
    lat1, lon1 : list
        Latitudes/longitudes for point 1 (decimal degrees).
    lat2, lon2 : list
        Latitudes/longitudes for point 2.

    Returns
    -------
    d : list
        List of distances (miles).

    Notes
    -----
    See https://stackoverflow.com/questions/29545704/fast-haversine
    -approximation-python-pandas.
    """
    # Earth's radius in miles, taken as the average between poles and equator.
    R_MILES = 3956.5465
    lat1, lon1, lat2, lon2 = (np.radians(v) for v in (lat1, lon1, lat2, lon2))
    half_dlat = (lat2 - lat1) / 2
    half_dlon = (lon2 - lon1) / 2
    h = np.sin(half_dlat) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(half_dlon) ** 2
    return 2 * R_MILES * np.arcsin(np.sqrt(h))
def output(print_str, fn_str=None):
    """Prints a timestamped message, optionally tagged with a function name.

    Parameters
    ----------
    print_str : str
        String to print.
    fn_str : str
        Name of function the print statement is being called from.
    """
    stamp = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    if fn_str:
        print(stamp + ' : ' + fn_str + ' : ' + print_str)
    else:
        print(stamp + ' : ' + print_str)
    # Trailing blank line separates consecutive messages.
    print('')
def read_shapefile(path, to_wgs84=True):
    """Reads a shapefile into lists of shapes and properties for each feature
    within the shapefile layer.

    Parameters
    ----------
    path : str
        Path to shapefile. Assumes the shapefile contains one layer with
        all features of interest. Assumes each feature contains 'geometry'
        and 'properties' attributes.
    to_wgs84 : bool
        If True, applies coordinate transformation to WGS84.

    Returns
    -------
    shapes : list
        List of features as shapely shapes.
    properties : list
        List of feature properties (i.e. attributes).

    Notes
    -----
    Updated for modern libraries: shapely.geometry.shape replaces asShape
    (removed in Shapely 2.0), and pyproj.Transformer replaces the removed
    pyproj.transform / Proj(init=...) API. always_xy=True preserves the
    traditional (lon, lat) axis order the old API used.
    """
    # updated fiona version with Python 3 requires explicit GDAL_ENV ignore
    # reads shapefile layer
    transformer = None
    with fiona.Env():
        with fiona.open(path, 'r') as fiona_collection:
            # define projection transformation function
            if to_wgs84:
                transformer = pyproj.Transformer.from_crs(
                    fiona_collection.crs, 'EPSG:4326', always_xy=True
                )
            # save layer as list
            layer = list(fiona_collection)

    # get WGS84 shapes and properties
    shapes = []
    properties = []
    for feature in layer:
        shape = geo.shape(feature['geometry'])
        if to_wgs84:
            shapes.append(ops.transform(transformer.transform, shape))
        else:
            shapes.append(shape)
        properties.append(feature['properties'])
    return shapes, properties
def query(db_path, sql, parse_dates=False, verbose=0):
"""Query a database. Opens and closes database connection.
Parameters
----------
db_path : str
Path to sqlite database to create or connect to.
sql : str
Sql query.
parse_dates : dict or False
Defines which columns to read as datetime dtype.
verbose : | |
from __future__ import annotations
import uvicore
from uvicore.typing import Any, Callable, List, Dict, Optional, Decorator, get_type_hints
from uvicore.http.routing.router import Router
from uvicore.contracts import ApiRoute as RouteInterface
from prettyprinter import pretty_call, register_pretty
from uvicore.support.dumper import dump, dd
from merge_args import _merge as merge_args
from functools import partial
from uvicore.http.routing.guard import Guard
#from uvicore.auth.middleware.auth import Guard
# from fastapi import APIRouter as _FastAPIRouter
# from uvicore.typing import Any, Type, List, Callable, Optional
# from starlette.routing import BaseRoute
# from uvicore.contracts import ApiRouter as RouterInterface
# from starlette.responses import Response
# from uvicore.support.dumper import dump, dd
# from uvicore.support.module import load
@uvicore.service()
class ApiRouter(Router['ApiRoute']):
def get(self,
    # Common to both ApiRouter and WebRouter
    path: str,
    endpoint: Optional[Callable] = None,
    *,
    name: Optional[str] = None,
    autoprefix: bool = True,
    middleware: Optional[List] = None,
    auth: Optional[Guard] = None,
    scopes: Optional[List] = None,
    inherits: Optional[Callable] = None,
    # ApiRouter specific
    responses: Optional[Dict] = None,
    response_model: Optional[Any] = None,
    tags: Optional[List[str]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
) -> None:
    """Add a new HTTP GET route to the ApiRouter

    :param path: URL path, beginning with /
    :param endpoint: None if decorator, else method to call when route is fired.
    :param name: Name of this route to consistently access with url('name') helper.
    :param autoprefix: Disable the name autoprefixer (autoprefix adds the appname. prefix). Useful when overriding another packages route.
    :param middleware: List of route level middleware.
    :param auth: Shorthand for the middleware=[Guard(['scope'])]. Usage auth=Guard(['scope']).
    :param scopes: Shorthand for middleware=[Guard(['scope'])]. Usage scopes=['scope1', 'scope1'].
    :param response_model: Response ORM Model. Can also use -> Model on function return type.
    :param tags: List of tags to group the endpoint in the OpenAPI docs.
    :param summary: Summary of this endpoint for OpenAPI docs.
    :param description: Description of this endpoint for OpenAPI docs.
    :param inherits: Endpoint function can inhert the parameters of another function. Useful if you have tons of parameters you want shared on multiple endpoints. Usage inherits=AuthApi.getsig.
    """
    # NOTE(review): the -> None annotation looks wrong — add() may return a
    # decorator when endpoint is None; post/put/patch are unannotated. Confirm.
    # Capture every keyword argument plus the HTTP method and forward them
    # all to the generic add() implementation.
    methods = ['GET']
    params = {key: value for key, value in locals().items() if key != 'self'}
    return self.add(**params)
def post(self,
    # Common to both ApiRouter and WebRouter
    path: str,
    endpoint: Optional[Callable] = None,
    *,
    name: Optional[str] = None,
    autoprefix: bool = True,
    middleware: Optional[List] = None,
    auth: Optional[Guard] = None,
    scopes: Optional[List] = None,
    inherits: Optional[Callable] = None,
    # ApiRouter specific
    responses: Optional[Dict] = None,
    response_model: Optional[Any] = None,
    tags: Optional[List[str]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
):
    """Add a new HTTP POST route to the ApiRouter

    :param path: URL path, beginning with /
    :param endpoint: None if decorator, else method to call when route is fired.
    :param name: Name of this route to consistently access with url('name') helper.
    :param autoprefix: Disable the name autoprefixer (autoprefix adds the appname. prefix). Useful when overriding another packages route.
    :param middleware: List of route level middleware.
    :param auth: Shorthand for the middleware=[Guard(['scope'])]. Usage auth=Guard(['scope']).
    :param scopes: Shorthand for middleware=[Guard(['scope'])]. Usage scopes=['scope1', 'scope1'].
    :param response_model: Response ORM Model. Can also use -> Model on function return type.
    :param tags: List of tags to group the endpoint in the OpenAPI docs.
    :param summary: Summary of this endpoint for OpenAPI docs.
    :param description: Description of this endpoint for OpenAPI docs.
    :param inherits: Endpoint function can inhert the parameters of another function. Useful if you have tons of parameters you want shared on multiple endpoints. Usage inherits=AuthApi.getsig.
    """
    # Capture every keyword argument plus the HTTP method and forward them
    # all to the generic add() implementation.
    methods = ['POST']
    params = {key: value for key, value in locals().items() if key != 'self'}
    return self.add(**params)
def put(self,
    # Common to both ApiRouter and WebRouter
    path: str,
    endpoint: Optional[Callable] = None,
    *,
    name: Optional[str] = None,
    autoprefix: bool = True,
    middleware: Optional[List] = None,
    auth: Optional[Guard] = None,
    scopes: Optional[List] = None,
    inherits: Optional[Callable] = None,
    # ApiRouter specific
    responses: Optional[Dict] = None,
    response_model: Optional[Any] = None,
    tags: Optional[List[str]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
):
    """Add a new HTTP PUT route to the ApiRouter

    :param path: URL path, beginning with /
    :param endpoint: None if decorator, else method to call when route is fired.
    :param name: Name of this route to consistently access with url('name') helper.
    :param autoprefix: Disable the name autoprefixer (autoprefix adds the appname. prefix). Useful when overriding another packages route.
    :param middleware: List of route level middleware.
    :param auth: Shorthand for the middleware=[Guard(['scope'])]. Usage auth=Guard(['scope']).
    :param scopes: Shorthand for middleware=[Guard(['scope'])]. Usage scopes=['scope1', 'scope1'].
    :param response_model: Response ORM Model. Can also use -> Model on function return type.
    :param tags: List of tags to group the endpoint in the OpenAPI docs.
    :param summary: Summary of this endpoint for OpenAPI docs.
    :param description: Description of this endpoint for OpenAPI docs.
    :param inherits: Endpoint function can inhert the parameters of another function. Useful if you have tons of parameters you want shared on multiple endpoints. Usage inherits=AuthApi.getsig.
    """
    # Capture every keyword argument plus the HTTP method and forward them
    # all to the generic add() implementation.
    methods = ['PUT']
    params = {key: value for key, value in locals().items() if key != 'self'}
    return self.add(**params)
def patch(self,
    # Common to both ApiRouter and WebRouter
    path: str,
    endpoint: Optional[Callable] = None,
    *,
    name: Optional[str] = None,
    autoprefix: bool = True,
    middleware: Optional[List] = None,
    auth: Optional[Guard] = None,
    scopes: Optional[List] = None,
    inherits: Optional[Callable] = None,
    # ApiRouter specific
    responses: Optional[Dict] = None,
    response_model: Optional[Any] = None,
    tags: Optional[List[str]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
):
    """Add a new HTTP PATCH route to the ApiRouter

    :param path: URL path, beginning with /
    :param endpoint: None if decorator, else method to call when route is fired.
    :param name: Name of this route to consistently access with url('name') helper.
    :param autoprefix: Disable the name autoprefixer (autoprefix adds the appname. prefix). Useful when overriding another packages route.
    :param middleware: List of route level middleware.
    :param auth: Shorthand for the middleware=[Guard(['scope'])]. Usage auth=Guard(['scope']).
    :param scopes: Shorthand for middleware=[Guard(['scope'])]. Usage scopes=['scope1', 'scope1'].
    :param response_model: Response ORM Model. Can also use -> Model on function return type.
    :param tags: List of tags to group the endpoint in the OpenAPI docs.
    :param summary: Summary of this endpoint for OpenAPI docs.
    :param description: Description of this endpoint for OpenAPI docs.
    :param inherits: Endpoint function can inhert the parameters of another function. Useful if you have tons of parameters you want shared on multiple endpoints. Usage inherits=AuthApi.getsig.
    """
    # Capture every keyword argument plus the HTTP method and forward them
    # all to the generic add() implementation.
    methods = ['PATCH']
    params = {key: value for key, value in locals().items() if key != 'self'}
    return self.add(**params)
def delete(self,
# Common to both ApiRouter and WebRouter
path: str,
endpoint: Optional[Callable] = None,
*,
name: Optional[str] = None,
autoprefix: bool = True,
middleware: Optional[List] = None,
auth: Optional[Guard] = None,
scopes: Optional[List] = None,
inherits: Optional[Callable] = None,
# ApiRouter specific
responses: Optional[Dict] = None,
response_model: Optional[Any] = None,
tags: Optional[List[str]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
):
"""Add a new HTTP DELETE route to the ApiRouter
:param path: URL path, beginning with /
:param endpoint: None if decorator, else method to call when route is fired.
:param name: Name of this route to consistently access with url('name') helper.
:param autoprefix: Disable the name autoprefixer (autoprefix adds the appname. prefix). Useful when overriding another packages route.
:param middleware: List of route level middleware.
:param auth: Shorthand for the middleware=[Guard(['scope'])]. Usage auth=Guard(['scope']).
:param scopes: Shorthand for middleware=[Guard(['scope'])]. Usage scopes=['scope1', 'scope1'].
:param response_model: Response ORM Model. Can also use -> Model on function return type.
:param tags: List of tags to group the endpoint in the OpenAPI docs.
:param summary: Summary of this endpoint for OpenAPI docs.
:param description: Description of this endpoint for OpenAPI docs.
:param inherits: Endpoint function can inhert the parameters of another function. Useful if you have tons of parameters | |
<reponame>matthew-cen/schedule-creator
# TODO: Implement modification/validation logic
# TODO: Default values
# TODO: Use datetime module for time handling
# TODO: KeyError for days and empty input
from utilities import parse_day, parse_time, parse_command, parse_command_num
from exceptions import *
from schedule import create_schedule
from backend import *
class User(db.Model):
    """Generic User database model (username + email).

    Fixed the malformed docstring delimiter (the original opened with four
    quotes, leaking a stray '"' into the docstring text).
    """
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True)
    email = db.Column(db.String(80), unique=True)

    def __init__(self, username, email):
        self.username = username
        self.email = email

    def __repr__(self):
        return '<User %r>' % self.username
class Section(db.Model):
    """
    The Section class contains the time and days of a course section.
    It allows for the modification of a section's timeslot and scheduled days
    through an interactive command-line menu (see interface()).
    """
    __tablename__ = 'sections'
    id = db.Column(db.Integer, primary_key=True)
    # Human-readable section identifier; not unique across courses.
    section_id = db.Column(db.String(80), unique=False)
    # Start/end times as integers (minutes since 12AM, per the
    # set_timeslot() prompt text).
    time_start = db.Column(db.Integer, unique=False)
    time_end = db.Column(db.Integer, unique=False)
    # One boolean column per weekday the section meets.
    Monday = db.Column(db.BOOLEAN, unique=False)
    Tuesday = db.Column(db.BOOLEAN, unique=False)
    Wednesday = db.Column(db.BOOLEAN, unique=False)
    Thursday = db.Column(db.BOOLEAN, unique=False)
    Friday = db.Column(db.BOOLEAN, unique=False)
    Saturday = db.Column(db.BOOLEAN, unique=False)
    Sunday = db.Column(db.BOOLEAN, unique=False)
    db_schedule = db.relationship("DBFinalSectionSelection", backref="backref_schedule", lazy=True)
    section_course_id = db.Column(db.Integer, db.ForeignKey('course.id'), nullable=False)
    # CONSTRUCTOR
    def __init__(self, course, section_id, time_start=-1, time_end=-1, day="0"):
        # course: owning Course instance; -1 start/end means "not yet set".
        self.course = course
        self.section_id = section_id
        self.time_start = time_start # need to initialize this to save to table
        self.time_end = time_end # this too
        # In-memory (start, end) pair used by __str__ and set_timeslot().
        self.timeslot = (time_start, time_end)
        self.day = day # this is just testing for database
        self.Monday = False
        self.Tuesday = False
        self.Wednesday = False
        self.Thursday = False
        self.Friday = False
        self.Saturday = False
        self.Sunday = False
        # NOTE(review): the index->weekday mapping is defined by parse_day();
        # the comment below claims it starts with Sunday -- confirm against
        # utilities.parse_day.
        self.days = [0,0,0,0,0,0,0] # stores days at bits starting with Sunday
    # OPERATOR OVERLOADS
    def __str__(self):
        return f"Section ID: {self.section_id} Time: {self.timeslot} Days: {self.days}"
    # INTERFACE
    def interface(self):
        """Interactive menu loop: 1=set timeslot, 2=add day, 3=remove day,
        4=remove all days, 5=return to the course interface."""
        while True:
            print("\n[SECTION MODIFICATION]")
            print(self)
            self.print_commands()
            try:
                user_res = parse_command_num(input("Enter a command via the command number: "), 5)
                # Validate input as a number
                if user_res == 1:
                    self.set_timeslot()
                elif user_res == 2:
                    self.add_day()
                elif user_res == 3:
                    self.remove_day()
                elif user_res == 4:
                    self.remove_all_days()
                elif user_res == 5: # Exit interface for Section
                    break
            except ValueError:
                print("[ERROR] Invalid command. Please enter a number between 1 and 5")
    # COMMAND METHODS
    def set_timeslot(self):
        """Prompt until a valid (start, end) pair is entered and store it.

        NOTE(review): this updates self.timeslot only, not the time_start /
        time_end DB columns initialized in __init__ -- confirm whether the
        columns are meant to stay in sync.
        """
        while True:
            try:
                time_start = parse_time(input("Please enter the START time of the section as minutes since 12AM: "))
                time_end = parse_time(input("Please enter the END time of the section as minutes since 12AM: "))
                if time_end < time_start:
                    raise EndBeforeStartTimeException
                self.timeslot = (time_start, time_end)
                break
            except ValueError:
                print("You provided an invalid input, please try again")
            except IndexError:
                print("The provided time is out of range, please try again")
            except EndBeforeStartTimeException:
                print("The provided end time is before the start time, please try again")
    def add_day(self):
        """Prompt for a day and mark it (set its bit in self.days) as scheduled."""
        while True:
            try:
                user_day_res = parse_day(input("Please enter the day you want to add: "))
                self.days[user_day_res] = 1
                break
            # NOTE(review): bare except also swallows KeyboardInterrupt etc.
            except:
                print("You provided an invalid input, please try again.")
    def remove_day(self):
        """Prompt for a day and unmark it (clear its bit in self.days)."""
        while True:
            try:
                user_day_res = parse_day(input("Please enter the day you want to remove: "))
                self.days[user_day_res] = 0
                break
            # NOTE(review): bare except, same caveat as add_day().
            except:
                print("You provided an invalid input, please try again.")
    def remove_all_days(self):
        # Reset every day bit in one assignment.
        self.days = [0,0,0,0,0,0,0]
        print("Removed all days for this section")
    # UTILITY METHODS
    @staticmethod
    def print_commands():
        """
        Displays valid commands for the Section class
        """
        print("1) Change timeslot")
        print("2) Add a day")
        print("3) Remove a day")
        print("4) Remove all days")
        print("5) Return to course interface")
class Course(db.Model):
    """
    A course and the dictionary of its sections, keyed by section ID.

    Used to add, modify, and remove its contained sections through an
    interactive command-line menu.
    """
    __tablename__ = "course"
    id = db.Column(db.Integer, primary_key=True)
    course_id = db.Column(db.String(80), unique=False)
    course_name = db.Column(db.String(80), unique=False)
    # One-to-many: all Section rows belonging to this course.
    db_sections = db.relationship("Section", backref="backref_course", lazy=True)
    # CONSTRUCTOR
    def __init__(self, course_id, course_name):
        self.course_id = course_id
        self.course_name = course_name
        self.sections = {}  # maps section_id -> Section
    # COMMAND METHODs
    def add_section(self):
        """
        Add section to Course
        Command Number: 1
        """
        section_id = input("Please enter the section ID: ")
        if self.section_exists(section_id):
            print(f"[ERROR] The following section already exists: {section_id}")
            return
        else:
            self.sections[section_id] = Section(self, section_id) # instantiate new section
            self.sections[section_id].set_timeslot()
            # Loop to allow user to add multiple days at once
            while True:
                # TODO: Day and time validation
                # TODO: Allow user to input multiple days in one input string
                self.sections[section_id].add_day()
                user_res = input("Do you want to add another day for this section? (Y/N): ").upper()
                if user_res == "N":
                    break
                elif user_res != "Y":
                    print("[ERROR] Invalid command. Please try again")
    def modify_section(self):
        """
        Brings user to section modification menu
        Command Number: 2
        """
        user_sel_section = input("Please enter the section ID you want to modify: ")
        if self.section_exists(user_sel_section):
            self.sections[user_sel_section].interface()
        else:
            print(f"The following section does not exist: {user_sel_section}")
    def remove_section(self):
        """
        Remove section from Course
        Command Number: 3
        """
        user_selected_course = input("Please enter the section ID you want to remove: ")
        if self.section_exists(user_selected_course):
            self.sections.pop(user_selected_course)
        else:
            print(f"The following section does not exist: {user_selected_course}")
    # INTERFACE
    def interface(self):
        """Menu loop: 1=add section, 2=modify section, 3=remove section,
        4=return to main menu."""
        while True:
            print("\n[COURSE MODIFICATION]")
            print(f"Course: {self.course_id} - {self.course_name}")
            self.print_sections() # show sections in current course
            self.print_commands()
            try:
                user_res = parse_command_num(input("Enter a command via the command number: "), 4)
                # Validate input as a number
                if user_res == 1:
                    self.add_section()
                elif user_res == 2:
                    self.modify_section()
                elif user_res == 3:
                    self.remove_section()
                elif user_res == 4:
                    break
            except ValueError:
                # FIX: the menu accepts 1-4; the old message said "1 and 3".
                print("[ERROR] Invalid command. Please enter a number between 1 and 4")
    # UTILITY METHODS
    def __repr__(self):
        # FIX: __repr__ must return a string; the old version printed to
        # stdout and implicitly returned None, so repr(course) raised.
        return f"<Course {self.course_id}>"
    def __str__(self):
        return f"Course #: {self.course_id} Course Name: {self.course_name}"
    @staticmethod
    def print_commands():
        """
        Displays valid commands for the Course class
        """
        print("1) Add a new section")
        print("2) Modify a section")
        print("3) Remove a section")
        print("4) Return to main menu")
    def print_sections(self):
        """
        Displays the sections contained by the Course class as a numbered list
        """
        print(f"Number of Sections: {len(self.sections)}")
        print("-----------------------------------------------------")
        # Check if the section dict is empty
        if self.sections:
            # Idiom: enumerate instead of a manual counter.
            for counter, section in enumerate(self.sections.values(), start=1):
                print(f"{counter} - {section}")
        else:
            print("<<<You have added no sections to this course>>>")
        print("-----------------------------------------------------")
    def section_exists(self, section_id):
        """Return True if *section_id* is already registered for this course."""
        # Idiom: membership test on the dict itself, not .keys().
        return section_id in self.sections
class DBFinalSectionSelection(db.Model):
    """Join row linking a generated schedule to one of its chosen sections."""
    id = db.Column(db.Integer, primary_key=True)
    # Identifier of the generated schedule this selection belongs to
    # (plain integer, not a foreign key).
    schedule_id = db.Column(db.Integer, unique=False)
    # Foreign key to the selected Section row.
    section_id = db.Column(db.Integer, db.ForeignKey('sections.id'), nullable=False)
    def __init__(self, schedule_id):
        self.schedule_id = schedule_id
class Database:
"""
The Database class is pseudo-database of courses stored as a dictionary
It is used to add, modify, and remove its contained courses
"""
    def __init__(self):
        """Create an empty in-memory course catalog (course_id -> Course)."""
        self.courses = {}
# COMMAND METHODS
def add_course(self):
"""
Adds course to Database
Command Number : 1
"""
course_id = input("Please enter the course number: ") # asks user to input course num
course_name = input("Please enter the course name: ") # asks user to input course name
if self.course_exists(course_id): # checks if the course number already exists
print(f"[ERROR] The following course already exists: {course_id}")
else:
self.courses[course_id] = Course(course_id, course_name) # instantiate new Course object
print(f"Successfully added the following course: {course_name}")
def modify_course(self,course_id):
"""
Brings user to Course modification menu
Command Number : 2
"""
if self.course_exists(course_id):
print(f"Selected the following course for modification: {course_id}")
self.courses[course_id].interface() # Initialize command interface for course
else:
print(f"[ERROR] The provided course number does not exist: {course_id}")
def remove_course(self, course_id):
"""
Removes course from Database
Command Number : 3
"""
if self.course_exists(course_id):
self.courses.pop(course_id) # remove course from database
print(f"Successfully removed the following course: {course_id}")
else:
print("[ERROR] The provided course number does not exist:" + course_id)
def gen_schedules(self):
print("TYe")
section_pools = [tuple(self.courses[course].sections.values()) for course in self.courses]
print("SECTION POOLS", section_pools)
valid_schedules = create_schedule(section_pools)
print("VALID SCHEDULES", valid_schedules)
for schedule in valid_schedules:
print(schedule)
# INTERFACE METHOD
def interface(self):
while True:
print("\n[MAIN MENU]") # prints newline for readability
self.print_courses() # show user added courses
self.print_commands() # show user available commands
try:
user_res = parse_command_num(input("Enter a command via the command number: "), 4)
# Validate input as a number
if user_res == 1:
self.add_course()
elif user_res == 2:
user_selected_course = input("Please enter the course number you want to modify: ")
self.modify_course(user_selected_course)
elif user_res == 3:
user_selected_course = input("Please enter the course number you want to remove: ")
self.remove_course(user_selected_course)
elif user_res == 4:
self.gen_schedules()
except ValueError:
print("[ERROR] Invalid command. Please enter a number between 1 and 3")
# UTILITY METHODS
@staticmethod # | |
import click
import io
import requests
import time
import zlib
from prog.cli import request
from prog.cli import create
from prog.cli import delete
from prog.cli import show
from prog.cli import set as cli_set
from prog.cli import unset
from prog import client
from prog import multipart
from prog import output
from prog import utils
@show.group('system')
@click.pass_obj
def show_system(data):
    """Show system information."""
    # Click group container; the subcommands below do the actual work.
@show_system.command()
@click.pass_obj
def usage(data):
    """Show system usage."""
    # Fetch the usage report and render it as a fixed-column table.
    # (Local renamed from 'usage' to avoid shadowing the command function.)
    rows = data.client.show("system", "usage", "usage")
    columns = ["reported_at", "platform", "hosts", "cores", "cvedb_version", "domains", "running_pods"]
    output.list(columns, rows)
@show_system.command()
@click.pass_obj
def summary(data):
    """Show system summary."""
    # Cluster-wide summary object from the controller REST API.
    summary = data.client.show("system", "summary", "summary")
    # (field key, display label) pairs, rendered in this order.
    column_map = (("domains", "Domains"),
                  ("hosts", "Hosts"),
                  ("controllers", "Controllers"),
                  ("enforcers", "Enforcers"),
                  ("disconnected_enforcers", "Disconnected Enforcers"),
                  ("workloads", "Workloads"),
                  ("running_workloads", "Running workloads"),
                  ("running_pods", "Running pods"),
                  ("services", "Services"),
                  ("policy_rules", "Policy rules"),
                  ("platform", "Platform"),
                  ("cvedb_version", "CVE-DB version"),
                  ("cvedb_create_time", "CVE-DB created at"))
    output.show_with_map(column_map, summary)
@show_system.command()
@click.pass_obj
def stats(data):
    """Show system stats."""
    # Debug-level counters fetched from the controller's debug endpoint.
    stats = data.client.show("debug/system", "stats", "stats")
    column_map = (("expired_tokens", "Expired tokens"),
                  ("scan_state_keys", "Scan state keys"),
                  ("scan_data_keys", "Scan data keys"),)
    output.show_with_map(column_map, stats)
def _show_system_setting_display_format(s):
    """Convert raw config fields of *s* in-place into display-friendly strings."""
    # List-valued fields are flattened to comma-separated strings under the
    # display key produced by output.key_output().
    f = "syslog_categories"
    if s.get(f):
        s[output.key_output(f)] = ",".join(s[f])
    f = "auth_order"
    if s.get(f):
        s[output.key_output(f)] = ",".join(s[f])
    f = "configured_internal_subnets"
    if s.get(f):
        s[output.key_output(f)] = ",".join(s[f])
    f = "controller_debug"
    if f in s:
        s[output.key_output(f)] = ", ".join(s[f])
    # Proxy settings are shown as a URL, with credentials inlined when set.
    f = "registry_http_proxy"
    if s.get(f):
        if s[f].get("username"):
            # NOTE(review): "username" is interpolated twice here (and for the
            # https proxy below) where "//user:password@" would normally be
            # expected -- possibly deliberate password masking, possibly a
            # copy/paste bug. Confirm intended behavior.
            s[output.key_output(f)] = s[f]["url"].replace("//", "//%s:%s@" % (s[f]["username"], s[f]["username"]))
        else:
            s[output.key_output(f)] = s[f]["url"]
    f = "registry_https_proxy"
    if s.get(f):
        if s[f].get("username"):
            s[output.key_output(f)] = s[f]["url"].replace("//", "//%s:%s@" % (s[f]["username"], s[f]["username"]))
        else:
            s[output.key_output(f)] = s[f]["url"]
@show_system.command()
@click.option("--scope", default="all", type=click.Choice(['fed', 'local', 'all']), show_default=True,
              help="Show federal, local or all system configuration")
@click.pass_obj
def setting(data, scope):
    """Show system configuration."""
    # Federal scope has its own (smaller) config view; 'local' and 'all'
    # both go through the local/system config path.
    if scope != "fed":
        showLocalSystemConfig(data, scope)
    else:
        showFedSystemConfig(data, scope)
def showFedSystemConfig(data, scope):
    """Render the federal-scope system configuration, then its webhooks."""
    # Only forward the scope filter when it is explicitly 'fed'.
    args = {"scope": scope} if scope == 'fed' else {}
    conf = data.client.show("system", "fed_config", "config", **args)
    _show_system_setting_display_format(conf)
    # No scalar columns are mapped for the fed view; show_with_map still
    # prints the display keys injected by the formatter above.
    output.show_with_map((), conf)
    if "webhooks" in conf:
        hooks = conf["webhooks"]
        # Translate the stored cfg_type into its display name.
        for wh in hooks:
            wh["scope"] = client.CfgTypeDisplay[wh["cfg_type"]]
        output.list(("name", "url", "type", "enable", "scope"), hooks)
def showLocalSystemConfig(data, scope):
    """Render the local-scope system configuration, then its webhooks.

    Builds the column map incrementally so only fields present in the
    response are displayed; order here is the display order.
    """
    args = {}
    if scope == 'local':
        args["scope"] = scope
    conf = data.client.show("system", "config", "config", **args)
    column_map = ()
    if "policy_mode" in conf:
        column_map += (("policy_mode", "Policy Mode"),)
    if "new_service_policy_mode" in conf:
        column_map += (("new_service_policy_mode", "New Service Policy Mode"),)
    if "new_service_profile_baseline" in conf:
        column_map += (("new_service_profile_baseline", "New Service Profile Baseline"),)
    if "unused_group_aging" in conf:
        column_map += (("unused_group_aging", "Unused group aging time (hour)"),)
    # Synthesize display-only fields from the raw syslog settings.
    # NOTE(review): syslog_ip / syslog_ip_proto / ibmsa_ep_start are accessed
    # without a presence check -- assumed always present in the response.
    if conf["syslog_ip"]:
        conf["syslog_addr"] = "%s:%d" % (conf["syslog_ip"], conf["syslog_port"])
    else:
        conf["syslog_addr"] = ""
    if conf["syslog_ip_proto"] == 6:
        conf["syslog_protocol"] = "TCP"
    else:
        conf["syslog_protocol"] = "UDP"
    # "ibmsa_ep" is a blank header row; the indented labels below group
    # the IBM Security Advisor fields under it.
    conf["ibmsa_ep"] = ""
    if conf["ibmsa_ep_start"] == 1:
        conf["ibmsa_ep_start"] = True
    else:
        conf["ibmsa_ep_start"] = False
    column_map += (("ibmsa_ep", "Integrate with IBM Security Advisor"),
                   ("ibmsa_ep_enabled", " Enabled"),
                   ("ibmsa_ep_start", " Setup done"),
                   ("ibmsa_ep_dashboard_url", " NeuVector Dashboard URL"),
                   ("ibmsa_ep_connected_at", " Connection creation time"),)
    column_map += (("syslog_addr", "Syslog Address"),
                   ("syslog_protocol", " Protocol"),
                   ("syslog_level", " Level"),
                   ("syslog_in_json", " In-JSON"),
                   ("syslog_status", " Status"),
                   ("syslog_categories", " categories"),)
    if "single_cve_per_syslog" in conf:
        column_map += (("single_cve_per_syslog", "Single CVE per syslog"),)
    if "auth_order" in conf:
        column_map += (("auth_order", "Authentication order"),)
    if "auth_by_platform" in conf:
        column_map += (("auth_by_platform", "Authentication by platform(Rancher or OpenShift)"),)
    if "rancher_ep" in conf:
        column_map += (("rancher_ep", "Rancher endpoint url"),)
    if "configured_internal_subnets" in conf:
        column_map += (("configured_internal_subnets", "Configured internal subnets"),)
    if "cluster_name" in conf:
        column_map += (("cluster_name", "Cluster Name"),)
    if "controller_debug" in conf:
        column_map += (("controller_debug", "Controller Debug"),)
    if "monitor_service_mesh" in conf:
        column_map += (("monitor_service_mesh", "Monitor Service Mesh Status"),)
    if "registry_http_proxy_status" in conf:
        column_map += (("registry_http_proxy_status", "HTTP Proxy status"),)
    if "registry_https_proxy_status" in conf:
        column_map += (("registry_https_proxy_status", "HTTPS Proxy status"),)
    if "registry_http_proxy" in conf:
        column_map += (("registry_http_proxy", "HTTP Proxy"),)
    if "registry_https_proxy" in conf:
        column_map += (("registry_https_proxy", "HTTPS Proxy"),)
    if "xff_enabled" in conf:
        column_map += (("xff_enabled", "Enable xff based policy match"),)
    if "net_service_status" in conf:
        column_map += (("net_service_status", "Enable Network Service Policy Mode"),)
    if "net_service_policy_mode" in conf:
        column_map += (("net_service_policy_mode", "Network Service Policy Mode"),)
    if "mode_auto_d2m" in conf:
        column_map += (("mode_auto_d2m", "Auto Mode Upgrader: Discover -> Monitor"),
                       ("mode_auto_d2m_duration", " Duration"),)
    if "mode_auto_m2p" in conf:
        column_map += (("mode_auto_m2p", "Auto Mode Upgrader: Monitor -> Protect"),
                       ("mode_auto_m2p_duration", " Duration"),)
    _show_system_setting_display_format(conf)
    output.show_with_map(column_map, conf)
    if "webhooks" in conf:
        # Translate the stored cfg_type into its display name, then list.
        for wh in conf["webhooks"]:
            wh["scope"] = client.CfgTypeDisplay[wh["cfg_type"]]
        click.echo("")
        click.echo("Webhooks:")
        columns = ("name", "url", "type", "enable", "scope")
        output.list(columns, conf["webhooks"])
@show_system.group("partner")
@click.pass_obj
def show_partner(data):
    """Show partner data."""
    # Click group container for partner-integration subcommands.
@show_partner.group("ibmsa")
@click.pass_obj
def show_ibmsa(data):
    """Show IBM Security Advisor setup data."""
    # Click group container for IBM Security Advisor subcommands.
@show_ibmsa.command("config")
@click.pass_obj
def show_ibmsa_config(data):
    """Show IBM Security Advisor setup configuration."""
    conf = data.client.show("partner/ibm_sa_config", None, None)
    # Candidate (field, label) pairs; only fields present in the response
    # are rendered, preserving this order.
    candidates = (("account_id", "IBM Account ID"),
                  ("apikey", "API Key(masked)"),
                  ("findings_url", "findings URL"),
                  ("provider_id", "Provider/Service ID"),
                  ("token_url", "Token URL"))
    column_map = tuple(pair for pair in candidates if pair[0] in conf)
    output.show_with_map(column_map, conf)
@show_ibmsa.command("setup_uri")
@click.pass_obj
def get_ibmsa_setup_url(data):
    """Get IBM Security Advisor setup URI."""
    conf = data.client.show("partner/ibm_sa_ep", None, None)
    # Nothing to print when the endpoint returned no URL.
    if "url" not in conf:
        return
    # Blank lines around the URI make it easy to copy from the terminal.
    click.echo("")
    click.echo("URI: {}".format(conf["url"]))
    click.echo("")
def _show_system_internal_subnets_display_format(s):
    """Flatten the subnet-list fields of *s* into comma-separated strings."""
    # Same transformation for all three subnet lists, so loop over them.
    for f in ("configured_internal_subnets",
              "learned_internal_subnets",
              "effective_internal_subnets"):
        if s.get(f):
            s[output.key_output(f)] = ",".join(s[f])
@show_system.command()
@click.pass_obj
def internal_subnets(data):
    """Show internal subnets."""
    subnets = data.client.show("debug", "internal_subnets", "internal_subnets")
    # Configured (user-set), learned (observed), and effective (applied).
    column_map = (("configured_internal_subnets", "Configured"),
                  ("learned_internal_subnets", "Learned"),
                  ("effective_internal_subnets", "Effective"),)
    _show_system_internal_subnets_display_format(subnets)
    output.show_with_map(column_map, subnets)
@show_system.command()
@click.option("--page", default=20, type=click.IntRange(1), help="list page size, default=20")
@click.pass_obj
def ip_2_container(data, page):
    """Show ip-container map."""
    # FIX: renamed the local 'filter' -> 'args'; it shadowed the builtin
    # filter(). Also fixed the "continer" typo in the help docstring.
    args = {"start": 0, "limit": page}
    while True:
        wls = data.client.list("debug/ip2workload", "ip_2_workload", **args)
        for wl in wls:
            utils.list_format_ip2workload(wl)
        columns = ["ip", "id", "name"]
        output.list(columns, wls)
        # A short page means we have reached the end of the listing.
        if args["limit"] > 0 and len(wls) < args["limit"]:
            break
        click.echo("Press <esc> to exit, press other key to continue ...")
        c = utils.keypress()
        if ord(c) == 27:  # ESC
            break
        args["start"] += page
def _display_license(lic):
    """Pretty-print license info, converting boolean flags to Y/N.

    :param lic: license "info" dict; must contain the serverless/scan/enforce
                flags and the identity/limit fields listed below.
    """
    column_map = (("name", "Name"),
                  ("email", "Email"),
                  ("phone", "Phone"),
                  # ("id", "ID"),
                  # ("id_type", "ID type"),
                  ("expire", "Expire"),
                  ("node_limit", "Supported Number of Enforcers"),
                  ("cpu_limit", "Supported Number of CPUs"),
                  ("multi_cluster_limit", "Supported Number of Manageable non-Primary Clusters"),
                  ("scan", "Allow Container Scan"),
                  ("enforce", "Allow Enforce Mode"),
                  ("serverless", "Allow Serverless scan"))
    # FIX: three copy-pasted 'if flag == True' blocks collapsed into a loop.
    # The '== True' comparison is kept deliberately to preserve the exact
    # semantics of the original per-flag checks.
    for flag in ("serverless", "scan", "enforce"):
        lic[flag] = "Y" if lic[flag] == True else "N"
    output.show_with_map(column_map, lic)
@show_system.command()
@click.pass_obj
def license(data):
    """Show system license."""
    lic = data.client.show("system", "license", "license")
    # FIX: identity comparison with None per PEP 8 (was 'lic != None').
    if lic is not None:
        _display_license(lic["info"])
# request
@request.group('system')
@click.pass_obj
def request_system(data):
    """System"""
    # Click group container for system-level one-shot requests.
@request_system.command('policy_mode')
@click.argument('mode', type=click.Choice(['discover', 'monitor', 'protect']))
@click.pass_obj
def request_system_policy_mode(data, mode):
    """Set policy mode for all existing services"""
    # The lowercase CLI choice is capitalized with .title() before sending
    # (Discover/Monitor/Protect).
    data.client.request("system", "request", None, {"request": {"policy_mode": mode.title()}})
@request_system.command('unquarantine')
@click.option('--group', '-g')
@click.option('--rule', '-r', type=int)
@click.pass_obj
def request_system_unquarantine(data, group, rule):
    """Unquarantine containers"""
    # FIX: identity comparisons with None per PEP 8 (was '== None'/'!= None').
    if group is None and rule is None:
        click.echo("Error: must have at least one param")
        return
    unquar = {}
    # "all" means no group filter, so only a specific group is forwarded.
    if group is not None and group != "all":
        unquar["group"] = group
    if rule is not None:
        unquar["response_rule"] = rule
    data.client.request("system", "request", None, {"request": {"unquarantine": unquar}})
# create
@create.group('system')
@click.pass_obj
def create_system(data):
    """Create system configuration."""
    # Click group container for system-configuration create subcommands.
@create_system.command("webhook")
@click.argument('name')
@click.argument('url')
@click.option("--type", default="", help="webhook type", type=click.Choice(['Slack', 'JSON']))
@click.option("--scope", default="local", type=click.Choice(['fed', 'local']), show_default=True,
              help="It's for local or federal response rule")
@click.option("--enable/--disable", default=True, is_flag=True, help="Enable/Disable the webhook")
@click.pass_obj
def create_system_webhook_url(data, name, url, type, scope, enable):
    """Create webhook settings"""
    # FIX: the old code compared against lowercase "slack" while the click
    # choices are 'Slack'/'JSON', so 'Slack' never matched and 'JSON' was
    # silently rewritten to "". Normalize case-insensitively instead.
    if type.lower() == "slack":
        type = "Slack"
    elif type.lower() == "json":
        type = "JSON"
    body = {"name": name, "url": url, "enable": enable, "type": type}
    # Federal-scope webhooks are stored as a different configuration type.
    if scope == "fed":
        body["cfg_type"] = "federal"
    else:
        body["cfg_type"] = "user_created"
    data.client.create("system/config/webhook", {"config": body})
# delete
@delete.group('system')
@click.pass_obj
def delete_system(data):
    """Delete system configuration."""
    # Click group container for system-configuration delete subcommands.
@delete_system.command("webhook")
@click.argument('name')
@click.option("--scope", default="local", type=click.Choice(['fed', 'local']), show_default=True,
              help="It's for local or federal response rule")
@click.pass_obj
def delete_system_webhook_url(data, name, scope):
    """Delete webhook settings"""
    # Forward the scope so the right (local vs federal) webhook is removed.
    data.client.delete("system/config/webhook", name, scope=scope)
# set
@cli_set.group('system')
@click.pass_obj
def set_system(data):
    """Set system configuration."""
    # Click group container for system-configuration set subcommands.
@set_system.group('new_service')
@click.pass_obj
def set_system_new_service(data):
    """Set system new service configuration"""
    # FIX: corrected the "configruation" typo in the CLI help text.
@set_system_new_service.command("policy_mode")
@click.argument('mode', type=click.Choice(['discover', 'monitor', 'protect']))
@click.pass_obj
def set_system_new_service_policy_mode(data, mode):
    """Set system new service policy mode."""
    # The lowercase CLI choice is capitalized with .title() before sending.
    data.client.config_system(new_service_policy_mode=mode.title())
@set_system_new_service.command("profile_baseline")
@click.argument('baseline', type=click.Choice(['basic', 'zero-drift']))
@click.pass_obj
def set_system_new_service_profile_baseline(data, baseline):
    """Set system new service profile baseline."""
    # NOTE(review): .title() maps 'zero-drift' -> 'Zero-Drift' and
    # 'basic' -> 'Basic'; confirm the API expects this capitalization.
    data.client.config_system(new_service_profile_baseline=baseline.title())
@set_system.group('unused_group')
@click.pass_obj
def set_system_unused_group(data):
    """Set system unused group configuration"""
    # FIX: corrected the "configruation" typo in the CLI help text.
@set_system_unused_group.command("aging")
@click.option("--time", default=24, type=click.IntRange(0, 168),
              help="unused group aging, default=24hr, 0 means no aging")
@click.pass_obj
def set_system_unused_group_aging(data, time):
    """Set system unused group aging time."""
    # NOTE: the 'time' parameter (named by the --time option) shadows the
    # stdlib time module inside this function; harmless here, as the module
    # is not used in this body.
    data.client.config_system(unused_group_aging=time)
@set_system.group("partner")
@click.pass_obj
def set_system_partner(data):
    """Set partner integration settings"""
    # Click group container for partner-integration set subcommands.
@set_system_partner.group("ibmsa", invoke_without_command=True)
@click.option("--disable/--enable", default=None, required=False,
              help="Enable/disable IBM Security Advisor integration")
@click.option("--dashboard", default=None, required=False, help="NeuVector dashboard URL")
@click.pass_obj
def set_system_ibmsa(data, disable, dashboard):
    """Set IBM Security Advisor integration settings"""
    # --disable/--enable is tri-state: None means "leave the flag untouched".
    enable = None if disable is None else not disable
    if enable is not None:
        # Toggle the flag, optionally updating the dashboard URL in the
        # same call (any non-None value, including "", is forwarded).
        settings = {"ibmsa_ep_enabled": enable}
        if dashboard is not None:
            settings["ibmsa_ep_dashboard_url"] = dashboard
        data.client.config_system(**settings)
    elif dashboard is not None and dashboard != "":
        # Dashboard-only update; an empty string is ignored here.
        data.client.config_system(ibmsa_ep_dashboard_url=dashboard)
@set_system.group("syslog")
@click.pass_obj
def set_system_syslog(data):
    """Set syslog settings"""
    # Click group container for syslog set subcommands.
@set_system_syslog.command("status")
@click.argument('status', type=click.Choice(['enable', 'disable']))
@click.pass_obj
def set_system_syslog_status(data, status):
    """Enable/disable syslog"""
    # click.Choice guarantees status is 'enable' or 'disable';
    # map it straight to a boolean.
    data.client.config_system(syslog_status=(status == 'enable'))
@set_system_syslog.command("in-json")
@click.argument('in_json', type=click.Choice(['enable', 'disable']))
@click.pass_obj
def set_system_syslog_in_json(data, in_json):
    """Enable/disable syslog in JSON format"""
    # click.Choice guarantees in_json is 'enable' or 'disable';
    # map it straight to a boolean.
    data.client.config_system(syslog_in_json=(in_json == 'enable'))
@set_system_syslog.command("category")
@click.option('--category', '-c', multiple=True,
type=click.Choice(['all', 'event', | |
"""The tests for the Jewish calendar sensor platform."""
from collections import namedtuple
from datetime import time
from datetime import datetime as dt
from unittest.mock import patch
import pytest
from homeassistant.util.async_ import run_coroutine_threadsafe
from homeassistant.util.dt import get_time_zone, set_default_time_zone
from homeassistant.setup import setup_component
from homeassistant.components.jewish_calendar.sensor import (
JewishCalSensor, CANDLE_LIGHT_DEFAULT)
from tests.common import get_test_home_assistant
_LatLng = namedtuple('_LatLng', ['lat', 'lng'])
NYC_LATLNG = _LatLng(40.7128, -74.0060)
JERUSALEM_LATLNG = _LatLng(31.778, 35.235)
def make_nyc_test_params(dtime, results, havdalah_offset=0):
    """Make test params for NYC."""
    # Unpack the namedtuple for readability; diaspora flag is True for NYC.
    lat, lng = NYC_LATLNG
    return (dtime, CANDLE_LIGHT_DEFAULT, havdalah_offset, True,
            'America/New_York', lat, lng, results)
def make_jerusalem_test_params(dtime, results, havdalah_offset=0):
    """Make test params for Jerusalem."""
    # Unpack the namedtuple for readability; diaspora flag is False in Israel.
    lat, lng = JERUSALEM_LATLNG
    return (dtime, CANDLE_LIGHT_DEFAULT, havdalah_offset, False,
            'Asia/Jerusalem', lat, lng, results)
class TestJewishCalenderSensor():
"""Test the Jewish Calendar sensor."""
# pylint: disable=attribute-defined-outside-init
    def setup_method(self, method):
        """Set up things to run when tests begin."""
        # Fresh Home Assistant test instance for every test method.
        self.hass = get_test_home_assistant()
    def teardown_method(self, method):
        """Stop everything that was started."""
        self.hass.stop()
        # Reset the default timezone (overridden by the parametrized tests
        # below), so we don't affect other tests
        set_default_time_zone(get_time_zone('UTC'))
def test_jewish_calendar_min_config(self):
"""Test minimum jewish calendar configuration."""
config = {
'sensor': {
'platform': 'jewish_calendar'
}
}
assert setup_component(self.hass, 'sensor', config)
def test_jewish_calendar_hebrew(self):
"""Test jewish calendar sensor with language set to hebrew."""
config = {
'sensor': {
'platform': 'jewish_calendar',
'language': 'hebrew',
}
}
assert setup_component(self.hass, 'sensor', config)
def test_jewish_calendar_multiple_sensors(self):
"""Test jewish calendar sensor with multiple sensors setup."""
config = {
'sensor': {
'platform': 'jewish_calendar',
'sensors': [
'date', 'weekly_portion', 'holiday_name',
'holyness', 'first_light', 'gra_end_shma',
'mga_end_shma', 'plag_mincha', 'first_stars'
]
}
}
assert setup_component(self.hass, 'sensor', config)
test_params = [
(dt(2018, 9, 3), 'UTC', 31.778, 35.235, "english", "date",
False, "23 Elul 5778"),
(dt(2018, 9, 3), 'UTC', 31.778, 35.235, "hebrew", "date",
False, "כ\"ג אלול ה\' תשע\"ח"),
(dt(2018, 9, 10), 'UTC', 31.778, 35.235, "hebrew", "holiday_name",
False, "א\' ראש השנה"),
(dt(2018, 9, 10), 'UTC', 31.778, 35.235, "english", "holiday_name",
False, "Rosh Hashana I"),
(dt(2018, 9, 10), 'UTC', 31.778, 35.235, "english", "holyness",
False, 1),
(dt(2018, 9, 8), 'UTC', 31.778, 35.235, "hebrew", "weekly_portion",
False, "נצבים"),
(dt(2018, 9, 8), 'America/New_York', 40.7128, -74.0060, "hebrew",
"first_stars", True, time(19, 48)),
(dt(2018, 9, 8), "Asia/Jerusalem", 31.778, 35.235, "hebrew",
"first_stars", False, time(19, 21)),
(dt(2018, 10, 14), "Asia/Jerusalem", 31.778, 35.235, "hebrew",
"weekly_portion", False, "לך לך"),
(dt(2018, 10, 14, 17, 0, 0), "Asia/Jerusalem", 31.778, 35.235,
"hebrew", "date", False, "ה\' מרחשוון ה\' תשע\"ט"),
(dt(2018, 10, 14, 19, 0, 0), "Asia/Jerusalem", 31.778, 35.235,
"hebrew", "date", False, "ו\' מרחשוון ה\' תשע\"ט")
]
test_ids = [
"date_output",
"date_output_hebrew",
"holiday_name",
"holiday_name_english",
"holyness",
"torah_reading",
"first_stars_ny",
"first_stars_jerusalem",
"torah_reading_weekday",
"date_before_sunset",
"date_after_sunset"
]
    @pytest.mark.parametrize(["cur_time", "tzname", "latitude", "longitude",
                              "language", "sensor", "diaspora", "result"],
                             test_params, ids=test_ids)
    def test_jewish_calendar_sensor(self, cur_time, tzname, latitude,
                                    longitude, language, sensor, diaspora,
                                    result):
        """Test Jewish calendar sensor output."""
        time_zone = get_time_zone(tzname)
        # The sensor reads the default timezone; point it at the test's zone.
        set_default_time_zone(time_zone)
        test_time = time_zone.localize(cur_time)
        self.hass.config.latitude = latitude
        self.hass.config.longitude = longitude
        # NOTE: 'sensor' (the type string) is rebound to the sensor object.
        sensor = JewishCalSensor(
            name='test', language=language, sensor_type=sensor,
            latitude=latitude, longitude=longitude,
            timezone=time_zone, diaspora=diaspora)
        sensor.hass = self.hass
        # Freeze "now" so the sensor computes values for the test datetime.
        with patch('homeassistant.util.dt.now', return_value=test_time):
            run_coroutine_threadsafe(
                sensor.async_update(),
                self.hass.loop).result()
        assert sensor.state == result
# Each entry is built by make_nyc_test_params()/make_jerusalem_test_params()
# from (test time, expected sensor states).  The '<NAME>' placeholders in
# the English holiday names were corrupted; restored here to match the
# paired Hebrew holiday names.
shabbat_params = [
    make_nyc_test_params(
        dt(2018, 9, 1, 16, 0),
        {'upcoming_shabbat_candle_lighting': dt(2018, 8, 31, 19, 15),
         'upcoming_shabbat_havdalah': dt(2018, 9, 1, 20, 14),
         'weekly_portion': 'Ki Tavo',
         'hebrew_weekly_portion': 'כי תבוא'}),
    make_nyc_test_params(
        dt(2018, 9, 1, 16, 0),
        {'upcoming_shabbat_candle_lighting': dt(2018, 8, 31, 19, 15),
         'upcoming_shabbat_havdalah': dt(2018, 9, 1, 20, 22),
         'weekly_portion': 'Ki Tavo',
         'hebrew_weekly_portion': 'כי תבוא'},
        havdalah_offset=50),
    make_nyc_test_params(
        dt(2018, 9, 1, 20, 0),
        {'upcoming_shabbat_candle_lighting': dt(2018, 8, 31, 19, 15),
         'upcoming_shabbat_havdalah': dt(2018, 9, 1, 20, 14),
         'upcoming_candle_lighting': dt(2018, 8, 31, 19, 15),
         'upcoming_havdalah': dt(2018, 9, 1, 20, 14),
         'weekly_portion': 'Ki Tavo',
         'hebrew_weekly_portion': 'כי תבוא'}),
    make_nyc_test_params(
        dt(2018, 9, 1, 20, 21),
        {'upcoming_shabbat_candle_lighting': dt(2018, 9, 7, 19, 4),
         'upcoming_shabbat_havdalah': dt(2018, 9, 8, 20, 2),
         'weekly_portion': 'Nitzavim',
         'hebrew_weekly_portion': 'נצבים'}),
    make_nyc_test_params(
        dt(2018, 9, 7, 13, 1),
        {'upcoming_shabbat_candle_lighting': dt(2018, 9, 7, 19, 4),
         'upcoming_shabbat_havdalah': dt(2018, 9, 8, 20, 2),
         'weekly_portion': 'Nitzavim',
         'hebrew_weekly_portion': 'נצבים'}),
    make_nyc_test_params(
        dt(2018, 9, 8, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 9, 19, 1),
         'upcoming_havdalah': dt(2018, 9, 11, 19, 57),
         'upcoming_shabbat_candle_lighting': dt(2018, 9, 14, 18, 52),
         'upcoming_shabbat_havdalah': dt(2018, 9, 15, 19, 50),
         'weekly_portion': 'Vayeilech',
         'hebrew_weekly_portion': 'וילך',
         'holiday_name': 'Erev Rosh Hashana',
         'hebrew_holiday_name': 'ערב ראש השנה'}),
    make_nyc_test_params(
        dt(2018, 9, 9, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 9, 19, 1),
         'upcoming_havdalah': dt(2018, 9, 11, 19, 57),
         'upcoming_shabbat_candle_lighting': dt(2018, 9, 14, 18, 52),
         'upcoming_shabbat_havdalah': dt(2018, 9, 15, 19, 50),
         'weekly_portion': 'Vayeilech',
         'hebrew_weekly_portion': 'וילך',
         'holiday_name': 'Rosh Hashana I',
         'hebrew_holiday_name': "א' ראש השנה"}),
    make_nyc_test_params(
        dt(2018, 9, 10, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 9, 19, 1),
         'upcoming_havdalah': dt(2018, 9, 11, 19, 57),
         'upcoming_shabbat_candle_lighting': dt(2018, 9, 14, 18, 52),
         'upcoming_shabbat_havdalah': dt(2018, 9, 15, 19, 50),
         'weekly_portion': 'Vayeilech',
         'hebrew_weekly_portion': 'וילך',
         'holiday_name': 'Rosh Hashana II',
         'hebrew_holiday_name': "ב' ראש השנה"}),
    make_nyc_test_params(
        dt(2018, 9, 28, 21, 25),
        {'upcoming_shabbat_candle_lighting': dt(2018, 9, 28, 18, 28),
         'upcoming_shabbat_havdalah': dt(2018, 9, 29, 19, 25),
         'weekly_portion': 'none',
         'hebrew_weekly_portion': 'none'}),
    make_nyc_test_params(
        dt(2018, 9, 29, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 30, 18, 25),
         'upcoming_havdalah': dt(2018, 10, 2, 19, 20),
         'upcoming_shabbat_candle_lighting': dt(2018, 10, 5, 18, 17),
         'upcoming_shabbat_havdalah': dt(2018, 10, 6, 19, 13),
         'weekly_portion': 'Bereshit',
         'hebrew_weekly_portion': 'בראשית',
         'holiday_name': 'Hoshana Raba',
         'hebrew_holiday_name': 'הושענא רבה'}),
    make_nyc_test_params(
        dt(2018, 9, 30, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 30, 18, 25),
         'upcoming_havdalah': dt(2018, 10, 2, 19, 20),
         'upcoming_shabbat_candle_lighting': dt(2018, 10, 5, 18, 17),
         'upcoming_shabbat_havdalah': dt(2018, 10, 6, 19, 13),
         'weekly_portion': 'Bereshit',
         'hebrew_weekly_portion': 'בראשית',
         'holiday_name': 'Shmini Atzeret',
         'hebrew_holiday_name': 'שמיני עצרת'}),
    make_nyc_test_params(
        dt(2018, 10, 1, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 30, 18, 25),
         'upcoming_havdalah': dt(2018, 10, 2, 19, 20),
         'upcoming_shabbat_candle_lighting': dt(2018, 10, 5, 18, 17),
         'upcoming_shabbat_havdalah': dt(2018, 10, 6, 19, 13),
         'weekly_portion': 'Bereshit',
         'hebrew_weekly_portion': 'בראשית',
         'holiday_name': 'Simchat Torah',
         'hebrew_holiday_name': 'שמחת תורה'}),
    make_jerusalem_test_params(
        dt(2018, 9, 29, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 30, 18, 10),
         'upcoming_havdalah': dt(2018, 10, 1, 19, 2),
         'upcoming_shabbat_candle_lighting': dt(2018, 10, 5, 18, 3),
         'upcoming_shabbat_havdalah': dt(2018, 10, 6, 18, 56),
         'weekly_portion': 'Bereshit',
         'hebrew_weekly_portion': 'בראשית',
         'holiday_name': 'Hoshana Raba',
         'hebrew_holiday_name': 'הושענא רבה'}),
    make_jerusalem_test_params(
        dt(2018, 9, 30, 21, 25),
        {'upcoming_candle_lighting': dt(2018, 9, 30, 18, 10),
         'upcoming_havdalah': dt(2018, 10, 1, 19, 2),
         'upcoming_shabbat_candle_lighting': dt(2018, 10, 5, 18, 3),
         'upcoming_shabbat_havdalah': dt(2018, 10, 6, 18, 56),
         'weekly_portion': 'Bereshit',
         'hebrew_weekly_portion': 'בראשית',
         'holiday_name': 'Shmini Atzeret',
         'hebrew_holiday_name': 'שמיני עצרת'}),
    make_jerusalem_test_params(
        dt(2018, 10, 1, 21, 25),
        {'upcoming_shabbat_candle_lighting': dt(2018, 10, 5, 18, 3),
         'upcoming_shabbat_havdalah': dt(2018, 10, 6, 18, 56),
         'weekly_portion': 'Bereshit',
         'hebrew_weekly_portion': 'בראשית'}),
    make_nyc_test_params(
        dt(2016, 6, 11, 8, 25),
        {'upcoming_candle_lighting': dt(2016, 6, 10, 20, 7),
         'upcoming_havdalah': dt(2016, 6, 13, 21, 17),
         'upcoming_shabbat_candle_lighting': dt(2016, 6, 10, 20, 7),
         'upcoming_shabbat_havdalah': None,
         'weekly_portion': 'Bamidbar',
         'hebrew_weekly_portion': 'במדבר',
         'holiday_name': 'Erev Shavuot',
         'hebrew_holiday_name': 'ערב שבועות'}),
    make_nyc_test_params(
        dt(2016, 6, 12, 8, 25),
        {'upcoming_candle_lighting': dt(2016, 6, 10, 20, 7),
         'upcoming_havdalah': dt(2016, 6, 13, 21, 17),
         'upcoming_shabbat_candle_lighting': dt(2016, 6, 17, 20, 10),
         'upcoming_shabbat_havdalah': dt(2016, 6, 18, 21, 19),
         'weekly_portion': 'Nasso',
         'hebrew_weekly_portion': 'נשא',
         'holiday_name': 'Shavuot',
         'hebrew_holiday_name': 'שבועות'}),
    make_jerusalem_test_params(
        dt(2017, 9, 21, 8, 25),
        {'upcoming_candle_lighting': dt(2017, 9, 20, 18, 23),
         'upcoming_havdalah': dt(2017, 9, 23, 19, 13),
         'upcoming_shabbat_candle_lighting': dt(2017, 9, 22, 19, 14),
         'upcoming_shabbat_havdalah': dt(2017, 9, 23, 19, 13),
         'weekly_portion': "Ha'Azinu",
         'hebrew_weekly_portion': 'האזינו',
         'holiday_name': 'Rosh Hashana I',
         'hebrew_holiday_name': "א' ראש השנה"}),
    make_jerusalem_test_params(
        dt(2017, 9, 22, 8, 25),
        {'upcoming_candle_lighting': dt(2017, 9, 20, 18, 23),
         'upcoming_havdalah': dt(2017, 9, 23, 19, 13),
         'upcoming_shabbat_candle_lighting': dt(2017, 9, 22, 19, 14),
         'upcoming_shabbat_havdalah': dt(2017, 9, 23, 19, 13),
         'weekly_portion': "Ha'Azinu",
         'hebrew_weekly_portion': 'האזינו',
         'holiday_name': 'Rosh Hashana II',
         'hebrew_holiday_name': "ב' ראש השנה"}),
    make_jerusalem_test_params(
        dt(2017, 9, 23, 8, 25),
        {'upcoming_candle_lighting': dt(2017, 9, 20, 18, 23),
         'upcoming_havdalah': dt(2017, 9, 23, 19, 13),
         'upcoming_shabbat_candle_lighting': dt(2017, 9, 22, 19, 14),
         'upcoming_shabbat_havdalah': dt(2017, 9, 23, 19, 13),
         'weekly_portion': "Ha'Azinu",
         'hebrew_weekly_portion': 'האזינו',
         'holiday_name': '',
         'hebrew_holiday_name': ''}),
]
# Human-readable pytest ids; order matches shabbat_params one-to-one.
shabbat_test_ids = [
    "currently_first_shabbat",
    "currently_first_shabbat_with_havdalah_offset",
    "currently_first_shabbat_bein_hashmashot_lagging_date",
    "after_first_shabbat",
    "friday_upcoming_shabbat",
    "upcoming_rosh_hashana",
    "currently_rosh_hashana",
    "second_day_rosh_hashana",
    "currently_shabbat_chol_hamoed",
    "upcoming_two_day_yomtov_in_diaspora",
    "currently_first_day_of_two_day_yomtov_in_diaspora",
    "currently_second_day_of_two_day_yomtov_in_diaspora",
    "upcoming_one_day_yom_tov_in_israel",
    "currently_one_day_yom_tov_in_israel",
    "after_one_day_yom_tov_in_israel",
    # Type 1 = Sat/Sun/Mon
    "currently_first_day_of_three_day_type1_yomtov_in_diaspora",
    "currently_second_day_of_three_day_type1_yomtov_in_diaspora",
    # Type 2 = Thurs/Fri/Sat
    "currently_first_day_of_three_day_type2_yomtov_in_israel",
    "currently_second_day_of_three_day_type2_yomtov_in_israel",
    "currently_third_day_of_three_day_type2_yomtov_in_israel",
]
@pytest.mark.parametrize(["now", "candle_lighting", "havdalah", "diaspora",
                          "tzname", "latitude", "longitude", "result"],
                         shabbat_params, ids=shabbat_test_ids)
def test_shabbat_times_sensor(self, now, candle_lighting, havdalah,
                              diaspora, tzname, latitude, longitude,
                              result):
    """Test sensor output for upcoming shabbat/yomtov times."""
    time_zone = get_time_zone(tzname)
    set_default_time_zone(time_zone)
    test_time = time_zone.localize(now)
    # Localize the expected datetime values to the tested timezone.
    for key, value in result.items():
        if isinstance(value, dt):
            result[key] = time_zone.localize(value)
    self.hass.config.latitude = latitude
    self.hass.config.longitude = longitude
    # When no dedicated yomtov times are given, the shabbat times double
    # as the generic "upcoming" times.
    if ('upcoming_candle_lighting' not in result
            and 'upcoming_shabbat_candle_lighting' in result):
        result['upcoming_candle_lighting'] = \
            result['upcoming_shabbat_candle_lighting']
    if ('upcoming_havdalah' not in result
            and 'upcoming_shabbat_havdalah' in result):
        result['upcoming_havdalah'] = result['upcoming_shabbat_havdalah']
    for sensor_type, expected in result.items():
        if sensor_type.startswith('hebrew_'):
            language = 'hebrew'
            sensor_type = sensor_type.replace('hebrew_', '')
        else:
            language = 'english'
        cal_sensor = JewishCalSensor(
            name='test', language=language, sensor_type=sensor_type,
            latitude=latitude, longitude=longitude,
            timezone=time_zone, diaspora=diaspora,
            havdalah_offset=havdalah,
            candle_lighting_offset=candle_lighting)
        cal_sensor.hass = self.hass
        with patch('homeassistant.util.dt.now', return_value=test_time):
            run_coroutine_threadsafe(
                cal_sensor.async_update(),
                self.hass.loop).result()
        assert cal_sensor.state == expected, "Value for {}".format(
            sensor_type)
melacha_params = [
make_nyc_test_params(dt(2018, 9, 1, 16, 0), True),
make_nyc_test_params(dt(2018, 9, 1, 20, 21), False),
| |
to ensure that at least one replication
# snapshot with repl_status success exists at all times.
snapshot.repl_status = 'success'
# For completeness also set repl_status to success on destination.
# The snapshot list must be refreshed as the dst_dataset snapshot cache
# does not know that a new snapshot has arrived
dst_snapshot = dst_dataset.get_snapshot(snapshot.snapshot_name, refresh=True)
dst_snapshot.repl_status = snapshot.repl_status
self._enforce_read_only(dst_dataset, read_only)
@staticmethod
def _cleanup_sync_files(metadata, src_dir):
with contextlib.suppress(FileNotFoundError):
for segment in metadata.segments:
segment_path = os.path.join(src_dir, segment)
os.remove(segment_path)
os.remove(os.path.join(src_dir, metadata.path))
@staticmethod
def _get_segment_name(line, segments_log_re):
match = re.match(segments_log_re, line)
if match:
segment = match.group(1)
return os.path.basename(segment)
@staticmethod
def _get_segments(src_dir, metadata_segments):
# Refresh file list for each run as each sync can potentially take
# a long time
src_file_names = {f.name for f in scandir(src_dir)}
for segment in sorted(metadata_segments):
if segment not in src_file_names:
raise SegmentMissingException('Segment %s is missing in %s' % (segment, src_dir))
yield os.path.join(src_dir, segment)
@staticmethod
def _write_metadata_file(name, segments, snapshot, base_snapshot=None):
    """Persist the metadata describing one file-based sync to *name*."""
    meta = MetadataFile(name)
    meta.segments = segments
    meta.label = snapshot.label
    meta.snapshot = snapshot.snapshot_name
    meta.version = snapshot.version
    meta.timestamp = snapshot.timestamp
    if base_snapshot:
        # Record the incremental base so the receiver can refuse to
        # apply this stream out of order.
        meta.depends_on = base_snapshot.snapshot_name
    meta.write()
def receive_from_file(self, dst_dataset, label, src_dir, metadata, read_only=False):
    """Apply one file-based replication stream onto *dst_dataset*.

    Skips (and cleans up) streams already applied or older than the
    newest replication snapshot, and refuses to apply an incremental
    stream whose base snapshot is missing on the destination.
    """
    LOGGER.info('Selecting %s', metadata.path)
    # Make sure the cache is refreshed as the snapshot count might have
    # changed if multiple metadata files are processed in one run
    previous_snapshot = dst_dataset.get_latest_repl_snapshot(label, refresh=True)
    if previous_snapshot and previous_snapshot.datetime >= metadata.datetime:
        LOGGER.warning('Ignoring %s as it is already applied or '
                       'older than the current snapshot', metadata.path)
        self._cleanup_sync_files(metadata, src_dir)
        return
    if metadata.depends_on and not dst_dataset.get_snapshot(metadata.depends_on):
        raise ReplicationException(
            'The dependant snapshot %s does not exist on destination dataset %s' %
            (metadata.depends_on, dst_dataset.name))
    # Pipe the concatenated segment files into a zfs receive.
    segments = self._get_segments(src_dir, metadata.segments)
    cat_cmd = dst_dataset.get_cat_cmd(segments)
    receive_cmd = dst_dataset.get_receive_cmd()
    self._run_replication_cmd(cat_cmd, receive_cmd)
    # See comment in replicate()
    # Workaround for ZoL bug in initial replication fixed in 0.7.0?
    dst_snapshot = Snapshot(dst_dataset.host, '%s@%s' % (dst_dataset.name, metadata.snapshot))
    dst_snapshot.label = metadata.label
    dst_snapshot.version = metadata.version
    #dst_snapshot = self.get_snapshot(metadata.snapshot)
    dst_snapshot.repl_status = 'success'
    self._enforce_read_only(dst_dataset, read_only)
    # Cleanup files after marking the sync as success as we don't
    # really care if this goes well for the sake of sync integrity
    self._cleanup_sync_files(metadata, src_dir)
def send_to_file(self, src_dataset, label, dst_dir, file_prefix='zfssnap', suffix_length=None,
                 split_size=None, base_snapshot=None):
    """Snapshot *src_dataset* and stream it into split files in *dst_dir*.

    Writes the numbered segment files plus a JSON metadata file that
    describes them; the metadata is written before the snapshot is
    marked as successfully replicated (see comment below).
    """
    _base_snapshot = src_dataset.get_base_snapshot(label, base_snapshot)
    snapshot = src_dataset.snapshot(label, recursive=True)
    prefix = os.path.join(dst_dir, '%s_%s-' % (file_prefix, snapshot.timestamp))
    # Pattern matching split(1) verbose output to capture segment names.
    # NOTE(review): prefix and suffix_length are interpolated unescaped;
    # assumes a metacharacter-free path and a non-None suffix_length —
    # TODO confirm against callers.
    segments_log_pattern = r'^creating\sfile\s.*(%s[a-z]{%s}).*$' % (prefix, suffix_length)
    segments_log_re = re.compile(segments_log_pattern)
    send_cmd = src_dataset.get_send_cmd(snapshot, _base_snapshot)
    split_cmd = src_dataset.get_split_cmd(prefix, split_size, suffix_length)
    output = self._run_replication_cmd(send_cmd, split_cmd)
    # Collect the segment file names reported by split(1).
    segments = []
    for line in output:
        segment = self._get_segment_name(line, segments_log_re)
        if segment:
            segments.append(segment)
    LOGGER.info('Total segment count: %s', len(segments))
    # Ensure metadata file are written before repl_status are set to
    # 'success', so we are sure this end does not believe things are
    # ok and uses this snapshot as the base for the next sync while the
    # metadata file for the opposite end might not have been written
    metadata_file = os.path.join(dst_dir,
                                 '%s_%s.json' % (file_prefix, snapshot.timestamp))
    self._write_metadata_file(metadata_file, segments, snapshot, _base_snapshot)
    # See comment in replicate()
    snapshot.repl_status = 'success'
def _run_snapshot_policy(self, policy, reset=False):
    """Create policy snapshots and enforce retention.

    With reset=True no new snapshots are taken and all snapshots
    belonging to the policy are removed instead.
    """
    if not reset:
        sleep = 1
        LOGGER.debug('Sleeping %ss to avoid potential snapshot name '
                     'collisions due to matching timestamps', sleep)
        time.sleep(sleep)
    cfg = self.config.get_policy(policy)
    label = cfg['label']
    host = Host(cmds=cfg['cmds'])
    recursive = cfg['recursive']
    datasets = host.get_fsvols(cfg.get('include', None),
                               cfg.get('exclude', None),
                               recursive)
    keep = cfg['keep']
    self._aquire_lock()
    if reset:
        LOGGER.warning('Reset is enabled. Removing all snapshots for this policy')
    for dataset in datasets:
        if not reset:
            dataset.snapshot(label, recursive)
        dataset.enforce_retention(keep, label, recursive, reset)
    self._release_lock()
def _run_replicate_policy(self, policy, reset=False, base_snapshot=None):
    """Run a replicate policy: snapshot the source and send it to the
    destination host, then enforce snapshot retention on the source.

    reset=True destroys the destination dataset to reinitialize
    replication; base_snapshot optionally overrides the incremental base.
    """
    if not reset:
        # Snapshot names are timestamp-based, so back-to-back runs could
        # otherwise collide.
        sleep = 1
        LOGGER.debug('Sleeping %ss to avoid potential snapshot name '
                     'collisions due to matching timestamps', sleep)
        time.sleep(sleep)
    policy_config = self.config.get_policy(policy)
    src_host = Host(policy_config['source']['cmds'])
    src_dataset = src_host.get_fsvol(policy_config['source']['dataset'])
    # SSH parameters for reaching the destination host from the source.
    ssh_params = dict()
    ssh_params['ssh'] = policy_config['source']['cmds']['ssh']
    ssh_params['user'] = policy_config['destination']['ssh_user']
    ssh_params['host'] = policy_config['destination']['host']
    dst_host = Host(policy_config['destination']['cmds'], ssh_params)
    dst_dataset = dst_host.get_fsvol(policy_config['destination']['dataset'])
    label = policy_config['label']
    self._aquire_lock()
    if reset:
        LOGGER.warning('Reset is enabled. Reinitializing replication.')
        if dst_dataset:
            dst_dataset.destroy(recursive=True)
    else:
        # If this is the first replication run the destination file system
        # might not exist
        if not dst_dataset:
            dst_dataset = FsVol(dst_host, policy_config['destination']['dataset'])
        read_only = policy_config['destination']['read_only']
        self.replicate(src_dataset, dst_dataset, label, base_snapshot, read_only)
    keep = policy_config['keep']
    src_dataset.enforce_retention(keep, label, recursive=True, reset=reset,
                                  replication=True)
    self._release_lock()
def _run_receive_from_file_policy(self, policy, reset=False):
    """Apply any pending file-based replication streams to the
    destination dataset, oldest first.

    reset=True destroys the destination dataset to reinitialize
    replication instead of applying streams.
    """
    policy_config = self.config.get_policy(policy)
    if not reset:
        src_dir = policy_config['source']['dir']
        label = policy_config['label']
        file_prefix = policy_config.get('file_prefix', None)
        metadata_files = [
            f for f in self._get_metadata_files(src_dir, label, file_prefix)]
        # Return early if no metadata files are found to avoid triggering
        # unnecessary cache refreshes against the host
        if not metadata_files:
            LOGGER.debug('No metadata files found in %s', src_dir)
            return
    dst_host = Host(policy_config['cmds'])
    dst_dataset = dst_host.get_fsvol(policy_config['destination']['dataset'])
    self._aquire_lock()
    if reset:
        LOGGER.warning('Reset is enabled. Reinitializing replication.')
        if dst_dataset:
            dst_dataset.destroy(recursive=True)
    else:
        # If this is the first replication run the destination file system
        # might not exist
        if not dst_dataset:
            dst_dataset = FsVol(dst_host, policy_config['destination']['dataset'])
        read_only = policy_config['destination']['read_only']
        try:
            # Apply streams in timestamp order so incrementals find their
            # base snapshot.
            for metadata in sorted(metadata_files, key=attrgetter('datetime')):
                self.receive_from_file(dst_dataset, label, src_dir, metadata, read_only)
        except SegmentMissingException as e:
            LOGGER.error(e)
    self._release_lock()
def _run_send_to_file_policy(self, policy, reset=False, base_snapshot=None):
    """Run a send_to_file policy: snapshot the source and stream it to
    split files, then enforce snapshot retention on the source.

    reset=True skips the send and cleans up the policy's replication
    snapshots instead.
    """
    if not reset:
        # Snapshot names are timestamp-based, so back-to-back runs could
        # otherwise collide.
        sleep = 1
        LOGGER.debug('Sleeping %ss to avoid potential snapshot name '
                     'collisions due to matching timestamps', sleep)
        time.sleep(sleep)
    policy_config = self.config.get_policy(policy)
    label = policy_config['label']
    src_host = Host(policy_config['cmds'])
    src_dataset = src_host.get_fsvol(policy_config['source']['dataset'])
    dst_dir = policy_config['destination']['dir']
    file_prefix = policy_config['file_prefix']
    suffix_length = policy_config['suffix_length']
    split_size = policy_config['split_size']
    keep = policy_config['keep']
    self._aquire_lock()
    if reset:
        LOGGER.warning('Reset is enabled. Reinitializing replication.')
        LOGGER.warning('Cleaning up source replication snapshots')
    else:
        self.send_to_file(src_dataset, label, dst_dir, file_prefix, suffix_length,
                          split_size, base_snapshot)
    src_dataset.enforce_retention(keep, label, recursive=True, reset=reset,
                                  replication=True)
    self._release_lock()
@staticmethod
def _print_header(text):
print('%s' % text)
dash_count = len(text)
if text.startswith('\n'):
dash_count -= 1
print('-' * dash_count)
def _print_datasets(self, datasets, header='DATASETS'):
self._print_header(header)
for dataset in sorted(datasets, key=attrgetter('name')):
print(dataset.name)
def _print_snapshots(self, datasets, label, header='SNAPSHOTS'):
self._print_header(header)
for dataset in sorted(datasets, key=attrgetter('name')):
snapshots = dataset.get_snapshots(label)
for snapshot in sorted(snapshots, key=attrgetter('name')):
print(snapshot.name)
def _print_config(self, config):
    """Dump the policy configuration as block-style YAML."""
    self._print_header('POLICY CONFIG')
    dumped = yaml.dump(config, default_flow_style=False)
    print(dumped)
def _list_snapshot_policy(self, policy, list_mode):
    """List the config, datasets or snapshots of a snapshot policy.

    list_mode is one of 'config', 'fsvols' or 'snapshots'.
    """
    policy_config = self.config.get_policy(policy)
    label = policy_config['label']
    host = Host(policy_config['cmds'])
    recursive = policy_config['recursive']
    # list() instead of a pass-through comprehension; materialized so the
    # result can be iterated by the print helpers.
    datasets = list(host.get_fsvols(
        policy_config.get('include', None),
        policy_config.get('exclude', None),
        recursive))
    if list_mode == 'config':
        self._print_config(policy_config)
    elif list_mode == 'fsvols':
        self._print_datasets(datasets)
    elif list_mode == 'snapshots':
        self._print_snapshots(datasets, label)
def _list_send_to_file_policy(self, policy, list_mode):
    """List the config, dataset or snapshots of a send_to_file policy."""
    cfg = self.config.get_policy(policy)
    label = cfg['label']
    src_host = Host(cfg['cmds'])
    src_dataset = src_host.get_fsvol(cfg['source']['dataset'])
    if list_mode == 'config':
        self._print_config(cfg)
    elif list_mode == 'datasets':
        self._print_datasets([src_dataset], 'SOURCE DATASET')
    elif list_mode == 'snapshots':
        self._print_snapshots([src_dataset], label, 'SOURCE SNAPSHOTS')
def _list_receive_from_file_policy(self, policy, list_mode):
    """List the config, dataset or snapshots of a receive_from_file policy."""
    cfg = self.config.get_policy(policy)
    label = cfg['label']
    dst_host = Host(cfg['cmds'])
    dst_dataset = dst_host.get_fsvol(cfg['destination']['dataset'])
    # The destination may not exist before the first replication run.
    dst_datasets = [dst_dataset] if dst_dataset else []
    if list_mode == 'config':
        self._print_config(cfg)
    elif list_mode == 'datasets':
        self._print_datasets(dst_datasets, 'DESTINATION DATASET')
    elif list_mode == 'snapshots':
        self._print_snapshots(dst_datasets, label, 'DESTINATION SNAPSHOTS')
def _list_replicate_policy(self, policy, list_mode):
    """List the config, datasets or snapshots of a replicate policy."""
    cfg = self.config.get_policy(policy)
    label = cfg['label']
    src_host = Host(cfg['source']['cmds'])
    src_dataset = src_host.get_fsvol(cfg['source']['dataset'])
    # SSH parameters for reaching the destination host from the source.
    ssh_params = {
        'ssh': cfg['source']['cmds']['ssh'],
        'user': cfg['destination']['ssh_user'],
        'host': cfg['destination']['host'],
    }
    dst_host = Host(cfg['destination']['cmds'], ssh_params)
    dst_dataset = dst_host.get_fsvol(cfg['destination']['dataset'])
    # The destination may not exist before the first replication run.
    dst_datasets = [dst_dataset] if dst_dataset else []
    if list_mode == 'config':
        self._print_config(cfg)
    elif list_mode == 'fsvols':
        self._print_datasets([src_dataset], 'SOURCE DATASET')
        self._print_datasets(dst_datasets, '\nDESTINATION DATASET')
    elif list_mode == 'snapshots':
        self._print_snapshots([src_dataset], label, 'SOURCE SNAPSHOTS')
        self._print_snapshots(dst_datasets, label, '\nDESTINATION SNAPSHOTS')
def execute_policy(self, policy, list_mode=None, reset=False, base_snapshot=None):
    """Dispatch *policy* to its list or run handler based on its type.

    Raises ZFSSnapException for unknown policy types.
    """
    policy_type = self.config.get_policy(policy)['type']
    if policy_type == 'snapshot':
        if list_mode:
            self._list_snapshot_policy(policy, list_mode)
        else:
            self._run_snapshot_policy(policy, reset)
        return
    if policy_type == 'replicate':
        if list_mode:
            self._list_replicate_policy(policy, list_mode)
        else:
            self._run_replicate_policy(policy, reset, base_snapshot)
        return
    if policy_type == 'send_to_file':
        if list_mode:
            self._list_send_to_file_policy(policy, list_mode)
        else:
            self._run_send_to_file_policy(policy, reset, base_snapshot)
        return
    if policy_type == 'receive_from_file':
        if list_mode:
            self._list_receive_from_file_policy(policy, list_mode)
        else:
            self._run_receive_from_file_policy(policy, reset)
        return
    raise ZFSSnapException('%s is not a valid policy type' % policy_type)
def main():
parser = argparse.ArgumentParser(
description='Automatic snapshotting and replication for ZFS on Linux')
mutex_group = parser.add_mutually_exclusive_group(required=True)
mutex_group.add_argument('--version', action='store_true',
help='Print version and exit')
mutex_group.add_argument('--policy', help='Select policy')
mutex_group2 = parser.add_mutually_exclusive_group()
mutex_group2.add_argument(
'--reset', action='store_true',
help='Remove all policy snapshots or reinitialize replication')
mutex_group2.add_argument('--list', help='List policy information',
choices=[
'snapshots',
'fsvols',
'config',
])
mutex_group2.add_argument(
'--base-snapshot', metavar='NAME',
help='Override the base snapshot used for replication')
parser.add_argument('--quiet', action='store_true',
help='Suppress output from script')
parser.add_argument(
'--log-level',
choices=[
'CRITICAL',
'ERROR',
'WARNING',
'INFO',
'DEBUG'
],
default='INFO', help='Set log level for console output. Default: INFO')
parser.add_argument('--config', metavar='PATH',
help='Path to configuration file')
parser.add_argument('--lockfile', metavar='PATH',
help='Override path to lockfile')
args = parser.parse_args()
if args.version:
print('zfssnap v%s' % VERSION)
return 0
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
if not args.quiet:
fmt |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.