import sys
test_cases = open(sys.argv[1], 'r')
test_lines = (line.rstrip() for line in test_cases)
vampir_pts = 3
zombie_pts = 4
witch_pts = 5
for test in test_lines:
    data = test.split(',')
    vamp_num = int(data[0].split(':')[-1])
    zomb_num = int(data[1].split(':')[-1])
    witch_num = int(data[2].split(':')[-1])
    total_houses = int(data[3].split(':')[-1])
    total_kids = vamp_num + zomb_num + witch_num
    total_vamp_candies = vamp_num * total_houses * vampir_pts
    total_zomb_candies = zomb_num * total_houses * zombie_pts
    total_witch_candies = witch_num * total_houses * witch_pts
    all_candies = total_vamp_candies + total_zomb_candies + total_witch_candies
    for_each_candy = int(all_candies / total_kids)
    print(for_each_candy)
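# Note on the input format (a sketch of the assumed shape, inferred from the
# parsing above; the real field labels are not shown in this file): each line
# carries four comma-separated "label: number" fields in the order vampires,
# zombies, witches, houses, e.g.
#   Vampires: 2, Zombies: 3, Witches: 1, Houses: 10
# Every kid visits every house and vampires earn 3 candies per house, zombies 4,
# witches 5, so this line pools 2*10*3 + 3*10*4 + 1*10*5 = 230 candies and
# prints 230 // 6 kids = 38 candies per kid.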
|
import ttc_util
import traceback
import itertools
import os
import copy
import math
import sys
import random
import transpose
OKGREEN = '\033[92m'
FAIL = '\033[91m'
WARNING = '\033[93m'
ENDC = '\033[0m'
class transposeGenerator:
def __init__(self, perm, loopPermutations, size, alpha, beta, maxNumImplementations,
floatTypeA, floatTypeB, parallelize, streamingStores, prefetchDistances, blockings, papi,
noTest, scalar, align, architecture, mpi, lda, ldb, silent,
tmpDirectory, hotA = 0, hotB = 0, emitReference = 0):
self.hotA = hotA
self.hotB = hotB
self.silent = silent
self.tmpDirectory = tmpDirectory
self.mpi = mpi
self.lda = copy.deepcopy(lda)
self.ldb = copy.deepcopy(ldb)
self.scalar = scalar
self.noTest = noTest
self.streamingStores = streamingStores
self.architecture = architecture
self.cacheLineSize = 64 #in bytes
self.alignmentRequirement = 32 #in bytes for AVX
self.registerSizeBits = 256
if architecture == "power":
self.streamingStores = 0
self.cacheLineSize = 128 #in bytes
elif architecture == "knc":
self.streamingStores = 0
self.registerSizeBits = 512
self.cacheLineSize = 128 #in bytes
self.alignmentRequirement = 64
elif architecture == "avx512":
self.registerSizeBits = 512
self.cacheLineSize = 128 #in bytes
self.alignmentRequirement = 64
self.parallelize = parallelize
self.floatTypeA = floatTypeA
self.floatTypeB = floatTypeB
self.papi = papi
self.alpha = alpha
self.beta = beta
self.size = copy.deepcopy(size)
self.dim = len(perm)
self.perm = copy.deepcopy(perm)
self.indent = " "
self.prefetchDistances = copy.deepcopy(prefetchDistances)
if( self.perm[0] == 0 ):
self.prefetchDistances = [0] #we don't support prefetching in this case
self.aligned = 1
self.floatSizeA = ttc_util.getFloatTypeSize(floatTypeA)
self.floatSizeB = ttc_util.getFloatTypeSize(floatTypeB)
if( self.scalar == 1):
self.microBlocking = ((1,1),"NOT AVAILABLE")
else:
self.microBlocking = self.getTranspositionMicroKernel()
if( self.microBlocking[0][0] * 8 * self.floatSizeA < self.registerSizeBits and self.floatTypeA != self.floatTypeB ):
# this is not implemented yet => fallback to scalar
self.scalar = 1
self.microBlocking = (self.microBlocking[0],"NOT AVAILABLE")
self.registerSizeBits = self.microBlocking[0][0] * 8 * self.floatSizeA
#obey the alignment requirements for streaming-stores
if( (self.size[0] * self.floatSizeA) % self.alignmentRequirement != 0 or
(self.size[self.perm[0]] * self.floatSizeB) % self.alignmentRequirement != 0):
self.aligned = 0
if( align != 1 ):
self.aligned = 0
#initialize available blockings
minA = self.microBlocking[0][0]
minB = self.microBlocking[0][1]
maxA = minA * 4
maxB = minB * 4
self.blockings = []
if( not emitReference ):
if( self.aligned != 1 and (self.architecture == "knc" or self.architecture == "power") ):
print WARNING + "WARNING: non-aligned is not yet supported for the specified architecture" + ENDC
print WARNING + " => Fallback: use non-vectorized code." + ENDC
self.scalar = 1
if( self.perm[0] == 0):
if( len(blockings) == 0 ): #default, no blockings provided => use all blockings
if( len(self.size) == 1 ): #this is only the case if perm = IDENTITY _and_ lda and ldb are non-default
self.blockings.append((1,1))
else:
for i in range (1,11):
for j in range (1,11):
if( self.size[1] >= i and self.size[self.perm[1]] >= j ):
self.blockings.append((i,j))
else:
if( len(blockings) == 0 ): #default, no blockings provided => use all blockings
for i in range (minA,maxA+1,minA):
for j in range (minB,maxB+1,minB):
if( self.size[0] >= i and self.size[self.perm[0]] >= j ):
self.blockings.append((i,j))
else:
for blocking in blockings:
if( blocking[0] % minA != 0):
print FAIL + "[TTC] ERROR: blocking in A (%d) is not a multiple of %d."%(blocking[0],minA) + ENDC
exit(-1)
if( blocking[1] % minB != 0):
print FAIL + "[TTC] ERROR: blocking in B (%d) is not a multiple of %d."%(blocking[1],minB) + ENDC
exit(-1)
if( self.size[0] >= blocking[0] and self.size[self.perm[0]] >= blocking[1] ):
self.blockings.append(blocking)
if( len(self.blockings) == 0): #this is needed to find solutions for which the size is smaller than the smallest blocking
self.blockings.append((minA,minB))
#sort blockings according to cost
tmpBlockings = []
for blocking in self.blockings:
tmpBlockings.append((blocking, self.getCostBlocking(blocking)))
tmpBlockings.sort(key=lambda tup: tup[1])
tmpBlockings.reverse()
self.blockings = []
for (blocking, cost) in tmpBlockings:
self.blockings.append(blocking)
self.implementations = []
self.maxNumImplementations = maxNumImplementations
#generate scalar version as reference
optRef = ""
if( ttc_util.streamingStoresApplicable(self.ldb, self.size, self.perm, self.beta, self.cacheLineSize, self.floatSizeB, self.streamingStores) ):
optRef = "streamingstore"
self.referenceImplementation = transpose.implementation((1,1),
perm[-1::-1], self.perm, self.size, self.alpha, self.beta,
self.floatTypeA, self.floatTypeB, optRef, 1, 0,(1,1),1, self.architecture, parallelize)
self.minImplementationsPerFile = 64
self.maxImplementationsPerFile = 256
start = 0
if( self.perm[0] == 0 ): #the first index will always be within our kernel (i.e., it will always be the inner-most loop)
start = 1
if( len(loopPermutations) == 0):
self.loopPermutations = []
for loopPerm in itertools.permutations(range(start, self.dim)):
self.loopPermutations.append(loopPerm)
else:
self.loopPermutations = copy.deepcopy(loopPermutations)
# sort loopPermutations
self.loopPermutations.sort(key=lambda tup: ttc_util.getCostLoop(tup, self.perm, self.size))
######################################
# Reduce search space
######################################
# combine the best sqrt(maxNumImplementations) blockings with the best sqrt(maxNumImplementations) loopOrders
# only keep the best sqrt(maxNumImplementations) blockings
maxBlockings = math.ceil(math.sqrt(float(maxNumImplementations)))
while( len(self.blockings) > maxBlockings
and len(self.blockings) * len(self.loopPermutations) > maxNumImplementations):
self.blockings.pop()
maxLoopPermutations= maxBlockings
while( len(self.loopPermutations) > maxLoopPermutations
and len(self.blockings) * len(self.loopPermutations) > maxNumImplementations):
self.loopPermutations.pop()
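# Worked example for the reduction above (illustrative numbers only): with
# maxNumImplementations = 200, maxBlockings = ceil(sqrt(200)) = 15, so the two
# sorted candidate lists are popped from the back until each holds at most 15
# entries or their product already fits within the 200-implementation budget.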
def getCostBlocking(self, blocking):
if( len(self.size) == 1):
return 1 #we don't have any blockings in this case
#remainder should be zero
size0 = self.size[0]
sizep0 = self.size[self.perm[0]]
if(self.perm[0] == 0):
size0 = self.size[1]
sizep0 = self.size[self.perm[1]]
remainderA = (size0 % blocking[0]) / float(size0) #should be (close to) zero
remainderB = (sizep0 % blocking[1]) / float(sizep0) #should be (close to) zero
#blocking should be multiple of cacheline
numElementsPerCacheLine = self.cacheLineSize / self.floatSizeA
numCacheLines = (blocking[0] + numElementsPerCacheLine - 1) / numElementsPerCacheLine
cacheLineUtilizationA = blocking[0] / float(numCacheLines * numElementsPerCacheLine) #should be (close to) one
numCacheLines = (blocking[1] + numElementsPerCacheLine - 1) / numElementsPerCacheLine
cacheLineUtilizationB = blocking[1] / float(numCacheLines * numElementsPerCacheLine) #should be (close to) one
metric = (cacheLineUtilizationA + cacheLineUtilizationB)/2.0 #should be close to 1
metric += ((1 - remainderA) + (1- remainderB))/2.0
return metric /2.0
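# Illustrative evaluation of the metric above (assuming the AVX defaults from
# __init__: 64-byte cache lines and 4-byte floats, i.e. 16 elements per line):
# for sizes divisible by a (16, 8) blocking, cacheLineUtilizationA = 16/16 = 1.0,
# cacheLineUtilizationB = 8/16 = 0.5 and both remainders are 0, so the method
# returns ((1.0 + 0.5)/2.0 + (1 + 1)/2.0) / 2.0 = 0.875.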
def getNumSolutions(self):
return len(self.implementations)
def generateOffsetFile(self, directory):
codeOffset = "#ifndef _TTC_OFFSET_H\n"
codeOffset += "#define _TTC_OFFSET_H\n"
codeOffset += "struct Offset\n{\n"
codeOffset += " int offsetA;\n"
codeOffset += " int offsetB;\n"
codeOffset += "};\n\n"
codeOffset += "#endif\n"
if(directory[-1] != '/'):
directory += '/'
offsetFile = open(directory + "ttc_offset.h","w")
offsetFile.write(codeOffset)
offsetFile.close()
def generateVersion(self,versionStr):
#used to generate a specific implementation
for impl in self.implementations:
if( impl.getVersionName() == versionStr ):
prefetchDistances = list(set([impl.getPrefetchDistance(), 0])) #we need prefetch distance 0 for the remainder while-loop
code = "#if defined(__ICC) || defined(__INTEL_COMPILER)\n"
code += "#define INLINE __forceinline\n"
code += "#elif defined(__GNUC__) || defined(__GNUG__)\n"
code += "#define INLINE __attribute__((always_inline))\n"
code += "#endif\n\n"
if(len(prefetchDistances)>1):
code += "#include <queue>\n"
code += "#include \"ttc_offset.h\"\n"
code += self.generateTranspositionKernel([impl.getBlocking()],prefetchDistances, 1, [impl.optimization])[0]
return code + impl.getImplementation(self.parallelize, clean=1 )
return ""
def generate(self):
if( len(self.size) != 1 and ( not(self.perm[0] == 0 and self.perm[1] == 1)) ): #only use code generator if at least one of the first two indices changes
self.genCandidates()
self.printMain()
self.generateImplementations()
def getAppropriateOptimizations(self):
optimizations = []
# streaming-stores
if( ttc_util.streamingStoresApplicable(self.ldb, self.size, self.perm, self.beta, self.cacheLineSize, self.floatSizeB, self.streamingStores) ):
optimizations.append("streamingstore")
else:
optimizations.append("")
return optimizations
def listToString(self, perm):
string = ""
for s in perm:
string += str(s) + ","
print string
return string[:-1]
def genCandidates(self):
optimizations = self.getAppropriateOptimizations()
counter = 0
#generate all implementations
for prefetchDistance in self.prefetchDistances:
for blocking in self.blockings:
for loopPerm in self.loopPermutations:
for opt in optimizations:
if( opt == "streamingstore" and not ttc_util.streamingStoresApplicable(self.ldb, self.size, self.perm, self.beta, self.cacheLineSize, self.floatSizeB, self.streamingStores) ):
#skip this optimization if the blocking in B is not a multiple of the cacheLineSize
continue
counter += 1
if( self.silent != 1):
sys.stdout.write("[TTC] Implementations generated so far: %d \r"%counter)
sys.stdout.flush()
implementation = transpose.implementation(blocking, loopPerm,
self.perm, self.size, self.alpha, self.beta, self.floatTypeA, self.floatTypeB,
opt, self.scalar, prefetchDistance, self.microBlocking[0],
0, self.architecture, self.parallelize)
if( len(self.implementations) < self.maxNumImplementations ):
self.implementations.append(implementation)
self.implementations.sort(key=lambda tup: tup.getCostLoop() )
elif( self.implementations[-1].getCostLoop() > implementation.getCostLoop() ):
self.implementations.pop()
self.implementations.append(implementation)
self.implementations.sort(key=lambda tup: tup.getCostLoop() )
return len(self.implementations)
def generateUtil(self):
code = ""
code +="#include \"transpose.h\"\n"
code +="#include <omp.h>\n"
code +="#include <fstream>\n"
code +="#include <float.h>\n"
code +="#include <stdlib.h>\n"
code +="#include <stdio.h>\n"
code +="#include <time.h>\n"
code +="#include <string>\n"
if self.architecture == "avx" or self.architecture == "avx512" or self.architecture == "knc":
code +="#include <immintrin.h>\n"
code +="#include <xmmintrin.h>\n"
elif self.architecture == "power":
code += "#include <builtins.h>\n"
code += "#include <altivec.h>\n"
code +="#include <complex.h>\n"
if self.papi:
code +="#include <papi.h>\n"
code +="\n"
hppCode ="#include <complex.h>\n"
hppCode +="#include <stdio.h>\n"
hppCode +="#include <float.h>\n"
hppCode +="#include <omp.h>\n"
hppCode +="#include <stdlib.h>\n"
hppCode +="#include <string>\n"
if self.papi:
hppCode +="#include <papi.h>\n"
hppCode +="\n"
hppCode +="void restoreA(const %s *in, %s*out, int total_size);"%(self.floatTypeA,self.floatTypeA)
code +="void restoreA(const %s *in, %s*out, int total_size)"%(self.floatTypeA,self.floatTypeA)
code +="{\n"
code +=" for(int i=0;i < total_size ; ++i){\n"
code +=" out[i] = in[i];\n"
code +=" }\n"
code +="}\n"
hppCode +="void restoreB(const %s *in, %s*out, int total_size);"%(self.floatTypeB,self.floatTypeB)
code +="void restoreB(const %s *in, %s*out, int total_size)"%(self.floatTypeB,self.floatTypeB)
code +="{\n"
code +=" for(int i=0;i < total_size ; ++i){\n"
code +=" out[i] = in[i];\n"
code +=" }\n"
code +="}\n"
hppCode +="int equal(const %s *A, const %s*B, int total_size);"%(self.floatTypeB,self.floatTypeB)
code +="int equal(const %s *A, const %s*B, int total_size)"%(self.floatTypeB,self.floatTypeB)
code +="{\n"
code +=" int error = 0;\n"
if( self.floatTypeB.find("complex") != -1 ):
_floatTypeB = "float"
if( self.floatTypeB.find("double") != -1 ):
_floatTypeB = "double"
code +=" const %s *Atmp = (%s*)A;\n"%(_floatTypeB,_floatTypeB)
code +=" const %s *Btmp= (%s*)B;\n"%(_floatTypeB,_floatTypeB)
if self.parallelize != 0:
code +=" #pragma omp parallel for reduction(+:error) \n"
code +=" for(int i=0;i < 2*total_size ; ++i){\n"
else:
_floatTypeB = self.floatTypeB
code +=" const %s *Atmp= A;\n"%self.floatTypeB
code +=" const %s *Btmp= B;\n"%self.floatTypeB
if self.parallelize != 0:
code +=" #pragma omp parallel for reduction(+:error) \n"
code +=" for(int i=0;i < total_size ; ++i){\n"
code +=" double Aabs = (Atmp[i] < 0) ? -Atmp[i] : Atmp[i];\n"
code +=" double Babs = (Btmp[i] < 0) ? -Btmp[i] : Btmp[i];\n"
code +=" double max = (Aabs < Babs) ? Babs : Aabs;\n"
code +=" double diff = (Aabs - Babs);\n"
code +=" diff = (diff < 0) ? -diff : diff;\n"
code +=" if(diff > 0){\n"
code +=" double relError = (diff / max);\n"
if( self.floatTypeA.find("float") != -1 or self.floatTypeB.find("float") != -1):
code +=" if(relError > 4e-5){\n"
else:
code +=" if(relError > 1e-12){\n"
code +=" //printf(\"i: %d relError: %.8e\\n\",i,relError);\n"
code +=" error += 1;\n"
code +=" }\n"
code +=" }\n"
code +=" }\n"
#code +=" return error;\n"
code +=" return (error > 0) ? 0 : 1;\n"
code +="}\n"
f = open(self.tmpDirectory + "util.cpp",'w')
f.write(code)
f.close()
f = open(self.tmpDirectory + "util.h",'w')
f.write(hppCode)
f.close()
def printMain(self):
code = ""
code +="#include \"transpose.h\"\n"
code +="#include \"util.h\"\n"
code +="#include \"measure.h\"\n"
code +="#include <fstream>\n"
code +="#include <time.h>\n"
if self.mpi:
code +="#include <mpi.h>\n"
if self.architecture == "avx" or self.architecture == "avx512" or self.architecture == "knc":
code +="#include <immintrin.h>\n"
code +="#include <xmmintrin.h>\n"
elif self.architecture == "power":
code += "#include <builtins.h>\n"
code += "#include <altivec.h>\n"
code +="#include <complex.h>\n"
code +="\n"
self.generateUtil()
code +="\n"
if self.papi:
code +="int PapiEventSet;\n"
code +="int main(int argc, char** argv)\n"
code +="{\n"
if self.mpi:
code +=" MPI_Init(&argc, &argv);\n"
if self.papi:
code +=" int retval;\n"
code +=" PapiEventSet = PAPI_NULL;\n"
code +=" /* Initialize the PAPI library */\n"
code +=" retval = PAPI_library_init(PAPI_VER_CURRENT);\n"
code +=" if (retval != PAPI_VER_CURRENT) {\n"
code +=" fprintf(stderr, \"PAPI library init error!\\n\");\n"
code +=" exit(1);\n"
code +=" }\n"
code +=" /* Create the Event Set */\n"
code +=" if (PAPI_create_eventset(&PapiEventSet) != PAPI_OK)\n"
code +=" fprintf(stderr,\"Error: Papi event not available.\\n\");\n"
code +=" if (PAPI_add_event(PapiEventSet, PAPI_TLB_DM) != PAPI_OK)\n"
code +=" fprintf(stderr,\"Error: Papi event not available\\n\");\n"
code +=" if (PAPI_add_event(PapiEventSet, PAPI_L2_DCM) != PAPI_OK)\n"
code +=" fprintf(stderr,\"Error: Papi event not available\\n\");\n"
#code +=" if (PAPI_add_event(PapiEventSet, PAPI_CA_INV) != PAPI_OK)\n"
#code +=" fprintf(stderr,\"Error: Papi invalidate event not available\\n\");\n"
code +=" srand(time(NULL));\n"
code +="\n"
code +=" double start;\n"
code +=" int nRepeat = 4;\n"
code +=" if(argc > 2) nRepeat = atoi(argv[2]);\n"
code +=" int dim = %d;\n"%self.dim
line = " int size[] = {"
for i in range(self.dim):
line += str(self.size[i])
if i != self.dim -1:
line += ","
line += "};\n"
code +=line
maxSizeA = 1
if( len(self.lda) == 0):
for s in self.size:
maxSizeA *= s
line = " int *lda = NULL;\n"
else:
for s in self.lda:
maxSizeA *= s
line = " int lda[] = {"
for i in range(self.dim):
line += str(self.lda[i])
if i != self.dim -1:
line += ","
line += "};\n"
code +=line
maxSizeB = 1
if( len(self.ldb) == 0):
for s in self.size:
maxSizeB *= s
line = " int *ldb = NULL;\n"
else:
for s in self.ldb:
maxSizeB *= s
line = " int ldb[] = {"
for i in range(self.dim):
line += str(self.ldb[i])
if i != self.dim -1:
line += ","
line += "};\n"
code +=line
maxSize = max(maxSizeA, maxSizeB)
code +=" int total_size = %d;\n"%(maxSize)
code +=" int elements_moved = 1;\n"
code +=" //compute total size\n"
code +=" for(int i=0;i < dim; ++i){\n"
code +=" elements_moved *= size[i];\n"
code +=" }\n"
code +="\n"
code +=" double *trash1, *trash2;\n"
code +=" %s *A, *A_copy;\n"%(self.floatTypeA)
code +=" %s *B, *B_ref, *B_copy;\n"%(self.floatTypeB)
code +=" double time;\n"
if( self.floatTypeA.find("double") != -1 ):
code +=" const double alpha = %f;\n"%(self.alpha)
else:
code +=" const float alpha = %f;\n"%(self.alpha)
if( self.floatTypeB.find("double") != -1 ):
code +=" const double beta = %f;\n"%(self.beta)
else:
code +=" const float beta = %f;\n"%(self.beta)
code +=" int largerThanL3 = 1024*1024*100/sizeof(double); \n"
code +=" int ret = posix_memalign((void**) &trash1, %d, sizeof(double) * largerThanL3);\n"%(self.alignmentRequirement)
code +=" ret += posix_memalign((void**) &trash2, %d, sizeof(double) * largerThanL3);\n"%(self.alignmentRequirement)
code +=" ret += posix_memalign((void**) &A, %d, sizeof(%s) * total_size);\n"%(self.alignmentRequirement, self.floatTypeA)
code +=" ret += posix_memalign((void**) &B_ref, %d, sizeof(%s) * total_size);\n"%(self.alignmentRequirement, self.floatTypeB)
code +=" ret += posix_memalign((void**) &B_copy, %d, sizeof(%s) * total_size);\n"%(self.alignmentRequirement, self.floatTypeB)
code +=" ret += posix_memalign((void**) &A_copy, %d, sizeof(%s) * total_size);\n"%(self.alignmentRequirement, self.floatTypeA)
code +=" ret += posix_memalign((void**) &B, %d, sizeof(%s) * total_size);\n"%(self.alignmentRequirement, self.floatTypeB)
code +=" if( ret != 0){ printf(\"[TTC] ERROR: posix_memalign failed\\n\"); exit(-1); }\n"
code +=" const %s *A_const = A;\n"%(self.floatTypeA)
code +=" const %s *B_copy_const = B_copy;\n"%(self.floatTypeB)
code +="\n"
if self.parallelize != 0:
code +=" #pragma omp parallel for\n"
code +=" for(int i=0;i < largerThanL3; ++i){\n"
code +=" trash1[i] = 0;\n"
code +=" trash2[i] = 0;\n"
code +=" }\n"
if self.floatTypeA.find("complex") != -1:
tmpTypeA = "float"
if self.floatTypeA.find("double") != -1:
tmpTypeA = "double"
tmpTypeB = "float"
if self.floatTypeB.find("double") != -1:
tmpTypeB = "double"
code +=" %s *Atmp = (%s*) A;\n"%(tmpTypeA,tmpTypeA)
code +=" %s *Btmp = (%s*) B;\n"%(tmpTypeB,tmpTypeB)
if self.parallelize != 0:
code +=" #pragma omp parallel for\n"
code +=" for(int i=0;i < 2*total_size ; ++i){\n"
code +=" Atmp[i] = (%s)i;\n"%(tmpTypeA)
code +=" Btmp[i] = (%s)i;\n"%(tmpTypeB)
code +=" }\n"
if self.parallelize != 0:
code +=" #pragma omp parallel for\n"
code +=" for(int i=0;i < total_size ; ++i){\n"
code +=" B_ref[i] = B[i];\n"
code +=" B_copy[i] = B[i];\n"
code +=" A_copy[i] = A[i];\n"
code +=" }\n"
else:
if self.parallelize != 0:
code +=" #pragma omp parallel for\n"
code +=" for(int i=0;i < total_size ; ++i){\n"
code +=" A[i] = (%s)i;\n"%(self.floatTypeA)
code +=" B[i] = (%s)i;\n"%(self.floatTypeB)
code +=" B_ref[i] = B[i];\n"
code +=" B_copy[i] = B[i];\n"
code +=" A_copy[i] = A[i];\n"
code +=" }\n"
code +="\n"
if self.mpi:
code += " int rank, numRanks;\n"
code += " MPI_Comm_rank(MPI_COMM_WORLD, &rank);\n"
code += " MPI_Comm_size(MPI_COMM_WORLD, &numRanks);\n"
code +=" /***************************************************\n"
code +=" *make sure that all versions yield the same result\n"
code +=" ***************************************************/\n"
code +=" double referenceBandwidth = 0;\n"
if( self.noTest == 0 ):
if(self.beta != 0 ):
code +=" %s(A_const, B_ref, alpha, beta, size, lda, ldb);\n"%self.referenceImplementation.getTransposeName()
else:
code +=" %s(A_const, B_ref, alpha, size, lda, ldb);\n"%self.referenceImplementation.getTransposeName()
refVersionStr = self.referenceImplementation.getTransposeName()
#time reference version
code +=" //time reference version\n"
code +=" if( argc == 1 || argc >= 2 && std::string(\"" + refVersionStr + "\").compare(argv[1]) == 0){\n"
code +=" time = FLT_MAX;\n"
code +=" for(int i = 0; i < nRepeat; i++){\n"
if( self.noTest == 0 ):
code +=" if( i < 2 )\n"
code +=" restoreB(B_copy_const, B, total_size);\n"
code +=" trashCache(trash1, trash2,largerThanL3);\n"
if( self.hotB ):
code +=" restoreB(B_copy_const, B, total_size);\n"
if( self.hotA ):
code +=" restoreA(A_const, A_copy, total_size);\n"
if( self.mpi ):
code +=" MPI_Barrier(MPI_COMM_WORLD);\n"
code +=" start = omp_get_wtime();\n"
if(self.beta != 0 ):
code +=" %s(A_const, B, alpha, beta, size, lda, ldb);\n"%refVersionStr
else:
code +=" %s(A_const, B, alpha, size, lda, ldb);\n"%refVersionStr
if( self.mpi ):
code +=" MPI_Barrier(MPI_COMM_WORLD);\n"
code +=" double tmpTime = omp_get_wtime() - start;\n"
code +=" if( tmpTime < time ) time = tmpTime;\n"
code +=" }\n"
if self.beta != 0:
code +=" double bandwidth = ((double)(elements_moved * (sizeof("+self.floatTypeA+") + 2.0 * sizeof("+self.floatTypeB+"))))/(1<<30)/(time);\n"
else:
code +=" double bandwidth = ((double)(elements_moved * (sizeof("+self.floatTypeA+") + 1.0 * sizeof("+self.floatTypeB+"))))/(1<<30)/(time);\n"
code +=" if( time <= 0.0) bandwidth = 100;\n" #if the transpose didn't take enough time too measure it, we just fix the bandwidth to 100 #TODO
code +=" referenceBandwidth = bandwidth;\n"
if( self.mpi ):
code +=" referenceBandwidth *= numRanks;\n"
code +=" if(rank == 0)\n"
code +=" printf(\"reference version %s took %%e and achieved %%.2f GB/s \\n\",time, referenceBandwidth );\n"%refVersionStr
code +=" fflush(stdout);\n"
code +=" }\n"
code +=" double maxBandwidth = -1;\n"
code +=" double maxTop1Bandwidth = -1;\n"
code +=" double maxTop5Bandwidth = -1;\n"
code +=" double tmpBandwidth = -1;\n"
counter = 0
numImplementations = len(self.implementations)
numFiles = max(1,(numImplementations + self.minImplementationsPerFile -1) / self.minImplementationsPerFile)
if( numFiles > 20 ):
numFiles = (numImplementations + self.maxImplementationsPerFile -1) / self.maxImplementationsPerFile
numSolutionsPerFile = (numImplementations + numFiles - 1) / numFiles
loopCosts = []
for impl in self.implementations:
loopCosts.append(impl.getCostLoop())
loopCosts = list(set(loopCosts))
loopCosts.sort()
#split measurement into several files
measureHPP = ""
for i in range(numFiles):
code += " tmpBandwidth = measure%d(nRepeat, argc, argv, A_const, A_copy, B, B_copy_const, B_ref, alpha, beta, total_size, elements_moved, largerThanL3, size, trash1, trash2, lda, ldb);\n"%(i)
code += " maxBandwidth = (tmpBandwidth < maxBandwidth) ? maxBandwidth : tmpBandwidth;\n"
tmpCode = "#include \"util.h\"\n"
tmpCode += "#include \"transpose.h\"\n"
if self.mpi:
tmpCode += "#include <mpi.h>\n"
alphaFloatType = "float"
if( self.floatTypeA.find("double") != -1 ):
alphaFloatType = "double"
betaFloatType = "float"
if( self.floatTypeB.find("double") != -1 ):
betaFloatType = "double"
header = """double measure%d(int nRepeat, int argc, char** argv, const %s *
A_const, %s * A_copy, %s * B, const %s * B_copy_const, const %s * B_ref, const %s alpha,
const %s beta, int total_size, int elements_moved,
int largerThanL3, int *size, double *trash1, double
*trash2, int* lda, int* ldb)"""%(i,self.floatTypeA,self.floatTypeA,self.floatTypeB,self.floatTypeB,self.floatTypeB,alphaFloatType , betaFloatType)
if self.papi:
tmpCode += "extern int PapiEventSet;\n"
tmpCode += header + "{\n"
if self.mpi:
tmpCode += " int rank, numRanks;\n"
tmpCode += " MPI_Comm_rank(MPI_COMM_WORLD, &rank);\n"
tmpCode += " MPI_Comm_size(MPI_COMM_WORLD, &numRanks);\n"
measureHPP += header + ";\n"
tmpCode += " double maxBandwidth = -1;\n"
tmpCode += " long long values[3];\n"
for j in range(i * numSolutionsPerFile, min(numImplementations,(i+1)*numSolutionsPerFile)):
implementation = self.implementations[j]
transposeName = implementation.getTransposeName()
versionStr = implementation.getVersionName()
tmpCode +=" if( argc == 1 || argc >= 2 && std::string(\"" + versionStr + "\").compare(argv[1]) == 0){\n"
tmpCode += " long long tlb_misses = 0;\n"
tmpCode += " long long l2misses = 0;\n"
tmpCode += " long long invalidates = 0;\n"
tmpCode +=" double time = FLT_MAX;\n"
tmpCode +=" for(int i = 0; i < nRepeat; i++){\n"
if( self.noTest == 0 ):
tmpCode +=" if( i < 2 )\n"
tmpCode +=" restoreB(B_copy_const, B, total_size);\n"
tmpCode +=" trashCache(trash1, trash2,largerThanL3);\n"
if( self.hotB ):
tmpCode +=" restoreB(B_copy_const, B, total_size);\n"
if( self.hotA ):
tmpCode +=" restoreA(A_const, A_copy, total_size);\n"
if self.mpi:
tmpCode +=" MPI_Barrier(MPI_COMM_WORLD);\n"
tmpCode +=" double start = omp_get_wtime();\n"
if self.papi:
tmpCode +=" /* Start counting */\n"
tmpCode +=" if (PAPI_start(PapiEventSet) != PAPI_OK)\n"
tmpCode +=" printf(\"Error: papi_start\\n\");\n"
if(self.beta != 0):
tmpCode +=" %s(A_const, B, alpha, beta, size, lda, ldb);\n"%transposeName
else:
tmpCode +=" %s(A_const, B, alpha, size, lda, ldb);\n"%transposeName
if self.papi:
tmpCode +=" if (PAPI_stop(PapiEventSet, values) != PAPI_OK)\n"
tmpCode +=" printf(\"Error: papi_stop\\n\");\n"
tmpCode +=" tlb_misses += values[0];\n"
#tmpCode +=" l2misses += values[1];\n"
#tmpCode +=" invalidates += values[2];\n"
if self.mpi:
tmpCode +=" MPI_Barrier(MPI_COMM_WORLD);\n"
tmpCode +=" double tmpTime = omp_get_wtime() - start;\n"
tmpCode +=" if( tmpTime < time ) time = tmpTime;\n"
if( self.noTest == 0 ):
tmpCode +=" if(i == 0 && !equal(B_ref, B, total_size) ){\n"
#tmpCode +=" printf(\"B_ref:\\n\");\n"
#tmpCode +=" printMatrix2D(B_ref, size);\n"
#tmpCode +=" printf(\"B:\\n\");\n"
#tmpCode +=" printMatrix2D(B, size);\n"
tmpCode +=" printf(\"ERROR version "+versionStr+" doesn't give the same result (line %d)\\n\",__LINE__);\n"
tmpCode +=" exit(-1);\n"
tmpCode +=" };\n"
tmpCode +=" }\n"
if self.beta != 0:
tmpCode +=" double bandwidth = ((double)(elements_moved * (sizeof("+self.floatTypeA+") + 2.0 * sizeof("+self.floatTypeB+"))))/(1<<30)/(time);\n"
else:
tmpCode +=" double bandwidth = ((double)(elements_moved * (sizeof("+self.floatTypeA+") + 1.0 * sizeof("+self.floatTypeB+"))))/(1<<30)/(time);\n"
tmpCode +=" if( time <= 0.0) bandwidth = 100;\n" #if the transpose didn't take enough time too measure it, we just fix the bandwidth to 100 #TODO
if( self.mpi ):
tmpCode +=" bandwidth *= numRanks;\n"
tmpCode +=" if( bandwidth > maxBandwidth ) maxBandwidth = bandwidth;\n"
blockingRank = -1
for rank in range(len(self.blockings)):
if self.blockings[rank] == implementation.getBlocking():
blockingRank = rank
break
loopRank = loopCosts.index(implementation.getCostLoop())
#if( self.loopRank.has_key(tuple(implementation.getLoopPerm())) ):
# loopRank = self.loopRank[tuple(implementation.getLoopPerm())]
if( self.mpi ):
tmpCode +=" if(rank == 0)\n"
tmpCode +=" printf(\"variant "+versionStr+" took %%e and achieved %%.2f GB/s (blocking rank: %d) (loop rank: %d) (l2 misses: %%f) (invalidates: %%f)\\n\",time, bandwidth,l2misses/((float)nRepeat),invalidates/((float)nRepeat));\n"%(blockingRank, loopRank)
tmpCode +=" fflush(stdout);\n"
tmpCode +=" }\n"
counter += 1
tmpCode +=" return maxBandwidth;\n"
tmpCode +="}\n"
f = open(self.tmpDirectory + "measure%d.cpp"%i,'w')
f.write(tmpCode)
f.close()
f = open(self.tmpDirectory + "measure.h",'w')
f.write(measureHPP)
f.close()
code +=" /***************************************************/\n"
if( self.mpi ):
code +=" if(rank == 0){\n"
code +=" printf(\"Maximal bandwidth: %f\\n\", maxBandwidth);\n"
code +=" printf(\"Speedup over reference: %f\\n\", maxBandwidth / referenceBandwidth );\n"
code +=" printf(\"Top-1 speedup: %.2f\\n\", maxTop1Bandwidth/maxBandwidth);\n"
code +=" printf(\"Top-5 speedup: %.2f\\n\", maxTop5Bandwidth/maxBandwidth);\n"
code +=" printf(\"SUCCESS!\\n\");\n"
if( self.mpi ):
code +=" }\n"
code +=" free(A); free(B);\n"
code +=" free(A_copy); free(B_copy);\n"
code +=" free(B_ref);\n"
code +=" free(trash1);\n"
code +=" free(trash2);\n"
if self.mpi:
code +=" MPI_Finalize();\n"
code +=" return 0;\n"
code +="}\n"
f = open(self.tmpDirectory + "main.cpp",'w')
f.write(code)
f.close()
def getScalarFraction(self,blocking):
remainderA = (self.size[0] % blocking[0])
fractionA = remainderA / float(self.size[0]) #float division: the result is compared against 0.33 in getTranspositionMicroKernel
remainderB = (self.size[self.perm[0]] % blocking[1])
fractionB = remainderB / float(self.size[self.perm[0]])
return max(fractionA, fractionB)
def getTranspositionMicroKernel(self):
# we choose the precision based on the input tensor A
availableBlocking = []
kernelName = self.floatTypeA
if( self.floatTypeA == "double complex" ):
kernelName = "doubleComplex"
elif( self.floatTypeA == "float complex" ):
kernelName = "complex"
found = 0
for filename in os.listdir("./micro-kernels"):
if( filename.find( self.architecture ) != -1 ):
if( filename.find( kernelName +"_"+self.architecture+ ".kernel") != -1 ):
blocking = (int(filename.split("_")[1].split("x")[0]),
int(filename.split("_")[1].split("x")[1]))
f = open("./micro-kernels/"+filename,'r')
code = self.indent +"//%dx%d transpose micro kernel\n"%(blocking[0],blocking[1])
code += f.read() + "\n"
f.close()
availableBlocking.append( (blocking, code) )
found += 1
if( found <= 0 ):
print "ERROR: no suitable kernels found."
exit(-1)
#determine which micro-blocking to use
availableBlocking = sorted(availableBlocking, key = lambda tup : tup[0][0], reverse=True) # sort blockings from large to small
for (blocking, code) in availableBlocking:
scalarFraction = self.getScalarFraction(blocking)
if( scalarFraction >= 0.33 ):
continue
else:
return (blocking,code)
return availableBlocking[0]
def getLoadKernel(self, A, lda, floatType, mixedPrecision, offset, define):
code = self.indent +"//Load %s\n"%A
maxRange = self.registerSizeBits / 8 / self.floatSizeA
if( mixedPrecision ):
if( self.architecture != "avx" ):
print FAIL + "Error: mixed precision is not yet supported for this architecture.\n" + ENDC
exit(-1)
vectorType = "__m256"
cast = ""
if( self.architecture == "avx" or self.architecture == "avx512" or self.architecture == "knc"): #-------------- avx ---------------------
if( floatType == "float" or floatType == "float complex"):
if( floatType == "float complex" ):
cast = "(const float*)"
if( mixedPrecision and self.architecture == "avx" ):
vectorType = "__m128"
if( self.aligned ):
functionName = "_mm_load_ps"
else:
functionName = "_mm_loadu_ps"
else:
vectorType = "__m%d"%self.registerSizeBits
if( self.aligned ):
if( self.registerSizeBits == 128 ):
functionName = "_mm_load_ps"
else:
functionName = "_mm%d_load_ps"%self.registerSizeBits
else:
if( self.registerSizeBits == 128 ):
functionName = "_mm_loadu_ps"
else:
functionName = "_mm%d_loadu_ps"%self.registerSizeBits
elif( floatType == "double" or floatType == "double complex"):
if( floatType == "double complex" ):
cast = "(const double*)"
if( mixedPrecision and self.architecture == "avx" ):
vectorType = "__m256d"
if( self.aligned ):
functionName = "_mm256_load_pd"
else:
functionName = "_mm256_loadu_pd"
else:
vectorType = "__m%dd"%self.registerSizeBits
if( self.aligned ):
if( self.registerSizeBits == 128 ):
functionName = "_mm_load_pd"
else:
functionName = "_mm%d_load_pd"%self.registerSizeBits
else:
if( self.registerSizeBits == 128 ):
functionName = "_mm_loadu_pd"
else:
functionName = "_mm%d_loadu_pd"%self.registerSizeBits
else:
print FAIL + "Error: unknown datatype.\n" + ENDC
exit(-1)
elif(self.architecture == "power"):
vectorType = "vector4double"
else:
print FAIL + "Error: architecture unknown.\n" + ENDC
exit(-1)
if( define == 0 ):
vectorType = ""
for i in range(maxRange):
if( self.aligned ):
if(self.architecture == "power"):
code += self.indent + "%s row%s%d = vec_lda(0,const_cast<float*>(%s+%d+%d*%s));\n"%(vectorType,A,i,A,offset,i,lda)
else:
code += self.indent + "%s row%s%d = %s(%s(%s + %d +%d*%s));\n"%(vectorType,A,i,functionName,cast,A,offset,i,lda)
else:
if(self.architecture == "power"):
print "non-aligned loads are not yet supported for Power ."
exit(-1)
else:
code += self.indent + "%s row%s%d = %s(%s(%s+%d+%d*%s));\n"%(vectorType,A,i,functionName,cast,A,offset,i,lda)
return code + "\n"
def getStoreKernel(self, reg, offset):
code = self.indent +"//Store B\n"
maxRange = self.registerSizeBits / 8 / self.floatSizeA
cast = ""
if( self.architecture == "avx" or self.architecture == "avx512" or self.architecture == "knc" ):
post = "ps"
if( self.floatTypeB.find("double") != -1 ):
post = "pd"
if( self.aligned ):
if( self.floatSizeB < self.floatSizeA ): # mixed precision
functionName = "_mm_store_%s"%(post)
else:
if( self.registerSizeBits == 128 ):
functionName = "_mm_store_%s"%(post)
else:
functionName = "_mm%d_store_%s"%(self.registerSizeBits,post)
else:
if( self.floatSizeB < self.floatSizeA ): # mixed precision
functionName = "_mm_storeu_%s"%(post)
else:
if( self.registerSizeBits == 128 ):
functionName = "_mm_storeu_%s"%(post)
else:
functionName = "_mm%d_storeu_%s"%(self.registerSizeBits,post)
if( self.floatTypeB == "float complex" ):
cast = "(float*)"
elif( self.floatTypeB == "double complex" ):
cast = "(double*)"
elif(self.architecture == "power"):
functionName = "vec_sta"
for i in range(maxRange):
if( self.aligned ):
if(self.architecture == "power"):
code += self.indent + "%s(%s, 0 ,B + %d + %i * ldb);\n"%(functionName, reg.replace("#",str(i)),offset,i)
else:
code += self.indent + "%s(%s(B + %d + %i * ldb), %s);\n"%(functionName,cast,offset, i,reg.replace("#",str(i)))
else:
if(self.architecture == "power"):
print "non-aligned stores are not yet supported for Power ."
exit(-1)
else:
code += self.indent + "%s(%s(B + %d + %i * ldb), %s);\n"%(functionName,cast,offset, i,reg.replace("#",str(i)))
return code
def getScaleKernel(self, A, alpha):
code = self.indent +"//Scale %s\n"%A
maxRange = self.registerSizeBits / 8 / self.floatSizeA
if( self.floatTypeA == "float" or self.floatTypeA == "float complex"):
if( self.registerSizeBits == 128 ):
functionName = "_mm_mul_ps"
else:
functionName = "_mm%d_mul_ps"%self.registerSizeBits
if( self.floatTypeA == "double" or self.floatTypeA == "double complex"):
if( self.registerSizeBits == 128 ):
functionName = "_mm_mul_pd"
else:
functionName = "_mm%d_mul_pd"%self.registerSizeBits
for i in range(maxRange):
if(self.architecture == "power"):
code += self.indent + "row%s%d = vec_mul(row%s%d, %s);\n"%(A,i,A,i,alpha)
else:
code += self.indent + "row%s%d = %s(row%s%d, %s);\n"%(A,i,functionName,A,i,alpha)
return code + "\n"
#d = a * b + c
def getFmaKernel(self, a, b, c, d):
code = ""
if(self.architecture == "power"):
code += self.indent + "%s = vec_madd( %s, %s, %s);\n"%(d,a,b,c)
else:
if( self.floatTypeB.find("float") != -1):
if( self.architecture == "avx512" or self.architecture == "knc" ):
code += self.indent + "%s = _mm512_fmadd_ps( %s, %s, %s);\n"%(d,a,b,c)
else:
if( self.floatTypeA.find("double") != -1 and self.floatTypeB.find("float") != -1): #mixed precision
code += self.indent + "%s = _mm_add_ps( _mm_mul_ps(%s, %s), _mm256_cvtpd_ps(%s));\n"%(d,a,b,c)
else:
if( self.registerSizeBits == 128 ):
code += self.indent + "%s = _mm_add_ps( _mm_mul_ps(%s, %s), %s);\n"%(d,a,b,c)
else:
code += self.indent + "%s = _mm256_add_ps( _mm256_mul_ps(%s, %s), %s);\n"%(d,a,b,c)
if( self.floatTypeB.find("double") != -1):
if( self.architecture == "avx512" or self.architecture == "knc" ):
code += self.indent + "%s = _mm512_fmadd_pd( %s, %s, %s);\n"%(d,a,b,c)
else:
if( self.floatTypeA.find("float") != -1): #mixed precision
code += self.indent + "%s = _mm256_add_pd( _mm256_mul_pd(%s, %s), _mm256_cvtps_pd((%s)));\n"%(d,a,b,c)
else:
if( self.registerSizeBits == 128 ):
code += self.indent + "%s = _mm_add_pd( _mm_mul_pd(%s, %s), %s);\n"%(d,a,b,c)
else:
code += self.indent + "%s = _mm256_add_pd( _mm256_mul_pd(%s, %s), %s);\n"%(d,a,b,c)
return code
def getBroadcastVariables(self, withType):
code = " "
if(self.perm[0] != 0 and self.scalar == 0):
alphaFloatType = "__m%d"%self.registerSizeBits
if(self.floatTypeA =="double" or self.floatTypeA =="double complex"):
alphaFloatType = "__m%dd"%self.registerSizeBits
if(self.architecture == "power"):
alphaFloatType = "vector4double"
betaFloatType = "__m%d"%self.registerSizeBits
if( (self.floatTypeB.find("float") != -1) and (self.floatTypeA.find("double") != -1)):
betaFloatType = "__m%d"%(self.registerSizeBits/2)
if(self.floatTypeB =="double" or self.floatTypeB =="double complex"):
betaFloatType = "__m%dd"%self.registerSizeBits
if(self.architecture == "power"):
betaFloatType = "vector4double"
if(withType==1):
code += " ,const %s ®_alpha"%alphaFloatType
else:
code += " , reg_alpha"
if(self.beta !=0):
if(withType==1):
code += " ,const %s ®_beta"%betaFloatType
else:
code += " , reg_beta"
else:
alphaFloatType = "float"
if( self.floatTypeA.find("double") != -1 ):
alphaFloatType = "double"
betaFloatType = "float"
if( self.floatTypeB.find("double") != -1 ):
betaFloatType = "double"
if(withType==1):
code += " ,const %s alpha"%(alphaFloatType)
if(self.beta !=0 ):
code += " ,const %s beta"%(betaFloatType)
else:
code += " ,alpha"
if(self.beta !=0 ):
code += " ,beta"
return code
def getMicroKernelHeader(self,blocking, prefetchDistance = 0, staticAndInline = 0):
code = "//B_ji = alpha * A_ij + beta * B_ji\n"
transposeMicroKernelname = "%sTranspose%dx%d"%(ttc_util.getFloatPrefix(self.floatTypeA, self.floatTypeB),blocking[0], blocking[1])
if( self.perm[0] == 0):
transposeMicroKernelname += "_0"
if( ttc_util.streamingStoresApplicable(self.ldb, self.size, self.perm, self.beta, self.cacheLineSize, self.floatSizeB, self.streamingStores) ):
transposeMicroKernelname += "_streamingstore"
if( self.beta == 0 ):
transposeMicroKernelname += "_bz"
if( prefetchDistance > 0):
transposeMicroKernelname += "_prefetch_%d"%prefetchDistance
#if( staticAndInline ):
# transposeMicroKernelname += "_"
# for i in self.perm:
# transposeMicroKernelname += str(i)
# transposeMicroKernelname +="_"
# for idx in range(len(self.size)):
# transposeMicroKernelname += "%d"%(self.size[idx])
# if(idx != len(self.size)-1):
# transposeMicroKernelname +="x"
static = ""
if staticAndInline :
static = "static INLINE "
if( prefetchDistance > 0):
return transposeMicroKernelname, code +static+"void %s(const %s* __restrict__ A, const int lda, %s* __restrict__ B, const int ldb, const %s* __restrict__ Anext0, %s* __restrict__ Bnext0, const %s* __restrict__ Anext1, %s* __restrict__ Bnext1%s)\n{\n"""%(transposeMicroKernelname, self.floatTypeA,self.floatTypeB, self.floatTypeA,self.floatTypeB, self.floatTypeA,self.floatTypeB,self.getBroadcastVariables(1))
else:
if( self.perm[0] != 0):
return transposeMicroKernelname, code +static+"void %s(const %s* __restrict__ A, const int lda, %s* __restrict__ B, const int ldb%s)\n{\n"%(transposeMicroKernelname, self.floatTypeA, self.floatTypeB,self.getBroadcastVariables(1))
else:
if( staticAndInline ):
return transposeMicroKernelname, code +"template<int size0>\nvoid %s(const %s* __restrict__ A, int lda1, const int lda, %s* __restrict__ B, const int ldb1, const int ldb%s)\n{\n"%(transposeMicroKernelname, self.floatTypeA, self.floatTypeB,self.getBroadcastVariables(1))
else:
return transposeMicroKernelname, code +static+"void %s(const %s* __restrict__ A, int lda1, const int lda, %s* __restrict__ B, const int ldb1, const int ldb%s)\n{\n"%(transposeMicroKernelname, self.floatTypeA, self.floatTypeB,self.getBroadcastVariables(1))
def getUpdateAndStore(self):
numIterations = 1
if( self.floatTypeA.find("float") != -1 and self.floatTypeB.find("double") != -1):
numIterations = 2
code = ""
for iteration in range(numIterations):
offset = (self.registerSizeBits / 8 / self.floatSizeB) * iteration
if( self.beta != 0 ):
loadKernelB = self.getLoadKernel("B","ldb", self.floatTypeB, self.floatTypeA != self.floatTypeB, offset, iteration == 0)
if( iteration == 0 ):
code += self.getScaleKernel("A", "reg_alpha")
if(self.beta != 0 ):
code += loadKernelB
for i in range(self.microBlocking[0][0]):
if( self.floatTypeA.find("float") != -1 and self.floatTypeB.find("double") != -1): #mixed precision
if( iteration == 0):
code += self.getFmaKernel("rowB%d"%i, "reg_beta", "_mm256_castps256_ps128(rowA%d)"%i, "rowB%d"%i)
else:
code += self.getFmaKernel("rowB%d"%i, "reg_beta", "_mm256_extractf128_ps(rowA%d, 0x1)"%i, "rowB%d"%i)
else:
code += self.getFmaKernel("rowB%d"%i, "reg_beta", "rowA%d"%i, "rowB%d"%i)
code += self.getStoreKernel("rowB#", offset)
else:
if( self.floatTypeA.find("float") != -1 and self.floatTypeB.find("double") != -1): #mixed precision
if( iteration == 0):
code += self.getStoreKernel("_mm256_cvtps_pd(_mm256_castps256_ps128(rowA#))", offset)
else:
code += self.getStoreKernel("_mm256_cvtps_pd(_mm256_extractf128_ps(rowA#, 0x1))", offset)
elif( self.floatTypeA.find("double") != -1 and self.floatTypeB.find("float") != -1): #mixed precision
code += self.getStoreKernel("_mm256_cvtpd_ps(rowA#)", offset)
else:
code += self.getStoreKernel("rowA#", offset)
return code
def getPrefetchCode(self, ii, jj, numBlocksI, numBlocksJ, prefetchDistance, opt):
blockA = self.microBlocking[0][0]
blockB = self.microBlocking[0][1]
numBlocksTotal = numBlocksI * numBlocksJ
blockId = ii * numBlocksJ + jj + prefetchDistance
tile = 0
if( blockId >= numBlocksTotal ):
tile = 1
blockId = blockId % numBlocksTotal
iPrefetch = blockId / numBlocksJ
jPrefetch = blockId % numBlocksJ
if ( iPrefetch == 0):
if( jPrefetch == 0):
offsetA = "Anext%d"%tile
offsetB = "Bnext%d"%tile
else:
offsetA = "Anext%d"%(tile)
offsetB = "Bnext%d + %d"%(tile,jPrefetch * blockB)
else:
if( jPrefetch == 0):
offsetA = "Anext%d + %d"%(tile,iPrefetch * blockA)
offsetB = "Bnext%d"%(tile)
else:
offsetA = "Anext%d + %d"%(tile,iPrefetch * blockA)
offsetB = "Bnext%d + %d"%(tile,jPrefetch * blockB)
numElementsPerCacheLineA = self.cacheLineSize / self.floatSizeA
numElementsPerCacheLineB = self.cacheLineSize / self.floatSizeB
code = ""
if( (iPrefetch * blockA) % numElementsPerCacheLineA == 0 ): #we only prefetch once per cache-line
code += self.indent + "//prefetch A\n"
for l in range(blockA):
if( self.architecture == "avx" or self.architecture == "knc" or self.architecture == "avx512" ):
code += self.indent + "_mm_prefetch((char*)(%s + %d * lda), _MM_HINT_T2);\n"%(offsetA,l + jPrefetch * blockA)
elif( self.architecture == "power" ):
code += self.indent + "__prefetch_by_load((const void*)(%s + %d * lda));\n"%(offsetA,l + jPrefetch * blockA)
else:
print "ERROR: wrong architecture!"
exit(-1)
if( opt != "streamingstore" ):
if( (jPrefetch * blockB) % numElementsPerCacheLineB == 0 ): #we only prefetch once per cache-line
code += self.indent + "//prefetch B\n"
for l in range(blockB):
if( self.architecture == "avx" or self.architecture == "knc" or self.architecture == "avx512" ):
code += self.indent + "_mm_prefetch((char*)(%s + %d * ldb), _MM_HINT_T2);\n"%(offsetB,l + iPrefetch * blockB)
elif( self.architecture == "power" ):
code += self.indent + "__prefetch_by_load((const void*)(%s + %d * lda));\n"%(offsetA,l + jPrefetch * blockA)
else:
print "ERROR: wrong architecture!"
exit(-1)
return code
def generateTranspositionKernel(self, blockings, prefetchDistances, staticAndInline=0, optimizations = []):
# This function generates the transpose.cpp file (_not_ the transpose%d.cpp files)
#
# staticAndInline this is _only_ set if the final/fastest version will be dumped to file
loadKernelA = self.getLoadKernel("A","lda", self.floatTypeA, 0, 0, 1)
retHPP = ""
ret = ""
#generate DxD micro kernel
for opt in optimizations:
if( self.perm[0] != 0 ):
transposeMicroKernelname, tmpCode = self.getMicroKernelHeader(self.microBlocking[0], 0, staticAndInline)
code = ""
if( staticAndInline ):
code += "#ifndef _TTC_%s\n"%transposeMicroKernelname.upper()
code += "#define _TTC_%s\n"%transposeMicroKernelname.upper()
code += tmpCode
retHPP += tmpCode.split("\n")[1]+";\n"
if( self.scalar != 0 ):
code += " for(int i=0; i < %d; i++)\n"%self.microBlocking[0][0]
code += " for(int j=0; j < %d; j++)\n"%self.microBlocking[0][1]
if(self.beta != 0):
code += " B[j + i * ldb] = alpha*A[i + j * lda] + beta*B[j + i * ldb];\n"
else:
code += " B[j + i * ldb] = alpha*A[i + j * lda];\n"
else:
code += loadKernelA
code += self.microBlocking[1]
code += self.getUpdateAndStore()
code += "}\n"
if( staticAndInline ):
code += "#endif\n"
ret += code
blockA = self.microBlocking[0][0]
blockB = self.microBlocking[0][1]
else:
blockA = 1
blockB = 1
if( blockA != blockB ):
print "Error: non-square micro-kernels are not supported yet."
exit(-1)
if( self.perm[0] != 0):
for prefetchDistance in prefetchDistances:
# generate arbitrary blockings based on the DxD micro kernel
for blocking in blockings:
if( opt == "streamingstore" and (not ttc_util.streamingStoresApplicable(self.ldb, self.size, self.perm, self.beta, self.cacheLineSize, self.floatSizeB, self.streamingStores)) ):
continue #skip this blocking if necessary
if ( blocking[0] % blockA == 0 and blocking[1] % blockB == 0 and
(blocking[0] / blockA > 1 or blocking[1] / blockB > 1 or
prefetchDistance > 0)):
transposeMicroKernelname, tmpCode = self.getMicroKernelHeader(blocking, prefetchDistance, staticAndInline)
code = ""
if( staticAndInline ):
code += "#ifndef _TTC_%s\n"%transposeMicroKernelname.upper()
code += "#define _TTC_%s\n"%transposeMicroKernelname.upper()
code += tmpCode
retHPP += tmpCode.split("\n")[1]+";\n"
#replicate the micro-transpose to build the bigger transpose
numBlocksA = blocking[0] / blockA
numBlocksB = blocking[1] / blockB
if( opt == "streamingstore"):
code += self.indent + "%s B_buffer[%d * %d] __attribute__((aligned(%d)));\n"%(self.floatTypeB, blocking[0], blocking[1],self.cacheLineSize)
for i in range(numBlocksA):
for j in range(numBlocksB):
offsetA = ""
offsetB = ""
if ( i == 0):
if( j == 0):
offsetA = ""
offsetB = ""
else:
offsetA = " + %d * lda"%(j * blockA)
offsetB = " + %d"%(j * blockB)
else:
if( j == 0):
offsetA =" + %d"%(i * blockA)
offsetB =" + %d * ldb"%(i * blockB)
else:
offsetA = " + %d + %d * lda"%(i * blockA,j * blockA)
offsetB = " + %d + %d * ldb"%(j * blockB,i * blockB)
#prefetch next block
if( prefetchDistance > 0 ):
#Citation from the Intel Optimization Manual:
#"It may seem convenient to cluster all of PREFETCH instructions at the beginning of a loop
#body or before a loop, but this can lead to severe performance degradation. In order
#to achieve the best possible performance, PREFETCH instructions must be interspersed
#with other computational instructions in the instruction sequence rather than
#clustered together"
code += self.getPrefetchCode(i,j,numBlocksA,
numBlocksB, prefetchDistance, opt)
transposeName = "%sTranspose%dx%d"%(ttc_util.getFloatPrefix(self.floatTypeA, self.floatTypeB),blockA, blockB)
if( self.perm[0] == 0):
transposeName += "_0"
if( ttc_util.streamingStoresApplicable(self.ldb, self.size, self.perm, self.beta, self.cacheLineSize, self.floatSizeB, self.streamingStores) ):
transposeName += "_streamingstore"
if( self.beta == 0 ):
transposeName += "_bz"
code += self.indent + "//invoke micro-transpose\n"
if( opt == "streamingstore"):
code += self.indent + "%s(A%s, lda, B_buffer%s, %d%s);\n\n"%(transposeName, offsetA, offsetB.replace("ldb","%d"%blocking[1]),blocking[1],self.getBroadcastVariables(0))
else:
code += self.indent + "%s(A%s, lda, B%s, ldb%s);\n\n"%(transposeName, offsetA, offsetB,self.getBroadcastVariables(0))
if( opt == "streamingstore"):
elementsPerRegister = self.registerSizeBits / 8 / self.floatSizeB
code += self.indent + "// write buffer to main-memory via non-temporal stores\n"
code += self.indent + "for( int i = 0; i < %d; i++){\n"%(blocking[0])
if( not ttc_util.streamingStoresApplicable(self.ldb, self.size, self.perm, self.beta, self.cacheLineSize, self.floatSizeB, self.streamingStores) ):
print "ERROR (internal): blockB is not a multiple of the cacheline size"
exit(-1)
for j in range((blocking[1] / elementsPerRegister) ): #store one cacheline at a time
cast = ""
if( self.floatTypeB == "float complex" ):
cast = "(float*)"
elif( self.floatTypeB == "double complex" ):
cast = "(double*)"
post = "ps"
if( self.floatTypeB.find("double") != -1 ):
post = "pd"
if( self.registerSizeBits == 128 ):
functionNameStream = "_mm_stream_%s"%post
functionNameLoad = "_mm_load_%s"%post
else:
functionNameStream = "_mm%d_stream_%s"%(self.registerSizeBits,post)
functionNameLoad = "_mm%d_load_%s"%(self.registerSizeBits,post)
code += self.indent + self.indent + "%s(%s(B + i * ldb + %d), %s(%s(B_buffer + i * %d + %d)));\n"%(functionNameStream, cast, j *
elementsPerRegister, functionNameLoad, cast, blocking[1], j * elementsPerRegister)
code += self.indent + "}\n"
code += "}\n"
if( staticAndInline ):
code += "#endif\n"
ret += code
else: # perm[0] == 0
tmpBlockings = copy.deepcopy(sorted(blockings)) #it's important to sort the
#blockings in an ascending order because all blockings will
#use the 1x1 as a building block. This is done to trick the
#compiler into issuing vmovntps, when needed
if( not (tmpBlockings[0][0] == 1 and tmpBlockings[0][1] == 1) ): # (1,1) needs to be present for every blocking
tmpBlockings = [(1,1)] + tmpBlockings
for blocking in tmpBlockings:
transposeMicroKernelname, tmpCode = self.getMicroKernelHeader(blocking, 0, staticAndInline)
code = ""
if( staticAndInline ):
code += "#ifndef _TTC_%s\n"%transposeMicroKernelname.upper()
code += "#define _TTC_%s\n"%transposeMicroKernelname.upper()
code += tmpCode
retHPP += tmpCode.split("\n")[1]+";\n"
indent = self.indent
alphaFloatType = "float"
if( self.floatTypeA.find("double") != -1 ):
alphaFloatType = "double"
betaFloatType = "float"
if( self.floatTypeB.find("double") != -1 ):
alphaFloatType = "double"
offsetB = ""
offsetA = ""
if( blocking[0] > 1 ):
code += indent + "for(int ia = 0; ia < %d; ia++)\n"%(blocking[0])
offsetB += " + ia * ldb"
offsetA += " + ia * lda1"
indent += self.indent
if( blocking[1] > 1 ):
code += indent + "for(int ib = 0; ib < %d; ib++)\n"%(blocking[1])
offsetB += " + ib * ldb1"
offsetA += " + ib * lda"
indent += self.indent
if( (blocking[0] == 1 and blocking[1] == 1) or opt != "streamingstore"):
if self.architecture != "power":
if( opt == "streamingstore" ):
code += indent + "#pragma vector nontemporal\n"
code += indent + "#pragma omp simd\n"
if( staticAndInline ):
code += indent + "for(int i0 = 0; i0 < size0; i0++)\n"
else:
code += indent + "for(int i0 = 0; i0 < %d; i0++)\n"%(self.size[0])
updateStr = ""
outStr = "B[i0%s]"%offsetB
if( len(self.size) == 1):
inStr = "A[i0]"
else:
inStr = "A[i0%s]"%offsetA
if( self.beta == 0.0 ):
updateStr += "%s%s = alpha * %s;\n"%(indent + self.indent, outStr, inStr)
else:
updateStr += "%s%s = alpha * %s + beta * %s;\n"%(indent + self.indent, outStr, inStr, outStr)
code += updateStr
else:
streamStr = ""
if( opt == "streamingstore" ):
streamStr = "_streamingstore"
betaStr = ""
if( self.beta == 0 ):
betaStr = "_bz"
if( staticAndInline ):
code += indent + "%sTranspose1x1_0%s%s<size0>(A%s, lda1, lda, B%s, ldb1, ldb, alpha);\n"%(ttc_util.getFloatPrefix(self.floatTypeA, self.floatTypeB),streamStr, betaStr, offsetA, offsetB)
else:
code += indent + "%sTranspose1x1_0%s%s(A%s, lda1, lda, B%s, ldb1, ldb, alpha);\n"%(ttc_util.getFloatPrefix(self.floatTypeA, self.floatTypeB), streamStr, betaStr, offsetA, offsetB)
code += "}\n"
if( staticAndInline ):
code += "#endif\n"
ret += code
return (ret,retHPP)
#only used in the case of perm[0] == 0
def getUpdateString(self,indent):
outStr = "B[i0 + ib * %d + ia * ldb]"%self.size[0]
inStr = "A[i0 + ia * %d + ib * lda]"%self.size[0]
ret = ""
if(self.beta != 0):
ret += "%s%s = alpha*%s + beta*%s;\n"%(indent + self.indent, outStr, inStr,outStr)
else:
ret += "%s%s = alpha*%s;\n"%(indent + self.indent, outStr, inStr)
return ret
def getTrashCache(self):
cppCode = "void trashCache(double *A, double *B, int n)\n"
cppCode += "{\n"
if self.parallelize != 0:
cppCode += " #pragma omp parallel for\n"
cppCode += " for(int i = 0; i < n; i++)\n"
cppCode += " A[i] += 0.999 * B[i];\n"
cppCode += "}\n"
return cppCode
def generateImplementations(self):
#generate CPP and HPP files
sortedImplementations = copy.deepcopy(self.implementations)
sortedImplementations.sort(key=lambda x: x.getBlocking()) #sort according to the
#blocking, this is done to reduce the overhead during compilation
numImplementations = len(self.implementations)
numFiles = max((numImplementations + self.minImplementationsPerFile -1) / self.minImplementationsPerFile, 1)
if( numFiles > 20 ):
numFiles = (numImplementations + self.maxImplementationsPerFile -1) / self.maxImplementationsPerFile
numSolutionsPerFile = (numImplementations + numFiles - 1) / numFiles
cppCode = ""
if self.architecture == "avx" or self.architecture == "knc" or self.architecture == "avx512":
cppCode += "#include <xmmintrin.h>\n"
cppCode += "#include <immintrin.h>\n"
elif self.architecture == "power":
cppCode += "#include <builtins.h>\n"
cppCode += "#include <altivec.h>\n"
cppCode += "#include <complex.h>\n"
cppCode += "#if defined(__ICC) || defined(__INTEL_COMPILER)\n"
cppCode += "#define INLINE __forceinline\n"
cppCode += "#else\n"
cppCode += "#define INLINE __attribute__((always_inline))\n"
cppCode += "#endif\n\n"
hppCode = ""
if self.architecture == "avx" or self.architecture == "knc" or self.architecture == "avx512":
hppCode += "#include <xmmintrin.h>\n"
hppCode += "#include <immintrin.h>\n"
elif self.architecture == "power":
hppCode += "#include <builtins.h>\n"
hppCode += "#include <altivec.h>\n"
hppCode += "#include<complex.h>\n"
tmpPrefetchDistances = list(self.prefetchDistances)
#we need prefetch distance 0 for the remainder while-loop
tmpPrefetchDistances.append(0)
tmpPrefetchDistances = set( tmpPrefetchDistances )
(retCpp, retHpp) = self.generateTranspositionKernel(self.blockings, tmpPrefetchDistances, 0, self.getAppropriateOptimizations() )
hppCode += retHpp
hppCode += "void trashCache(double *A, double *B, int n);\n"
hppCode += self.referenceImplementation.getHeader()
for implementation in self.implementations:
hppCode += implementation.getHeader()
f = open(self.tmpDirectory+"transpose.h",'w')
f.write(hppCode)
f.close()
cppCode += retCpp
f = open(self.tmpDirectory+"transpose.cpp",'w')
f.write(cppCode)
f.close()
self.generateOffsetFile(self.tmpDirectory)
implementationCounter = 1 #include reference implementation
for i in range(numFiles):
cppCode = ""
cppCode += "#include <stdio.h>\n"
cppCode += "#include <queue>\n"
cppCode += "#include <omp.h>\n"
cppCode += "#include <complex.h>\n"
cppCode += "#include \"transpose.h\"\n"
cppCode += "#include \"ttc_offset.h\"\n"
if( i == 0):
cppCode += self.getTrashCache()
cppCode += self.referenceImplementation.getImplementation(self.parallelize)
cppImplementations = ""
for j in range(i * numSolutionsPerFile, min(numImplementations,(i+1)*numSolutionsPerFile)):
implementation = sortedImplementations[j]
implementationCounter += 1
cppImplementations += implementation.getImplementation(self.parallelize)
cppCode += cppImplementations
f = open(self.tmpDirectory+"transpose%d.cpp"%i,'w')
f.write(cppCode)
f.close()
if(implementationCounter-1 != numImplementations ):
print FAIL+"ERROR: not all implementations dumped to file"+ENDC
exit(-1)
|
import gettext
from gettext import gettext as _
gettext.textdomain('slidewall')
import logging
logger = logging.getLogger('slidewall')
from slidewall_lib.AboutDialog import AboutDialog
class AboutSlidewallDialog(AboutDialog):
__gtype_name__ = "AboutSlidewallDialog"
def finish_initializing(self, builder): # pylint: disable=E1002
"""Set up the about dialog"""
super(AboutSlidewallDialog, self).finish_initializing(builder)
# Code for other initialization actions should be added here.
|
from adafruit_display_text.label import Label
class ApplicationScreen(object):
""" This is the interface for application screens """
pyportal = None
logger = None
def change_to_state(self, state_name, current_state, states):
if current_state:
self.logger.debug('Exiting %s', current_state.name)
current_state.exit()
current_state = states[state_name]
self.logger.debug('Entering %s', current_state.name)
current_state.enter()
def touch_in_button(self, t, b):
in_horizontal = b['left'] <= t[0] <= b['right']
in_vertical = b['top'] <= t[1] <= b['bottom']
return in_horizontal and in_vertical
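# Example (hypothetical geometry; the dict keys 'left', 'right', 'top' and
# 'bottom' are the ones read above): for
#   button = {'left': 10, 'right': 110, 'top': 20, 'bottom': 60}
# a touch at (50, 30) lies inside the button, while (50, 90) does not.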
def create_text_areas(self, configs):
"""Given a list of area specifications, create and return test areas."""
text_areas = []
for cfg in configs:
textarea = Label(cfg['font'], text=' '*cfg['size'])
textarea.x = cfg['x']
textarea.y = cfg['y']
textarea.color = cfg['color']
text_areas.append(textarea)
return text_areas
def __init__(self, pyportal, logger):
self.pyportal = pyportal
self.logger = logger
def clear_splash(self):
for _ in range(len(self.pyportal.splash) - 1):
self.pyportal.splash.pop()
def tick(self, now):
""" handle one pass of the main loop """
pass
def exit(self):
""" exit and clear splash """
self.clear_splash()
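# Illustrative usage sketch (not part of the original module). touch_in_button
# takes a touch point (x, y) and a button described by its bounding box; the
# button dict below is a hypothetical example.
#     button = {'left': 0, 'right': 100, 'top': 0, 'bottom': 40}
#     screen = ApplicationScreen(pyportal, logger)
#     screen.touch_in_button((50, 20), button)    # True: inside the box
#     screen.touch_in_button((150, 20), button)   # False: right of the box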
|
from unittest.mock import MagicMock
from randovania.gui.lib import common_qt_lib
def test_get_network_client(skip_qtbot, qapp):
qapp.network_client = MagicMock()
assert common_qt_lib.get_network_client() is qapp.network_client
def test_get_game_connection(skip_qtbot, qapp):
qapp.game_connection = MagicMock()
assert common_qt_lib.get_game_connection() is qapp.game_connection
|
import logging
import traceback
from ftplib import FTP
from pathlib import PurePath
from FileListRetriever import FileListRetriever
class FTPRetriever(FileListRetriever):
"""
File retriever for files held on an FTP server.
"""
def __init__(self, instrument_id, logger, configuration=None):
super().__init__(instrument_id, logger, configuration)
self._ftp = None
@staticmethod
def _get_config_entries():
return ['Server', 'Port', 'User', 'Password', 'Source Folder', 'File Specification']
@staticmethod
def get_type():
return "FTP"
# noinspection PyBroadException
def test_configuration(self):
config_ok = True
try:
with FTP() as ftp:
try:
ftp.connect(self._configuration['Server'], port=int(self._configuration['Port']))
except Exception:
config_ok = False
print("Cannot connect to FTP server: " + traceback.format_exc())
self.log(logging.CRITICAL, "Cannot connect to FTP server: "
+ traceback.format_exc())
if config_ok:
try:
ftp.login(user=self._configuration['User'], passwd=self._configuration['Password'])
except Exception:
config_ok = False
print("Cannot log in to FTP server: " + traceback.format_exc())
self.log(logging.CRITICAL, "Cannot log in to FTP server: "
+ traceback.format_exc())
if config_ok:
try:
ftp.cwd(self._configuration['Source Folder'])
except Exception:
config_ok = False
print("Cannot access source folder: " + traceback.format_exc())
self.log(logging.CRITICAL, "Cannot access source folder: "
+ traceback.format_exc())
ftp.quit()
except Exception:
config_ok = False
self.log(logging.CRITICAL, "Error checking FTP configuration: "
+ traceback.format_exc())
return config_ok
# noinspection PyBroadException
def startup(self):
result = True
try:
self._ftp = FTP()
self._ftp.connect(self._configuration['Server'], port=int(self._configuration['Port']))
self._ftp.login(user=self._configuration['User'], passwd=self._configuration['Password'])
self._ftp.cwd(self._configuration['Source Folder'])
except Exception:
self.log(logging.CRITICAL, "Cannot log in to FTP server: "
+ traceback.format_exc())
result = False
return result
def shutdown(self):
self._ftp.close()
self._ftp = None
def _get_all_files(self):
all_files = self._ftp.nlst()
return list(filter(lambda name: PurePath(name).match(self._configuration["File Specification"]), all_files))
def _load_file(self, filename):
result = bytearray()
self._ftp.retrbinary(f'RETR {filename}', callback=result.extend)
return result
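# Hypothetical configuration sketch (illustration only; the keys mirror
# _get_config_entries above, the values are invented):
# config = {'Server': 'ftp.example.com', 'Port': '21',
#           'User': 'anonymous', 'Password': 'guest@example.com',
#           'Source Folder': '/incoming', 'File Specification': '*.csv'}
# retriever = FTPRetriever('instrument-1', logging.getLogger(__name__), config)
# if retriever.test_configuration() and retriever.startup():
#     print(retriever._get_all_files())
#     retriever.shutdown()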
|
import os
import re
import six
def digits_only(string):
"""Return all digits that the given string starts with."""
match = re.match(r'(?P<digits>\d+)', string)
if match:
return int(match.group('digits'))
return 0
def to_unicode(string):
try:
return six.u(string)
except: # noqa: E722
# probably already decoded
return string
def is_on_path(exec_name):
"""
Indicates if the command 'exec_name' appears to be installed.
Returns:
True --- if it is installed
False --- if it isn't
"""
for dirpath in os.environ["PATH"].split(os.pathsep):
path = os.path.join(dirpath, exec_name)
if os.path.exists(path) and os.access(path, os.X_OK):
return True
return False
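if __name__ == '__main__':
    # Minimal self-check added for illustration; it only exercises the helpers
    # defined above and makes no assumptions beyond the standard library.
    assert digits_only('42abc') == 42
    assert digits_only('abc') == 0
    print(is_on_path('python'))  # True on systems with a python executable on PATH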
|
import json
import pytest
from newmansound.model import Artist, Album, Song
from newmansound.restweb import app, PlaylistService, SongService
from newmansound.schema import AlbumSchema, ArtistSchema, SongSchema
from tests.fixtures import client, engine, playlist_service, session
from tests.helpers import add_album, add_artist, add_song
class TestAlbumList:
def test_get_returns_list_of_albums(self, client, session):
add_album(session)
add_album(session)
album_schema = AlbumSchema()
albums = album_schema.load(json.loads(client.get('/album').data.decode('utf8')))
assert len(albums) == 2
class TestAlbum:
def test_get_returns_album_with_a_given_id(self, client, session):
album1 = add_album(session, name='album')
album_schema = AlbumSchema()
album = album_schema.load(json.loads(client.get('/album/' + str(album1.id)).data.decode('utf8')))
assert album.data['name'] == 'album'
class TestArtistList:
def test_get_returns_list_of_artists(self, client, session):
add_artist(session)
add_artist(session)
artist_schema = ArtistSchema()
artists = artist_schema.load(json.loads(client.get('/artist').data.decode('utf8')))
assert len(artists) == 2
class TestArtist:
def test_get_returns_artist_with_a_given_id(self, client, session):
artist1 = add_artist(session, name='artist')
artist_schema = ArtistSchema()
artist = artist_schema.load(json.loads(client.get('/artist/' + str(artist1.id)).data.decode('utf8')))
assert artist.data['name'] == 'artist'
class TestPlaylistRequest:
def test_post_creates_playlist_request(self, client, playlist_service, session):
song = add_song(session, path='newsong')
client.post('/playlist', data=json.dumps({'id': song.id}), content_type='application/json')
assert playlist_service.dequeue_song().path == 'newsong'
def test_post_status_400_on_bad_data(self, client, playlist_service, session):
assert client.post('/playlist', data=json.dumps({}), content_type='application/json').status_code == 400
class TestSongList:
def test_get_returns_list_of_songs(self, client, session):
add_song(session)
add_song(session)
song_schema = SongSchema()
songs = song_schema.load(json.loads(client.get('/song').data.decode('utf8')))
assert len(songs) == 2
class TestSong:
def test_get_returns_song_with_a_given_id(self, client, session):
song1 = add_song(session, name='song')
song_schema = SongSchema()
song = song_schema.load(json.loads(client.get('/song/' + str(song1.id)).data.decode('utf8')))
assert song.data['name'] == 'song'
def test_get_returns_404_for_invalid_id(self, client, session):
assert client.get('/song/invalid').status_code == 404
|
import subprocess
from mycroft.tts import TTS, TTSValidator
__author__ = 'jdorleans'
NAME = 'spdsay'
class SpdSay(TTS):
def __init__(self, lang, voice):
super(SpdSay, self).__init__(lang, voice)
def execute(self, sentence):
subprocess.call(
['spd-say', '-l', self.lang, '-t', self.voice, sentence])
class SpdSayValidator(TTSValidator):
def __init__(self):
super(SpdSayValidator, self).__init__()
def validate_lang(self, lang):
# TODO
pass
def validate_connection(self, tts):
try:
subprocess.call(['spd-say', '--version'])
except:
raise Exception(
                'SpdSay is not installed. Run on terminal: sudo apt-get '
'install speech-dispatcher')
def get_instance(self):
return SpdSay
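# Illustrative sketch (not part of the plugin): constructing the engine and
# speaking one sentence through spd-say; the lang and voice values are examples.
# tts = SpdSay(lang='en', voice='male1')
# tts.execute('Hello from Mycroft')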
|
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class OneFichierCom(SimpleHoster):
__name__ = "OneFichierCom"
__type__ = "hoster"
__version__ = "0.76"
__pattern__ = r'https?://(?:www\.)?(?:(?P<ID1>\w+)\.)?(?P<HOST>1fichier\.com|alterupload\.com|cjoint\.net|d(es)?fichiers\.com|dl4free\.com|megadl\.fr|mesfichiers\.org|piecejointe\.net|pjointe\.com|tenvoi\.com)(?:/\?(?P<ID2>\w+))?'
__description__ = """1fichier.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("fragonib", "fragonib[AT]yahoo[DOT]es"),
("the-razer", "daniel_ AT gmx DOT net"),
("zoidberg", "zoidberg@mujmail.cz"),
("imclem", None),
("stickell", "l.stickell@yahoo.it"),
("Elrick69", "elrick69[AT]rocketmail[DOT]com"),
("Walter Purcaro", "vuolter@gmail.com")]
NAME_PATTERN = r'>FileName :</td>\s*<td.*>(?P<N>.+?)<'
SIZE_PATTERN = r'>Size :</td>\s*<td.*>(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
OFFLINE_PATTERN = r'File not found !\s*<'
COOKIES = [("1fichier.com", "LG", "en")]
WAIT_PATTERN = r'>You must wait (\d+) minutes'
def setup(self):
self.multiDL = self.premium
self.resumeDownload = True
def handleFree(self, pyfile):
id = self.info['pattern']['ID1'] or self.info['pattern']['ID2']
url, inputs = self.parseHtmlForm('action="https://1fichier.com/\?%s' % id)
if not url:
self.fail(_("Download link not found"))
if "pass" in inputs:
inputs['pass'] = self.getPassword()
inputs['submit'] = "Download"
self.download(url, post=inputs)
def handlePremium(self, pyfile):
return self.handleFree(pyfile)
getInfo = create_getInfo(OneFichierCom)
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
import factory
from pages.models import Page
class UserFactory(factory.DjangoModelFactory):
class Meta:
model = User
username = factory.Sequence(lambda n: "user_%d" % n)
class PageFactory(factory.DjangoModelFactory):
class Meta:
model = Page
owner = factory.SubFactory(UserFactory)
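# Illustrative usage sketch (assumes a configured Django test database):
# user = UserFactory()             # usernames follow the sequence user_0, user_1, ...
# page = PageFactory()             # owner is created via SubFactory(UserFactory)
# page = PageFactory(owner=user)   # or pass an existing owner explicitly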
|
from vsg.rules import blank_line_above_line_starting_with_token
from vsg import token
lTokens = []
lTokens.append(token.entity_declaration.end_keyword)
class rule_016(blank_line_above_line_starting_with_token):
'''
This rule checks for blank lines above the **end entity** keywords.
**Violation**
.. code-block:: vhdl
wr_en : in std_logic;
rd_en : in std_logic
);
end entity fifo;
**Fix**
.. code-block:: vhdl
wr_en : in std_logic;
rd_en : in std_logic
);
end entity fifo;
'''
def __init__(self):
blank_line_above_line_starting_with_token.__init__(self, 'entity', '016', lTokens)
self.style = 'no_blank_line'
|
"""
A read-only cached version of @property
"""
class cached_property(object):
def __init__(self, method, name=None):
self.method = method
self.name = name or method.__name__
self.__doc__ = method.__doc__
def __get__(self, instance, cls):
if instance is None:
return self
result = self.method(instance)
setattr(instance, self.name, result)
return result
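# Minimal usage sketch (added for illustration): the first attribute access runs
# the decorated method, then the result shadows the descriptor on the instance,
# so subsequent accesses are plain attribute lookups.
class _Circle(object):
    def __init__(self, radius):
        self.radius = radius
    @cached_property
    def area(self):
        return 3.14159 * self.radius ** 2
# _Circle(2.0).area == 12.56636, computed once and then cached on the instance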
|
import jinja2
from jinja2 import nodes
from jinja2.ext import Extension
class RegionExtension(Extension):
tags = set(['region'])
def __init__(self, environment):
super(RegionExtension, self).__init__(environment)
environment.extend(regions=dict())
def parse(self, parser):
next(parser.stream)
name = parser.parse_expression()
body = parser.parse_statements(['name:endregion'], drop_needle=True)
print(body)
self.environment.regions[name.name] = body
return []
class InsertExtension(Extension):
tags = set(['insert'])
def parse(self, parser):
next(parser.stream)
name = parser.parse_expression()
return self.environment.regions[name.name]
class CaseExtension(Extension):
tags = set(['case'])
def parse(self, parser):
next(parser.stream)
name = parser.parse_expression()
body = parser.parse_statements(['name:endcase'], drop_needle=True)
body.append(nodes.TemplateData('\n\n'))
return body
env = jinja2.Environment(
optimized=False,
extensions=[InsertExtension, RegionExtension, CaseExtension],
block_start_string='/*#',
block_end_string='*/',
line_statement_prefix='//#')
tmpl = env.from_string('''
//# region elems
[x = 23]
//#- endregion
//# region vals
[,]
//#- endregion
//# region body
assert.sameValue(x, 23);
//# endregion
//# case var
var /*# insert elems */ = /*# insert vals */;
//# insert body
//# endcase
//# case funcexpr
var callCount = 0;
var f = function(/*# insert elems */) {
//# insert body
callCount = callCount + 1;
};
f(/*# insert vals */);
assert.sameValue(callCount, 1);
//# endcase
//# case genmeth
var callCount = 0;
var obj = {
*method(/*# insert elems */) {
//# insert body
callCount = callCount + 1;
}
};
obj.method().next(/*# insert vals */);
assert.sameValue(callCount, 1);
//# endcase
''')
print tmpl.render(name='Mark')
|
from corpustools import ccat
import unittest
from lxml import etree
import io
import cStringIO
class TestCcatHyph(unittest.TestCase):
'''Test how ccat handles hyph
'''
def test_hyph1(self):
'''Test the default treatment of hyph tags
'''
xml_printer = ccat.XMLPrinter()
buffer = cStringIO.StringIO()
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
'<body><p>mellom<hyph/>krigs<hyph/>tiden</p></body>'
'</document>')
)
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'mellomkrigstiden ¶\n')
def test_hyph2(self):
'''Test the treatment of hyph tags when hyph_replacement is
set to "xml"
'''
xml_printer = ccat.XMLPrinter(hyph_replacement='xml')
buffer = cStringIO.StringIO()
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
'<body><p>mellom<hyph/>krigs<hyph/>tiden</p></body>'
'</document>')
)
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(),
'mellom<hyph/>krigs<hyph/>tiden ¶\n')
def test_hyph3(self):
'''Test the treatment of hyph tags when hyph_replacement is
set to "-"
'''
xml_printer = ccat.XMLPrinter(hyph_replacement='-')
buffer = cStringIO.StringIO()
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
'<body><p>mellom<hyph/>krigs<hyph/>tiden</p></body>'
'</document>')
)
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'mellom-krigs-tiden ¶\n')
def test_hyph4(self):
        '''Test the treatment of two hyph tags in a row
'''
xml_printer = ccat.XMLPrinter(hyph_replacement='-')
buffer = cStringIO.StringIO()
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
'<body><p>mellom<hyph/><hyph/>tiden</p></body>'
'</document>')
)
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'mellom-tiden ¶\n')
def test_hyph5(self):
'''Test the treatment of hyph tags when hyph_replacement is
set to None
'''
xml_printer = ccat.XMLPrinter(hyph_replacement=None)
buffer = cStringIO.StringIO()
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
'<body><p>mellom<hyph/>krigs<hyph/>tiden</p></body>'
'</document>')
)
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'mellom krigs tiden ¶\n')
class TestCcatErrormarkup(unittest.TestCase):
'''Test how ccat handles errormarkup
'''
def test_single_error_inline(self):
'''Plain error element, default text flow
'''
xml_printer = ccat.XMLPrinter()
input_error = etree.fromstring(
'<errorortreal correct="fiskeleting" errtype="nosplit" pos="noun">'
' fiske leting'
'</errorortreal>')
textlist = []
xml_printer.collect_inline_errors(input_error, textlist, 'nob')
self.assertEqual('\n'.join(textlist), 'fiskeleting')
def test_single_error_not_inline(self):
'''Plain error element, one word per line output
'''
xml_printer = ccat.XMLPrinter()
input_error = etree.fromstring(
'<errorortreal correct="fiskeleting" errtype="nosplit" pos="noun">'
' fiske leting'
'</errorortreal>')
textlist = []
xml_printer.collect_not_inline_errors(input_error, textlist)
self.assertEqual('\n'.join(textlist), (
'fiske leting\tfiskeleting\t#errtype=nosplit,pos=noun'))
def test_single_error_not_inline_with_filename(self):
'''Plain error element, one word per line output, with filename
'''
xml_printer = ccat.XMLPrinter(print_filename=True)
input_error = etree.fromstring(
'<errorortreal correct="fiskeleting" errtype="nosplit" pos="noun">'
' fiske leting'
'</errorortreal>')
xml_printer.filename = 'p.xml'
textlist = []
xml_printer.collect_not_inline_errors(input_error, textlist)
self.assertEqual('\n'.join(textlist), (
'fiske leting\tfiskeleting'
'\t#errtype=nosplit,pos=noun, file: p.xml'))
def test_single_error_not_inline_with_filename_without_attributes(self):
'''Plain error element, one word per line output, with filename,
only correct attribute
'''
xml_printer = ccat.XMLPrinter(print_filename=True)
input_error = etree.fromstring(
'<errorortreal correct="fiskeleting">'
' fiske leting'
'</errorortreal>')
xml_printer.filename = 'p.xml'
textlist = []
xml_printer.collect_not_inline_errors(input_error, textlist)
self.assertEqual('\n'.join(textlist),
'fiske leting\tfiskeleting\t#file: p.xml')
def test_multi_error_in_line(self):
'''Nested error element, default text flow
'''
xml_printer = ccat.XMLPrinter()
input_error = etree.fromstring(
'<errormorphsyn cat="x" const="spred" '
'correct="skoledagene er så vanskelige" errtype="agr" '
'orig="x" pos="adj">'
' skoledagene er så'
' <errorort correct="vanskelig" errtype="nosilent" pos="adj">'
' vanskerlig'
' </errorort>'
'</errormorphsyn>')
textlist = []
xml_printer.collect_inline_errors(input_error, textlist, 'nob')
self.assertEqual('\n'.join(textlist),
u'skoledagene er så vanskelige')
def test_multi_errormorphsyn_not_inline_with_filename(self):
'''Nested error element, one word per line output, with filename
'''
input_error = etree.fromstring(
'<errormorphsyn cat="x" const="spred" '
'correct="skoledagene er så vanskelige" errtype="agr" orig="x" '
'pos="adj">'
' skoledagene er så'
' <errorort correct="vanskelig" errtype="nosilent" pos="adj">'
' vanskerlig'
' </errorort>'
'</errormorphsyn>')
xml_printer = ccat.XMLPrinter(one_word_per_line=True,
print_filename=True)
xml_printer.filename = 'p.xml'
textlist = []
xml_printer.collect_not_inline_errors(input_error, textlist)
self.assertEqual('\n'.join(textlist), (
u'skoledagene er så vanskelig\tskoledagene er så vanskelige'
'\t#cat=x,const=spred,errtype=agr,orig=x,pos=adj, file: p.xml\n'
'vanskerlig\tvanskelig\t#errtype=nosilent,pos=adj, file: p.xml'))
def test_multi_errorlex_not_inline(self):
'''Nested error element, one word per line output
'''
input_error = etree.fromstring(
'<errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" pos="interr">'
' makkar'
' </errorort>'
' soga'
'</errorlex>')
textlist = []
xml_printer = ccat.XMLPrinter(typos=True)
xml_printer.collect_not_inline_errors(input_error, textlist)
self.assertEqual('\n'.join(textlist), (
u'makkár soga\tman soga\nmakkar\tmakkár\t#errtype=á,pos=interr'))
class TestCcat(unittest.TestCase):
def test_p(self):
'''Test the output of a plain p with default text flow
'''
xml_printer = ccat.XMLPrinter()
buffer = cStringIO.StringIO()
input_p = etree.fromstring(
'<p>Et stykke av Norge som er lite kjent - '
'Litt om Norge i mellomkrigstiden</p>')
xml_printer.collect_text(input_p, 'nob', buffer)
self.assertEqual(buffer.getvalue(), (
'Et stykke av Norge som er lite kjent - '
'Litt om Norge i mellomkrigstiden ¶\n'))
def test_p_with_span(self):
'''Test the output of a plain p with a span element with default
text flow
'''
xml_printer = ccat.XMLPrinter()
buffer = cStringIO.StringIO()
input_p = etree.fromstring(
'<p>I 1864 ga han ut boka '
' <span type="quote" xml:lang="dan">'
' "Fornuftigt Madstel"'
' </span>.'
'</p>')
xml_printer.collect_text(input_p, 'nob', buffer)
self.assertEqual(buffer.getvalue(),
'I 1864 ga han ut boka "Fornuftigt Madstel" . ¶\n')
def test_p_with_error(self):
'''Test the output of a p containing a nested error element,
with default text flow
'''
xml_printer = ccat.XMLPrinter()
buffer = cStringIO.StringIO()
input_p = etree.fromstring(
'<p>'
' <errormorphsyn cat="pl3prs" const="fin" '
' correct="Bearpmehat sirrejit" errtype="agr" '
' orig="sg3prs" pos="verb">'
' <errorort correct="Bearpmehat" errtype="svow" pos="noun">'
' Bearpmahat'
' </errorort>'
' <errorlex correct="sirre" errtype="w" origpos="v" '
' pos="verb">'
' earuha'
' </errorlex>'
' </errormorphsyn> '
' uskki ja loaiddu.'
'</p>')
xml_printer.collect_text(input_p, 'sme', buffer)
self.assertEqual(buffer.getvalue(),
"Bearpmahat earuha uskki ja loaiddu. ¶\n")
def test_p_one_word_per_line(self):
'''Test the output of a plain p element, one word per line
'''
input_p = etree.fromstring(
'<p>Et stykke av Norge som er lite kjent - '
'Litt om Norge i mellomkrigstiden</p>')
xml_printer = ccat.XMLPrinter(one_word_per_line=True)
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'nob', buffer)
self.assertEqual(buffer.getvalue(), (
'Et\n'
'stykke\n'
'av\n'
'Norge\n'
'som\n'
'er\n'
'lite\n'
'kjent\n'
'-\n'
'Litt\n'
'om\n'
'Norge\n'
'i\n'
'mellomkrigstiden\n'))
def test_p_with_span_one_word_per_line(self):
        '''Test the output of a plain p that contains a span element,
        one word per line
'''
input_p = etree.fromstring(
'<p>I 1864 ga han ut boka '
' <span type="quote" xml:lang="dan">'
' "Fornuftigt Madstel"'
' </span>.'
'</p>')
xml_printer = ccat.XMLPrinter(one_word_per_line=True)
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'nob', buffer)
self.assertEqual(buffer.getvalue(), (
'I\n'
'1864\n'
'ga\n'
'han\n'
'ut\n'
'boka\n'
'"Fornuftigt\n'
'Madstel"\n'
'.\n'))
def test_p_with_error_one_word_per_line(self):
'''Test the output of a p element containing one plain and one
nested error element
'''
input_p = etree.fromstring(
'<p>livččii'
' <errorort correct="makkárge" errtype="á" pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
'</p>')
xml_printer = ccat.XMLPrinter(one_word_per_line=True)
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'sme', buffer)
self.assertEqual(buffer.getvalue(), (
'livččii\n'
'makkarge\tmakkárge\t#errtype=á,pos=adv\n'
'politihkka,\nmuhto\nrahpasit\nbaicca\nmuitalivčče\n'
'makkár soga\tman soga\n'
'makkar\tmakkár\t#errtype=á,pos=interr\nsoga\nsii\n'))
def test_p_with_error_correction(self):
'''Test the output of a plain p element containing two error elements,
one plain and one nested, when we want to print the corrections in the
error elements, with default text flow
'''
input_p = etree.fromstring(
'<p>livččii'
' <errorort correct="makkárge" errtype="á" pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
'</p>')
xml_printer = ccat.XMLPrinter(correction=True)
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'sme', buffer)
self.assertEqual(buffer.getvalue(), (
'livččii makkárge politihkka, muhto rahpasit baicca muitalivčče '
'man soga sii ¶\n'))
def test_p_with_error_filtering_errorlex(self):
'''Test the output of plain p, when we only want the correction
from the errorlex element, with the default text flow
'''
input_p = etree.fromstring(
'<p>livččii'
' <errorort correct="makkárge" errtype="á" pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
'</p>')
xml_printer = ccat.XMLPrinter(errorlex=True)
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'sme', buffer)
self.assertEqual(buffer.getvalue(), (
'livččii makkarge politihkka, muhto rahpasit baicca muitalivčče '
'man soga sii ¶\n'))
def test_p_with_error_filtering_errormorphsyn(self):
'''Test the output of a p element containing two error elements
that are not affected by the error filtering, with default text flow.
'''
input_p = etree.fromstring(
'<p>livččii'
' <errorort correct="makkárge" errtype="á" pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
'</p>')
xml_printer = ccat.XMLPrinter(errormorphsyn=True)
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'sme', buffer)
self.assertEqual(buffer.getvalue(), (
'livččii makkarge politihkka, muhto rahpasit baicca muitalivčče '
'makkar soga sii ¶\n'))
def test_p_with_error_filtering_errorort(self):
'''Test the output of a p element with two error elements,
        where errorort filtering is on. That is, the correct attributes of
the errorort elements should be printed instead of errorort.text.
'''
xml_printer = ccat.XMLPrinter(errorort=True)
input_p = etree.fromstring(
'<p>livččii'
' <errorort correct="makkárge" errtype="á" pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
'</p>')
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'sme', buffer)
self.assertEqual(buffer.getvalue(), (
'livččii makkárge politihkka, muhto rahpasit baicca muitalivčče '
'makkár soga sii ¶\n'))
def test_p_with_error_filtering_errorortreal(self):
        '''Test the output of a p element with two error elements, neither of
        which is affected by errorortreal filtering, with default text flow.
        '''
xml_printer = ccat.XMLPrinter(errorortreal=True)
input_p = etree.fromstring(
'<p>livččii'
' <errorort correct="makkárge" errtype="á" pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
'</p>')
buffer = cStringIO.StringIO()
xml_printer.collect_text(input_p, 'sme', buffer)
self.assertEqual(buffer.getvalue(), (
'livččii makkarge politihkka, muhto rahpasit baicca muitalivčče '
'makkar soga sii ¶\n'))
def test_visit_this_p_default(self):
        '''Check that only plain p elements and p elements where the
        type attribute is text are visited
'''
xml_printer = ccat.XMLPrinter()
for types in [' type="title"',
' type="listitem"',
' type="tablecell"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertFalse(xml_printer.visit_this_node(input_xml))
for types in ['',
' type="text"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertTrue(xml_printer.visit_this_node(input_xml))
def test_visit_this_p_title_set(self):
'''Check that only p elements where the
        type attribute is title are visited, when the title option is True
'''
xml_printer = ccat.XMLPrinter(title=True)
for types in ['',
' type="text"',
' type="listitem"',
' type="tablecell"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertFalse(xml_printer.visit_this_node(input_xml))
for types in [' type="title"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertTrue(xml_printer.visit_this_node(input_xml))
def test_visit_this_p_listitem_set(self):
'''Check that only p elements where the
        type attribute is listitem are visited, when the listitem option is True
'''
xml_printer = ccat.XMLPrinter(listitem=True)
for types in ['',
' type="text"',
' type="title"',
' type="tablecell"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertFalse(xml_printer.visit_this_node(input_xml))
for types in [' type="listitem"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertTrue(xml_printer.visit_this_node(input_xml))
def test_visit_this_p_tablecell_set(self):
'''Check that only p elements where the
        type attribute is tablecell are visited, when the table option is
True
'''
xml_printer = ccat.XMLPrinter(table=True)
for types in ['',
' type="text"',
' type="title"',
' type="listitem"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertFalse(xml_printer.visit_this_node(input_xml))
for types in [' type="tablecell"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertTrue(xml_printer.visit_this_node(input_xml))
def test_visit_this_p_allp_set(self):
'''Check that all p elements are visited when the
all_paragraphs option is True
'''
xml_printer = ccat.XMLPrinter(all_paragraphs=True)
for types in ['',
' type="text"',
' type="title"',
' type="listitem"',
' type="tablecell"']:
input_xml = etree.fromstring('<p' + types + '>ášŧŋđžčøåæ</p>')
self.assertTrue(xml_printer.visit_this_node(input_xml))
def test_process_file_default(self):
'''Check the output of plain p elements, with default settings
Specifically, check that only plain p gets output, whereas
p elements with the type title, listitem and tablecell get no output.
'''
xml_printer = ccat.XMLPrinter()
for types in [' type="title"',
' type="listitem"',
' type="tablecell"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), '')
for types in ['',
' type="text"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'ášŧŋđžčøåæ ¶\n')
def test_process_file_title_set(self):
'''When the title option is True, check that only p elements with
        type=title get output.
'''
xml_printer = ccat.XMLPrinter(title=True)
for types in ['',
' type="text"',
' type="listitem"',
' type="tablecell"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), '')
for types in [' type="title"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'ášŧŋđžčøåæ ¶\n')
def test_process_file_listitem_set(self):
'''When the listitem option is True, check that only p elements with
        type=listitem get output.
'''
xml_printer = ccat.XMLPrinter(listitem=True)
for types in ['',
' type="text"',
' type="title"',
' type="tablecell"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), '')
for types in [' type="listitem"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'ášŧŋđžčøåæ ¶\n')
def test_process_file_tablecell_set(self):
'''When the table option is True, check that only p elements with
        type=tablecell get output.
'''
xml_printer = ccat.XMLPrinter(table=True)
for types in ['',
' type="text"',
' type="title"',
' type="listitem"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), '')
for types in [' type="tablecell"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'ášŧŋđžčøåæ ¶\n')
def test_process_file_allp_set(self):
'''When the all_paragraphs option is True, check that all p elements
get output.
'''
xml_printer = ccat.XMLPrinter(all_paragraphs=True)
for types in ['',
' type="text"',
' type="title"',
' type="listitem"',
' type="tablecell"']:
xml_printer.etree = etree.parse(io.BytesIO(
'''<document id="no_id" xml:lang="sme"><body><p''' +
types +
'''>ášŧŋđžčøåæ</p></body></document>'''))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'ášŧŋđžčøåæ ¶\n')
def test_process_file_one_word_per_line_errorlex(self):
'''Check the output of a p element containing two error elements,
a plain errorort one, and a nested errorlex one when
the one_word_per_line and errorlex options are True.
'''
xml_printer = ccat.XMLPrinter(one_word_per_line=True,
errorlex=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="sme">'
' <body>'
' <p>'
' livččii'
' <errorort correct="makkárge" errtype="á" '
' pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" '
' pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'livččii\n'
'makkarge\n'
'politihkka,\n'
'muhto\n'
'rahpasit\n'
'baicca\n'
'muitalivčče\n'
'makkár soga\tman soga\n'
'sii\n'))
def test_process_file_one_word_per_line_errorort(self):
'''Check the output of a p element containing two error elements,
a plain errorort one, and a nested errorlex one when
the one_word_per_line and errorort options are True
'''
xml_printer = ccat.XMLPrinter(one_word_per_line=True,
errorort=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="sme">'
' <body>'
' <p>'
' livččii'
' <errorort correct="makkárge" errtype="á" '
' pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" '
' pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'livččii\n'
'makkarge\tmakkárge\t#errtype=á,pos=adv\n'
'politihkka,\n'
'muhto\n'
'rahpasit\n'
'baicca\n'
'muitalivčče\n'
'makkar\tmakkár\t#errtype=á,pos=interr\n'
'soga\n'
'sii\n'))
def test_process_file_typos(self):
'''Check the output of a p element containing two error elements,
a plain errorort one, and a nested errorlex one when
the typos option True
'''
xml_printer = ccat.XMLPrinter(typos=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="sme">'
' <body>'
' <p>'
' livččii'
' <errorort correct="makkárge" errtype="á" '
' pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" '
' pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'makkarge\tmakkárge\t#errtype=á,pos=adv\n'
'makkár soga\tman soga\n'
'makkar\tmakkár\t#errtype=á,pos=interr\n'))
def test_process_file_typos_errorlex(self):
'''Check the output of a p element containing two error elements,
a plain errorort one, and a nested errorlex one when
the typos and errorlex options are True
'''
xml_printer = ccat.XMLPrinter(typos=True, errorlex=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="sme">'
' <body>'
' <p>'
' livččii'
' <errorort correct="makkárge" errtype="á" '
' pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" '
' pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(),
'makkár soga\tman soga\n')
def test_process_file_typos_errorort(self):
'''Check the output of a p element containing two error elements,
a plain errorort one, and a nested errorlex one when
the one_word_per_line, typos and errorort options are True
'''
xml_printer = ccat.XMLPrinter(typos=True,
one_word_per_line=True,
errorort=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="sme">'
' <body>'
' <p>'
' livččii'
' <errorort correct="makkárge" errtype="á" '
' pos="adv">'
' makkarge'
' </errorort>'
' politihkka, muhto rahpasit baicca muitalivčče'
' <errorlex correct="man soga">'
' <errorort correct="makkár" errtype="á" '
' pos="interr">'
' makkar'
' </errorort>'
' soga'
' </errorlex>'
' sii'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'makkarge\tmakkárge\t#errtype=á,pos=adv\n'
'makkar\tmakkár\t#errtype=á,pos=interr\n'))
def test_get_lang(self):
'''Check that get_lang finds the main lang of the document
'''
xml_printer = ccat.XMLPrinter()
xml_printer.etree = etree.parse(io.BytesIO(
'<document id="no_id" xml:lang="sme"/>'))
self.assertEqual(xml_printer.get_lang(), 'sme')
def test_get_element_language_same_as_parent(self):
'''Check that get_element_language returns the same language as the
main lang of the document when the xml:lang is not set in the p
element.
'''
xml_printer = ccat.XMLPrinter()
element = etree.fromstring('<p/>')
self.assertEqual(xml_printer.get_element_language(element, 'sme'),
'sme')
def test_get_element_language_different_from_parent(self):
'''Check that the value of xml:lang is returned when it is set.
'''
xml_printer = ccat.XMLPrinter()
element = etree.fromstring('<p xml:lang="nob"/>')
self.assertEqual(xml_printer.get_element_language(element, 'sme'),
'nob')
def test_process_file_language_nob(self):
'''Check that only content with the same language as the lang
options is output
'''
xml_printer = ccat.XMLPrinter(lang='nob')
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' nob1'
' <span type="quote" xml:lang="dan">'
' dan1'
' </span>'
' nob2'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'nob1 nob2 ¶\n')
def test_process_file_language_dan(self):
'''Check that only content with the same language as the lang
options is output
'''
xml_printer = ccat.XMLPrinter(lang='dan')
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' nob1'
' <span type="quote" xml:lang="dan">'
' dan1'
' </span>'
' nob2'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'dan1 ¶\n')
def test_process_two_paragraphs(self):
'''Check that the ¶ character is printed out when the content of
a p is output
'''
xml_printer = ccat.XMLPrinter()
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' nob1'
' </p>'
' <p>'
' nob2'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), 'nob1 ¶\nnob2 ¶\n')
def test_process_minus_l_sme(self):
'''Check that nothing is output when the wanted language
(set in the lang option) is not the same language as any of the
content of the elements.
'''
xml_printer = ccat.XMLPrinter(lang='sme')
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p type="text">'
' men'
' <errormorphsyn cat="x" const="spred"'
' correct="skoledagene er så vanskelige" '
' errtype="agr" orig="x" pos="adj">'
' skoledagene er så'
' <errorort correct="vanskelig" '
' errtype="nosilent" pos="adj">'
' vanskerlig'
' </errorort>'
' </errormorphsyn>'
' å komme igjennom,'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), '')
def test_foreign(self):
'''Check the output of a p containing an errorlang element
when the errorlang option is True.
'''
xml_printer = ccat.XMLPrinter(errorlang=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' Vijmak bierjjedak!'
' <errorlang correct="nor">'
' Pjuh'
' </errorlang>'
' vijmak de bierjjedak'
' <errorort correct="sjattaj" '
' errorinfo="vowlat,á-a">'
' sjattáj'
' </errorort>'
' .'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'Vijmak bierjjedak! nor vijmak de bierjjedak sjattáj . ¶\n'))
def test_no_foreign(self):
'''When the noforeign option is True, neither the errorlang.text
nor the correct attribute should be output. Check that this really
happens.
'''
xml_printer = ccat.XMLPrinter(noforeign=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' Vijmak bierjjedak!'
' <errorlang correct="nor">'
' Pjuh'
' </errorlang>'
' vijmak de bierjjedak'
' <errorort correct="sjattaj" '
' errorinfo="vowlat,á-a">'
' sjattáj'
' </errorort>'
' .'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'Vijmak bierjjedak! vijmak de bierjjedak sjattáj . ¶\n'))
def test_no_foreign_typos(self):
'''When the noforeign option is True, neither the errorlang.text
nor the correct attribute should be output. Check that this really
happens even when the typos option is set.
'''
xml_printer = ccat.XMLPrinter(typos=True, noforeign=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' Vijmak bierjjedak!'
' <errorlang correct="nor">'
' Pjuh'
' </errorlang>'
' vijmak de bierjjedak'
' <errorort correct="sjattaj" '
' errorinfo="vowlat,á-a">'
' sjattáj'
' </errorort>'
' .'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(),
'sjattáj\tsjattaj\t#errorinfo=vowlat,á-a\n')
def test_typos_errordepth3(self):
'''Check the output of a p containing a nested error element of
depth 3 when the typos option is True.
'''
xml_printer = ccat.XMLPrinter(typos=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' <errormorphsyn cat="genpl" const="obj"'
' correct="čoggen ollu joŋaid ja sarridiid" '
' errtype="case" orig="nompl" pos="noun">'
' <errormorphsyn cat="genpl" const="obj" '
' correct="čoggen ollu joŋaid" errtype="case"'
' orig="nompl" pos="noun">'
' <errorort correct="čoggen" errtype="mono" '
' pos="verb">'
' čoaggen'
' </errorort>'
' ollu jokŋat'
' </errormorphsyn>'
' ja sarridat'
' </errormorphsyn>'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(
buffer.getvalue(),
(
'čoggen ollu joŋaid ja sarridat'
'\tčoggen ollu joŋaid ja sarridiid'
'\t#cat=genpl,const=obj,errtype=case,orig=nompl,pos=noun\n'
'čoggen ollu jokŋat\tčoggen ollu joŋaid'
'\t#cat=genpl,const=obj,errtype=case,orig=nompl,pos=noun\n'
'čoaggen\tčoggen\t#errtype=mono,pos=verb\n'))
def test_typos_errormorphsyn_twice(self):
'''Check the output of a plain p containing a doubly nested
errormorphsyn element when the typos and errormorphsyn
options are True
'''
xml_printer = ccat.XMLPrinter(typos=True, errormorphsyn=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">'
' <body>'
' <p>'
' <errormorphsyn cat="sg3prs" const="v" '
' correct="lea okta mánná" errtype="agr" '
' orig="pl3prs" pos="v">'
' leat'
' <errormorphsyn cat="nomsg" const="spred" '
' correct="okta mánná" errtype="case" '
' orig="gensg" pos="n">'
' okta máná'
' </errormorphsyn>'
' </errormorphsyn>'
' </p>'
' </body>'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'leat okta mánná\tlea okta mánná'
'\t#cat=sg3prs,const=v,errtype=agr,orig=pl3prs,pos=v\n'
'okta máná\tokta mánná'
'\t#cat=nomsg,const=spred,errtype=case,orig=gensg,pos=n\n'))
def test_process_file1(self):
'''Test process_file with a disambiguation element as input
'''
xml_printer = ccat.XMLPrinter(disambiguation=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">\n'
' <body>\n'
' <disambiguation>"<Muhto>"\n'
'\t"muhto" CC <sme> @CVP\n"<gaskkohagaid>"\n'
'\t"gaskkohagaid" Adv <sme>\n"<,>"\n'
'\t"," CLB\n"<ja>"\n'
'\t"ja" CC <sme> @CNP\n"<erenoamážit>"\n'
'\t"erenoamážit" Adv <sme>\n"<dalle_go>"\n'
'\t"dalle_go" MWE CS <sme> @CVP\n"<lei>"\n'
'\t"leat" V <sme> IV Ind Prt Sg3 @+FMAINV\n'
'"<buolaš>"\n'
'\t"buolaš" Sem/Wthr N <sme> Sg Nom\n"<,>"\n'
'\t"," CLB\n"<de>"\n'
'\t"de" Adv <sme>\n"<aggregáhta>"\n'
'\t"aggregáhta" N <sme> Sg Nom\n"<billánii>"\n'
'\t"billánit" V <sme> IV Ind Prt Sg3 @+FMAINV\n'
'"<.>"\n\t"." CLB\n\n"<¶>"\n'
'\t"¶" CLB\n\n</disambiguation></body></document>'))
buffer = xml_printer.process_file()
self.assertEqual(buffer.getvalue(), (
'"<Muhto>"\n\t"muhto" CC <sme> @CVP\n"<gaskkohagaid>"\n'
'\t"gaskkohagaid" Adv <sme>\n"<,>"\n'
'\t"," CLB\n"<ja>"\n\t"ja" CC <sme> @CNP\n"<erenoamážit>"\n'
'\t"erenoamážit" Adv <sme>\n"<dalle_go>"\n'
'\t"dalle_go" MWE CS <sme> @CVP\n"<lei>"\n'
'\t"leat" V <sme> IV Ind Prt Sg3 @+FMAINV\n"<buolaš>"\n'
'\t"buolaš" Sem/Wthr N <sme> Sg Nom\n"<,>"\n'
'\t"," CLB\n"<de>"\n\t"de" Adv <sme>\n"<aggregáhta>"\n'
'\t"aggregáhta" N <sme> Sg Nom\n"<billánii>"\n'
'\t"billánit" V <sme> IV Ind Prt Sg3 @+FMAINV\n"<.>"\n'
'\t"." CLB\n\n"<¶>"\n\t"¶" CLB\n\n'))
def test_process_file2(self):
'''Test process_file with a dependency element as input
'''
xml_printer = ccat.XMLPrinter(dependency=True)
xml_printer.etree = etree.parse(
io.BytesIO(
'<document id="no_id" xml:lang="nob">\n'
' <body>\n'
' <dependency>"<Muhto>"\n'
'\t"muhto" CC @CVP #1->1 \n"<gaskkohagaid>"\n'
'\t"gaskkohagaid" Adv @ADVL> #2->12 \n"<,>"\n'
'\t"," CLB #3->4 \n"<ja>"\n'
'\t"ja" CC @CNP #4->2 \n"<erenoamážit>"\n'
'\t"erenoamážit" Adv @ADVL> #5->12 \n'
'"<dalle_go>"\n'
'\t"dalle_go" CS @CVP #6->7 \n"<lei>"\n'
'\t"leat" V IV Ind Prt Sg3 @FS-ADVL> #7->12 \n'
'"<buolaš>"\n'
'\t"buolaš" N Sg Nom @<SPRED #8->7 \n"<,>"\n'
'\t"," CLB #9->6 \n"<de>"\n'
'\t"de" Adv @ADVL> #10->12 \n"<aggregáhta>"\n'
'\t"aggregáhta" N Sg Nom @SUBJ> #11->12 \n'
'"<billánii>"\n'
'\t"billánit" V IV Ind Prt Sg3 @FS-ADVL> #12->0 \n'
'"<.>"\n\t"." CLB #13->12 \n\n"<¶>"\n'
'\t"¶" CLB #1->1 \n\n</dependency>\n'
' </body>\n'
'</document>'))
buffer = xml_printer.process_file()
self.assertEqual(
buffer.getvalue(),
(
'"<Muhto>"\n\t"muhto" CC @CVP #1->1 \n"<gaskkohagaid>"\n'
'\t"gaskkohagaid" Adv @ADVL> #2->12 \n"<,>"\n'
'\t"," CLB #3->4 \n"<ja>"\n'
'\t"ja" CC @CNP #4->2 \n"<erenoamážit>"\n'
'\t"erenoamážit" Adv @ADVL> #5->12 \n"<dalle_go>"\n'
'\t"dalle_go" CS @CVP #6->7 \n"<lei>"\n'
'\t"leat" V IV Ind Prt Sg3 @FS-ADVL> #7->12 \n"<buolaš>"\n'
'\t"buolaš" N Sg Nom @<SPRED #8->7 \n"<,>"\n'
'\t"," CLB #9->6 \n"<de>"\n'
'\t"de" Adv @ADVL> #10->12 \n"<aggregáhta>"\n'
'\t"aggregáhta" N Sg Nom @SUBJ> #11->12 \n"<billánii>"\n'
'\t"billánit" V IV Ind Prt Sg3 @FS-ADVL> #12->0 \n"<.>"\n'
'\t"." CLB #13->12 \n\n"<¶>"\n\t"¶" CLB #1->1 \n\n'))
|
import unittest
from ...compatibility import StringIO
from ..helperfunctions import _xml_to_list
from ...worksheet import Worksheet
class TestAssembleWorksheet(unittest.TestCase):
"""
Test assembling a complete Worksheet file.
"""
def test_assemble_xml_file(self):
"""Test writing a worksheet with conditional formatting."""
self.maxDiff = None
fh = StringIO()
worksheet = Worksheet()
worksheet._set_filehandle(fh)
worksheet.select()
worksheet.write('A1', 10)
worksheet.write('A2', 20)
worksheet.write('A3', 30)
worksheet.write('A4', 40)
worksheet.write('B1', 5)
worksheet.conditional_format('A1:A1',
{'type': 'cell',
'format': None,
'criteria': 'greater than',
'value': '$B$1',
})
worksheet._assemble_xml_file()
exp = _xml_to_list("""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
<dimension ref="A1:B4"/>
<sheetViews>
<sheetView tabSelected="1" workbookViewId="0"/>
</sheetViews>
<sheetFormatPr defaultRowHeight="15"/>
<sheetData>
<row r="1" spans="1:2">
<c r="A1">
<v>10</v>
</c>
<c r="B1">
<v>5</v>
</c>
</row>
<row r="2" spans="1:2">
<c r="A2">
<v>20</v>
</c>
</row>
<row r="3" spans="1:2">
<c r="A3">
<v>30</v>
</c>
</row>
<row r="4" spans="1:2">
<c r="A4">
<v>40</v>
</c>
</row>
</sheetData>
<conditionalFormatting sqref="A1">
<cfRule type="cellIs" priority="1" operator="greaterThan">
<formula>$B$1</formula>
</cfRule>
</conditionalFormatting>
<pageMargins left="0.7" right="0.7" top="0.75" bottom="0.75" header="0.3" footer="0.3"/>
</worksheet>
""")
got = _xml_to_list(fh.getvalue())
self.assertEqual(got, exp)
if __name__ == '__main__':
unittest.main()
|
import requests
import json
import os
def onde(palavras):
tam = len(palavras)
if(palavras[1] == u'fica' or palavras[1] == u'é' or palavras[1] == u'e'):
try:
if(tam == 3):
req=requests.get('http://maps.googleapis.com/maps/api/geocode/json?address='+palavras[2])
elif(tam == 4):
req=requests.get('http://maps.googleapis.com/maps/api/geocode/json?address='+palavras[2]+' '+palavras[3])
elif(tam == 5):
req=requests.get('http://maps.googleapis.com/maps/api/geocode/json?address='+palavras[2]+' '+palavras[3]+' '+palavras[4])
except:
print('Erro de conexão')
os.system('espeak -v pt-br -g 4 -a 100 " Erro na conexão"')
return
dicionario = json.loads(req.text)
if(dicionario['status']=='ZERO_RESULTS'):
print('Não encontrado ou não existe')
os.system('espeak -v pt-br -g 4 -a 100 "Não encontrado ou não existe "')
else:
            try:  # city: name, state abbreviation and country are all present
                nome = dicionario['results'][0]['address_components'][0]['long_name']
                sigla = dicionario['results'][0]['address_components'][2]['short_name']
                pais = dicionario['results'][0]['address_components'][3]['long_name']
                sigla_pais = dicionario['results'][0]['address_components'][3]['short_name']
            except IndexError:  # state
                try:
                    nome = dicionario['results'][0]['address_components'][0]['long_name']
                    sigla = dicionario['results'][0]['address_components'][0]['short_name']
                    pais = dicionario['results'][0]['address_components'][1]['long_name']
                    sigla_pais = dicionario['results'][0]['address_components'][1]['short_name']
                except IndexError:  # country
                    nome = dicionario['results'][0]['address_components'][0]['long_name']
                    sigla = dicionario['results'][0]['address_components'][0]['short_name']
                    pais = None
                    sigla_pais = None
if(tam == 3):
try:
print(str(palavras[2])+', '+str(nome))
print(str(pais)+', '+str(sigla_pais))
os.system('espeak -v pt-br -g 4 -a 100 "'+str(palavras[2])+' ou '+str(nome)+'"')
os.system('espeak -v pt-br -g 4 -a 100 " fica no pais '+str(pais)+' e tem a sigla '+str(sigla_pais)+'"')
if(pais==None):
print(str(nome))
print(str(sigla))
except UnicodeDecodeError:
print('Erro na codificação, tente usar a sigla')
os.system('espeak -v pt-br -g 4 -a 100 "Erro na codificação, tente usar a sigla"')
elif(tam == 4):
try:
print(str(palavras[2])+' '+ str(palavras[3])+', '+nome)
print(str(pais)+', '+str(sigla_pais))
os.system('espeak -v pt-br -g 4 -a 100 "'+str(palavras[2])+' '+str(palavras[3])+' ou '+nome+'"')
os.system('espeak -v pt-br -g 4 -a 100 " fica no pais '+str(pais)+' e tem a sigla '+str(sigla_pais)+'"')
if(pais == None):
print(str(nome))
print(str(sigla))
except UnicodeDecodeError:
print('Erro na codificação, tente usar a sigla\nEx: São paulo -> sp')
os.system('espeak -v pt-br -g 4 -a 100 "Erro na codificação, tente usar a sigla"')
elif(tam == 5):
try:
print(str(palavras[2])+' '+ str(palavras[3])+' ' + str(palavras[4])+ ', ' +str(nome))
os.system('espeak -v pt-br -g 4 -a 100 "'+str(palavras[2])+' '+str(palavras[3])+' '+palavras[4]+' ou '+nome+'"')
print(str(pais)+', '+str(sigla_pais))
os.system('espeak -v pt-br -g 4 -a 100 " fica no pais '+str(pais)+' e tem a sigla '+str(sigla_pais)+'"')
except UnicodeDecodeError:
print('Erro na codificação, tente usar a sigla')
os.system('espeak -v pt-br -g 4 -a 100 "Erro na codificação, tente usar a sigla"')
|
from bbio import *
import itertools
ss = input("Enter the Number\n")
qq = 1
for i in range(ss+1):
if i != 0:
qq = qq * i
print qq
"\n"
s = " "
strg = ""
for ii in range(ss+1):
if ii != 0:
strg = strg + str(ii)
comb = itertools.permutations(strg)
for x in comb:
print s.join(x)
|
This is a testing file for editing.
|
import pygame
pygame.display.set_caption("multi bingo")
screen = pygame.display.set_mode((0,0))
screen.fill([0,0,0])
pygame.mouse.set_visible(False)
meter = pygame.image.load('graphics/assets/silver_register_cover.png').convert()
number = pygame.image.load('playtime/assets/number.png').convert_alpha()
feature = pygame.image.load('playtime/assets/feature.png').convert_alpha()
ms_letter = pygame.image.load('playtime/assets/ms_letter.png').convert_alpha()
ms_arrow = pygame.image.load('playtime/assets/ms_arrow.png').convert_alpha()
select_now = pygame.image.load('playtime/assets/select_now.png').convert_alpha()
corners = pygame.image.load('playtime/assets/feature.png').convert_alpha()
ballyhole = pygame.image.load('playtime/assets/feature.png').convert_alpha()
orange_odds1 = pygame.image.load('playtime/assets/orange_odds1.png').convert_alpha()
orange_odds2 = pygame.image.load('playtime/assets/orange_odds2.png').convert_alpha()
orange_odds3 = pygame.image.load('playtime/assets/orange_odds3.png').convert_alpha()
orange_odds4 = pygame.image.load('playtime/assets/orange_odds4.png').convert_alpha()
orange_odds5 = pygame.image.load('playtime/assets/orange_odds5.png').convert_alpha()
orange_odds6 = pygame.image.load('playtime/assets/orange_odds6.png').convert_alpha()
orange_odds7 = pygame.image.load('playtime/assets/orange_odds7.png').convert_alpha()
orange_odds8 = pygame.image.load('playtime/assets/orange_odds8.png').convert_alpha()
yellow_odds1 = pygame.image.load('playtime/assets/yellow_odds1.png').convert_alpha()
yellow_odds2 = pygame.image.load('playtime/assets/yellow_odds2.png').convert_alpha()
yellow_odds3 = pygame.image.load('playtime/assets/yellow_odds3.png').convert_alpha()
yellow_odds4 = pygame.image.load('playtime/assets/yellow_odds4.png').convert_alpha()
yellow_odds5 = pygame.image.load('playtime/assets/yellow_odds5.png').convert_alpha()
yellow_odds6 = pygame.image.load('playtime/assets/yellow_odds6.png').convert_alpha()
yellow_odds7 = pygame.image.load('playtime/assets/yellow_odds7.png').convert_alpha()
yellow_odds8 = pygame.image.load('playtime/assets/yellow_odds8.png').convert_alpha()
red_odds1 = pygame.image.load('playtime/assets/red_odds1.png').convert_alpha()
red_odds2 = pygame.image.load('playtime/assets/red_odds2.png').convert_alpha()
red_odds3 = pygame.image.load('playtime/assets/red_odds3.png').convert_alpha()
red_odds4 = pygame.image.load('playtime/assets/red_odds4.png').convert_alpha()
red_odds5 = pygame.image.load('playtime/assets/red_odds5.png').convert_alpha()
red_odds6 = pygame.image.load('playtime/assets/red_odds6.png').convert_alpha()
red_odds7 = pygame.image.load('playtime/assets/red_odds7.png').convert_alpha()
red_odds8 = pygame.image.load('playtime/assets/red_odds8.png').convert_alpha()
extra_balls = pygame.image.load('playtime/assets/extra_balls.png').convert_alpha()
eb = pygame.image.load('playtime/assets/eb.png').convert_alpha()
eb_number = pygame.image.load('playtime/assets/eb_number.png').convert_alpha()
tilt = pygame.image.load('playtime/assets/tilt.png').convert_alpha()
time = pygame.image.load('playtime/assets/time.png').convert_alpha()
s_arrow = pygame.image.load('playtime/assets/sf_arrow.png').convert_alpha()
a0 = pygame.image.load('playtime/assets/a0.png').convert_alpha()
a1 = pygame.image.load('playtime/assets/a1.png').convert_alpha()
a2 = pygame.image.load('playtime/assets/a2.png').convert_alpha()
a3 = pygame.image.load('playtime/assets/a3.png').convert_alpha()
b0 = pygame.image.load('playtime/assets/b0.png').convert_alpha()
b1 = pygame.image.load('playtime/assets/b1.png').convert_alpha()
b2 = pygame.image.load('playtime/assets/b2.png').convert_alpha()
b3 = pygame.image.load('playtime/assets/b3.png').convert_alpha()
c0 = pygame.image.load('playtime/assets/c0.png').convert_alpha()
c1 = pygame.image.load('playtime/assets/c1.png').convert_alpha()
c2 = pygame.image.load('playtime/assets/c2.png').convert_alpha()
c3 = pygame.image.load('playtime/assets/c3.png').convert_alpha()
d0 = pygame.image.load('playtime/assets/d0.png').convert_alpha()
d1 = pygame.image.load('playtime/assets/d1.png').convert_alpha()
d2 = pygame.image.load('playtime/assets/d2.png').convert_alpha()
d3 = pygame.image.load('playtime/assets/d3.png').convert_alpha()
e0 = pygame.image.load('playtime/assets/e0.png').convert_alpha()
e1 = pygame.image.load('playtime/assets/e1.png').convert_alpha()
e2 = pygame.image.load('playtime/assets/e2.png').convert_alpha()
e3 = pygame.image.load('playtime/assets/e3.png').convert_alpha()
rollover = pygame.image.load('playtime/assets/rollover.png').convert_alpha()
bg_menu = pygame.image.load('playtime/assets/playtime_menu.png')
bg_gi = pygame.image.load('playtime/assets/playtime_gi.png')
bg_off = pygame.image.load('playtime/assets/playtime_off.png')
a_1 = pygame.image.load('playtime/assets/a-1.png').convert_alpha()
a_2 = pygame.image.load('playtime/assets/a-2.png').convert_alpha()
a_3 = pygame.image.load('playtime/assets/a-3.png').convert_alpha()
a_4 = pygame.image.load('playtime/assets/a-4.png').convert_alpha()
b_1 = pygame.image.load('playtime/assets/b-1.png').convert_alpha()
b_2 = pygame.image.load('playtime/assets/b-2.png').convert_alpha()
b_3 = pygame.image.load('playtime/assets/b-3.png').convert_alpha()
b_4 = pygame.image.load('playtime/assets/b-4.png').convert_alpha()
c_1 = pygame.image.load('playtime/assets/c-1.png').convert_alpha()
c_2 = pygame.image.load('playtime/assets/c-2.png').convert_alpha()
c_3 = pygame.image.load('playtime/assets/c-3.png').convert_alpha()
c_4 = pygame.image.load('playtime/assets/c-4.png').convert_alpha()
d_1 = pygame.image.load('playtime/assets/d-1.png').convert_alpha()
d_2 = pygame.image.load('playtime/assets/d-2.png').convert_alpha()
d_3 = pygame.image.load('playtime/assets/d-3.png').convert_alpha()
d_4 = pygame.image.load('playtime/assets/d-4.png').convert_alpha()
e_1 = pygame.image.load('playtime/assets/e-1.png').convert_alpha()
e_2 = pygame.image.load('playtime/assets/e-2.png').convert_alpha()
e_3 = pygame.image.load('playtime/assets/e-3.png').convert_alpha()
e_4 = pygame.image.load('playtime/assets/e-4.png').convert_alpha()
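# The scorereel class below just pairs a reel image with its screen position for the
# replay counter. display() redraws the entire backglass (reels, magic-square letters,
# hole numbers, feature arrows, odds lamps, tilt) and ends with a full
# pygame.display.update(), while the *_animation helpers further down blit only the
# regions they touch and pass the resulting dirty rectangles to pygame.display.update()
# for cheaper partial updates.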
class scorereel():
""" Score Reels are used to count replays """
def __init__(self, pos, image):
self.position = pos
self.default_y = self.position[1]
self.image = pygame.image.load(image).convert()
reel1 = scorereel([101,325], "graphics/assets/white_reel.png")
reel10 = scorereel([82,325], "graphics/assets/white_reel.png")
reel100 = scorereel([63,325], "graphics/assets/white_reel.png")
def display(s, replays=0, menu=False):
meter.set_colorkey((255,0,252))
meter_position = [53,325]
screen.blit(reel1.image, reel1.position)
screen.blit(reel10.image, reel10.position)
screen.blit(reel100.image, reel100.position)
screen.blit(meter, meter_position)
if s.game.square_a.position == 0:
p = [223,334]
screen.blit(a0, p)
if s.game.square_a.position == 1:
p = [223,334]
screen.blit(a1, p)
if s.game.square_a.position == 2:
p = [223,334]
screen.blit(a2, p)
if s.game.square_a.position == 3:
p = [223,334]
screen.blit(a3, p)
if s.game.square_b.position == 0:
p = [220,443]
screen.blit(b0, p)
if s.game.square_b.position == 1:
p = [220,443]
screen.blit(b1, p)
if s.game.square_b.position == 2:
p = [220,443]
screen.blit(b2, p)
if s.game.square_b.position == 3:
p = [220,443]
screen.blit(b3, p)
if s.game.square_c.position == 0:
p = [332,334]
screen.blit(c0, p)
if s.game.square_c.position == 1:
p = [332,334]
screen.blit(c1, p)
if s.game.square_c.position == 2:
p = [332,334]
screen.blit(c2, p)
if s.game.square_c.position == 3:
p = [332,334]
screen.blit(c3, p)
if s.game.square_d.position == 0:
p = [332,440]
screen.blit(d0, p)
if s.game.square_d.position == 1:
p = [332,440]
screen.blit(d1, p)
if s.game.square_d.position == 2:
p = [332,440]
screen.blit(d2, p)
if s.game.square_d.position == 3:
p = [332,440]
screen.blit(d3, p)
if s.game.square_e.position == 0:
p = [223,544]
screen.blit(e0, p)
if s.game.square_e.position == 1:
p = [223,544]
screen.blit(e1, p)
if s.game.square_e.position == 2:
p = [223,544]
screen.blit(e2, p)
if s.game.square_e.position == 3:
p = [223,544]
screen.blit(e3, p)
backglass_position = [0, 0]
backglass = pygame.Surface(screen.get_size(), flags=pygame.SRCALPHA)
backglass.fill((0, 0, 0))
if menu == True:
screen.blit(bg_menu, backglass_position)
else:
if (s.game.anti_cheat.status == True):
screen.blit(bg_gi, backglass_position)
else:
screen.blit(bg_off, backglass_position)
if s.game.tilt.status == False:
if s.holes:
if 1 in s.holes:
if s.game.square_a.position == 0:
p = [281,340]
screen.blit(number, p)
elif s.game.square_a.position == 1:
p = [281,392]
screen.blit(number, p)
elif s.game.square_a.position == 2:
p = [227,393]
screen.blit(number, p)
else:
p = [227,340]
screen.blit(number, p)
if 2 in s.holes:
if s.game.square_c.position == 0:
p = [337,339]
screen.blit(number, p)
elif s.game.square_c.position == 1:
p = [391,339]
screen.blit(number, p)
elif s.game.square_c.position == 2:
p = [390,391]
screen.blit(number, p)
else:
p = [337,391]
screen.blit(number, p)
if 3 in s.holes:
if s.game.square_e.position == 0:
p = [392,551]
screen.blit(number, p)
elif s.game.square_e.position == 1:
p = [228,551]
screen.blit(number, p)
elif s.game.square_e.position == 2:
p = [282,551]
screen.blit(number, p)
else:
p = [336,551]
screen.blit(number, p)
if 4 in s.holes:
if s.game.square_a.position == 0:
p = [227,394]
screen.blit(number, p)
elif s.game.square_a.position == 1:
p = [228,343]
screen.blit(number, p)
elif s.game.square_a.position == 2:
p = [281,334]
screen.blit(number, p)
else:
p = [281,386]
screen.blit(number, p)
if 5 in s.holes:
if s.game.square_d.position == 0:
p = [337,499]
screen.blit(number, p)
elif s.game.square_d.position == 1:
p = [335,445]
screen.blit(number, p)
elif s.game.square_d.position == 2:
p = [390,445]
screen.blit(number, p)
else:
p = [390,499]
screen.blit(number, p)
if 6 in s.holes:
if s.game.square_b.position == 0:
p = [228,500]
screen.blit(number, p)
elif s.game.square_b.position == 1:
p = [227,448]
screen.blit(number, p)
elif s.game.square_b.position == 2:
p = [281,446]
screen.blit(number, p)
else:
p = [281,499]
screen.blit(number, p)
if 7 in s.holes:
if s.game.square_c.position == 0:
p = [336,393]
screen.blit(number, p)
elif s.game.square_c.position == 1:
p = [337,340]
screen.blit(number, p)
elif s.game.square_c.position == 2:
p = [392,341]
screen.blit(number, p)
else:
p = [392,393]
screen.blit(number, p)
if 8 in s.holes:
if s.game.square_e.position == 0:
p = [283,552]
screen.blit(number, p)
elif s.game.square_e.position == 1:
p = [337,552]
screen.blit(number, p)
elif s.game.square_e.position == 2:
p = [392,552]
screen.blit(number, p)
else:
p = [228,552]
screen.blit(number, p)
if 9 in s.holes:
if s.game.square_a.position == 0:
p = [228,340]
screen.blit(number, p)
elif s.game.square_a.position == 1:
p = [281,341]
screen.blit(number, p)
elif s.game.square_a.position == 2:
p = [281,393]
screen.blit(number, p)
else:
p = [227,393]
screen.blit(number, p)
if 10 in s.holes:
p = [448,552]
screen.blit(number, p)
if 11 in s.holes:
if s.game.square_c.position == 0:
p = [392,340]
screen.blit(number, p)
elif s.game.square_c.position == 1:
p = [391,392]
screen.blit(number, p)
elif s.game.square_c.position == 2:
p = [336,393]
screen.blit(number, p)
else:
p = [337,340]
screen.blit(number, p)
if 12 in s.holes:
if s.game.square_e.position == 0:
p = [229,553]
screen.blit(number, p)
elif s.game.square_e.position == 1:
p = [283,553]
screen.blit(number, p)
elif s.game.square_e.position == 2:
p = [336,553]
screen.blit(number, p)
else:
p = [393,553]
screen.blit(number, p)
if 13 in s.holes:
if s.game.square_d.position == 0:
p = [391,445]
screen.blit(number, p)
elif s.game.square_d.position == 1:
p = [393,499]
screen.blit(number, p)
elif s.game.square_d.position == 2:
p = [337,499]
screen.blit(number, p)
else:
p = [337,445]
screen.blit(number, p)
if 14 in s.holes:
if s.game.square_e.position == 0:
p = [337,553]
screen.blit(number, p)
elif s.game.square_e.position == 1:
p = [391,553]
screen.blit(number, p)
elif s.game.square_e.position == 2:
p = [228,553]
screen.blit(number, p)
else:
p = [282,553]
screen.blit(number, p)
if 15 in s.holes:
p = [448,339]
screen.blit(number, p)
if 16 in s.holes:
if s.game.square_d.position == 0:
p = [337,445]
screen.blit(number, p)
elif s.game.square_d.position == 1:
p = [391,446]
screen.blit(number, p)
elif s.game.square_d.position == 2:
p = [393,499]
screen.blit(number, p)
else:
p = [337,499]
screen.blit(number, p)
if 17 in s.holes:
p = [447,445]
screen.blit(number, p)
if 18 in s.holes:
number_position = [447,393]
screen.blit(number, number_position)
if 19 in s.holes:
if s.game.square_a.position == 0:
p = [282,392]
screen.blit(number, p)
elif s.game.square_a.position == 1:
p = [227,393]
screen.blit(number, p)
elif s.game.square_a.position == 2:
p = [229,340]
screen.blit(number, p)
else:
p = [281,340]
screen.blit(number, p)
if 20 in s.holes:
p = [447,499]
screen.blit(number, p)
if 21 in s.holes:
if s.game.square_d.position == 0:
p = [392,499]
screen.blit(number, p)
elif s.game.square_d.position == 1:
p = [337,499]
screen.blit(number, p)
elif s.game.square_d.position == 2:
p = [337,446]
screen.blit(number, p)
else:
p = [392,446]
screen.blit(number, p)
if 22 in s.holes:
if s.game.square_c.position == 0:
p = [393,392]
screen.blit(number, p)
elif s.game.square_c.position == 1:
p = [337,395]
screen.blit(number, p)
elif s.game.square_c.position == 2:
p = [338,341]
screen.blit(number, p)
else:
p = [393,341]
screen.blit(number, p)
if 23 in s.holes:
if s.game.square_b.position == 0:
p = [283,500]
screen.blit(number, p)
elif s.game.square_b.position == 1:
p = [228,500]
screen.blit(number, p)
elif s.game.square_b.position == 2:
p = [228,447]
screen.blit(number, p)
else:
p = [281,446]
screen.blit(number, p)
if 24 in s.holes:
if s.game.square_b.position == 0:
p = [281,446]
screen.blit(number, p)
elif s.game.square_b.position == 1:
p = [283,500]
screen.blit(number, p)
elif s.game.square_b.position == 2:
p = [227,500]
screen.blit(number, p)
else:
p = [227,447]
screen.blit(number, p)
if 25 in s.holes:
if s.game.square_b.position == 0:
p = [227,447]
screen.blit(number, p)
elif s.game.square_b.position == 1:
p = [281,447]
screen.blit(number, p)
elif s.game.square_b.position == 2:
p = [283,500]
screen.blit(number, p)
else:
p = [227,501]
screen.blit(number, p)
if s.game.magic_squares_feature.position == 1:
p = [13,655]
screen.blit(ms_arrow, p)
if s.game.magic_squares_feature.position == 2:
p = [51,655]
screen.blit(ms_arrow, p)
if s.game.magic_squares_feature.position == 3:
p = [91,655]
screen.blit(ms_arrow, p)
if s.game.magic_squares_feature.position == 4:
p = [131,655]
screen.blit(ms_arrow, p)
if s.game.magic_squares_feature.position >= 5:
p = [172,643]
screen.blit(ms_letter, p)
if s.game.magic_squares_feature.position >= 6:
p = [223,643]
screen.blit(ms_letter, p)
if s.game.magic_squares_feature.position >= 7:
p = [275,643]
screen.blit(ms_letter, p)
if s.game.magic_squares_feature.position >= 8:
p = [328,643]
screen.blit(ms_letter, p)
if s.game.magic_squares_feature.position == 9:
p = [380,643]
screen.blit(ms_letter, p)
if s.game.magic_squares_feature.position >= 5:
sf = s.game.selection_feature.position
if sf <= 6:
p = [564,567]
screen.blit(time, p)
if sf == 1:
p = [539,593]
screen.blit(s_arrow, p)
if sf == 2:
p = [540,556]
screen.blit(s_arrow, p)
if sf == 3:
p = [539,519]
screen.blit(s_arrow, p)
if sf == 4:
p = [538,482]
screen.blit(s_arrow, p)
if sf == 5:
p = [539,443]
screen.blit(s_arrow, p)
if sf == 6:
p = [539,409]
screen.blit(s_arrow, p)
if s.game.ball_count.position == 3:
s.cancel_delayed(name="blink")
blink([s,1,1])
else:
s.cancel_delayed(name="blink")
if sf == 3 or sf == 4:
p = [33,932]
screen.blit(rollover, p)
p = [567,495]
screen.blit(time, p)
if sf == 5 or sf == 6:
p = [631,929]
screen.blit(rollover, p)
p = [565,418]
screen.blit(time, p)
if sf == 7 or sf == 8:
if sf == 7:
p = [539,369]
screen.blit(s_arrow, p)
if sf == 8:
p = [539,333]
screen.blit(s_arrow, p)
p = [563,344]
screen.blit(time, p)
if s.game.ball_count.position == 4:
s.cancel_delayed(name="blink")
blink([s,1,1])
else:
s.cancel_delayed(name="blink")
if sf == 9:
p = [539,297]
screen.blit(s_arrow, p)
p = [565,272]
screen.blit(time, p)
if s.game.ball_count.position == 5:
s.cancel_delayed(name="blink")
blink([s,1,1])
else:
s.cancel_delayed(name="blink")
if s.game.corners.status == True:
p = [37,493]
screen.blit(corners, p)
if s.game.ballyhole.status == True:
p = [37,569]
screen.blit(ballyhole, p)
if s.game.extra_ball.position >= 1:
p = [135,998]
screen.blit(eb_number, p)
if s.game.extra_ball.position >= 2:
p = [186,998]
screen.blit(eb, p)
if s.game.extra_ball.position >= 3:
p = [253,999]
screen.blit(eb, p)
if s.game.extra_ball.position >= 4:
p = [323,998]
screen.blit(eb_number, p)
if s.game.extra_ball.position >= 5:
p = [375,998]
screen.blit(eb, p)
if s.game.extra_ball.position >= 6:
p = [441,999]
screen.blit(eb, p)
if s.game.extra_ball.position >= 7:
p = [514,998]
screen.blit(eb_number, p)
if s.game.extra_ball.position >= 8:
p = [565,997]
screen.blit(eb, p)
if s.game.extra_ball.position == 9:
p = [631,996]
screen.blit(eb, p)
if s.game.eb_play.status == True:
p = [23,997]
screen.blit(extra_balls, p)
if s.game.red_odds.position == 1:
p = [29,761]
screen.blit(red_odds1, p)
elif s.game.red_odds.position == 2:
p = [140,714]
screen.blit(red_odds2, p)
elif s.game.red_odds.position == 3:
p = [197,714]
screen.blit(red_odds3, p)
elif s.game.red_odds.position == 4:
p = [284,704]
screen.blit(red_odds4, p)
elif s.game.red_odds.position == 5:
p = [403,709]
screen.blit(red_odds5, p)
elif s.game.red_odds.position == 6:
p = [493,713]
screen.blit(red_odds6, p)
elif s.game.red_odds.position == 7:
p = [546,713]
screen.blit(red_odds7, p)
elif s.game.red_odds.position == 8:
p = [676,745]
screen.blit(red_odds8, p)
if s.game.orange_odds.position == 1:
p = [26,817]
screen.blit(orange_odds1, p)
elif s.game.orange_odds.position == 2:
p = [145,787]
screen.blit(orange_odds2, p)
elif s.game.orange_odds.position == 3:
p = [187,786]
screen.blit(orange_odds3, p)
elif s.game.orange_odds.position == 4:
p = [274,776]
screen.blit(orange_odds4, p)
elif s.game.orange_odds.position == 5:
p = [401,776]
screen.blit(orange_odds5, p)
elif s.game.orange_odds.position == 6:
p = [509,785]
screen.blit(orange_odds6, p)
elif s.game.orange_odds.position == 7:
p = [551,780]
screen.blit(orange_odds7, p)
elif s.game.orange_odds.position == 8:
p = [665,819]
screen.blit(orange_odds8, p)
if s.game.yellow_odds.position == 1:
p = [36,879]
screen.blit(yellow_odds1, p)
elif s.game.yellow_odds.position == 2:
p = [147,868]
screen.blit(yellow_odds2, p)
elif s.game.yellow_odds.position == 3:
p = [193,859]
screen.blit(yellow_odds3, p)
elif s.game.yellow_odds.position == 4:
p = [277,873]
screen.blit(yellow_odds4, p)
elif s.game.yellow_odds.position == 5:
p = [400,872]
screen.blit(yellow_odds5, p)
elif s.game.yellow_odds.position == 6:
p = [502,851]
screen.blit(yellow_odds6, p)
elif s.game.yellow_odds.position == 7:
p = [552,849]
screen.blit(yellow_odds7, p)
elif s.game.yellow_odds.position == 8:
p = [665,878]
screen.blit(yellow_odds8, p)
if s.game.tilt.status == True:
tilt_position = [59,405]
screen.blit(tilt, tilt_position)
pygame.display.update()
def blink(args):
dirty_rects = []
s = args[0]
b = args[1]
sn = args[2]
if b == 0:
if sn == 1:
p = [556,649]
dirty_rects.append(screen.blit(select_now, p))
pygame.display.update(dirty_rects)
else:
dirty_rects.append(screen.blit(bg_gi, (556,649), pygame.Rect(556,649,156,43)))
pygame.display.update(dirty_rects)
b = not b
args = [s,b,sn]
s.delay(name="blink", delay=0.1, handler=blink, param=args)
def squarea_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
square = args[2]
if square == 1:
p = [223,334]
if s.game.square_a.position == 0:
image = a3
topleft = a_2
topright = a_4
bottomleft = a_1
bottomright = a_3
elif s.game.square_a.position == 1:
image = a0
topleft = a_1
topright = a_2
bottomleft = a_3
bottomright = a_4
elif s.game.square_a.position == 2:
image = a1
topleft = a_3
topright = a_1
bottomleft = a_4
bottomright = a_2
else:
image = a2
topleft = a_4
topright = a_3
bottomleft = a_2
bottomright = a_1
rect = pygame.Rect(p[0],p[1],200,200)
#letter A
if square == 1:
dirty_rects.append(screen.blit(topleft, (242 - num - 20, 338)))
dirty_rects.append(screen.blit(topright, (279, 347 - num - 10)))
dirty_rects.append(screen.blit(bottomright, (273 + num + 15, 389)))
dirty_rects.append(screen.blit(bottomleft, (225, 390 + num + 5)))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],130,130)))
else:
dirty_rects.append(screen.blit(bg_off, p, pygame.Rect(p[0],p[1],130,130)))
pygame.display.update(dirty_rects)
def squareb_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
square = args[2]
if square == 2:
p = [220,443]
if s.game.square_b.position == 0:
image = b3
topleft = b_2
topright = b_4
bottomleft = b_1
bottomright = b_3
elif s.game.square_b.position == 1:
image = b0
topleft = b_1
topright = b_2
bottomleft = b_3
bottomright = b_4
elif s.game.square_b.position == 2:
image = b1
topleft = b_3
topright = b_1
bottomleft = b_4
bottomright = b_2
else:
image = b2
topleft = b_4
topright = b_3
bottomleft = b_2
bottomright = b_1
rect = pygame.Rect(p[0],p[1],200,200)
if square == 2:
dirty_rects.append(screen.blit(topleft, (238 - num - 20, 447)))
dirty_rects.append(screen.blit(topright, (276, 451 - num - 9)))
dirty_rects.append(screen.blit(bottomright, (276 + num + 10, 499)))
dirty_rects.append(screen.blit(bottomleft, (222, 501 + num + 8)))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],130,130)))
else:
dirty_rects.append(screen.blit(bg_off, p, pygame.Rect(p[0],p[1],130,130)))
pygame.display.update(dirty_rects)
def squarec_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
square = args[2]
if square == 3:
p = [332,334]
if s.game.square_c.position == 0:
image = c3
topleft = c_2
topright = c_4
bottomleft = c_1
bottomright = c_3
elif s.game.square_c.position == 1:
image = c0
topleft = c_1
topright = c_2
bottomleft = c_3
bottomright = c_4
elif s.game.square_c.position == 2:
image = c1
topleft = c_3
topright = c_1
bottomleft = c_4
bottomright = c_2
else:
image = c2
topleft = c_4
topright = c_3
bottomleft = c_2
bottomright = c_1
rect = pygame.Rect(p[0],p[1],200,200)
if square == 3:
dirty_rects.append(screen.blit(topleft, (343 - num - 10, 337)))
dirty_rects.append(screen.blit(topright, (387, 338 - num - 5)))
dirty_rects.append(screen.blit(bottomright, (379 + num + 15, 390)))
dirty_rects.append(screen.blit(bottomleft, (334, 390 + num + 6)))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],130,130)))
else:
dirty_rects.append(screen.blit(bg_off, p, pygame.Rect(p[0],p[1],130,130)))
pygame.display.update(dirty_rects)
def squared_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
square = args[2]
p = [332,440]
if s.game.square_d.position == 0:
image = d3
topleft = d_2
topright = d_4
bottomleft = d_1
bottomright = d_3
elif s.game.square_d.position == 1:
image = d0
topleft = d_1
topright = d_2
bottomleft = d_3
bottomright = d_4
elif s.game.square_d.position == 2:
image = d1
topleft = d_3
topright = d_1
bottomleft = d_4
bottomright = d_2
else:
image = d2
topleft = d_4
topright = d_3
bottomleft = d_2
bottomright = d_1
rect = pygame.Rect(p[0],p[1],200,200)
if square == 4:
dirty_rects.append(screen.blit(topleft, (341 - num - 10, 444)))
dirty_rects.append(screen.blit(topright, (387, 452 - num - 13)))
dirty_rects.append(screen.blit(bottomright, (364 + num + 29, 498)))
dirty_rects.append(screen.blit(bottomleft, (337, 497 + num + 12)))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],130,130)))
else:
dirty_rects.append(screen.blit(bg_off, p, pygame.Rect(p[0],p[1],130,130)))
pygame.display.update(dirty_rects)
def squaree_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
square = args[2]
p = [223,544]
if s.game.square_e.position == 0:
image = e3
topleft = e_2
topright = e_3
bottomleft = e_4
bottomright = e_1
elif s.game.square_e.position == 1:
image = e0
topleft = e_1
topright = e_2
bottomleft = e_3
bottomright = e_4
elif s.game.square_e.position == 2:
image = e1
topleft = e_4
topright = e_1
bottomleft = e_2
bottomright = e_3
else:
image = e2
topleft = e_3
topright = e_4
bottomleft = e_1
bottomright = e_2
rect = pygame.Rect(p[0],p[1],200,200)
#images are actually rendered left-right, but keeping naming convention in case I want to add some fancier rotation
dirty_rects.append(screen.blit(topleft, (231 - num - 10, 551)))
if num > -40:
dirty_rects.append(screen.blit(topright, (277, 549 - num)))
else:
dirty_rects.append(screen.blit(topright, (332, 607 + num)))
dirty_rects.append(screen.blit(bottomleft, (332 - num - 10, 549)))
if num > -40:
dirty_rects.append(screen.blit(bottomright, (387, 551 - num)))
else:
dirty_rects.append(screen.blit(bottomright, (223, 607 + num)))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],220,100)))
else:
dirty_rects.append(screen.blit(bg_off, p, pygame.Rect(p[0],p[1],220,100)))
pygame.display.update(dirty_rects)
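# eb_animation drives the extra-ball lamp chase: every lamp that has not yet been
# awarded is first erased back to the backglass, and then, depending on the animation
# tick `num`, one of the remaining unlit lamps is flashed briefly.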
def eb_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
if s.game.extra_ball.position < 1:
dirty_rects.append(screen.blit(bg_gi, (135,998), pygame.Rect(135,998,56,35)))
if s.game.extra_ball.position < 2:
dirty_rects.append(screen.blit(bg_gi, (186,998), pygame.Rect(186,998,73,35)))
if s.game.extra_ball.position < 3:
dirty_rects.append(screen.blit(bg_gi, (253,999), pygame.Rect(253,999,73,35)))
if s.game.extra_ball.position < 4:
dirty_rects.append(screen.blit(bg_gi, (323,998), pygame.Rect(323,998,56,35)))
if s.game.extra_ball.position < 5:
dirty_rects.append(screen.blit(bg_gi, (375,998), pygame.Rect(375,998,73,35)))
if s.game.extra_ball.position < 6:
dirty_rects.append(screen.blit(bg_gi, (441,999), pygame.Rect(441,999,73,35)))
if s.game.extra_ball.position < 7:
dirty_rects.append(screen.blit(bg_gi, (514,998), pygame.Rect(514,998,56,35)))
if s.game.extra_ball.position < 8:
dirty_rects.append(screen.blit(bg_gi, (565,997), pygame.Rect(565,997,73,35)))
if s.game.extra_ball.position < 9:
dirty_rects.append(screen.blit(bg_gi, (631,996), pygame.Rect(631,996,73,35)))
pygame.display.update(dirty_rects)
if num in [0,24,25,49,14,39]:
if s.game.extra_ball.position < 1:
p = [135,998]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [1,15,26,40]:
if s.game.extra_ball.position < 2:
p = [186,998]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [3,4,17,28,29,42]:
if s.game.extra_ball.position < 3:
p = [253,999]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [5,18,30,43]:
if s.game.extra_ball.position < 4:
p = [323,998]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [7,8,19,32,33,44]:
if s.game.extra_ball.position < 5:
p = [375,998]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [9,10,20,34,35,45]:
if s.game.extra_ball.position < 6:
p = [441,999]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [11,21,36,46]:
if s.game.extra_ball.position < 7:
p = [514,998]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [12,22,37,47]:
if s.game.extra_ball.position < 8:
p = [565,997]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [2,6,13,16,23,27,31,38,41,48]:
if s.game.extra_ball.position < 9:
p = [631,996]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
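# clear_odds/clear_features repaint the backglass over every odds or feature lamp that
# is not currently lit, and draw_odds_animation/draw_feature_animation then flash one
# of the unlit lamps selected by the tick value `num`. The odds_animation,
# feature_animation and both_animation entry points simply chain a clear with the
# matching draw call on every tick, so lit positions stay steady while the others
# flicker during the spin.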
def clear_odds(s, num):
global screen
dirty_rects = []
if s.game.yellow_odds.position != 1:
dirty_rects.append(screen.blit(bg_gi, (36,879), pygame.Rect(36,879,28,53)))
if s.game.yellow_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (147,868), pygame.Rect(147,868,28,53)))
if s.game.yellow_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (193,859), pygame.Rect(193,859,28,53)))
if s.game.yellow_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (277,873), pygame.Rect(277,873,28,53)))
if s.game.yellow_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (400,872), pygame.Rect(400,872,37,53)))
if s.game.yellow_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (502,851), pygame.Rect(502,851,37,53)))
if s.game.yellow_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (552,849), pygame.Rect(552,849,37,53)))
if s.game.yellow_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (665,878), pygame.Rect(665,878,37,53)))
if s.game.red_odds.position != 1:
dirty_rects.append(screen.blit(bg_gi, (29,761), pygame.Rect(29,761,26,53)))
if s.game.red_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (140,714), pygame.Rect(140,714,28,53)))
if s.game.red_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (197,714), pygame.Rect(197,714,28,53)))
if s.game.red_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (284,704), pygame.Rect(284,704,28,53)))
if s.game.red_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (403,709), pygame.Rect(403,709,37,53)))
if s.game.red_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (493,713), pygame.Rect(493,713,37,53)))
if s.game.red_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (546,713), pygame.Rect(546,713,37,53)))
if s.game.red_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (676,745), pygame.Rect(676,745,37,53)))
if s.game.orange_odds.position != 1:
dirty_rects.append(screen.blit(bg_gi, (26,817), pygame.Rect(26,817,28,53)))
if s.game.orange_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (145,787), pygame.Rect(145,787,28,53)))
if s.game.orange_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (187,786), pygame.Rect(187,786,28,53)))
if s.game.orange_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (274,776), pygame.Rect(274,776,28,53)))
if s.game.orange_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (401,776), pygame.Rect(401,776,37,53)))
if s.game.orange_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (509,785), pygame.Rect(509,785,37,53)))
if s.game.orange_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (551,780), pygame.Rect(551,780,37,53)))
if s.game.orange_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (665,819), pygame.Rect(665,819,37,53)))
pygame.display.update(dirty_rects)
def draw_odds_animation(s, num):
global screen
dirty_rects = []
if num in [2,7,36,27,32,11]:
if s.game.yellow_odds.position != 1:
p = [36,879]
dirty_rects.append(screen.blit(yellow_odds1, p))
pygame.display.update(dirty_rects)
return
if num in [47,23]:
if s.game.yellow_odds.position != 2:
p = [147,868]
dirty_rects.append(screen.blit(yellow_odds2, p))
pygame.display.update(dirty_rects)
return
if num in [8,31]:
if s.game.yellow_odds.position != 3:
p = [193,859]
dirty_rects.append(screen.blit(yellow_odds3, p))
pygame.display.update(dirty_rects)
return
if num in [25,50]:
if s.game.yellow_odds.position != 4:
p = [277,873]
dirty_rects.append(screen.blit(yellow_odds4, p))
pygame.display.update(dirty_rects)
return
if num in [3,28]:
if s.game.yellow_odds.position != 5:
p = [400,872]
dirty_rects.append(screen.blit(yellow_odds5, p))
pygame.display.update(dirty_rects)
return
if num in [14,30]:
if s.game.yellow_odds.position != 6:
p = [502,851]
dirty_rects.append(screen.blit(yellow_odds6, p))
pygame.display.update(dirty_rects)
return
if num in [18,43]:
if s.game.yellow_odds.position != 7:
p = [552,849]
dirty_rects.append(screen.blit(yellow_odds7, p))
pygame.display.update(dirty_rects)
return
if num in [16,41]:
if s.game.yellow_odds.position != 8:
p = [665,878]
dirty_rects.append(screen.blit(yellow_odds8, p))
pygame.display.update(dirty_rects)
return
if num in [6,31]:
if s.game.red_odds.position != 1:
p = [29,761]
dirty_rects.append(screen.blit(red_odds1, p))
pygame.display.update(dirty_rects)
return
if num in [1,26]:
if s.game.red_odds.position != 2:
p = [140,714]
dirty_rects.append(screen.blit(red_odds2, p))
pygame.display.update(dirty_rects)
return
if num in [12,37]:
if s.game.red_odds.position != 3:
p = [197,714]
dirty_rects.append(screen.blit(red_odds3, p))
pygame.display.update(dirty_rects)
return
if num in [20,45]:
if s.game.red_odds.position != 4:
p = [284,704]
dirty_rects.append(screen.blit(red_odds4, p))
pygame.display.update(dirty_rects)
return
if num in [9,34]:
if s.game.red_odds.position != 5:
p = [403,709]
dirty_rects.append(screen.blit(red_odds5, p))
pygame.display.update(dirty_rects)
return
if num in [0,26]:
if s.game.red_odds.position != 6:
p = [493,713]
dirty_rects.append(screen.blit(red_odds6, p))
pygame.display.update(dirty_rects)
return
if num in [15,40]:
if s.game.red_odds.position != 7:
p = [546,713]
dirty_rects.append(screen.blit(red_odds7, p))
pygame.display.update(dirty_rects)
return
if num in [2,27]:
if s.game.red_odds.position != 8:
p = [676,745]
dirty_rects.append(screen.blit(red_odds8, p))
pygame.display.update(dirty_rects)
return
if num in [5,30]:
if s.game.orange_odds.position != 1:
p = [26,817]
dirty_rects.append(screen.blit(orange_odds1, p))
pygame.display.update(dirty_rects)
return
if num in [19,44]:
if s.game.orange_odds.position != 2:
p = [145,787]
dirty_rects.append(screen.blit(orange_odds2, p))
pygame.display.update(dirty_rects)
return
if num in [17,42]:
if s.game.orange_odds.position != 3:
p = [187,786]
dirty_rects.append(screen.blit(orange_odds3, p))
pygame.display.update(dirty_rects)
return
if num in [22,47]:
if s.game.orange_odds.position != 4:
p = [274,776]
dirty_rects.append(screen.blit(orange_odds4, p))
pygame.display.update(dirty_rects)
return
if num in [13,38]:
if s.game.orange_odds.position != 5:
p = [401,776]
dirty_rects.append(screen.blit(orange_odds5, p))
pygame.display.update(dirty_rects)
return
if num in [7,32]:
if s.game.orange_odds.position != 6:
p = [509,785]
dirty_rects.append(screen.blit(orange_odds6, p))
pygame.display.update(dirty_rects)
return
if num in [4,29]:
if s.game.orange_odds.position != 7:
p = [551,780]
dirty_rects.append(screen.blit(orange_odds7, p))
pygame.display.update(dirty_rects)
return
if num in [11,36]:
if s.game.orange_odds.position != 8:
p = [665,819]
dirty_rects.append(screen.blit(orange_odds8, p))
pygame.display.update(dirty_rects)
return
def odds_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_odds(s, num)
draw_odds_animation(s, num)
def clear_features(s, num):
global screen
dirty_rects = []
if s.game.magic_squares_feature.position < 5:
dirty_rects.append(screen.blit(bg_gi, (172,643), pygame.Rect(172,643,56,56)))
if s.game.magic_squares_feature.position < 6:
dirty_rects.append(screen.blit(bg_gi, (223,643), pygame.Rect(223,643,56,56)))
if s.game.magic_squares_feature.position < 7:
dirty_rects.append(screen.blit(bg_gi, (275,643), pygame.Rect(275,643,56,56)))
if s.game.magic_squares_feature.position < 8:
dirty_rects.append(screen.blit(bg_gi, (328,643), pygame.Rect(328,643,56,56)))
if s.game.magic_squares_feature.position < 9:
dirty_rects.append(screen.blit(bg_gi, (380,643), pygame.Rect(380,643,56,56)))
if s.game.selection_feature.position not in [3,4]:
dirty_rects.append(screen.blit(bg_gi, (33,932), pygame.Rect(33,932,63,59)))
dirty_rects.append(screen.blit(bg_gi, (567,495), pygame.Rect(567,495,144,76)))
if s.game.selection_feature.position not in [5,6]:
dirty_rects.append(screen.blit(bg_gi, (631,929), pygame.Rect(631,929,63,59)))
dirty_rects.append(screen.blit(bg_gi, (565,418), pygame.Rect(565,418,144,76)))
if s.game.ballyhole.status == False:
dirty_rects.append(screen.blit(bg_gi, (37,569), pygame.Rect(37,569,124,74)))
if s.game.corners.status == False:
dirty_rects.append(screen.blit(bg_gi, (37,493), pygame.Rect(37,493,124,74)))
if s.game.selection_feature.position not in [7,8]:
dirty_rects.append(screen.blit(bg_gi, (563,344), pygame.Rect(563,344,144,76)))
if s.game.selection_feature.position != 9:
dirty_rects.append(screen.blit(bg_gi, (565,272), pygame.Rect(565,272,144,76)))
pygame.display.update(dirty_rects)
def draw_feature_animation(s, num):
global screen
dirty_rects = []
if num in [10,20,25,35,45,50]:
if s.game.magic_squares_feature.position < 6:
p = [172,643]
dirty_rects.append(screen.blit(ms_letter, p))
p = [223,643]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
if num in [5,15,24,30,40,49]:
if s.game.magic_squares_feature.position < 7:
p = [275,643]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
if num in [17,23,42,48]:
if s.game.magic_squares_feature.position < 8:
p = [328,643]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
if num in [12,22,37,47]:
if s.game.magic_squares_feature.position < 9:
p = [380,643]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
if num in [8,18,33,43]:
if s.game.selection_feature.position not in [3,4]:
p = [33,932]
dirty_rects.append(screen.blit(rollover, p))
p = [567,495]
dirty_rects.append(screen.blit(time, p))
s.game.coils.redROLamp.pulse(85)
pygame.display.update(dirty_rects)
return
if num in [3,13,28,38]:
if s.game.selection_feature.position not in [5,6]:
p = [631,929]
dirty_rects.append(screen.blit(rollover, p))
p = [565,418]
dirty_rects.append(screen.blit(time, p))
s.game.coils.yellowROLamp.pulse(85)
pygame.display.update(dirty_rects)
return
if num in [4,14,29,39]:
if s.game.ballyhole.status == False:
p = [37,569]
dirty_rects.append(screen.blit(ballyhole, p))
pygame.display.update(dirty_rects)
return
if num in [9,19,34,44]:
if s.game.corners.status == False:
p = [37,493]
dirty_rects.append(screen.blit(corners, p))
pygame.display.update(dirty_rects)
return
if num in [11,21,36,46]:
if s.game.selection_feature.position not in [7,8]:
p = [563,344]
dirty_rects.append(screen.blit(time, p))
pygame.display.update(dirty_rects)
return
if num in [0,6,16,25,31,41]:
if s.game.selection_feature.position != 9:
p = [565,272]
dirty_rects.append(screen.blit(time, p))
pygame.display.update(dirty_rects)
return
def feature_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_features(s, num)
draw_feature_animation(s, num)
def both_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_features(s, num)
clear_odds(s, num)
draw_odds_animation(s, num)
draw_feature_animation(s, num)
|
"""
A script to estimate the HIV epidemic model parameters using ABC for the toy data.
"""
from sandbox.util.PathDefaults import PathDefaults
from wallhack.viroscopy.model.HIVModelUtils import HIVModelUtils
from wallhack.viroscopy.model.HIVABCParameters import HIVABCParameters
from sandbox.predictors.ABCSMC import ABCSMC
import os
import logging
import sys
import numpy
import multiprocessing
assert False, "Must run with -O flag"
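# Deliberate guard: the assert above fires unless Python is started with -O, which
# strips assert statements; per the message, the intent is to force optimized runs,
# presumably so heavier asserts inside the simulation code are skipped.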
if len(sys.argv) > 1:
numProcesses = int(sys.argv[1])
else:
numProcesses = multiprocessing.cpu_count()
FORMAT = "%(levelname)s:root:%(process)d:%(message)s"
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format=FORMAT)
logging.debug("Number of processes: " + str(numProcesses))
numpy.set_printoptions(suppress=True, precision=4, linewidth=150)
numpy.seterr(invalid='raise')
resultsDir = PathDefaults.getOutputDir() + "viroscopy/toy/"
startDate, endDate, recordStep, M, targetGraph = HIVModelUtils.toySimulationParams()
N, matchAlpha, breakScale, numEpsilons, epsilon, minEpsilon, matchAlg, abcMaxRuns, batchSize, pertScale = HIVModelUtils.toyABCParams()
logging.debug("Total time of simulation is " + str(endDate-startDate))
logging.debug("Posterior sample size " + str(N))
epsilonArray = numpy.ones(numEpsilons)*epsilon
breakSize = (targetGraph.subgraph(targetGraph.removedIndsAt(endDate)).size - targetGraph.subgraph(targetGraph.removedIndsAt(startDate)).size) * breakScale
logging.debug("Largest acceptable graph is " + str(breakSize))
def createModel(t):
"""
    The parameter t is the particle index (unused here; the same model is created for every particle).
"""
return HIVModelUtils.createModel(targetGraph, startDate, endDate, recordStep, M, matchAlpha, breakSize, matchAlg)
meanTheta, sigmaTheta, pertTheta = HIVModelUtils.toyTheta()
abcParams = HIVABCParameters(meanTheta, sigmaTheta, pertTheta)
thetaDir = resultsDir + "theta/"
if not os.path.exists(thetaDir):
os.mkdir(thetaDir)
logging.debug((meanTheta, sigmaTheta))
abcSMC = ABCSMC(epsilonArray, createModel, abcParams, thetaDir, True, minEpsilon=minEpsilon)
abcSMC.setPosteriorSampleSize(N)
abcSMC.batchSize = batchSize
abcSMC.maxRuns = abcMaxRuns
abcSMC.setNumProcesses(numProcesses)
abcSMC.pertScale = pertScale
thetasArray = abcSMC.run()
meanTheta = numpy.mean(thetasArray, 0)
stdTheta = numpy.std(thetasArray, 0)
logging.debug(thetasArray)
logging.debug("meanTheta=" + str(meanTheta))
logging.debug("stdTheta=" + str(stdTheta))
logging.debug("realTheta=" + str(HIVModelUtils.toyTheta()[0]))
logging.debug("Final epsilon array: " + str(abcSMC.epsilonArray))
logging.debug("Number of ABC runs: " + str(abcSMC.numRuns))
logging.debug("All done!")
|
import sys
sys.path.append("../")
import settings
from modules import manager
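# Dumps every database on the local PostgreSQL instance to `filepath` with pg_dumpall
# (compressed) and, when the four mail arguments are supplied, sends the dump file
# through Gmail as an attachment.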
def main():
"""
python postgresql_backup.py filepath <from gmail address> <to address> <gmail username> <gmail application password>
"""
    manage = manager.init(settings=settings)
    postgresql = settings.Postgresql("postgresql", manage.dataStore.get("localhost"))
    argvs = sys.argv
    filepath = argvs[1]
    if len(argvs) == 6:
        fromAddr = argvs[2]
        toAddr = argvs[3]
        username = argvs[4]
        password = argvs[5]
    else:
        fromAddr = None
    print "Running diagnosis"
    result = manage.dataStore.get("localhost").diagnosis()
    print "Running pg_dumpall"
    print postgresql.lowlevel.pg_dumpall(filename=filepath, withCompress=True)
    if fromAddr is not None:
        import datetime
        ymd = datetime.date.today().strftime("%Y%m%d")
        print "Sending mail"
        mailer = settings.mailer.Mailer("mailer", manage.dataStore.get("localhost"), fromAddr)
        mailer.sendByGmail(toAddr, username, password, "PostgreSQL Backup data " + ymd,
                           "This is the PostgreSQL backup taken on " + ymd, attachFilenames=[filepath])
if __name__ == '__main__':
main()
|
from __future__ import division, print_function
from argparse import ArgumentParser, SUPPRESS
from atom_tools import rmsdDist
import copy
from glob import glob
from lineMD import eventLoop, determineSplit, getFinishedRuns, stitchTrajectory # and exportRestarts
from numpy import zeros
from operator import attrgetter
import random
from shared import *
import shutil
from stat import S_IEXEC
from time import strftime
__author__ = "Charles Yuan"
__license__ = "GPL"
__version__ = "2.0"
__email__ = "charlesyuan314@gmail.com"
__status__ = "Development"
def main():
"""Calls major subroutines, prepares files, and prints messages. Hands control over to eventLoop afterwards."""
parse()
log(CYAN + BOLD + "lineMD" + END + CYAN + " version " + MAGENTA + str(__version__) + CYAN + " starting up on "
+ MAGENTA + "%s.\n" % args.queue_name + END)
log("Configured for " + MAGENTA + str(args.precision) + END + " decimal places.\n" + END)
log("Using topology at " + UNDERLINE + "%s\n" % args.prmtop + END)
log("Using coordinates at " + UNDERLINE + "%s\n" % args.coord + END)
log("Using reference topology at " + UNDERLINE + "%s\n" % args.refprmtop + END)
log("Using reference coordinates at " + UNDERLINE + "%s\n" % args.ref + END)
log("Using " + MAGENTA + str(args.steps) + END + " timesteps per simulation.\n")
log(MAGENTA + str(args.sample) + END + " samples will be taken per run.\n")
if args.restart_out is not None:
log(MAGENTA + str(args.frame) + END + " frames will be in every run upon completion.\n")
log(BLUE + "Simulation will end when RMSD is within " + MAGENTA + "%.*f" % (args.precision, args.min) + BLUE +
" angstroms.\n" + END)
log(BLUE + "Runs past " + MAGENTA + "%.*f" % (args.precision, args.max) + BLUE +
" angstroms in the opposite direction will be rejected.\n" + END)
log("Dynamic explored counts are ")
if args.adjust:
log(MAGENTA + "enabled.\n" + END)
else:
log(MAGENTA + "disabled.\n" + END)
runInit = not prep()
global COORDPATH
if not os.path.isfile(WORKDIR + "/init.rst.gz"):
system("gzip -c %s > init.rst.gz" % COORDPATH)
COORDPATH = WORKDIR + "/init.rst.gz"
if not os.path.isfile(WORKDIR + "/reference.pdb"):
with open("ptraj.in", 'w') as script:
script.write("parm %s\n" % REFPRMTOPPATH)
script.write("trajin %s 1 1 1\n" % args.ref)
script.write("trajout reference.pdb pdb\n")
system("cpptraj < ptraj.in > /dev/null 2> /dev/null")
calcRefCoords()
if args.stitch:
log("Reading cluster information.\n")
readClusterInfo(readExplored=False) # print, explored count is unnecessary
stitchTrajectory()
sys.exit(0)
if runInit:
init()
if args.migrate:
log("Reading cluster information.\n")
readClusterInfo(silent=True, readInfo=False, readRuns=False, readExplored=False) # Read nothing
determineSplit()
readClusterInfo(readExplored=False) # do not read explored
# move runs for each cluster
for cluster in sorted([c for c in CLUSTERS.values() if c.ID != 'R'], key=attrgetter("dist")):
migrateRuns([run.ID for run in cluster.runs.values()], cluster)
if not os.path.isdir(WORKDIR + "/CR"):
fail(RED + UNDERLINE + "Error:" + END + RED + " the running cluster is missing.\n" + END)
elif not runInit:
finishedRuns = getFinishedRuns()
if finishedRuns:
analysis(finishedRuns)
else:
analysis([])
if args.loop > 0:
eventLoop()
def parse():
"""Prepare the argument parser."""
parser = ArgumentParser(description="execute linear MD simulations with AMBER.")
parser.add_argument("--prmtop", '-p', help="AMBER topology file", type=str, action=FullPath, required=True)
parser.add_argument("--coord", '-c', help="restart file from equilibration", type=str, action=FullPath)
parser.add_argument("--ref", help="reference coordinate file at endpoint", type=str, action=FullPath, required=True)
parser.add_argument("--refprmtop", help="topology for reference file", type=str, action=FullPath)
parser.add_argument("--min", help="endpoint RMSD", action="store", type=float)
parser.add_argument("--max", help="maximum RMSD change permitted (traveling in incorrect direction)",
action="store", type=float)
parser.add_argument("--bin", '-b',
help="width, in angstroms, per bin. "
"Ensure that both max and min are multiples of this width, and do not change"
" even if splits have occurred.", action="store", type=float)
parser.add_argument("--steps", '-s', help="nstlim value for AMBER config (default is 50000). Should be a "
"multiple of the \"--sample\" and \"--frame\" parameters.",
type=int, action="store", default=50000)
parser.add_argument("--sample", '-w', help="number of sample frames desired per run during execution "
"(default is 100)", type=int, action="store", default=100)
parser.add_argument("--frame", '-f', help="number of frames desired per run in the output (default is 100)",
type=int, action="store", default=100)
parser.add_argument("--threads", '-t', help="number of simultaneous runs", type=int, action="store", default=1)
parser.add_argument("--queue_name", '-q', help="queue name", type=str, required=True)
parser.add_argument("--loop", '-l', help="number of seconds before the loop checks status. "
"Set to 0 to disable the loop.", type=int, action="store", default=30)
parser.add_argument("--migrate", help=SUPPRESS, action="store_true")
parser.add_argument("--split", help=SUPPRESS, action="store_true")
parser.add_argument("--adjust", help="use the dynamic explored count (experimental)", action="store_true")
parser.add_argument("--precision", help="number of decimal places used in calculations. "
"Increase to allow more splits. Less than 8 recommended.",
type=int, action="store", default=6)
parser.add_argument("--stitch", help="do nothing but stitch the trajectory", action="store_true")
parser.add_argument("--trash", help="specify this directory to hold runs that have been deleted", type=str,
action=FullPath)
parser.add_argument("--log", '-o', help="log output file", type=str, action=FullPath)
parser.add_argument("--restart_out", '-r', help="specify this path to save the restarts from the final trajectory"
" and stitch longer trajectories using the --frame parameter",
type=str, action=FullPath)
parser.add_argument("--segments", '-g', help="Python list containing tuples representing segments to be processed;"
" each tuple specifies a begin and end residue for the segment "
"(inclusive)",
type=str, action="store")
global args
args = parser.parse_args()
def prep():
"""Sets up variables for the entire script. Also detects whether analysis runs are necessary and
returns a boolean to indicate this."""
global WORKDIR
WORKDIR = os.getcwd()
global PAUSE
if args.loop <= 0:
PAUSE = 1
else:
PAUSE = args.loop
global BINWIDTH
BINWIDTH = round(args.bin, args.precision)
global RUNNING # Number of threads currently running
RUNNING = 0
global THREADS
THREADS = args.threads
global CLUSTERS # Dictionary of Cluster IDs and clusters
CLUSTERS = {}
global RUNANALYSIS
RUNANALYSIS = False
if os.path.exists(WORKDIR + "/C0_0") or os.path.exists(WORKDIR + "/CR"):
RUNANALYSIS = True
# Basic verifications
if args.prmtop is None or args.coord is None:
fail(RED + UNDERLINE + "Error:" + END + RED + " please provide the topology and coordinate files.\n" + END)
if not os.path.splitext(args.coord)[1].lower() == ".rst":
fail(RED + UNDERLINE + "Error:" + END + RED +
" coordinate file extension is invalid. Please specify a formatted RST file.\n" + END)
if RUNANALYSIS and args.max is None:
fail(RED + UNDERLINE + "Error:" + END + RED + " Please provide the maximum RMSD.\n" + END)
if RUNANALYSIS and args.min is None:
fail(RED + UNDERLINE + "Error:" + END + RED + " Please provide the minimum RMSD.\n" + END)
global PRMTOPPATH
global REFPRMTOPPATH
global COORDPATH
global TRASHPATH
global RESTARTPATH
# Path modifications
if args.prmtop is not None and not os.path.isabs(args.prmtop):
PRMTOPPATH = WORKDIR + "/" + args.prmtop
else:
PRMTOPPATH = args.prmtop
if args.refprmtop is None:
REFPRMTOPPATH = PRMTOPPATH
elif args.refprmtop is not None and not os.path.isabs(args.refprmtop):
REFPRMTOPPATH = WORKDIR + "/" + args.refprmtop
else:
REFPRMTOPPATH = args.refprmtop
if args.coord is not None and not os.path.isabs(args.coord):
COORDPATH = WORKDIR + "/" + args.coord
else:
COORDPATH = args.coord
if args.restart_out is not None and not os.path.isabs(args.restart_out):
RESTARTPATH = WORKDIR + "/" + args.restart_out
else:
RESTARTPATH = args.restart_out
if args.restart_out is not None and not os.path.isdir(RESTARTPATH):
os.mkdir(RESTARTPATH)
if args.trash is not None and not os.path.isabs(args.trash):
TRASHPATH = WORKDIR + "/" + args.trash
else:
TRASHPATH = args.trash
if args.trash is not None and not os.path.isdir(TRASHPATH):
os.mkdir(TRASHPATH)
global NOPROGRESS # Number of times returned to the initial bin
NOPROGRESS = 0
global NOPROGRESSCUTOFF # If we have to return to the initial bin this many times, split the bins
NOPROGRESSCUTOFF = 10
if NOPROGRESSCUTOFF < THREADS:
NOPROGRESSCUTOFF = THREADS
global SPLIT # Number of times we have split the bins
SPLIT = 0
global SPLITMAX # Maximum number of times permitted for splitting bins.
# Determine SPLITMAX; this code is still experimental
tempBinWidth = BINWIDTH
SPLITMAX = 0
while True:
oldBinWidth = tempBinWidth
tempBinWidth = round(tempBinWidth / 2.0, args.precision)
if oldBinWidth == 2 * tempBinWidth: # Divide and round, then multiply. If not equal, then splitting should end
SPLITMAX += 1
else:
break
SPLITMAX -= 1
if SPLITMAX > 4:
SPLITMAX = 4
if SPLITMAX > 0:
log("Splitting will occur for " + MAGENTA + str(SPLITMAX) + END + " times maximum.\n")
else:
log("Splitting will not occur on such a small bin width.\n")
if args.steps / args.sample != int(args.steps / args.sample) or \
args.steps / args.frame != int(args.steps / args.frame):
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW +
" the sample or frame frequencies (--sample or --frame) do not divide evenly into the "
"total number of simulation timesteps (--steps). Proceed with caution.\n" + END)
if args.max / args.bin != int(args.max / args.bin) or \
args.min / args.bin != int(args.min / args.bin):
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW +
" the bin size (--bin) does not divide evenly into the "
"max or min distance (--max or --min). Proceed with caution.\n" + END)
global SEGMENTS
if args.segments is None:
SEGMENTS = []
log("Will process all segments.\n")
else:
error = RED + UNDERLINE + "Error:" + END + RED + " please provide a valid segment string.\n" + END
SEGMENTS = eval(args.segments)
if not isinstance(SEGMENTS, list):
fail(error)
for tup in SEGMENTS:
if not isinstance(tup, tuple) or len(tup) != 2 \
or not isinstance(tup[0], int) or not isinstance(tup[1], int) or tup[0] > tup[1]:
fail(error)
log("Will process segments ")
for tup in SEGMENTS:
log("%i to %i; " % (tup[0], tup[1]))
log("\n")
return RUNANALYSIS
class Run(object):
"""A Run object holds its own topology, input, output, and coordinate files. It can calculate its own
distance and knows its Cluster. Its folder format is R? where ? is the ID of the run. It can execute itself
with the execute() method. The folder holds a "run_info" file to store the path of the run from which input
coordinates were copied. A folder and run_info will automatically be created."""
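    # Illustrative on-disk layout (hypothetical IDs): WORKDIR/C0_0/R3/ containing
    # run_info, run.sh, qscript, line.in, begin.rst.gz, end.rst.gz, coord.nc.gz and
    # line.out.gz once the run has finished.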
_ID = 0 # chronologically assigned ID number, also part of name of folder
_UID = 0 # permanent ID, assigned on a rolling basis.
_frame = 0 # the number of frames in the coordinate file after processing
# Note: always provide the UID as Run.getNextUID() when initializing the Run,
# unless the Run can immediately readInfo() or create() with a run_info file.
_clusterID = '0_0' # the cluster with which this run is associated. May be a number or 'R'
_previous = 0 # the UID of the previous run, will be "initial" for the initial run
_explored = 0 # the number of times this run has been copied
    _dist = 0  # RMSD distance from the reference structure at this run's chosen endpoint frame
def __init__(self, ID=0, UID=0, clusterID='0_0', previous=None, explored=0, frame=0):
self._ID = int(ID)
self._clusterID = str(clusterID)
self._previous = previous
self._UID = int(UID)
self._explored = int(explored)
self._frame = int(frame)
def __str__(self):
return "<Run: ID %i, UID %i, cluster %s>" % (self._ID, self._UID, self._clusterID)
@property
def path(self): # directory holding trajectory files
return "%s/C%s/R%i" % (WORKDIR, self._clusterID, self._ID)
@property
def shortPath(self):
return "C%s/R%i" % (self._clusterID, self._ID)
@property
def ID(self):
return self._ID
@property
def UID(self):
return self._UID
@property
def clusterID(self):
return self._clusterID
@property
def previous(self):
return self._previous
@property
def explored(self):
return self._explored
@explored.setter
def explored(self, explored):
self._explored = explored
@staticmethod
def getNextUID():
"""Read the next UID from the currentUID file and update the file."""
newUID = 0 # default
# Try opening the existing currentUID file
try:
with open(WORKDIR + "/currentUID") as currentUID:
info = []
for line in currentUID:
info.append(int(line))
newUID = info[0] + 1
except IOError:
pass # No existing file
# Write a new currentUID file
with open(WORKDIR + "/currentUID", 'w') as currentUID:
currentUID.write(str(newUID) + '\n')
# Set the UID
return newUID
@staticmethod
def move(r, c):
"""Move the run r from wherever it is to the cluster c, compressing files if necessary. Returns the new run."""
if r.clusterID == c.ID: # Already in that cluster
return r
# Find the next appropriate ID
runDirectories = [int(name[1:]) for name in os.listdir(c.path) if
os.path.isdir(os.path.join(c.path, name))]
if runDirectories:
newID = max(runDirectories) + 1
else:
newID = 0
new = Run(ID=newID, clusterID=c.ID) # No UID because we will create with run_info
coord = None
outFile = None
inFile = None
info = None
endRestart = None
beginRestart = None
with directory(r.path):
if os.path.isfile("end.rst"):
compress("end.rst")
if os.path.isfile("end.rst.gz"):
endRestart = r.path + "/end.rst.gz"
if os.path.isfile("coord.nc"):
compress("coord.nc")
if os.path.isfile("coord.nc.gz"):
coord = r.path + "/coord.nc.gz"
if os.path.isfile("line.out"):
compress("line.out")
if os.path.isfile("line.out.gz"):
outFile = r.path + "/line.out.gz"
if os.path.isfile("line.in"):
inFile = r.path + "/line.in"
if os.path.isfile("begin.rst"):
compress("begin.rst")
if os.path.isfile("begin.rst.gz"):
beginRestart = r.path + "/begin.rst.gz"
if os.path.isfile("run_info"):
info = r.path + "/run_info"
else:
fail(RED + UNDERLINE + "Error:" + END + RED + " run at %s has no run_info.\n" % r.path + END)
new.create(endRestart=endRestart, info=info, coord=coord, outFile=outFile, inFile=inFile,
beginRestart=beginRestart)
# Delete the old run
r.delete(trash=False)
# Change Cluster object data
if new.ID in c.runs:
fail(RED + UNDERLINE + "Error:" + END + RED + " the new run is already in the dictionary.\n" + END)
c.addRun(ID=new.ID, run=new)
return new
def execute(self):
"""Call a run from its script"""
global RUNNING
if RUNNING >= THREADS: # Reached maximum, abandon this run
self.delete(trash=False)
return
with directory(self.path):
if not os.path.isfile("./run.sh"):
log(RED + UNDERLINE + "Error:" + END + RED + " run.sh does not exist at %s.\n" % self.path + END)
system("./run.sh >> out 2>&1")
RUNNING += 1 # Increment the global counter
return
def check(self, coordName):
"""Verify that the run's ending coordinates are present"""
with directory(self.path):
if os.path.isfile(coordName):
size = os.stat(coordName)[6]
if size > 22 + len(coordName): # minimal file size for compressed files
return True
else:
return False # file is empty
else:
return False # file does not exist
def processDist(self):
"""Calculates the distance between the protein and ligand at the final frame and processes coordinate files.
Returns (dist, frame) of the smallest dist
"""
if self._dist == 0 or self._frame == 0:
# assume that an empty property means distance has not been calculated
global PRMTOPPATH
if self._clusterID == '0_0' and self._ID == 0: # This is initial, skip all that stuff
with open(self.path + "/frame_0.pdb") as pdb:
self._dist = rmsdDist(pdbLines=list(pdb), refCoords=REFCOORDS, segments=SEGMENTS)
self.writeInfo()
return self._dist, 0
else: # This is not initial
def getDist(fr):
with open(self.path + "/frame_%i.pdb" % fr) as thisPDB:
dist = rmsdDist(pdbLines=list(thisPDB), refCoords=REFCOORDS, segments=SEGMENTS)
return fr, dist
with directory(self.path):
sampleFrames = range(int(args.steps / args.sample), args.steps + int(args.steps / args.sample),
int(args.steps / args.sample))
                distances = parMap(getDist, sampleFrames, n=max(1, cpu_count() // 2))  # integer worker count (plain / is true division here via __future__)
# Get the right frame with min ending distance
minDistFrame, minDist = min(distances, key=itemgetter(1))
os.rename("frame_%i.rst" % minDistFrame, "end.rst")
compress("end.rst")
# recreate the coordinate file
with open("ptraj.in", 'w') as script:
script.write("parm %s\n" % PRMTOPPATH)
script.write("trajin coord.nc 1 %i 1\n" % int(minDistFrame / int(args.steps / args.sample)))
script.write("trajout coord_new.nc netcdf\n")
system("cpptraj < ptraj.in | gzip -f > ptraj.out.gz")
os.remove("coord.nc")
os.rename("coord_new.nc", "coord.nc")
compress("coord.nc")
self._dist = minDist
self._frame = minDistFrame
self.writeInfo()
return self._dist, self._frame
else:
return self._dist, self._frame
def createFolder(self):
"""Creates the top-level folder for this run"""
if not os.path.exists(self.path):
os.mkdir(self.path)
def create(self, beginRestart=None, endRestart=None, info=None, coord=None, outFile=None, inFile=None,
initial=False):
"""Creates the folder structure, writes scripts, and copies compressed input files for a run."""
global PRMTOPPATH
self.createFolder()
with directory(self.path):
if beginRestart is not None:
shutil.copy(beginRestart, "begin.rst.gz")
if endRestart is not None:
shutil.copy(endRestart, "end.rst.gz")
if info is not None: # Copy the run_info and load it
shutil.copy(info, "run_info")
self.readInfo()
else: # Write a new run_info
if self._UID == 0 and self._previous != "initial": # Irresponsible UID assignment
self._UID = Run.getNextUID() # Get a new one
self.writeInfo()
self.writeScripts(initial)
if inFile is not None:
shutil.copy(inFile, "line.in")
if coord is not None:
shutil.copy(coord, "coord.nc.gz")
if outFile is not None:
shutil.copy(outFile, "line.out.gz")
def delete(self, trash=True):
"""Deletes the files associated with this run."""
global CLUSTERS
global TRASHPATH
if self.ID in CLUSTERS[self.clusterID].runs.keys():
del CLUSTERS[self.clusterID].runs[self.ID]
if TRASHPATH is not None and trash:
from time import time
system("mv -f %s %s/R%s_%i > /dev/null 2> /dev/null" % (self.path, TRASHPATH, self.ID, int(time())))
else:
system("rm -rf %s > /dev/null 2> /dev/null" % self.path) # Works better for some reason
def writeScripts(self, initial=False):
"""Write the qsub and qscript scripts for this run"""
global PRMTOPPATH
with directory(self.path):
frameSeparation = int(args.steps / args.sample)
if args.queue_name in ["dept_gpu", "any_gpu", "bahar_gpu"]:
with open("run.sh", 'w') as runScript:
runScript.write("#!/bin/bash\n")
runScript.write("sleep 0.5; qsub -d . -q %s -S /bin/bash -N lineMD_R%i -l "
"nodes=1:ppn=1:gpus=1 %s/qscript\n" % (args.queue_name, self._ID, self.path))
with open("qscript", 'w') as qscript:
qscript.write("""#!/bin/bash
AMBERHOME=/usr/local/amber14
PATH=/usr/local/amber14/bin:$PATH
gunzip begin.rst.gz >> out 2>&1
pmemd.cuda -O -i line.in -o line.out -p %s -c begin.rst -r frame -x coord.nc
for f in frame*; do mv "$f" "$f.rst" >> out 2>&1; done
gzip line.out >> out 2>&1
rm mdinfo >> out 2>&1
for i in `seq %i %i %i`;
do
echo -e "parm %s" >> ptraj_${i}.in
echo -e "trajin frame_${i}.rst 1 1 1" >> ptraj_${i}.in
echo -e "trajout frame_${i}.pdb pdb" >> ptraj_${i}.in
cpptraj < ptraj_${i}.in >> ptraj_frames.out 2>&1
done
gzip ptraj_frames.out >> out 2>&1
touch finished >> out 2>&1
$cmd
""" % (PRMTOPPATH, frameSeparation, frameSeparation, args.steps, PRMTOPPATH))
elif args.queue_name == "gpu_short":
with open("run.sh", 'w') as runScript:
runScript.write("#!/bin/bash\n")
runScript.write("sleep 0.5; qsub -d . -q gpu_short -S /bin/bash -N lineMD_R%i -l "
"nodes=1:ppn=1:gpus=1 "
"-l feature=titan -l walltime=23:59:59 %s/qscript" % (self._ID, self.path))
with open("qscript", 'w') as qscript:
qscript.write("""#!/bin/bash
module purge
module load intel/2013.0
module load amber/14-intel-2013-cuda-5.0
gunzip begin.rst.gz >> out 2>&1
pmemd.cuda -O -i line.in -o line.out -p %s -c begin.rst -r frame -x coord.nc
for f in frame*; do mv "$f" "$f.rst" >> out 2>&1; done
gzip line.out >> out 2>&1
rm mdinfo >> out 2>&1
for i in `seq %i %i %i`;
do
echo -e "parm %s" >> ptraj_${i}.in
echo -e "trajin frame_${i}.rst 1 1 1" >> ptraj_${i}.in
echo -e "trajout frame_${i}.pdb pdb" >> ptraj_${i}.in
cpptraj < ptraj_${i}.in >> ptraj_frames.out 2>&1
done
gzip ptraj_frames.out >> out 2>&1
touch finished >> out 2>&1
$cmd
""" % (PRMTOPPATH, frameSeparation, frameSeparation, args.steps, PRMTOPPATH))
os.chmod("run.sh", os.stat("run.sh").st_mode | S_IEXEC)
with open("line.in", 'w') as inputFile:
if initial:
ig = int(random.random() * 1000.0 % 999) # For the initial, set the random seed and never rerun it
inputFile.write("""&cntrl
imin = 0, ntx = 1, irest = 0,
ntpr = 10000, ntwr = -%i, ntwx = %i, ntxo = 1,
ntf = 2, ntc = 2, cut = 8.0,
ntb = 2, nstlim = %i, dt = 0.002,
temp0 = 300.0, ntt = 3, ig = %i,
gamma_ln = 1, ioutfm = 1,
ntp = 1, pres0 = 1.0, taup = 5.0,
/
""" % (int(args.steps / args.sample), int(args.steps / args.sample), args.steps, ig)) # sample, not frame here
else: # Restart file
ig = self._UID % 999999 # the random seed is UID; should be preserved across moves but still random
inputFile.write("""&cntrl
imin = 0, ntx = 5, irest = 1,
ntpr = 10000, ntwr = -%i, ntwx = %i, ntxo = 1,
ntf = 2, ntc = 2, cut = 8.0,
ntb = 2, nstlim = %i, dt = 0.002,
temp0 = 300.0, ntt = 3, ig = %i,
gamma_ln = 1, ioutfm = 1,
ntp = 1, pres0 = 1.0, taup = 5.0,
/
""" % (int(args.steps / args.sample), int(args.steps / args.sample), args.steps, ig))
def writeInfo(self):
"""Write a run_info file for a run."""
with open(self.path + "/run_info", 'w') as runInfo:
runInfo.write("""UniqueID: %i
Dist: %.*f
PreviousUID: %s
Explored: %i
Frame: %i
""" % (self._UID, args.precision, self._dist, self._previous, self._explored, self._frame))
def readInfo(self):
"""Read a run_info file for a run."""
try:
info = []
with open(self.path + "/run_info") as runInfo:
for line in runInfo:
info.append(line.split()[1])
self._UID = int(info[0])
self._dist = float(info[1])
self._previous = info[2]
self._explored = int(info[3])
self._frame = int(info[4])
except IOError:
log(YELLOW + UNDERLINE +
"Warning:" + END + YELLOW + " run_info is corrupt or missing; this run will be rejected.\n" + END)
self.delete()
class Cluster(object):
"""A Cluster object holds runs in a dictionary with ID keys. It knows its innermost distance and can calculate
the number of runs it holds. The folder format is "C*_*" where "*_*" is the ID or 'R' for the running folder.
The folder holds a "cluster_info" file to store the distance in persistence, which can be
manipulated by the writeInfo() and readInfo() methods. The special running cluster should not specify the
distance and does not support the info file."""
_ID = '0_0' # chronologically assigned ID string, also part of name of folder, or may be 'R'
_runs = None # dictionary of Run IDs and objects
_dist = 0 # innermost distance this Cluster holds
_explored = 0 # cached value for times explored
def __init__(self, ID, runs=None, dist=0.0, explored=0):
self._ID = str(ID)
if runs is None:
self._runs = {} # A hack since Python complains about mutable default parameters
else:
self._runs = runs
self._dist = float(dist)
self._explored = int(explored)
def __str__(self):
return "<Cluster: ID %s, %i runs, distance %.*f>" % \
(self._ID, self.count, args.precision, self._dist)
@property
def count(self):
return len(self.runs)
@property
def dist(self):
return round(self._dist, args.precision)
@dist.setter
def dist(self, dist):
self._dist = round(dist, args.precision)
@property
def path(self): # directory holding runs and the "cluster_info" file
return "%s/C%s" % (WORKDIR, self._ID)
@property
def shortPath(self):
return "C" + str(self._ID)
@property
def ID(self):
return self._ID
@property
def majorID(self): # part of ID before '_'
if self._ID == 'R':
return 'R'
return int(self._ID.split('_')[0])
@property
def minorID(self): # part of ID after '_'
if self._ID == 'R':
return 'R'
return int(self._ID.split('_')[1])
@property
def rawID(self): # unformatted ID, simply a number
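        # e.g. with SPLIT = 1, cluster ID '3_1' gives rawID = 3 * 2**1 + 1 = 7
        # (illustrative values)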
return int(self.majorID * (int(pow(2, SPLIT))) + self.minorID)
@property
def explored(self):
return self._explored
@property
def runs(self):
return self._runs
def addRun(self, ID, run):
"""Add a Run to the runs dictionary using ID. Will do nothing if ID already exists."""
if ID not in self._runs:
self._runs[ID] = run
def removeRun(self, ID):
"""Remove the Run with ID ID from the runs dictionary. Returns None if ID is not
in runs. Returns the run previously there."""
if ID in self._runs:
temp = self._runs[ID]
del self._runs[ID]
return temp
return None
def getRun(self, ID):
"""Gets the run at ID. Returns None if ID is not in runs."""
if ID in self._runs:
return self._runs[ID]
return None
def setRun(self, ID, run):
"""Adds run to runs at position ID. Returns the run previously there or None."""
if ID in self._runs:
temp = self._runs[ID]
self._runs[ID] = run
return temp
return None
def readRuns(self):
"""Loads the runs in folders into the runs dictionary."""
if self._ID == 'R':
return
self._runs = {}
runDirectories = [int(name[1:]) for name in os.listdir(self.path) if
os.path.isdir(os.path.join(self.path, name))]
for runID in runDirectories:
run = Run(ID=runID, clusterID=self._ID) # No UID because we will readInfo()
if not run.check("end.rst.gz"):
if run.check("end.rst"):
compress(run.path + "/end.rst")
else:
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW + " run at " + MAGENTA + str(run.shortPath) +
YELLOW + " did not have valid restart file and will be deleted.\n" + END)
run.delete()
continue
run.readInfo()
self._runs[runID] = run
def readExplored(self, exploredDist, start=0):
"""Read the explored count, given a descending-sorted exploredDist based on the explored_dist file.
Returns an integer that can be used as start to shorten the array loop."""
exploredCount = 0
rightBound = self.dist + BINWIDTH
for index, dist in enumerate(exploredDist[start:]):
if dist < self.dist: # We have passed this bin
self._explored = exploredCount
return index
if dist <= rightBound: # It is inside this bin, counting the right bound
exploredCount += 1
self._explored = exploredCount
return 0 # failsafe
def adjustedExplored(self, maxBinID):
"""Given the maximum bin with runs, compute the adjusted explored count for this bin."""
if self.count == 0 or self.explored == 0 or maxBinID <= 0:
return self.explored
# favor further bins
binBias = 2 * (maxBinID - self.rawID)
        # favor bins with more runs (count is guaranteed non-zero by the early return above)
        countBias = -int(3 * math.log(self.count))
        return self.explored + binBias + countBias
def writeInfo(self):
"""Write a cluster_info file for a cluster."""
if self._ID == 'R':
return
with open(self.path + "/cluster_info", 'w') as clusterInfo:
clusterInfo.write("""MinDistance: %.*f
""" % (args.precision, self._dist))
def readInfo(self):
"""Read a cluster_info file for a cluster."""
if self._ID == 'R':
return
try:
info = []
with open(self.path + "/cluster_info") as clusterInfo:
for line in clusterInfo:
info.append(line.split()[1])
self._dist = float(info[0])
except IOError:
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW +
" cluster_info corrupt or missing. Will try to rewrite now.\n" + END)
self.writeInfo()
def create(self):
"""Create the basic folder structure of a cluster."""
if not os.path.exists(self.path):
os.mkdir(self.path)
self.writeInfo()
def explore(self, dist):
"""Mark the distance in dist as explored in the central file and increment the internal count."""
try:
with open(WORKDIR + "/explored_dist", 'a') as exploredDist:
exploredDist.write("%.*f\n" % (args.precision, dist))
except IOError:
# Write a new file
with open(WORKDIR + "/explored_dist", 'w') as exploredDist:
exploredDist.write("%.*f\n" % (args.precision, dist))
if self._ID != 'R':
self._explored += 1
def init():
"""Prepares the initial cluster by copying files. Also begins initial runs based on the thread parameter."""
global CLUSTERS
global WORKDIR
global PRMTOPPATH
global COORDPATH
global THREADS
log(CYAN + "Beginning initialization.\n" + END)
log("Bin width is " + MAGENTA + "%.*f" % (args.precision, BINWIDTH) + END + ".\n")
log("Copying and writing input files.\n")
# Register cluster 0_0 with the dictionary
CLUSTERS['0_0'] = Cluster(ID='0_0', runs={}) # do not know distance yet
CLUSTERS['0_0'].create()
initRun = Run(previous="initial", UID=Run.getNextUID())
CLUSTERS['0_0'].addRun(ID=0, run=initRun)
initRun.create(endRestart=COORDPATH, initial=True)
if not initRun.check("end.rst.gz"):
fail(RED + UNDERLINE + "Error: " + END + RED + " initial coordinate file is invalid.\n" + END)
if not os.path.isfile(WORKDIR + "/C0_0/R0/frame_0.pdb"):
with directory(WORKDIR + "/C0_0/R0"):
with open("ptraj.in", 'w') as script:
decompress("end.rst.gz")
script.write("""parm %s
trajin end.rst 1 1 1
trajout frame_0.pdb pdb
""" % PRMTOPPATH)
system("cpptraj < ptraj.in > /dev/null 2> /dev/null")
compress("end.rst")
# Calculate initial distance
calcInitDist()
# Register the running cluster
CLUSTERS['R'] = Cluster(ID='R', runs={})
CLUSTERS['R'].create()
for i in xrange(THREADS):
# Create runs in the running cluster
thisRun = Run(ID=i, clusterID='R', previous=initRun.UID, UID=Run.getNextUID())
CLUSTERS['R'].addRun(ID=i, run=thisRun)
thisRun.create(beginRestart=COORDPATH, initial=True)
initRun.explored += 1
initRun.writeInfo()
thisRun.execute() # Begin the run
# Write to the explored_dist file
CLUSTERS['R'].explore(CLUSTERS['0_0'].dist + BINWIDTH)
log(GREEN + "%i initial runs have begun on %s.\n" % (RUNNING, strftime("%c")) + END)
def calcRefCoords():
"""Reads the coordinates of the reference file into the global variable."""
global REFCOORDS
global SEGMENTS
pdbLines = []
with open(WORKDIR + "/reference.pdb") as pdb:
if SEGMENTS is None: # Process everything
for pdbLine in pdb:
# Skip non ATOM lines and non-alpha carbons
if pdbLine[0:4] != "ATOM" or pdbLine[13:15] != "CA":
continue
# Get line
pdbLines.append(pdbLine)
else: # Process only segments
for pdbLine in pdb:
if pdbLine[0:4] != "ATOM" or pdbLine[13:15] != "CA":
continue
resID = int(pdbLine[21:28])
for tup in SEGMENTS:
if tup[0] <= resID <= tup[1]:
pdbLines.append(pdbLine)
coords = zeros((len(pdbLines), 3), float)
for index, line in enumerate(pdbLines):
for j in range(3):
coords[index, j] = float(line[(30 + j * 8):(38 + j * 8)])
REFCOORDS = coords
def calcInitDist():
"""Calculates the "initial distance" based on a given run. Sets the variable initDist based on either
the result of dist() on C0/R0. Also sets the endpoint of the initial bin."""
global RUNANALYSIS
if not RUNANALYSIS:
log("Calculating initial distance.\n")
initDist = CLUSTERS['0_0'].getRun(ID=0).processDist()[0]
CLUSTERS['0_0'].dist = initDist - BINWIDTH # This bin ends here
CLUSTERS['0_0'].writeInfo()
else: # analysis
initDist = CLUSTERS['0_0'].dist + BINWIDTH
if not RUNANALYSIS:
log(BLUE + "Initial RMSD is " + MAGENTA + "%.*f" % (args.precision, initDist) + BLUE +
" and the first bin ends at " + MAGENTA + "%.*f" % (args.precision,
CLUSTERS['0_0'].dist) + BLUE + ".\n" + END)
if args.min is not None:
log(BLUE + "Analysis RMSD endpoint will be " + MAGENTA + "%.*f" %
(args.precision, args.min) + BLUE + " angstroms.\n" + END)
log(BLUE + "Runs ending after " + MAGENTA + "%.*f" % (args.precision, initDist + args.max) +
BLUE + " angstroms will be rejected.\n")
else:
log('\n')
def analysis(runDirectories):
"""Calls the various analysis methods runs with IDs in runDirectories and in the running cluster,
and decides whether further analysis is necessary."""
global CLUSTERS
global WORKDIR
global SPLIT
global BINWIDTH
global RUNANALYSIS
RUNANALYSIS = True
log(CYAN + "Beginning analysis.".ljust(getTerminalWidth()) + '\n' + END)
log("Reading cluster information.\n")
readClusterInfo(silent=True, readInfo=False, readRuns=False, readExplored=False) # Read nothing
determineSplit()
if not runDirectories:
readClusterInfo() # Read everything and print
else:
readClusterInfo(silent=True, readExplored=False) # do not read explored right now
migrateRuns(runDirectories, oldCluster=CLUSTERS['R'])
log("Rereading cluster information.\n")
readClusterInfo() # Re-read everything after migration and print
# Signal completion if end cluster reached
maxCluster = int(max([c.rawID for c in CLUSTERS.values() if c.ID != 'R' and c.count > 0]))
# Calculate initial and ending distance
calcInitDist()
desiredCluster = round((CLUSTERS["0_0"].dist + BINWIDTH - args.min) / BINWIDTH)
percentage = float(maxCluster) / desiredCluster * 100
if percentage > 100.0:
percentage = 100.0
log(GREEN + "Completed cluster %i of %i total (%.1f%%).\n" % (maxCluster, desiredCluster,
percentage) + END)
if maxCluster < desiredCluster or args.split:
findNewRuns()
else:
stitchTrajectory()
log(GREEN + "Endpoint reached.\n" + END)
sys.exit(0) # End the program now
def readClusterInfo(silent=False, readInfo=True, readRuns=True, readExplored=True):
"""Populate the CLUSTERS database and instruct each cluster to readInfo().
Note: components of this method are optional for speed.
    silent controls logging (the 'correct' option is currently disabled), readInfo controls reading the distance,
    readRuns controls reading run counts and lists, and readExplored (slowest) controls reading the explored count.
All are True by default. If all are off, the cluster will only know its ID."""
global CLUSTERS
CLUSTERS = {}
clusterDirectories = sorted([name[1:] for name in glob("C*_*") if os.path.isdir(os.path.join(WORKDIR, name))],
key=lambda i: (int(i.split('_')[0]), int(i.split('_')[1])))
# Generate Clusters, read info files, and populate the master dictionary
# lastDist = None
# lastID = None
exploredDist = []
if readExplored:
with open(WORKDIR + "/explored_dist") as exploredDistFile:
for line in exploredDistFile:
exploredDist.append(float(line))
exploredDist.sort(reverse=True)
with open(WORKDIR + "/explored_dist", 'w') as exploredDistFile:
for dist in exploredDist:
exploredDistFile.write("%.*f\n" % (args.precision, dist))
lastStart = 0
for index, clusterID in enumerate(clusterDirectories):
cluster = Cluster(ID=clusterID, runs={}) # Do not know dist yet
CLUSTERS[clusterID] = cluster
if readInfo:
cluster.readInfo() # Now it should have dist
if readRuns:
cluster.readRuns() # Now it has runs and count
if readExplored:
lastStart = cluster.readExplored(exploredDist, start=lastStart) # Now it has explored
desiredCluster = len(clusterDirectories)
percentage = float(index) / desiredCluster * 100
if percentage > 100.0:
percentage = 100.0
log("\rReading clusters: %.1f%% complete." % percentage)
log("\n")
if not silent: # print cluster information
if readInfo:
maxBinID = 0
if args.adjust:
maxBinID = max([c.rawID for c in CLUSTERS.values() if c.count > 0])
for cluster in sorted(CLUSTERS.values(), key=lambda cl: (cl.majorID, cl.minorID)):
log("Cluster " + MAGENTA + str(cluster.ID) + END + " from " + MAGENTA + "%.*f"
% (args.precision, cluster.dist) + END + " to " + MAGENTA + "%.*f" %
(args.precision, cluster.dist + BINWIDTH) + END)
if readRuns:
log(", with " + MAGENTA + str(cluster.count) + END + " runs")
if readExplored:
log(", explored " + MAGENTA + str(cluster.explored) + END + " times")
if args.adjust:
log(", adjusted " + MAGENTA + str(cluster.adjustedExplored(maxBinID)) + END + " times.\n")
else:
log(".\n")
else:
log(".\n")
else:
log(".\n")
CLUSTERS['R'] = Cluster(ID='R', runs={}) # Running cluster
def migrateRuns(runDirectories, oldCluster):
"""Examines the runs with IDs in runDirectories within the provided cluster.
Moves folders to appropriate clusters and creates new ones (if the cluster is the running cluster)."""
global CLUSTERS
global NOPROGRESS
global SPLIT
if oldCluster.ID != 'R':
log(BLUE + "Examining cluster " + MAGENTA + "%s.\n" % oldCluster.ID + END)
else:
log(BLUE + "Examining the running cluster.\n" + END)
# Sort the clusters by dist
sortedClustersList = sorted(CLUSTERS.values(), key=attrgetter("dist"))
sortedIDsList = [c.ID for c in sortedClustersList]
for runID in runDirectories: # should already be sorted
run = Run(ID=runID, clusterID=oldCluster.ID) # No UID because we will readInfo()
if oldCluster.ID == 'R': # expect a decompressed file
outFiles = glob(run.path + "/lineMD_R*.o*")
if outFiles:
with open(outFiles[0]) as output:
out = output.read()
if "Calculation halted" in out or "unspecified launch failure" in out \
or "busy or unavailable" in out or "STOP PMEMD Terminated Abnormally" in out:
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW + " new run at " + MAGENTA +
str(run.shortPath) + YELLOW + " has failed and will be deleted.\n" + END)
run.delete()
continue
if not run.check("coord.nc"): # run verification has failed
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW + " new run at " + MAGENTA + str(run.shortPath) +
YELLOW + " did not have valid coordinate file and will be deleted.\n" + END)
run.delete()
continue
# otherwise expect compressed file (except initial run); will not be necessary during processDist anyway
elif not (run.clusterID == '0_0' and run.ID == 0) and not run.check("coord.nc.gz"): # run verification failed
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW + " run at " + MAGENTA + str(run.shortPath) +
YELLOW + " did not have valid coordinate file and will be deleted.\n" + END)
run.delete()
continue
# run verification succeeded; read distance
run.readInfo()
log("Run ID " + MAGENTA + str(run.ID) + END + " (UID " + MAGENTA + str(run.UID) + END + ")")
dist, frame = run.processDist()
if dist is None: # dist returns None if calculation failed
log(YELLOW + " has failed unexpectedly and will be deleted.\n" + END)
run.delete()
continue
dist = round(float(dist), args.precision)
log(" has distance " + MAGENTA + "%.*f" % (args.precision, dist) + END + " at frame " + MAGENTA +
str(frame) + END)
# Find the right place for this one, if it is in an existing cluster
found = False
for cluster in sortedClustersList:
if cluster.dist < dist <= cluster.dist + BINWIDTH: # It is inside this bin, counting the left bound
if cluster.count == 0: # Successfully moved into a "new" empty bin
NOPROGRESS = 0
newRun = Run.move(run, cluster)
if run.clusterID != cluster.ID:
if cluster.count == 1: # just populated it
log(" and belongs (in new cluster) at " + MAGENTA + "%s.\n" % newRun.shortPath + END)
else:
log(" and belongs in " + MAGENTA + "%s.\n" % newRun.shortPath + END)
else:
log(" and will stay in the same cluster.\n")
found = True
break # go to next directory
# Otherwise, make a new cluster if possible
if not found:
if oldCluster.ID != 'R':
log(".\n")
log(YELLOW + UNDERLINE + "Warning:" + END + YELLOW +
" a run does not fit inside new split bins and will be deleted.\n" + END)
run.delete()
continue
# Make a new bin for this one
initRight = CLUSTERS['0_0'].dist + BINWIDTH
diff = abs(initRight - dist)
newRawID = int(math.ceil(diff / float(BINWIDTH))) # simple number of bins, assuming no split
if dist > initRight: # Right of initial cluster
if dist > initRight + args.max: # Right of cutoff
log(" and is out of cluster range.\n")
run.delete()
continue
newMajor = -int(math.ceil(newRawID / pow(2, SPLIT)))
newMinor = int(math.floor((abs(newMajor) * pow(2, SPLIT) * BINWIDTH - diff) / float(BINWIDTH)))
newRawID *= -1
else:
newRawID -= 1 # There is an off-by-one situation here
newMajor = int(math.floor(newRawID / pow(2, SPLIT)))
newMinor = int(newRawID % int(pow(2, SPLIT)))
newID = "%i_%i" % (newMajor, newMinor)
if newID in sortedIDsList:
log(".\n")
fail(RED + UNDERLINE + "Error:" + END + RED + " run was not found but should be in clusters.\n" + END)
newDist = CLUSTERS['0_0'].dist - newRawID * BINWIDTH
cluster = Cluster(ID=newID, runs={}, dist=newDist)
CLUSTERS[newID] = cluster
cluster.create()
newRun = Run.move(run, cluster)
log(" and belongs (in new cluster) at " + MAGENTA + "%s.\n" % newRun.shortPath + END)
# Create supplementary split folders
if SPLIT > 0:
for thisMinor in xrange(int(pow(2, SPLIT))):
thisID = "%i_%i" % (newMajor, thisMinor)
if thisID not in sortedIDsList and thisID != newID: # Is new and not the one we just made
thisRawID = newMajor * int(pow(2, SPLIT)) + thisMinor
thisDist = round(CLUSTERS['0_0'].dist - thisRawID * BINWIDTH, args.precision)
thisCluster = Cluster(ID=thisID, runs={}, dist=thisDist)
CLUSTERS[thisID] = thisCluster
thisCluster.create()
if newDist < 0: # We made a new positive bin
NOPROGRESS = 0 # We have made progress
# Re-read to prepare for next iteration through the loop, since CLUSTERS should be accurate now
sortedClustersList = sorted(CLUSTERS.values(), key=attrgetter("dist"))
sortedIDsList = [c.ID for c in sortedClustersList]
def findNewRuns():
"""Find the appropriate cluster from which to begin new runs, and prepare and execute runs"""
global NOPROGRESS
global NOPROGRESSCUTOFF
global SPLIT
global SPLITMAX
if args.split and SPLIT < SPLITMAX:
# Split the bins in two, then force analysis to restart. Abandon this attempt to find new runs.
splitBins()
finishedRuns = getFinishedRuns()
if finishedRuns:
analysis(finishedRuns)
return
alreadySelected = False
# Find cluster with least explored value, then largest bin number (bin will never tie)
clusterList = [c for c in CLUSTERS.values() if c.ID != 'R' and c.count > 0]
maxBinID = max([c.rawID for c in clusterList if c.count > 0])
while RUNNING < THREADS:
if args.adjust:
selCluster = min(clusterList, key=lambda cl: (cl.adjustedExplored(maxBinID), cl.dist))
log("Selected cluster " + MAGENTA + str(selCluster.ID) + END +
" with " + MAGENTA + str(selCluster.count) + END + " runs, explored " +
str(selCluster.explored) + " times, adjusted " + MAGENTA +
str(selCluster.adjustedExplored(maxBinID)) + END + " times.\n")
else:
selCluster = min(clusterList, key=lambda cl: (cl.explored, cl.dist))
log("Selected cluster " + MAGENTA + str(selCluster.ID) + END +
" with " + MAGENTA + str(selCluster.count) + END + " runs, explored " + MAGENTA +
str(selCluster.explored) + END + " times.\n")
if selCluster.ID == '0_0' and SPLIT <= SPLITMAX and not alreadySelected: # returned to the original bin
NOPROGRESS += 1
alreadySelected = True
log("No progress has been made for " + MAGENTA + str(NOPROGRESS) + END + " of " + MAGENTA +
str(NOPROGRESSCUTOFF) + END + " iterations.\n")
# If we have returned to the original cluster too many times:
if NOPROGRESS >= NOPROGRESSCUTOFF and SPLIT < SPLITMAX:
# Split the bins in two, then force analysis to restart. Abandon this attempt to find new runs.
splitBins()
finishedRuns = getFinishedRuns()
if finishedRuns:
analysis(finishedRuns)
return
# Pick the least explored run in this cluster. If there is a tie, choose randomly
runList = selCluster.runs.values()
def run_cmp(runA, runB):
if runA.explored > runB.explored:
return 1
if runA.explored < runB.explored:
return -1
else:
return random.choice([-1, 1]) # Not 0 because we do not want a tie in this case; non-stable sort
selRun = min(runList, key=cmp_to_key(run_cmp)) # Sort by explored
selRun.readInfo()
log("Selected run " + MAGENTA + "C%s/R%i" % (selCluster.ID, selRun.ID) + END + " (UID " + MAGENTA +
str(selRun.UID) + END + "), explored " + MAGENTA + str(selRun.explored) + END + " times.\n")
# Go to running, get the name of the next available run
runningDirectories = [int(name[1:]) for name in os.listdir(WORKDIR + "/CR") if
os.path.isdir(os.path.join(WORKDIR + "/CR", name))]
if not runningDirectories:
nextNum = 0
else:
nextNum = max(runningDirectories) + 1
newRun = Run(ID=nextNum, clusterID='R', previous=selRun.UID, UID=Run.getNextUID())
newRun.create(beginRestart=selRun.path + "/end.rst.gz")
selRun.explored += 1
selRun.writeInfo()
newRun.execute()
# Write to the explored_dist file
startDist = selRun.processDist()[0]
selCluster.explore(startDist)
log(GREEN + "Executed run %i of %i (new ID %i, unique ID %i, start distance %.*f) on %s.\n" %
(RUNNING, THREADS, newRun.ID, newRun.UID, args.precision, startDist, strftime("%c")) + END)
def splitBins():
"""Perform bin splitting and run migration to new clusters"""
global SPLIT
global BINWIDTH
global CLUSTERS
global NOPROGRESS
SPLIT += 1
BINWIDTH = round(BINWIDTH / 2.0, args.precision)
log(CYAN + "Splitting bins for iteration %i of %i.\n" % (SPLIT, SPLITMAX) + END)
# delete clusters beyond range
clustersList = copy.copy(CLUSTERS.values())
for cluster in clustersList:
if cluster.dist >= CLUSTERS['0_0'].dist + BINWIDTH + args.max and cluster.ID != 'R': # Found one bad cluster
majorID = cluster.majorID
for otherCluster in clustersList:
if otherCluster.majorID == majorID and otherCluster.ID in CLUSTERS:
# Don't care about the minor ID. Just delete the whole set
shutil.rmtree(otherCluster.path)
del CLUSTERS[otherCluster.ID]
# find available IDs
nextMinor = max([c.minorID for c in CLUSTERS.values() if c.ID != 'R']) + 1
# create new clusters in database
# The clusters before split:
oldClusterList = sorted([c for c in CLUSTERS.values() if c.ID != 'R'], key=attrgetter("dist"))
zeroClusterList = [c for c in oldClusterList if c.minorID == 0] # Only clusters "C*_0"
for zeroCluster in zeroClusterList:
# Create new clusters
for i in xrange(int(pow(2, SPLIT - 1))): # Repeat for each new folder, based on the number of previous splits
newID = "%i_%i" % (zeroCluster.majorID, nextMinor + i)
newCluster = Cluster(ID=newID, runs={}, dist=0)
CLUSTERS[newID] = newCluster
# create new folders and cluster info
newCluster.create()
newCluster.writeInfo()
# change cluster info
rightBound = zeroCluster.dist + BINWIDTH * 2 # BINWIDTH has changed
for i in xrange(int(pow(2, SPLIT))): # Repeat for each existing folder
ID = "%i_%i" % (zeroCluster.majorID, i) # This includes zeroCluster
cluster = CLUSTERS[ID]
# Set the distance to the old cluster's right bound, then move it to the left
cluster.dist = round(rightBound - BINWIDTH * (i + 1), args.precision)
cluster.writeInfo()
# re-read cluster info into database to prepare for move
log("Reading new cluster information.\n")
readClusterInfo(silent=True, readExplored=False) # explored count is not necessary
# move runs for each old cluster; new clusters should not be necessary now.
for cluster in oldClusterList:
migrateRuns([run.ID for run in cluster.runs.values()], cluster)
log("Rereading cluster information.\n")
readClusterInfo() # read everything
log("Finished run migration.\n")
NOPROGRESS = 0
return # Allow findNewRuns to take back to analysis
if __name__ == "__main__":
main()
|
__author__ = 'civa'
import tornado.web
class GeneralHandler(tornado.web.RequestHandler):
def url_scheme(self):
return r"/api/general/([a-zA-Z0-9_\-]+)/?$"
    def get(self, resource):
        # url_scheme() captures one path segment, so get() must accept it here.
        self.write('')
|
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tableaubord', '0051_auto_20170508_1813'),
]
operations = [
migrations.AlterField(
model_name='evenement',
name='dateheure',
field=models.DateTimeField(default=datetime.datetime(2017, 5, 8, 18, 21, 21, 857638), verbose_name='Date/heure evenement '),
),
]
|
"""
This file contains all the forms for the debate modules.
"""
from django.forms import ModelForm, Textarea, TextInput
from django.forms.models import modelformset_factory
from apps.ecidadania.debate.models import Debate, Note, Row, Column
class DebateForm(ModelForm):
"""
Returns an empty form for creating a new Debate.
:rtype: HTML Form
.. versionadded:: 0.1b
"""
class Meta:
model = Debate
widgets = {
'title': TextInput(attrs={'class': 'medium'}),
}
class RowForm(ModelForm):
"""
"""
class Meta:
model = Row
class ColumnForm(ModelForm):
"""
"""
class Meta:
model = Column
class NoteForm(ModelForm):
"""
    Returns an HTML form to create or edit a new 'note', or 'proposal' as it is
    called in sociologists' jargon.
:rtype: HTML Form
.. versionadded:: 0.1b
"""
class Meta:
model = Note
class UpdateNoteForm(ModelForm):
"""
    Returns a simpler version of the NoteForm for AJAX interaction, preventing
    modification of significant fields that are not relevant to AJAX updates.
:rtype: HTML Form
.. versionadded:: 0.1b
"""
class Meta:
model = Note
exclude = ('debate', 'author', 'row', 'column', 'date')
class UpdateNotePosition(ModelForm):
"""
    This is a partial form that saves only the position updates of the notes in the
    debates. It excludes all fields except Column and Row for security, so that the
    original data of the note cannot be modified. Moving notes does not count as a
    modification, so the last-modification data is excluded as well.
:rtype: HTML Form
.. versionadded:: 0.1.5
"""
class Meta:
model = Note
exclude = ('author', 'debate', 'last_mod', 'last_mod_author', 'date',
'message', 'title')
|
def welcome_message():
import random
from pymc.util import chat
message_builder = chat.MessageBuilder()
element = chat.TextElement("I can haz ")
message_builder.append(element)
colorz = ["yellow", "gold", "aqua", "blue", "light_purple", "red", "green"]
text = "colorz!"
for char in text:
element = chat.TextElement(char)
element.italic = True
element.color = random.choice(colorz)
colorz.remove(element.color)
message_builder.append(element)
return message_builder.encode()
def start():
import pymc.network.connection
from pymc.util import event
@event.handler(pymc.network.connection.ping_event)
def ping_handler(data):
print "Handling ping!"
data.description = welcome_message()
import pymc.network.server
pymc.network.server.start("0.0.0.0", 25565)
|
import os
from ROOT import TCanvas
from cmstoolsac3b.test.test_histotoolsbase import TestHistoToolsBase
from cmstoolsac3b.rendering import CanvasBuilder
from cmstoolsac3b.wrappers import HistoWrapper
import cmstoolsac3b.diskio as diskio
class TestRendering(TestHistoToolsBase):
def setUp(self):
super(TestRendering, self).setUp()
if not os.path.exists("test"):
os.mkdir("test")
def test_canvasBuilder_make(self):
wrp1 = self.test_wrp
wrp2 = HistoWrapper(wrp1.histo, history="Fake history")
wrp2.histo.Scale(1.5)
cb = CanvasBuilder((wrp1, wrp2))
wrp = cb.build_canvas()
# check for stack and data to be in canvas primitives
prim = wrp.canvas.GetListOfPrimitives()
self.assertTrue(wrp1.histo in prim)
self.assertTrue(wrp2.histo in prim)
self.test_wrp = wrp
def test_canvas_info_file(self):
fname = "test/cnv_save.info"
self.test_canvasBuilder_make()
diskio.write(self.test_wrp, fname)
# file should have 23 lines (with history written out)
with open(fname) as fhandle:
self.assertEqual(len(list(fhandle)), 23)
import unittest
suite = unittest.TestLoader().loadTestsFromTestCase(TestRendering)
if __name__ == '__main__':
unittest.main()
|
"""This module is responsible for the translation of the content of the DB into
a suitable data structure (which can be list, tree or forest) for the views to
interact with. To this purpose, the underlying structures are built from the
DB content and wrapped in QAbstractItemModel subclasses that expose those
properties that are needed for the UI to draw itself. Since these models can be
changed by the user, they also expose an API for the *controller* to add,
remove, reparent or rename items, notifying the attached views about changes.
"""
from PySide import QtCore
from src.model import dal
_reqm = None
_testm = None
_ucm = None
_srcm = None
class ItemNode(object):
"""This is used to represent an item (requirement, test or use case...) in
    a tree-like data structure with parents and children. These objects are
used as internal data structures for ItemModels.
"""
def __init__(self, item_id, parent=None):
self.item_id = item_id
self.children = []
self.parent = parent
def get_requirement_model():
"""Returns a reference to the single instance of the requirement model.
"""
global _reqm
if not _reqm:
_reqm = RequirementModel()
return _reqm
def get_source_model():
"""Gets a reference to the single instance of the source model.
"""
global _srcm
if not _srcm:
_srcm = SourceModel()
return _srcm
def get_test_model():
"""Returns a reference to the single instance of the test model.
"""
global _testm
if not _testm:
_testm = TestModel()
return _testm
def get_use_case_model():
"""Returns a reference to the single instance of the use case model.
"""
global _ucm
if not _ucm:
_ucm = UseCaseModel()
return _ucm
def get_use_case_list_model(requirement):
"""Returns a use case list model for the given requirement.
"""
return UseCaseListModel(requirement)
def get_test_list_model(requirement):
"""Returns a test list model for the given requirement
"""
return TestListModel(requirement)
def get_requirement_list_model(item):
"""Returns a requirement list model for the given item (test or use case).
"""
return RequirementListModel(item)
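# Minimal usage sketch (illustrative; assumes a running QApplication and an
# initialized data access layer):
#
#     from PySide import QtGui
#     view = QtGui.QTreeView()
#     view.setModel(get_requirement_model())
#     view.show()
#
# The other factory functions above follow the same pattern.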
class ItemModel(QtCore.QAbstractItemModel):
"""Abstract item model subclass used to represent a forest of items.
"""
def __init__(self):
super(ItemModel, self).__init__()
self._item_forest = []
self.initialize()
@classmethod
def _generate_tree(cls, item_id, parent=None):
"""Given an item with the given ID, it recursively generates a tree of
ItemNodes rooted in the item with the given ID. It internally uses the
_get_children method to obtain a list of the ID of the item's children.
"""
item = ItemNode(item_id, parent)
children_list = cls._get_children(item_id)
for child_id in children_list:
item.children.append(cls._generate_tree(child_id, item))
return item
@classmethod
def _get_children(cls, item_id):
"""This hook method should be implemented by subclasses to obtain a
list of item IDs corresponding to the children of the item with the
given item ID.
"""
raise NotImplementedError('Implement me!')
@classmethod
def _find_in_tree(cls, item, item_id):
"""Returns a reference to the ItemNode with the given item_id if it is
found in the tree rooted in the given item, None otherwise.
"""
if item.item_id == item_id:
return item
for child in item.children:
rv = cls._find_in_tree(child, item_id)
if rv: # stop recurring if the desired item has been found
return rv
def _search_forest(self, item_id):
"""Search the whole list of trees for a node with the given item ID.
"""
for tree in self._item_forest:
rv = self._find_in_tree(tree, item_id)
if rv: # stop iterating once the item has been found
return rv
def append_child_to_parent(self, item_id, parent_id=None):
"""Appends a new item in the correct place in the model, notifying the
associated views of the change happened.
"""
if not parent_id: # adding a top level item
child_count = len(self._item_forest)
parent_index = QtCore.QModelIndex()
self.beginInsertRows(parent_index, child_count, child_count)
new_child = ItemNode(item_id)
self._item_forest.append(new_child)
self.endInsertRows()
else: # adding an item as a leaf in some tree (where the parent is)
parent = self._search_forest(parent_id)
child_count = len(parent.children)
parent_index = self.createIndex(0, 0, parent)
self.layoutAboutToBeChanged.emit() # why is this needed?
self.beginInsertRows(parent_index, child_count, child_count)
new_child = ItemNode(item_id, parent)
parent.children.append(new_child)
self.endInsertRows()
self.layoutChanged.emit()
def update_item_id(self, old_id, new_id):
"""Changes the ID of the item with the given old ID to the new one.
"""
item = self._search_forest(old_id)
index = self.createIndex(0, 0, item)
item.item_id = new_id
self.dataChanged.emit(index, index)
def delete_item(self, item_id):
"""Deletes an item by removing the data from the structure (notifying
the view about rows being removed) and, if the item had any children,
moves them to become top level items (as this is what happens in the
DB since the parent_id field is set to null).
"""
item = self._search_forest(item_id)
index = self.createIndex(0, 0, item)
parent_index = self.parent(index)
parent = item.parent
if parent:
row = parent.children.index(item)
else:
row = self._item_forest.index(item)
# remove the item from its parent's children
self.beginRemoveRows(parent_index, row, row)
if parent:
parent.children.remove(item)
else:
self._item_forest.remove(item)
self.endRemoveRows()
# adds the children to the root of the model (they have no parent)
root_index = QtCore.QModelIndex()
tree_count = len(self._item_forest)
self.beginInsertRows(root_index, tree_count,
tree_count + len(item.children) - 1)
while item.children:
child = item.children.pop()
child.parent = None
self._item_forest.append(child)
self.endInsertRows()
del item
self.layoutChanged.emit()
def update_item_parent(self, item_id, new_parent_id):
"""This is responsible for moving some piece of tree around in the
tree model and informing the view about the changes occurred.
"""
item = self._search_forest(item_id)
index = self.createIndex(0, 0, item)
old_parent_index = self.parent(index)
new_parent = self._search_forest(new_parent_id)
if not new_parent: # it has become first level
new_parent_index = QtCore.QModelIndex()
else: # it gets moved somewhere else in the forest
new_parent_index = self.createIndex(0, 0, new_parent)
# determines the old row number
if item.parent:
row = item.parent.children.index(item)
else:
row = self._item_forest.index(item)
# determines the new row number
if new_parent:
new_row = len(new_parent.children)
else:
new_row = len(self._item_forest)
# begin the actual operation
self.beginMoveRows(
old_parent_index, row, row, new_parent_index, new_row)
# removes the item from its parent's children
if item.parent:
item.parent.children.remove(item)
else:
self._item_forest.remove(item)
# appends it to the list of its new siblings
if new_parent:
new_parent.children.append(item)
else:
self._item_forest.append(item)
# reparents the item
item.parent = new_parent
self.endMoveRows()
self.layoutChanged.emit()
@classmethod
def _get_top_level_items(cls):
"""This hook method should be implemented in subclasses to obtain a
list of those IDs corresponding to items with no parent.
"""
raise NotImplementedError('Implement me!')
def initialize(self):
"""Rebuilds the internal data structure based on the DB.
"""
self.beginResetModel()
self._item_forest = []
for item_id in self._get_top_level_items():
# creates the forest as a list of trees
self._item_forest.append(self._generate_tree(item_id))
self.endResetModel()
def flags(self, index=QtCore.QModelIndex()):
"""Returns an integer defining item properties, valid index will always
point to items that are enabled and can be selected.
"""
if not index.isValid():
return QtCore.Qt.ItemIsEnabled
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
"""Returns te names of the columns that will be displayed in the view.
"""
if (role == QtCore.Qt.DisplayRole and
orientation == QtCore.Qt.Horizontal):
if section == 0:
return self.tr('Name')
def index(self, row, column, parent=QtCore.QModelIndex()):
"""Allows views to go one step further in the tree starting from
the parent index, going down from the invisible root to first level
(i.e. orphaned) items or to some child of a parent item.
"""
if not self.hasIndex(row, column, parent):
return QtCore.QModelIndex()
if not parent.isValid():
# descending from the tree 'root', so access first level items
return self.createIndex(row, column, self._item_forest[row])
children = parent.internalPointer().children
if column < self.columnCount() and row < len(children):
# access further level items
return self.createIndex(row, column, children[row])
def parent(self, index=QtCore.QModelIndex()):
"""Allows views to go one step backward in the tree structure, i.e.
obtaining a new index that points to the parent of the current item.
"""
if not index.isValid():
return QtCore.QModelIndex()
item = index.internalPointer()
parent = item.parent
if not parent: # can't go backwards past the root
return QtCore.QModelIndex()
row = parent.children.index(item)
return self.createIndex(row, 0, parent)
def rowCount(self, parent=QtCore.QModelIndex()):
"""This boils down to the number of children that the item pointed by
the parent index has or the number of trees in the forest if the index
corresponds to the 'root' of the model or is invalid.
"""
if not parent.isValid() or not parent.internalPointer():
return len(self._item_forest)
item = parent.internalPointer()
return len(item.children)
def columnCount(self, unused_parent=QtCore.QModelIndex()):
"""These models only have one column, no matter what the parent is.
"""
return 1
def data(self, index=QtCore.QModelIndex(), role=QtCore.Qt.DisplayRole):
"""Allow views to access the data stored inside the items pointed by
model indexes, depending on which column the current index has.
"""
if index.isValid() and role == QtCore.Qt.DisplayRole:
section = index.column()
item = index.internalPointer()
if section == 0:
return item.item_id
class RequirementModel(ItemModel):
"""This is used to store the requirement forest (list of trees).
"""
def __init__(self):
super(RequirementModel, self).__init__()
@classmethod
def _get_children(cls, item_id):
"""Returns an iterable with all the children of a given requirement.
"""
return dal.get_requirement_children_ids(item_id)
@classmethod
def _get_top_level_items(cls):
"""Returns an iterable of all those requirements that have no parent.
"""
return dal.get_top_level_requirement_ids()
class UseCaseModel(ItemModel):
"""This is used to store the use case forest (list of trees).
"""
def __init__(self):
super(UseCaseModel, self).__init__()
@classmethod
def _get_children(cls, item_id):
"""Returns an iterable with all the children of a given requirement.
"""
return dal.get_use_case_children_ids(item_id)
@classmethod
def _get_top_level_items(cls):
"""Returns an iterable of all those use cases that have no parent.
"""
return dal.get_top_level_use_case_ids()
class TestModel(ItemModel):
"""Abstract item model subclass used to represent the list of system tests.
"""
def __init__(self):
super(TestModel, self).__init__()
@classmethod
def _get_children(cls, unused_item_id):
"""Tests have no children (flat model) so this is always an empty list.
"""
return []
@classmethod
def _get_top_level_items(cls):
"""Returns an iterable of all those tests that have no parent.
"""
return dal.get_all_test_ids()
class SourceModel(ItemModel):
"""Abstract item model subclass used to represent the requirement sources.
"""
def __init__(self):
super(SourceModel, self).__init__()
@classmethod
def _get_children(cls, unused_item_id):
"""This is (hopefully) a flat model so this is always an empty list.
"""
return []
@classmethod
def _get_top_level_items(cls):
"""Returns an iterable of all requirement sources.
"""
return dal.get_all_source_ids()
class ItemListModel(QtCore.QAbstractItemModel):
"""This is the base class for all 'flat' models used to represent a list of
items (requirements, tests or use cases) that are associated to other items
of the business model (requirements to use cases and vice versa, tests to
requirements and vice versa).
"""
def __init__(self, item):
super(ItemListModel, self).__init__()
self._item_data_list = self._get_item_names_and_descriptions()
self.associated_item_ids = self._get_associated_item_ids(item)
@classmethod
def _get_item_names_and_descriptions(cls):
"""This must be implemented by subclasses to obtain a sequence of
dictionaries having the 'id' and 'description' keys which represent
the items being listed.
"""
raise NotImplementedError('Implement me!')
@classmethod
def _get_associated_item_ids(cls, item):
"""Gets a list of the IDs of those items that are linked to the given
item, e.g. the list of use case IDs that are linked to a requirement.
"""
raise NotImplementedError('Implement me!')
def flags(self, index=QtCore.QModelIndex()):
"""Items of this model are enabled and can be selected by default.
"""
if not index.isValid():
return QtCore.Qt.ItemIsEnabled
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable
def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
"""Provides views with the required information for displaying headers.
"""
if (role == QtCore.Qt.DisplayRole and
orientation == QtCore.Qt.Horizontal):
if section == 0:
return self.tr('Name')
if section == 1:
return self.tr('Description')
if section == 2:
return self.tr('Linked')
def rowCount(self, index=QtCore.QModelIndex()):
"""Since this is a flat model, indexes pointing to first level items
have no children, whereas if the index corresponds to the model 'root',
it has as many children as are the elements in the internal data list.
"""
if index.internalPointer() in self._item_data_list:
return 0
return len(self._item_data_list)
def columnCount(self, unused_index=QtCore.QModelIndex()):
"""These models have a fixed number of columns.
"""
return 3
def index(self, row, column, unused_parent=QtCore.QModelIndex()):
"""The only way to go down in such a model is from the root to one of
the first level items, so it returns the corresponding index.
"""
return self.createIndex(row, column, self._item_data_list[row])
def parent(self, unused_index=QtCore.QModelIndex()):
"""The only way to go up is from a first level item to the model 'root'
so it returns an invalid index (i.e. an index pointing to the root).
"""
return QtCore.QModelIndex()
def data(self, index=QtCore.QModelIndex(), role=QtCore.Qt.DisplayRole):
"""Allow views to access the information that is stored inside the
dictionaries depending on the column that is being displayed.
"""
item_data = index.internalPointer()
section = index.column()
if role == QtCore.Qt.DisplayRole:
if section == 0:
return item_data['id']
if section == 1:
return item_data['description']
if role == QtCore.Qt.CheckStateRole:
if section == 2:
if item_data['id'] in self.associated_item_ids:
return QtCore.Qt.Checked
return QtCore.Qt.Unchecked
def setData(self, index, value, role):
"""This model is editable to a very limited extent, i.e. only items in
the third column can be modified by checking or unchecking the
corresponding checkbox.
"""
section = index.column()
item_id = index.internalPointer()['id']
if role == QtCore.Qt.CheckStateRole and section == 2:
if (value == QtCore.Qt.Checked and
item_id not in self.associated_item_ids):
self.associated_item_ids.append(item_id)
self.dataChanged.emit(index, index)
return True
if (value == QtCore.Qt.Unchecked and
item_id in self.associated_item_ids):
self.associated_item_ids.remove(item_id)
self.dataChanged.emit(index, index)
return True
return False
class UseCaseListModel(ItemListModel):
"""This is used to represent the use cases for a given requirement.
"""
def __init__(self, item):
super(UseCaseListModel, self).__init__(item)
@classmethod
def _get_item_names_and_descriptions(cls):
"""Returns IDs and descriptions of all uses cases.
"""
return dal.get_all_uc_names_and_descriptions()
@classmethod
def _get_associated_item_ids(cls, item):
"""Returns an iterable containing the IDs of all those use cases that
are linked to the given requirement item.
"""
return [uc.uc_id for uc in item.use_cases]
class TestListModel(ItemListModel):
"""This is used to represent the tests for a given requirement.
"""
def __init__(self, item):
super(TestListModel, self).__init__(item)
@classmethod
def _get_item_names_and_descriptions(cls):
"""Returns IDs and descriptions of all tests.
"""
return dal.get_all_test_names_and_descriptions()
@classmethod
def _get_associated_item_ids(cls, item):
"""Returns an iterable containing the IDs of all those tests that
are linked to the given requirement item.
"""
return [test.test_id for test in item.tests]
class RequirementListModel(ItemListModel):
"""This is used to represent the requirement list that can be associated to
a given use case or to a test.
"""
def __init__(self, item):
super(RequirementListModel, self).__init__(item)
@classmethod
def _get_item_names_and_descriptions(cls):
"""Returns IDs and descriptions of all requirements.
"""
return dal.get_all_requirement_names_and_descriptions()
@classmethod
def _get_associated_item_ids(cls, item):
"""Returns an iterable containing the IDs of all those requirements
that are linked to the given item (either a use case or a test).
"""
return [req.req_id for req in item.requirements]
|
import enigma
import pprint
import NavigationInstance
from enigma import iServiceInformation
nin = NavigationInstance.instance
print dir(nin.RecordTimer)
pprint.pprint(nin.RecordTimer.timer_list)
print dir(nin.RecordTimer.timer_list[0])
first = nin.RecordTimer.timer_list[0]
print "EIT:"
print first.eit
print first.begin
print first.disabled
print first.name
|
"""Models for racks and rack items"""
from __future__ import unicode_literals
import json
from itertools import chain
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from nav.models.fields import VarcharField
from nav.models.manage import Room, Sensor
class RackManager(models.Manager):
"""A manager for the rack model"""
def get_all_sensor_pks_in_room(self, room):
"""Returns an exhaustive list of the primary keys of sensors added to
all racks in the given room.
:type room: nav.models.manage.Room
"""
sensor_pks = (rack.get_all_sensor_pks() for rack in self.filter(room=room))
return set(chain(*sensor_pks))
@python_2_unicode_compatible
class Rack(models.Model):
"""A physical rack placed in a room."""
objects = RackManager()
id = models.AutoField(primary_key=True, db_column='rackid')
room = models.ForeignKey(Room, on_delete=models.CASCADE, db_column='roomid')
rackname = VarcharField(blank=True)
ordering = models.IntegerField()
_configuration = VarcharField(default=None, db_column='configuration')
__configuration = None
item_counter = models.IntegerField(default=0, null=False, db_column='item_counter')
class Meta(object):
db_table = 'rack'
def __str__(self):
return "'{}' in {}".format(self.rackname or self.id, self.room.pk)
@property
def configuration(self):
"""Gets (and sets) the rackitem configuration for this rack
The rack item configuration is stored as JSONB, and is returned as a
dict by psycopg.
"""
if self.__configuration is None:
if self._configuration is None:
self._configuration = {}
self._configuration.setdefault('left', [])
self._configuration.setdefault('center', [])
self._configuration.setdefault('right', [])
self._configuration['left'] = [
rack_decoder(x) for x in self._configuration['left']
]
self._configuration['right'] = [
rack_decoder(x) for x in self._configuration['right']
]
self._configuration['center'] = [
rack_decoder(x) for x in self._configuration['center']
]
self.__configuration = self._configuration
return self.__configuration
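    # Illustrative example of a decoded configuration (rack_decoder below turns
    # each entry back into a rack item object):
    #
    #     {"left":   [{"__type__": "SensorRackItem", "id": 1, "sensor": 42}],
    #      "center": [],
    #      "right":  []}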
def save(self, *args, **kwargs):
self._configuration = json.dumps(self.configuration, cls=RackEncoder)
return super(Rack, self).save(*args, **kwargs)
def _column(self, column):
return self.configuration[column]
@property
def left_column(self):
"""Gets all rackitems in the left column"""
return self._column('left')
@property
def right_column(self):
"""Gets all rackitems in the right column"""
return self._column('right')
@property
def center_column(self):
"""Gets all rackitems in the center column"""
return self._column('center')
def add_left_item(self, item):
"""
:type item: RackItem
"""
self.item_counter += 1
item.id = self.item_counter
self.left_column.append(item)
def add_center_item(self, item):
"""
:type item: RackItem
"""
self.item_counter += 1
item.id = self.item_counter
self.center_column.append(item)
def add_right_item(self, item):
"""
:type item: RackItem
"""
self.item_counter += 1
item.id = self.item_counter
self.right_column.append(item)
def remove_left_item(self, index):
"""
:type index: int
"""
self.left_column.pop(index)
def remove_center_item(self, index):
"""
:type index: int
"""
self.center_column.pop(index)
def remove_right_item(self, index):
"""
:type index: int
"""
self.right_column.pop(index)
def get_all_sensor_pks(self):
"""Returns an exhaustive list of the primary keys of sensors in this
rack
"""
return []
def rack_decoder(obj):
"""Instantiates the correct object based on __type__ internal"""
if '__type__' in obj:
if obj['__type__'] == 'SensorRackItem':
return SensorRackItem(**obj)
if obj['__type__'] == 'SensorsDiffRackItem':
return SensorsDiffRackItem(**obj)
if obj['__type__'] == 'SensorsSumRackItem':
return SensorsSumRackItem(**obj)
return obj
class RackEncoder(json.JSONEncoder):
"""TODO: Write doc"""
def default(self, obj):
if isinstance(obj, BaseRackItem):
return obj.to_json()
# Let the base class default method raise the TypeError
return json.JSONEncoder.default(self, obj)
class BaseRackItem(object):
"""The super class for rack items
This class should never be used directly
"""
def __init__(self, id=None, **kwargs):
self.id = id
def to_json(self):
"""TODO: Not really to_json is it?"""
return {
'__type__': self.__class__.__name__,
'id': self.id,
}
def title(self):
"""A possible long description"""
        return self.human_readable()
def get_metric(self):
"""Returns the metric used for getting the values"""
raise NotImplementedError
def unit_of_measurement(self):
"""Returns the unit of measurement
:rtype: str
"""
raise NotImplementedError
def get_absolute_url(self):
"""Returns the linktarget"""
pass
def human_readable(self):
"""A short and consise description"""
raise NotImplementedError
def get_display_range(self):
"""Gets the range of values for this sensor
Is a list to simplify front-end usage
"""
raise NotImplementedError
def get_display_configuration(self):
"""Return any other configuration required to display this rack item"""
return {}
class SensorRackItem(BaseRackItem):
"""A rackitem that display the value of a sensor"""
def __init__(self, sensor, **kwargs):
super(SensorRackItem, self).__init__(**kwargs)
self.sensor = sensor
if isinstance(sensor, int):
try:
self.sensor = Sensor.objects.get(pk=sensor)
except Sensor.DoesNotExist:
pass
def to_json(self):
data = super(SensorRackItem, self).to_json()
data['sensor'] = self.sensor.pk if self.sensor_exists() else self.sensor
return data
def title(self):
if self.sensor_exists():
return str(self.sensor)
else:
return "Sensor {} no longer exists".format(self.sensor)
def get_metric(self):
if self.sensor_exists():
return self.sensor.get_metric_name()
def unit_of_measurement(self):
if self.sensor_exists():
return self.sensor.unit_of_measurement
def get_absolute_url(self):
if self.sensor_exists():
return self.sensor.get_absolute_url()
def human_readable(self):
if self.sensor_exists():
return self.sensor.human_readable
def get_display_range(self):
if self.sensor_exists():
return list(self.sensor.get_display_range())
else:
return []
def get_display_configuration(self):
if self.sensor_exists():
return self.sensor.get_display_configuration()
return {}
def sensor_exists(self):
return isinstance(self.sensor, Sensor)
class SensorsDiffRackItem(BaseRackItem):
"""A rackitem that display the difference of two sensors"""
def __init__(self, minuend, subtrahend, **kwargs):
super(SensorsDiffRackItem, self).__init__(**kwargs)
self.minuend = minuend
self.subtrahend = subtrahend
if isinstance(minuend, int):
try:
self.minuend = Sensor.objects.get(pk=minuend)
except Sensor.DoesNotExist:
pass
if isinstance(subtrahend, int):
try:
self.subtrahend = Sensor.objects.get(pk=subtrahend)
except Sensor.DoesNotExist:
pass
def to_json(self):
data = super(SensorsDiffRackItem, self).to_json()
data['minuend'] = self.minuend.pk
data['subtrahend'] = self.subtrahend.pk
return data
def title(self):
return "Difference between {} and {}".format(self.minuend, self.subtrahend)
def get_metric(self):
return "diffSeries({minuend},{subtrahend})".format(
minuend=self.minuend.get_metric_name(),
subtrahend=self.subtrahend.get_metric_name(),
)
def unit_of_measurement(self):
return self.minuend.unit_of_measurement
def get_absolute_url(self):
return ""
def human_readable(self):
return "{} - {}".format(
self.minuend.human_readable, self.subtrahend.human_readable
)
def get_display_range(self):
return list(self.minuend.get_display_range())
class SensorsSumRackItem(BaseRackItem):
"""A rackitem that display the sum of several sensors"""
def __init__(self, title, sensors, **kwargs):
super(SensorsSumRackItem, self).__init__(**kwargs)
self.sensors = sensors
self._title = title
for i, sensor in enumerate(self.sensors):
if isinstance(sensor, int):
try:
self.sensors[i] = Sensor.objects.get(pk=sensor)
except Sensor.DoesNotExist:
pass
def to_json(self):
data = super(SensorsSumRackItem, self).to_json()
data['sensors'] = [sensor.pk for sensor in self.sensors]
data['title'] = self._title
return data
def title(self):
return ", ".join([s.human_readable for s in self.sensors])
def get_metric(self):
return "sumSeries({})".format(
",".join((s.get_metric_name() for s in self.sensors))
)
def unit_of_measurement(self):
if self.sensors:
return self.sensors[0].unit_of_measurement
return 'N/A'
def get_absolute_url(self):
return ""
def human_readable(self):
return self._title
def get_display_range(self):
return [sum(r) for r in zip(*[s.get_display_range() for s in self.sensors])]
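# Illustrative note (added): the display range is the element-wise sum of the
# sensors' ranges, e.g. two sensors with ranges [0, 10] and [5, 20]
# (hypothetical values) combine to [5, 30].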
|
"""
BORIS
Behavioral Observation Research Interactive Software
Copyright 2012-2022 Olivier Friard
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import os
import pathlib
import logging
from PyQt5.QtCore import Qt, QSettings
from PyQt5.QtWidgets import QAbstractItemView
from boris import observations_list
from boris.config import (INDEPENDENT_VARIABLES, OBSERVATIONS, DESCRIPTION, TEXT, NUMERIC, TYPE, MEDIA, FILE, LIVE,
OPEN, VIEW, EDIT, ETHOGRAM, EVENTS, SINGLE, MULTIPLE, SELECT1, NO_FOCAL_SUBJECT, HHMMSS,
STATE, BEHAVIOR_CODE)
from boris import gui_utilities
from boris import utilities
from boris import project_functions
def select_observations(pj: dict, mode: str, windows_title: str = "") -> tuple:
"""
Allow the user to select observations.
mode: accepted values: OPEN, EDIT, SINGLE, MULTIPLE, SELECT1
Args:
pj (dict): BORIS project dictionary
mode (str): mode for selection: OPEN, EDIT, SINGLE, MULTIPLE, SELECT1
windows_title (str): title for windows
Returns:
str: selected mode: OPEN, EDIT, VIEW
list: list of selected observations
"""
obsListFields = ["id", "date", "description", "subjects", "observation duration", "exhaustivity %", "media"]
indepVarHeader, column_type = [], [TEXT, TEXT, TEXT, TEXT, NUMERIC, NUMERIC, TEXT]
if INDEPENDENT_VARIABLES in pj:
for idx in utilities.sorted_keys(pj[INDEPENDENT_VARIABLES]):
indepVarHeader.append(pj[INDEPENDENT_VARIABLES][idx]["label"])
column_type.append(pj[INDEPENDENT_VARIABLES][idx]["type"])
data = []
not_paired = []
state_events_list = [pj[ETHOGRAM][x][BEHAVIOR_CODE] for x in pj[ETHOGRAM] if STATE in pj[ETHOGRAM][x][TYPE].upper()]
for obs in sorted(list(pj[OBSERVATIONS].keys())):
date = pj[OBSERVATIONS][obs]["date"].replace("T", " ")
descr = utilities.eol2space(pj[OBSERVATIONS][obs][DESCRIPTION])
# subjects
observedSubjects = [
NO_FOCAL_SUBJECT if x == "" else x for x in project_functions.extract_observed_subjects(pj, [obs])
]
subjectsList = ", ".join(observedSubjects)
# observed time interval
interval = project_functions.observed_interval(pj[OBSERVATIONS][obs])
observed_interval_str = str(interval[1] - interval[0])
# media
mediaList = []
if pj[OBSERVATIONS][obs][TYPE] in [MEDIA]:
if pj[OBSERVATIONS][obs][FILE]:
for player in sorted(pj[OBSERVATIONS][obs][FILE].keys()):
for media in pj[OBSERVATIONS][obs][FILE][player]:
mediaList.append(f"#{player}: {media}")
if len(mediaList) > 8:
media = " ".join(mediaList)
else:
media = "\n".join(mediaList)
elif pj[OBSERVATIONS][obs][TYPE] in [LIVE]:
media = LIVE
# independent variables
indepvar = []
if INDEPENDENT_VARIABLES in pj[OBSERVATIONS][obs]:
for var_label in indepVarHeader:
if var_label in pj[OBSERVATIONS][obs][INDEPENDENT_VARIABLES]:
indepvar.append(pj[OBSERVATIONS][obs][INDEPENDENT_VARIABLES][var_label])
else:
indepvar.append("")
# check unpaired events
ok, _ = project_functions.check_state_events_obs(obs, pj[ETHOGRAM], pj[OBSERVATIONS][obs], HHMMSS)
if not ok:
not_paired.append(obs)
# check exhaustivity of observation
exhaustivity = project_functions.check_observation_exhaustivity(pj[OBSERVATIONS][obs][EVENTS], [],
state_events_list)
data.append([obs, date, descr, subjectsList, observed_interval_str, str(exhaustivity), media] + indepvar)
obsList = observations_list.observationsList_widget(data,
header=obsListFields + indepVarHeader,
column_type=column_type,
not_paired=not_paired)
if windows_title:
obsList.setWindowTitle(windows_title)
obsList.pbOpen.setVisible(False)
obsList.pbView.setVisible(False)
obsList.pbEdit.setVisible(False)
obsList.pbOk.setVisible(False)
obsList.pbSelectAll.setVisible(False)
obsList.pbUnSelectAll.setVisible(False)
obsList.mode = mode
if mode == OPEN:
obsList.view.setSelectionMode(QAbstractItemView.SingleSelection)
obsList.pbOpen.setVisible(True)
if mode == VIEW:
obsList.view.setSelectionMode(QAbstractItemView.SingleSelection)
obsList.pbView.setVisible(True)
if mode == EDIT:
obsList.view.setSelectionMode(QAbstractItemView.SingleSelection)
obsList.pbEdit.setVisible(True)
if mode == SINGLE:
obsList.view.setSelectionMode(QAbstractItemView.SingleSelection)
obsList.pbOpen.setVisible(True)
obsList.pbView.setVisible(True)
obsList.pbEdit.setVisible(True)
if mode == MULTIPLE:
obsList.view.setSelectionMode(QAbstractItemView.MultiSelection)
obsList.pbOk.setVisible(True)
obsList.pbSelectAll.setVisible(True)
obsList.pbUnSelectAll.setVisible(True)
if mode == SELECT1:
obsList.view.setSelectionMode(QAbstractItemView.SingleSelection)
obsList.pbOk.setVisible(True)
# restore window geometry
gui_utilities.restore_geometry(obsList, "observations list", (900, 600))
obsList.view.sortItems(0, Qt.AscendingOrder)
for row in range(obsList.view.rowCount()):
obsList.view.resizeRowToContents(row)
selected_observations = []
result = obsList.exec_()
# saving window geometry in ini file
gui_utilities.save_geometry(obsList, "observations list")
if result:
if obsList.view.selectedIndexes():
for idx in obsList.view.selectedIndexes():
if idx.column() == 0: # first column
selected_observations.append(idx.data())
if result == 0: # cancel
resultStr = ""
if result == 1: # select
resultStr = "ok"
if result == 2: # open
resultStr = OPEN
if result == 3: # edit
resultStr = EDIT
if result == 4: # view
resultStr = VIEW
return resultStr, selected_observations
|
import os
import MySQLdb
HOST = os.getenv("DB_HOST")
USER = os.getenv("DB_USERNAME")
PASS = os.getenv("DB_PASSWORD")
NAME = os.getenv("DB_NAME")
if HOST != "":
conn = MySQLdb.connection (host=HOST, user=USER, passwd=PASS, db=NAME)
strstat = [ 'Accepted!', 'Wrong answer', 'Time limit exceeded', 'Memory limit exceeded',
'Run time error', 'Unexpected error', 'Signal #' ]
def status (query, SUBID, status, test = -1, time = -1, mem = -1, score = -1):
if query == 'COMPILE':
if status == -1:
msg = 'compiling...'
elif status == 0:
msg = 'successfully compiled'
elif status == 124:
msg = 'compile time limit exceeded'
else:
msg = 'compilation error'
if HOST != "":
conn.query ("""UPDATE `submissions`
SET status='{MSG}' WHERE id='{SUBID}'""".format (MSG = msg, SUBID = SUBID))
elif query == 'RUN':
if status == -1:
if test == -1:
msg = 'running...'
else:
msg = 'running on test ' + str (test + 1)
else:
msg = 'running on test ' + str (test + 1)
conn.query ("UPDATE `submissions` SET status='{MSG}' WHERE id='{SUBID}'".format (MSG = msg, SUBID = SUBID))
return status > 0
elif query == 'END':
if status == -1:
status = 5
msg = strstat [min (6, status)]
if status > 5:
msg += str (status - 6)
if status > 0:
msg += ' on test ' + str (test + 1)
if HOST != "":
conn.query ("""UPDATE `submissions`
SET status='{MSG}' WHERE id='{SUBID}'""".format (MSG = msg, SUBID = SUBID))
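# Illustrative note (added): in the 'END' branch the code maps status to a
# message via strstat, e.g. status=0 -> "Accepted!", status=1 with test=2 ->
# "Wrong answer on test 3", and status=8 with test=0 -> "Signal #2 on test 1".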
|
import util
from .models import *
|
import random
from mathmaker.lib import shared
from mathmaker.lib.tools.wording import setup_wording_format_of
from mathmaker.lib.core.root_calculus import Value
from mathmaker.lib.document.content import component
class sub_object(component.structure):
def __init__(self, build_data, picture='true', **options):
super().setup("minimal", **options)
super().setup("length_units", **options)
super().setup("right_triangle", **options)
# There's no need to setup numbers for this question.
if self.variant in ['default', 'random']:
variant = shared.trigo_vocabulary_source.next()[0]
else:
variant = self.variant
if variant not in ['adjacent', 'opposite']:
raise ValueError('XMLFileFormatError: Invalid variant: {v}, '
.format(v=variant)
+ 'It should be in: '
'[\'adjacent\', \'opposite\']')
angle_nb = random.choice([0, 2])
self.right_triangle.setup_for_trigonometry(
angle_nb=angle_nb,
trigo_fct='cos',
down_length_val=Value(''),
up_length_val=Value(''),
length_unit=self.length_unit,
only_mark_unknown_angle=True)
self.acute_angle = shared.machine.write_math_style2(
self.right_triangle.angle[angle_nb].printed)
self.wording = {
'adjacent': _('Which side is adjacent to {acute_angle} ?'),
'opposite': _('Which side is opposite to {acute_angle} ?')
}[variant]
setup_wording_format_of(self)
side_getter = getattr(self.right_triangle,
'side_' + variant + '_to')
self.correct_answer = side_getter(
angle=self.right_triangle.angle[angle_nb]).length_name
self.answer_wording = {
'adjacent': _('The side adjacent to {acute_angle} is:'
' {correct_answer}'),
'opposite': _('The side opposite to {acute_angle} is:'
' {correct_answer}')
}[variant]
setup_wording_format_of(self, w_prefix='answer_')
self.q_nb_included_in_wording = False
def q(self, **options):
q_nb = options.get('number_of_the_question', '')
if q_nb:
q_nb = shared.machine.write(q_nb + '. ', emphasize='bold')
if self.wording:
self.q_nb_included_in_wording = True
return shared.machine.write_layout(
(1, 2),
[12, 8],
[q_nb + self.wording.format(**self.wording_format),
shared.machine.insert_picture(
self.right_triangle,
scale=0.8,
vertical_alignment_in_a_tabular=True)])
else:
return shared.machine.insert_picture(
self.right_triangle,
scale=0.8,
vertical_alignment_in_a_tabular=True)
def a(self, **options):
return self.answer_wording.format(**self.answer_wording_format)
|
from sheepsense import db
from sheepsense.model import List_C
import sheepsense.model.entry
import sheepsense.model.trial
class Run():
def __init__(
self,
id=None, identifier=None, trial=None, trialid=None, name=None, enddate=None,
serial=None, order=None, ordr=None, judge=None, resultsuri=None):
self.id = id
self.identifier = identifier
self._trial = trial
try:
self._trial_id = trial.id
except AttributeError:
self._trial_id = trialid
self.name = name
self.enddate = enddate
self.serial = serial
self.order = order if order is not None else ordr
self.judge = judge
self.resultsuri = resultsuri
self._entries = None
# pprint("_trial_id: %s" % self._trial_id)
if(
(self.id and not self.identifier)
or
(self.identifier and not self.id)
):
sql = '''
SELECT r.*
FROM runs r
WHERE r.id=%s OR r.identifier=%s
LIMIT 1
'''
res = db.exec_sql(sql, [
self.id,
self.identifier
])[0]
self.id = res['id']
self.identifier = res['identifier']
self._trial_id = res['trialid']
self.name = res['name']
self.enddate = res['enddate']
self.serial = res['serial']
self.order = res['ordr']
self.judge = res['judge']
self.resultsuri = res['resultsuri']
def _get_entries(self):
if not self._entries:
self._entries = sheepsense.model.entry.Entry_List(run_id=self.id)
return self._entries
entries = property(_get_entries)
def _get_trial(self):
if not self._trial:
self._trial = sheepsense.model.trial.Trial(id=self._trial_id)
return self._trial
trial = property(_get_trial)
class Run_List(List_C):
def __init__(
self,
size=0,
trial_id=None,
):
List_C.__init__(self, size=size)
self.trial_id = trial_id
def _search(self, force=False):
self._elements = []
if not self.trial_id:
return
sql = '''
SELECT *
FROM runs r
WHERE r.trialid=%s
'''
res = db.exec_sql(sql, [self.trial_id])
for r in res:
self._elements.append(Run(**r))
def _get_runs(self):
self._search()
return self._elements
runs = property(_get_runs)
|
"GPropertyGrid unittest suite"
import unittest
import load_module
import propertygrid
import properties
LOADER = unittest.TestLoader()
SUITE = LOADER.loadTestsFromModule(propertygrid)
SUITE.addTests(LOADER.loadTestsFromModule(properties))
RUNNER = unittest.TextTestRunner(verbosity=2)
RESULT = RUNNER.run(SUITE)
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('rango', '0003_auto_20160117_1552'),
]
operations = [
migrations.AddField(
model_name='bares',
name='slug',
field=models.SlugField(default=0),
preserve_default=False,
),
]
|
import unittest
from ripe.atlas.tools.helpers.sanitisers import sanitise
class TestSanitisersHelper(unittest.TestCase):
def test_sanitise(self):
self.assertEqual("clean", sanitise("clean"))
for i in list(range(0, 32)) + [127]:
self.assertEqual("unclean", sanitise("unclean" + chr(i)))
self.assertEqual(None, sanitise(None))
self.assertEqual(7, sanitise(7))
def test_sanitise_with_newline_exception(self):
self.assertEqual("unc\nlean", sanitise("unc\nlean", strip_newlines=False))
for i in set(list(range(0, 32)) + [127]).difference({10}):
self.assertEqual(
"unc\nlean", sanitise("unc\nlean" + chr(i), strip_newlines=False)
)
|
"""
Definition of the wflow_gr4 model.
----------------------------------
Usage:
wflow_gr4 [-l loglevel][-c configfile][-f][-h] -C case -R Runid -
-C: set the name of the case (directory) to run
-R: set the name runId within the current case
-c name of the config file (in the case directory)
-f: Force overwrite of existing results
-h: print usage information
-l: loglevel (must be one of DEBUG, WARNING, ERROR)
$Author: schelle $
$Id: wflow_gr4.py 923 2014-03-13 13:48:37Z schelle $
$Rev: 923 $
NOTES
-----
- The max length of the arrays is determined by the X4 parameter (int(X4))
- The X4 parameter is always uniform over the catchment. However, the state
of the UH is determined per grid cell.
"""
import os.path
import pcraster.framework
from wflow.wf_DynamicFramework import *
from wflow.wflow_adapt import *
def usage(*args):
sys.stdout = sys.stderr
for msg in args:
print(msg)
print(__doc__)
sys.exit(0)
def pcr_tanh(x):
"""
define tanh for pcraster objects
"""
return (pcr.exp(x) - pcr.exp(-x)) / (pcr.exp(x) + pcr.exp(-x))
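# Note (added): this is simply the identity tanh(x) = (e^x - e^-x) / (e^x + e^-x),
# written with pcr.exp so it applies cell-by-cell to PCRaster maps.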
def initUH1(X4, D):
"""
Initialize the UH1 unit hydrograph
Input:
- X4
- D
Returns:
- UH1, SH1
"""
NH = int(numpy.ceil(X4))
t = numpy.arange(1, NH + 1)
SH1 = numpy.minimum(1.0, (t / X4) ** D)
# Use numpy.diff to get the UH, insert value at zero to complete
UH1 = numpy.diff(SH1, axis=0)
UH1 = numpy.insert(UH1, 0, SH1[0])
return UH1, SH1
def initUH2(X4, D):
"""
Initialize the UH2 unit hydrograph
Input:
- X4
- D
Returns:
- UH2, SH2
"""
NH = int(numpy.ceil(X4))
t1 = numpy.arange(1, NH)
t2 = numpy.arange(NH, 2 * NH + 1)
SH2_1 = 0.5 * (t1 / X4) ** D
SH2_2 = 1 - 0.5 * (numpy.maximum(0, 2 - t2 / X4)) ** D
SH2 = numpy.minimum(1.0, numpy.hstack((SH2_1, SH2_2)))
# Use numpy.diff to get the UH, insert value at zero to complete
UH2 = numpy.diff(SH2, axis=0)
UH2 = numpy.insert(UH2, 0, SH2[0])
return UH2, SH2
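# Illustrative sketch (added, values approximate): for an assumed X4 = 3, D = 1.25,
# initUH1 gives SH1 ~ [0.25, 0.60, 1.0] and UH1 ~ [0.25, 0.35, 0.40] (ordinates
# summing to 1), while initUH2 spreads the same unit volume over 2*NH ordinates.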
def mk_qres(N):
"""
Returns an array (or array of maps) to store the
delayed flow in
Input:
- N: nr of steps
Output:
- list of N elements initialized with zeros
"""
uhq = []
for i in range(0, N):
uhq.append(pcr.cover(0.0))
return uhq
class WflowModel(pcraster.framework.DynamicModel):
"""
The user defined model class. This is your work!
"""
def __init__(self, cloneMap, Dir, RunDir, configfile):
"""
*Required*
The init function **must** contain what is shown below. Other functionality
may be added by you if needed.
"""
pcraster.framework.DynamicModel.__init__(self)
self.caseName = os.path.abspath(Dir)
self.clonemappath = os.path.join(os.path.abspath(Dir), "staticmaps", cloneMap)
pcr.setclone(self.clonemappath)
self.runId = RunDir
self.Dir = os.path.abspath(Dir)
self.configfile = configfile
self.SaveDir = os.path.join(self.Dir, self.runId)
def stateVariables(self):
"""
returns a list of state variables that are essential to the model.
This list is essential for the resume and suspend functions to work.
This function is specific for each model and **must** be present.
:var self.S_X1: production reservoir content at the beginning of the time step (divided by X1) [mm]
:var self.R_X3: routing reservoir content at the beginning of the time step (divided by X3) [mm]
.. todo::
add routing state vars
"""
states = ["S_X1", "R_X3", "QUH1", "QUH2"]
return states
def supplyCurrentTime(self):
"""
*Optional*
Supplies the current time in seconds after the start of the run
This function is optional. If it is not set the framework assumes
the model runs with daily timesteps.
Output:
- time in seconds since the start of the model run
"""
return self.currentTimeStep() * int(
configget(self.config, "model", "timestepsecs", "3600")
)
def suspend(self):
"""
*Required*
Suspends the model to disk. All variables needed to restart the model
are saved to disk as pcraster maps. Use resume() to re-read them
This function is required.
"""
self.logger.info("Saving initial conditions...")
self.wf_suspend(os.path.join(self.SaveDir, "outstate"))
if self.OverWriteInit:
self.logger.info("Saving initial conditions over start conditions...")
self.wf_suspend(os.path.join(self.Dir, "instate"))
def initial(self):
"""
Initial part of the gr4 model, executed only once. Reads all static model
information (parameters) and sets-up the variables used in modelling.
:var dt.tbl: time step (1) [hour]
:var B.tbl: routing ratio (0.9) [-]
:var NH: UH dimension (number) taken from ini file [-]
:var D.tbl: variable for hourly time steps (1.25) [-]
:var C.tbl: variable (number) [hour]
*Parameters*
:var X1.tbl: capacity of the production store, accounts for soil moisture (number) [mm]
:var X2.tbl: water exchange coefficient (number) [mm]
:var X3.tbl: capacity of the routing store (number) [mm]
:var X4 (in ini): time base of the unit hydrograph (number) [hour]
"""
#: pcraster option to calculate with units or cells. Not really an issue
#: in this model but always good to keep in mind.
pcr.setglobaloption("unittrue")
self.thestep = pcr.scalar(0)
self.ZeroMap = pcr.cover(0.0)
self.timestepsecs = int(configget(self.config, "model", "timestepsecs", "3600"))
self.basetimestep = 3600
self.reinit = int(configget(self.config, "run", "reinit", "0"))
self.OverWriteInit = int(configget(self.config, "model", "OverWriteInit", "0"))
self.SaveMapDir = self.Dir + "/" + self.runId + "/outmaps"
self.TEMP_mapstack = self.Dir + configget(
self.config, "inputmapstacks", "Temperature", "/inmaps/TEMP"
)
self.intbl = configget(self.config, "model", "intbl", "intbl")
self.Altitude = pcr.readmap(self.Dir + "/staticmaps/wflow_dem")
wflow_subcatch = configget(
self.config, "model", "wflow_subcatch", "/staticmaps/wflow_subcatch.map"
)
wflow_landuse = configget(
self.config, "model", "wflow_landuse", "/staticmaps/wflow_landuse.map"
)
wflow_soil = configget(
self.config, "model", "wflow_soil", "/staticmaps/wflow_soil.map"
)
self.P_mapstack = self.Dir + configget(
self.config, "inputmapstacks", "Precipitation", "/inmaps/P"
) # timeseries for rainfall
self.PET_mapstack = self.Dir + configget(
self.config, "inputmapstacks", "EvapoTranspiration", "/inmaps/PET"
) # timeseries for rainfall"/inmaps/PET" # potential evapotranspiration
sizeinmetres = int(configget(self.config, "layout", "sizeinmetres", "0"))
subcatch = pcr.ordinal(
pcr.readmap(self.Dir + wflow_subcatch)
) # Determines the area of calculations (all cells > 0)
subcatch = pcr.ifthen(subcatch > 0, subcatch)
self.xl, self.yl, self.reallength = pcrut.detRealCellLength(
self.ZeroMap, sizeinmetres
)
self.ToCubic = (
self.reallength * self.reallength * 0.001
) / self.timestepsecs # m3/s
self.LandUse = pcr.readmap(
self.Dir + wflow_landuse
) #: Map with land-use/cover classes
self.LandUse = pcr.cover(self.LandUse, pcr.nominal(pcr.ordinal(subcatch) > 0))
self.Soil = pcr.readmap(self.Dir + wflow_soil) #: Map with soil classes
self.Soil = pcr.cover(self.Soil, pcr.nominal(pcr.ordinal(subcatch) > 0))
self.OutputId = pcr.readmap(
self.Dir + wflow_subcatch
) # location of subcatchment
# hourly time step
self.dt = int(configget(self.config, "gr4", "dt", "1"))
# routing ratio found in criteria validation file, first line
self.B = float(configget(self.config, "gr4", "B", "0.9"))
# hourly time-steps
self.D = float(configget(self.config, "gr4", "D", "1.25"))
# The following parameters are spatial (apart from X4)
# capacity of the production store, accounts for soil moisture (mm) (>=0)
self.X1 = self.readtblDefault(
self.Dir + "/" + self.intbl + "/X1.tbl",
self.LandUse,
subcatch,
self.Soil,
285.72,
)
# water exchange coefficient
self.X2 = self.readtblDefault(
self.Dir + "/" + self.intbl + "/X2.tbl",
self.LandUse,
subcatch,
self.Soil,
-0.42,
)
# capacity of the routing store (mm)
self.X3 = self.readtblDefault(
self.Dir + "/" + self.intbl + "/X3.tbl",
self.LandUse,
subcatch,
self.Soil,
169.02,
)
# time base of the unit hydrograph (hr)
# self.X4=self.readtblDefault(self.Dir + "/" + self.intbl + "/X4.tbl",self.LandUse,subcatch,self.Soil,32.85)
self.X4 = float(configget(self.config, "gr4", "X4", "32.85"))
# Set static initial values here #########################################
# Number of UH units
self.NH = int(numpy.ceil(self.X4))
self.UH1, self.SH1 = initUH1(self.X4, self.D)
self.UH2, self.SH2 = initUH2(self.X4, self.D)
self.QUH1 = mk_qres(self.NH)
self.QUH2 = mk_qres(self.NH * 2)
self.logger.info("End of initial section...")
def resume(self):
"""
*Required*
This function is required. Read initial state maps (they are output of a
previous call to suspend()). The implementation shown here is the most basic
setup needed.
"""
self.logger.info("Reading initial conditions...")
#: It is advised to use the wf_resume() function
#: here, which picks up the variables saved by a call to wf_suspend()
if self.reinit == 1:
# STATES
self.S_X1 = 245.4900 / self.X1 # STATE(1),level in production store
self.R_X3 = 43.9031 / self.X3 # STATE(2),level in routing store
self.QUH1 = mk_qres(self.NH)
self.QUH2 = mk_qres(self.NH * 2)
else:
self.wf_resume(os.path.join(self.Dir, "instate"))
def dynamic(self):
"""
*Required*
:var self.Pn: net precipitation [mm]
:var self.En: net evapotranspiration [mm]
:var self.Ps: part of Pn that feeds the production reservoir [mm]
:var self.Es: evaporation quantity subtracted from the production reservoir [mm]
"""
self.logger.debug(
"Step: "
+ str(int(self.thestep + self._d_firstTimeStep))
+ "/"
+ str(int(self._d_nrTimeSteps))
)
self.thestep = self.thestep + 1
self.Precipitation = pcr.cover(self.wf_readmap(self.P_mapstack, 0.0), 0.0)
self.PotEvaporation = pcr.cover(self.wf_readmap(self.PET_mapstack, 0.0), 0.0)
# ROUTING WATER AND PRODUCTION RESERVOIR PERCOLATION ========================================================
self.Pn = pcr.ifthenelse(
self.Precipitation >= self.PotEvaporation,
self.Precipitation - self.PotEvaporation,
pcr.scalar(0.0),
)
self.En = pcr.ifthenelse(
self.Precipitation >= self.PotEvaporation,
pcr.scalar(0.0),
self.PotEvaporation - self.Precipitation,
)
self.Ps = (self.X1 * (1 - (self.S_X1) ** 2) * pcr_tanh(self.Pn / self.X1)) / (
1 + self.S_X1 * pcr_tanh(self.Pn / self.X1)
)
self.Es = (
self.S_X1 * self.X1 * (2 - self.S_X1) * pcr_tanh(self.En / self.X1)
) / (1 + (1 - self.S_X1) * pcr_tanh(self.En / self.X1))
self.Ps = pcr.ifthenelse(
self.Precipitation >= self.PotEvaporation, self.Ps, pcr.scalar(0.0)
)
self.Es = pcr.ifthenelse(
self.Precipitation >= self.PotEvaporation, pcr.scalar(0.0), self.Es
)
self.Sprim_X1 = (
self.S_X1 + ((self.Ps - self.Es) * self.dt) / self.X1
) # reservoir new content
# Filter out value < 0 in self.Sprim_X1
self.Sprim_X1 = pcr.max(0.0, self.Sprim_X1)
self.Perc = (
self.Sprim_X1 * self.X1 * (1 - (1 + (self.Sprim_X1 / 5.25) ** 4) ** -0.25)
) # percolation
self.S_X1 = (
self.Sprim_X1 - (self.Perc * self.dt) / self.X1
) # reservoir new content
self.Pr = self.Perc + (self.Pn - self.Ps) # quantity to routing
# ACTUAL ROUTING =====================================================
# UH1 has a memory of int(X4) steps
# output of UH1 =========================================================
for j in range(0, self.NH): # UH1 output for each time step
self.QUH1[j] = self.QUH1[j] + float(self.UH1[j]) * self.Pr
self.Q9 = self.B * self.QUH1[0]
# Add the current Q to the UH res
for j in range(0, 2 * self.NH): # UH2 output for each time step
self.QUH2[j] = self.QUH2[j] + float(self.UH2[j]) * self.Pr
self.Q1prim = self.QUH2[0]
# Get final runoff
self.Q1 = (1 - self.B) * self.Q1prim
self.F = self.X2 * (self.R_X3) ** 3.5 # water subterranean exchange
self.Rprim_X3 = (
self.R_X3 + (self.Q9 + self.F) / self.X3
) # new routing reservoir level
self.Qr = (
self.Rprim_X3 * self.X3 * (1.0 - (1.0 + (self.Rprim_X3) ** 4) ** -0.25)
) # routing output
self.R_X3 = self.Rprim_X3 - self.Qr / self.X3 # new routing reservoir level
self.Qd = pcr.max(0.0, self.Q1 + self.F) # flow component Qd
self.Q = self.Qr + self.Qd # total flow Q in mm/hr
# Updated this line to get total Q per basin
self.SurfaceRunoff = pcr.areatotal(self.Q * self.ToCubic, self.OutputId)
# Remove first item from the UH stacks and add a new empty one at the end
self.QUH1 = delete(self.QUH1, 0)
self.QUH1 = append(self.QUH1, pcr.cover(0.0))
self.QUH2 = delete(self.QUH2, 0)
self.QUH2 = append(self.QUH2, pcr.cover(0.0))
def main(argv=None):
"""
*Optional*
Perform command line execution of the model. This example uses the getopt
module to parse the command line options.
The user can set the caseName, the runDir, the timestep and the configfile.
"""
global multpars
caseName = "default"
runId = "run_default"
configfile = "wflow_gr4.ini"
_lastTimeStep = 0
_firstTimeStep = 0
timestepsecs = 3600
wflow_cloneMap = "wflow_subcatch.map"
NoOverWrite = True
loglevel = logging.DEBUG
# This allows us to use the model both on the command line and to call
# the model using the main function from another python script.
if argv is None:
argv = sys.argv[1:]
if len(argv) == 0:
usage()
return
opts, args = getopt.getopt(argv, "C:S:T:c:s:R:fhIXi:l:")
for o, a in opts:
if o == "-C":
caseName = a
if o == "-R":
runId = a
if o == "-c":
configfile = a
if o == "-s":
timestepsecs = int(a)
if o == "-T":
_lastTimeStep = int(a)
if o == "-S":
_firstTimeStep = int(a)
if o == "-f":
NoOverWrite = 0
if o == "-h":
usage()
if o == "-l":
exec("loglevel = logging." + a)
if len(opts) <= 1:
usage()
if _lastTimeStep < _firstTimeStep:
print(
"The last timestep ("
+ str(_lastTimeStep)
+ ") is smaller than the first timestep ("
+ str(_firstTimeStep)
+ ")"
)
usage()
myModel = WflowModel(wflow_cloneMap, caseName, runId, configfile)
dynModelFw = wf_DynamicFramework(
myModel, _lastTimeStep, firstTimestep=_firstTimeStep
)
dynModelFw.createRunId(NoOverWrite=NoOverWrite, level=loglevel)
for o, a in opts:
if o == "-X":
configset(myModel.config, "model", "OverWriteInit", "1", overwrite=True)
if o == "-I":
configset(myModel.config, "model", "reinit", "1", overwrite=True)
if o == "-i":
configset(myModel.config, "model", "intbl", a, overwrite=True)
if o == "-s":
configset(myModel.config, "model", "timestepsecs", a, overwrite=True)
if o == "-c":
configset(myModel.config, "model", "configfile", a, overwrite=True)
dynModelFw._runInitial()
dynModelFw._runResume()
# dynModelFw._runDynamic(0,0)
dynModelFw._runDynamic(_firstTimeStep, _lastTimeStep)
dynModelFw._runSuspend()
dynModelFw._wf_shutdown()
if __name__ == "__main__":
main()
|
from sqlalchemy import (
Integer,
String,
Column,
UniqueConstraint,
Index,
ForeignKey,
)
from sqlalchemy.orm import (
synonym,
relationship,
backref,
)
from sqlalchemy.dialects.postgresql import JSON
from ..models import (
DBSession,
Base
)
from ..lib import EnumIntType
from ..lib.utils.common_utils import translate as _
class ResourceType(Base):
__tablename__ = 'resource_type'
__table_args__ = (
UniqueConstraint(
'module',
'resource_name',
name='unique_idx_resource_type_module',
),
UniqueConstraint(
'name',
name='unique_idx_resource_type_name',
),
Index('idx_resource_type_name', 'name'),
Index('idx_resource_type_module', 'module'),
Index('idx_resource_type_resource_name', 'resource_name'),
)
STATUS = (
('active', _(u'active')),
('disabled', _(u'disabled')),
)
id = Column(
Integer(),
primary_key=True,
nullable=False,
autoincrement=True
)
resource_id = Column(
Integer,
ForeignKey(
'resource.id',
name="fk_resource_id_resource_type",
ondelete='restrict',
onupdate='cascade',
),
nullable=False,
)
name = Column(
String(length=32),
nullable=False,
)
humanize = Column(
String(length=32),
nullable=False,
)
_resource = Column(
'resource_name',
String(length=32),
nullable=False,
)
module = Column(
String(length=128),
nullable=False,
)
settings = Column(
JSON,
primary_key=False,
)
descr = Column(
String(length=255),
)
status = Column(
EnumIntType(STATUS),
nullable=False,
)
resource_obj = relationship(
'Resource',
backref=backref(
'resource_type_obj',
uselist=False,
cascade="all,delete"
),
cascade="all,delete",
foreign_keys=[resource_id],
uselist=False
)
@classmethod
def get(cls, id):
if id is None:
return None
return DBSession.query(cls).get(id)
@classmethod
def by_resource_id(cls, resource_id):
if resource_id is None:
return None
return (
DBSession.query(cls).filter(cls.resource_id == resource_id).first()
)
@classmethod
def by_name(cls, name):
return DBSession.query(cls).filter(cls.name == name).first()
@classmethod
def by_humanize(cls, humanize):
return DBSession.query(cls).filter(cls.humanize == humanize).first()
@classmethod
def by_resource_name(cls, module, resource_name):
return (
DBSession.query(cls)
.filter(
cls.module == module,
cls._resource == resource_name
)
.first()
)
@property
def resource(self):
return self._resource
@resource.setter
def resource(self, resource):
assert isinstance(resource, (str, unicode)), type(resource)
path = resource.split('.')
self.module = '.'.join(path[:-1])
self._resource = path[-1]
@property
def resource_full(self):
return "%s.%s" % (self.module, self.resource)
def is_active(self):
return self.status == 'active'
def __repr__(self):
return (
"%s (id=%s, resource_id=%s, context=%s)"
% (
self.__class__.__name__,
self.id,
self.resource_id,
self.resource
)
)
resource = synonym('_resource', descriptor=resource)
|
from collections import namedtuple
import StringIO
import json
import urllib
import urllib2
import os
import collections
import urlparse
import gzip
import re
import sys
import zipfile
from CTMagic import Whatype
newLine = os.linesep
conversations = []
objects = []
Errors = []
hosts = collections.OrderedDict()
request_logs = []
plugins = []
plugins_folder = "plugins/"
pcap_file = ""
VERSION = "0.3"
BUILD = "11"
ABOUT = "CapTipper v" + VERSION + " b" + BUILD + " - Malicious HTTP traffic explorer tool" + newLine + \
"Copyright 2015 Omri Herscovici <omriher@gmail.com>" + newLine
USAGE = ("CapTipper.py <pcap_file> [options]" + newLine + newLine +
"Examples: CapTipper.py ExploitKit.pcap - explore and start server on port 80" + newLine +
" CapTipper.py ExploitKit.pcap -p 1234 - explore and start server on port 1234" + newLine +
" CapTipper.py ExploitKit.pcap -d /tmp/ - dumps all files and exit" + newLine +
" CapTipper.py ExploitKit.pcap -r /tmp/ - create json & html report and exit" + newLine +
" CapTipper.py ExploitKit.pcap -s - explore without web server" + newLine)
web_server_turned_on = False
HOST = "0.0.0.0"
PORT = 80
console_output = False
b_use_short_uri = False
b_auto_ungzip = False
class msg_type:
GOOD = 0
ERROR = 1
INFO = 2
class colors:
SKY = '\033[36m'
PINK = '\033[35m'
BLUE = '\033[34m'
GREEN = '\033[32m'
YELLOW = '\033[33m'
RED = '\033[31m'
END = '\033[0;0m'
#END = '\033[37m'
STRONG_BRIGHT = '\033[1m'
NORMAL_BRIGHT = '\033[22m'
VT_APIKEY = ""
try:
WhatypeMagic = Whatype(os.path.join(os.path.dirname(os.path.realpath(__file__)),"magics.csv"))
except Exception, e:
Errors.append("Couldn't load Whatype for magic identification: " + e.message)
class client_struct:
def __init__(self):
self.headers = collections.OrderedDict()
self.headers["IP"] = ""
self.headers["MAC"] = ""
self.ignore_headers = ['ACCEPT','ACCEPT-ENCODING','ACCEPT-LANGUAGE','CONNECTION','HOST','REFERER', \
'CACHE-CONTROL','CONTENT-TYPE', 'COOKIE', 'CONTENT-LENGTH', 'X-REQUESTED-WITH', \
'IF-MODIFIED-SINCE','IF-NONE-MATCH','ORIGIN','ACCEPT-ASTEROPE','IF-UNMODIFIED-SINCE']
def add_header(self, key, value):
if not self.headers.has_key(key.upper()) and not key.upper() in self.ignore_headers:
self.headers[key.upper()] = value
def get_information(self):
return self.headers
def alert_message(text, type):
if type == msg_type.GOOD:
message = colors.GREEN + "[+] " + colors.END
elif type == msg_type.ERROR:
message = colors.RED + " [E] " + colors.END
elif type == msg_type.INFO:
message = colors.YELLOW + "[!] " + colors.END
message += text
print message
def show_errors():
global Errors
if len(Errors) > 0:
for err in Errors:
print err
def check_errors():
global Errors
if len(Errors) > 0:
return True
else:
return False
client = client_struct()
activity_date_time = ""
def add_object(type, value, id=-1, empty=False, name=""):
object_num = len(objects)
objects.append(collections.namedtuple('obj', ['type', 'value', 'conv_id', 'name']))
objects[object_num].type = type
if not empty:
objects[object_num].value = value
if id != -1:
objects[object_num].conv_id = str(id)
objects[object_num].name = type + "-" + get_name(id)
else:
objects[object_num].conv_id = str(object_num)
objects[object_num].name = name
return object_num
def fmt_size(size_bytes):
for unit in ['B','KB','MB','GB','TB']:
if size_bytes < 1024.0:
return "%3.1f %s" % (size_bytes, unit)
size_bytes /= 1024.0
return "{%3.1f} {}".format(size_bytes, 'PB')
def get_name(id):
name = ""
try:
name = objects[int(id)].name
finally:
return name
def show_hosts():
for host, ip in hosts.keys():
print " " + host + " ({})".format(ip)
hostkey = (host, ip)
for host_uri,obj_num in hosts[hostkey]:
#chr_num = 195 # Extended ASCII tree symbol
chr_num = 9500 # UNICODE tree symbol
# Checks if last one
if ((host_uri,obj_num) == hosts[hostkey][len(hosts[hostkey]) - 1]):
#chr_num = 192 # Extended ASCII tree symbol
chr_num = 9492 # UNICODE tree symbol
try:
print " " + unichr(chr_num) + "-- " + host_uri.encode('utf8') + " [{}]".format(obj_num)
except:
print " |-- " + host_uri.encode('utf8') + " [{}]".format(obj_num)
print newLine
def check_duplicate_url(host, uri):
bDup = False
for conv in conversations:
if (conv.uri.lower() == uri.lower()) and (conv.host.lower() == host.lower()):
bDup = True
break
return bDup
def check_duplicate_uri(uri):
bDup = False
for conv in conversations:
if (conv.uri.lower() == uri.lower()):
bDup = True
break
return bDup
def create_next_uri(uri):
duplicate_uri = True
orig_uri = uri
uri_num = 2
while duplicate_uri:
duplicate_uri = False
for conv in conversations:
if (conv.uri.lower() == uri.lower()):
uri = orig_uri + "(" + str(uri_num) + ")"
uri_num += 1
duplicate_uri = True
break
return uri
SHORT_URI_SIZE = 20
def getShortURI(uri):
shortURL = uri
if len(uri) > SHORT_URI_SIZE:
shortURL = uri[0:int(SHORT_URI_SIZE/2)] + "..." + uri[len(uri)-int(SHORT_URI_SIZE/2):len(uri)]
return shortURL
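# Illustrative note (added): with SHORT_URI_SIZE = 20 a long URI keeps only its
# first and last 10 characters, e.g. (hypothetical URI)
#   getShortURI("/really/long/path/to/resource.php?id=1") -> "/really/lo...e.php?id=1"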
def byTime(Conv):
return int(Conv.req_microsec)
def sort_convs():
conversations.sort(key=byTime)
for cnt, conv in enumerate(conversations):
conv.id = cnt
add_object("body", conv.res_body)
objects[cnt].name = conv.filename
def check_order(Conv):
for curr_conv in conversations:
if int(curr_conv.req_microsec) > int(str(Conv.time)[:10]):
return False
return True
def finish_conversation(self):
if not (check_duplicate_url(self.host, self.uri)):
#if check_duplicate_uri(self.uri):
# self.uri = create_next_uri(self.uri)
obj_num = len(conversations)
conversations.append(namedtuple('Conv',
['id','server_ip_port', 'uri','req','res_body','res_head','res_num','res_type','host','referer', \
'filename','method','redirect_to','req_microsec', 'res_len','magic_name', 'magic_ext']))
host_tuple = (self.host, str(self.remote_host[0]) + ":" + str(self.remote_host[1]))
# hosts list
if (hosts.has_key(host_tuple)):
hosts[host_tuple].append((self.uri,str(obj_num)))
else:
hosts[host_tuple] = [(self.uri,str(obj_num))]
# convs list
conversations[obj_num].id = obj_num
conversations[obj_num].server_ip_port = str(self.remote_host[0]) + ":" + str(self.remote_host[1])
conversations[obj_num].uri = self.uri
conversations[obj_num].redirect_to = self.redirect_to
conversations[obj_num].short_uri = getShortURI(self.uri)
conversations[obj_num].req = self.req
conversations[obj_num].res_body = self.res_body
try:
# FindMagic
mgc_name = ""
mgc_ext = ""
mgc_name, mgc_ext = WhatypeMagic.identify_buffer(self.res_body)
except:
pass
conversations[obj_num].magic_name = mgc_name.rstrip()
conversations[obj_num].magic_ext = mgc_ext.rstrip()
conversations[obj_num].orig_chunked_resp = self.orig_chunked_resp
conversations[obj_num].orig_resp = self.orig_resp
conversations[obj_num].res_head = self.res_head
conversations[obj_num].res_num = self.res_num
if ";" in self.res_type:
conversations[obj_num].res_type = self.res_type[:self.res_type.find(";")]
else:
conversations[obj_num].res_type = self.res_type
conversations[obj_num].host = self.host
conversations[obj_num].referer = self.referer
conversations[obj_num].filename = self.filename
conversations[obj_num].method = self.method
conversations[obj_num].req_microsec = str(self.time)[:10]
# In case no filename was given from the server, split by URI
if (conversations[obj_num].filename == ""):
uri_name = urlparse.urlsplit(str(conversations[obj_num].uri)).path
conversations[obj_num].filename = uri_name.split('/')[-1]
if (str(conversations[obj_num].filename).find('?') > 0):
conversations[obj_num].filename = \
conversations[obj_num].filename[:str(conversations[obj_num].filename).find('?')]
if (str(conversations[obj_num].filename).find('&') > 0):
conversations[obj_num].filename = \
conversations[obj_num].filename[:str(conversations[obj_num].filename).find('&')]
# In case the URI was '/' then this is still empty
if (conversations[obj_num].filename == ""):
conversations[obj_num].filename = str(obj_num) + ".html"
conversations[obj_num].res_len = self.res_len
def show_conversations():
if (b_use_short_uri):
alert_message("Displaying shortened URI paths" + newLine, msg_type.INFO)
for cnt, conv in enumerate(conversations):
try:
typecolor = colors.END
if ("pdf" in conv.res_type):
typecolor = colors.RED
elif ("javascript" in conv.res_type):
typecolor = colors.BLUE
elif ("octet-stream" in conv.res_type) or ("application" in conv.res_type):
typecolor = colors.YELLOW
elif ("image" in conv.res_type):
typecolor = colors.GREEN
print str(conv.id) + ": " + colors.PINK,
if (b_use_short_uri):
print conv.short_uri,
else:
print conv.uri,
print colors.END + " -> " + conv.res_type,
if (conv.filename != ""):
print typecolor + "(" + conv.filename.rstrip() + ")" + colors.END + " [" + str(fmt_size(conv.res_len)) + "]",
# If magic found
if conv.magic_ext != "":
print " (Magic: " + colors.STRONG_BRIGHT + "{}".format(conv.magic_ext) + colors.NORMAL_BRIGHT + ")"
else:
print ""
else:
print newLine
except:
pass
print ""
def show_objects():
print "Displaying Objects:" + newLine
print " ID CID TYPE NAME"
print "---- ----- ----------- --------"
for id, obj in enumerate(objects):
print "{0:3} | {1:3} | {2:11} | {3}".format(id, obj.conv_id, obj.type, obj.name)
def hexdump(src, length=16):
result = []
digits = 4 if isinstance(src, unicode) else 2
for i in xrange(0, len(src), length):
s = src[i:i + length]
hexa = b' '.join(["%0*X" % (digits, ord(x)) for x in s])
text = b''.join([x if 0x20 <= ord(x) < 0x7F else b'.' for x in s])
result.append(b"%04X %-*s %s" % (i, length * (digits + 1), hexa, text))
return b'\n'.join(result)
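# Illustrative note (added): each line is "<offset>  <hex bytes>  <ascii>", with
# non-printable bytes shown as '.'; e.g. hexdump("CapTipper") yields roughly
#   0000   43 61 70 54 69 70 70 65 72   CapTipper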
from HTMLParser import HTMLParser
class srcHTMLParser(HTMLParser):
def __init__(self, find_tag):
HTMLParser.__init__(self)
self.find_tag = find_tag
self.tags = []
def handle_starttag(self, tag, attrs):
if tag == self.find_tag:
for att in attrs:
if att[0] == "src":
self.tags.append(att[1])
def print_objects(self):
if len(self.tags) > 0:
print " " + str(len(self.tags)) + " {}(s) Found!".format(self.find_tag) + newLine
for cnt, curr_tag in enumerate(self.tags):
print " [I] " + str(cnt + 1) + " : " + curr_tag
else:
print " No {} Found".format(self.find_tag)
def update_captipper():
currentVersion = "v{} b{}".format(VERSION,BUILD)
rawURL = "https://raw.githubusercontent.com/omriher/CapTipper/master/"
archiveURL = "https://github.com/omriher/CapTipper/archive/"
CoreFile = "CTCore.py"
CTArchive = "master.zip"
CoreURL = rawURL + CoreFile
print "Checking for updates (Current version: {})".format(currentVersion)
try:
print "Connecting to CapTipper Repository"
coreRepFile = urllib2.urlopen(CoreURL).read()
except:
sys.exit("[-] Error connecting to CapTipper repository")
verPattern = "VERSION = " + chr(34) + "(.*)" + chr(34) + "\s*?BUILD = " + chr(34) + "(.*)" + chr(34)
repoVer = re.findall(verPattern, coreRepFile)
if repoVer:
newVersion = "v{} b{}".format(repoVer[0][0],repoVer[0][1])
else:
sys.exit('[-] Error getting repository version')
if newVersion == currentVersion:
sys.exit("[+] You have the newest version!")
else:
print "[+] Updating CapTipper to {}".format(newVersion)
bPackSize = False
nAttempts = 0
while (not bPackSize and nAttempts < 3):
try:
url = archiveURL + CTArchive
u = urllib2.urlopen(url)
content_length = u.info().getheaders("content-length")
if len(content_length) > 0:
file_size = int(content_length[0])
bPackSize = True
else:
print("[-] Couldn't get package size, Retrying ({} / 3)...".format(str(nAttempts)))
except Exception,e:
sys.exit("[-] Error downloading update: {}".format(e.message))
finally:
nAttempts += 1
if not bPackSize:
sys.exit("[-] Couldn't get package size, Please try again later...")
try:
package_name = "CapTipper-package.zip"
f = open(package_name, 'wb')
file_downloaded = 0
block_size = 8192
while True:
buffer = u.read(block_size)
if not buffer:
break
file_downloaded += len(buffer)
f.write(buffer)
output = "[+] Downloading {0:.2f}%".format(file_downloaded * 100. / file_size)
sys.stdout.write('\r%s' % output)
sys.stdout.flush()
f.close()
CapTipper_Folder = os.path.dirname(os.path.realpath(__file__))
except Exception, e:
print "[-] Error downloading file: {}".format(e.message)
print "\nExtracting Files..."
try:
z = zipfile.ZipFile('CapTipper-package.zip')
master_folder = ""
for name in z.namelist():
if not master_folder:
master_folder = name[:-1]
full_path = CapTipper_Folder + name.replace(master_folder,"")
# Case of directory
if full_path.endswith(r"/"):
if not os.path.exists(full_path):
os.makedirs(full_path)
else:
if os.name == 'nt':
full_path = full_path.replace("/",r"\\")
print "Extracting {}".format(full_path)
with open(full_path,"wb") as out:
out.write(z.read(name))
try:
os.remove("CapTipper-package.zip")
except Exception, ed:
print "Failed deleting CapTipper-package.zip : " + ed.message
print "Update Complete! (New version: {})".format(newVersion)
except Exception,e:
sys.exit("Failed extracting files: {}".format(e.message))
sys.exit("Finished updating CapTipper")
def send_to_vt(md5, key_vt):
if key_vt == "":
return(-1, "No Public API Key Found")
url_vt = 'https://www.virustotal.com/vtapi/v2/file/report'
params = {'resource':md5,'apikey':key_vt}
try:
body = urllib.urlencode(params)
req = urllib2.Request(url_vt, body)
res = urllib2.urlopen(req)
res_json = res.read()
except:
return (-1, 'Request to VirusTotal Failed')
try:
json_dict = json.loads(res_json)
except:
return (-1, 'Error during VirusTotal response parsing')
return (0, json_dict)
def get_strings(content):
strings = re.findall("[\x1f-\x7e]{5,}", content)
strings += [str(ws.decode("utf-16le")) for ws in re.findall("(?:[\x1f-\x7e][\x00]){5,}", content)]
return strings
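# Illustrative note (added): the first pattern collects runs of at least 5
# printable ASCII characters, the second collects UTF-16LE ("wide") strings of
# at least 5 characters, so a buffer holding "GET /index" and a wide "calc.exe"
# (hypothetical content) would contribute both.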
def get_request_size(id, size, full_request=False):
if int(id) >= len(objects) or int(id) < 0:
raise Exception(" ID number " + str(id) + " isn't within range")
request = conversations[int(id)].req
if (size.lower() == "all"):
size = len(request)
else:
size = int(size)
request = request[0:size]
if len(request) < size:
size = len(request)
return request, size
def get_response_and_size(id, size, full_response=False):
global Errors
Errors = []
if int(id) >= len(objects) or int(id) < 0:
raise Exception(" ID number " + str(id) + " isn't within range")
# if full response is needed and not just the body
if (full_response):
body = conversations[int(id)].header + '\r\n\r\n' + conversations[int(id)].res_body
else:
body = objects[int(id)].value
if not (isinstance(body, basestring)):
comp_header = conversations[int(id)].res_head
test_res = conversations[int(id)].res_head + "\r\n\r\n" + conversations[int(id)].orig_chunked_resp
body = ""
if (comp_header + "\r\n\r\n" == test_res):
#print colors.RED + newLine + "[E] Object: {} ({}) : Response body was empty, showing header instead".format(id, CTCore.objects[int(id)].name) + colors.END + newLine
Errors.append(colors.RED + newLine + "[E] Object: {} ({}) : Response body was empty".format(id, objects[int(id)].name) + colors.END + newLine)
else:
#print colors.RED + newLine + "[E] Object: {} ({}) : Couldn't retrieve BODY, showing full response instead".format(id, CTCore.objects[int(id)].name) + colors.END + newLine
Errors.append(colors.RED + newLine + "[E] Object: {} ({}) : Couldn't retrieve BODY".format(id, objects[int(id)].name) + colors.END + newLine)
if (size != "all"):
size = size * 2
if (size == "all"):
response = body
size = len(response)
else:
size = int(size)
response = body[0:size]
if len(response) < size:
size = len(response)
return response, size
def ungzip_all():
for conv in conversations:
try:
if conv.res_head.lower().find("gzip") > -1:
name = ""
try:
id = int(conv.id)
name = get_name(id)
obj_num, name = ungzip_and_add(id)
if obj_num != -1:
print " GZIP Decompression of object {} ({}) successful!".format(str(id), name)
print " New object created: {}".format(obj_num) + newLine
except Exception, e:
print "Error in: {} - {}".format(name,str(e))
except:
pass
def ungzip(id):
body, sz = get_response_and_size(id, "all")
obj_num = -1
name = ""
if not check_errors():
name = get_name(id)
decomp = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(body))
page = decomp.read()
return page, name
def ungzip_and_add(id):
page, name = ungzip(id)
obj_num = add_object("ungzip",page,id=id)
return obj_num, name
def dump_all_files(path, dump_exe):
for i in range(0,len(objects)):
try:
if (not objects[i].name.lower().endswith(".exe")) or dump_exe:
dump_file(i, os.path.join(path, str(i) + "-" + objects[i].name))
except Exception, ef:
print str(ef)
def dump_file(id, path):
id = int(id)
body, sz = get_response_and_size(id, "all")
show_errors()
f = open(path, "wb")
f.write(body)
f.close()
print " Object {} written to {}".format(id, path)
def find_plugin(name):
for plug in plugins:
if plug.name.lower() == name.lower():
return plug.module
return None
def run_plugin(name, *args):
try:
module = find_plugin(name)
if module:
current = module()
result = current.run(*args)
return result
else:
return "Plugin " + name + " Does not exist"
except Exception,e:
print str(e)
|
import copy, os, pyPdf, sys
def main():
if len(sys.argv) > 1:
if os.path.isfile(sys.argv[1]):
name_input = sys.argv[1]
else:
sys.exit("file %s not found" % sys.argv[1])
else:
sys.exit("missing origin file operand")
if len(sys.argv) > 2:
name_output = sys.argv[2]
else:
new_name = "cut_" + os.path.basename(sys.argv[1])
name_output = os.path.join(os.path.dirname(sys.argv[1]), new_name)
cutpdf(name_input,name_output)
def cutpdf(name_input,name_output):
pdf_input = file(name_input, "rb")
pdf_output = file(name_output, "wb")
output = pyPdf.PdfFileWriter()
input1 = pyPdf.PdfFileReader(pdf_input)
pg= input1.getNumPages()
for i in range(0,pg):
page1 = input1.getPage(i)
page2 = copy.copy(page1)
cutline= (page1.mediaBox.getUpperRight_x() / 2, page1.mediaBox.getUpperRight_y())
page1.mediaBox.upperRight = cutline
output.addPage(page1)
page2.mediaBox.upperLeft = cutline
output.addPage(page2)
output.write(pdf_output)
pdf_output.close()
return True
if __name__ == "__main__":
main()
|
from gnuradio import gr, gr_unittest
from gnuradio import blocks
import ieee802_15_4_swig as ieee802_15_4
import numpy as np
class qa_costas_loop_cc (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_001_t (self): # perfect sync, known start
# set up fg
self.tb.run ()
nsym = 1000
data_in = 2*(np.random.randint(0,2,nsym)-0.5) + 2j*(np.random.randint(0,2,nsym)-0.5) # stream of random qpsk symbols
data_in[0] = 1+1j
src = blocks.vector_source_c(data_in)
costas = ieee802_15_4.costas_loop_cc((1+1j, -1+1j, -1-1j, 1-1j), 0)
snk = blocks.vector_sink_c()
self.tb.connect(src, costas, snk)
self.tb.run()
# check data
data_out = snk.data()
self.assertComplexTuplesAlmostEqual(np.angle(data_in), np.angle(data_out))
def test_002_t (self): # perfect sync, random start
# set up fg
self.tb.run ()
nsym = 1000
data_in = 2*(np.random.randint(0,2,nsym)-0.5) + 2j*(np.random.randint(0,2,nsym)-0.5) # stream of random qpsk symbols
src = blocks.vector_source_c(data_in)
costas = ieee802_15_4.costas_loop_cc((1+1j, -1+1j, -1-1j, 1-1j), -1)
snk = blocks.vector_sink_c()
self.tb.connect(src, costas, snk)
self.tb.run()
# check data
data_out = snk.data()
self.assertComplexTuplesAlmostEqual(np.angle(data_in), np.angle(data_out))
def test_003_t (self): # phase offset
# set up fg
self.tb.run ()
nsym = 1000
phi_off = np.pi*0.2
data_in = 2*(np.random.randint(0,2,nsym)-0.5) + 2j*(np.random.randint(0,2,nsym)-0.5) # stream of random qpsk symbols
data_in[0] = 1+1j
data_in_off = data_in*np.exp(1j*phi_off)
src = blocks.vector_source_c(data_in_off)
costas = ieee802_15_4.costas_loop_cc((1+1j, -1+1j, -1-1j, 1-1j), 0)
snk = blocks.vector_sink_c()
self.tb.connect(src, costas, snk)
self.tb.run()
# check data
data_out = snk.data()
self.assertComplexTuplesAlmostEqual(np.angle(data_in), np.angle(data_out))
def test_004_t (self): # phase offset, lock to "wrong" symbol
# set up fg
self.tb.run ()
nsym = 1000
phi_off = np.pi*0.4
data_in = 2*(np.random.randint(0,2,nsym)-0.5) + 2j*(np.random.randint(0,2,nsym)-0.5) # stream of random qpsk symbols
data_in[0] = 1+1j
data_in_off = data_in*np.exp(1j*phi_off)
src = blocks.vector_source_c(data_in_off)
costas = ieee802_15_4.costas_loop_cc((1+1j, -1+1j, -1-1j, 1-1j), 0)
snk = blocks.vector_sink_c()
self.tb.connect(src, costas, snk)
self.tb.run()
# check data
data_out = snk.data()
self.assertComplexTuplesAlmostEqual(np.angle(data_in*np.exp(1j*np.pi/2)), np.angle(data_out))
def test_005_t (self): # frequency offset
# set up fg
self.tb.run ()
nsym = 1000
phi_off = np.arange(nsym)*np.pi/7
data_in = 2*(np.random.randint(0,2,nsym)-0.5) + 2j*(np.random.randint(0,2,nsym)-0.5) # stream of random qpsk symbols
data_in[0] = 1+1j
data_in_off = data_in*np.exp(1j*phi_off)
src = blocks.vector_source_c(data_in_off)
costas = ieee802_15_4.costas_loop_cc((1+1j, -1+1j, -1-1j, 1-1j), 0)
snk = blocks.vector_sink_c()
self.tb.connect(src, costas, snk)
self.tb.run()
# check data
data_out = snk.data()
self.assertComplexTuplesAlmostEqual(np.angle(data_in), np.angle(data_out))
if __name__ == '__main__':
gr_unittest.run(qa_costas_loop_cc)
|
def _H(prev, trans):
for c in trans:
prev = (prev*31+ord(c)) % 1000000007
return prev
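# Illustrative note (added): _H is a base-31 polynomial rolling hash modulo
# 10**9 + 7, e.g. _H(0, "ab") == (0 * 31 + ord('a')) * 31 + ord('b') == 3105.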
def find_hash(string, prev, hashes):
for h in hashes:
_h = _H(prev, string)
token = h-7*_h
while token < 0:
token += 1000000007
token %= 1000000007
if token < 1000000000:
print(string, token )
return h
def main():
hashes = {int(str(i)+'0'*7) for i in range(1,101)}
prev = int(input())
prev = find_hash('a', prev, hashes)
hashes.remove(prev)
find_hash('b', prev, hashes)
if __name__ == "__main__":
main()
|
import matplotlib.pyplot as mpp
import unittest
import timetools.synchronization.clock as sc
import timetools.synchronization.oscillator as tso
import timetools.synchronization.oscillator.noise.gaussian as tsong
import timetools.synchronization.time as st
import timetools.synchronization.compliance.visualization as tscv
import timetools.synchronization.compliance.ituTG8263.compute as tscg8263
import timetools.synchronization.compliance.ituTG8263.wanderGeneration as tscg8263wg
import timetools.synchronization.compliance.ituTG8263.holdoverTransient as tscg8263h
class TestItuTG8263( unittest.TestCase ) :
def testConstantTemperatureWanderGenerationMask( self ) :
thisMask = tscg8263wg.constantTemperatureMtieNs
figureHandle = mpp.figure( )
# Set the plot limits before the mask plot so that it will figure out
# appropriate ranges in the absence of signal data
mpp.xlim( (0.01, 20e3) )
mpp.ylim( (100, 200e3) )
thisMask.addToPlot( figureHandle.number )
mpp.yscale( 'log' )
mpp.xscale( 'log' )
mpp.grid( which = 'minor' )
mpp.title( self.testConstantTemperatureWanderGenerationMask.__name__ )
def testVariableTemperatureWanderGenerationMask( self ) :
constTempMask = tscg8263wg.constantTemperatureMtieNs
thisMask = tscg8263wg.variableTemperatureMtieNs
figureHandle = mpp.figure( )
# Set the plot limits before the mask plot so that it will figure out
# appropriate ranges in the absence of signal data
mpp.xlim( (0.01, 20e3) )
mpp.ylim( (100, 200e3) )
thisMask.addToPlot( figureHandle.number, color = 'b' )
constTempMask.addToPlot( figureHandle.number, color = 'r', linestyle = '--' )
mpp.yscale( 'log' )
mpp.xscale( 'log' )
mpp.grid( which = 'minor' )
mpp.title( self.testVariableTemperatureWanderGenerationMask.__name__ )
def testHoldoverTransientPhaseErrorMask( self ) :
thisMask = tscg8263h.phaseErrorNs
figureHandle = mpp.figure( )
mpp.title( self.testHoldoverTransientPhaseErrorMask.__name__ )
# Set the plot limits before the mask plot so that it will figure out
# appropriate ranges in the absence of signal data
mpp.xlim( (0, 100) )
mpp.ylim( (-1000, 1000) )
mpp.grid( )
thisMask.addToPlot( figureHandle.number, linewidth = 4, color = 'b', marker = 'o' )
mpp.grid( which = 'minor' )
def testHoldoverTransientFfoMask( self ) :
thisMask = tscg8263h.ffoPpb
figureHandle = mpp.figure( )
mpp.title( self.testHoldoverTransientFfoMask.__name__ )
# Set the plot limits before the mask plot so that it will figure out
# appropriate ranges in the absence of signal data
mpp.xlim( (0, 24 * 3600) )
mpp.ylim( (-15, 15) )
mpp.grid( )
thisMask.addToPlot( figureHandle.number, linewidth = 4, color = 'b', marker = 'o' )
mpp.grid( which = 'minor' )
def testHoldoverTransientFfoRateMask( self ) :
thisMask = tscg8263h.ffoRatePpbPerSecond
figureHandle = mpp.figure( )
mpp.title( self.testHoldoverTransientFfoRateMask.__name__ )
# Set the plot limits before the mask plot so that it will figure out
# appropriate ranges in the absence of signal data
mpp.xlim( (0, 3600) )
mpp.ylim( (-2e-5, 2e-5) )
mpp.grid( )
thisMask.addToPlot( figureHandle.number, linewidth = 4, color = 'b', marker = 'o' )
mpp.grid( which = 'minor' )
def testWanderGenerationConstantTemperatureNs1( self ) :
timeStepSeconds = 1
numberSamples = 10000
desiredNumberObservations = 15
clockFfoPpb = 0.5
clockRmsJitterPpb = 2
referenceTimeGenerator = st.referenceGenerator( timeStepSeconds )
referenceTimeSeconds = referenceTimeGenerator.generate( numberSamples )
clockModel = sc.ClockModel( tso.OscillatorModel( initialFfoPpb = clockFfoPpb,
noiseModel = tsong.GaussianNoise(
standardDeviationPpb = clockRmsJitterPpb,
seed = 1459 ) ) )
localTimeSeconds, instantaneousLoFfoPpb = clockModel.generate( referenceTimeSeconds )
analysisResult, thisMask, mtieData = tscg8263.analyzeItuTG8263Mask( localTimeSeconds, referenceTimeSeconds,
timeStepSeconds, desiredNumberObservations )
thisPlot = tscv.plot( )
thisPlot.addMask( thisMask, linewidth = 4, color = 'r', linestyle = '--', marker = 'o' )
thisPlot.addSignal( mtieData )
thisPlot.go( )
mpp.yscale( 'log' )
mpp.xscale( 'log' )
mpp.grid( which = 'minor' )
mpp.title( self.testWanderGenerationConstantTemperatureNs1.__name__ )
self.assertTrue( analysisResult, 'Failed 16 ppb mask when should not have' )
def testWanderGenerationConstantTemperatureNs2( self ) :
timeStepSeconds = 1
numberSamples = 10000
desiredNumberObservations = 15
clockFfoPpb = 5
clockRmsJitterPpb = 2
referenceTimeGenerator = st.referenceGenerator( timeStepSeconds )
referenceTimeSeconds = referenceTimeGenerator.generate( numberSamples )
clockModel = sc.ClockModel( tso.OscillatorModel( initialFfoPpb = clockFfoPpb,
noiseModel = tsong.GaussianNoise(
standardDeviationPpb = clockRmsJitterPpb,
seed = 1459 ) ) )
localTimeSeconds, instantaneousLoFfoPpb = clockModel.generate( referenceTimeSeconds )
analysisResult, thisMask, mtieData = tscg8263.analyzeItuTG8263Mask( localTimeSeconds, referenceTimeSeconds,
timeStepSeconds, desiredNumberObservations )
thisPlot = tscv.plot( )
thisPlot.addMask( thisMask, linewidth = 4, color = 'r', linestyle = '--' )
thisPlot.addSignal( mtieData, linestyle = '--', marker = 'o' )
thisPlot.go( )
mpp.yscale( 'log' )
mpp.xscale( 'log' )
mpp.grid( which = 'minor' )
mpp.title( self.testWanderGenerationConstantTemperatureNs2.__name__ )
self.assertFalse( analysisResult, 'Passed 16 ppb mask when should not have' )
def tearDown( self ) :
if __name__ == "__main__" :
mpp.show( )
if __name__ == "__main__" :
unittest.main( )
|
"""
This script is a member of the system
"Detector of cyber-harassment" developed
for the Master's thesis "Detection of cyber-harassment on social network"
realized by Emilien Peretti during the academic year on 2016-2017 within the framework of
the IT Master's degree in sciences
at the university of Mons.
Author :
Emilien Peretti
"""
__author__ = 'Emilien Peretti'
class Filter(object):
"""
This filter gives the ratio of upper-case to lower-case characters in a text
"""
@classmethod
def get_data(cls, sentense):
"""
Get the filter result (the ratio described in the class docstring)
:param sentense: the sentence to analyse
:return: the ratio as a float
"""
count_upper = 0.0
count_lower = 0.0
for lettre in sentense:
# Characters without case (spaces, digits, punctuation) satisfy
# lettre.upper() == lettre and are therefore counted as upper-case here.
if lettre.upper() == lettre:
count_upper += 1.0
else:
count_lower += 1.0
if count_lower == 0:
return 1.0
else:
return count_upper / count_lower
@classmethod
def get_output_name(cls):
"""
Get the name (a small explanation) of the output of the filter
:return: the name as a string
"""
return "percent upper vs lower"
|
from test_support import gcc, gnatprove
gcc("always_fail.adb", opt=["-c", "-gnatv"])
print("--")
gnatprove("--version")
|
from math import sqrt, atan, sin, cos, pi
import matplotlib
import cairo
import os.path
r3b2 = sqrt(3.0)/2.0
yoffset = 2 * r3b2 / 3
br3 = 1.0/sqrt(3.0)
c_rad = 0.1
scaling = 200
offset = 250, 250
linew = 10
partlinew = 6
text_size = 40
text_offsetx = 20
text_offsety = -10
gap_greater = 1
dashed_no = True
dashes_list = [10,10]
pr, pg, pb = 0, 0.7, 0
rr, rg, rb = 0.7, 0, 0
ir, ig, ib = 0, 0, 0
background = False
br, bg, bb = 1, 1, 1
include_dimers = True
include_vertices = False
include_labels = False
include_scaffold = True
sr, sg, sb = 0.75, 0.75, 0.75
move_C5 = [('a', 't', 'N', 'u', 'd'),
('L', '6', 'R', 'O', 'M'),
('p', 'I', 'K', 's', 'r'),
('b', 'f', 'j', 'm', 'q'),
('c', 'w', 'y', 'g', 'e'),
('v', 'P', 'S', 'W', 'x'),
('Q', '2', 'Y', 'V', 'T'),
('G', '5', '3', '7', 'J'),
('l', 'F', 'H', 'o', 'n'),
('h', 'A', 'C', 'k', 'i'),
('z', 'X', 'U', '0', 'B'),
('D', '1', 'Z', '4', 'E')]
vertex_names = {('a', 't', 'N', 'u', 'd'): 1,
('b', 'f', 'j', 'm', 'q'): 2,
('c', 'w', 'y', 'g', 'e'): 3,
('v', 'P', 'S', 'W', 'x'): 4,
('L', '6', 'R', 'O', 'M'): 5,
('p', 'I', 'K', 's', 'r'): 6,
('h', 'A', 'C', 'k', 'i'): 7,
('z', 'X', 'U', '0', 'B'): 8,
('Q', '2', 'Y', 'V', 'T'): 9,
('G', '5', '3', '7', 'J'): 10,
('l', 'F', 'H', 'o', 'n'): 11,
('D', '1', 'Z', '4', 'E'): 12}
move_DS = [('a', 'b'),
('c', 'd'),
('e', 'f'),
('g', 'h'),
('i', 'j'),
('k', 'l'),
('m', 'n'),
('o', 'p'),
('q', 'r'),
('s', 't'),
('u', 'v'),
('w', 'x'),
('y', 'z'),
('A', 'B'),
('C', 'D'),
('E', 'F'),
('G', 'H'),
('I', 'J'),
('K', 'L'),
('M', 'N'),
('O', 'P'),
('Q', 'R'),
('S', 'T'),
('U', 'V'),
('W', 'X'),
('Y', 'Z'),
('0', '1'),
('2', '3'),
('4', '5'),
('6', '7')]
protein_vertex = dict(((k, v) for (ks, v) in vertex_names.items() for k in ks))
face_name_list = [
('a', 'f', 'c'),
('d', 'w', 'v'),
('u', 'P', 'M'),
('N', 'L', 's'),
('t', 'r', 'b'),
('g', 'A', 'z'),
('W', 'U', 'T'),
('R', '2', '7'),
('I', 'G', 'o'),
('m', 'l', 'i'),
('D', 'k', 'F'),
('E', 'H', '5'),
('4', '3', 'Y'),
('Z', 'V', '0'),
('1', 'B', 'C'),
('n', 'q', 'p'),
('J', 'K', '6'),
('Q', 'O', 'S'),
('X', 'x', 'y'),
('h', 'e', 'j')]
face_2d_mapping = [
(0.0, 0.0, False),
(1.0, 0.0, False),
(2.0, 0.0, False),
(3.0, 0.0, False),
(4.0, 0.0, False),
(0.5, r3b2, False),
(1.5, r3b2, False),
(2.5, r3b2, False),
(3.5, r3b2, False),
(4.5, r3b2, False),
(4.5, (r3b2 + br3), True),
(3.5, (r3b2 + br3), True),
(2.5, (r3b2 + br3), True),
(1.5, (r3b2 + br3), True),
(0.5, (r3b2 + br3), True),
(4.0, br3, True),
(3.0, br3, True),
(2.0, br3, True),
(1.0, br3, True),
(0.0, br3, True)
]
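# Added note (interpretation): the tables above encode an icosahedral layout of
# the capsid: 12 five-fold vertices (move_C5 / vertex_names), 30 dimer edges
# (move_DS) and 20 triangular faces (face_name_list); face_2d_mapping unfolds
# those 20 faces into a planar net, the boolean flag selecting the face orientation.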
def clockwise(pos, move):
'''
Return the next position clockwise within the cycle of `move` that contains `pos`.
'''
for cyc in move:
if pos in cyc:
return cyc[(cyc.index(pos) + 1) % len(cyc)]
def anticlockwise(pos, move):
'''
Return the next position anticlockwise within the cycle of `move` that contains `pos`.
'''
for cyc in move:
if pos in cyc:
return cyc[(cyc.index(pos) - 1) % len(cyc)]
def m1(pos):
'''
Dimer switch is move 1.
'''
return clockwise(pos, move_DS)
def m2(pos):
'''
Clockwise around five-fold is move 2.
'''
return clockwise(pos, move_C5)
def m3(pos):
'''
Anticlockwise around five-fold is move 3.
'''
return anticlockwise(pos, move_C5)
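# Added illustration: m1 swaps the two members of a dimer pair (an involution),
# while m2 and m3 step around a five-fold vertex in opposite directions, e.g.
# m1('a') == 'b', m1('b') == 'a', m2('a') == 't', m3('t') == 'a', and applying
# m2 five times in a row to 'a' returns 'a'.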
class Val_Creator(object):
'''
Builds the lookup table (self.vals) of 2-D coordinates for one protein
position on a triangular face.
'''
def __init__(self, ratio, rotation=0):
'''
Constructor.
ratio determines the stem and arm ('+3 -1' / '+1 -3') positions;
rotation rotates the whole grid by rotation * 120 degrees.
'''
lam = 1 / sqrt((ratio*ratio) + (2*ratio) + 4)
theta = atan(sqrt(3)/(ratio + 1))
extend = lam*sin(pi / 3) / sin((2 * pi / 3) - theta)
self.vals = dict()
self.vals['vertex'] = (0,
yoffset)
self.vals['stem_pos'] = (-lam * sin((pi / 6) - theta),
yoffset - (lam * cos((pi / 6) - theta)))
self.vals['+3 -1'] = (extend * sin(pi / 6),
yoffset - (extend * cos(pi / 6)))
self.vals['+1 -3'] = (- extend * sin(pi / 6),
yoffset - (extend * cos(pi / 6)))
self.vals['+2 -2'] = (0.25,
yoffset - (r3b2 / 2))
self.vals['centre'] = (0,0)
self.vals['homodimer 1'] = ((1.0 / 3.0), 0)
self.vals['homodimer 2'] = ((1.0 / 3.0) * sin(pi / 6.0), (1.0 / 3.0) * cos(pi / 6.0))
self.vals['homodimer mid'] = ((self.vals['homodimer 1'][0] + self.vals['homodimer 2'][0]) / 2.0, (self.vals['homodimer 1'][1] + self.vals['homodimer 2'][1]) / 2.0)
self.vals['homodimer split'] = (sum([self.vals['homodimer 1'][0], self.vals['centre'][0], self.vals['centre'][0]]) / 3.0, sum([self.vals['homodimer 1'][1], self.vals['centre'][1], self.vals['centre'][1]]) / 3.0)
self.vals['hetdimer 1'] = ((1.0 / 3.0) * sin(pi / 6.0), (1.0 / 3.0) * cos(pi / 6.0))
self.vals['hetdimer 2'] = (-(1.0 / 3.0) * sin(pi / 6.0), (1.0 / 3.0) * cos(pi / 6.0))
self.vals['hetdimer split 1'] = (sum([self.vals['hetdimer 1'][0], self.vals['centre'][0], self.vals['centre'][0]]) / 3.0, sum([self.vals['hetdimer 1'][1], self.vals['centre'][1], self.vals['centre'][1]]) / 3.0)
self.vals['hetdimer split 2'] = (sum([self.vals['hetdimer 2'][0], self.vals['vertex'][0], self.vals['vertex'][0]]) / 3.0, sum([self.vals['hetdimer 2'][1], self.vals['vertex'][1], self.vals['vertex'][1]]) / 3.0)
for i in range(rotation * 2):
self.rotate_grid()
def rotate_grid(self):
for i in self.vals:
j, face = self.vals[i]
m = (0.5 * j) - (r3b2 * face)
n = (r3b2 * j) + (0.5 * face)
self.vals[i] = (m, n)
def flip_grid(self):
for i in range(3):
self.rotate_grid()
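# Added note: rotate_grid applies (x, y) -> (x/2 - y*sqrt(3)/2, x*sqrt(3)/2 + y/2),
# i.e. a 60-degree rotation about the origin, so the 'rotation' constructor
# argument rotates the grid in steps of 120 degrees and flip_grid (three
# applications) rotates it by 180 degrees.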
def translate(self, xt, yt):
valslist = self.vals.items()
for i, (x,y) in valslist:
self.vals[i] = (x + xt, y + yt)
def give_face_name(v):
for f in face_name_list:
if v in f:
return f
def draw_scaffold():
'''
Draw the baseline MS2 architecture on top of which the path description is drawn.
'''
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 1400, 800)
if background:
bg = cairo.Context(surface)
bg.set_source_rgb(1,1,1)
bg.paint()
draw_dict = dict()
for ind, face in enumerate(face_name_list):
f0, f1, f2 = face
g0 = Val_Creator(1.95, rotation=0)
g1 = Val_Creator(1.95, rotation=2)
g2 = Val_Creator(1.95, rotation=1)
if not face_2d_mapping[ind][2]:
g0.flip_grid()
g1.flip_grid()
g2.flip_grid()
g0.translate(face_2d_mapping[ind][0], face_2d_mapping[ind][1])
g1.translate(face_2d_mapping[ind][0], face_2d_mapping[ind][1])
g2.translate(face_2d_mapping[ind][0], face_2d_mapping[ind][1])
g0.position = f0
g1.position = f1
g2.position = f2
g0.face = face
g1.face = face
g2.face = face
draw_dict[f0] = g0
draw_dict[f1] = g1
draw_dict[f2] = g2
for face in face_name_list:
f0, f1, f2 = face
vl = cairo.Context(surface)
v0x, v0y = draw_dict[f0].vals['vertex']
v1x, v1y = draw_dict[f1].vals['vertex']
v2x, v2y = draw_dict[f2].vals['vertex']
vl.set_source_rgb(0.3, 0.3, 0.3)
vl.move_to(offset[0] + scaling*v0x, offset[1] + scaling*v0y)
vl.line_to(offset[0] + scaling*v1x, offset[1] + scaling*v1y)
vl.line_to(offset[0] + scaling*v2x, offset[1] + scaling*v2y)
vl.line_to(offset[0] + scaling*v0x, offset[1] + scaling*v0y)
vl.stroke()
if include_dimers:
# include the colourful hetero and homodimers
for e1, e2 in move_DS:
testc = cairo.Context(surface)
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e1].vals['hetdimer split 1'][0], offset[1] + scaling*draw_dict[e1].vals['hetdimer split 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['hetdimer 1'][0], offset[1] + scaling*draw_dict[e1].vals['hetdimer 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['vertex'][0], offset[1] + scaling*draw_dict[e1].vals['vertex'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['hetdimer split 2'][0], offset[1] + scaling*draw_dict[e1].vals['hetdimer split 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['hetdimer split 1'][0], offset[1] + scaling*draw_dict[e1].vals['hetdimer split 1'][1])
testc.stroke_preserve()
testc.set_source_rgb(0.8, 1, 0.8)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e1].vals['centre'][0], offset[1] + scaling*draw_dict[e1].vals['centre'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['hetdimer split 1'][0], offset[1] + scaling*draw_dict[e1].vals['hetdimer split 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['hetdimer split 2'][0], offset[1] + scaling*draw_dict[e1].vals['hetdimer split 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['hetdimer 2'][0], offset[1] + scaling*draw_dict[e1].vals['hetdimer 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['centre'][0], offset[1] + scaling*draw_dict[e1].vals['centre'][1])
testc.stroke_preserve()
testc.set_source_rgb(0.8, 0.8, 1)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e2].vals['hetdimer split 1'][0], offset[1] + scaling*draw_dict[e2].vals['hetdimer split 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['hetdimer 1'][0], offset[1] + scaling*draw_dict[e2].vals['hetdimer 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['vertex'][0], offset[1] + scaling*draw_dict[e2].vals['vertex'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['hetdimer split 2'][0], offset[1] + scaling*draw_dict[e2].vals['hetdimer split 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['hetdimer split 1'][0], offset[1] + scaling*draw_dict[e2].vals['hetdimer split 1'][1])
testc.stroke_preserve()
testc.set_source_rgb(0.8, 1, 0.8)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e2].vals['centre'][0], offset[1] + scaling*draw_dict[e2].vals['centre'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['hetdimer split 1'][0], offset[1] + scaling*draw_dict[e2].vals['hetdimer split 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['hetdimer split 2'][0], offset[1] + scaling*draw_dict[e2].vals['hetdimer split 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['hetdimer 2'][0], offset[1] + scaling*draw_dict[e2].vals['hetdimer 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['centre'][0], offset[1] + scaling*draw_dict[e2].vals['centre'][1])
testc.stroke_preserve()
testc.set_source_rgb(0.8, 0.8, 1)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e1].vals['homodimer 2'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['centre'][0], offset[1] + scaling*draw_dict[e1].vals['centre'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['homodimer split'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer split'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['homodimer mid'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer mid'][1])
testc.move_to(offset[0] + scaling*draw_dict[e1].vals['homodimer 2'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer 2'][1])
testc.stroke_preserve()
testc.set_source_rgb(1, 0.8, 0.8)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e1].vals['homodimer 1'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['homodimer split'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer split'][1])
testc.line_to(offset[0] + scaling*draw_dict[e1].vals['homodimer mid'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer mid'][1])
testc.move_to(offset[0] + scaling*draw_dict[e1].vals['homodimer 1'][0], offset[1] + scaling*draw_dict[e1].vals['homodimer 1'][1])
testc.stroke_preserve()
testc.set_source_rgb(1, 0.8, 0.8)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e2].vals['homodimer 2'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer 2'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['centre'][0], offset[1] + scaling*draw_dict[e2].vals['centre'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['homodimer split'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer split'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['homodimer mid'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer mid'][1])
testc.move_to(offset[0] + scaling*draw_dict[e2].vals['homodimer 2'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer 2'][1])
testc.stroke_preserve()
testc.set_source_rgb(1, 0.8, 0.8)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict[e2].vals['homodimer 1'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer 1'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['homodimer split'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer split'][1])
testc.line_to(offset[0] + scaling*draw_dict[e2].vals['homodimer mid'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer mid'][1])
testc.move_to(offset[0] + scaling*draw_dict[e2].vals['homodimer 1'][0], offset[1] + scaling*draw_dict[e2].vals['homodimer 1'][1])
testc.stroke_preserve()
testc.set_source_rgb(1, 0.8, 0.8)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict['L'].vals['homodimer 2'][0], offset[1] + scaling*draw_dict['L'].vals['homodimer 2'][1])
testc.line_to(offset[0] + scaling*draw_dict['L'].vals['centre'][0], offset[1] + scaling*draw_dict['L'].vals['centre'][1])
testc.line_to(offset[0] + scaling*draw_dict['L'].vals['homodimer 1'][0], offset[1] + scaling*draw_dict['L'].vals['homodimer 1'][1])
testc.move_to(offset[0] + scaling*draw_dict['L'].vals['homodimer 2'][0], offset[1] + scaling*draw_dict['L'].vals['homodimer 2'][1])
testc.stroke_preserve()
testc.set_source_rgb(0.2, 0.8, 0.8)
testc.fill()
testc.set_source_rgb(0, 0, 0)
testc.move_to(offset[0] + scaling*draw_dict['K'].vals['homodimer 2'][0], offset[1] + scaling*draw_dict['K'].vals['homodimer 2'][1])
testc.line_to(offset[0] + scaling*draw_dict['K'].vals['centre'][0], offset[1] + scaling*draw_dict['K'].vals['centre'][1])
testc.line_to(offset[0] + scaling*draw_dict['K'].vals['homodimer 1'][0], offset[1] + scaling*draw_dict['K'].vals['homodimer 1'][1])
testc.move_to(offset[0] + scaling*draw_dict['K'].vals['homodimer 2'][0], offset[1] + scaling*draw_dict['K'].vals['homodimer 2'][1])
testc.stroke_preserve()
testc.set_source_rgb(0.2, 0.8, 0.8)
testc.fill()
testc.set_source_rgb(0.3, 0.3, 0.3)
coordmp = (offset[0] + (scaling*draw_dict['L'].vals['vertex'][0] + scaling*draw_dict['K'].vals['vertex'][0])/2.0, offset[1] +(scaling*draw_dict['L'].vals['vertex'][1] + scaling*draw_dict['K'].vals['vertex'][1])/2.0 )
testc.set_font_size(30)
testc.move_to(coordmp[0] - 12, coordmp[1] - 10)
testc.show_text('M')
testc.move_to(coordmp[0] - 9, coordmp[1] + 35)
testc.show_text('P')
testc.fill()
if include_vertices:
# label the vertices from 1 to 12
for i,j in draw_dict.items():
cc = cairo.Context(surface)
cr = cairo.Context(surface)
cc.set_source_rgb(1, 1, 1)
cr.set_source_rgb(0,0,0)
x, y = j.vals['vertex']
#cc.move_to(offset[0] + (scaling*x), offset[1] + (scaling*(y + c_rad)))
cc.arc(offset[0] + (scaling*x), offset[1] + (scaling*y), scaling*c_rad*1.5, 0, 2 * pi)
cr.arc(offset[0] + (scaling*x), offset[1] + (scaling*y), scaling*c_rad*1.5, 0, 2 * pi)
cc.fill()
cc.stroke()
cr.select_font_face("Arial", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
cr.set_font_size(text_size)
(ex, ey, ewidth, eheight, edx, edy) = cr.text_extents(str(protein_vertex[i[0]]))
cr.move_to(offset[0] + scaling*x - ((ewidth / 2.0) + 2.5), offset[1] + scaling*y + (eheight / 2.0))
cr.show_text(str(protein_vertex[i[0]]))
cr.stroke()
if include_scaffold:
for e1, e2 in move_DS:
st1x, st1y = draw_dict[e1].vals['stem_pos']
p1x, p1y = draw_dict[e1].vals['+2 -2']
p2x, p2y = draw_dict[e2].vals['+2 -2']
st2x, st2y = draw_dict[e2].vals['stem_pos']
sc = cairo.Context(surface)
sc.set_line_width(linew)
sc.set_source_rgb(sr, sg, sb)
sc.move_to(offset[0] + scaling*st1x, offset[1] + scaling*st1y)
sc.line_to(offset[0] + scaling*p1x, offset[1] + scaling*p1y)
sc.move_to(offset[0] + scaling*p2x, offset[1] + scaling*p2y)
sc.line_to(offset[0] + scaling*st2x, offset[1] + scaling*st2y)
sc.stroke()
sc.arc(offset[0] + scaling*st1x, offset[1] + scaling*st1y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*p1x, offset[1] + scaling*p1y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*p2x, offset[1] + scaling*p2y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*st2x, offset[1] + scaling*st2y, linew/2.0, 0, 2 * pi)
sc.fill()
for e1, e2 in move_DS:
e0 = m1(e1)
st1x, st1y = draw_dict[e0].vals['stem_pos']
p1x, p1y = draw_dict[e0].vals['+3 -1']
p2x, p2y = draw_dict[e0].vals['+1 -3']
st2x, st2y = draw_dict[e0].vals['stem_pos']
sc = cairo.Context(surface)
sc.set_line_width(linew)
sc.set_source_rgb(sr, sg, sb)
sc.move_to(offset[0] + scaling*st1x, offset[1] + scaling*st1y)
sc.line_to(offset[0] + scaling*p1x, offset[1] + scaling*p1y)
sc.move_to(offset[0] + scaling*p2x, offset[1] + scaling*p2y)
sc.line_to(offset[0] + scaling*st2x, offset[1] + scaling*st2y)
sc.stroke()
sc.arc(offset[0] + scaling*st1x, offset[1] + scaling*st1y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*p1x, offset[1] + scaling*p1y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*p2x, offset[1] + scaling*p2y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*st2x, offset[1] + scaling*st2y, linew/2.0, 0, 2 * pi)
sc.fill()
for e1, e2 in move_DS:
e0 = m1(e2)
st1x, st1y = draw_dict[e0].vals['stem_pos']
p1x, p1y = draw_dict[e0].vals['+3 -1']
p2x, p2y = draw_dict[e0].vals['+1 -3']
st2x, st2y = draw_dict[e0].vals['stem_pos']
sc = cairo.Context(surface)
sc.set_line_width(linew)
sc.set_source_rgb(sr, sg, sb)
sc.move_to(offset[0] + scaling*st1x, offset[1] + scaling*st1y)
sc.line_to(offset[0] + scaling*p1x, offset[1] + scaling*p1y)
sc.move_to(offset[0] + scaling*p2x, offset[1] + scaling*p2y)
sc.line_to(offset[0] + scaling*st2x, offset[1] + scaling*st2y)
sc.stroke()
sc.arc(offset[0] + scaling*st1x, offset[1] + scaling*st1y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*p1x, offset[1] + scaling*p1y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*p2x, offset[1] + scaling*p2y, linew/2.0, 0, 2 * pi)
sc.fill()
sc.arc(offset[0] + scaling*st2x, offset[1] + scaling*st2y, linew/2.0, 0, 2 * pi)
sc.fill()
#if not os.path.exists('draw'):
# os.makedirs('draw')
#surface.write_to_png(os.path.join('draw', 'scaffold.png'))
return surface, draw_dict
def hami_draw(constrain_occ, constrain_unocc, draw, name, movestr, protstr):
'''
Draw the scaffold and overlay the occupied constraint edges (solid, colour
pr/pg/pb), the unoccupied constraint edges (dashed, colour rr/rg/rb) and the
explicitly listed path edges (solid, colour ir/ig/ib), then write the result
to the PNG file `name`. movestr and protstr are annotation strings printed
in the top-left corner.
'''
surface, draw_dict = draw_scaffold()
for (e1, e2), ev in constrain_occ:
if m1(e1) == e2:
st1x, st1y = draw_dict[e1].vals['stem_pos']
p1x, p1y = draw_dict[e1].vals['+2 -2']
p2x, p2y = draw_dict[e2].vals['+2 -2']
st2x, st2y = draw_dict[e2].vals['stem_pos']
elif m2(e1) == e2:
st1x, st1y = draw_dict[e1].vals['stem_pos']
p1x, p1y = draw_dict[e1].vals['+3 -1']
p2x, p2y = draw_dict[e2].vals['+1 -3']
st2x, st2y = draw_dict[e2].vals['stem_pos']
elif m3(e1) == e2:
st1x, st1y = draw_dict[e2].vals['stem_pos']
p1x, p1y = draw_dict[e2].vals['+3 -1']
p2x, p2y = draw_dict[e1].vals['+1 -3']
st2x, st2y = draw_dict[e1].vals['stem_pos']
br = cairo.Context(surface)
br.set_line_width(linew)
br.set_source_rgb(pr, pg, pb)
br.move_to(offset[0] + scaling*st1x, offset[1] + scaling*st1y)
br.line_to(offset[0] + scaling*p1x, offset[1] + scaling*p1y)
br.move_to(offset[0] + scaling*p2x, offset[1] + scaling*p2y)
br.line_to(offset[0] + scaling*st2x, offset[1] + scaling*st2y)
br.stroke()
br.arc(offset[0] + scaling*st1x, offset[1] + scaling*st1y, linew/2.0, 0, 2 * pi)
br.fill()
br.arc(offset[0] + scaling*p1x, offset[1] + scaling*p1y, linew/2.0, 0, 2 * pi)
br.fill()
br.arc(offset[0] + scaling*p2x, offset[1] + scaling*p2y, linew/2.0, 0, 2 * pi)
br.fill()
br.arc(offset[0] + scaling*st2x, offset[1] + scaling*st2y, linew/2.0, 0, 2 * pi)
br.fill()
for (e1, e2), ev in constrain_unocc:
if dashed_no:
if m1(e1) == e2:
st1x, st1y = draw_dict[e1].vals['stem_pos']
p1x, p1y = draw_dict[e1].vals['+2 -2']
p2x, p2y = draw_dict[e2].vals['+2 -2']
st2x, st2y = draw_dict[e2].vals['stem_pos']
elif m2(e1) == e2:
st1x, st1y = draw_dict[e1].vals['stem_pos']
p1x, p1y = draw_dict[e1].vals['+3 -1']
p2x, p2y = draw_dict[e2].vals['+1 -3']
st2x, st2y = draw_dict[e2].vals['stem_pos']
elif m3(e1) == e2:
st1x, st1y = draw_dict[e2].vals['stem_pos']
p1x, p1y = draw_dict[e2].vals['+3 -1']
p2x, p2y = draw_dict[e1].vals['+1 -3']
st2x, st2y = draw_dict[e1].vals['stem_pos']
br = cairo.Context(surface)
br.set_line_width(linew)
br.set_dash(dashes_list)
br.set_source_rgb(rr, rg, rb)
br.move_to(offset[0] + scaling*st1x, offset[1] + scaling*st1y)
br.line_to(offset[0] + scaling*p1x, offset[1] + scaling*p1y)
br.move_to(offset[0] + scaling*p2x, offset[1] + scaling*p2y)
br.line_to(offset[0] + scaling*st2x, offset[1] + scaling*st2y)
br.stroke()
for (e1, e2) in draw:
if m1(e1) == e2:
st1x, st1y = draw_dict[e1].vals['stem_pos']
p1x, p1y = draw_dict[e1].vals['+2 -2']
p2x, p2y = draw_dict[e2].vals['+2 -2']
st2x, st2y = draw_dict[e2].vals['stem_pos']
elif m2(e1) == e2:
st1x, st1y = draw_dict[e1].vals['stem_pos']
p1x, p1y = draw_dict[e1].vals['+3 -1']
p2x, p2y = draw_dict[e2].vals['+1 -3']
st2x, st2y = draw_dict[e2].vals['stem_pos']
elif m3(e1) == e2:
st1x, st1y = draw_dict[e2].vals['stem_pos']
p1x, p1y = draw_dict[e2].vals['+3 -1']
p2x, p2y = draw_dict[e1].vals['+1 -3']
st2x, st2y = draw_dict[e1].vals['stem_pos']
br = cairo.Context(surface)
br.set_line_width(linew)
br.set_source_rgb(ir, ig, ib)
br.move_to(offset[0] + scaling*st1x, offset[1] + scaling*st1y)
br.line_to(offset[0] + scaling*p1x, offset[1] + scaling*p1y)
br.move_to(offset[0] + scaling*p2x, offset[1] + scaling*p2y)
br.line_to(offset[0] + scaling*st2x, offset[1] + scaling*st2y)
br.stroke()
br.arc(offset[0] + scaling*st1x, offset[1] + scaling*st1y, linew/2.0, 0, 2 * pi)
br.fill()
br.arc(offset[0] + scaling*p1x, offset[1] + scaling*p1y, linew/2.0, 0, 2 * pi)
br.fill()
br.arc(offset[0] + scaling*p2x, offset[1] + scaling*p2y, linew/2.0, 0, 2 * pi)
br.fill()
br.arc(offset[0] + scaling*st2x, offset[1] + scaling*st2y, linew/2.0, 0, 2 * pi)
br.fill()
if include_labels:
cc = cairo.Context(surface)
cr = cairo.Context(surface)
cc.set_source_rgb(1, 1, 1)
cr.set_source_rgb(0,0,0)
cr.select_font_face("Courier", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
cr.set_font_size(text_size/2.0)
for v in move_C5:
for p in v:
x, y = draw_dict[p].vals['stem_pos']
cc.arc(offset[0] + (scaling*x), offset[1] + (scaling*y), scaling*c_rad*0.7, 0, 2 * pi)
cr.arc(offset[0] + (scaling*x), offset[1] + (scaling*y), scaling*c_rad*0.7, 0, 2 * pi)
cc.fill()
cc.stroke()
(ex, ey, ewidth, eheight, edx, edy) = cr.text_extents(p)
cr.move_to(offset[0] + scaling*x - ((ewidth / 2.0) + 2.5), offset[1] + scaling*y + (eheight / 2.0))
cr.show_text(p)
cr.stroke()
tx = cairo.Context(surface)
tx.select_font_face("Courier", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
tx.set_font_size(text_size/2.0)
tx.move_to(55, 50)
tx.show_text(movestr)
tx.move_to(50, 75)
tx.show_text(protstr)
tx.stroke()
surface.write_to_png(name)
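# Minimal usage sketch (added for illustration): the constraint lists normally
# come from an external model, so the edges and the output file name below are
# hypothetical placeholders; every pair used here is a move_DS dimer edge,
# i.e. m1(e1) == e2, so the m1 branch of hami_draw is taken.
if __name__ == "__main__":
    example_occupied = [(('a', 'b'), None)]    # drawn solid in the pr/pg/pb colour
    example_unoccupied = [(('c', 'd'), None)]  # drawn dashed in the rr/rg/rb colour
    example_path = [('e', 'f')]                # drawn solid in the ir/ig/ib colour
    hami_draw(example_occupied, example_unoccupied, example_path,
              'example_layout.png', 'moves: (example)', 'proteins: (example)')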
|
'''!
@file epwins.py
@package gui.epwins
@brief Window classes for the epcalc GUI
This holds window classes for generating characters and updating them (level-ups).
@date (C) 2016-2020
@author Marcus Schwamberger
@email marcus@lederzeug.de
@version 1.5
----
@todo The following has to be implemented:
- a separate Character Status Window which shows the following:
-# name, culture, race, profession
-# old & new EPs
-# No of level-ups
-# remaining DPs
-# remaining stat gain rolls
'''
import random
import os
import sys
import json
from tkinter import *
from tkinter.filedialog import *
from tkinter.ttk import *
from rpgtoolbox.lang import *
from rpgtoolbox.globaltools import *
from rpgtoolbox import logbox as log
from rpgtoolbox import handlemagic
from rpgtoolbox.errbox import *
from rpgtoolbox.confbox import *
from rpgtoolbox.rpgtools import getLvl
from rpgtoolbox.rolemaster import stats
from rpgtoolbox.rpgtools import calcTotals
from gui.winhelper import AutoScrollbar
from gui.winhelper import InfoCanvas
from gui.window import *
from gui.gmtools import *
from gui.mangroup import *
from pprint import pprint # for debugging purposes only
__updated__ = "18.11.2021"
__author__ = "Marcus Schwamberger"
__copyright__ = "(C) 2015-" + __updated__[-4:] + " " + __author__
__email__ = "marcus@lederzeug.de"
__version__ = "1.5"
__license__ = "GNU V3.0"
__me__ = "A RPG tool package for Python 3.6"
logger = log.createLogger('window', 'debug', '1 MB', 1, './')
class MainWindow(blankWindow):
"""!
This is the class for the main window object.
@param lang The chosen language for window and button texts. At
the moment, only English (en, default value) and German
(de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@todo storepath has to be changed to the default installation path if empty
"""
def __init__(self, lang = 'en', storepath = None, title = "Main Window",
char = None):
"""
Class constructor
@param lang The chosen language for window's and button's
texts. At the moment, only English (en, default
value) and German (de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@param char Character as JSON
"""
if storepath is None:
#needs to be changed
self.mypath = os.getcwd() + "/data/"
logger.debug('mainwindow: no storepath given, defaulting to %s' % (self.mypath))
else:
self.mypath = storepath
logger.debug('mainwindow: storepath set to %s' % (storepath))
self.picpath = "./gui/pic/"
self.lang = lang
self.myfile = "MyRPG.exp"
self.char = char
blankWindow.__init__(self, self.lang)
self.window.title(title)
Label(self.window, width = 60).pack()
self.__addFileMenu()
self.__addEditMenu()
self.__addGMMenu()
self.__addOptionMenu()
self.__addHelpMenu()
self.mask = [txtwin['json_files'][self.lang],
txtwin['grp_files'][self.lang],
txtwin['all_files'][self.lang]
]
"""
set picture for the window background of the main window
"""
self.__canvas = Canvas(self.window, width = '11.0c', height = '13.0c')
__background = PhotoImage(file = self.picpath + 'demon.gif')
self.__canvas.create_image(0, 0, image = __background, anchor = NW)
self.__canvas.pack()
self.window.mainloop()
def __addFileMenu(self):
"""
This method adds a File menu to the windows menu bar.
"""
self.filemenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_command(label = submenu['file'][self.lang]['new'],
command = self.__newFile)
self.filemenu.add_command(label = submenu['file'][self.lang]['open'],
command = self.__openFile)
self.filemenu.add_command(label = submenu['file'][self.lang]['save'],
command = self.__saveFile)
self.filemenu.add_command(label = submenu['file'][self.lang]['sv_as'],
command = self.__saveFile)
self.filemenu.add_command(label = submenu['file'][self.lang]['export'] + "(LaTeX/PDF)",
command = self.__exportLaTeX)
self.filemenu.add_command(label = "{} {} {}".format("short format",
submenu['file'][self.lang]['export'],
"(LaTeX/PDF)"),
command = self.__exportShortLaTeX)
self.filemenu.add_command(label = "{} {} {}".format(labels["spellbook"][self.lang],
submenu['file'][self.lang]['export'],
"(LaTeX/PDF)"),
command = self.__exportSpellbook)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['quit'],
command = self.window.destroy)
def __newFile(self):
"""
This method opens a new window for generation of a new
functional structure.
"""
self.window.destroy()
logger.debug("newfile: %s " % (self.mypath))
self.window = inputWin(lang = self.lang,
filename = None,
storepath = self.mypath)
def __openFile(self):
"""
This method opens a dialogue window (Tk) for opening files.
The content of the opened file will be saved in \e file
\e content as an array.
"""
self.__filein = askopenfilename(filetypes = self.mask,
initialdir = self.mypath)
if self.__filein != "" and type(self.__filein) == type(""):
with open(self.__filein, 'r') as filecontent:
# checking whether link of char pic fits - important for LaTeX export.
if self.__filein[-4:].lower() == "json":
self.char = json.load(filecontent)
if os.getcwd() not in self.char["piclink"]:
pl = self.char["piclink"].split("src/")[1]
if os.path.exists(pl):
self.char["piclink"] = "{}/{}".format(os.getcwd(), pl)
else:
self.char["piclink"] = "{}/{}/default.jpg".format(os.getcwd(), pl[:pl.rfind("/")])
elif self.__filein[-3:].lower() == "grp":
self.grp = json.load(filecontent)
else:
msg = messageWindow()
msg.showinfo(errmsg['wrong_type'][self.lang])
logger.warn(errmsg['wrong_type'][self.lang])
pass
def __saveFile(self):
'''!
This method opens a file dialogue window (Tk) for saving the results
of the EP calculation into an .json or .grp file.
@todo has to be implemented
'''
self.notdoneyet("'saveFile'")
def __exportLaTeX(self):
'''!
This method exports character data into a LaTeX file from which a PDF
will be generated
'''
from rpgtoolbox import latexexport
if self.char == None:
msg = messageWindow()
msg.showinfo(errmsg['no_data'][self.lang])
else:
export = latexexport.charsheet(self.char, "./data/", short = False)
msg = messageWindow()
msg.showinfo("LaTeX generated")
def __exportShortLaTeX(self):
'''!
This method exports character data into a LaTeX file from which a PDF
will be generated
'''
from rpgtoolbox import latexexport
if self.char == None:
msg = messageWindow()
msg.showinfo(errmsg['no_data'][self.lang])
else:
export = latexexport.charsheet(self.char, "./data/", short = True)
msg = messageWindow()
msg.showinfo("short LaTeX generated") #
def __exportSpellbook(self):
'''!
This generates a spellbook PDF out of the character's data
'''
from rpgtoolbox.latexexport import spellbook
if self.char == None:
msg = messageWindow()
msg.showinfo(errmsg['no_data'][self.lang])
else:
spellbook(self.char, self.mypath)
msg = messageWindow()
msg.showinfo("Spellbook generated")
def __addEditMenu(self):
'''!
This method adds an edit menu to the menu bar
'''
self.edtmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_edit'][self.lang],
menu = self.edtmenu)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_char'],
command = self.__edcharWin)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['char_back'],
command = self.__bckgrndWin)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['statgain'],
command = self.__statGainRoll)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_BGO'],
command = self.__BGOWin)
self.edtmenu.add_separator()
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_EP'],
command = self.__edtEPWin)
self.edtmenu.add_separator()
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_fight'],
command = self.__edfightWin)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_grp'],
command = self.__edtgrpWin)
def __edcharWin(self):
'''!
Generating a window for editing Characters/Character lists/Parties
'''
if self.char != None:
self.window.destroy()
self.window2 = skillcatWin(self.lang, self.mypath, self.char)
else:
msg = messageWindow()
msg.showinfo(errmsg['no_data'][self.lang])
def __edtgrpWin(self):
"""!
Opens a window for editing character parties
"""
grpwin = groupWin(self.lang)
def __BGOWin(self):
'''!
Opens a window to edit the character's Background Options (BGO)
@todo BGOWin has to be fully implemented
'''
self.notdoneyet("BGOWin")
def __edtEPWin(self):
'''!
Opens a window to enter and store new EPs in character data
'''
if self.char != None:
self.window.destroy()
self.window2 = editEPWin(self.lang, self.mypath, self.char)
else:
msg = messageWindow()
msg.showinfo(errmsg['no_data'][self.lang])
def __edfightWin(self):
'''!
Editing all Hits/Crits/Killed Monsters for calculating EPs
@todo has to be implemented
'''
self.notdoneyet()
def __edcalcWin(self):
'''!
Calculating and displaying the whole EPs for the RPG party.
@todo not implemented yet
'''
self.notdoneyet()
def __statGainRoll(self):
'''!
This opens a window for Stats Gain Roll for the character.
'''
self.window.destroy()
self.window2 = statGainWin(lang = self.lang, storepath = self.mypath, char = self.char)
def __addGMMenu(self):
"""
This method adds a Gamemaster Menu for generating special stuff like
treasures or magical items.
"""
self.gmmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_gm'][self.lang],
menu = self.gmmenu)
self.gmmenu.add_command(label = submenu['items'][self.lang]['treasure'],
command = self.__treasureWin)
self.gmmenu.add_command(label = submenu['items'][self.lang]['magical'],
command = self.__magicWin)
logger.debug("GM Menu build...")
def __treasureWin(self):
"""
This private method invokes a window to generate descriptions of treasures (gmtools.py).
"""
createTreasureWin(lang = self.lang, filename = 'treasure.txt')
def __magicWin(self):
"""
This private method invokes a window to generate descriptions of magic items (gmtools.py).
"""
createMagicWin(lang = self.lang)
def __addOptionMenu(self):
"""
This method adds an option/preferences menu to the menu bar.
"""
self.optmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_opt'][self.lang],
menu = self.optmenu)
self.optmenu.add_command(label = submenu['opts'][self.lang]['lang'],
command = self.__optWin)
def __optWin(self):
'''!
Opens an options window and closes the main window.
'''
self.window.destroy()
self.window = confWindow(self.lang)
def __addMenu(self):
'''!
This private method just adds the menu bar into the window's
layout
'''
self.menu = Menu(self.window)
self.window.configure(menu = self.menu)
def __addHelpMenu(self):
"""!
This methods defines a help menu.
"""
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['global'],
command = self.helpHandbook)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self.helpAbout)
def __bckgrndWin(self):
'''!
This opens the background window of a loaded character
'''
if self.char != None:
self.window.destroy()
self.window2 = charInfo(self.lang, self.mypath, self.char)
else:
msg = messageWindow()
msg.showinfo(errmsg['no_data'][self.lang])
def helpHandbook(self):
"""!
This method will show the rpg-tools Handbook
@todo this needs to be implemented
"""
self.notdoneyet()
def helpAbout(self):
'''!
This method just opens a message window with the basic
information about the PROGRAM (like version and
copyright)
'''
self.about = "%s\nVersion %s\n\n%s\n%s\n%s" % (__me__,
__version__,
__copyright__,
__license__,
__email__)
self.msg = messageWindow()
self.msg.showinfo(self.about)
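# Added note: navigation in this GUI works by destroying the current Tk window
# and constructing the next window object (see e.g. __optWin above and
# confWindow.__closewin below); MainWindow, inputWin and genAttrWin each start
# their own mainloop.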
class confWindow(blankWindow):
"""!
This class builds a window for selecting and saving options of
rpg-tools. For now it just selects the language for menus and
dialogues.
@param lang Language which shall be used in messages and menus.
----
@todo The following has to be implemented:
- improve design of options window
"""
def __init__(self, lang = 'en'):
"""
Class constructor
@param lang Language which shall be used in messages and menus
"""
self.lang = lang
self._cnf = chkCfg(lang = self.lang)
logger.debug("read cfg data: {}".format(self._cnf.cnfparam))
blankWindow.__init__(self, self.lang)
self.window.title(wintitle['opt_lang'][self.lang])
self.wert = StringVar()
self.index = sortIndex(shortcut)
self.__buildOptMenu()
self.__buildWinRadio()
def __buildOptMenu(self):
"""
This private method builds an option menu button in the option's window
to make a choice between the used supported RPGs.
"""
self.RPG = StringVar()
if 'rpg' in list(self._cnf.cnfparam.keys()):
self.RPG.set(self._cnf.cnfparam['rpg'])
else:
self.RPG.set('Rolemaster')
self.optMenu = OptionMenu(*(self.window, self.RPG) + tuple(supportedrpg[self.lang]))
self.optMenu.grid(column = 0, row = 0)
def __buildWinRadio(self):
"""!
This private method builds the option's window with radio
buttons of supported languages dynamically.
@todo switch language chooser from radio buttons to pull-down
menu
"""
self.sto_path = StringVar()
self.log_path = StringVar()
if 'datapath' in list(self._cnf.cnfparam.keys()):
self.sto_path.set(self._cnf.cnfparam['datapath'])
else:
self.sto_path.set(str(str(os.getcwd())) + "/data")
if 'lang' in list(self._cnf.cnfparam.keys()):
if self._cnf.cnfparam['lang'] != self.lang:
self.lang = self._cnf.cnfparam['lang']
if 'logpath' in list(self._cnf.cnfparam.keys()):
self.log_path.set(self._cnf.cnfparam['logpath'])
else:
self.log_path.set("./")
self.rb = {}
i = 1
for key in self.index:
self.rb[key] = Radiobutton(master = self.window,
text = shortcut[key],
variable = self.wert,
value = key
)
if key == self.lang:
self.rb[key].select()
self.rb[key].grid(column = 0, row = i)
i += 1
i += 1
Label(master = self.window,
width = 35
).grid(column = 0, row = i)
i += 1
Label(master = self.window,
width = 35,
text = labels['cfg_path'][self.lang]
).grid(column = 0, row = i)
i += 1
Entry(master = self.window,
width = 35,
textvariable = self.sto_path
).grid(column = 0, row = i)
i += 1
Label(master = self.window,
width = 35
).grid(column = 0, row = i)
i += 1
Label(master = self.window,
width = 35,
text = labels['log_path'][self.lang]
).grid(column = 0, row = i)
i += 1
Entry(master = self.window,
width = 35,
textvariable = self.log_path
).grid(column = 0, row = i)
i += 1
Button(self.window,
text = txtbutton['but_sav'][self.lang],
width = 15,
command = self.__save).grid(column = 0, row = i)
i += 1
Button(self.window,
text = txtbutton['but_clos'][self.lang],
width = 15,
command = self.__closewin).grid(column = 0, row = i)
def chosenLang(self):
"""
A public method which returns the string value of the chosen
language.
"""
return self.wert.get()
def __save(self):
"""!
A method for saving options in the user directory.
@todo variables to store have to be completed/adapted
"""
self.lang = self.wert.get()
self.path = self.sto_path.get()
self.log = self.log_path.get()
self.crpg = self.RPG.get()
if self.path[-1:] != '/':
self.path += '/'
if self.log[-1:] != '/':
self.log += '/'
self.cont = {'lang': self.lang,
'datapath': self.path,
'logpath': self.log,
'rpg': self.crpg
}
logger.debug('SAVE: lang=%s; datapath=%s; logpath=%s' %
(self.lang, self.path, self.log))
self._cnf.saveCnf(path = './conf',
filename = 'rpg-tools.cfg',
content = self.cont)
self._cnf = chkCfg(lang = self.lang)
logger.debug("saved cfg: {}".format(self._cnf.cnfparam))
self.msg = messageWindow()
self.msg.showinfo(processing['saved']
[self.lang] + '\n' + shortcut[self.lang])
def __closewin(self):
"""!
A method for closing the window and opening the main window.
@todo give RPG type to main window
"""
self.path = self.sto_path.get()
self.window.destroy()
self.window = MainWindow(self.lang, self.path)
class inputWin(blankWindow):
"""!
Objects of this class type are windows for entering the wanted data
structure. An exp structure will be built from the input.
@param lang This parameter holds the language chosen for the menus
and messages. Default value is 'en'.
@param filename this holds the filename of a read exp file holding
the functional structure.
@param storepath the path where the data files shall be stored.
"""
def __init__(self,
lang = 'en',
csvcontent = {},
filename = None,
storepath = None):
"""
Constructor!
@param lang This parameter holds the language chosen for the
menus and messages. Default value is 'en'
@param csvcontent a dictionary holding the information of CSV
@param filename this holds the filename and path of a read data
file containing the functional structure.
@param storepath the path where the data files shall be stored
in.
"""
self.lang = lang
self.csvcont = csvcontent
self.fname = filename
self.picpath = "./gui/pic/"
if self.fname != "" and self.fname != None:
self._last = getLast(string = self.fname, sep = "/")
else:
self._last = ""
self.mypath = storepath
blankWindow.__init__(self, self.lang)
self.window.title(wintitle['edit'][self.lang] + " - " + self._last)
self.filemenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_command(label = submenu['file'][self.lang]['new_char'],
command = self.__createchar)
self.filemenu.add_command(label = submenu['file'][self.lang]['new_grp'],
command = self.__creategroup)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
self.edtmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_edit'][self.lang],
menu = self.edtmenu)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_char'],
command = self.__editchar)
self.edtmenu.add_separator()
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_fight'],
command = self.__epfight)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_other'],
command = self.__epother)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_indiv'],
command = self.__epindiv)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_calc'],
command = self.__epcalc)
self.edtmenu.add_separator()
self.edtmenu.add_command(label = submenu['edit'][self.lang]['ed_sim'],
command = self.__fightsim)
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['page'],
command = self.__helppage)
self.helpmenu.add_command(label = submenu['help'][self.lang]['global'],
command = self.__helpglobal)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
"""
set picture for the window background of the main window
"""
self.__canvas = Canvas(self.window, width = '11.5c', height = '13.5c')
__background = PhotoImage(file = self.picpath + 'assassin.gif')
self.__canvas.create_image(0, 0, image = __background, anchor = NW)
self.__canvas.pack()
self.window.mainloop()
def __createchar(self):
'''!
Method to open a new window for character creation.
'''
self.window.destroy()
self.window3 = genAttrWin(lang = self.lang,
storepath = self.mypath)
def __creategroup(self):
'''!
Method to create a new character party/group
@todo creategroup has to be implemented
'''
print("input win --> creategroup")
self.notdoneyet('creategroup')
def __editchar(self):
'''!
Method to edit a character for the EP sheet.
@todo editchar is to be implemented
'''
print("input win --> editchar")
self.notdoneyet('editchar')
def __editgrp(self):
'''!
Method to edit a character group and keep track of it
@todo editgrp has to be implemented
'''
self.notdoneyet("editgrp")
def __epfight(self):
'''!
Method to calculate EPs from a fight (hits and criticals)
@todo epfight has to be implemented
'''
self.notdoneyet('epfight')
def __epother(self):
'''!
Method to calculate EPs from Spells, maneuvers, travel.
@todo epother has to be implemented
'''
self.notdoneyet('epother')
def __epindiv(self):
'''!
Method for adding individual EPs
@todo epindiv has to be implemented
'''
self.notdoneyet('epindiv')
def __epcalc(self):
'''!
Method to finalize EP calculation for a single gaming date
@todo epcalc has to be implemented
'''
self.notdoneyet('epcalc')
def __fightsim(self):
'''!
Method for simulating a fight and calculating potential EPs
@todo fightsim has to be implemented
'''
self.notdoneyet('fightsim')
def __closewin(self):
"""!
Method for closing the window and opening the main window.
"""
self.window.destroy()
self.window = MainWindow(self.lang, self.mypath)
def __helppage(self):
"""!
Method for help on this page.
@todo helppage has to be implemented
"""
self.notdoneyet("helppage")
def __helpglobal(self):
"""
Method to call the handbook as help for this page
"""
self.handbook("chapter %s" % (wintitle['edit'][self.lang]))
class genAttrWin(blankWindow):
'''!
A window class for generating name, race, profession and attributes of a new
character.
'''
def __init__(self, lang = 'en', storepath = './data', rpg = "RoleMaster"):
'''!
@param lang Chosen display language (default: en)
@param storepath Path to store data (default: ./data)
@param rpg Name of the supported RPG system (default: RoleMaster)
'''
if rpg == "RoleMaster":
from rpgtoolbox import rolemaster as rm
else:
self.notdoneyet("support for %s" % (rpg))
# @var self.character
# the attribute where to store the character data in as 'JSON'
self.character = {}
# @var self.lang
# used language
self.lang = lang
# @var self.spath
# storage path for character data file
self.spath = storepath
if self.spath[-1] != "/":
self.spath += "/"
self.__cultures = rm.cultures[self.lang][:6]
# @var self.profs
# a dictionary/JSON structure where a profession specific data (read from
# a CSV file) is stored in
self.profs = rm.choseProfession(self.lang)
# @var proflist
# list of all available professions
proflist = list(self.profs.keys())
# @var rmraces
# a list of all the RoleMaster races
rmraces = rm.races[self.lang]
# @var rmcultures
# list of available cultures
rmcultures = rm.cultures[self.lang]
# @var self.stats
# holds player, name, profession, race, realm and temp stats
self.stats = {}
# @var self.pots
# holds potential stats (maximum values)
self.pots = {}
# @var self.specs
# holds special stat bonuses, if any
self.specs = {}
# @var self.__race
# holds race stat bonuses
self.__race = {}
# @var self.__rr
# holds all resistance roll bonuses
self.__rr = {}
self.__labels = {}
# @var self.__totals
# holds total stat bonuses
self.__totals = {}
# @var self.__std
# holds standard stat bonuses
self.__std = {}
self.__count = 0
# @var self.__rmstats
# list of all stats' shortcuts in English
self.__rmstats = rm.stats
# @var self.__rangeOK
# just for checking whether the stats are in the correct ranges
self.__rangeOK = True
blankWindow.__init__(self, lang = self.lang)
self.window.title(wintitle['rm_charg'][self.lang] + " - Attributes")
self.showno = IntVar()
self.showno.set(660)
self.points = 660
dummy = ['player', 'name', 'prof', 'race', 'realm', 'culture']
for a in dummy:
self.stats[a] = StringVar()
for a in rm.stats:
self.stats[a] = IntVar()
self.stats[a].set(0)
self.pots[a] = IntVar()
self.pots[a].set(0)
self.specs[a] = IntVar()
self.specs[a].set(0)
self.__labels[a] = StringVar()
self.__labels[a].set(rm.labels[self.lang][a])
self.__race[a] = IntVar()
self.__race[a].set(0)
self.__std[a] = IntVar()
self.__std[a].set(0)
self.__totals[a] = IntVar()
self.__totals[a].set(0)
self.filemenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_command(label = submenu['file'][self.lang]['save'],
command = self.notdoneyet)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
self.__addHelpMenu()
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
Label(master = self.window,
width = 25,
text = rm.labels[self.lang]['player']
).grid(column = 0, row = 0, columnspan = 2)
Entry(master = self.window,
width = 35,
textvariable = self.stats['player'],
).grid(column = 2, row = 0, columnspan = 2)
Label(master = self.window,
width = 15,
text = rm.labels[self.lang]['culture']
).grid(column = 4, row = 0, columnspan = 2)
self.optMenu0 = OptionMenu(self.window,
self.stats['culture'],
*rmcultures,
command = self.__setCulture)
self.optMenu0.grid(column = 6, row = 0, columnspan = 2, sticky = "ew")
Label(master = self.window,
width = 25,
text = rm.labels[self.lang]['name']
).grid(column = 0, row = 1, columnspan = 2)
Entry(master = self.window,
width = 35,
textvariable = self.stats['name'],
).grid(column = 2, row = 1, columnspan = 2)
Label(master = self.window,
width = 15,
text = rm.labels[self.lang]['race']
).grid(column = 4, row = 1, columnspan = 2)
self.optMenu1 = OptionMenu(self.window,
self.stats['race'],
*rmraces,
command = self.__setRBonus)
self.optMenu1.grid(column = 6, row = 1, columnspan = 2, sticky = "ew")
Label(master = self.window,
width = 25,
text = rm.labels[self.lang]['prof']
).grid(column = 0, row = 2, columnspan = 2)
self.optMenu2 = OptionMenu(self.window,
self.stats['prof'],
*proflist,
command = self.__setRealm)
self.optMenu2.grid(column = 2, row = 2, columnspan = 2, sticky = "ew")
Label(master = self.window,
width = 15,
text = rm.labels[self.lang]['realm']
).grid(column = 4, row = 2, columnspan = 2)
self.optMenu3 = OptionMenu(self.window,
self.stats['realm'],
*rm.realms[self.lang],
command = self.__chkRealm)
self.optMenu3.grid(column = 6, row = 2, columnspan = 2, sticky = "ew")
Button(master = self.window,
text = txtbutton['but_roll'][self.lang],
width = 15,
command = self.rollDice).grid(column = 4, row = 3)
Label(master = self.window,
text = rm.labels[self.lang]['DP'],
).grid(column = 5, row = 3, columnspan = 2)
Message(master = self.window,
width = 35,
textvariable = self.showno,
font = "bold"
).grid(column = 7, row = 3)
Label(master = self.window,
width = 15,
relief = RIDGE,
font = "bold",
text = rm.labels[self.lang]['stats']
).grid(column = 0, row = 4, sticky = "ew")
Label(master = self.window,
width = 10,
relief = RIDGE,
font = "bold",
text = rm.labels[self.lang]['short']
).grid(column = 1, row = 4)
Label(master = self.window,
width = 10,
relief = RIDGE,
font = "bold",
text = "Temp"
).grid(column = 2, row = 4, sticky = "ew")
Label(master = self.window,
width = 10,
relief = RIDGE,
font = "bold",
text = "Pot"
).grid(column = 3, row = 4, sticky = "ew")
Label(master = self.window,
width = 10,
relief = RIDGE,
font = "bold",
text = rm.labels[self.lang]['race']
).grid(column = 4, row = 4, sticky = "ew")
Label(master = self.window,
width = 10,
relief = RIDGE,
font = "bold",
text = "Spec"
).grid(column = 5, row = 4, sticky = "ew")
Label(master = self.window,
width = 10,
relief = RIDGE,
font = "bold",
text = "Std"
).grid(column = 6, row = 4, sticky = "ew")
Label(master = self.window,
width = 10,
relief = RIDGE,
font = "bold",
text = rm.labels[self.lang]['total']
).grid(column = 7, row = 4, sticky = "ew")
i = 5
for s in rm.stats:
Label(master = self.window,
width = 15,
textvariable = self.__labels[s]
).grid(column = 0, row = i, sticky = "ew")
Label(master = self.window,
text = s # currently only English shortcuts
).grid(column = 1, row = i, sticky = "ew")
Entry(master = self.window,
width = 15,
textvariable = self.stats[s]
).grid(column = 2, row = i)
Message(master = self.window,
width = 25,
textvariable = self.pots[s],
).grid(column = 3, row = i)
Message(master = self.window,
width = 15,
textvariable = self.__race[s],
).grid(column = 4, row = i)
Entry(master = self.window,
width = 15,
textvariable = self.specs[s]
).grid(column = 5, row = i)
Message(master = self.window,
width = 25,
textvariable = self.__std[s],
).grid(column = 6, row = i)
Message(master = self.window,
width = 25,
font = "bold",
textvariable = self.__totals[s],
).grid(column = 7, row = i)
i += 1
Button(master = self.window,
text = txtbutton['but_calc'][self.lang],
width = 15,
command = self.__calcBonus).grid(column = 0, row = i)
Button(master = self.window,
text = txtbutton['but_next'][self.lang],
width = 10,
command = self.__nextStep).grid(column = 7, row = i)
self.window.mainloop()
def __addHelpMenu(self):
"""!
This methods defines a help menu.
@todo The following has to be implemented:
- global help information (function)
- window help information
"""
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['win'],
command = self.__winHelp)
self.helpmenu.add_command(label = submenu['help'][self.lang]['global'],
command = self.notdoneyet)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
def __winHelp(self):
'''!
This displays a window-specific help message box
'''
messageWindow(self.lang).showinfo(winhelpmsg["genAttrWin"][self.lang], "genAttrWin")
def __nextStep(self):
'''!
Checks whether all developing points (and not more) are used and player
and character names are set. If so it proceeds with collecting all data.
'''
if self.points != self.__used:
messageWindow(self.lang).showinfo(errmsg['stats_dp'][self.lang])
elif self.stats['player'].get() == "":
messageWindow(self.lang).showinfo(errmsg['player'][self.lang])
elif self.stats['name'].get() == "":
messageWindow(self.lang).showinfo(errmsg['name'][self.lang])
else:
self.__collectData()
def __calcBonus(self):
'''!
Totals and updates all bonuses and the spent stat points. If more points
are used than are available it raises an error message window.
'''
self.__statBonus()
self.__used = 0
for s in self.__rmstats:
total = self.__race[s].get() + \
self.specs[s].get() + self.__std[s].get()
self.__totals[s].set(total)
stat = self.stats[s].get()
self.__used += stat
self.pots[s].set(self.__creatPot(stat))
if self.__used > self.points:
self.showno.set(self.points - self.__used)
messageWindow(self.lang).showinfo(
errmsg['too_much_stats'][self.lang])
self.showno.set(self.points - self.__used)
self.__testStats()
def __testStats(self):
'''!
This checks the temp values of the stats and corrects them (with a warning)
if they are out of range: the primary stats have to be at least 90 and the
others must not fall below 20.
'''
testp = self.stats['prof'].get()
if testp != "":
primestats = self.profs[testp]['Prime Stats']
self.__rangeOK = True
for s in self.__rmstats:
if self.stats[s.strip('*')].get() < 20:
self.__rangeOK = False
self.stats[s.strip('*')].set(20)
messageWindow(self.lang).showinfo(
errmsg['wrong_stat'][self.lang] + "\n%s --> 20" % s)
for s in primestats:
if self.stats[s.strip('*')].get() < 90:
self.__rangeOK = False
self.stats[s.strip('*')].set(90)
messageWindow(self.lang).showinfo(
errmsg['wrong_stat'][self.lang] + "\ns %s --> 90 " % s)
def __setPStats(self):
'''!
Sets the primary (and magic) stats for a profession
@todo set the magic stat for chosen realms to semi spell users
'''
testp = self.stats['prof'].get()
if testp != "":
primestats = self.profs[testp]['Prime Stats']
for s in self.__rmstats:
dummy = self.__labels[s].get()
dummy = dummy.strip(" ()+*")
if s in primestats:
self.__labels[s].set(dummy + ' (+)')
if self.stats[s].get() < 90:
self.stats[s].set(90)
elif s + '*' in primestats:
self.__labels[s].set(dummy + ' (+)(*)')
if self.stats[s].get() < 90:
self.stats[s].set(90)
else:
self.__labels[s].set(dummy)
if self.stats[s].get() < 20:
self.stats[s].set(20)
potstat = self.__creatPot(self.stats[s].get())
self.pots[s].set(potstat)
self.__calcBonus()
def __setCulture(self, event):
'''!
Sets the right culture selection depending on the chosen race.
If the race is set, this method adapts the list of available cultures
to the chosen race.
----
@todo -# set the initial language ranks for the culture based languages
-# add the additional cultures from MERP
'''
from rpgtoolbox.rolemaster import races, cultures
from rpgtoolbox.lang import errmsg
testc = self.stats['culture'].get()
testr = self.stats['race'].get()
omenu = self.optMenu0.children['menu']
omenu.delete(0, "end")
if testr == "" or testr == None:
msg = messageWindow()
msg.showinfo(errmsg['no_race'][self.lang], 'Info')
elif testr in races[self.lang][:2]:
for cult in cultures[self.lang][:6]:
omenu.add_command(
label = cult, command = lambda v = cult: self.stats['culture'].set(v))
self.stats['culture'].set("")
else:
omenu.add_command(
label = testr, command = lambda v = testr: self.stats['culture'].set(v))
self.stats['culture'].set(testr)
def __statBonus(self):
'''!
Sets/calculates standard stat bonus
'''
from rpgtoolbox.rolemaster import statbonus
for s in self.__rmstats:
value = self.stats[s].get()
self.__std[s].set(statbonus(value))
def __chkRealm(self, event):
'''!
This method checks whether the right magic realm is chosen for the
selected profession
@param event object event given by OptionMenu but not used
----
@bug potential cause for false DP calculations. It is not clear how to
reproduce this bug.
@bug if testr != self.profs[testp]['Realm'] and self.profs[testp]['Realm'] != "choice": KeyError: ''
@bug if realm chosen before profession an error occurs (sdtout)
@note bug should be fixed
'''
testr = self.stats['realm'].get()
testp = self.stats['prof'].get()
if testp != "":
if testr != self.profs[testp]['Realm'] and self.profs[testp]['Realm'] != "choice":
self.stats['realm'].set(self.profs[testp]['Realm'])
self.__setPStats()
self.__calcBonus()
def __setRBonus(self, event):
'''!
This method sets the race bonuses, the race based RR bonuses,
Background Options and Hobby Ranks.
@param event object event given by OptionMenu but not used
----
@todo prepare race based additional bonuses which may be added when going to
the next window.
'''
from rpgtoolbox import rolemaster as rm
race = self.stats['race'].get()
pos = rm.races[self.lang].index(race)
race = rm.races['en'][pos]
for i in list(rm.raceAbilities[race].keys()):
if "RR" in i:
self.__rr[i] = rm.raceAbilities[race][i]
self.character['BGO'] = rm.raceAbilities[race]['BGO']
self.character['Hobby Ranks'] = rm.raceAbilities[race]['Hobby Ranks']
for a in rm.stats:
self.__race[a].set(rm.raceAbilities[race][a])
self.__setCulture("")
def __setRealm(self, event):
'''!
Sets the connected Realm if profession is chosen
@param event object event given by OptionMenu but not used
'''
testp = self.stats['prof'].get()
self.stats['realm'].set(self.profs[testp]['Realm'])
self.__setPStats()
def __creatPot(self, temp = 20, fixed = False):
'''!
This method creates a potential stat from a temporary stat.
@param temp value of the temporary stat
@param fixed a parameter that turns the fixed creation mode on/off
@retval result the resulting potential stat value
'''
result = 1
if 19 < temp < 25:
if not fixed:
result = 20 + self.dice(10, 8)
else:
result = temp + 44
elif 24 < temp < 35:
if not fixed:
result = 30 + self.dice(10, 7)
else:
result = temp + 39
elif 34 < temp < 45:
if not fixed:
result = 40 + self.dice(10, 6)
else:
result = temp + 33
elif 44 < temp < 55:
if not fixed:
result = 50 + self.dice(10, 5)
else:
result = temp + 28
elif 54 < temp < 65:
if not fixed:
result = 60 + self.dice(10, 4)
else:
result = temp + 22
elif 64 < temp < 75:
if not fixed:
result = 70 + self.dice(10, 3)
else:
result = temp + 17
elif 74 < temp < 85:
if not fixed:
result = 80 + self.dice(10, 2)
else:
result = temp + 11
elif 84 < temp < 92:
if not fixed:
result = 90 + self.dice(10, 1)
else:
result = temp + 6
elif temp == 92:
if not fixed:
result = temp - 1 + self.dice(9, 1)
else:
result = temp + 5
elif temp == 93:
if not fixed:
result = temp - 1 + self.dice(8, 1)
else:
result = temp + 4
elif temp == 94:
if not fixed:
result = temp - 1 + self.dice(7, 1)
else:
result = temp + 4
elif temp == 95:
if not fixed:
result = temp - 1 + self.dice(6, 1)
else:
result = temp + 3
elif temp == 96:
if not fixed:
result = temp - 1 + self.dice(5, 1)
else:
result = temp + 3
elif temp == 97:
if not fixed:
result = temp - 1 + self.dice(4, 1)
else:
result = temp + 2
elif temp == 98:
if not fixed:
result = temp - 1 + self.dice(3, 1)
else:
result = temp + 2
elif 98 < temp:
if not fixed:
result = temp - 1 + self.dice(2, 1)
else:
result = temp + 1
if result < temp:
result = temp
return result
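# Worked example (derived from the branches above): a temporary stat of 90
# falls into the 84 < temp < 92 bracket, so the potential becomes 90 + d10 in
# random mode or 90 + 6 = 96 in fixed mode; a temp of 50 becomes
# 50 + dice(10, 5) randomly or 50 + 28 = 78 fixed. The potential is never
# allowed to drop below the temporary value.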
def dice(self, sides = 6, number = 1):
'''!
This function delivers the sum of a number of dice rolls.
@param sides number of sides of the used dice
@param number number of dice/rolls
@retval result sum of the dice rolls
'''
i = 0
result = 0
while i < number:
roll = random.randint(1, sides)
result += roll
i += 1
return result
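# Example usage (illustrative): dice(10, 3) sums three independent d10 rolls,
# i.e. it is equivalent to sum(random.randint(1, 10) for _ in range(3)) and
# yields a value between 3 and 30.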
def rollDice(self):
"""!
Creates the pool for stat generation by rolling the dice.
"""
self.__count += 1
if 0 < self.__count < 4:
result = 600 + self.dice(10, 10)
self.showno.set(result)
self.points = self.showno.get()
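# Example (derived from the code above): each accepted roll sets the pool to
# 600 + 10d10, i.e. a value between 610 and 700; only the first three clicks
# (self.__count 1..3) change the pool.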
def __collectData(self):
'''!
This method collects all data, adds them to the character's data structure
and saves that on disk.
After that it destroys the current window and opens the window for the
next creation step.
'''
from rpgtoolbox import rolemaster as rm
import json
for key in ['player', 'name', 'prof', 'race', 'realm', 'culture']:
self.character[key] = self.stats[key].get()
race = rm.races['en'][rm.races[self.lang].index(
self.character['race'])]
for stat in self.__rmstats:
self.character[stat] = {}
self.character[stat]['name'] = rm.labels[self.lang][stat]
self.character[stat]['temp'] = self.stats[stat].get()
self.character[stat]['pot'] = self.pots[stat].get()
self.character[stat]['race'] = self.__race[stat].get()
self.character[stat]['spec'] = self.specs[stat].get()
self.character[stat]['std'] = self.__std[stat].get()
self.character[stat]['total'] = self.__totals[stat].get()
self.character['RREss'] = self.character['Em']['total'] * \
3 + rm.raceAbilities[race]['RREss']
self.character['RRChan'] = self.character['In']['total'] * \
3 + rm.raceAbilities[race]['RRChan']
self.character['RRMent'] = self.character['Pr']['total'] * \
3 + rm.raceAbilities[race]['RRMent']
self.character['RRArc'] = self.character['Pr']['total'] + \
self.character['Em']['total'] + self.character['In']['total']
self.character['RRC/E'] = self.character['In']['total'] + self.character['Em']['total'] + \
(rm.raceAbilities[race]['RREss'] +
rm.raceAbilities[race]['RRChan']) / 2
self.character['RRC/M'] = self.character['In']['total'] + self.character['Pr']['total'] + \
(rm.raceAbilities[race]['RRMent'] +
rm.raceAbilities[race]['RRChan']) / 2
self.character['RRE/M'] = self.character['Pr']['total'] + self.character['Em']['total'] + \
(rm.raceAbilities[race]['RREss'] +
rm.raceAbilities[race]['RRMent']) / 2
self.character['RRDisease'] = self.character['Co']['total'] * \
3 + rm.raceAbilities[race]['RRDisease']
self.character['RRPoison'] = self.character['Co']['total'] * \
3 + rm.raceAbilities[race]['RRPoison']
self.character['RRFear'] = self.character["SD"]['total'] * 3
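# Illustrative RR calculation (made-up values): with an Em total of 95 and a
# racial RREss of 5, RREss = 95 * 3 + 5 = 290; RRArc is simply the sum of the
# Pr, Em and In totals without a racial modifier.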
self.character['purse'] = {'GP': 2,
'SP': 0,
'CP': 0,
'TP': 0,
'IP': 0}
self.character['old_exp'] = 0
self.character['exp'] = 10000
self.character['lvl'] = 1
self.character["soul dep"] = rm.raceHealingFactors[self.character["race"]]["soul dep"]
self.character["Stat Loss"] = rm.raceHealingFactors[self.character["race"]]["Stat Loss"]
self.character["Recovery"] = rm.raceHealingFactors[self.character["race"]]["Recovery"]
self.__addCatnSkills()
if not os.path.exists(self.spath + self.character['player']):
os.mkdir(self.spath + self.character['player'])
else:
try:
with open(self.spath + self.character['player'] + '/' + self.character['name'] + ".json", "w") as outfile:
json.dump(self.character, outfile, sort_keys = True,
indent = 4, ensure_ascii = False)
except:
with open(self.spath + self.character['player'] + '/' + self.character['name'] + ".json", "w") as outfile:
json.dump(self.character, outfile, indent = 4)
self.window.destroy()
self.window3 = priorizeWeaponsWin(
self.lang, self.spath, self.character)
def __addCatnSkills(self):
'''!
This method adds skill categories and skills to the character's dictionary
as well as bonuses (special, profession and item)
@note Skills won't have a profession bonus. It is already applied to the
category.
'''
from rpgtoolbox import rolemaster as rm
fp = open("%sdefault/Skills_%s.csv" % (self.spath, self.lang))
content = fp.readlines()
fp.close()
if '\n' in content:
content.remove('\n')
for i in range(0, len(content)):
content[i] = content[i].strip('\n\t ').split(',')
skillcat = {}
for i in range(1, len(content)):
skillcat[content[i][0]] = {content[0][2]: content[i][2],
content[0][1]: {},
'spec bonus': 0,
'prof bonus': 0,
'item bonus': 0,
'rank': 0
}
for pb in list(self.profs[self.character['prof']]['Profession Bonusses'].keys()):
if pb in content[i][0]:
skillcat[content[i][0]]['prof bonus'] = self.profs[self.character['prof']
]['Profession Bonusses'][pb]
skillcat[content[i][0]][content[0][1]] = {}
for skill in content[i][1].split(';'):
skillcat[content[i][0]][content[0][1]][skill] = {content[0][2]: content[i][2],
'rank': 0,
'rank bonus': 0,
'item bonus': 0,
'spec bonus': 0,
}
del(content)
self.profs = rm.choseProfession(self.lang)
for key in skillcat.keys():
for pbonus in self.profs[self.character['prof']]['Profession Bonusses'].keys():
if pbonus in key:
skillcat[key]['prof bonus'] = int(self.profs[self.character['prof']]['Profession Bonusses'][pbonus])
fp = open('%s/default/SkillCat_%s.csv' % (self.spath, self.lang), 'r')
content = fp.readlines()
fp.close()
content[0] = content[0].strip("\n").split(',')
for i in range(1, len(content)):
content[i] = content[i].strip('\n').split(',')
if content[i][0] not in list(skillcat.keys()):
skillcat[content[i][0]] = {'rank': 0,
'rank bonus': 0,
'item bonus': 0,
'spec bonus': 0
}
skillcat[content[i][0]]['Skill'] = {}
skillcat[content[i][0]][content[0][2]] = content[i][2]
skillcat[content[i][0]]["Skill"][content[0][2]] = content[i][2]
skillcat[content[i][0]][content[0][1]] = content[i][1].split('/')
if rm.catnames[self.lang]['spells'] in content[i][0][:7]:
temp = []
if '[' in self.character['realm']:
self.character['realm'] = self.character['realm'].strip(
"'[ ]\n").split("', '")
if type(self.character['realm']) == type([]):
for r in self.character['realm']:
temp.append(rm.realmstats[self.lang][r])
elif self.character['realm'] != "choice":
if " " in self.character['realm']:
self.character['realm'] = self.character['realm'].strip("(')")
self.character['realm'] = self.character['realm'].split("', '")
for r in self.character['realm']:
temp.append(rm.realmstats[self.lang][r])
else:
temp.append(rm.realmstats[self.lang][self.character['realm']])
skillcat[content[i][0]][content[0][1]] = temp
skillcat[content[i][0]]["Skill"][content[0][1]] = temp
self.character['cat'] = skillcat
if self.character['realm'] != "choice":
self.spellbook = handlemagic.getSpells(self.spath,
self.character['prof'],
self.character['realm'],
self.character['lvl']
)
logger.debug("addCatnSkills: path: {}, prof: {}, realm: {}, lvl:{}".format(self.spath,
self.character['prof'],
self.character['realm'],
self.character['lvl']))
for cat in list(self.character['cat'].keys()):
if cat[:8] == "Spells -":
for slcat in list(self.spellbook.spelllists.keys()):
print("DeBUG: addCatnSkills (slcat) {}".format(slcat))
print("Debug: keys: {}".format(list(self.spellbook.spelllists[slcat].keys())))
if self.spellbook.spelllists[slcat]['Category'] in cat:
for spell in list(self.spellbook.spelllists[slcat].keys()):
if spell != "Category":
self.character['cat'][cat]['Skill'][spell] = self.spellbook.spelllists[slcat][spell]
self.character['cat'][cat]['Skill'][spell]['rank'] = 0
self.character['cat'][cat]['Skill'][spell]["Progression"] = "Skill Only"
self.character['cat'][cat]['Skill'][spell]['rank bonus'] = 0
self.character['cat'][cat]['Skill'][spell]['item bonus'] = 0
self.character['cat'][cat]['Skill'][spell]["spec bonus"] = 0
def __closewin(self):
'''!
A method to destroy the current window and go back to MainWindow.
'''
self.window.destroy()
self.window = MainWindow(
lang = self.lang, char = self.character, storepath = self.spath)
class priorizeWeaponsWin(blankWindow):
"""!
This is the class for a window object to choose the priority of weapon skills
at the character's generation. It will also set the category and skill ranks
during adolescence.
@bug sometimes weapon categories that are chosen twice cause list index errors
and are not detected as duplicates.
"""
def __init__(self, lang = 'en', storepath = os.getcwd() + "/data", char = None):
"""!
Class constructor
@param lang The chosen language for window's and button's
texts. At the moment, only English (en, default
value) and German (de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@param char Character as JSON
"""
from rpgtoolbox.rolemaster import catnames
self.__catnames = catnames
if storepath == None:
self.spath = os.getcwd() + "/data"
logger.debug('Set storepath to %s' % self.spath)
else:
self.spath = storepath
logger.debug('priorizeWeaponsWin: storepath set to %s' %
(storepath))
self.lang = lang
self.character = char
blankWindow.__init__(self, self.lang)
self.window.title('%s - %s (%s)' % (wintitle['rm_create'][self.lang],
self.character['name'],
self.character['prof']
)
)
self.filemenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_command(label = submenu['file'][self.lang]['save'],
command = self.notdoneyet)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
self.__addHelpMenu()
self.__getWeaponCats()
self.__buildWin()
self.window.mainloop()
def __buildWin(self):
'''!
Sets up all the needed Widgets in the window
'''
self.__prio = {}
self.__optWdg = {}
for i in range(1, 8):
self.__prio["%s - %d" %
(self.__catnames[self.lang]['weapon'], i)] = StringVar()
self.__prio["%s - %d" % (self.__catnames[self.lang]['weapon'], i)].set(
"%s - %d" % (self.__catnames[self.lang]['weapon'], i))
Label(master = self.window,
width = 15,
text = "Prio #%d %s" % (
i, self.__catnames[self.lang]['weapon'])
).grid(column = 0, row = i)
self.__optWdg[str(i)] = OptionMenu(self.window,
self.__prio["%s - %d" %
(self.__catnames[self.lang]['weapon'], i)],
*self.weaponcats,
command = self.__getPrio)
self.__optWdg[str(i)].config(width = 50)
self.__optWdg[str(i)].grid(column = 1, row = i, sticky = "W")
Button(master = self.window,
text = txtbutton['but_next'][self.lang],
width = 10,
command = self.__nextStep).grid(column = 1, row = i + 1, sticky = "E")
def __getPrio(self, event):
'''!
This generates the priority list by the chosen priorities.
@param event has to be caught but is not used
@todo check for duplicate priorities. If any are found, do not proceed
@bug when you choose duplicate entries: File "/home/mongol/git/rpg-tools/src/gui/epwins.py", line 1808, in __getPrio
for i in range(len(content) - 7, len(content)):
IndexError: list index out of range
'''
self.__priolist = []
self.__block = False
for i in range(1, 8):
dummy = self.__prio["%s - %d" %
(self.__catnames[self.lang]['weapon'], i)].get()
if dummy not in self.__priolist and dummy != "":
self.__priolist.append(dummy)
elif dummy in self.__priolist and i < 7:
self.__block = True
msg = messageWindow()
msg.showinfo(errmsg['double'][self.lang], "Info")
break
if not self.__block:
fp = open('./data/default/CatDPC_%s.csv' % self.lang, 'r')
content = fp.readlines()
fp.close()
j = 1
for i in range(len(content) - 7, len(content)):
content[i] = content[i].replace("%s - %d" % (self.__catnames[self.lang]['weapon'], j),
self.__priolist[j - 1])
j += 1
self.__content = content
def __buildJSON(self):
'''!
Makes a JSON out of CatDPC.csv
Skill cat --> Profession : costs
'''
self.__catDBC = {}
self.__content[0] = self.__content[0].strip('\n \t').split(',')
for i in range(1, len(self.__content[0])):
self.__catDBC[self.__content[0][i]] = {}
for i in range(1, len(self.__content)):
self.__content[i] = self.__content[i].strip('\n').split(',')
self.__content[i][0] = self.__content[i][0].strip(' \t')
for j in range(1, len(self.__content[0])):
self.__catDBC[self.__content[0][j]
][self.__content[i][0]] = self.__content[i][j]
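# Example of the resulting structure (hypothetical names/values): after
# __buildJSON, self.__catDBC looks like
# {'Fighter': {'Weapon - 1-H Edged': '1/5', ...}, ...},
# i.e. profession -> skill category -> development cost string.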
def __addToChar(self):
'''!
This method adds the relevant development costs and the category/skill ranks
gained during adolescence to the character data structure (JSON).
It also calculates the rank bonus for the first time.
'''
from rpgtoolbox.rolemaster import races, labels, progressionType, rankbonus, catnames, exceptions, cultures
# @var prof
# dummy variable that holds character's profession
prof = self.character['prof']
# @var crace
# dummy variable that holds character's race
crace = races['en'][races[self.lang].index(self.character['race'])]
for skillcat in list(self.__catDBC[prof].keys()):
dbcdummy = self.__catDBC[prof][skillcat].split('/')
skprog = ""
for i in range(0, len(dbcdummy)):
if dbcdummy[i] != "":
dbcdummy[i] = int(dbcdummy[i])
self.character['cat'][skillcat][labels["en"]['costs']] = dbcdummy
for s in list(self.character['cat'][skillcat]['Skill'].keys()):
if s not in exceptions:
self.character['cat'][skillcat]['Skill'][s][labels["en"]
['costs']] = dbcdummy
if self.character['cat'][skillcat]['Progression'] == "Standard":
self.character['cat'][skillcat]['Progression'] = progressionType['standard cat']
skprog = progressionType['standard skill']
elif self.character['cat'][skillcat]['Progression'] == "BD":
self.character['cat'][skillcat]['Progression'] = progressionType['null']
skprog = progressionType['BD %s' % crace]
elif self.character['cat'][skillcat]['Progression'] == "Null" or self.character['cat'][skillcat]['Progression'] == "Skill Only":
self.character['cat'][skillcat]['Progression'] = progressionType['null']
skprog = progressionType['skill only']
elif self.character['cat'][skillcat]['Progression'] == "Combined":
self.character['cat'][skillcat]['Progression'] = progressionType['null']
skprog = progressionType['combined']
for skill in list(self.character['cat'][skillcat]['Skill'].keys()):
if skill not in exceptions:
self.character['cat'][skillcat]['Skill'][skill]['Progression'] = skprog
self.__setPPD()
self.saveChar()
# adding adolescence skill ranks
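# Layout of the AdoRanks CSV as assumed by the parser below: the header row
# holds the race/culture names; each further row holds a category (or, when
# the first cell starts with '-', a skill belonging to the category above)
# followed by the adolescence ranks per race/culture.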
fp = open('./data/default/AdoRanks_%s.csv' % self.lang, "r")
content = fp.readlines()
fp.close()
self.__adoranks = {}
content[0] = content[0].strip('\n').split(',')
for i in range(1, len(content[0])):
self.__adoranks[content[0][i]] = {}
for j in range(1, len(content)):
content[j] = content[j].strip('\n').split(',')
content[j][0] = content[j][0].strip(' \t')
for i in range(1, len(content[0])):
if content[j][0][:1] != "-":
self.__adoranks[content[0][i]][content[j][0]] = {"rank": int(content[j][i]),
"rank bonus": rankbonus(rank = int(content[j][i]),
progression = self.character['cat'][
content[j][0]]['Progression']
)
}
lastcat = content[j][0]
if "Skill" not in list(self.__adoranks[content[0][i]][content[j][0]].keys()):
self.__adoranks[content[0][i]
][content[j][0]]['Skill'] = {}
else:
self.__adoranks[content[0][i]][lastcat]['Skill'][content[j][0].strip('-')] = {'rank': int(content[j][i]),
'rank bonus': 0,
}
if self.lang != "en":
race = races['en'][races[self.lang].index(self.character['race'])]
culture = cultures['en'][cultures[self.lang].index(
self.character['culture'])]
else:
race = self.character['race']
culture = self.character['culture']
if race in ['Common Men', 'Mixed Men']:
race = culture
for cat in list(self.__adoranks[race].keys()):
self.character['cat'][cat]['rank'] = self.__adoranks[race][cat]['rank']
self.character['cat'][cat]['rank bonus'] = self.__adoranks[race][cat]['rank bonus']
if self.__adoranks[race][cat]['Skill'] != {}:
for skill in list(self.__adoranks[race][cat]['Skill'].keys()):
if skill not in list(self.character['cat'][cat]['Skill'].keys()):
self.character['cat'][cat]['Skill'][skill] = {}
self.character['cat'][cat]['Skill'][skill]['rank'] = self.__adoranks[race][cat]['Skill'][skill]['rank']
self.character['cat'][cat]['Skill'][skill]['rank bonus'] = self.__adoranks[race][cat]['Skill'][skill]['rank bonus']
self.saveChar()
def __setPPD(self):
'''!
This sets the Progression and Stats for Power Point Development
'''
from rpgtoolbox.rolemaster import races, realms, ppds, magicstats, progressionType, speccat
param = {}
param['realm'] = self.character['realm']
for l in list(races.keys()):
if self.character['race'] in races[l]:
param['lang'] = l
param['race'] = races['en'][races[l].index(self.character['race'])]
if self.character['realm'] in realms[l]:
param['ppd'] = ppds[realms[l].index(self.character['realm'])]
param['Stats'] = magicstats[realms[l].index(self.character['realm'])]
if type(param['ppd']) == type(''):
param['ppd'] = progressionType[param['ppd'] + param['race']]
elif type(param['ppd']) == type([]):
for i in range(0, len(param['ppd'])):
param['ppd'][i] = progressionType[param['ppd'][i] + param['race']]
if param['ppd'][0] > param['ppd'][1]:
param['ppd'] = param['ppd'][0]
else:
param['ppd'] = param['ppd'][1]
self.character['cat'][speccat[param['lang']][1]]['Progression'] = progressionType['null']
self.character['cat'][speccat[param['lang']][1]]['Stats'] = param['Stats']
self.character['cat'][speccat[param['lang']][1]]['Skill'][speccat[param['lang']][1]]['Progression'] = param['ppd']
def saveChar(self):
'''!
This method saves the character as JSON file
'''
import json
charname = self.character['name'] #.replace(" ", "_")
try:
with open(self.spath + self.character['player'] + '/' + charname + ".json", "w") as outfile:
json.dump(self.character, outfile, sort_keys = True,
indent = 4, ensure_ascii = False)
except:
logger.error("saveChar: could not save {} sorted".format(self.spath + self.character['player'] + '/' + charname + ".json"))
with open(self.spath + self.character['player'] + '/' + charname + ".json", "w") as outfile:
json.dump(self.character, outfile, indent = 4)
def __getWeaponCats(self):
'''!
Extracts the weapon categories from character
'''
self.weaponcats = []
for cat in list(self.character['cat'].keys()):
if self.__catnames[self.lang]['weapon'] in cat:
self.weaponcats.append(cat)
self.weaponcats.sort()
def __nextStep(self):
'''!
Opens the next window to modify categories and skills
'''
self.__getPrio("")
self.__buildJSON()
self.__addToChar()
self.window.destroy()
self.window2 = skillcatWin(self.lang, self.spath, self.character)
def __closewin(self):
'''!
A method to destroy the current window and go back to MainWindow.
'''
self.window.destroy()
self.window = MainWindow(lang = self.lang, char = self.character)
def __addHelpMenu(self):
"""!
This methods defines a help menu.
"""
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['global'],
command = self._helpPriorize)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
def _helpPriorize(self):
'''!
Opens a message window with help text
'''
helptext = {'de': 'Die Priorisierung der Waffenfertigkeiten ist wichtig für '
+'die Steigerungskosten und mögliche Anzahl der Steigerungen.\n'
+'1 ist die höchste und 7 die geringste Priorität.',
'en': 'It is important to prioritize the weapon categories because of '
+'development costs and the number of ranks you can develop.\n'
+'1 is the highest priority and 7 the lowest.'
}
helper = messageWindow()
helper.showinfo(helptext[self.lang], 'Info')
class skillcatWin(blankWindow):
"""!
This is the class for a window object to develop skill categories and skills
by spending Development Points (DP), both at character generation and at
level-up.
----
@todo not finished yet:
- selecting items here can make changes undone or change them again
- hide skill/cat line when not selected in treeview
@bug - DP are not always shown correctly
- you lose DPs when you try to level up a skill/cat in multiple steps
"""
def __init__(self, lang = 'en', storepath = os.getcwd() + "/data", char = None):
"""!
Class constructor
@param lang The chosen language for window's and button's
texts. At the moment, only English (en, default
value) and German (de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@param char Character as JSON
"""
from rpgtoolbox.rolemaster import catnames, rankbonus
self.__catnames = catnames
self.__rankbonus = rankbonus
if storepath == None:
self.spath = os.getcwd() + "/data"
logger.debug('Set storepath to %s' % self.spath)
else:
self.spath = storepath
logger.debug('skillcatWin: storepath set to %s' %
(storepath))
self.lang = lang
self._character = dict(calcTotals(char))
self.__save()
if os.path.isfile("{}/{}/{}_changes.json".format(self.spath, self._character['player'], self._character['name'])):
self.__changed = readJSON("{}/{}/{}_changes.json".format(self.spath, self._character['player'], self._character['name']))
else:
# @var self.__changed
# dictionary with the changed categories/skills
self.__changed = {'name': self._character['name'],
'player': self._character['player'],
'cat': {}
}
self.__calcLvlup()
blankWindow.__init__(self, self.lang)
self.window.title("%s - %s (%s)" % (wintitle['edit'][self.lang],
self._character['name'],
self._character['prof']
)
)
self.filemenu = Menu(master = self.menu)
self.__addFileMenu()
self.__addHelpMenu()
self.__buildWin()
self.__buildTree()
self.window.mainloop()
def __addFileMenu(self):
'''!
Adds a file menu to menu bar.
'''
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_command(label = submenu['file'][self.lang]['save'],
command = self.__save)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
def __closewin(self):
'''!
A method to destroy the current window and go back to MainWindow.
'''
if self.__usedDP > 0:
self._character['lvlup'] -= 1
if self._character['lvlup'] < 0:
self._character['lvlup'] = 0
self.window.destroy()
self.window = MainWindow(lang = self.lang, storepath = self.spath , char = self._character)
def __addHelpMenu(self):
'''!
Adds a help menu entry to menu bar.
'''
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['win'],
command = self.__helpAWin)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
def __buildWin(self):
'''!
Builds the window's elements.
- a frame containing:
-# treeview widget
-# vertical (auto)scrollbar linked to the treeview widget
-# horizontal (auto)scrollbar linked to the treeview widget
- Labels for specific category/skill values
'''
from rpgtoolbox.rolemaster import labels as rmlabels
self.__treeframe = Frame()
self.__treeframe.grid(column = 0, row = 0, columnspan = 7, rowspan = 3, sticky = "NEWS")
self.__rmlabels = rmlabels
self.__treecolumns = []
self.catentry = StringVar()
self.skillentry = ""
self.catrank = StringVar()
self.skillrank = StringVar()
self.__usedDP = 0
self.catcost = []
self.skillcost = []
self.__calcDP()
# @var self.__changes
# list of edited/added skills and categories
self.__changes = {}
for key in ['skill', 'progress', 'costs', 'rank', 'total']:
self.__treecolumns.append(rmlabels[self.lang][key])
## @var self.__tree
# The first Treeview widget with the character data to change
self.__tree = Treeview(self.__treeframe,
columns = self.__treecolumns,
show = "headings"
)
vscroll = AutoScrollbar(orient = "vertical", command = self.__tree.yview)
hscroll = AutoScrollbar(orient = "horizontal", command = self.__tree.xview)
self.__tree.configure(yscrollcommand = vscroll.set,
xscrollcommand = hscroll.set)
self.__tree.grid(column = 0, row = 0, sticky = "NEWS", in_ = self.__treeframe)
vscroll.grid(column = 1, row = 0, in_ = self.__treeframe, sticky = "NS")
hscroll.grid(column = 0, row = 1, in_ = self.__treeframe, sticky = "EW")
## @var self.__chgtree
# The second Treeview widget where the changes will be shown.
self.__chgtree = Treeview(self.window,
columns = self.__treecolumns,
show = "headings",
)
chgvscroll = AutoScrollbar(
orient = "vertical", command = self.__chgtree.yview)
self.__chgtree.configure(yscrollcommand = chgvscroll.set)
self.__chgtree.grid(column = 0,
row = 7,
columnspan = 7,
rowspan = 3,
sticky = "NEW"
)
chgvscroll.grid(column = 7, row = 7, rowspan = 3, in_ = self.window, sticky = "NS")
Label(master = self.window, width = 30,
justify = LEFT,
text = labels['name'][self.lang]).grid(column = 0,
row = 3,
sticky = "W",
padx = 5,
pady = 5)
Label(master = self.window,
width = 10,
justify = LEFT,
text = labels['dp_costs'][self.lang]).grid(column = 1,
row = 3,
sticky = "W",
padx = 5,
pady = 5)
Label(master = self.window,
width = 20,
justify = LEFT,
text = labels['progr'][self.lang]).grid(column = 2,
row = 3,
sticky = "W",
padx = 5,
pady = 5)
Label(master = self.window, width = 4,
justify = LEFT,
text = labels['ranks'][self.lang]).grid(column = 3,
row = 3,
sticky = "W",
padx = 5,
pady = 5)
Label(master = self.window, width = 4,
justify = LEFT,
text = labels['total'][self.lang]).grid(column = 4,
row = 3,
sticky = "W",
padx = 5,
pady = 5)
self._catentry = Label(master = self.window,
width = 30,
justify = LEFT,
textvariable = self.catentry
)
self._catentry.grid(column = 0, row = 4, sticky = "NW", padx = 5, pady = 2)
self.SpinSkillVal = StringVar()
self.SpinCatVal = StringVar()
self.CatProg = StringVar()
self.SkillProg = StringVar()
self.SpinSkillVal.set(0)
self.SpinCatVal.set(0)
self.CatProg.set("0 0 0 0 0")
self.SkillProg.set("0 0 0 0 0")
self._catprog = Label(master = self.window,
width = 20,
justify = LEFT,
textvariable = self.CatProg
)
self._catprog.grid(column = 2,
row = 4,
sticky = "NW",
padx = 5,
pady = 2
)
self._skillprog = Label(master = self.window,
width = 20,
justify = LEFT,
textvariable = self.SkillProg
)
self._skillprog.grid(column = 2,
row = 5,
sticky = "NW",
padx = 5,
pady = 2
)
self._catspin = Spinbox(master = self.window,
from_ = 0,
to = len(self.catcost),
width = 2,
textvariable = self.SpinCatVal
)
self._catspin.grid(column = 3,
row = 4,
sticky = "NW",
padx = 5,
pady = 2
)
self._skillspin = Spinbox(master = self.window,
from_ = 0,
to = len(self.skillcost),
width = 2,
textvariable = self.SpinSkillVal
)
self._skillspin.grid(column = 3,
row = 5,
sticky = "NW",
padx = 5,
pady = 2
)
self._skillentry = Entry(master = self.window,
width = 30,
textvariable = self.skillentry
)
self._skillentry.grid(column = 0,
row = 5,
sticky = "NW",
padx = 5,
pady = 2
)
Label(master = self.window,
text = "remaining DPs:").grid(column = 5,
row = 3,
sticky = "NW")
self.DPtext = StringVar()
self._remainDP = Label(master = self.window,
width = 4,
justify = LEFT,
textvariable = self.DPtext
)
self._remainDP.grid(column = 6,
row = 3,
sticky = "NW",
padx = 5,
pady = 2
)
self.DPtext.set(str(self._character['DP'] - self.__usedDP))
self.DPcost = StringVar()
self._lDPcostcat = Label(master = self.window,
width = 6,
justify = CENTER,
textvariable = self.DPcost
)
self._lDPcostcat.grid(column = 1,
row = 4,
sticky = "NW",
padx = 5,
pady = 2
)
self._lDPcostskill = Label(master = self.window,
width = 6,
justify = CENTER,
textvariable = self.DPcost
)
self._lDPcostskill.grid(column = 1,
row = 5,
sticky = "NW",
padx = 5,
pady = 2
)
self.DPcost.set("---")
# add a 'take over changes' button (submit)
Button(self.window,
text = txtbutton['but_take'][self.lang],
command = self.__takeValsCat).grid(column = 5,
row = 4,
sticky = "NW"
)
# add a 'take over changes' button (submit)
Button(self.window,
text = txtbutton['but_take'][self.lang],
command = self.__takeValsSkill).grid(column = 5,
row = 5,
sticky = "NW"
)
# add a 'finalize' button to save changes and proceed.
Button(self.window,
text = txtbutton['but_fin'][self.lang],
command = self.__finalize).grid(column = 6,
row = 4,
sticky = "NW"
)
# add a 'rename' button for skills.
Button(self.window,
text = txtbutton['but_ren'][self.lang],
command = self.__renameSkill).grid(column = 6,
row = 5,
sticky = "NW"
)
def __buildTree(self):
'''!
Fills the treeview widget with skills and categories etc.
@todo this has to be implemented:
- Menu save functionality will save the current work state if not finalized.
- force a name modify of skills with +
- If not finalized clicking on items in edit skill/cat treeview will
cause an editing option. That means:
-# create a JSON structure with modified but not finalized cats/skills.
-# put it into the treeview and update it after every change
-# remove it from treeview if changes were reversed
'''
from rpgtoolbox.rolemaster import exceptions
for col in self.__treecolumns:
self.__tree.heading(col, text = col.title())
self.__tree.column(col, width = 200)
catID = {}
catNo = 0
ckeys = list(self._character['cat'].keys())
ckeys.sort()
for cat in ckeys:
if cat != None:
catID[cat] = self.__tree.insert("",
catNo,
text = cat,
values = (cat,
self._character['cat'][cat]['Progression'],
self._character['cat'][cat][self.__rmlabels['en']['costs']],
self._character['cat'][cat]['rank'],
self._character['cat'][cat]['total bonus']
),
tag = "category"
)
for skill in list(self._character['cat'][cat]['Skill'].keys()):
if skill not in exceptions:
self.__tree.insert(catID[cat],
"end",
text = skill,
values = (skill,
self._character['cat'][cat]['Skill'][skill]['Progression'],
self._character['cat'][cat][self.__rmlabels['en']['costs']],
self._character['cat'][cat]['Skill'][skill]['rank'],
self._character['cat'][cat]['Skill'][skill]['total bonus']
),
tag = cat
)
catNo += 1
self.__tree.tag_configure('category', background = 'lightblue')
self.__tree.bind('<ButtonRelease-1>', self.__selectTreeItem)
def __buildChangedTree(self):
'''!
Adds all changed cat/skill entries to self.__chgtree
@todo the following has to be done:
-# selected items have to be taken to the entry fields
-# if changes have been taken back add the DP again
@bug - slider does not work
'''
from rpgtoolbox.rolemaster import exceptions
for kids in self.__chgtree.get_children():
self.__chgtree.delete(kids)
for col in self.__treecolumns:
self.__chgtree.heading(col, text = col.title())
self.__chgtree.column(col, width = 200)
catID = {}
catNo = 0
ckeys = list(self.__changed['cat'].keys())
ckeys.sort()
dummy = "--"
for cat in ckeys:
if cat != None:
if 'Progression' in list(self.__changed['cat'][cat].keys()):
progression = self.__changed['cat'][cat]['Progression']
else:
progression = dummy
if self.__rmlabels['en']['costs'] in list(self.__changed['cat'][cat].keys()):
costs = self.__changed['cat'][cat][self.__rmlabels['en']['costs']]
else:
costs = dummy
if 'rank' in list(self.__changed['cat'][cat].keys()):
rank = self.__changed['cat'][cat]['rank']
else:
rank = "n/a"
if 'total bonus' in list(self.__changed['cat'][cat].keys()):
total = str(self.__changed['cat'][cat]['total bonus'])
else:
total = "n/a"
catID[cat] = self.__chgtree.insert("",
catNo,
text = cat,
values = (cat,
progression,
costs,
rank,
total
),
tag = "category"
)
if 'Skill' in list(self.__changed['cat'][cat].keys()):
for skill in list(self.__changed['cat'][cat]['Skill'].keys()):
if skill != "Progression" and skill != "Stats":
if 'Progression' in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
progression = self.__changed['cat'][cat]['Skill'][skill]['Progression']
if self.__rmlabels['en']['costs'] in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
costs = self.__changed['cat'][cat]['Skill'][skill][self.__rmlabels['en']['costs']]
if 'rank' in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
srank = self.__changed['cat'][cat]['Skill'][skill]['rank']
else:
srank = -1
if 'total bonus' in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
stotal = self.__changed['cat'][cat]['Skill'][skill]['total bonus']
else:
stotal = -1
self.__chgtree.insert(catID[cat],
'end',
text = skill,
values = (skill,
progression,
costs,
srank,
stotal
),
tag = cat
)
catNo += 1
self.__chgtree.tag_configure('category', background = 'lightblue')
self.__chgtree.bind('<ButtonRelease-1>', self.__selectChangedItem)
def __selectTreeItem(self, event):
'''!
Select an item from the treeview list.
@param event responding treeview event which is not used for anything.
'''
self.__curItem = self.__tree.focus()
self.DPcost.set(self.__tree.item(self.__curItem)['values'][2])
if self.__tree.item(self.__curItem)['tags'][0] == 'category':
self.catentry.set(self.__tree.item(self.__curItem)['text'])
self._skillentry.delete(0, END)
self.SpinSkillVal.set(0)
self.catcost = self.__tree.item(self.__curItem)['values'][2]
self.CatProg.set(self.__tree.item(self.__curItem)['values'][1])
self.SpinCatVal.set(self.__tree.item(self.__curItem)['values'][3])
if type(self.catcost) == type(2):
self.catcost = [self.catcost]
elif type(self.catcost) == type(""):
self.catcost = self.catcost.split(' ')
for i in range(0, len(self.catcost)):
self.catcost[i] = int(self.catcost[i])
else:
self.catcost = []
self._catspin.config(from_ = self.__tree.item(self.__curItem)['values'][3],
to = self.__tree.item(self.__curItem)['values'][3] + len(self.catcost),
)
if self.__tree.item(self.__curItem)['tags'][0] == "category":
self.catrank = str(self.__tree.item(self.__curItem)['values'][-2])
else:
self._skillentry.delete(0, END)
self._skillentry.insert(
0, self.__tree.item(self.__curItem)['values'][0])
self.skillcost = self.__tree.item(self.__curItem)['values'][2]
self.SkillProg.set(self.__tree.item(self.__curItem)['values'][1])
if type(self.skillcost) == type(2):
self.skillcost = [self.skillcost]
elif type(self.skillcost) == type(""):
self.skillcost = self.skillcost.split(' ')
for i in range(0, len(self.skillcost)):
self.skillcost[i] = int(self.skillcost[i])
else:
self.skillcost = []
self._skillspin.config(from_ = self.__tree.item(self.__curItem)['values'][3],
to = self.__tree.item(self.__curItem)['values'][3] + len(self.skillcost),
)
self.SpinSkillVal.set(self.__tree.item(self.__curItem)['values'][3])
if self.__tree.item(self.__curItem)['tags'][0] != "category":
self.skillrank = self.__tree.item(self.__curItem)['values'][-2]
if self.__tree.item(self.__curItem)['tags'][0] != "category":
linkedcat = ""
for elem in self.__tree.item(self.__curItem)['tags']:
linkedcat += elem + " "
self.linkedcat = linkedcat.strip(" ")
def __selectChangedItem(self, event):
'''!
Getting cat/skill entries from self.__chgtree for further modification.
@todo It has to be fully implemented
'''
pass
def __calcLvlup(self):
'''!
This determines the current level from the current EPs and calculates the
number of level-ups from the difference between the old EP level and the
current EP level.
'''
self._character['lvl'] = int(getLvl(self._character['exp']))
oldlvl = int(getLvl(self._character['old_exp']))
if "lvlup" in self._character.keys():
self._character['lvlup'] += self._character['lvl'] - oldlvl
else:
self._character['lvlup'] = 1
self._character['statgain'] = 0
self._character['statgain'] = int(self._character['lvlup'] * 10)
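# Illustrative example: if getLvl() maps old_exp to level 2 and exp to level 4,
# lvlup grows by 2 and statgain becomes lvlup * 10 stat gain rolls.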
def __calcDP(self):
'''!
This calculates the number of Development Points (DP) of a character per level-up.
Any remaining DPs from the last level-up will be added too.
'''
attrlist = ['Ag', 'Co', 'Me', 'Re', 'SD']
if 'DP' not in list(self._character.keys()):
self._character['DP'] = 0
if self._character.get('lvlup', 0) > 0:
devpoints = 0
for attr in attrlist:
devpoints += self._character[attr]['temp']
self._character['DP'] = int(devpoints / 5)
if self._character['Hobby Ranks'] > 0:
self._character['DP'] += self._character['Hobby Ranks']
self._character['Hobby Ranks'] = 0
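# Worked example (made-up temp values): with Ag/Co/Me/Re/SD temps of
# 90 + 90 + 85 + 95 + 90 = 450 the character receives int(450 / 5) = 90 DP;
# unused Hobby Ranks are converted into extra DPs exactly once.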
def __calcRanks(self, progression, rank):
'''!
This method calculates the rank bonuses of a category or skill. If a
single category or skill is given to this method, only this single one will
be (re-)calculated.
@param progression A list containing the progression values as int
@param rank rank value for which the bonus has to be calculated
'''
if rank == 0:
result = progression[0]
elif 0 < rank < 11:
result = progression[1] * rank
elif 10 < rank < 21:
result = progression[1] * 10 + progression[2] * (rank - 10)
elif 20 < rank < 31:
result = (progression[1] + progression[2]) * 10 + progression[3] * (rank - 20)
elif 30 < rank:
result = (progression[1] + progression[2] + progression[3]) * 10 + progression[4] * (rank - 30)
return int(result)
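# Worked example (assuming a standard skill progression of -15/3/2/1/0.5):
# rank 0 gives -15, rank 12 gives 3 * 10 + 2 * 2 = 34, and rank 32 gives
# (3 + 2 + 1) * 10 + 0.5 * 2 = 61 before int() truncation.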
def __calcTotals(self):
'''!
This method calculates all rank bonuses of the categories and skills of the
loaded character.
Essentially it is a wrapper for rpgtoolbox.rpgtools.calcTotals()
'''
self._character = calcTotals(self._character)
def __takeValsSkill(self):
'''!
This method takes added/modified skills/cats into a dict and the treeview
@todo the following has to be implemented:
-# check whether it is a new skill.
@bug
- after using the finalize button and 'edit', level-ups of already partially leveled skills cannot be leveled to the given limit.
'''
## @var oldval
# old skill rank value (before editing)
oldval = self.__tree.item(self.__curItem)['values'][3]
## @var newrank
# new/changed skills's rank value
newrank = int(self._skillspin.get())
if type(self.__tree.item(self.__curItem)['values'][1]) != type(2):
## @var currdev
# list of current development progression
currdev = self.__tree.item(self.__curItem)['values'][1].split(" ")
else:
currdev = [str(self.__tree.item(self.__curItem)['values'][1])]
for i in range(0, len(currdev)):
currdev[i] = float(currdev[i])
## @var oldtotal
# Total bonus before any manipulation.
oldtotal = self.__tree.item(self.__curItem)['values'][-1]
newtotal = self.__calcRanks(currdev, int(newrank)) - self.__calcRanks(currdev, int(oldval)) + int(oldtotal)
newbonus = self.__calcRanks(currdev, int(newrank))
# prepare category name
cat = ""
dpCosts = self.__tree.item(self.__curItem)['values'][2]
if type(dpCosts) == type(""):
dpCosts = dpCosts.split(' ')
elif type(dpCosts) == type(1):
dpCosts = [dpCosts]
for elem in self.__tree.item(self.__curItem)["tags"]:
cat += elem + " "
cat = cat.strip(" ")
skill = self.__tree.item(self.__curItem)['text']
diff = newrank - oldval
diffcost = 0
if cat not in list(self.__changed['cat'].keys()):
self.__changed['cat'][cat] = self._character['cat'][cat]
newtotal = newbonus + self.__changed['cat'][cat]['total bonus']
if 'Skill' in list(self.__changed['cat'][cat].keys()):
if 'Progression' in list(self.__changed['cat'][cat]['Skill'].keys()):
del(self.__changed['cat'][cat]['Skill']['Progression'])
if skill in list(self.__changed['cat'][cat]['Skill'].keys()):
diff = newrank - self.__changed['cat'][cat]['Skill'][skill]['rank']
diffcost = 0
if 'lvlups' not in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
##@var lowval
# if a skill was not leveled up maximum times this is a marker where to
# start with further leveling.
lowval = 0
elif len(dpCosts) > self.__changed['cat'][cat]['Skill'][skill]['lvlups'] >= 0:
lowval = self.__changed['cat'][cat]['Skill'][skill]['lvlups']
else:
lowval = diff
if diff >= 0:
for i in range(lowval, diff):
diffcost += int(dpCosts[i])
else:
for i in range(diff, lowval):
diffcost -= int(dpCosts[i])
if (self._character['DP'] - (self.__usedDP + diffcost)) >= 0:
if 'lvlups' not in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank
else:
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank - self.__changed['cat'][cat]['Skill'][skill]['lvlups']
self.__changed['cat'][cat]["Skill"][skill]['lvlups'] = diff
self.__changed['cat'][cat]['Skill'][skill]['total bonus'] = newtotal
self.__usedDP += diffcost
else:
self.__info(screenmesg['epwins_no_dp'][self.lang])
else:
if diff >= 0:
for i in range(0, diff):
diffcost += int(dpCosts[i])
else:
for i in range(diff, 0):
diffcost -= int(dpCosts[i])
if (self._character['DP'] - (self.__usedDP + diffcost)) >= 0:
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank
self.__changed['cat'][cat]["Skill"][skill]['lvlups'] = diff
self.__changed['cat'][cat]['Skill'][skill]['total bonus'] = newtotal
self.__usedDP += diffcost
else:
self.__info(screenmesg['epwins_no_dp'][self.lang])
else:
if self.__changed['cat'][cat]['Skill'][skill]['rank'] > newrank:
diff = -diff
diffcost = 0
if diff >= 0:
for i in range(0, diff):
diffcost += int(dpCosts[i])
else:
for i in range(diff, 0):
diffcost -= int(dpCosts[i])
if (self._character['DP'] - (self.__usedDP + diffcost)) >= 0:
if 'lvlups' not in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank
else:
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank - self.__changed['cat'][cat]['Skill'][skill]['lvlups']
self.__changed['cat'][cat]["Skill"][skill]['lvlups'] = diff
self.__changed['cat'][cat]['Skill'][skill]['total bonus'] = newtotal
self.__usedDP += diffcost
else:
self.__info(screenmesg['epwins_no_dp'][self.lang])
else:
newtotal = newbonus + self.__changed['cat'][cat]['total bonus']
if skill in list(self.__changed['cat'][cat]['Skill'].keys()):
if self.__changed['cat'][cat]['Skill'][skill]['rank'] > newrank:
diff = newrank - self.__changed['cat'][cat]['Skill'][skill]['rank']
diffcost = 0
if diff >= 0:
for i in range(0, diff):
diffcost += int(dpCosts[i])
else:
for i in range(diff, 0):
diffcost -= int(dpCosts[i])
if (self._character['DP'] - (self.__usedDP + diffcost)) >= 0:
if 'lvlups' not in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank
else:
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank - self.__changed['cat'][cat]['Skill'][skill]['lvlups']
self.__changed['cat'][cat]["Skill"][skill]['lvlups'] = diff
self.__changed['cat'][cat]['Skill'][skill]['total bonus'] = newtotal
self.__usedDP += diffcost
else:
messg = messageWindow()
messg.showinfo(screenmesg['epwins_no_dp'][self.lang])
else:
self.__changed['cat'][cat]['Skill'] = self._character['cat'][cat]['Skill']
if skill not in list(self.__changed['cat'][cat]['Skill'].keys()):
self.__changed['cat'][cat]['Skill'][skill] = {}
diffcost = 0
if diff >= 0:
for i in range(0, diff):
diffcost += int(dpCosts[i])
else:
for i in range(diff, 0):
diffcost -= int(dpCosts[i])
if (self._character['DP'] - (self.__usedDP + diffcost)) >= 0:
if 'lvlups' not in list(self.__changed['cat'][cat]['Skill'][skill].keys()):
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank
else:
self.__changed['cat'][cat]['Skill'][skill]['rank'] = newrank - self.__changed['cat'][cat]['Skill'][skill]['lvlups']
self.__changed['cat'][cat]["Skill"][skill]['lvlups'] = diff
self.__changed['cat'][cat]['Skill'][skill]['total bonus'] = newtotal
self.__usedDP += diffcost
else:
messg = messageWindow()
messg.showinfo(screenmesg['epwins_no_dp'][self.lang])
self.DPtext.set(str(self._character['DP'] - self.__usedDP))
self.__buildChangedTree()
def __takeValsCat(self):
'''!
This method takes added/modified skills/cats into a dict and the treeview
'''
## @var currcat
# current category name
currcat = self.__tree.item(self.__curItem)['text']
## @var oldval
# old category's rank value
oldval = self.__tree.item(self.__curItem)['values'][3]
## @var newval
# new/changed category's rank value
newval = int(self._catspin.get())
## @var currdev
# list of current development progression
currdev = self.__tree.item(self.__curItem)['values'][1].split(" ")
for i in range(0, len(currdev)):
currdev[i] = float(currdev[i])
## @var oldtotal
# Total bonus before any manipulation.
oldtotal = self.__tree.item(self.__curItem)['values'][-1]
## @var newtotal
# Total bonus after manipulation
newtotal = self.__calcRanks(currdev, int(newval)) - self.__calcRanks(currdev, int(oldval)) + int(oldtotal)
# calc DP consume:
dpCosts = self.__tree.item(self.__curItem)['values'][2]
if type(dpCosts) == type(""):
dpCosts = dpCosts.split(' ')
elif type(dpCosts) == type(1):
dpCosts = [dpCosts]
if currcat not in list(self.__changed['cat'].keys()) and currcat in list(self._character['cat'].keys()):
diff = newval - oldval
self.__changed['cat'][currcat] = self._character["cat"][currcat]
self.__changed['cat'][currcat]['rank'] = newval
self.__changed['cat'][currcat]['lvlups'] = diff
lowval = 0
else:
self.__changed['cat'][currcat]['rank'] = newval
if "lvlups" in list(self.__changed['cat'][currcat].keys()):
lowval = self.__changed['cat'][currcat]['lvlups']
if self.__changed['cat'][currcat]['lvlups'] < len(dpCosts):
diff = newval - self.__changed['cat'][currcat]['rank']
else:
diff = 0
newval = oldval
newtotal = self.__calcRanks(currdev, int(newval)) - self.__calcRanks(currdev, int(oldval)) + int(oldtotal)
else:
lowval = 0
diff = newval - self.__changed['cat'][currcat]['rank']
self.__changed['cat'][currcat]['lvlups'] = diff
bkpusedDP = int(self.__usedDP)
if diff > 0:
for i in range(lowval, diff):
self.__usedDP += int(dpCosts[i])
elif diff < 0:
for i in range(diff, lowval):
self.__usedDP -= int(dpCosts[i])
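# Illustrative DP consumption (hypothetical cost list): raising a category by
# two ranks with dpCosts ['2', '6'] and lowval 0 adds 2 + 6 = 8 to
# self.__usedDP; a negative diff walks the list with negative indices and
# subtracts the costs again.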
if (self._character['DP'] - self.__usedDP) >= 0:
if currcat not in list(self.__changed['cat'].keys()) and currcat in list(self._character['cat'].keys()):
self.__changed['cat'][currcat] = self._character['cat'][currcat]
if "Skill" in list(self._character['cat'][currcat].keys()):
self.__changed['cat'][currcat]['Skill'] = self._character['cat'][currcat]['Skill']
else:
self.__changed['cat'][currcat]['Skill'] = {}
if "lvlups" in list(self.__changed['cat'][currcat].keys()):
self.__changed['cat'][currcat]['rank'] = newval
if self.__changed['cat'][currcat]['lvlups'] < newval:
self.__changed['cat'][currcat]['lvlups'] += 1
else:
self.__changed['cat'][currcat]['rank'] = newval
self.__changed['cat'][currcat]['total bonus'] = newtotal
else:
self.__usedDP = bkpusedDP
messg = messageWindow()
messg.showinfo(screenmesg['epwins_no_dp'][self.lang])
self.DPtext.set(str(self._character['DP'] - self.__usedDP))
self.__buildChangedTree()
def __finalize(self):
'''!
This method finalizes and saves all changes into character data
The changes done before are saved in the file <charname>_changes.json
'''
from rpgtoolbox import rolemaster as rm
self.profs = rm.choseProfession(self.lang)
#refreshing/recalculating stat bonusses
self._character = rm.refreshStatBonus(self._character)
# remove usedDP from character's available DP
self._character['DP'] -= self.__usedDP
self._character["soul dep"] = rm.raceHealingFactors[self._character["race"]]["soul dep"]
self._character["Stat Loss"] = rm.raceHealingFactors[self._character["race"]]["Stat Loss"]
self._character["Recovery"] = rm.raceHealingFactors[self._character["race"]]["Recovery"]
if self._character['DP'] == 0 and self._character['lvlup'] > 0:
self._character['lvlup'] -= 1
for c in self._character["cat"].keys():
self._character['cat'][c]['lvlups'] = 0
for sk in self._character["cat"][c]['Skill'].keys():
if type(self._character["cat"][c]['Skill'][sk]) == type({}):
self._character["cat"][c]['Skill'][sk]['lvlups'] = 0
self._character["old_exp"] = int(self._character['exp'])
for cat in list(self.__changed["cat"].keys()):
self._character['cat'][cat]["rank"] = self.__changed["cat"][cat]["rank"]
self._character['cat'][cat]["total bonus"] = self.__changed['cat'][cat]["total bonus"]
for skill in list(self.__changed["cat"][cat]["Skill"].keys()):
if skill != "Progression" and skill != "Stats":
if skill not in list(self._character["cat"][cat]["Skill"].keys()):
self._character["cat"][cat]["Skill"][skill] = self.__changed['cat'][cat]["Skill"][skill]
else:
self._character["cat"][cat]["Skill"][skill]["rank"] = self.__changed["cat"][cat]["Skill"][skill]["rank"]
self._character["cat"][cat]["Skill"][skill]["total bonus"] = self.__changed["cat"][cat]["Skill"][skill]["total bonus"]
# setting prof bonusses again
for cat in self._character['cat'].keys():
for pb in self.profs[self._character['prof']]['Profession Bonusses']:
if pb in cat:
self._character['cat'][cat]['prof bonus'] = int(self.profs[self._character['prof']]['Profession Bonusses'][pb])
break
else:
self._character['cat'][cat]['prof bonus'] = 0
handlemagic.updateSL(character = self._character, datadir = self.spath)
# save character data
self.__save('.json')
if self._character['DP'] > 0:
# save changes
writeJSON("{}/{}/{}_changes.json".format(self.spath, self._character['player'], self._character['name']), self.__changed)
else:
if os.path.isfile("{}/{}/{}_changes.json".format(self.spath, self._character['player'], self._character['name'])):
os.remove("{}/{}/{}_changes.json".format(self.spath, self._character['player'], self._character['name']))
self.window.destroy()
if "background" in self._character.keys():
self.window2 = MainWindow(lang = self.lang, storepath = self.spath, char = self._character)
else:
self.window2 = charInfo(self.lang, self.spath, self._character)
def __renameSkill(self):
'''!
This method renames skills marked with '+' and adds new ones
----
@todo check whether values exist in self.__changed. If so, take the rank value from self.__changed.
'''
from rpgtoolbox.rolemaster import progressionType
self._character['DP'] -= self.__usedDP
self.__usedDP = 0
curitem = self.__tree.item(self.__curItem)
skillentry = self._skillentry.get()
cat = ""
for elem in curitem['tags']:
cat += elem + " "
cat = cat.strip(" ")
skill = {skillentry: {"item bonus":0,
"rank": int(curitem["values"][3]),
"rank bonus": 0,
"spec bonus": 0,
"total bonus": 0,
"Progression": list(progressionType["standard skill"]), #list(self._character['cat'][cat]["Skill"]['Progression']),
"Costs": list(self._character['cat'][cat]['Costs'])
}
}
if skillentry in self.__changed['cat'][cat]['Skill'].keys():
skill[skillentry]["rank"] = self.__changed['cat'][cat]['Skill'][skillentry]["rank"]
self._character['cat'][cat]["Skill"][skillentry] = skill[skillentry]
dummyDP = int(self._character['DP'])
for entry in ["item bonus", "rank"]:
self._character['cat'][cat]["Skill"][curitem['text']][entry] = 0
self.__calcTotals()
self.__buildWin()
self.__buildTree()
self.__buildChangedTree()
self._character['DP'] = dummyDP
def __save(self, ending = '.snap'):
'''!
This method quickly saves a snapshot of current character's data into
a file.
@param ending ending of the filename
'''
pathfile = self.spath + "/" + \
self._character['player'] + "/" + self._character['name'] + ending
writeJSON(pathfile, self._character)
def __info(self, text = ""):
'''!
This method just opens a message window to display information.
@param text the text to display
'''
self.__mesg = messageWindow(self.lang)
self.__mesg.showinfo(str(text))
def __helpAWin(self):
'''!
Help information about this window.
@todo has to be implemented
'''
self.notdoneyet("helpAWin")
class charInfo(blankWindow):
"""!
This is the class for the window with all the background information such as hair color, height etc.
"""
def __init__(self, lang = 'en', storepath = os.getcwd() + "/data", char = None):
"""!
Class constructor charInfo
@param lang The chosen language for window's and button's
texts. At the moment, only English (en, default
value) and German (de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@param char Character as JSON/dictionary
"""
if storepath == None:
self.spath = os.getcwd() + "/data"
logger.info('Set storepath to %s' % self.spath)
else:
self.spath = storepath
logger.info('charInfo: storepath set to %s' %
(storepath))
self.lang = lang
self._character = dict(calcTotals(char))
self.mypath = self.spath + "/default/pics"
self.cmask = [txtwin['json_files'][self.lang],
txtwin['grp_files'][self.lang],
txtwin['all_files'][self.lang]
]
self.pmask = [txtwin['jpg_files'][self.lang],
txtwin['jpeg_files'][self.lang],
txtwin['png_files'][self.lang]
]
if "piclink" in list(self._character.keys()) and self._character["piclink"] != "":
self.charpic = self._character["piclink"]
else:
self.charpic = "./data/default/pics/default.jpg"
self._character['piclink'] = "./data/default/pics/default.jpg"
blankWindow.__init__(self, self.lang)
self.window.title("%s - %s (%s)" % (wintitle['background'][self.lang],
self._character['name'],
self._character['prof']
)
)
self.filemenu = Menu(master = self.menu)
self.__addFileMenu()
self.__addEditMenu()
self.__addHelpMenu()
self.__buildWin()
self.window.mainloop()
def __addFileMenu(self):
'''!
Adds a file menu to menu bar.
'''
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_command(label = submenu['file'][self.lang]['export'],
command = self.notdoneyet)
self.filemenu.add_command(label = submenu['file'][self.lang]['print'],
command = self.notdoneyet)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['save'],
command = self.notdoneyet)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
def __addEditMenu(self):
'''!
This adds an edit menu to the menu bar.
'''
self.edtmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_edit'][self.lang],
menu = self.edtmenu)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['add_pic'],
command = self.__addPicMenue)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['add_story'],
command = self.__addStory)
self.edtmenu.add_command(label = submenu['edit'][self.lang]['statgain'],
command = self.__statGainRoll)
def __addHelpMenu(self):
'''!
Adds a help menu entry to menu bar.
'''
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['win'],
command = self.__helpWin)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
def __closewin(self):
'''!
A method to destroy the current window and go back to MainWindow.
'''
self.window.destroy()
logger.debug("charInfo: Call MainWindow(lang={},storepath={},char={}".format(self.lang, self.spath, self._character))
self.window = MainWindow(lang = self.lang, storepath = self.spath , char = self._character)
def __openFile(self):
"""!
This method opens a dialogue window (Tk) for opening files.
The content of the opened file will be stored in \e self.char (character)
or \e self.grp (group).
"""
self.__filein = askopenfilename(filetypes = self.cmask,
initialdir = self.mypath)
if self.__filein != "":
with open(self.__filein, 'r') as filecontent:
                if self.__filein[-4:].lower() == "json":
                    self.char = json.load(filecontent)
                    logger.debug("charInfo:(character) content read from {}.".format(self.__filein))
                elif self.__filein[-3:].lower() == "grp":
                    self.grp = json.load(filecontent)
                    logger.debug("charInfo:(group) content read from {}.".format(self.__filein))
else:
msg = messageWindow()
msg.showinfo(errmsg['wrong_type'][self.lang])
logger.warn(errmsg['wrong_type'][self.lang])
pass
def __buildWin(self):
'''!
Builds the window's elements.
'''
self.background = {}
for elem in charattribs.keys():
self.background[elem] = StringVar()
alreadyset = False
if "background" in list(self._character.keys()):
alreadyset = True
# row 0; column 0 -3
Label(master = self.window,
width = 15,
text = self._character["player"]
).grid(column = 0, row = 0)
Label(master = self.window,
width = 30,
text = self._character['name']
).grid(column = 1, row = 0)
Label(master = self.window,
width = 20,
text = self._character['prof']
).grid(column = 2, row = 0)
Label(master = self.window,
width = 20,
text = self._character['race']
).grid(column = 3, row = 0)
# row 1 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['sex'][self.lang] + ":"
).grid(column = 0,
row = 1)
Entry(master = self.window,
width = 35,
textvariable = self.background['sex']
).grid(column = 1, row = 1)
# row 2 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['hair'][self.lang] + ":"
).grid(column = 0, row = 2)
Entry(master = self.window,
width = 35,
textvariable = self.background['hair']
).grid(column = 1, row = 2)
# row 3 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['eyes'][self.lang] + ":"
).grid(column = 0, row = 3)
Entry(master = self.window,
width = 35,
textvariable = self.background['eyes']
).grid(column = 1, row = 3)
# row 4 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['skin'][self.lang] + ":"
).grid(column = 0, row = 4)
Entry(master = self.window,
width = 35,
textvariable = self.background['skin']
).grid(column = 1, row = 4)
# row 5 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['height'][self.lang] + ":"
).grid(column = 0, row = 5)
Entry(master = self.window,
width = 35,
textvariable = self.background['height']
).grid(column = 1, row = 5)
# row 6 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['weight'][self.lang] + ":"
).grid(column = 0, row = 6)
Entry(master = self.window,
width = 35,
textvariable = self.background['weight']
).grid(column = 1, row = 6)
# row 7 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['app_age'][self.lang] + ":"
).grid(column = 0, row = 7)
Entry(master = self.window,
width = 35,
textvariable = self.background['app_age']
).grid(column = 1, row = 7)
# row 8 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['act_age'][self.lang] + ":"
).grid(column = 0, row = 8)
Entry(master = self.window,
width = 35,
textvariable = self.background['act_age']
).grid(column = 1, row = 8)
# row 9 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['parents'][self.lang] + ":"
).grid(column = 0, row = 9)
Entry(master = self.window,
width = 35,
textvariable = self.background['parents']
).grid(column = 1, row = 9)
# row 10 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['siblings'][self.lang] + ":"
).grid(column = 0, row = 10)
Entry(master = self.window,
width = 35,
textvariable = self.background['siblings']
).grid(column = 1, row = 10)
# row 11 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['partner'][self.lang] + ":"
).grid(column = 0, row = 11)
Entry(master = self.window,
width = 35,
textvariable = self.background['partner']
).grid(column = 1, row = 11)
# row 12 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['kids'][self.lang] + ":"
).grid(column = 0, row = 12)
Entry(master = self.window,
width = 35,
textvariable = self.background['kids']
).grid(column = 1, row = 12)
# row 13 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['deity'][self.lang] + ":"
).grid(column = 0, row = 13)
Entry(master = self.window,
width = 35,
textvariable = self.background['deity']
).grid(column = 1, row = 13)
# row 14 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['home'][self.lang] + ":"
).grid(column = 0, row = 14)
Entry(master = self.window,
width = 35,
textvariable = self.background['home']
).grid(column = 1, row = 14)
#set values into Entry widgets if there are any
if alreadyset:
for elem in self._character['background'].keys():
self.background[elem].set(self._character['background'][elem])
# row 15-19 column 0-1
Label(master = self.window,
width = 15,
text = charattribs['pers'][self.lang] + ":"
).grid(column = 0, row = 15, columnspan = 2)
self.tw1 = Text(master = self.window,
width = 50,
height = 20,
wrap = WORD
)
if "history" in list(self._character.keys()):
self.tw1.insert(1.0, self._character['history'])
self.tw1.grid(column = 0, row = 16, columnspan = 2)
# row 15-19 column 2-3
Label(master = self.window,
width = 15,
text = charattribs['motiv'][self.lang] + ":"
).grid(column = 2, row = 15, columnspan = 2)
self.tw2 = Text(master = self.window,
width = 50,
height = 20,
wrap = WORD
)
if "motivation" in list(self._character.keys()):
self.tw2.insert(1.0, self._character['motivation'])
self.tw2.grid(column = 2, row = 16, columnspan = 2)
Button(self.window,
text = txtbutton['but_story'][self.lang],
command = self.__addStory).grid(column = 0,
row = 17,
sticky = "NW",
columnspan = 2
)
Button(self.window,
text = txtbutton['but_sav'][self.lang] + "\n" + txtbutton['but_quit'][self.lang],
command = self.__saveAndExit).grid(column = 3,
row = 17,
sticky = "NW",
columnspan = 2
)
#charpic row 1-8 column 2-4
#BUG pic does not work
from PIL import Image, ImageTk
self.cpic = ImageTk.PhotoImage(Image.open(self.charpic).resize((310, 310), Image.ANTIALIAS))
self.picLabel = Label(master = self.window,
image = self.cpic
)
self.picLabel.grid(column = 2,
row = 1,
columnspan = 2,
rowspan = 14,
sticky = "NEWS",
padx = 5,
pady = 5)
self.picLabel.bind("<Button-1>", self.__addPic)
def __addPic(self, event):
'''!
This method adds the link to a character's picture (jpg/png)
'''
self.charpic = askopenfilename(filetypes = self.pmask,
initialdir = self.mypath)
if type(self.charpic) == type(""):
self._character['piclink'] = self.charpic
from PIL import Image, ImageTk
self.cpic = ImageTk.PhotoImage(Image.open(self.charpic).resize((300, 300), Image.ANTIALIAS))
self.picLabel.configure(image = self.cpic)
def __addPicMenue(self):
'''!
This method adds the link to a character's picture (jpg/png)
'''
self.charpic = askopenfilename(filetypes = self.pmask,
initialdir = self.mypath)
if type(self.charpic) == type(""):
self._character['piclink'] = self.charpic
from PIL import Image, ImageTk
self.cpic = ImageTk.PhotoImage(Image.open(self.charpic).resize((300, 300), Image.ANTIALIAS))
self.picLabel.configure(image = self.cpic)
def __statGainRoll(self):
'''!
This opens a window for Stats Gain Roll for the character.
'''
self.window.destroy()
self.window2 = statGainWin(lang = self.lang, storepath = self.spath, char = self._character)
def __addStory(self):
self.notdoneyet("charInfo.addStory: \n not done yet")
def __saveAndExit(self):
'''
This method gets all data from entries, puts them into character data struct
and saves the updated character.
'''
if self.spath[-1] not in ["\\", "/"]:
self.spath += "/"
bg = {}
self._character["motivation"] = self.tw2.get("0.0", END)
self._character['history'] = self.tw1.get("0.0", END)
for el in list(self.background.keys()):
bg[el] = self.background[el].get()
self._character["background"] = bg
self._character["background"]["motiv"] = self.tw2.get("0.0", END)
self._character['background']['pers'] = self.tw1.get("0.0", END)
with open(self.spath + '/' + self._character['player'] + '/' + self._character['name'] + ".json", "w") as outfile:
json.dump(self._character,
outfile,
sort_keys = True,
indent = 4,
ensure_ascii = False)
self.window.destroy()
self.window = MainWindow(lang = self.lang, storepath = self.spath, char = self._character)
def __helpWin(self):
self.notdoneyet("charInfo.__helpWin:\ņ\n not done yet")
class statGainWin(blankWindow):
"""
This is the class for the window to execute Stat gain rolls.
"""
def __init__(self, lang = 'en', storepath = os.getcwd() + "/data", char = None):
"""!
Class constructor statGainWin
@param lang The chosen language for window's and button's
texts. At the moment, only English (en, default
value) and German (de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@param char Character as JSON/dictionary
"""
        if storepath == None:
            self.spath = os.getcwd() + "/data"
            logger.debug('Set storepath to %s' % (self.spath))
        elif "/data" not in storepath:
            self.spath = os.getcwd() + "/data"
        else:
            self.spath = storepath
            logger.debug('statGainWin: storepath set to %s' % (storepath))
self.lang = lang
self._character = dict(calcTotals(char))
self.statgain = 10
if "statgain" in self._character.keys():
if self._character['statgain'] > 0:
self.statgain = int(self._character['statgain'])
else:
self.statgain = 0
        self.mypath = self.spath + '/' + self._character['player'] + '/'
self.cmask = [txtwin['json_files'][self.lang],
txtwin['grp_files'][self.lang],
txtwin['all_files'][self.lang]
]
blankWindow.__init__(self, self.lang)
self.window.title("%s - %s (%s)" % (wintitle['rm_statgain'][self.lang],
self._character['name'],
self._character['prof']
)
)
self.filemenu = Menu(master = self.menu)
self.__addFileMenu()
self.__addHelpMenu()
self.__buildWin()
self.window.mainloop()
def __addFileMenu(self):
'''
Adds a file menu to menu bar.
'''
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['save'] + " & " + submenu['file'][self.lang]['close'],
command = self.saveData)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
self.__stats = stats
def __addHelpMenu(self):
'''!
Adds a help menu entry to menu bar.
'''
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['win'],
command = self.__helpWin)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
def __closewin(self):
'''!
A method to destroy the current window and go back to MainWindow.
'''
self.window.destroy()
        self.window = MainWindow(lang = self.lang, storepath = self.spath, char = self._character)
def __helpWin(self):
self.notdoneyet("charInfo.__helpWin:\ņ\n not done yet")
def __buildWin(self):
'''!
Builds the window's elements.
'''
Label(master = self.window,
width = 20,
text = self._character["player"]
).grid(column = 0, row = 0)
Label(master = self.window,
width = 20,
text = self._character['name']
).grid(column = 1, row = 0)
Label(master = self.window,
width = 20,
text = self._character['race']
).grid(column = 2, row = 0)
Label(master = self.window,
width = 20,
text = self._character['prof']
).grid(column = 3, row = 0)
Label(master = self.window,
width = 20,
text = "Stats"
).grid(column = 0, row = 1, pady = 5, padx = 2)
Label(master = self.window,
width = 20,
text = "Temp"
).grid(column = 1, row = 1, pady = 5, padx = 2)
Label(master = self.window,
width = 20,
text = "Pot"
).grid(column = 2, row = 1, pady = 5, padx = 2)
Label(master = self.window,
width = 20,
text = labels['new_val'][self.lang]
).grid(column = 3, row = 1, pady = 5, padx = 2)
self.var = {}
self.__cb = {}
self.__nl = {}
self.__nv = {}
#row and column
r = 2
self.__sgr = StringVar()
self.__sgr.set(labels['count'][self.lang] + ": " + str(self.statgain))
for s in self.__stats:
self.var[s] = IntVar()
self.__nv[s] = StringVar()
self.__nv[s].set("--")
self.__cb[s] = Checkbutton(master = self.window,
text = self._character[s]["name"],
variable = self.var[s]
)
self.__cb[s].grid(column = 0, row = r, pady = 2, padx = 2, sticky = W)
Label(master = self.window,
width = 20,
text = str(self._character[s]['temp'])
).grid(column = 1, row = r, pady = 2, padx = 2, sticky = E)
Label(master = self.window,
width = 20,
text = str(self._character[s]['pot'])
).grid(column = 2, row = r, pady = 2, padx = 2, sticky = E)
Label(master = self.window,
width = 20,
textvariable = self.__nv[s],
).grid(column = 3, row = r, pady = 2, padx = 2, sticky = E)
r += 1
if self.statgain == 10:
self.var[s].set(1)
else:
self.var[s].set(0)
Button(self.window,
text = txtbutton['but_all'][self.lang],
command = self.__selectAll,
width = 20
).grid(column = 0,
row = r,
pady = 2,
sticky = "NW"
)
Button(self.window,
text = txtbutton['but_none'][self.lang],
command = self.__selectNone,
width = 20
).grid(column = 1,
row = r,
pady = 2,
sticky = "NW"
)
Label(master = self.window,
width = 20,
textvariable = self.__sgr
).grid(column = 2,
row = r)
Button(self.window,
text = txtbutton['but_roll'][self.lang],
command = self.statGainRoll,
width = 20
).grid(column = 3,
row = r,
pady = 2,
sticky = "NW"
)
def __selectAll(self):
'''!
This method selects all Checkbuttons
'''
for s in self.__stats:
self.var[s].set(1)
def __selectNone(self):
'''!
This method unselects all Checkbuttons
'''
for s in self.__stats:
self.var[s].set(0)
def statGainRoll(self):
'''!
This method does Stat Gain Rolls for all selected stats
'''
from rpgtoolbox.rolemaster import statbonus
from rpgtoolbox.rpgtools import dice, statGain
self.newstats = {}
for s in self.__stats:
doGainRoll = self.var[s].get()
if doGainRoll and self.statgain > 0:
d = dice(10, 2)
self.newstats[s] = statGain(d[0], d[1], self._character[s]['temp'], self._character[s]['pot'])
self.__nv[s].set(self.newstats[s])
self.statgain -= 1
self.__sgr.set(labels['count'][self.lang] + ": " + str(self.statgain))
if self.statgain <= 0:
for elem in self.newstats.keys():
self._character[elem]['temp'] = int(self.newstats[elem])
self._character[elem]['std'] = statbonus(self._character[elem]['temp'])
self._character[elem]['total'] = self._character[elem]['std'] + self._character[elem]['race'] + self._character[elem]['spec']
self._character['statgain'] = 0
Button(self.window,
text = txtbutton['but_sav'][self.lang] + " & " + txtbutton['but_quit'][self.lang],
command = self.saveData,
width = 40
).grid(column = 1,
row = 13,
columnspan = 2,
pady = 3,
sticky = "NEWS"
)
def saveData(self):
'''!
        This recalculates the character's category and skill bonuses, saves the character and goes back to the main window.
'''
self._character = dict(calcTotals(self._character))
with open(self.spath + '/' + self._character['player'] + '/' + self._character['name'] + ".json", "w") as outfile:
json.dump(self._character,
outfile,
sort_keys = True,
indent = 4,
ensure_ascii = False)
self.window.destroy()
self.window = MainWindow(lang = self.lang, storepath = self.spath, char = self._character)
class editEPWin(blankWindow):
'''!
    This window class generates a window for manually entering new EPs into the character data.
'''
def __init__(self, lang = 'en', storepath = os.getcwd() + "/data", char = None):
"""!
Class constructor editEPWin
@param lang The chosen language for window's and button's
texts. At the moment, only English (en, default
value) and German (de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@param char Character as JSON/dictionary
"""
        if storepath == None:
            self.spath = os.getcwd() + "/data"
            logger.debug('Set storepath to %s' % (self.spath))
        else:
            self.spath = storepath
            logger.debug('editEPWin: storepath set to %s' % (storepath))
self.lang = lang
self._character = dict(calcTotals(char))
        self.mypath = self.spath + '/' + self._character['player'] + '/'
self.cmask = [txtwin['json_files'][self.lang],
txtwin['grp_files'][self.lang],
txtwin['all_files'][self.lang]
]
blankWindow.__init__(self, self.lang)
self.window.title("%s - %s (%s)" % (wintitle['rm_statgain'][self.lang],
self._character['name'],
self._character['prof']
)
)
self.filemenu = Menu(master = self.menu)
self.__addFileMenu()
self.__addHelpMenu()
self.__buildWin()
self.window.mainloop()
def __addFileMenu(self):
'''!
Adds a file menu to menu bar.
'''
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['save'],
command = self.saveData)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
self.__stats = stats
def __addHelpMenu(self):
'''!
Adds a help menu entry to menu bar.
'''
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['win'],
command = self.__helpWin)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
def __closewin(self):
'''!
A method to destroy the current window and go back to MainWindow.
'''
self.window.destroy()
self.window = MainWindow(lang = self.lang, char = self._character, storepath = self.spath)
def __openFile(self):
"""!
This method opens a dialogue window (Tk) for opening files.
The content of the opened file will be saved in \e file
\e content as an array.
"""
self.__filein = askopenfilename(filetypes = self.cmask,
initialdir = self.mypath)
if self.__filein != "":
with open(self.__filein, 'r') as filecontent:
if self.__filein[-4:].lower() == "json":
self.char = json.load(filecontent)
                elif self.__filein[-3:].lower() == "grp":
self.grp = json.load(filecontent)
else:
msg = messageWindow()
msg.showinfo(errmsg['wrong_type'][self.lang])
logger.warn(errmsg['wrong_type'][self.lang])
pass
def __helpWin(self):
self.notdoneyet("charInfo.__helpWin:\ņ\n not done yet")
def saveData(self):
'''!
        This recalculates the character's category and skill bonuses and saves the character.
'''
self._character = dict(calcTotals(self._character))
with open(self.spath + '/' + self._character['player'] + '/' + self._character['name'] + ".json", "w") as outfile:
json.dump(self._character,
outfile,
sort_keys = True,
indent = 4,
ensure_ascii = False)
def __buildWin(self):
'''!
Builds the window's elements.
'''
self.epval = StringVar()
self.inputval = StringVar()
self.lvlval = StringVar()
self.statbar = StringVar()
Label(master = self.window,
width = 20,
text = "{}: {}".format(labels["player"][self.lang], self._character['player'])
).grid(column = 0,
row = 0,
padx = 3,
pady = 5)
Label(master = self.window,
width = 20,
text = "{}: {}".format(labels["name"][self.lang], self._character['name'])
).grid(column = 1,
row = 0,
padx = 3,
pady = 5)
Label(master = self.window,
width = 20,
text = "{}: {}".format(labels["prof"][self.lang], self._character['prof'])
).grid(column = 2,
row = 0,
padx = 3,
pady = 5)
Label(master = self.window,
width = 20,
textvariable = self.lvlval
).grid(column = 3,
row = 0,
padx = 3,
pady = 5)
self.lvlval.set("{}: {}".format(labels["lvl"][self.lang], self._character['lvl']))
Label(master = self.window,
width = 20,
textvariable = self.epval
).grid(column = 0,
row = 1,
padx = 3,
pady = 5)
self.epval.set("EP: {}".format(self._character['exp']))
Label(master = self.window,
width = 20,
text = "+ {}:".format(labels["new_ep"][self.lang])
).grid(column = 1,
row = 1,
padx = 3,
pady = 5,
sticky = EW)
Entry(master = self.window,
width = 20,
textvariable = self.inputval
).grid(column = 2,
row = 1,
padx = 3,
pady = 5,
sticky = W)
Button(self.window,
text = txtbutton['but_take'][self.lang],
width = 20,
command = self.__add).grid(column = 3, row = 1)
Label(master = self.window,
width = 20,
textvariable = self.statbar,
relief = SUNKEN
).grid(column = 0,
row = 3,
pady = 10,
columnspan = 4,
sticky = EW)
self.statbar.set(screenmesg["input_eps"][self.lang])
def __add(self):
'''!
This method adds new EPs to character's old EP count.
'''
newep = int(self.inputval.get())
self._character["exp"] += newep
self.epval.set("EP: {}".format(self._character['exp']))
# calc new level if any
self._character['lvl'] = int(getLvl(self._character['exp']))
self.lvlval.set("{}: {}".format(labels["lvl"][self.lang], self._character['lvl']))
#save new EP data
self.saveData()
self.statbar.set(screenmesg["file_saved"][self.lang])
class BGOselectWin(blankWindow):
'''!
    This window class displays the choices available for a character's BGOs.
    @todo The following has to be implemented
- window building
- special items
- more money
- spec Bonus Cat/Skill
- Edges/Flaws
'''
def __init__(self, lang = 'en', storepath = os.getcwd() + "/data", char = None):
"""!
Class constructor BGOselectWin
@param lang The chosen language for window's and button's
texts. At the moment, only English (en, default
value) and German (de) are supported.
@param title title of the window
@param storepath path where things like options have to be stored
@param char Character as JSON/dictionary
"""
        if storepath == None:
            self.spath = os.getcwd() + "/data"
            logger.debug('Set storepath to %s' % (self.spath))
        else:
            self.spath = storepath
            logger.debug('BGOselectWin: storepath set to %s' % (storepath))
self.lang = lang
self._character = dict(calcTotals(char))
        self.mypath = self.spath + '/' + self._character['player'] + '/'
self.cmask = [txtwin['json_files'][self.lang],
txtwin['grp_files'][self.lang],
txtwin['all_files'][self.lang]
]
blankWindow.__init__(self, self.lang)
self.window.title("%s - %s (%s)" % (wintitle['rm_statgain'][self.lang],
self._character['name'],
self._character['prof']
)
)
self.filemenu = Menu(master = self.menu)
self.__addFileMenu()
self.__addSelectMenu()
self.__addHelpMenu()
self.__buildWin()
self.window.mainloop()
def __addFileMenu(self):
'''!
Adds a file menu to menu bar.
'''
self.menu.add_cascade(label = txtmenu['menu_file'][self.lang],
menu = self.filemenu)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['open'],
command = self.__openFile)
self.filemenu.add_separator()
self.filemenu.add_command(label = submenu['file'][self.lang]['close'],
command = self.__closewin)
self.__stats = stats
    def __addSelectMenu(self):
        '''!
        This adds a select menu to the menu bar.
@todo to be implemented:
-# extra money
-# stat gain rolls
-# extra items
-# languages
-# spec skill
-# spec cats
-# talents/flaws
'''
self.edtmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['menu_select'][self.lang],
menu = self.edtmenu)
self.edtmenu.add_command(label = submenu['select'][self.lang]['bgo_money'],
command = self.notdoneyet)
self.edtmenu.add_command(label = submenu['select'][self.lang]['bgo_stats'],
command = self.notdoneyet)
self.edtmenu.add_command(label = submenu['select'][self.lang]['bgo_items'],
command = self.notdoneyet)
self.edtmenu.add_command(label = submenu['select'][self.lang]['bgo_lang'],
command = self.notdoneyet)
self.edtmenu.add_command(label = submenu['select'][self.lang]['bgo_spec_skill'],
command = self.notdoneyet)
self.edtmenu.add_command(label = submenu['select'][self.lang]['bgo_spec_cat'],
command = self.notdoneyet)
self.edtmenu.add_command(label = submenu['select'][self.lang]['bgo_talens'],
command = self.notdoneyet)
def __addHelpMenu(self):
'''!
Adds a help menu entry to menu bar.
'''
self.helpmenu = Menu(master = self.menu)
self.menu.add_cascade(label = txtmenu['help'][self.lang],
menu = self.helpmenu)
self.helpmenu.add_command(label = submenu['help'][self.lang]['win'],
command = self.__helpWin)
self.helpmenu.add_separator()
self.helpmenu.add_command(label = submenu['help'][self.lang]['about'],
command = self._helpAbout)
def __closewin(self):
'''!
A method to destroy the current window and go back to MainWindow.
'''
self.window.destroy()
        self.window = MainWindow(lang = self.lang, char = self._character, storepath = self.spath)
def __openFile(self):
"""!
This method opens a dialogue window (Tk) for opening files.
The content of the opened file will be saved in \e file
\e content as an array.
"""
self.__filein = askopenfilename(filetypes = self.cmask,
initialdir = self.mypath)
if self.__filein != "":
with open(self.__filein, 'r') as filecontent:
if self.__filein[-4:].lower() == "json":
self.char = json.load(filecontent)
                elif self.__filein[-3:].lower() == "grp":
self.grp = json.load(filecontent)
else:
msg = messageWindow()
msg.showinfo(errmsg['wrong_type'][self.lang])
logger.warn(errmsg['wrong_type'][self.lang])
pass
def __helpWin(self):
'''!
Help windows
@todo this has to be fully implemented
'''
self.notdoneyet("charInfo.__helpWin:\ņ\n not done yet")
def __buildWin(self):
'''!
This method builds all window components
@todo This has to be fully implemented.
'''
self._f_money = Frame(master = self.window)
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('investments', '0008_auto_20180927_1306'),
]
operations = [
migrations.CreateModel(
name='NewInvestment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), # noqa
('user_name', models.CharField(max_length=30)),
('date', models.DateField()),
('money', models.DecimalField(decimal_places=2, max_digits=8)),
('kind', models.CharField(max_length=20)),
('tx_op', models.DecimalField(decimal_places=2, max_digits=4)),
('brokerage', models.CharField(max_length=15)),
],
options={
'ordering': ['-date'],
'abstract': False,
},
),
migrations.CreateModel(
name='NewInvestmentDetails',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), # noqa
('user_name', models.CharField(max_length=30)),
('date', models.DateField()),
('money', models.DecimalField(decimal_places=2, max_digits=8)),
('kind', models.CharField(max_length=20)),
('which_target', models.CharField(default='---', max_length=20)),
('segment', models.CharField(max_length=20)),
('tx_or_price', models.DecimalField(decimal_places=2, max_digits=8)),
('quant', models.DecimalField(decimal_places=2, max_digits=8)),
],
options={
'ordering': ['-date'],
'abstract': False,
},
),
]
|
from pytest import fixture, mark
from pylada import vasp_program
@fixture
def path():
from os.path import dirname
return dirname(__file__)
@mark.skipif(vasp_program is None, reason="vasp not configured")
def test(tmpdir, path):
from pylada.crystal import Structure
from pylada.vasp import Vasp
from epirelax import epitaxial
from pylada import default_comm
structure = Structure([[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]], scale=5.55, name='has a name')\
.add_atom(0, 0, 0, "Si")\
.add_atom(0.25, 0.25, 0.25, "Si")
vasp = Vasp()
vasp.kpoints = "Automatic generation\n0\nMonkhorst\n2 2 2\n0 0 0"
vasp.prec = "accurate"
vasp.ediff = 1e-5
vasp.encut = 1.4
vasp.ismear = "fermi"
vasp.sigma = 0.01
vasp.relaxation = "volume"
vasp.add_specie = "Si", "{0}/pseudos/Si".format(path)
result = epitaxial(vasp, structure, outdir=str(tmpdir), epiconv=1e-4, comm=default_comm)
assert result.success
|
"""
Django settings for firma_e project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = ''
DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'creacion_firma',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.debug',
'django.template.context_processors.request',
],
},
},
]
ROOT_URLCONF = 'firma_e.urls'
WSGI_APPLICATION = 'firma_e.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
LANGUAGE_CODE = 'es'
gettext = lambda s: s
LANGUAGES = (
('es', gettext('Spanish')),
('en', gettext('English')),
)
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_ROOT = BASE_DIR + '/static/'
STATIC_URL = '/static/'
MEDIA_ROOT = BASE_DIR + '/media/'
MEDIA_URL = '/media/'
TMP_DIR = '/home/agmartinez/Programas/firma_electronica/files/'
TEST_DS = True
MAGIC_NUMBER = 'HMFTEN&")!)#MWSUALG29b347cuhH#(Ndy=1N2'
CERTS = {"C0":
{"issuer": "/home/agmartinez/ocsp_3_uat/ac2_4096.pem",
"ocsp": "/home/agmartinez/ocsp_3_uat/OCSP_AC_4096_SHA256.pem",
"chain": "/home/agmartinez/ocsp_3_uat/chain.pem"},
"C2":
{"issuer": "/home/agmartinez/ocsp_produccion/certificados/AC2_SAT.pem",
"ocsp": "/home/agmartinez/ocsp_produccion/certificados/ocsp.ac2_sat.pem",
"chain": "/home/agmartinez/ocsp_produccion/certificados/chain2.pem"},
"C3":
{"issuer": "/home/agmartinez/ocsp_produccion/certificados/AC3_SAT.pem",
"ocsp": "/home/agmartinez/ocsp_produccion/certificados/ocsp.ac3_sat.pem",
"chain": "/home/agmartinez/ocsp_produccion/certificados/chain3.pem"},
"C4":
{"issuer": "/home/agmartinez/ocsp_produccion/certificados/AC4_SAT.pem",
"ocsp": "/home/agmartinez/ocsp_produccion/certificados/ocsp.ac4_sat.pem",
"chain": "/home/agmartinez/ocsp_produccion/certificados/chain4.pem"}}
XSLT = "/home/agmartinez/csd_test/SF.CadenaOriginal/cadenaoriginal_3_2.xslt"
USER_CERTS_TEST = ["/home/agmartinez/ocsp_produccion/usuarios/mara800822jq4.pem",
"/home/agmartinez/ocsp_produccion/usuarios/vasr820908s87.pem"]
|
import sys
import re
import pdb
def is_failtree(linetree):
#perl -pe 's/.*\([^a-z]+ .*//'
if re.search(r'.*\([^a-z]+ .*', linetree) is not None:
return True
else:
return False
def gen_failtree(words):
#use failnode labels?
assert len(words) > 0
#if ("" not in words):
# pdb.set_trace()
FTAG = "FAIL"
if len(words) == 1: #base case
return "({} {})".format(FTAG,words[0]) #(X word)
else: #recursion
return "({} {} {})".format(FTAG, gen_failtree([words[0]]), gen_failtree(words[1:])) #(X (X word) recursion)
for line in sys.stdin:
if is_failtree(line):
#get words from failed bracketed linetree
        words = [x[1:-1] for x in re.findall(r' [^\) ]+\)', line)] #TODO test this. Should match things with a space, then any number of non-paren chars, then a close paren.
#generate right-branching failtree, keeping words.
print(gen_failtree(words))
else:
print(line[:-1])
|
import datetime
import logging
import os
import sys
import warnings
from configobj import ConfigObj
sys.path.append('scripts')
from scripts import pfp_batch
from scripts import pfp_log
warnings.filterwarnings("ignore", category=Warning)
logger = logging.getLogger("pfp_log")
class Bunch:
def __init__(self, **kwds):
self.__dict__.update(kwds)
if (__name__ == '__main__'):
# get the logger
now = datetime.datetime.now()
log_file_name = "pfp_" + now.strftime("%Y%m%d%H%M") + ".log"
log_file_name = os.path.join("logfiles", log_file_name)
logger = pfp_log.CreateLogger("pfp_log", log_file_name=log_file_name,
to_screen=True)
cfg_file_name = sys.argv[1]
if not os.path.isfile(cfg_file_name):
msg = "Batch control file " + cfg_file_name + " not found"
logger.error(msg)
sys.exit()
cfg_batch = ConfigObj(cfg_file_name, indent_type=" ", list_values=False,
write_empty_values=True)
main_ui = Bunch(stop_flag=False, cfg=cfg_batch, mode="batch")
pfp_batch.do_levels_batch(main_ui)
|
from __future__ import unicode_literals
import shogi
import unittest
from mock import patch
from shogi import CSA
TEST_CSA = """'----------棋譜ファイルの例"example.csa"-----------------
'バージョン
V2.2
'対局者名
N+NAKAHARA
N-YONENAGA
'棋譜情報
'棋戦名
$EVENT:13th World Computer Shogi Championship
'対局場所
$SITE:KAZUSA ARC
'開始日時
$START_TIME:2003/05/03 10:30:00
'終了日時
$END_TIME:2003/05/03 11:11:05
'持ち時間:25分、切れ負け
$TIME_LIMIT:00:25+00
'戦型:矢倉
$OPENING:YAGURA
'平手の局面
P1-KY-KE-GI-KI-OU-KI-GI-KE-KY
P2 * -HI * * * * * -KA *
P3-FU-FU-FU-FU-FU-FU-FU-FU-FU
P4 * * * * * * * * *
P5 * * * * * * * * *
P6 * * * * * * * * *
P7+FU+FU+FU+FU+FU+FU+FU+FU+FU
P8 * +KA * * * * * +HI *
P9+KY+KE+GI+KI+OU+KI+GI+KE+KY
'先手番
+
'指し手と消費時間(optional)
+2726FU
T12
-3334FU
T6
+7776FU
%TORYO
'---------------------------------------------------------
"""
TEST_CSA_SUMMARY = {'moves': ['2g2f', '3c3d', '7g7f'], 'sfen': 'lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1', 'names': ['NAKAHARA', 'YONENAGA'], 'win': 'b'}
TEST_CSA_WITH_PI = '''
V2.2
N+先手
N-後手
$START_TIME:2020/05/04 12:40:52
PI82HI22KA
+
+7776FU
T1
-8384FU
T11
%TORYO
T0
'''
TEST_CSA_SUMMARY_WITH_PI = {
'moves': ['7g7f', '8c8d'],
'sfen': 'lnsgkgsnl/9/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1',
'names': ['先手', '後手'],
'win': 'w'
}
class ParserTest(unittest.TestCase):
def parse_str_test(self):
result = CSA.Parser.parse_str(TEST_CSA)
self.assertEqual(result[0], TEST_CSA_SUMMARY)
def parse_str_test_with_PI(self):
result = CSA.Parser.parse_str(TEST_CSA_WITH_PI)
self.assertEqual(result[0], TEST_CSA_SUMMARY_WITH_PI)
TEST_SUMMARY = {
'names': ['kiki_no_onaka_black', 'kiki_no_omata_white'],
'sfen': 'lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1',
'moves': [],
'time': {'Time_Unit': '1sec', 'Total_Time': '900', 'Byoyomi': '0', 'Least_Time_Per_Move': '1'}
}
TEST_SUMMARY_STR = '''BEGIN Game_Summary
Protocol_Version:1.1
Protocol_Mode:Server
Format:Shogi 1.0
Declaration:Jishogi 1.1
Game_ID:20150505-CSA25-3-5-7
Name+:kiki_no_onaka_black
Name-:kiki_no_omata_white
Your_Turn:-
Rematch_On_Draw:NO
To_Move:+
Max_Moves:123
BEGIN Time
Time_Unit:1sec
Total_Time:900
Byoyomi:0
Least_Time_Per_Move:1
END Time
BEGIN Position
P1-KY-KE-GI-KI-OU-KI-GI-KE-KY
P2 * -HI * * * * * -KA *
P3-FU-FU-FU-FU-FU-FU-FU-FU-FU
P4 * * * * * * * * *
P5 * * * * * * * * *
P6 * * * * * * * * *
P7+FU+FU+FU+FU+FU+FU+FU+FU+FU
P8 * +KA * * * * * +HI *
P9+KY+KE+GI+KI+OU+KI+GI+KE+KY
+
END Position
END Game_Summary
'''
class TCPProtocolTest(unittest.TestCase):
def setUp(self):
patchers = []
patchers.append(patch.object(CSA.TCPProtocol, 'connect', return_value=None))
patchers.append(patch.object(CSA.TCPProtocol, 'write'))
patchers.append(patch.object(CSA.TCPProtocol, 'read', return_value=0))
for patcher in patchers:
self.addCleanup(patcher.stop)
patcher.start()
self.maxDiff = None
def add_response(self, csa_protocol, response):
csa_protocol.recv_buf += response
def test_login(self):
tcp = CSA.TCPProtocol('127.0.0.1')
self.add_response(tcp, 'LOGIN:python-syogi OK\n')
login_result = tcp.login('python-syogi', 'password')
self.assertTrue(login_result)
def test_fail_login(self):
tcp = CSA.TCPProtocol('127.0.0.1')
self.add_response(tcp, 'LOGIN:incorrect\n')
with self.assertRaises(ValueError):
tcp.login('python-syogi', 'password')
def test_wait_match(self):
tcp = CSA.TCPProtocol('127.0.0.1')
self.add_response(tcp, TEST_SUMMARY_STR)
game_summary = tcp.wait_match()
self.assertEqual(game_summary, {
'summary': TEST_SUMMARY,
'my_color': shogi.WHITE
})
def test_match(self):
tcp = CSA.TCPProtocol('127.0.0.1')
self.add_response(tcp, 'LOGIN:username OK\n')
tcp.login('username', 'password')
self.add_response(tcp, TEST_SUMMARY_STR)
game_summary = tcp.wait_match()
board = shogi.Board(game_summary['summary']['sfen'])
self.add_response(tcp, 'START:20150505-CSA25-3-5-7\n')
tcp.agree()
self.add_response(tcp, '+5756FU,T1\n')
(turn, usi, spend_time, message) = tcp.wait_server_message(board)
board.push(shogi.Move.from_usi(usi))
self.assertEqual(turn, shogi.BLACK)
self.assertEqual(spend_time, 1.0)
self.assertEqual(board.sfen(), 'lnsgkgsnl/1r5b1/ppppppppp/9/9/4P4/PPPP1PPPP/1B5R1/LNSGKGSNL w - 2')
next_move = shogi.Move.from_usi('8c8d')
board.push(next_move)
self.add_response(tcp, '-8384FU,T2\n')
response_line = tcp.move(board.pieces[next_move.to_square], shogi.WHITE, next_move)
(turn, usi, spend_time, message) = tcp.parse_server_message(response_line, board)
self.assertEqual(turn, shogi.WHITE)
self.assertEqual(spend_time, 2.0)
# without spent time ex.) Shogidokoro
self.add_response(tcp, '+5655FU\n')
(turn, usi, spend_time, message) = tcp.wait_server_message(board)
board.push(shogi.Move.from_usi(usi))
self.assertEqual(turn, shogi.BLACK)
self.assertEqual(spend_time, None)
if __name__ == '__main__':
unittest.main()
|
"""
Classifier is an image classifier specialization of Net.
"""
import numpy as np
import caffe
class Classifier(caffe.Net):
"""
Classifier extends Net for image class prediction
by scaling, center cropping, or oversampling.
Parameters
----------
image_dims : dimensions to scale input for cropping/sampling.
Default is to scale to net input size for whole-image crop.
mean, input_scale, raw_scale, channel_swap: params for
preprocessing options.
"""
def __init__(self, model_file, pretrained_file, image_dims=None,
mean=None, input_scale=None, raw_scale=None,
channel_swap=None):
caffe.Net.__init__(self, model_file, pretrained_file, caffe.TEST)
# configure pre-processing
in_ = self.inputs[0]
self.transformer = caffe.io.Transformer(
{in_: self.blobs[in_].data.shape})
self.transformer.set_transpose(in_, (2, 0, 1))
if mean is not None:
self.transformer.set_mean(in_, mean)
if input_scale is not None:
self.transformer.set_input_scale(in_, input_scale)
if raw_scale is not None:
self.transformer.set_raw_scale(in_, raw_scale)
if channel_swap is not None:
self.transformer.set_channel_swap(in_, channel_swap)
self.crop_dims = np.array(self.blobs[in_].data.shape[2:])
if not image_dims:
image_dims = self.crop_dims
self.image_dims = image_dims
def predict(self, inputs, oversample=True):
"""
Predict classification probabilities of inputs.
Parameters
----------
inputs : iterable of (H x W x K) input ndarrays.
oversample : boolean
average predictions across center, corners, and mirrors
when True (default). Center-only prediction when False.
Returns
-------
predictions: (N x C) ndarray of class probabilities for N images and C
classes.
"""
# Scale to standardize input dimensions.
input_ = np.zeros((len(inputs),
self.image_dims[0],
self.image_dims[1],
inputs[0].shape[2]),
dtype=np.float32)
for ix, in_ in enumerate(inputs):
input_[ix] = caffe.io.resize_image(in_, self.image_dims)
if oversample:
# Generate center, corner, and mirrored crops.
input_ = caffe.io.oversample(input_, self.crop_dims)
else:
# Take center crop.
center = np.array(self.image_dims) / 2.0
crop = np.tile(center, (1, 2))[0] + np.concatenate([
-self.crop_dims / 2.0,
self.crop_dims / 2.0
])
crop = crop.astype(int)
input_ = input_[:, crop[0]:crop[2], crop[1]:crop[3], :]
# Classify
caffe_in = np.zeros(np.array(input_.shape)[[0, 3, 1, 2]],
dtype=np.float32)
for ix, in_ in enumerate(input_):
caffe_in[ix] = self.transformer.preprocess(self.inputs[0], in_)
out = self.forward_all(**{self.inputs[0]: caffe_in})
predictions = out[self.outputs[0]]
# For oversampling, average predictions across crops.
if oversample:
predictions = predictions.reshape((len(predictions) // 10, 10, -1))
predictions = predictions.mean(1)
return predictions
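# Minimal usage sketch (illustrative only; the prototxt/caffemodel/image paths below are
# hypothetical placeholders, not files shipped with this module):
#
#   net = Classifier('deploy.prototxt', 'weights.caffemodel',
#                    image_dims=(256, 256), raw_scale=255,
#                    channel_swap=(2, 1, 0))
#   image = caffe.io.load_image('example.jpg')        # H x W x 3 float image in [0, 1]
#   probs = net.predict([image], oversample=True)     # (1, num_classes) class probabilities
#   top_class = probs[0].argmax()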
|
""" Test specific of JobParameters with and without the flag in for ES backend
flag in /Operations/[]/Services/JobMonitoring/useESForJobParametersFlag
"""
import os
import time
import DIRAC
DIRAC.initialize() # Initialize configuration
from DIRAC.WorkloadManagementSystem.Client.WMSClient import WMSClient
from DIRAC.WorkloadManagementSystem.Client.JobMonitoringClient import JobMonitoringClient
from DIRAC.WorkloadManagementSystem.Client.JobStateUpdateClient import JobStateUpdateClient
from DIRAC.tests.Utilities.WMS import helloWorldJob, createFile
jobMonitoringClient = JobMonitoringClient()
jobStateUpdateClient = JobStateUpdateClient()
def createJob():
job = helloWorldJob()
jobDescription = createFile(job)
wmsClient = WMSClient()
res = wmsClient.submitJob(job._toJDL(xmlFile=jobDescription))
assert res["OK"], res["Message"]
jobID = int(res["Value"])
return jobID
def updateFlag():
    # Here we now set the following flag inside /Operations/Defaults:
# in Operations/Defaults/Services/JobMonitoring/useESForJobParametersFlag
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
csAPI = CSAPI()
res = csAPI.createSection("Operations/Defaults/Services/")
if not res["OK"]:
print(res["Message"])
exit(1)
res = csAPI.createSection("Operations/Defaults/Services/JobMonitoring/")
if not res["OK"]:
print(res["Message"])
exit(1)
csAPI.setOption("Operations/Defaults/Services/JobMonitoring/useESForJobParametersFlag", True)
csAPI.commit()
# Now we need to restart the services for the new configuration to be picked up
time.sleep(2)
os.system("dirac-restart-component WorkloadManagement JobMonitoring")
os.system("dirac-restart-component WorkloadManagement JobStateUpdate")
time.sleep(5)
def _checkWithRetries(fcn, args, expected):
for i in range(3):
res = fcn(*args)
assert res["OK"], res["Message"]
if res["Value"] == expected:
return
time.sleep(1)
assert res["Value"] == expected, "Failed to call %s after 3 retries"
def test_MySQLandES_jobParameters():
"""a basic put - remove test, changing the flag in between"""
# First, create a job
jobID = createJob()
# Use the MySQL backend
res = jobStateUpdateClient.setJobParameter(jobID, "ParName-fromMySQL", "ParValue-fromMySQL")
assert res["OK"], res["Message"]
_checkWithRetries(
jobMonitoringClient.getJobParameter,
(jobID, "ParName-fromMySQL"),
{"ParName-fromMySQL": "ParValue-fromMySQL"},
)
res = jobMonitoringClient.getJobParameters(jobID) # This will be looked up in MySQL only
assert res["OK"], res["Message"]
assert isinstance(res["Value"], dict), res["Value"]
assert res["Value"] == {jobID: {"ParName-fromMySQL": "ParValue-fromMySQL"}}, res["Value"]
res = jobMonitoringClient.getJobOwner(jobID)
assert res["OK"], res["Message"]
assert res["Value"] == "adminusername", res["Value"]
res = jobStateUpdateClient.setJobsParameter({jobID: ["SomeStatus", "Waiting"]})
assert res["OK"], res["Message"]
_checkWithRetries(
jobMonitoringClient.getJobParameters,
(jobID,),
{jobID: {"ParName-fromMySQL": "ParValue-fromMySQL", "SomeStatus": "Waiting"}},
)
res = jobMonitoringClient.getJobAttributes(jobID)
assert res["OK"], res["Message"]
# changing to use the ES flag
updateFlag()
# So now we are using the ES backend
# This will still be in MySQL, but first it will look if it's in ES
res = jobMonitoringClient.getJobParameter(jobID, "ParName-fromMySQL")
assert res["OK"], res["Message"]
assert res["Value"] == {"ParName-fromMySQL": "ParValue-fromMySQL"}, res["Value"]
# Now we insert (in ES)
res = jobStateUpdateClient.setJobParameter(jobID, "ParName-fromES", "ParValue-fromES")
time.sleep(2) # sleep to give time to ES to index
assert res["OK"], res["Message"]
res = jobMonitoringClient.getJobParameter(jobID, "ParName-fromES") # This will be in ES
assert res["OK"], res["Message"]
assert res["Value"] == {"ParName-fromES": "ParValue-fromES"}, res["Value"]
res = jobMonitoringClient.getJobOwner(jobID)
assert res["OK"], res["Message"]
assert res["Value"] == "adminusername", res["Value"]
# These parameters will be looked up in MySQL and in ES, and combined
res = jobMonitoringClient.getJobParameters(jobID)
assert res["OK"], res["Message"]
assert res["Value"] == {
jobID: {"ParName-fromMySQL": "ParValue-fromMySQL", "SomeStatus": "Waiting", "ParName-fromES": "ParValue-fromES"}
}, res["Value"]
# Do it again
res = jobMonitoringClient.getJobParameters(jobID)
assert res["OK"], res["Message"]
assert res["Value"] == {
jobID: {"ParName-fromMySQL": "ParValue-fromMySQL", "SomeStatus": "Waiting", "ParName-fromES": "ParValue-fromES"}
}, res["Value"]
# this is updating an existing parameter, but in practice it will be in ES only,
# while in MySQL the old status "Waiting" will stay
res = jobStateUpdateClient.setJobsParameter({jobID: ["SomeStatus", "Matched"]})
time.sleep(2) # sleep to give time to ES to index
assert res["OK"], res["Message"]
res = jobMonitoringClient.getJobParameters(jobID)
assert res["OK"], res["Message"]
assert res["Value"][jobID]["SomeStatus"] == "Matched", res["Value"]
# again updating the same parameter
res = jobStateUpdateClient.setJobsParameter({jobID: ["SomeStatus", "Running"]})
time.sleep(2) # sleep to give time to ES to index
assert res["OK"], res["Message"]
res = jobMonitoringClient.getJobParameters(jobID)
assert res["OK"], res["Message"]
assert res["Value"][jobID]["SomeStatus"] == "Running", res["Value"]
# Now we create a second job
secondJobID = createJob()
res = jobMonitoringClient.getJobParameter(secondJobID, "ParName-fromMySQL")
assert res["OK"], res["Message"]
# Now we insert (in ES)
res = jobStateUpdateClient.setJobParameter(secondJobID, "ParName-fromES-2", "ParValue-fromES-2")
time.sleep(2) # sleep to give time to ES to index
assert res["OK"], res["Message"]
res = jobMonitoringClient.getJobParameter(secondJobID, "ParName-fromES-2") # This will be in ES
assert res["OK"], res["Message"]
assert res["Value"] == {"ParName-fromES-2": "ParValue-fromES-2"}, res["Value"]
# These parameters will be looked up in MySQL and in ES, and combined
res = jobMonitoringClient.getJobParameters([jobID, secondJobID])
assert res["OK"], res["Message"]
assert res["Value"] == {
jobID: {
"ParName-fromMySQL": "ParValue-fromMySQL",
"SomeStatus": "Running",
"ParName-fromES": "ParValue-fromES",
},
secondJobID: {"ParName-fromES-2": "ParValue-fromES-2"},
}, res["Value"]
# These parameters will be looked up in MySQL and in ES, and combined
res = jobMonitoringClient.getJobParameters([jobID, secondJobID], "SomeStatus")
assert res["OK"], res["Message"]
assert res["Value"][jobID] == {"SomeStatus": "Running"}, res["Value"]
res = jobMonitoringClient.getJobAttributes(jobID) # these will still be all in MySQL
assert res["OK"], res["Message"]
|
from math import*
from random import*
from sys import*
def insertionsort(A,p,r):
for i in range(p+1,r+1):
key = A[i]
j = i-1
while j>=p and A[j] > key:
A[j+1] = A[j]
j -= 1
A[j+1] = key
def maxHeapify(A, i, heapSize, p):
    # The heap occupies A[p..p+heapSize-1]; array index i corresponds to heap index i-p,
    # so the children of index i are at array indices 2*i+1-p and 2*i+2-p.
    left = 2*i + 1 - p
    right = 2*i + 2 - p
if left-p < heapSize and A[left] > A[i]:
largest = left
else:
largest = i
if right-p < heapSize and A[right] > A[largest]:
largest = right
if largest != i:
A[largest], A[i] = A[i], A[largest]
maxHeapify(A, largest, heapSize, p)
def buildMaxHeap(A, heapSize, p, r):
for i in range(r - heapSize//2, p-1, -1):
maxHeapify(A, i, heapSize, p)
def heapsort(A,p,r):
heapSize = r - p + 1
buildMaxHeap(A, heapSize, p, r)
for i in range(p,r):
A[p], A[p + heapSize -1] = A[p + heapSize - 1], A[p]
heapSize -= 1
maxHeapify(A, p, heapSize, p)
def Partition(A,p,r,x):
i = p-1
j = r+1
while True:
while True:
j-=1
if A[j] <= x:
break
while True:
i+=1
if A[i]>=x:
break
if i < j:
A[i],A[j] = A[j],A[i]
else:
return j
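# Worked example (added for illustration): Partition([3, 1, 2], 0, 2, 2) swaps A[0] and A[2],
# leaving A = [2, 1, 3], and returns j = 1, so A[p..j] = [2, 1] holds the elements <= 2 and
# A[j+1..r] = [3] holds the elements >= 2.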
#Description: function that returns the median of the three values given as parameters.
def median_of_three(a,b,c):
    if (a <= b <= c) or (c <= b <= a):
        return b
    elif (b <= a <= c) or (c <= a <= b):
        return a
    else:
        return c
def median_of_threeQuicksort(A,p,r):
    quicksort_loop(A,p,r)
    insertionsort(A,p,r) # Insertion sort is called at the end to sort the subarrays of length 15
                         # or less that were left unsorted; on the nearly sorted array this takes close to linear time.
def quicksort_loop(A,p,r):
    while r-p+1>15:
        m = Partition(A,p,r,median_of_three(A[p],A[r],A[(p+r)//2]))
        if m-p >= r-m: # This condition ensures that the recursive call is always made on the
                       # shorter segment, which keeps the recursion depth in O(log N).
            quicksort_loop(A,m,r)
            r = m
        else:
            quicksort_loop(A,p,m)
            p = m+1
def introsort(A,p,r):
introsort_loop(A,p,r,2*int(log(len(A),2)))
insertionsort(A,p,r)
def introsort_loop(A,p,r,limit):
    while r-p+1>15:
        if limit == 0: # Heapsort is called once the partitioning depth exceeds twice the floor of
                       # the base-2 logarithm of the array length; from that point on heapsort
                       # sorts the rest of the subarray.
            heapsort(A,p,r)
            return
        else:
            limit-=1
        m = Partition(A,p,r,median_of_three(A[p],A[r],A[(p+r)//2]))
        introsort_loop(A,m,r,limit)
        r = m
def quicksort_3_way_partitioning(A,l,r):
    setrecursionlimit(len(A) + 100000000) # The recursion limit is raised because of the number
                                          # of recursive calls that may be made.
    if r-l+1<=15:
        insertionsort(A,l,r)
    else:
        #u = randint(l,r)
        #A[u],A[r] = A[r],A[u]
        i,j,p,q,v = l-1,r,l-1,r,A[r] # From this point on, a modified Hoare partition is carried out:
        if r>l:                      # when it finishes, elements equal to the pivot are guaranteed
            while True:              # to end up in the middle, the strictly greater ones on the
                while True:          # right and the strictly smaller ones on the left. This is done
                    i+=1             # by first moving all elements equal to the pivot to the ends
                    if A[i]>=v: break # and then swapping them into the middle.
                while True:
j-=1
if A[j]<=v: break
if i>=j: break
A[i],A[j] = A[j],A[i]
if A[i] == v:
p+=1
A[p],A[i] = A[i],A[p]
if v == A[j]:
q-=1
A[j],A[q] = A[q],A[j]
A[i],A[r] = A[r],A[i]
j = i-1
i+=1
for k in range(l,p):
A[k],A[j] = A[j],A[k]
j-=1
for k in range(r-1,q,-1):
A[k],A[i] = A[i],A[k]
i+=1
quicksort_3_way_partitioning(A,l,j)
quicksort_3_way_partitioning(A,i,r)
def quicksort_2p(A,left,right):
setrecursionlimit(len(A)+100000000)
if right-left+1<=15:
insertionsort(A,left,right)
else:
#x,y = randint(left,right),randint(left,right)
#A[left],A[right],A[x],A[y] = A[x],A[y],A[left],A[right]
        if A[left]>A[right]:
            p,q = A[right],A[left] # This quicksort uses a different partitioning scheme: it takes
        else:                      # two pivots a and b such that a<=b. After the partition, the
            q,p = A[right],A[left] # elements smaller than a end up on the left, those greater than
        l,g = left+1,right-1       # or equal to a and less than or equal to b end up between the
        k = l                      # two pivots, and those strictly greater than b end up on the right.
while k<=g:
if A[k] < p:
A[k],A[l] = A[l],A[k]
l+=1
else:
if A[k]>=q:
while A[g]>q and k<g:
g-=1
if A[g]>=p:
A[k],A[g] = A[g],A[k]
else:
A[k],A[g] = A[g],A[k]
A[k],A[l] = A[l],A[k]
l+=1
g-=1
k+=1
l-=1
g+=1
A[left] = A[l]
A[l] = p
A[right] = A[g]
A[g] = q
quicksort_2p(A,left,l-1)
quicksort_2p(A,l+1,g-1)
quicksort_2p(A,g+1,right)
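# Minimal self-check sketch (added for illustration; not part of the original routines).
# Each routine sorts A[p..r] in place, so the result can be compared against Python's sorted().
if __name__ == "__main__":
    for sort_fn in (median_of_threeQuicksort, introsort,
                    quicksort_3_way_partitioning, quicksort_2p):
        A = [randint(0, 1000) for _ in range(200)]
        expected = sorted(A)
        sort_fn(A, 0, len(A) - 1)
        assert A == expected, sort_fn.__name__
    print("all sorting routines produced correctly sorted output")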
|
"""abydos.tests.distance.test_distance_ncd_lzss.
This module contains unit tests for abydos.distance.NCDlzss
"""
import unittest
from abydos.distance import NCDlzss
class NCDlzssTestCases(unittest.TestCase):
"""Test NCDlzss functions.
abydos.distance.NCDlzss
"""
cmp = NCDlzss()
def test_ncd_lzss_dist(self):
"""Test abydos.distance.NCDlzss.dist."""
try:
import lzss # noqa: F401
except ImportError: # pragma: no cover
return
# Base cases
self.assertEqual(self.cmp.dist('', ''), 0.0)
self.assertEqual(self.cmp.dist('a', ''), 1.0)
self.assertEqual(self.cmp.dist('', 'a'), 1.0)
self.assertEqual(self.cmp.dist('abc', ''), 1.0)
self.assertEqual(self.cmp.dist('', 'abc'), 1.0)
self.assertEqual(self.cmp.dist('abc', 'abc'), 0.0)
self.assertEqual(self.cmp.dist('abcd', 'efgh'), 0.8)
self.assertAlmostEqual(self.cmp.dist('Nigel', 'Niall'), 0.8333333333)
self.assertAlmostEqual(self.cmp.dist('Niall', 'Nigel'), 0.8333333333)
self.assertAlmostEqual(self.cmp.dist('Colin', 'Coiln'), 0.8333333333)
self.assertAlmostEqual(self.cmp.dist('Coiln', 'Colin'), 0.8333333333)
self.assertAlmostEqual(self.cmp.dist('ATCAACGAGT', 'AACGATTAG'), 0.5)
def test_ncd_lzss_sim(self):
"""Test abydos.distance.NCDlzss.sim."""
try:
import lzss # noqa: F401
except ImportError: # pragma: no cover
return
# Base cases
self.assertEqual(self.cmp.sim('', ''), 1.0)
self.assertEqual(self.cmp.sim('a', ''), 0.0)
self.assertEqual(self.cmp.sim('', 'a'), 0.0)
self.assertEqual(self.cmp.sim('abc', ''), 0.0)
self.assertEqual(self.cmp.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp.sim('abc', 'abc'), 1.0)
self.assertEqual(self.cmp.sim('abcd', 'efgh'), 0.19999999999999996)
self.assertAlmostEqual(self.cmp.sim('Nigel', 'Niall'), 0.1666666667)
self.assertAlmostEqual(self.cmp.sim('Niall', 'Nigel'), 0.1666666667)
self.assertAlmostEqual(self.cmp.sim('Colin', 'Coiln'), 0.1666666667)
self.assertAlmostEqual(self.cmp.sim('Coiln', 'Colin'), 0.1666666667)
self.assertAlmostEqual(self.cmp.sim('ATCAACGAGT', 'AACGATTAG'), 0.5)
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_managed_disk
version_added: "2.4"
short_description: Manage Azure Managed Disks
description:
- Create, update and delete an Azure Managed Disk
options:
resource_group:
description:
- Name of a resource group where the managed disk exists or will be created.
required: true
name:
description:
- Name of the managed disk.
required: true
state:
description:
            - Assert the state of the managed disk. Use C(present) to create or update a managed disk and C(absent) to delete a managed disk.
default: present
choices:
- absent
- present
location:
description:
- Valid Azure location. Defaults to location of the resource group.
storage_account_type:
description:
- "Type of storage for the managed disk: C(Standard_LRS) or C(Premium_LRS). If not specified the disk is created C(Standard_LRS)."
choices:
- Standard_LRS
- Premium_LRS
create_option:
description:
- "Allowed values: empty, import, copy.
- C(import) from a VHD file in I(source_uri) and C(copy) from previous managed disk I(source_uri)."
choices:
- empty
- import
- copy
source_uri:
description:
- URI to a valid VHD file to be used or the resource ID of the managed disk to copy.
aliases:
- source_resource_uri
os_type:
description:
- "Type of Operating System: C(linux) or C(windows). Used when I(create_option) is either C(copy) or C(import) and the source is an OS disk."
choices:
- linux
- windows
disk_size_gb:
description:
- Size in GB of the managed disk to be created. If I(create_option) is C(copy) then the value must be greater than or equal to the source's size.
managed_by:
description:
- Name of an existing virtual machine with which the disk is or will be associated, this VM should be in the same resource group.
- To detach a disk from a vm, explicitly set to ''.
- If this option is unset, the value will not be changed.
version_added: 2.5
tags:
description:
- Tags to assign to the managed disk.
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Bruno Medina (@brusMX)"
'''
EXAMPLES = '''
- name: Create managed disk
azure_rm_managed_disk:
name: mymanageddisk
location: eastus
resource_group: Testing
disk_size_gb: 4
- name: Mount the managed disk to VM
azure_rm_managed_disk:
name: mymanageddisk
location: eastus
resource_group: Testing
disk_size_gb: 4
managed_by: testvm001
- name: Unmount the managed disk from VM
azure_rm_managed_disk:
name: mymanageddisk
location: eastus
resource_group: Testing
disk_size_gb: 4
- name: Delete managed disk
  azure_rm_managed_disk:
name: mymanageddisk
location: eastus
resource_group: Testing
state: absent
'''
RETURN = '''
id:
description: The managed disk resource ID.
returned: always
type: dict
state:
description: Current state of the managed disk
returned: always
type: dict
changed:
description: Whether or not the resource has changed
returned: always
type: bool
'''
import re
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.tools import parse_resource_id
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
def managed_disk_to_dict(managed_disk):
create_data = managed_disk.creation_data
return dict(
id=managed_disk.id,
name=managed_disk.name,
location=managed_disk.location,
tags=managed_disk.tags,
create_option=create_data.create_option.value.lower(),
source_uri=create_data.source_uri or create_data.source_resource_id,
disk_size_gb=managed_disk.disk_size_gb,
os_type=managed_disk.os_type.value if managed_disk.os_type else None,
storage_account_type=managed_disk.sku.name.value if managed_disk.sku else None,
managed_by=managed_disk.managed_by
)
class AzureRMManagedDisk(AzureRMModuleBase):
"""Configuration class for an Azure RM Managed Disk resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
),
location=dict(
type='str'
),
storage_account_type=dict(
type='str',
choices=['Standard_LRS', 'Premium_LRS']
),
create_option=dict(
type='str',
choices=['empty', 'import', 'copy']
),
source_uri=dict(
type='str',
aliases=['source_resource_uri']
),
os_type=dict(
type='str',
choices=['linux', 'windows']
),
disk_size_gb=dict(
type='int'
),
managed_by=dict(
type='str'
)
)
required_if = [
('create_option', 'import', ['source_uri']),
('create_option', 'copy', ['source_uri']),
('create_option', 'empty', ['disk_size_gb'])
]
self.results = dict(
changed=False,
state=dict())
self.resource_group = None
self.name = None
self.location = None
self.storage_account_type = None
self.create_option = None
self.source_uri = None
self.os_type = None
self.disk_size_gb = None
self.tags = None
self.managed_by = None
super(AzureRMManagedDisk, self).__init__(
derived_arg_spec=self.module_arg_spec,
required_if=required_if,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
result = None
changed = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
self.location = resource_group.location
disk_instance = self.get_managed_disk()
result = disk_instance
# need create or update
if self.state == 'present':
parameter = self.generate_managed_disk_property()
if not disk_instance or self.is_different(disk_instance, parameter):
changed = True
if not self.check_mode:
result = self.create_or_update_managed_disk(parameter)
else:
result = True
# unmount from the old virtual machine and mount to the new virtual machine
if self.managed_by or self.managed_by == '':
vm_name = parse_resource_id(disk_instance.get('managed_by', '')).get('name') if disk_instance else None
vm_name = vm_name or ''
if self.managed_by != vm_name:
changed = True
if not self.check_mode:
if vm_name:
self.detach(vm_name, result)
if self.managed_by:
self.attach(self.managed_by, result)
result = self.get_managed_disk()
if self.state == 'absent' and disk_instance:
changed = True
if not self.check_mode:
self.delete_managed_disk()
result = True
self.results['changed'] = changed
self.results['state'] = result
return self.results
def attach(self, vm_name, disk):
vm = self._get_vm(vm_name)
# find the lun
luns = ([d.lun for d in vm.storage_profile.data_disks]
if vm.storage_profile.data_disks else [])
lun = max(luns) + 1 if luns else 0
# prepare the data disk
params = self.compute_models.ManagedDiskParameters(id=disk.get('id'), storage_account_type=disk.get('storage_account_type'))
data_disk = self.compute_models.DataDisk(lun=lun, create_option=self.compute_models.DiskCreateOptionTypes.attach, managed_disk=params)
vm.storage_profile.data_disks.append(data_disk)
self._update_vm(vm_name, vm)
def detach(self, vm_name, disk):
vm = self._get_vm(vm_name)
leftovers = [d for d in vm.storage_profile.data_disks if d.name.lower() != disk.get('name').lower()]
if len(vm.storage_profile.data_disks) == len(leftovers):
self.fail("No disk with the name '{0}' was found".format(disk.get('name')))
vm.storage_profile.data_disks = leftovers
self._update_vm(vm_name, vm)
def _update_vm(self, name, params):
try:
poller = self.compute_client.virtual_machines.create_or_update(self.resource_group, name, params)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error updating virtual machine {0} - {1}".format(name, str(exc)))
def _get_vm(self, name):
try:
return self.compute_client.virtual_machines.get(self.resource_group, name, expand='instanceview')
except Exception as exc:
self.fail("Error getting virtual machine {0} - {1}".format(name, str(exc)))
def generate_managed_disk_property(self):
disk_params = {}
creation_data = {}
disk_params['location'] = self.location
disk_params['tags'] = self.tags
if self.storage_account_type:
storage_account_type = self.compute_models.DiskSku(name=self.storage_account_type)
disk_params['sku'] = storage_account_type
disk_params['disk_size_gb'] = self.disk_size_gb
# TODO: Add support for EncryptionSettings
creation_data['create_option'] = self.compute_models.DiskCreateOption.empty
if self.create_option == 'import':
creation_data['create_option'] = self.compute_models.DiskCreateOption.import_enum
creation_data['source_uri'] = self.source_uri
elif self.create_option == 'copy':
creation_data['create_option'] = self.compute_models.DiskCreateOption.copy
creation_data['source_resource_id'] = self.source_uri
disk_params['creation_data'] = creation_data
return disk_params
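# Illustrative sketch (added for clarity; the literal values are hypothetical):
# for create_option 'copy' with source_uri set to another disk's resource ID,
# the method above yields roughly
#   {'location': 'eastus', 'tags': None, 'disk_size_gb': 4,
#    'creation_data': {'create_option': DiskCreateOption.copy,
#                      'source_resource_id': '<source disk resource ID>'}}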
def create_or_update_managed_disk(self, parameter):
try:
poller = self.compute_client.disks.create_or_update(
self.resource_group,
self.name,
parameter)
aux = self.get_poller_result(poller)
return managed_disk_to_dict(aux)
except CloudError as e:
self.fail("Error creating the managed disk: {0}".format(str(e)))
# This method accounts for the difference in structure between the
# Azure retrieved disk and the parameters for the new disk to be created.
def is_different(self, found_disk, new_disk):
resp = False
if new_disk.get('disk_size_gb'):
if not found_disk['disk_size_gb'] == new_disk['disk_size_gb']:
resp = True
if new_disk.get('sku'):
if not found_disk['storage_account_type'] == new_disk['sku'].name:
resp = True
# Compare tags (TODO: decide whether a more granular comparison is needed)
if new_disk.get('tags') is not None:
if not found_disk['tags'] == new_disk['tags']:
resp = True
return resp
def delete_managed_disk(self):
try:
poller = self.compute_client.disks.delete(
self.resource_group,
self.name)
return self.get_poller_result(poller)
except CloudError as e:
self.fail("Error deleting the managed disk: {0}".format(str(e)))
def get_managed_disk(self):
try:
resp = self.compute_client.disks.get(
self.resource_group,
self.name)
return managed_disk_to_dict(resp)
except CloudError as e:
self.log('Did not find managed disk')
def main():
"""Main execution"""
AzureRMManagedDisk()
if __name__ == '__main__':
main()
|
from __future__ import unicode_literals, print_function
import os
import sys
from signal import signal, SIGPIPE, SIG_DFL
import argparse
import twitter_bot_utils as tbu
from . import TwitterMarkov
from . import checking
from . import __version__ as version
TWEETER_DESC = 'Post markov chain ("ebooks") tweets to Twitter'
LEARNER_DESC = 'Turn a twitter archive into a twitter_markov-ready text file'
def main():
parser = argparse.ArgumentParser(
'twittermarkov', description='Tweet with a markov bot, or teach it from a twitter archive.')
tbu.args.add_default_args(parser, version, ())
subparsers = parser.add_subparsers()
tweeter = subparsers.add_parser('tweet', description=TWEETER_DESC, usage='%(prog)s [options]')
tbu.args.add_default_args(tweeter, include=('user', 'config', 'dry-run', 'verbose', 'quiet'))
tweeter.add_argument('-r', '--reply', action='store_const', const='reply',
dest='action', help='tweet responses to recent mentions')
tweeter.add_argument('--corpus', dest='corpus', metavar='corpus', type=str,
help='text file, one sentence per line')
tweeter.add_argument('--max-len', type=int, default=140, help='maximum output length. default: 140')
tweeter.add_argument('--state-size', type=int, help='model state size. default: 2')
tweeter.add_argument('--no-learn', dest='learn', action='store_false',
help='skip learning (by default, recent tweets from the "parent" account are added to corpus)')
tweeter.set_defaults(subparser='tweet', func=tweet_func, action='tweet')
learner = subparsers.add_parser('corpus', description=LEARNER_DESC, usage="%(prog)s [options] archive corpus")
learner.add_argument('-o', type=str, dest='output', metavar='corpus',
help='output text file (defaults to stdout)', default='/dev/stdout')
learner.add_argument('--no-retweets', action='store_true', help='skip retweets')
learner.add_argument('--no-replies', action='store_true', help='filter out replies')
learner.add_argument('--no-mentions', action='store_true', help='filter out mentions')
learner.add_argument('--no-urls', action='store_true', help='filter out urls')
learner.add_argument('--no-media', action='store_true', help='filter out media')
learner.add_argument('--no-hashtags', action='store_true', help='filter out hashtags')
learner.add_argument('-q', '--quiet', action='store_true', help='run quietly')
learner.add_argument('archive', type=str, metavar='archive',
default=os.getcwd(), help='archive csv file (e.g. tweets.csv found in Twitter archive)')
learner.set_defaults(subparser='learn', func=learn_func, action='learn')
args = parser.parse_args()
try:
func = args.func
except AttributeError:
parser.parse_args(['--help'])
argdict = vars(args)
del argdict['func']
if args.subparser == 'tweet':
func(**argdict)
elif args.subparser == 'learn':
func(**argdict)
def tweet_func(action, max_len=None, **kwargs):
tm = TwitterMarkov(**kwargs)
try:
if action == 'tweet':
tm.log.debug('tweeting')
tm.tweet(max_len=max_len)
elif action == 'reply':
tm.log.debug('replying')
tm.reply_all(max_len=max_len)
except RuntimeError:
tm.log.error('model was unable to compose a tweet')
return
def learn_func(**kwargs):
if not kwargs['quiet']:
print("Reading " + kwargs['archive'], file=sys.stderr)
archive = tbu.archive.read_csv(kwargs.get('archive'))
gen = checking.generator(archive, **kwargs)
tweets = (tweet.replace(u'\n', u' ') + '\n' for tweet in gen)
if kwargs['output'] in ('-', '/dev/stdout'):
signal(SIGPIPE, SIG_DFL)
sys.stdout.writelines(tweets)
else:
if not kwargs['quiet']:
print("Writing " + kwargs['output'], file=sys.stderr)
with open(kwargs.get('output'), 'w') as f:
f.writelines(tweets)
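# Example invocations (added for clarity; the console-script name and the
# tweet subcommand's --user/--config flags come from packaging and
# twitter_bot_utils respectively, so their exact spelling is an assumption;
# the corpus flags below are defined directly by the parser above):
#   twittermarkov corpus tweets.csv -o corpus.txt --no-retweets --no-replies
#   twittermarkov tweet --user my_bot --config bots.yaml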
if __name__ == '__main__':
main()
|
import numpy as np  # np is used by the gradient implementations below
from operations import *
_gradient_registry = {}
class RegisterGradient:
""" A decorator for registering the gradient function for an
op type."""
def __init__(self, op_type):
""" Creates a new decorator with 'op_type' as the Operation type.
"""
self._op_type = eval(op_type)  # resolve the Operation class named by op_type (expected to be in scope via the star import above)
def __call__(self, f):
""" Registers the function 'f' as gradient function for 'op_type'"""
_gradient_registry[self._op_type] = f
return f
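# Note (added): a backpropagation routine elsewhere in the project would
# presumably look up _gradient_registry[op.__class__] to find the gradient
# function for a given op; the decorated functions below populate that mapping.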
@RegisterGradient("add")
def _add_gradient(op, grad):
"""Computes the gradients for `add`.
Args:
op: The `add` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `add` op.
Returns:
Gradients with respect to the input of `add`.
"""
a = op.inputs[0]
b = op.inputs[1]
grad_wrt_a = grad
while np.ndim(grad_wrt_a) > len(a.shape):
grad_wrt_a = np.sum(grad_wrt_a, axis=0)
for axis, size in enumerate(a.shape):
if size == 1:
grad_wrt_a = np.sum(grad_wrt_a, axis=axis, keepdims=True)
grad_wrt_b = grad
while np.ndim(grad_wrt_b) > len(b.shape):
grad_wrt_b = np.sum(grad_wrt_b, axis=0)
for axis, size in enumerate(b.shape):
if size == 1:
grad_wrt_b = np.sum(grad_wrt_b, axis=axis, keepdims=True)
return [grad_wrt_a, grad_wrt_b]
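# Worked example (added for clarity): if a has shape (3, 1) and b has shape
# (3, 4), the forward add broadcasts to (3, 4) and grad arrives with that
# shape. The loops above then sum grad over axis 1 with keepdims to recover
# grad_wrt_a of shape (3, 1), while grad_wrt_b keeps the full (3, 4) shape.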
@RegisterGradient("matmul")
def _matmul_gradient(op, grad):
"""Computes the gradients for `matmul`.
Args:
op: The `matmul` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `matmul` op.
Returns:
Gradients with respect to the input of `matmul`.
"""
A = op.inputs[0]
B = op.inputs[1]
return [grad.dot(B.T), A.T.dot(grad)]
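# Shape check (added for clarity): if A has shape (m, k) and B has shape
# (k, n), grad has shape (m, n); grad.dot(B.T) has shape (m, k) and
# A.T.dot(grad) has shape (k, n), matching A and B respectively.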
@RegisterGradient("sigmoid")
def _sigmoid_gradient(op, grad):
"""Computes the gradients for `sigmoid`.
Args:
op: The `sigmoid` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `sigmoid` op.
Returns:
Gradients with respect to the input of `sigmoid`.
"""
sigmoid = op.output
return grad * sigmoid * (1 - sigmoid)
@RegisterGradient("softmax")
def _softmax_gradient(op, grad):
"""Computes the gradients for `softmax`.
Args:
op: The `softmax` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `softmax` op.
Returns:
Gradients with respect to the input of `softmax`.
"""
softmax = op.output
return (grad - np.reshape(
np.sum(grad * softmax, 1),
[-1, 1]
)) * softmax
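# Note (added for clarity): for y = softmax(x) applied row-wise, the
# vector-Jacobian product is dL/dx = y * (g - sum_j g_j * y_j). The code above
# computes exactly that: np.sum(grad * softmax, 1) is the per-row dot product
# of g with y, which is subtracted from grad before multiplying by softmax.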
@RegisterGradient("log")
def _log_gradient(op, grad):
"""Computes the gradients for `log`.
Args:
op: The `log` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `log` op.
Returns:
Gradients with respect to the input of `log`.
"""
x = op.inputs[0]
return grad/x
@RegisterGradient("multiply")
def _multiply_gradient(op, grad):
"""Computes the gradients for `multiply`.
Args:
op: The `multiply` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `multiply` op.
Returns:
Gradients with respect to the input of `multiply`.
"""
A = op.inputs[0]
B = op.inputs[1]
return [grad * B, grad * A]
@RegisterGradient("reduce_sum")
def _reduce_sum_gradient(op, grad):
"""Computes the gradients for `reduce_sum`.
Args:
op: The `reduce_sum` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `reduce_sum` op.
Returns:
Gradients with respect to the input of `reduce_sum`.
"""
A = op.inputs[0]
output_shape = np.array(A.shape)
output_shape[op.axis] = 1
tile_scaling = A.shape // output_shape
grad = np.reshape(grad, output_shape)
return np.tile(grad, tile_scaling)
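# Worked example (added for clarity): if A has shape (3, 4) and op.axis == 0,
# the forward reduce_sum output (and grad) has shape (4,). output_shape
# becomes [1, 4], tile_scaling becomes [3, 1], and the reshaped grad of shape
# (1, 4) is tiled back to (3, 4): each element of A receives the gradient of
# the output element it was summed into.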
@RegisterGradient("negative")
def _negative_gradient(op, grad):
"""Computes the gradients for `negative`.
Args:
op: The `negative` `Operation` that we are differentiating
grad: Gradient with respect to the output of the `negative` op.
Returns:
Gradients with respect to the input of `negative`.
"""
return -grad
|
class Vorstand(object):
pass
|
import unittest
import snake.geometry
import copy
class TestPoint(unittest.TestCase):
def test_zero_point(self):
point = snake.geometry.Point()
self.assertEqual(point.x, 0)
self.assertEqual(point.y, 0)
def test_vector_point(self):
first_point = snake.geometry.Point(2, 3)
second_point = snake.geometry.Point(4, 3)
vector = snake.geometry.Point.vector_from_points(first_point, second_point)
third_point = first_point + vector
self.assertEqual(vector.x, 2)
self.assertEqual(vector.y, 0)
self.assertEqual(second_point.x, third_point.x)
self.assertEqual(second_point.y, third_point.y)
def test_point_equality(self):
first_point = snake.geometry.Point(2, 3)
second_point = snake.geometry.Point(3, 2)
self.assertNotEqual(first_point, second_point)
second_point = snake.geometry.Point(2, 3)
self.assertEqual(first_point, second_point)
class TestFreeVector(unittest.TestCase):
def test_zero_vector(self):
vector = snake.geometry.FreeVector()
self.assertEqual(vector.x, 0)
self.assertEqual(vector.y, 0)
self.assertEqual(vector.length(), 0)
def test_normalize_vector(self):
vector = snake.geometry.FreeVector(6, 0)
self.assertEqual(vector.length(), 6)
vector.normalize()
self.assertEqual(vector.length(), 1)
def test_horizontal_vector(self):
vector = snake.geometry.FreeVector(-3, 0)
self.assertEqual(vector.x, -3)
self.assertEqual(vector.y, 0)
self.assertEqual(vector.length(), 3)
vector.normalize()
self.assertEqual(vector.x, -1)
self.assertEqual(vector.y, 0)
def test_vertical_vector(self):
vector = snake.geometry.FreeVector(0, 2)
self.assertEqual(vector.x, 0)
self.assertEqual(vector.y, 2)
self.assertEqual(vector.length(), 2)
vector.normalize()
self.assertEqual(vector.x, 0)
self.assertEqual(vector.y, 1)
def test_irregular_vector(self):
vector = snake.geometry.FreeVector(2, -3)
self.assertRaises(AssertionError, vector.length)
def test_multiply_vector(self):
vector = snake.geometry.FreeVector(3, 0)
vector.normalize()
multiplied_vector = vector * 3
self.assertEqual(vector.length(), 1)
self.assertEqual(multiplied_vector.length(), 3)
self.assertEqual(multiplied_vector.x, 3)
if __name__ == '__main__':
unittest.main()
|
"""
Module for testing _data.py
"""
import os as _os # For loading fixtures
import numpy as _n
import spinmob as _s
import unittest as _ut
a = b = c = d = x = y = f = None
class Test_fitter(_ut.TestCase):
"""
Test class for fitter.
"""
debug = False
def setUp(self):
# Path to the spinmob module
self.data_path = _os.path.join(_os.path.dirname(_s.__file__), 'tests', 'fixtures')
self.x1 = [0,1,2,3,4,5,6,7]
self.y1 = [10,1,2,1,3,4,5,3]
self.y2 = [2,1,2,4,5,2,1,5]
self.ey = [0.3,0.5,0.7,0.9,1.1,1.3,1.5,1.7]
self.x3 = [1, 2, 4.2, 4]
self.y3 = [0.3, 0.5, 0.2, 0.7]
self.plot_delay = 0.1
return
def test_basics_function_first(self):
"""
Basic tests for a simple example smallish data set.
"""
# Load a test file and fit it, making sure "f" is defined at each step.
f = _s.data.fitter()
f.__repr__()
f.set_functions('a1 + a2*x + a3*x**2.', 'a1=-1., a2=0.04, a3=0.00006')
f.__repr__()
f.plot()
f.set_data(self.x1, self.y1, 0.5)
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
f.fit()
# Check results and error bars. The small error / large reduced chi^2
# ensures that there is no autoscaling of the covariance matrix / error bars,
# which is the default (CRAZY!!) behavior of many professional packages including
# lmfit and scipy's curve_fit()
self.assertAlmostEqual(6.958333341520, f.p_fit['a1'].value)
self.assertAlmostEqual(0.2808363344234, f.p_fit['a2'].stderr)
_s.tweaks.set_figure_window_geometry(position=[0,0])
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
# Check that the reduced chi^2 is roughly correct
r = f.get_reduced_chi_squareds()
self.assertIs(type(r), list)
self.assertAlmostEqual(r[0], 29.2238095)
# trim the data
f.set(xmin=1.5, xmax=6.5)
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
f.fit()
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
# trim the data and test what happens when there are 0 DOF
f.set(xmin=1.5, coarsen=2, plot_all_data=True, plot_guess_zoom=True)
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
f.fit()
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
# Change the first figure
f(first_figure=1)
# Change the function and starting plot, then refit
f.set_functions('a*x+b', 'a,b')
_s.tweaks.set_figure_window_geometry(position=[0,400])
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
f.fit()
_s.pylab.ginput(timeout=self.plot_delay)
f.__repr__()
# Coarsen and untrim
def test_multi_data_sets(self):
"""
Two-data-set-fit.
Also includes
- trim, zoom, etc
"""
global f
f = _s.data.fitter(first_figure=10)
f.__repr__()
# Set the data first
f.set_data(self.x1, [self.y1,self.y2], self.ey)
_s.tweaks.set_figure_window_geometry(10, position=[500,0])
_s.tweaks.set_figure_window_geometry(11, position=[500,400])
f.__repr__()
# Set the functions
f.set_functions(['a*x+b', 'a*cos(b*x)+c'], 'a=-1,b,c')
f.__repr__()
# Fit
f.fit()
f.__repr__()
# Trim
f.trim()
f.__repr__()
# Zoom
f.zoom()
f.__repr__()
# Untrim
f.untrim()
f.__repr__()
# Make sure untrim worked
self.assertEqual(f['xmin'][0], None)
# Fit
f.fit()
f.__repr__()
# Two data sets with different x data, different lengths, and different precisions (dtypes)
f.set_data([self.x1, self.x1, self.x3], [self.y1,self.y2,self.y3], [self.ey,self.ey,45], dtype=_n.float32)
f.__repr__()
f.set_functions(['a*x+b', 'a*cos(b*x)+c', 'a*x**2'], 'a=-1,b,c')
f.__repr__()
# Fit it.
f.fit()
f.__repr__()
# Set the wrong number of functions
f.set_functions(['a*x+b', 'a*cos(b*x)+c'], 'a=-1,b,c')
f.__repr__()
f.set_functions(['a*x+b', 'a*cos(b*x)+c', 'a*x**2', 'a*x'], 'a=-1,b,c')
f.__repr__()
def test_get_processed_data(self):
"""
Test self.get_processed_data().
"""
f = _s.data.fitter(first_figure=7, autoplot=False)
f.__repr__()
# Set the data first
f.set_data(self.x1, [self.y1,self.y2], self.ey)
f.__repr__()
# Massage conditions
f(xmin=1.5, ymax=3, coarsen=2)
f.__repr__()
# Levels of process
self.assertAlmostEqual(f.get_processed_data( )[0][1][1], 6.5)
self.assertAlmostEqual(f.get_processed_data(do_trim=False )[0][1][3], 6.5)
self.assertAlmostEqual(f.get_processed_data(do_coarsen=False)[2][0][3], 1.7)
def test_dtype(self):
"""
Ensures that the dtype is set.
"""
f = _s.data.fitter(first_figure=7, autoplot=False)
f.set_data(_n.array([1,2,3,4], dtype=_n.float16),
_n.array([1,2,3,4], dtype=_n.float64),
_n.array([1,2,3,4], dtype=_n.float32))
self.assertEqual(f.get_processed_data()[0][0].dtype, _n.float16)
f.set_data(_n.array([1,2,3,4], dtype=_n.float16),
_n.array([1,2,3,4], dtype=_n.float64),
_n.array([1,2,3,4], dtype=_n.float32),
dtype=_n.float32)
self.assertEqual(f.get_processed_data()[1][0].dtype, _n.float32)
def test_fix_free_and_function_globals(self):
"""
Tests whether we can specify globals for the functions and do a fix()
and free() call.
"""
def my_fun(x,a,b): return a*x+b
f = _s.data.fitter(autoplot=False)
f.set_functions('stuff(x,a,b)', 'a,b', stuff=my_fun)
f.fix('a')
f.set_data([1,2,3],[1,2,1],0.1)
f.fix(b=2)
self.assertEqual(f['b'].value, 2)
self.assertFalse(f['b'].vary)
self.assertFalse(f['a'].vary)
f.__repr__()
f.fit()
f.__repr__()
f.free('a')
f.__repr__()
f.fit()
f.__repr__()
def test_background(self):
f = _s.data.fitter(xmin=2, plot_guess_zoom=True, plot_all_data=True)
f.set_data()
f.set_functions('stuff*cos(x*other_stuff)+final_stuff', 'stuff, other_stuff, final_stuff', bg='final_stuff')
f.fit()
def test_autoscale_eydata(self):
f = _s.data.fitter(autoplot=False).set_data(None, [[1,2,1,3],[1,2,1,4]]).set_functions(['a*x+b','a*cos(b*x)+c'], 'a,b,c').fit()
f.autoscale_eydata().fit()
self.assertAlmostEqual(f.results.redchi,0.9958597)
def test_set_guess_to_fit_result(self):
f = _s.data.fitter(autoplot=False).set_data(None, [[1,2,1,3],[1,2,1,4]]).set_functions(['a*x+b','a*cos(b*x)+c'], 'a,b,c').fit()
f.set_guess_to_fit_result()
self.assertEqual(f.p_fit['a'].value,f.p_in['a'].value)
if __name__ == "__main__":
_ut.main(failfast=True)
|
import sys
import os
import os.path
sys.path.insert(0, os.path.abspath(os.pardir))
import matholymp
extensions = [
'sphinx.ext.autodoc',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'matholymp'
copyright = u'2014-2022, Joseph Samuel Myers'
version = matholymp.__version__
release = matholymp.__version__
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_theme_options = {
'github_button': 'false'
}
html_static_path = ['_static']
html_sidebars = {
'*': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
]
}
htmlhelp_basename = 'matholympdoc'
latex_elements = {
}
latex_documents = [
('index', 'matholymp.tex', u'matholymp Documentation',
u'Joseph Myers', 'manual'),
]
man_pages = [
('index', 'matholymp', u'matholymp Documentation',
[u'Joseph Myers'], 1)
]
texinfo_documents = [
('index', 'matholymp', u'matholymp Documentation',
u'Joseph Myers', 'matholymp', 'One line description of project.',
'Miscellaneous'),
]
|
import math
import random
import time
import ModuleOperators
def mr_num(n):
(r, s) = (n - 1, 0)
while r % 2 == 0:
r //= 2
s += 1
return (r, s)
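# Worked example (added for clarity): for n = 561, n - 1 = 560 = 35 * 2**4,
# so mr_num(561) returns (35, 4). r is the odd part of n - 1 and s counts the
# factors of two, which is the decomposition the Miller-Rabin rounds below use.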
def isPrime(number, rounds):
# trial division against a list of known small primes
with open('prime_numbers_list.txt', 'r') as f:
for line in f:
p = int(line)
if number % p == 0:
return number == p
#miller-rabin test
(r, s) = mr_num(number)
seed = time.time()
for i in range(rounds):
is_prob_prime = False
seed += i
random.seed(seed)
a = random.randint(2, number - 2)
a = ModuleOperators.power(a, r, number)
if a == 1 or a == (number - 1):
continue
for j in range(s):
a = ModuleOperators.power2(a, number)
if a == (number - 1):
is_prob_prime = True
break
if not is_prob_prime:
return False
return True
def generatePrime(length, rounds):
while True:
random.seed()
candidate = random.randint(2 ** length, 2 ** (length + 1))
if (isPrime(candidate, rounds)):
return candidate
file = open('primeNumber_P.txt', 'w')
file.write(str(generatePrime(512, 2048)))
file.close()
time.sleep(10)
file = open('primeNumber_Q.txt', 'w')
file.write(str(generatePrime(512, 2048)))
file.close()
|
import math
def vec2_from_direction(angle, length):
return Vec2(math.cos(angle), math.sin(angle)) * length
def radians_to_degrees(angle):
return (angle / math.pi) * 180
class Vec2:
def __init__(self, x, y = None):
if y is None:
# x was passed as an (x, y) tuple
self.x = x[0]
self.y = x[1]
else:
self.x = x
self.y = y
def __add__(self, other):
return Vec2(self.x + other.x, self.y + other.y)
def __sub__(self, other):
return Vec2(self.x - other.x, self.y - other.y)
def __iadd__(self, other):
new_x = self.x + other.x
new_y = self.y + other.y
return Vec2(new_x, new_y)
def __mul__(self, other):
return Vec2(self.x * other, self.y * other)
def __abs__(self):
return math.sqrt(self.x ** 2 + self.y ** 2)
def is_within_bounds(self, center_point, size):
"""Checks whether this vector is within size distance from center_point"""
return self.distance_to(center_point) <= size
def angle(self):
return math.atan2(self.y, self.x)
def relative_angle_to(self, other):
return (self - other).angle()
def to_tuple(self):
return (self.x, self.y)
def distance_to(self, other):
return abs(self - other)
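# Illustrative usage sketch (added; not part of the original module):
#   vec2_from_direction(math.pi / 2, 3.0).to_tuple()   # approximately (0.0, 3.0)
#   abs(Vec2(3, 4))                                     # 5.0
#   Vec2(3, 4).distance_to(Vec2(0, 0))                  # 5.0
#   radians_to_degrees(math.pi)                         # 180.0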
|
import numpy as np
import pandas as pd
def create(target):
'''
Create NutritionTarget
Parameters
----------
target : pd.DataFrame
Nutrient constraints. Nutrient amounts are in SI units (i.e. g most of
the time).
Index: str. Nutrient name, may contain spaces.
Columns:
min : float
Minimum required amount of nutrient. min > 0 or is NaN. If NaN, no
minimum constraint is imposed on the nutrient.
max : float
Maximum allowed amount of nutrient. max > min, 0 or is NaN. If NaN,
no maximum constraint is imposed on the nutrient.
Either of min, max must be finite.
Returns
-------
NutritionTarget
'''
# TODO what's the effect of close to zero minima? -> close to zero pseudo targets in some cases. Is 1e-8 really that close to 0?
# Copy input so we don't mutate it
original = target
target = target.copy()
# Skip if empty (avoids pd.DataFrame.applymap bug)
if target.empty:
return pd.DataFrame(columns=('min', 'max', 'pseudo_target'), dtype=float)
# Set dtype to float
target = target.astype(float)
# Validate
_validate(target, original)
# Return
return target
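# Illustrative sketch (added; the nutrient names and numbers are made up,
# the column layout follows the docstring above):
#
#   import pandas as pd
#   target = pd.DataFrame(
#       {'min': [50.0, float('nan')], 'max': [float('nan'), 2.3]},
#       index=['protein', 'sodium'])
#   nutrition_target = create(target)
#
# Here 'protein' only has a lower bound and 'sodium' only an upper bound;
# a row with both values NaN would be rejected by _validate below.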
def _validate(target, original):
# finite or NaN
mask = target.applymap(np.isinf).any(axis=1)
invalid_rows = original[mask]
if not invalid_rows.empty:
raise ValueError(
'Encountered np.inf or -np.inf. '
'Values must be finite or NaN. '
'Invalid rows:\n{}'
.format(invalid_rows.to_string())
)
# min > 0 or NaN
mask = target['min'] <= 0
invalid_rows = original[mask]
if not invalid_rows.empty:
raise ValueError(
'min <= 0. '
'Expected: min > 0 or min=NaN. '
'Invalid rows:\n{}'
.format(invalid_rows['min'].to_string())
)
# max > min and max > 0, if max is not NaN
mask = (target['max'] <= 0) | (target['max'] <= target['min'])
invalid_rows = original[mask]
if not invalid_rows.empty:
raise ValueError(
'max <= min, 0. '
'Expected: max > min, 0 or max=NaN. '
'Invalid rows:\n{}'
.format(invalid_rows[['min', 'max']].to_string())
)
# No all NaN rows
mask = target.isnull().all(axis=1)
invalid_rows = original[mask]
if not invalid_rows.empty:
raise ValueError(
'Some rows are all NaN (when treating min=0 as min=NaN). '
'This does not define any constraint for the nutrient. Expected: min>0 or max>min,0. '
'Either remove the nutrient or constrain it. '
'Invalid rows:\n{}'
.format(invalid_rows.to_string())
)
class NutritionTarget(object):
'''
Interface: constraints and nutrition preferences
This is a pd.DataFrame. Nutrient amounts are in SI units (i.e. g most of the
time).
Index: str. Nutrient name, may contain spaces.
Columns:
min : float
Minimum required amount of nutrient. min > 0 or is NaN. If NaN, no
minimum constraint is imposed on the nutrient.
max : float
Maximum allowed amount of nutrient. max > min, 0 or is NaN. If NaN,
no maximum constraint is imposed on the nutrient.
min or max (or both) is finite.
'''
def __init__(self, *args, **kwargs):
raise Exception('This is an interface, for use in documentation only. Do not reference it in code')
def assert_satisfied(nutrition_target, result):
'''
Assert nutrition target satisfied by given nutrition (of a recipe)
Parameters
----------
nutrition_target : NutritionTarget
Nutrition target to satisfy
result : pd.Series(float, index=nutrients)
Amount of each nutrient (in a recipe)
'''
nutrition_target = nutrition_target.join(result)
# min
mins = nutrition_target.dropna(subset=['min'])
if not mins.empty:
is_less = mins['min'] < mins['amount']
is_close = mins[['min', 'amount']].apply(lambda row: np.isclose(*row), axis=1, raw=True)
assert (is_less | is_close).all()
# max
maxes = nutrition_target.dropna(subset=['max'])
if not maxes.empty:
is_greater = maxes['max'] > maxes['amount']
is_close = maxes[['max', 'amount']].apply(lambda row: np.isclose(*row), axis=1, raw=True)
assert (is_greater | is_close).all()
def from_config():
'''
Create nutrition target from config file
'''
from .config import target
return create(target)
|
from .base import XMLObject
class Source(XMLObject):
@property
def text(self):
# this should probably be a list of elements rather than `text`
return self.xml.text or ""
|
from django.conf.urls.defaults import include, patterns, url
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.decorators import login_required
from lizard_riool import views
from lizard_ui.urls import debugmode_urlpatterns
admin.autodiscover()
urlpatterns = patterns(
'',
# Upload pages
(r'^beheer/uploads/$', login_required(views.UploadsView.as_view())),
(r'^beheer/uploads/files/$', views.uploaded_file_list),
# The next line expects DELETE requests
url('^beheer/uploads/files/uploaded-file-(?P<upload_id>\d+)/$',
login_required(views.delete_uploaded_file),
name='lizard_riool_delete_uploaded_file'),
url('^beheer/uploads/files/uploaded-file-(?P<upload_id>\d+)/errors/$',
login_required(views.UploadedFileErrorsView.as_view()),
name='lizard_riool_uploaded_file_error_view'),
# Stelsels, profielen
(r'^stelsels/$', login_required(views.SewerageView.as_view())),
(r'^langsprofielen/graph/$', login_required(
views.SideProfileGraph2.as_view())),
(r'^langsprofielen/popup/$', login_required(
views.SideProfilePopup.as_view())),
# Archiefpagina
url(r'^archief/$',
login_required(views.ArchivePage.as_view()),
name='lizard_riool_archive_page'),
url(r'^archief/(?P<page_number>\d+)/$',
login_required(views.ArchivePage.as_view()),
name='lizard_riool_archive_page_numbered'),
# Activate / deactivate (via POST) and deletion (via DELETE)
url(r'^stelsels/(?P<sewerage_id>\d+)/$',
login_required(views.activate_sewerage_view),
name='lizard_riool_activate_sewerage'),
# Download originals
url(r'^stelsels/(?P<sewerage_id>\d+)/(?P<filename>.+)$',
login_required(views.download_original_view),
name='lizard_riool_download_original'),
(r'^beheer/files/$', login_required(
views.FileView.as_view(template_name="lizard_riool/files.html"))),
url(r'^beheer/files/upload/$', login_required(views.UploadView.as_view()),
name="upload_dialog_url"),
(r'^beheer/files/delete/(?P<id>\d+)/$', login_required(
views.DeleteFileView.as_view())),
(r'^langsprofielen/$', login_required(views.SideProfileView.as_view())),
(r'^beheer/$', login_required(views.FileView.as_view())),
(r'^put/$', login_required(views.ManholeFinder.as_view())),
(r'^bar/$', login_required(views.PathFinder.as_view())),
)
if settings.DEBUG:
urlpatterns += patterns('', ('^djcelery/', include('djcelery.urls')))
urlpatterns += debugmode_urlpatterns()
|
network_listing = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
<title>Index of /observations/swob-ml/partners/bc-env-snow</title>
</head>
<body>
<h1>Index of /observations/swob-ml/partners/bc-env-snow</h1>
<pre><img src="/icons/blank.gif" alt="Icon "> <a href="?C=N;O=D">Name</a> <a href="?C=M;O=A">Last modified</a> <a href="?C=S;O=A">Size</a> <a href="?C=D;O=A">Description</a><hr><img src="/icons/back.gif" alt="[PARENTDIR]"> <a href="/observations/swob-ml/partners/">Parent Directory</a> -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191006/">20191006/</a> 2019-10-06 00:46 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191007/">20191007/</a> 2019-10-07 00:47 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191008/">20191008/</a> 2019-10-08 15:31 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191009/">20191009/</a> 2019-10-09 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191010/">20191010/</a> 2019-10-10 23:06 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191011/">20191011/</a> 2019-10-11 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191012/">20191012/</a> 2019-10-12 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191013/">20191013/</a> 2019-10-13 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191014/">20191014/</a> 2019-10-14 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191015/">20191015/</a> 2019-10-15 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191016/">20191016/</a> 2019-10-16 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191017/">20191017/</a> 2019-10-17 00:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="20191018/">20191018/</a> 2019-10-18 00:44 -
<hr></pre>
</body></html>
"""
day_listing = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
<title>Index of /observations/swob-ml/partners/bc-env-snow/20191015</title>
</head>
<body>
<h1>Index of /observations/swob-ml/partners/bc-env-snow/20191015</h1>
<pre><img src="/icons/blank.gif" alt="Icon "> <a href="?C=N;O=D">Name</a> <a href="?C=M;O=A">Last modified</a> <a href="?C=S;O=A">Size</a> <a href="?C=D;O=A">Description</a><hr><img src="/icons/back.gif" alt="[PARENTDIR]"> <a href="/observations/swob-ml/partners/bc-env-snow/">Parent Directory</a> -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1a01p/">1a01p/</a> 2019-10-15 23:29 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1a02p/">1a02p/</a> 2019-10-15 23:28 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1a03p/">1a03p/</a> 2019-10-15 23:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1a14p/">1a14p/</a> 2019-10-15 23:13 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1a15p/">1a15p/</a> 2019-10-15 23:04 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1a17p/">1a17p/</a> 2019-10-15 23:43 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1a19p/">1a19p/</a> 2019-10-15 23:27 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1c20p/">1c20p/</a> 2019-10-15 23:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1c41p/">1c41p/</a> 2019-10-15 23:29 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1d06p/">1d06p/</a> 2019-10-15 23:03 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1d19p/">1d19p/</a> 2019-10-15 23:23 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1e02p/">1e02p/</a> 2019-10-15 23:13 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1e08p/">1e08p/</a> 2019-10-15 23:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1e10p/">1e10p/</a> 2019-10-15 23:43 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="1f06p/">1f06p/</a> 2019-10-15 23:27 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="2c10p/">2c10p/</a> 2019-10-15 23:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="2d14p/">2d14p/</a> 2019-10-15 23:28 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="2f05p/">2f05p/</a> 2019-10-15 23:30 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="2f08p/">2f08p/</a> 2019-10-15 23:42 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="2g03p/">2g03p/</a> 2019-10-15 23:42 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3a22p/">3a22p/</a> 2019-10-15 23:28 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3a24p/">3a24p/</a> 2019-10-15 23:28 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3a25p/">3a25p/</a> 2019-10-15 23:23 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3a28p/">3a28p/</a> 2019-10-15 23:05 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3b23p/">3b23p/</a> 2019-10-15 23:23 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3b24p/">3b24p/</a> 2019-10-15 23:04 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3b25p/">3b25p/</a> 2019-10-15 23:07 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3b26p/">3b26p/</a> 2019-10-15 23:04 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="3c08p/">3c08p/</a> 2019-10-15 23:44 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="4b12p/">4b12p/</a> 2019-10-15 23:23 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="4b15p/">4b15p/</a> 2019-10-15 23:28 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="4b16p/">4b16p/</a> 2019-10-15 23:29 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="4b17p/">4b17p/</a> 2019-10-15 23:29 -
<img src="/icons/folder.gif" alt="[DIR]"> <a href="4b18p/">4b18p/</a> 2019-10-15 23:27 -
<hr></pre>
</body></html>
"""
station_listing = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
<title>Index of /observations/swob-ml/partners/bc-env-snow/20191015/1d06p</title>
</head>
<body>
<h1>Index of /observations/swob-ml/partners/bc-env-snow/20191015/1d06p</h1>
<pre><img src="/icons/blank.gif" alt="Icon "> <a href="?C=N;O=D">Name</a> <a href="?C=M;O=A">Last modified</a> <a href="?C=S;O=A">Size</a> <a href="?C=D;O=A">Description</a><hr><img src="/icons/back.gif" alt="[PARENTDIR]"> <a href="/observations/swob-ml/partners/bc-env-snow/20191015/">Parent Directory</a> -
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0000-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0000-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 00:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0100-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0100-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 01:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0200-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0200-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 02:04 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0300-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0300-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 03:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0400-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0400-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 04:04 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0500-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0500-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 05:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0600-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0600-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 06:04 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0700-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0700-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 07:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0800-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0800-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 08:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-0900-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-0900-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 09:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1000-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1000-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 10:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1100-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1100-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 11:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1200-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1200-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 12:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1300-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1300-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 13:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1400-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1400-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 14:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1500-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1500-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 15:09 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1600-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1600-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 16:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1700-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1700-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 17:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1800-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1800-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 18:04 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-1900-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-1900-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 19:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-2000-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-2000-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 20:04 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-2100-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-2100-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 21:03 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-2200-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-2200-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 22:04 5.6K
<img src="/icons/unknown.gif" alt="[ ]"> <a href="2019-10-15-2300-bc-env-asw-1d06p-AUTO-swob.xml">2019-10-15-2300-bc-env-asw-1d06p-AUTO-swob.xml</a> 2019-10-15 23:03 5.6K
<hr></pre>
</body></html>
"""
multi_xml_bytes = b"""<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<foo />
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<bar />
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<blah />
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<stuff />
"""
multi_xml_download = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<om:ObservationCollection xmlns:om="http://www.opengis.net/om/1.0" xmlns="http://dms.ec.gc.ca/schema/point-observation/2.0" xmlns:gml="http://www.opengis.net/gml" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<om:member>
<om:Observation>
<om:metadata>
<set>
<general>
<author build="build.21" name="MSC-DMS-PG-SWOB" version="2.8"/>
<dataset name="partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii"/>
<phase name="product_generic_swob-xml-2.0"/>
<id xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii/product_generic_swob-xml-2.0/201911051300/bc_env-asw_1a01p/1a01p/orig/data_60"/>
<parent xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii/decoded_enhanced-xml-2.0/201911051300/bc_env-asw_1a01p/1a01p/orig/data_60"/>
</general>
<identification-elements>
<element name="stn_nam" uom="unitless" value="Yellowhead Lake"/>
<element name="nesdis_id" uom="unitless" value="43485654"/>
<element name="msc_id" uom="unitless" value="BC_ENV-ASW_1A01P"/>
<element name="lat" uom="°" value="52.906300"/>
<element name="long" uom="°" value="-118.547800"/>
<element name="stn_elev" uom="m" value="1860.000"/>
<element name="date_tm" uom="datetime" value="2019-11-05T13:00:00.000Z"/>
<element name="data_pvdr" uom="unitless" value="BC-ENV"/>
<element name="data_attrib_not" uom="unitless" value="Observational data provided by the Government of British Columbia: Ministry of Environment (BC-ENV). All rights reserved. | Données d’observation fournies par le Gouvernement de la Colombie-Britannique: Ministère de l’Environnement (CB-ENV). Tous droits réservés."/>
</identification-elements>
</set>
</om:metadata>
<om:samplingTime>
<gml:TimeInstant>
<gml:timePosition>2019-11-05T13:00:00.000Z</gml:timePosition>
</gml:TimeInstant>
</om:samplingTime>
<om:resultTime>
<gml:TimeInstant>
<gml:timePosition>2019-11-05T13:29:24.151Z</gml:timePosition>
</gml:TimeInstant>
</om:resultTime>
<om:procedure xlink:href="/metadata/partners/mr/sfc_wx_cfg/bc_env_snowwx/instance-xml-2.0/bc_env-asw_1a01p/43485654?version=1.2"/>
<om:observedProperty gml:remoteSchema="/schema/point-observation/2.0.xsd"/>
<om:featureOfInterest>
<gml:FeatureCollection>
<gml:location>
<gml:Point>
<gml:pos>52.9063 -118.5478</gml:pos>
</gml:Point>
</gml:location>
</gml:FeatureCollection>
</om:featureOfInterest>
<om:result>
<elements>
<element name="air_temp_1" uom="°C" value="-3.2">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_1hr_ago_1" uom="°C" value="-2.2">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2hrs_ago_1" uom="°C" value="-0.4">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2" uom="°C" value="-3.1">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_1hr_ago_2" uom="°C" value="-1.5">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2hrs_ago_2" uom="°C" value="-0.1">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv" uom="mm" value="21.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv_1hr_ago" uom="mm" value="20.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv_2hrs_ago" uom="mm" value="20.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth" uom="cm" value="21">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_1hr_ago" uom="cm" value="18">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_2hrs_ago" uom="cm" value="16">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt" uom="mm" value="488.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt_1hr_ago" uom="mm" value="486.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt_2hrs_ago" uom="mm" value="485.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="batry_volt" uom="V" value="12.6"/>
<element name="batry_volt_1hr_ago" uom="V" value="12.6"/>
<element name="batry_volt_2hrs_ago" uom="V" value="12.6"/>
<element name="solr_volt" uom="V" value="0.00"/>
<element name="solr_volt_1hr_ago" uom="V" value="0.00"/>
<element name="solr_volt_2hrs_ago" uom="V" value="0.00"/>
<element name="trans_batry_volt" uom="V" value="12.0"/>
<element name="trans_batry_volt_1hr_ago" uom="V" value="12.0"/>
<element name="trans_batry_volt_2hrs_ago" uom="V" value="12.0"/>
<element name="pcpn_amt_pst12hrs" uom="mm" value="7.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst3hrs" uom="mm" value="3.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst1hr" uom="mm" value="2.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst6hrs" uom="mm" value="4.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst24hrs" uom="mm" value="9.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
</elements>
</om:result>
</om:Observation>
</om:member>
</om:ObservationCollection>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<om:ObservationCollection xmlns:om="http://www.opengis.net/om/1.0" xmlns="http://dms.ec.gc.ca/schema/point-observation/2.0" xmlns:gml="http://www.opengis.net/gml" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<om:member>
<om:Observation>
<om:metadata>
<set>
<general>
<author build="build.21" name="MSC-DMS-PG-SWOB" version="2.8"/>
<dataset name="partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii"/>
<phase name="product_generic_swob-xml-2.0"/>
<id xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii/product_generic_swob-xml-2.0/201911051300/bc_env-asw_1a01p/1a01p/orig/data_60"/>
<parent xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii/decoded_enhanced-xml-2.0/201911051300/bc_env-asw_1a01p/1a01p/orig/data_60"/>
</general>
<identification-elements>
<element name="stn_nam" uom="unitless" value="Yellowhead Lake"/>
<element name="nesdis_id" uom="unitless" value="43485654"/>
<element name="msc_id" uom="unitless" value="BC_ENV-ASW_1A01P"/>
<element name="lat" uom="°" value="52.906300"/>
<element name="long" uom="°" value="-118.547800"/>
<element name="stn_elev" uom="m" value="1860.000"/>
<element name="date_tm" uom="datetime" value="2019-11-05T13:00:00.000Z"/>
<element name="data_pvdr" uom="unitless" value="BC-ENV"/>
<element name="data_attrib_not" uom="unitless" value="Observational data provided by the Government of British Columbia: Ministry of Environment (BC-ENV). All rights reserved. | Données d’observation fournies par le Gouvernement de la Colombie-Britannique: Ministère de l’Environnement (CB-ENV). Tous droits réservés."/>
</identification-elements>
</set>
</om:metadata>
<om:samplingTime>
<gml:TimeInstant>
<gml:timePosition>2019-11-05T13:00:00.000Z</gml:timePosition>
</gml:TimeInstant>
</om:samplingTime>
<om:resultTime>
<gml:TimeInstant>
<gml:timePosition>2019-11-05T13:29:24.151Z</gml:timePosition>
</gml:TimeInstant>
</om:resultTime>
<om:procedure xlink:href="/metadata/partners/mr/sfc_wx_cfg/bc_env_snowwx/instance-xml-2.0/bc_env-asw_1a01p/43485654?version=1.2"/>
<om:observedProperty gml:remoteSchema="/schema/point-observation/2.0.xsd"/>
<om:featureOfInterest>
<gml:FeatureCollection>
<gml:location>
<gml:Point>
<gml:pos>52.9063 -118.5478</gml:pos>
</gml:Point>
</gml:location>
</gml:FeatureCollection>
</om:featureOfInterest>
<om:result>
<elements>
<element name="air_temp_1" uom="°C" value="-3.2">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_1hr_ago_1" uom="°C" value="-2.2">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2hrs_ago_1" uom="°C" value="-0.4">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2" uom="°C" value="-3.1">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_1hr_ago_2" uom="°C" value="-1.5">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2hrs_ago_2" uom="°C" value="-0.1">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv" uom="mm" value="21.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv_1hr_ago" uom="mm" value="20.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv_2hrs_ago" uom="mm" value="20.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth" uom="cm" value="21">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_1hr_ago" uom="cm" value="18">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_2hrs_ago" uom="cm" value="16">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt" uom="mm" value="488.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt_1hr_ago" uom="mm" value="486.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt_2hrs_ago" uom="mm" value="485.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="batry_volt" uom="V" value="12.6"/>
<element name="batry_volt_1hr_ago" uom="V" value="12.6"/>
<element name="batry_volt_2hrs_ago" uom="V" value="12.6"/>
<element name="solr_volt" uom="V" value="0.00"/>
<element name="solr_volt_1hr_ago" uom="V" value="0.00"/>
<element name="solr_volt_2hrs_ago" uom="V" value="0.00"/>
<element name="trans_batry_volt" uom="V" value="12.0"/>
<element name="trans_batry_volt_1hr_ago" uom="V" value="12.0"/>
<element name="trans_batry_volt_2hrs_ago" uom="V" value="12.0"/>
<element name="pcpn_amt_pst12hrs" uom="mm" value="7.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst3hrs" uom="mm" value="3.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst1hr" uom="mm" value="2.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst6hrs" uom="mm" value="4.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst24hrs" uom="mm" value="9.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
</elements>
</om:result>
</om:Observation>
</om:member>
</om:ObservationCollection>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<om:ObservationCollection xmlns:om="http://www.opengis.net/om/1.0" xmlns="http://dms.ec.gc.ca/schema/point-observation/2.0" xmlns:gml="http://www.opengis.net/gml" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<om:member>
<om:Observation>
<om:metadata>
<set>
<general>
<author build="build.21" name="MSC-DMS-PG-SWOB" version="2.8"/>
<dataset name="partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii"/>
<phase name="product_generic_swob-xml-2.0"/>
<id xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii/product_generic_swob-xml-2.0/201911051300/bc_env-asw_1a01p/1a01p/orig/data_60"/>
<parent xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_env_snowwx-1.0-ascii/decoded_enhanced-xml-2.0/201911051300/bc_env-asw_1a01p/1a01p/orig/data_60"/>
</general>
<identification-elements>
<element name="stn_nam" uom="unitless" value="Yellowhead Lake"/>
<element name="nesdis_id" uom="unitless" value="43485654"/>
<element name="msc_id" uom="unitless" value="BC_ENV-ASW_1A01P"/>
<element name="lat" uom="°" value="52.906300"/>
<element name="long" uom="°" value="-118.547800"/>
<element name="stn_elev" uom="m" value="1860.000"/>
<element name="date_tm" uom="datetime" value="2019-11-05T13:00:00.000Z"/>
<element name="data_pvdr" uom="unitless" value="BC-ENV"/>
<element name="data_attrib_not" uom="unitless" value="Observational data provided by the Government of British Columbia: Ministry of Environment (BC-ENV). All rights reserved. | Données d’observation fournies par le Gouvernement de la Colombie-Britannique: Ministère de l’Environnement (CB-ENV). Tous droits réservés."/>
</identification-elements>
</set>
</om:metadata>
<om:samplingTime>
<gml:TimeInstant>
<gml:timePosition>2019-11-05T13:00:00.000Z</gml:timePosition>
</gml:TimeInstant>
</om:samplingTime>
<om:resultTime>
<gml:TimeInstant>
<gml:timePosition>2019-11-05T13:29:24.151Z</gml:timePosition>
</gml:TimeInstant>
</om:resultTime>
<om:procedure xlink:href="/metadata/partners/mr/sfc_wx_cfg/bc_env_snowwx/instance-xml-2.0/bc_env-asw_1a01p/43485654?version=1.2"/>
<om:observedProperty gml:remoteSchema="/schema/point-observation/2.0.xsd"/>
<om:featureOfInterest>
<gml:FeatureCollection>
<gml:location>
<gml:Point>
<gml:pos>52.9063 -118.5478</gml:pos>
</gml:Point>
</gml:location>
</gml:FeatureCollection>
</om:featureOfInterest>
<om:result>
<elements>
<element name="air_temp_1" uom="°C" value="-3.2">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_1hr_ago_1" uom="°C" value="-2.2">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2hrs_ago_1" uom="°C" value="-0.4">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2" uom="°C" value="-3.1">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_1hr_ago_2" uom="°C" value="-1.5">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="air_temp_2hrs_ago_2" uom="°C" value="-0.1">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv" uom="mm" value="21.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv_1hr_ago" uom="mm" value="20.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_wtr_equiv_2hrs_ago" uom="mm" value="20.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth" uom="cm" value="21">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_1hr_ago" uom="cm" value="18">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="snw_dpth_2hrs_ago" uom="cm" value="16">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt" uom="mm" value="488.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt_1hr_ago" uom="mm" value="486.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="cum_pcpn_amt_2hrs_ago" uom="mm" value="485.0">
<qualifier name="qa_summary" uom="unitless" value="100"/>
</element>
<element name="batry_volt" uom="V" value="12.6"/>
<element name="batry_volt_1hr_ago" uom="V" value="12.6"/>
<element name="batry_volt_2hrs_ago" uom="V" value="12.6"/>
<element name="solr_volt" uom="V" value="0.00"/>
<element name="solr_volt_1hr_ago" uom="V" value="0.00"/>
<element name="solr_volt_2hrs_ago" uom="V" value="0.00"/>
<element name="trans_batry_volt" uom="V" value="12.0"/>
<element name="trans_batry_volt_1hr_ago" uom="V" value="12.0"/>
<element name="trans_batry_volt_2hrs_ago" uom="V" value="12.0"/>
<element name="pcpn_amt_pst12hrs" uom="mm" value="7.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst3hrs" uom="mm" value="3.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst1hr" uom="mm" value="2.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst6hrs" uom="mm" value="4.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
<element name="pcpn_amt_pst24hrs" uom="mm" value="9.0">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
</elements>
</om:result>
</om:Observation>
</om:member>
</om:ObservationCollection>""".encode(
"utf-8"
)
MSNG_values_xml = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<om:ObservationCollection xmlns:om="http://www.opengis.net/om/1.0" xmlns="http://dms.ec.gc.ca/schema/point-observation/2.0" xmlns:gml="http://www.opengis.net/gml" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<om:member>
<om:Observation>
<om:metadata>
<set>
<general>
<author build="build.24" name="MSC-DMS-PG-SWOB" version="3.0"/>
<dataset name="partners/observation/atmospheric/surface_weather/bc_tran-1.0-csv"/>
<phase name="product_generic_swob-xml-2.0"/>
<id xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_tran-1.0-csv/product_generic_swob-xml-2.0/20200103000000000/bc_tran_11091/11091/orig/data_60"/>
<parent xlink:href="/data/partners/observation/atmospheric/surface_weather/bc_tran-1.0-csv/decoded_enhanced-xml-2.0/20200103000000000/bc_tran_11091/11091/orig/data_60"/>
</general>
<identification-elements>
<element name="data_pvdr" uom="unitless" value="BC-TRAN"/>
<element name="data_attrib_not" uom="unitless" value="Observational data provided by the Government of British Columbia: Ministry of Transportation and Infrastructure (BC-TRAN). All rights reserved. | Données d’observation fournies par le Gouvernement de la Colombie-Britannique: Ministère des Transports et de l’Infrastructure (CB-TRAN). Tous droits réservés."/>
<element name="stn_id" uom="unitless" value="11091"/>
<element name="stn_nam" uom="unitless" value="BRANDWYINE"/>
<element name="stn_shrt_nam" uom="unitless" value="BRNWRW"/>
<element name="lat" uom="°" value="50.054170"/>
<element name="long" uom="°" value="-123.118060"/>
<element name="stn_elev" uom="m" value="496.000"/>
<element name="msc_id" uom="unitless" value="BC_TRAN_11091"/>
<element name="date_tm" uom="datetime" value="2020-01-03T00:00:00.000Z"/>
<element name="last_reset_date_tm" uom="datetime" value="2020-01-02T14:00:00.000Z"/>
</identification-elements>
</set>
</om:metadata>
<om:samplingTime>
<gml:TimeInstant>
<gml:timePosition>2020-01-03T00:00:00.000Z</gml:timePosition>
</gml:TimeInstant>
</om:samplingTime>
<om:resultTime>
<gml:TimeInstant>
<gml:timePosition>2020-01-03T00:10:34.920Z</gml:timePosition>
</gml:TimeInstant>
</om:resultTime>
<om:procedure xlink:href="/metadata/partners/mr/sfc_wx_cfg/bc_tran/instance-xml-2.0/bc_tran_11091/11091?version=1.2"/>
<om:observedProperty gml:remoteSchema="/schema/point-observation/2.0.xsd"/>
<om:featureOfInterest>
<gml:FeatureCollection>
<gml:location>
<gml:Point>
<gml:pos>50.05417 -123.11806</gml:pos>
</gml:Point>
</gml:location>
</gml:FeatureCollection>
</om:featureOfInterest>
<om:result>
<elements>
<element name="pcpn_amt_pst3hrs" uom="mm" value="MSNG">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="4"/>
</element>
<element name="pcpn_amt_pst6hrs" uom="mm" value="MSNG">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="4"/>
</element>
<element name="pcpn_amt_pst24hrs" uom="mm" value="MSNG">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="4"/>
</element>
<element name="mslp" uom="hPa" value="1012.7">
<qualifier code-src="std_code_src" code-type="data_flags" name="data_flag" uom="code" value="1"/>
</element>
</elements>
</om:result>
</om:Observation>
</om:member>
</om:ObservationCollection>
""".encode(
"utf-8"
)
|
"""Python module for running unit tests.
:author: David Hoese (davidh)
:contact: david.hoese@ssec.wisc.edu
:organization: Space Science and Engineering Center (SSEC)
:copyright: Copyright (c) 2013 University of Wisconsin SSEC. All rights reserved.
:date: Mar 2013
:license: GNU GPLv3
Copyright (C) 2013 Space Science and Engineering Center (SSEC),
University of Wisconsin-Madison.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
This file is part of the polar2grid software package. Polar2grid takes
satellite observation data, remaps it, and writes it to a file format for
input into another program.
Documentation: http://www.ssec.wisc.edu/software/polar2grid/
Written by David Hoese January 2013
University of Wisconsin-Madison
Space Science and Engineering Center
1225 West Dayton Street
Madison, WI 53706
david.hoese@ssec.wisc.edu
"""
__docformat__ = "restructuredtext en"
from polar2grid.core import roles, constants
from .grids import grids
from . import compare
import os
import sys
import logging
import unittest
log = logging.getLogger(__name__)
class FakeBackend(roles.BackendRole):
"""Fake backend to help with testing.
Tests will overwrite these methods
"""
def __init__(self): pass
def can_handle_inputs(self, sat, instrument, nav_set_uid,
kind, band, data_kind):
pass
def create_product(self, sat, instrument, nav_set_uid,
kind, band, data_kind, **kwargs):
pass
class AWIPSBackendTestCase(unittest.TestCase):
pass
class GtiffBackendTestCase(unittest.TestCase):
pass
class BinaryBackendTestCase(unittest.TestCase):
pass
class NinjoBackendTestCase(unittest.TestCase):
pass
class GridJobsTestCase(unittest.TestCase):
"""Test the `get_grid_jobs` method from `polar2grid.grids.grids`.
This test case should only check for the behavior of grid jobs and not of
the grid determination specifically. The grid determination test case will
focus on different grid determination possibilities.
"""
@classmethod
def setUpClass(cls):
# Turn off logging if it hasn't been configured already
if not logging.getLogger('').handlers:
logging.basicConfig(level=logging.CRITICAL)
# Mimic a frontend's bands meta dictionary
band_info = {
( constants.BKIND_I, constants.BID_01 ) : {
"data_kind" : constants.DKIND_REFLECTANCE,
"remap_data_as" : constants.DKIND_REFLECTANCE,
"kind" : constants.BKIND_I,
"band" : constants.BID_01,
"fbf_img" : None, # Invalid, but we don't have one so...
"fill_value" : constants.DEFAULT_FILL_VALUE,
"swath_rows" : 320,
"swath_cols" : 6400,
"swath_scans" : 10,
"rows_per_scan" : 32,
},
( constants.BKIND_I, constants.BID_02 ) : {
"data_kind" : constants.DKIND_REFLECTANCE,
"remap_data_as" : constants.DKIND_REFLECTANCE,
"kind" : constants.BKIND_I,
"band" : constants.BID_02,
"fbf_img" : None, # Invalid, but we don't have one so...
"fill_value" : constants.DEFAULT_FILL_VALUE,
"swath_rows" : 320,
"swath_cols" : 6400,
"swath_scans" : 10,
"rows_per_scan" : 32,
},
( constants.BKIND_I, constants.BID_03 ) : {
"data_kind" : constants.DKIND_BTEMP,
"remap_data_as" : constants.DKIND_BTEMP,
"kind" : constants.BKIND_I,
"band" : constants.BID_03,
"fbf_img" : None, # Invalid, but we don't have one so...
"fill_value" : constants.DEFAULT_FILL_VALUE,
"swath_rows" : 320,
"swath_cols" : 6400,
"swath_scans" : 10,
"rows_per_scan" : 32,
},
( constants.BKIND_I, constants.BID_04 ) : {
"data_kind" : constants.DKIND_BTEMP,
"remap_data_as" : constants.DKIND_BTEMP,
"kind" : constants.BKIND_I,
"band" : constants.BID_04,
"fbf_img" : None, # Invalid, but we don't have one so...
"fill_value" : constants.DEFAULT_FILL_VALUE,
"swath_rows" : 320,
"swath_cols" : 6400,
"swath_scans" : 10,
"rows_per_scan" : 32,
},
( constants.BKIND_I, constants.BID_05 ) : {
"data_kind" : constants.DKIND_BTEMP,
"remap_data_as" : constants.DKIND_BTEMP,
"kind" : constants.BKIND_I,
"band" : constants.BID_05,
"fbf_img" : None, # Invalid, but we don't have one so...
"fill_value" : constants.DEFAULT_FILL_VALUE,
"swath_rows" : 320,
"swath_cols" : 6400,
"swath_scans" : 10,
"rows_per_scan" : 32,
},
}
cls._band_info = band_info
# Make our own grid configuration file
grid_config_str = """# Comment - Copy of grids.conf with spaces removed
211e,gpd,grid211e.gpd,0,0,0,0,0,0,0,0
211e,gpd,grid211e.gpd,-123.044,59.844,-49.385,57.289,-65.091,14.335,-113.133,16.369
211w,gpd,grid211w.gpd,-152.855,54.536,-91.444,61.257,-92.720,17.514,-133.459,12.190
203,gpd,grid203.gpd,115.601,44.646,-53.660,57.635,-123.435,24.362,174.162,19.132
204,gpd,grid204.gpd,110.000,60.644,-109.129,60.644,-109.129,25.000,110.000,25.000
dwd_germany,gpd,griddwd_germany.gpd,-2.000,56.000,25.000,56.000,25.000,40.000,-2.000,40.000
p4_211e,proj4, +proj=lcc +datum=NAD83 +ellps=GRS80 +lat_1=25 +lon_0=-95 +no_defs,5120,5120,1015.9,-1015.9,-1956254.806724622,4364276.201489102
lcc_fit,proj4, +proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lon_0=-95,None,None,1000,-1000,None,None
lcc_fit_hr,proj4, +proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lon_0=-95,None,None,400,-400,None,None
wgs84_fit,proj4, +proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs,None,None,0.0001,-0.0001,None,None
polar_canada,proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=45.0 +lon_0=-150,None,None,1000,-1000,None,None
polar_north_pacific,proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=45.0 +lon_0=-170,None,None,400,-400,None,None
polar_south_pacific,proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=-90 +lat_ts=-45.0 +lon_0=-170,None,None,400,-400,None,None
"""
cls._grid_config_str = grid_config_str
def test_except_grids_any_forced_0bands(self,
can_handle=constants.GRIDS_ANY,
forced_grids=["211e"]
):
"""Test that create_grid_jobs reports a ValueError when provided an empty band_info dictionary.
Also provided that the backend reports any grid
can be provided, and a grid is forced (skipping grid determination).
"""
def fake_handle_inputs(self, *args, **kwargs):
return can_handle
backend = FakeBackend()
backend.can_handle_inputs = fake_handle_inputs
cart = grids.Cartographer(no_defaults=True)
cart.add_grid_config_str(self._grid_config_str)
self.assertRaises(ValueError, grids.create_grid_jobs,
constants.SAT_NPP,
constants.INST_VIIRS,
constants.IBAND_NAV_UID,
{},
backend,
cart,
forced_grids=forced_grids
)
def test_except_grids_proj4_forced_0bands(self):
"""Same as `test_except_grids_any_forced_0bands`, but backend supports any PROJ.4 grid.
"""
self.test_except_grids_any_forced_0bands(can_handle=constants.GRIDS_ANY_PROJ4)
def test_except_grids_gpd_forced_0bands(self):
"""Same as `test_except_grids_any_forced_0bands`, but backend supports any GPD grid.
"""
self.test_except_grids_any_forced_0bands(can_handle=constants.GRIDS_ANY_GPD)
def test_output_grids_any_forced(self):
"""Test that create_grid_jobs returns the correct structure when any grid is supported.
"""
def fake_handle_inputs(self, *args, **kwargs):
return constants.GRIDS_ANY
backend = FakeBackend()
backend.can_handle_inputs = fake_handle_inputs
cart = grids.Cartographer(no_defaults=True)
cart.add_grid_config_str(self._grid_config_str)
forced_grids = cart.grid_information.keys()[::2]
grid_jobs = grids.create_grid_jobs(
constants.SAT_NPP,
constants.INST_VIIRS,
constants.IBAND_NAV_UID,
self._band_info,
backend,
cart,
forced_grids=forced_grids
)
self.assertEqual(len(grid_jobs.keys()), len(forced_grids))
for grid_name in forced_grids:
self.assertIn(grid_name, grid_jobs.keys())
def test_output_grids_proj4_forced(self):
def fake_handle_inputs(self, *args, **kwargs):
return constants.GRIDS_ANY_PROJ4
backend = FakeBackend()
backend.can_handle_inputs = fake_handle_inputs
cart = grids.Cartographer(no_defaults=True)
cart.add_grid_config_str(self._grid_config_str)
forced_grids = ["wgs84_fit", "211e", "203", "dwd_germany", "p4_211e"]
grid_jobs = grids.create_grid_jobs(
constants.SAT_NPP,
constants.INST_VIIRS,
constants.IBAND_NAV_UID,
self._band_info,
backend,
cart,
forced_grids=forced_grids
)
self.assertEqual(len(grid_jobs.keys()), 2)
self.assertIn("wgs84_fit", grid_jobs.keys())
self.assertIn("p4_211e", grid_jobs.keys())
def test_output_grids_gpd_forced(self):
def fake_handle_inputs(self, *args, **kwargs):
return constants.GRIDS_ANY_GPD
backend = FakeBackend()
backend.can_handle_inputs = fake_handle_inputs
cart = grids.Cartographer(no_defaults=True)
cart.add_grid_config_str(self._grid_config_str)
forced_grids = ["wgs84_fit", "211e", "203", "dwd_germany", "p4_211e"]
grid_jobs = grids.create_grid_jobs(
constants.SAT_NPP,
constants.INST_VIIRS,
constants.IBAND_NAV_UID,
self._band_info,
backend,
cart,
forced_grids=forced_grids
)
self.assertEqual(len(grid_jobs.keys()), 3)
self.assertIn("211e", grid_jobs.keys())
self.assertIn("dwd_germany", grid_jobs.keys())
self.assertIn("203", grid_jobs.keys())
self.assertIn((constants.BKIND_I,constants.BID_01), grid_jobs["211e"])
self.assertIn((constants.BKIND_I,constants.BID_02), grid_jobs["211e"])
self.assertIn((constants.BKIND_I,constants.BID_03), grid_jobs["211e"])
self.assertIn((constants.BKIND_I,constants.BID_04), grid_jobs["211e"])
self.assertIn((constants.BKIND_I,constants.BID_05), grid_jobs["211e"])
self.assertIn((constants.BKIND_I,constants.BID_01), grid_jobs["203"])
self.assertIn((constants.BKIND_I,constants.BID_02), grid_jobs["203"])
self.assertIn((constants.BKIND_I,constants.BID_03), grid_jobs["203"])
self.assertIn((constants.BKIND_I,constants.BID_04), grid_jobs["203"])
self.assertIn((constants.BKIND_I,constants.BID_05), grid_jobs["203"])
self.assertIn((constants.BKIND_I,constants.BID_01), grid_jobs["dwd_germany"])
self.assertIn((constants.BKIND_I,constants.BID_02), grid_jobs["dwd_germany"])
self.assertIn((constants.BKIND_I,constants.BID_03), grid_jobs["dwd_germany"])
self.assertIn((constants.BKIND_I,constants.BID_04), grid_jobs["dwd_germany"])
self.assertIn((constants.BKIND_I,constants.BID_05), grid_jobs["dwd_germany"])
def test_output_grids_any_determined(self):
"""Test that create_grid_jobs returns the correct structure when any grid is supported and determined.
"""
def fake_handle_inputs(self, *args, **kwargs):
return constants.GRIDS_ANY
backend = FakeBackend()
backend.can_handle_inputs = fake_handle_inputs
cart = grids.Cartographer(no_defaults=True)
cart.add_grid_config_str(self._grid_config_str)
grid_jobs = grids.create_grid_jobs(
constants.SAT_NPP,
constants.INST_VIIRS,
constants.IBAND_NAV_UID,
self._band_info,
backend,
cart,
bbox=(-110,50,-80,10)
)
self.assertEqual(len(grid_jobs.keys()), 3)
self.assertIn("211e", grid_jobs.keys())
self.assertIn("211w", grid_jobs.keys())
self.assertIn("p4_211e", grid_jobs.keys())
class GridDeterminationTestCase(unittest.TestCase):
pass
class VIIRSFrontendTestCase(unittest.TestCase):
pass
class MODISFrontendTestCase(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
|
from astropy.io import fits
import numpy
from ximpol.utils.matplotlib_ import pyplot as plt
from ximpol.utils.logging_ import logger, startmsg, abort
from ximpol.evt.binning import xBinnedCountSpectrum
from ximpol.evt.binning import xBinnedMap
from ximpol.evt.binning import xBinnedModulationCube
from ximpol.evt.subselect import xEventSelect
from ximpol.evt.binning import xEventBinningMCUBE
import os,sys
from matplotlib import rc
rc('text', usetex=True)
file_map='casa_cmap.fits'
evt_file_path='casa.fits'
file_selected_path='casa_sel.fits'
file_selected_cube_path='casa_sel_mcube.fits'
outfile=None
for i,a in enumerate(sys.argv):
if '-rad' in a: rad = float(sys.argv[i+1])
elif '-dec' in a: dec = float(sys.argv[i+1])
elif '-ra' in a: ra = float(sys.argv[i+1])
elif '-o' in a: outfile = sys.argv[i+1]
pass
fig=xBinnedMap(file_map).plot(show=False,subplot=(1,2,1))#,figure=fig)
fig.show_circles(ra,dec,rad,lw=1)
rad*=60
evtSelect=xEventSelect(evt_file_path, ra=ra, dec=dec, rad=rad, outfile=file_selected_path,
emax=None, emin=None, mc=False, mcsrcid=[], phasemax=None, phasemin=None, phimax=None, phimin=None, tmax=None, tmin=None)
evtSelect.select()
evtBin=xEventBinningMCUBE(file_selected_path, ebins=1, outfile=file_selected_cube_path, evfile=file_selected_path,
emax=10.0,nypix=256, ebinfile=None, phasebins=50,ebinalg='LIN',
xref=None,phibins=75,nxpix=256,tbins=100,proj='TAN',tstart=None,tbinalg='LIN',algorithm='MCUBE',mc=False,binsz=2.5,yref=None,tbinfile=None,emin=1.0,tstop=None)
evtBin.bin_()
binModulation = xBinnedModulationCube(file_selected_cube_path)
binModulation.plot(show=False,xsubplot=1)
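# For each modulation-cube fit below, arrows are drawn from (ra, dec) along the
# directions angle +/- 10*angle_err (in both senses); the arrow length is the
# fitted visibility divided by 'scale'.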
for fit in binModulation.fit_results:
print fit
angle = fit.phase
angle_err = fit.phase_error
visibility = fit.visibility
scale=10.
dx1=visibility/scale*numpy.cos(angle+10*angle_err)
dy1=visibility/scale*numpy.sin(angle+10*angle_err)
dx2=visibility/scale*numpy.cos(angle-10*angle_err)
dy2=visibility/scale*numpy.sin(angle-10*angle_err)
print fit.phase,angle,dx1,dy1,dx2,dy2
fig.show_arrows(ra,dec,dx1,dy1,color='g',alpha=1,width=1)
fig.show_arrows(ra,dec,-dx1,-dy1,color='g',alpha=1,width=1)
fig.show_arrows(ra,dec,dx2,dy2,color='g',alpha=1,width=1)
fig.show_arrows(ra,dec,-dx2,-dy2,color='g',alpha=1,width=1)
pass
if outfile is not None:
    fig.save(outfile)
else:
    plt.show()
|
"""Pylons environment configuration"""
import os
from mako.lookup import TemplateLookup
from pylons.configuration import PylonsConfig
from pylons.error import handle_mako_error
from sqlalchemy.pool import NullPool
from sqlalchemy import engine_from_config, create_engine
try:
from MySQLdb.converters import conversions
except ImportError:
pass
import baruwa.lib.app_globals as app_globals
import baruwa.lib.helpers
from baruwa.lib import mq
from baruwa.config.routing import make_map
from baruwa.model import init_model
def load_environment(global_conf, app_conf):
"""Configure the Pylons environment via the ``pylons.config``
object
"""
config = PylonsConfig()
# Pylons paths
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
paths = dict(root=root,
controllers=os.path.join(root, 'controllers'),
static_files=os.path.join(root, 'public'),
templates=[os.path.join(root, 'templates')])
# Initialize config with the basic options
config.init_app(global_conf, app_conf, package='baruwa', paths=paths)
config['routes.map'] = make_map(config)
config['pylons.app_globals'] = app_globals.Globals(config)
config['pylons.h'] = baruwa.lib.helpers
# Setup cache object as early as possible
import pylons
pylons.cache._push_object(config['pylons.app_globals'].cache)
# Create the Mako TemplateLookup, with the default auto-escaping
config['pylons.app_globals'].mako_lookup = TemplateLookup(
directories=paths['templates'],
error_handler=handle_mako_error,
module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
input_encoding='utf-8', default_filters=['escape'],
imports=['from webhelpers.html import escape'])
# Setup the SQLAlchemy database engine
surl = config['sqlalchemy.url']
if surl.startswith('mysql'):
conv = conversions.copy()
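        # Field type 246 is MySQL's NEWDECIMAL; mapping it to float makes
        # DECIMAL columns come back as plain Python floats.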
conv[246] = float
engine = create_engine(surl, pool_recycle=1800, connect_args=dict(conv=conv))
else:
engine = engine_from_config(config, 'sqlalchemy.', poolclass=NullPool)
init_model(engine)
# CONFIGURATION OPTIONS HERE (note: all config options will override
# any Pylons config options)
return config
|
import sys
sys.setrecursionlimit(2000)
from fractions import *
from math import *
def row(n):
if n in d:
return d[n]
o = int(floor(sqrt(9+8*(n-1))-3)/4.0)+1
l = o*2
r = 4*o+1
q = 2*o*(o-1)+o-1
i = n-q-1
if i <= r/2: # top half
p = [Fraction(1)/(1<<i)]
for j in range(i):
p.append(p[-1]*(i-j)/(j+1))
s = map(lambda x: max(0, i-x), range(i))
d[n] = l, s, p
else: # bottom half
l, ps, pp = row(n-1)
pp = map(float, pp)
p = []
for k in range(len(pp)-1):
p.append((pp[k]+pp[k+1])/2)
p[0] += pp[0]/2
p[-1] += pp[-1]/2
s = [r-i]*(l-r+i)+range(1, r-i+1)[::-1]
d[n] = l, s, p
return d[n]
d = {}
for case in range(int(raw_input())):
n, x, y = map(int, raw_input().split())
l, s, p = row(n)
x = abs(x)
if y < -x + l-1: # inside heap
ans = 1.0
elif y > -x + l+1: # outside heap
ans = 0.0
else: # edge of heap
try:
ans = float(sum(p[:s[y]]))
except:
ans = 0.0
print "Case #%d: %s" % (case+1, ans)
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('description', models.TextField(blank=True)),
('image', models.ImageField(upload_to='Images/Post/')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('user', models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Topic',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('status', models.BooleanField(default=False)),
('description', models.TextField(blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('reply', models.TextField(blank=True)),
('owner', models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, related_name='owner', to=settings.AUTH_USER_MODEL)),
],
),
]
|
from requests import Request
from requests_oauthlib import OAuth1
from requests_oauthlib.oauth1_auth import SIGNATURE_TYPE_BODY
from .tool_base import ToolBase
from .launch_params import LAUNCH_PARAMS_REQUIRED
from .utils import parse_qs, InvalidLTIConfigError
class ToolOutbound(ToolBase):
def __init__(self, consumer_key, consumer_secret,
params=None, launch_url=None):
'''
Create new Outbound Tool.
See ToolConsumer and ContentItemResponse for examples
'''
# allow launch_url to be specified in launch_params for
# backwards compatibility
if launch_url is None:
if 'launch_url' not in params:
raise InvalidLTIConfigError('missing \'launch_url\' arg!')
else:
launch_url = params['launch_url']
del params['launch_url']
self.launch_url = launch_url
super(ToolOutbound, self).__init__(consumer_key, consumer_secret,
params=params)
def has_required_params(self):
return True
def generate_launch_request(self, **kwargs):
"""
        Returns an OAuth 1 "signed" requests.PreparedRequest instance.
"""
if not self.has_required_params():
raise InvalidLTIConfigError(
'Consumer\'s launch params missing one of '
+ str(LAUNCH_PARAMS_REQUIRED)
)
params = self.to_params()
r = Request('POST', self.launch_url, data=params).prepare()
sign = OAuth1(self.consumer_key, self.consumer_secret,
signature_type=SIGNATURE_TYPE_BODY, **kwargs)
return sign(r)
def generate_launch_data(self, **kwargs):
"""
Provided for backwards compatibility
"""
r = self.generate_launch_request(**kwargs)
return parse_qs(r.body.decode('utf-8'), keep_blank_values=True)
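# Rough usage sketch (key, secret and URL below are hypothetical):
#   tool = ToolOutbound('consumer_key', 'consumer_secret',
#                       params={'launch_url': 'https://lms.example.com/launch'})
#   prepared = tool.generate_launch_request()  # OAuth1-signed PreparedRequest
#   fields = tool.generate_launch_data()       # the signed POST parameters as a dict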
|
import os
def mkdir(data_dir):
if not os.path.exists(data_dir):
os.makedirs(data_dir)
|
from django.conf.urls import include
from django.urls import path
from integrations import views
app_name = 'integrations'
urlpatterns = [
path('', views.HomeView.as_view(), name="index"),
path('dhis/', include('integrations.dhis.urls')),
path('xapi/', include('integrations.xapi.urls')),
]
|
from flask.ext.restful import Resource
from models.audio.composer import ComposerSchema, Composer
from util import marshmallow_with
from util import inject_user
class ComposerIndexEndpoint(Resource):
@inject_user
@marshmallow_with(ComposerSchema, many=True)
def get(self):
return Composer.query.all()
|
import codecs
from database_access import *
import MySQLdb
import simplejson
if __name__ == '__main__':
insertSource = False
idDataSource = 56
print "Transforming SI drive data"
fname = "si_drive_1005_bycity.json"
with codecs.open(fname,"rb",encoding='utf-8') as f:
se_data = f.read()
json_data = simplejson.loads(se_data)
#organisation_type = "Non-profit or Social Enterprise"
db = MySQLdb.connect(host, username, password, database, charset='utf8')
cursor = db.cursor()
if insertSource:
insertDataSource = "Insert into DataSources (Name,Type,URL,DataIsOpen,RelatedToEU,AssociatedProject,DataDurationStart,DataDurationEnd,Theme,CountryCoverage,SocialInnovationDef,MainEntities,DataSource)" \
"VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(insertDataSource,("SI-drive","Database","https://mapping.si-drive.eu/","Open","Yes","SI-Drive","2014-01-01","2018-01-01","Social innovation","all, predominantly EU","EC,2013","Projects,Actors","search and case studies"))
db.commit()
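    # Overlap counters: how many incoming projects/URLs already exist in the
    # database (matched by project name or webpage); reported at the end of the run.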
project_overlap = 0
url_overlap = 0
for item in json_data:
city = item['city']
city_local = item['city_local']
country = item['country']
        region = item['region']
longitude = item['lon']
latitude = item['lat']
projects_in_city = item['projects']
for pro in projects_in_city:
newProject = True
newURL = True
projectname_en = pro['projectname_en']
if projectname_en!= None:
projectname_en = projectname_en.encode('utf-8').replace('"','').replace("'",'').replace('%','')
else:
projectname_en=pro['projectname_orig'].encode('utf-8')
print projectname_en
projectname_orig = pro['projectname_orig']
if projectname_orig==None:
projectname_orig = projectname_en
else:
projectname_orig = projectname_orig.encode('utf-8').replace("'",'').replace('"','')
existing_pro = "SELECT * from Projects where ProjectName like '%"+projectname_en+"%' or ProjectName like '%"+projectname_orig+"%'"
cursor.execute(existing_pro)
rows_affected = cursor.rowcount
if rows_affected != 0:
project_overlap = project_overlap + 1
                newProject = False
project_stage = pro['projectstage']
website = pro['website']
if website !=None:
matchingUrl = "SELECT * from Projects where ProjectWebpage like '" + website + "'"
cursor.execute(matchingUrl)
rows_affected_url = cursor.rowcount
website = website.encode('utf-8')
if rows_affected_url != 0:
url_overlap = url_overlap + 1
newURL = False
year = pro['year']
date = str(year) + "-01-01"
if newProject:
print(projectname_en+" "+date+ " "+str(website))
newProjectQuery = "Insert into Projects (ProjectName,DateStart,ProjectWebpage,FirstDataSource,DataSources_idDataSources) VALUES ('{0}','{1}','{2}','{3}',{4})".format(projectname_en,date,website,"SI-drive",idDataSource)
cursor.execute(newProjectQuery)
projectid = cursor.lastrowid
db.commit()
else:
                UpdateProjectQuery = "Update Projects set DateStart='"+date+"', ProjectWebpage='"+website+"',FirstDataSource='"+"SI-drive"+"',DataSources_idDataSources="+str(idDataSource)+" where ProjectName='"+projectname_en+"'"
cursor.execute(UpdateProjectQuery)
projectid = cursor.lastrowid
db.commit()
InsertLocation = "Insert into ProjectLocation (Type,City,Country,Longitude,Latitude,Projects_idProjects) VALUES ('{0}','{1}','{2}',{3},{4},{5})".format("Main",city.encode('utf-8').replace("'",""),country.encode('utf-8').replace("'",""),longitude,latitude,str(projectid))
cursor.execute(InsertLocation)
db.commit()
partner_main = pro['partners']['main']
if partner_main != {}:
main_partner_name = partner_main['name'].encode('utf-8').replace("'",'')
main_partner_sector = partner_main['sector']
if main_partner_sector!= None:
main_partner_sector = main_partner_sector.encode('utf-8')
main_partner_country = partner_main['country']
if main_partner_country != None:
main_partner_country = main_partner_country.encode('utf-8').replace("'", "")
PartnerExists = False
SelectPartner = "Select * from Actors where ActorName like '%"+main_partner_name+"%'"
cursor.execute(SelectPartner)
rows_affected_mainPartner = cursor.rowcount
if rows_affected_mainPartner>0:
row = cursor.fetchone()
parner_id = row[0]
else:
InsertParner = "Insert into Actors (ActorName,Type,SubType,SourceOriginallyObtained,DataSources_idDataSources)" \
" Values ('{0}','{1}','{2}','{3}',{4})".format(main_partner_name,"S",main_partner_sector,"SI-Drive",str(idDataSource))
cursor.execute(InsertParner)
db.commit()
parner_id = cursor.lastrowid
ParnerLocation = "Insert into ActorLocation (Type,Country, Actors_idActors) Values('{0}','{1}',{2})".format("Headquarters",main_partner_country,str(parner_id))
cursor.execute(ParnerLocation)
db.commit()
Connection = "Insert into Actors_has_Projects (Actors_idActors,Projects_idProjects,OrganisationRole) Values ({0},{1},'{2}')".format(parner_id,projectid,"Main partner")
cursor.execute(Connection)
db.commit()
other_partners = pro['partners']['others']
for o_partner in other_partners:
o_partner_name = o_partner['name'].encode('utf-8').replace("'","")
o_partner_sector = o_partner['sector']
o_partner_country = o_partner['country']
SelectPartner2 = "Select * from Actors where ActorName like '%" + o_partner_name + "%'"
cursor.execute(SelectPartner2)
rows_affected_oPartner = cursor.rowcount
if rows_affected_oPartner > 0:
row = cursor.fetchone()
parner_id = row[0]
else:
InsertParner = "Insert into Actors (ActorName,Type,SubType,SourceOriginallyObtained,DataSources_idDataSources)" \
" Values ('{0}','{1}','{2}','{3}',{4})".format(o_partner_name, "S", o_partner_sector, "SI-Drive", str(idDataSource))
cursor.execute(InsertParner)
db.commit()
parner_id = cursor.lastrowid
ParnerLocation = "Insert into ActorLocation (Type,Country, Actors_idActors) Values('{0}','{1}',{2})".format("Headquarters", o_partner_country, str(parner_id))
cursor.execute(ParnerLocation)
db.commit()
try:
Connection = "Insert into Actors_has_Projects (Actors_idActors,Projects_idProjects,OrganisationRole) Values ({0},{1},'{2}')".format(str(parner_id), str(projectid), "Other partner")
cursor.execute(Connection)
db.commit()
except Exception:
print "Existing pair"
print project_overlap
print url_overlap
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import keras
seed = 123
np.random.seed(seed) # for reproducibility
xl = pd.ExcelFile("data.xlsx")
df = xl.parse(xl.sheet_names[0])
df.head()
disciplinas = np.unique(df.disciplina)
disciplinas_dict = {}
for i,it in enumerate(disciplinas):
disciplinas_dict[i]=it
disciplinas_dict[it]=i
aluno_map_dict = { }
c = 0
for p in range(0,300,10):
aluno_map_dict[p] = c
aluno_map_dict[p+1] = c+1
c += 2
def converte_periodo_cod(cod_inicio,cod_fim):
def converte_periodo(ano_inicial,semestre_inicial,ano_final,semestre_final):
anos = ano_final-ano_inicial
return 2*anos + semestre_final-semestre_inicial
ano_inicial = int(str(cod_inicio)[0:4])
ano_final = int(str(cod_fim)[0:4])
semestre_inicial = int(str(cod_inicio)[4:])
semestre_final = int(str(cod_fim)[4:])
periodos_totais = converte_periodo(ano_inicial,semestre_inicial,ano_final,semestre_final)
print(ano_inicial,ano_final,semestre_inicial,semestre_final,periodos_totais)
return periodos_totais
converte_periodo_cod('20061','20061')
converte_periodo_cod('20061','20062')
converte_periodo_cod('20061','20071')
converte_periodo_cod('20061','20072')
converte_periodo_cod('20061','20081')
converte_periodo_cod('20061','20082')
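# The six calls above should return 0, 1, 2, 3, 4 and 5 elapsed semesters
# respectively (period codes are assumed to be in YYYYS form).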
def create_aluno_array(id_aluno,max_periodos = 8):
aluno = {}
aluno_df = df[df.aluno == id_aluno]
aluno_concluiu = aluno_df.concluiu.values[0]
aluno_ano_inicio = np.min(aluno_df.periodo.values)
aluno_periodo = [converte_periodo_cod(aluno_ano_inicio,p) for p in aluno_df.periodo.values]
aluno_periodo = np.array(aluno_periodo)
aluno_disciplinas = aluno_df.disciplina.values
aluno_notas = aluno_df.nota.values
aluno['aluno_concluiu'] = aluno_concluiu
print(aluno_concluiu)
print(aluno_disciplinas)
print(aluno_notas)
print(aluno_periodo)
aluno_dict = {}
for i in range(max_periodos):
ids = np.where(aluno_periodo == i)[0]
print(ids)
aluno_dict[i] = [[aluno_disciplinas[ii], aluno_notas[ii]] for ii in ids]
print(aluno_dict)
aluno['periodos'] = aluno_dict
def create_aluno_matrix(aluno_dict, disciplinas, disciplinas_dict, max_periodos=8):
aluno_matrix = np.zeros((max_periodos, len(disciplinas))) - 1
for per in range(max_periodos):
for it in aluno_dict['periodos'][per]:
aluno_matrix[per, disciplinas_dict[it[0]]] = it[1] # matrix recebe nota na posicao correta
return aluno_matrix
aluno_matrix = create_aluno_matrix(aluno, disciplinas, disciplinas_dict, max_periodos=8)
aluno['matrix'] = aluno_matrix
return aluno
alunos = np.unique(df.aluno)
len(alunos)
X = []
Y = []
for aluno in alunos:
aluno_dict = create_aluno_array(aluno)
X.append(aluno_dict['matrix'].ravel())
Y.append(aluno_dict['aluno_concluiu'])
X = np.array(X)
Y = np.array(Y)
plt.matshow(X)
print('numero alunos', Y.shape[0])
print('numero alunos aprovados', np.sum(Y))
from sklearn.decomposition import PCA
pca = PCA(n_components=200)
pca.fit(X)
print(pca.explained_variance_ratio_)
X = pca.transform(X)
plt.matshow(X)
from matplotlib.colors import ListedColormap
cm_bright = ListedColormap(['#FF0000', '#0000FF'])
plt.scatter(X[:,0],X[:,1], c=Y,cmap=cm_bright)
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import StratifiedKFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
from sklearn.model_selection import train_test_split
encoder = LabelEncoder()
encoder.fit(Y)
encoded_Y = encoder.transform(Y)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.33, random_state=seed)
def create_baseline():
# create model
model = Sequential()
model.add(Dense(50, input_dim=X.shape[1], kernel_initializer='normal', activation='sigmoid'))
model.add(Dropout(0.3))
model.add(Dense(50, input_dim=X.shape[1], kernel_initializer='normal', activation='sigmoid'))
model.add(Dropout(0.3))
model.add(Dense(1, kernel_initializer='normal', activation='sigmoid'))
# Compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
return model
estimator = KerasClassifier(build_fn=create_baseline, nb_epoch=10, verbose=1)
kfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)
results = cross_val_score(estimator, X, encoded_Y, cv=kfold)
print("Results: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
model = create_baseline()
model.summary()
class_weight = {0 : 1., 1: 10.}
model.fit(X_train,Y_train,validation_split=0.1,epochs=406,class_weight=class_weight)
yhat = model.predict(X_test)
plt.scatter(Y_test,yhat)
plt.plot(Y_test)
plt.plot(yhat)
from matplotlib.colors import ListedColormap
cm_bright = ListedColormap(['#FF0000', '#0000FF'])
plt.scatter(X_test[:,0],X_test[:,1], c=Y_test,cmap=cm_bright)
plt.scatter(X_test[:,0],X_test[:,1], s=5,c=yhat.ravel(),cmap=cm_bright)
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import RFECV
from sklearn.datasets import make_classification
X = X
y = Y
svc = SVC(kernel="linear")
rfecv = RFECV(estimator=svc, step=1, cv=StratifiedKFold(2),
scoring='accuracy',verbose=2)
rfecv.fit(X[:,0:25], y)
print("Optimal number of features : %d" % rfecv.n_features_)
plt.figure()
plt.xlabel("Number of features selected")
plt.ylabel("Cross validation score (nb of correct classifications)")
plt.plot(range(1, len(rfecv.grid_scores_) + 1), rfecv.grid_scores_)
plt.show()
|
from PyQt4 import QtCore
class Loader(QtCore.QObject):
progress = QtCore.pyqtSignal(int)
done = QtCore.pyqtSignal()
def __init__(self, extension):
super(Loader, self).__init__()
if not isinstance(extension, list):
raise TypeError
self.extension = extension
self.data = []
def load(self, file_name):
raise NotImplementedError("Must subclass me")
def length_data(self):
return len(self.data)
def _clear_data(self):
self.data = []
|
'''
SASSIE: Copyright (C) 2011 Joseph E. Curtis, Ph.D.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys
import numpy
import math
import sasmol.sasmol as sasmol
import sasmol.sasmath as sasmath
import sassie.simulate.constraints.constraints as constraints
import sassie.simulate.monomer_monte_carlo.dihedral_rotate as nrotate
from sassie.simulate.monomer_monte_carlo.dihedral_rotate import overlap
def overlap_check(m1,frame,cut,asegs,abasis,all_segment_basis_full_mask,interatom,interres):
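    # Returns a truthy flag as soon as any atomic overlap (closer than 'cut')
    # is found, first within each segment's basis atoms and then between every
    # pair of segments; otherwise the result of the last pairwise check is returned.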
#print 'ZHL overlap ',interatom
# Check for each individual segments
for i in range(len(asegs)):
mask_seg = all_segment_basis_full_mask[i]
#import pprint; pprint.pprint(mask_seg.tolist()); exit(0)
error,coor_tmp = m1.get_coor_using_mask(frame,mask_seg)
check = overlap.overlap(coor_tmp[0],float(cut))
if check:
return check
#print 'ZHL check ', check
# Check between segments
for i in range(len(asegs)-1):
mask_seg = all_segment_basis_full_mask[i]
error,coor_1 = m1.get_coor_using_mask(frame,mask_seg)
for j in range(i+1,len(asegs)):
mask_seg = all_segment_basis_full_mask[j]
error,coor_2 = m1.get_coor_using_mask(frame,mask_seg)
#atomlist=moloverlap(coor_1,coor_2,float(cut),interres)
#if (len(atomlist)):
# check = 1
check=overlap.moloverlap(coor_1[0],coor_2[0],float(cut),numpy.array(interres))
if check:
return check
'''
for i in range(len(asegs)):
tmask_seg = 'segname[i] == "'+asegs[i]+'" and name[i] == "'+abasis[i].strip()+'"'
error,mask_seg = m1.get_subset_mask(tmask_seg)
error,coor_tmp = m1.get_coor_using_mask(frame,mask_seg)
check = overlap.overlap(coor_tmp[0],float(cut))
#print 'ZHL check ', check
# Check between segments
for i in range(len(asegs)-1):
tmask_seg = 'segname[i] == "'+asegs[i]+'" and name[i] == "'+abasis[i].strip()+'"'
error,mask_seg = m1.get_subset_mask(tmask_seg)
error,coor_1 = m1.get_coor_using_mask(frame,mask_seg)
for j in range(i+1,len(asegs)):
tmask_seg = 'segname[i] == "'+asegs[j]+'" and name[i] == "'+abasis[j].strip()+'"'
error,mask_seg = m1.get_subset_mask(tmask_seg)
error,coor_2 = m1.get_coor_using_mask(frame,mask_seg)
#atomlist=moloverlap(coor_1,coor_2,float(cut),interres)
#if (len(atomlist)):
# check = 1
check=overlap.moloverlap(coor_1[0],coor_2[0],float(cut),numpy.array(interres))
'''
return check
def rotate(coor,m1,q0,th,an,cut,lowrg,highrg,re,taccepted,zflag,zval,cflag,dcdoutfile,indices,this_mask,basis_mask,sub_m2,align_mask,coor_sub_m1,com_sub_m1,mask_a_array,mask_b_array,distance_array,type_array,first_last_resid,molecule_type,segment_mask,segment_full_mask,all_segment_basis_full_mask,basis_full_mask,segment_mol,asegs,abasis,interatom,interres):
over=0 ; badrg=0 ; accepted=0
arg=0.0 ; lowestrg=re[5] ; hrg=re[6]
frame=0 ; badz=0 ; badc=0
check=0
minmax=[]
#print 'this segment_basis_mask = ',segment_basis_mask
#print 'this sum(segment_basis_mask) = ',numpy.sum(segment_basis_mask)
#print 'len(segment_basis_mask) = ',len(segment_basis_mask)
error,new_coor = segment_mol.get_coor_using_mask(frame,segment_mask)
segment_mol.setCoor(new_coor)
result = nrotate.rotate_dihedral(new_coor,segment_mol,frame,q0,th,an,indices,this_mask,first_last_resid,molecule_type)
#nrotate.rotate_dihedral(coor,segment_mol,frame,q0,th,an,indices,this_mask,first_last_resid,molecule_type)
#nrotate.rotate_dihedral(coor,m1,frame,q0,th,an,indices,this_mask,first_last_resid,molecule_type)
#error = segment_mol.set_coor_using_mask(segment_mol,frame,segment_basis_mask)
if(error != []):
print 'error = ',error
sys.exit()
#error,coor = m1.get_coor_using_mask(frame,segment_basis_mask)
thisrg = m1.calcrg(frame)
if(thisrg>hrg):
hrg=thisrg
if(thisrg<lowestrg):
lowestrg=thisrg
filename=''
'''
tmask = ''
for i in range(len(asegs)):
tmask+='segname[i] == "'+asegs[i]+'" and name[i] =="'+abasis[i].strip()+'"'
if i!=len(asegs)-1:
tmask+=' or '
error,basis_full_mask= m1.get_subset_mask(tmask)
'''
if(result == 0):
check = 1
if(check==0):
if(thisrg>lowrg and thisrg<highrg):
filename='winner'
#m1.center(frame)
#print 'sum(align_mask)= ',numpy.sum(align_mask)
'''
error,sub_m2.coor = m1.get_coor_using_mask(frame,align_mask)
sub_m2.setCoor(sub_m2.coor)
com_sub_m2 = sub_m2.calccom(0)
sub_m2.center(0)
coor_sub_m2 = sub_m2.coor[0]
'''
error,coor_tmp = segment_mol.get_coor_using_mask(frame,align_mask)
sub_m2.setCoor(coor_tmp)
com_sub_m2 = sub_m2.calccom(0)
sub_m2.center(0)
coor_sub_m2 = coor_tmp[0]
'''
print '\n'
print 'com_sub_m1 = ',com_sub_m1
print 'com_sub_m2 = ',com_sub_m2,'\n'
print 'first_coor: m1.coor()[0][0] = ',m1.coor()[0][0]
print 'last_coor: m1.coor()[0][-1] = ',m1.coor()[0][-1]
print '>>>>> CALLING ALIGN <<<<<<'
'''
segment_mol.align(frame,coor_sub_m2,com_sub_m2,coor_sub_m1,com_sub_m1)
error = m1.set_coor_using_mask(segment_mol,frame,segment_full_mask)
'''
print 'first_coor: m1.coor()[0][0] = ',m1.coor()[0][0]
print 'last_coor: m1.coor()[0][-1] = ',m1.coor()[0][-1]
'''
check = overlap_check(m1,frame,cut,asegs,abasis,all_segment_basis_full_mask,interatom,interres)
if(check == 0 and zflag==1):
error,sub_m2.coor = m1.get_coor_using_mask(frame,basis_full_mask)
sub_m2.setCoor(sub_m2.coor)
zee=sub_m2.coor[0,:,2]
zcheck=numpy.alltrue(numpy.greater_equal(zee,zval))
if(zcheck==0):
check=1
badz=1
if(check == 0 and cflag == 1):
check = constraints.check_constraints(m1,mask_a_array,mask_b_array,distance_array,type_array)
if(check == 1):
badc=1
if(check == 0):
m1.write_dcd_step(dcdoutfile,0,taccepted+1)
minmax = m1.calcminmax_frame(0)
accepted=1
arg=thisrg
else:
over=1
else:
badrg=1
else:
over=1
re[0]=accepted ; re[1]=over ; re[2]=badrg ; re[3]=thisrg ; re[4]=arg ; re[5]=lowestrg ; re[6]=hrg
re[7]=badz ; re[8]=badc ; re[9]=minmax
return filename
#align_segment(a,md1,st,tseg,abasis[tsegn],seglow[tsegn],seghigh[tsegn],zrefa1[tsegn],rcm1[tsegn],segatm_first[tsegn],segatm_last[tsegn])
#check=nmer_filter.noverlap(a,md1,st,cut,npairs,interpairs,tseg,tsegn,asegs,abasis)
|
"""**Vector Module**
.. tip:: Provides functionality for manipulation of vector data. The data can
be in-memory or file based.
Resources for understanding vector data formats and the OGR library:
Treatise on vector data model: http://www.esri.com/news/arcuser/0401/topo.html
OGR C++ reference: http://www.gdal.org/ogr
"""
__author__ = 'Ole Nielsen <ole.moller.nielsen@gmail.com>'
__revision__ = '$Format:%H$'
__date__ = '01/11/2010'
__license__ = "GPL"
__copyright__ = 'Copyright 2012, Australia Indonesia Facility for '
__copyright__ += 'Disaster Reduction'
import os
import sys
import numpy
import logging
QGIS_IS_AVAILABLE = True
try:
from qgis.core import QgsVectorLayer, QgsVectorFileWriter
except ImportError:
QGIS_IS_AVAILABLE = False
import copy as copy_module
from osgeo import ogr, gdal
from safe.common.exceptions import ReadLayerError, WriteLayerError
from safe.common.exceptions import GetDataError, InaSAFEError
from layer import Layer
from projection import Projection
from geometry import Polygon
from utilities import verify
from utilities import DRIVER_MAP, TYPE_MAP
from utilities import read_keywords
from utilities import write_keywords
from utilities import get_geometry_type
from utilities import is_sequence
from utilities import array_to_line
from utilities import calculate_polygon_centroid
from utilities import points_along_line
from utilities import geometry_type_to_string
from utilities import get_ring_data, get_polygon_data
from utilities import rings_equal
from utilities import safe_to_qgis_layer
from safe.common.utilities import unique_filename
from safe.utilities.unicode import get_string
from safe.utilities.i18n import tr
LOGGER = logging.getLogger('InaSAFE')
_pseudo_inf = float(99999999)
class Vector(Layer):
"""InaSAFE representation of vector data.
Args:
* data: Can be either
* A filename of a vector file format known to GDAL.
* List of dictionaries of field names and attribute values
associated with each point coordinate.
* A QgsVectorLayer associated with geometry and data.
* None
* projection: Geospatial reference in WKT format.
Only used if geometry is provided as a numeric array,
if None, WGS84 geographic is assumed.
* geometry: A list of either point coordinates or polygons/lines
(see note below).
* geometry_type: Desired interpretation of geometry.
Valid options are 'point', 'line', 'polygon' or
the ogr types: 1, 2, 3.
If None, a geometry_type will be inferred from the data.
* name: Optional name for layer. If None, basename is used.
* keywords: Optional dictionary with keywords that describe the
layer. When the layer is stored, these keywords will
be written into an associated file with extension
'.keywords'.
Keywords can for example be used to display text about the
layer in an application.
* style_info: Dictionary with information about how this layer
should be styled. See impact_functions/styles.py
for examples.
* sublayer: str Optional sublayer (band name in the case of raster,
table name in case of sqlite etc.) to load. Only applicable
to those dataformats supporting more than one layer in the
data file.
Returns:
* InaSAFE vector layer instance
Raises:
* TypeError, ReadLayerError, WriteLayerError, InaSAFEError,
GetDataError
Notes:
If data is a filename, all other arguments are ignored
as they will be inferred from the file.
The geometry type will be inferred from the dimensions of geometry.
If each entry is one set of coordinates the type will be
ogr.wkbPoint,
if it is an array of coordinates the type will be ogr.wkbPolygon.
To cast array entries as lines set geometry_type explicitly to
'line' in the call to Vector. Otherwise, they will default to
polygons.
        Each polygon or line feature takes the form of an Nx2 array
representing vertices where line segments are joined.
If polygons have holes, their geometry must be passed in as a
list of polygon geometry objects
(as defined in module geometry.py)
"""
def __init__(
self,
data=None,
projection=None,
geometry=None,
geometry_type=None,
name=None,
keywords=None,
style_info=None,
sublayer=None):
"""Initialise object with either geometry or filename
NOTE: Doc strings in constructor are not harvested and exposed in
online documentation. Hence the details are specified in the
class docstring.
"""
# Invoke common layer constructor
Layer.__init__(
self,
name=name,
projection=projection,
keywords=keywords,
style_info=style_info,
sublayer=sublayer)
# Input checks
if data is None and geometry is None:
# Instantiate empty object
self.geometry_type = None
self.extent = [0, 0, 0, 0]
return
if isinstance(data, basestring):
self.read_from_file(data)
# check QGIS_IS_AVAILABLE to avoid QgsVectorLayer undefined error
elif QGIS_IS_AVAILABLE and isinstance(data, QgsVectorLayer):
self.read_from_qgis_native(data)
else:
# Assume that data is provided as sequences provided as
# arguments to the Vector constructor
# with extra keyword arguments supplying metadata
msg = 'Geometry must be specified'
verify(geometry is not None, msg)
msg = 'Geometry must be a sequence'
verify(is_sequence(geometry), msg)
if len(geometry) > 0 and isinstance(geometry[0], Polygon):
self.geometry_type = ogr.wkbPolygon
self.geometry = geometry
else:
self.geometry_type = get_geometry_type(geometry, geometry_type)
if self.is_polygon_data:
# Convert to objects if input is a list of simple arrays
self.geometry = [Polygon(outer_ring=x) for x in geometry]
else:
# Convert to list if input is an array
if isinstance(geometry, numpy.ndarray):
self.geometry = geometry.tolist()
else:
self.geometry = geometry
if data is None:
# Generate default attribute as OGR will do that anyway
# when writing
data = []
for i in range(len(geometry)):
data.append({'ID': i})
# Check data
self.data = data
if data is not None:
msg = 'Data must be a sequence'
verify(is_sequence(data), msg)
            msg = ('The number of entries in geometry (%s) and data (%s) '
'must be the same' % (len(geometry), len(data)))
verify(len(geometry) == len(data), msg)
# Establish extent
if len(geometry) == 0:
# Degenerate layer
self.extent = [0, 0, 0, 0]
return
# Compute bounding box for each geometry type
minx = miny = sys.maxint
maxx = maxy = -minx
if self.is_point_data:
A = numpy.array(self.get_geometry())
minx = min(A[:, 0])
maxx = max(A[:, 0])
miny = min(A[:, 1])
maxy = max(A[:, 1])
elif self.is_line_data:
for g in self.get_geometry():
A = numpy.array(g)
minx = min(minx, min(A[:, 0]))
maxx = max(maxx, max(A[:, 0]))
miny = min(miny, min(A[:, 1]))
maxy = max(maxy, max(A[:, 1]))
elif self.is_polygon_data:
# Do outer ring only
for g in self.get_geometry(as_geometry_objects=False):
A = numpy.array(g)
minx = min(minx, min(A[:, 0]))
maxx = max(maxx, max(A[:, 0]))
miny = min(miny, min(A[:, 1]))
maxy = max(maxy, max(A[:, 1]))
self.extent = [minx, maxx, miny, maxy]
def __str__(self):
"""Render as name, number of features, geometry type
"""
g_type_str = geometry_type_to_string(self.geometry_type)
return ('Vector data set: %s, %i features, geometry type '
'%s (%s)' % (self.name,
len(self),
str(self.geometry_type),
g_type_str))
def __len__(self):
"""Size of vector layer defined as number of features
"""
if hasattr(self, 'geometry') and self.geometry is not None:
return len(self.geometry)
else:
return 0
def __eq__(self, other, rtol=1.0e-5, atol=1.0e-8):
"""Override '==' to allow comparison with other vector objecs
Args:
* other: Vector instance to compare to
* rtol, atol: Relative and absolute tolerance.
See numpy.allclose for details
Note:
The algorithm will try to falsify every aspect of equality for the
two layers such as data, geometry, projection, keywords etc.
Only if none of them can be falsified will it return True.
"""
# Check type
if not isinstance(other, Vector):
msg = ('Vector instance cannot be compared to %s'
' as its type is %s ' % (str(other), type(other)))
raise TypeError(msg)
# Check keywords
if self.keywords != other.keywords:
return False
# Check number of features match
if len(self) != len(other):
return False
# Check projection
if self.projection != other.projection:
return False
# Check geometry type
if self.geometry_type != other.geometry_type:
return False
# Check geometry
if self.is_polygon_data:
geom0 = self.get_geometry(as_geometry_objects=True)
geom1 = other.get_geometry(as_geometry_objects=True)
else:
geom0 = self.get_geometry()
geom1 = other.get_geometry()
if len(geom0) != len(geom1):
return False
if self.is_point_data:
if not numpy.allclose(geom0, geom1,
rtol=rtol, atol=atol):
return False
elif self.is_line_data:
# Check vertices of each line
for i in range(len(geom0)):
if not rings_equal(geom0[i], geom1[i], rtol=rtol, atol=atol):
return False
elif self.is_polygon_data:
# Check vertices of outer and inner rings
for i in range(len(geom0)):
x = geom0[i].outer_ring
y = geom1[i].outer_ring
if not rings_equal(x, y, rtol=rtol, atol=atol):
return False
for j, ring0 in enumerate(geom0[i].inner_rings):
ring1 = geom1[i].inner_rings[j]
if not rings_equal(ring0, ring1, rtol=rtol, atol=atol):
return False
else:
msg = ('== not implemented for geometry type: %s'
% self.geometry_type)
# noinspection PyExceptionInherit
raise InaSAFEError(msg)
# Check keys for attribute values
x = self.get_data()
y = other.get_data()
if x is None:
if y is not None:
return False
else:
for key in x[0]:
for i in range(len(y)):
if key not in y[i]:
return False
for key in y[0]:
for i in range(len(x)):
if key not in x[i]:
return False
# Check attribute values
for i, a in enumerate(x):
for key in a:
X = a[key]
Y = y[i][key]
if X != Y:
# Not obviously equal, try some special cases
try:
# Try numerical comparison with tolerances
res = numpy.allclose(X, Y,
rtol=rtol, atol=atol)
except (NotImplementedError, TypeError):
# E.g. '' (Not implemented)
# or None or {} (Type error)
pass
else:
if not res:
return False
# Finally cast as booleans.
# This will e.g. match False with None or ''
if not (bool(X) is bool(Y)):
return False
# Vector layers are identical up to the specified tolerance
return True
# noinspection PyExceptionInherit
def read_from_file(self, filename):
"""Read and unpack vector data.
It is assumed that the file contains only one layer with the
pertinent features. Further it is assumed for the moment that
all geometries are points.
* A feature is a geometry and a set of attributes.
* A geometry refers to location and can be point, line, polygon or
combinations thereof.
        * The attributes are obtained through GetField()
The full OGR architecture is documented at
* http://www.gdal.org/ogr/ogr_arch.html
* http://www.gdal.org/ogr/ogr_apitut.html
Examples are at
* danieljlewis.org/files/2010/09/basicpythonmap.pdf
* http://invisibleroads.com/tutorials/gdal-shapefile-points-save.html
* http://www.packtpub.com/article/geospatial-data-python-geometry
Limitation of the Shapefile are documented in
http://resources.esri.com/help/9.3/ArcGISDesktop/com/Gp_ToolRef/
geoprocessing_tool_reference/
geoprocessing_considerations_for_shapefile_output.htm
:param filename: a fully qualified location to the file
:type filename: str
:raises: ReadLayerError
"""
base_name = os.path.splitext(filename)[0]
# Look for any keywords
self.keywords = read_keywords(base_name + '.keywords')
# FIXME (Ole): Should also look for style file to populate style_info
# Determine name
if 'title' in self.keywords:
title = self.keywords['title']
# Lookup internationalised title if available
title = tr(title)
vector_name = title
else:
# Use base_name without leading directories as name
vector_name = os.path.split(base_name)[-1]
if self.name is None:
self.name = vector_name
self.filename = filename
self.geometry_type = None # In case there are no features
fid = ogr.Open(filename)
if fid is None:
msg = 'Could not open %s' % filename
raise ReadLayerError(msg)
# Assume that file contains all data in one layer
msg = 'Only one vector layer currently allowed'
if fid.GetLayerCount() > 1 and self.sublayer is None:
            msg = ('WARNING: Number of layers in %s is %i. '
'Only the first layer will currently be '
'used. Specify sublayer when creating '
'the Vector if you wish to use a different layer.'
% (filename, fid.GetLayerCount()))
LOGGER.warn(msg)
# Why do we raise an exception if it is only a warning? TS
raise ReadLayerError(msg)
if self.sublayer is not None:
layer = fid.GetLayerByName(self.sublayer)
else:
layer = fid.GetLayerByIndex(0)
# Get spatial extent
self.extent = layer.GetExtent()
# Get projection
p = layer.GetSpatialRef()
self.projection = Projection(p)
layer.ResetReading()
# Extract coordinates and attributes for all features
geometry = []
data = []
# Use feature iterator
for feature in layer:
# Record coordinates ordered as Longitude, Latitude
G = feature.GetGeometryRef()
if G is None:
msg = ('Geometry was None in filename %s ' % filename)
raise ReadLayerError(msg)
else:
self.geometry_type = G.GetGeometryType()
if self.is_point_data:
geometry.append((G.GetX(), G.GetY()))
elif self.is_line_data:
ring = get_ring_data(G)
geometry.append(ring)
elif self.is_polygon_data:
polygon = get_polygon_data(G)
geometry.append(polygon)
elif self.is_multi_polygon_data:
try:
G = ogr.ForceToPolygon(G)
except:
msg = ('Got geometry type Multipolygon (%s) for '
'filename %s and could not convert it to '
'singlepart. However, you can use QGIS '
'functionality to convert multipart vector '
'data to singlepart (Vector -> Geometry Tools '
                               '-> Multipart to Singleparts) and use the '
'resulting dataset.'
% (ogr.wkbMultiPolygon, filename))
raise ReadLayerError(msg)
else:
# Read polygon data as single part
self.geometry_type = ogr.wkbPolygon
polygon = get_polygon_data(G)
geometry.append(polygon)
else:
msg = ('Only point, line and polygon geometries are '
'supported. '
'Geometry type in filename %s '
'was %s.' % (filename,
self.geometry_type))
raise ReadLayerError(msg)
# Record attributes by name
number_of_fields = feature.GetFieldCount()
fields = {}
for j in range(number_of_fields):
name = feature.GetFieldDefnRef(j).GetName()
# FIXME (Ole): Ascertain the type of each field?
# We need to cast each appropriately?
# This is issue #66
# (https://github.com/AIFDR/riab/issues/66)
# feature_type = feature.GetFieldDefnRef(j).GetType()
fields[name] = feature.GetField(j)
                # There is a NaN problem on Windows: NaN values must be
                # converted to the sentinel _pseudo_inf when writing. When
                # InaSAFE reads the file, the sentinel is converted back so
                # that NaN in InaSAFE is a numpy.nan.
                # See https://github.com/AIFDR/inasafe/issues/269
                # for more information
if fields[name] == _pseudo_inf:
fields[name] = float('nan')
# print 'Field', name, feature_type, j, fields[name]
data.append(fields)
# Store geometry coordinates as a compact numeric array
self.geometry = geometry
self.data = data
def read_from_qgis_native(self, qgis_layer):
"""Read and unpack vector data from qgis layer QgsVectorLayer.
A stub is used now:
save all data in a file,
then call safe.read_from_file
Raises:
            * TypeError if qgis is not available
            * IOError if the temporary file cannot be written
"""
# FIXME (DK): this branch isn't covered by test
if not QGIS_IS_AVAILABLE:
msg = ('Used data is QgsVectorLayer instance, '
'but QGIS is not available.')
raise TypeError(msg)
base_name = unique_filename()
file_name = base_name + '.shp'
error = QgsVectorFileWriter.writeAsVectorFormat(
qgis_layer,
file_name,
"UTF8",
qgis_layer.crs(),
"ESRI Shapefile"
)
if error != QgsVectorFileWriter.NoError:
# FIXME (DK): this branch isn't covered by test
            msg = ('Cannot save data to temporary file.')
raise IOError(msg)
# Write keywords if any
write_keywords(self.keywords, base_name + '.keywords')
self.read_from_file(file_name)
def as_qgis_native(self):
"""Return vector layer data as qgis QgsVectorLayer.
A stub is used now:
save all data in a file,
then create QgsVectorLayer from the file.
Raises:
            * TypeError if qgis is not available
"""
# FIXME (DK): this branch isn't covered by test
if not QGIS_IS_AVAILABLE:
msg = ('Tried to convert layer to QgsVectorLayer instance, '
'but QGIS is not available.')
raise TypeError(msg)
# FIXME (DK): ? move code from safe_to_qgis_layer to this method
# and call layer.as_qgis_native from safe_to_qgis_layer ?
qgis_layer = safe_to_qgis_layer(self)
return qgis_layer
def write_to_file(self, filename, sublayer=None):
"""Save vector data to file
:param filename: filename with extension .shp or .gml
:type filename: str
:param sublayer: Optional parameter for writing a sublayer. Ignored
unless we are writing to an sqlite file.
:type sublayer: str
:raises: WriteLayerError
Note:
Shp limitation, if attribute names are longer than 10
characters they will be truncated. This is due to limitations in
the shp file driver and has to be done here since gdal v1.7 onwards
has changed its handling of this issue:
http://www.gdal.org/ogr/drv_shapefile.html
**For this reason we recommend writing to spatialite.**
"""
# Check file format
base_name, extension = os.path.splitext(filename)
msg = ('Invalid file type for file %s. Only extensions '
'sqlite, shp or gml allowed.' % filename)
verify(extension in ['.sqlite', '.shp', '.gml'], msg)
driver = DRIVER_MAP[extension]
        # FIXME (Ole): Temporary flagging of GML issue (ticket #18)
if extension == '.gml':
            msg = ('OGR GML driver does not store geospatial reference. '
'This format is disabled for the time being. See '
'https://github.com/AIFDR/riab/issues/18')
raise WriteLayerError(msg)
# Derive layer_name from filename (excluding preceding dirs)
if sublayer is None or extension == '.shp':
layer_name = os.path.split(base_name)[-1]
else:
layer_name = sublayer
# Get vector data
if self.is_polygon_data:
geometry = self.get_geometry(as_geometry_objects=True)
else:
geometry = self.get_geometry()
data = self.get_data()
N = len(geometry)
# Clear any previous file of this name (ogr does not overwrite)
try:
os.remove(filename)
except OSError:
pass
# Create new file with one layer
drv = ogr.GetDriverByName(driver)
if drv is None:
msg = 'OGR driver %s not available' % driver
raise WriteLayerError(msg)
ds = drv.CreateDataSource(get_string(filename))
if ds is None:
msg = 'Creation of output file %s failed' % filename
raise WriteLayerError(msg)
lyr = ds.CreateLayer(get_string(layer_name),
self.projection.spatial_reference,
self.geometry_type)
if lyr is None:
msg = 'Could not create layer %s' % layer_name
raise WriteLayerError(msg)
# Define attributes if any
store_attributes = False
fields = []
if data is not None:
if len(data) > 0:
try:
fields = data[0].keys()
except:
msg = ('Input parameter "attributes" was specified '
'but it does not contain list of dictionaries '
'with field information as expected. The first '
'element is %s' % data[0])
raise WriteLayerError(msg)
else:
# Establish OGR types for each element
ogr_types = {}
for name in fields:
att = data[0][name]
py_type = type(att)
msg = ('Unknown type for storing vector '
'data: %s, %s' % (name, str(py_type)[1:-1]))
verify(py_type in TYPE_MAP, msg)
ogr_types[name] = TYPE_MAP[py_type]
else:
# msg = ('Input parameter "data" was specified '
# 'but appears to be empty')
# raise InaSAFEError(msg)
pass
# Create attribute fields in layer
store_attributes = True
for name in fields:
# Rizky : OGR can't handle unicode field name, thus we
# convert it to ASCII
fd = ogr.FieldDefn(str(name), ogr_types[name])
# FIXME (Ole): Trying to address issue #16
# But it doesn't work and
# somehow changes the values of MMI in test
# width = max(128, len(name))
# print name, width
# fd.SetWidth(width)
# Silent handling of warnings like
# Warning 6: Normalized/laundered field name:
# 'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
gdal.PushErrorHandler('CPLQuietErrorHandler')
if lyr.CreateField(fd) != 0:
msg = 'Could not create field %s' % name
raise WriteLayerError(msg)
# Restore error handler
gdal.PopErrorHandler()
# Store geometry
geom = ogr.Geometry(self.geometry_type)
layer_def = lyr.GetLayerDefn()
for i in range(N):
# Create new feature instance
feature = ogr.Feature(layer_def)
# Store geometry and check
if self.is_point_data:
x = float(geometry[i][0])
y = float(geometry[i][1])
geom.SetPoint_2D(0, x, y)
elif self.is_line_data:
geom = array_to_line(
geometry[i], geometry_type=ogr.wkbLineString)
elif self.is_polygon_data:
# Create polygon geometry
geom = ogr.Geometry(ogr.wkbPolygon)
# Add outer ring
linear_ring = array_to_line(
geometry[i].outer_ring, geometry_type=ogr.wkbLinearRing)
geom.AddGeometry(linear_ring)
# Add inner rings if any
for A in geometry[i].inner_rings:
geom.AddGeometry(array_to_line(
A, geometry_type=ogr.wkbLinearRing))
else:
msg = 'Geometry type %s not implemented' % self.geometry_type
raise WriteLayerError(msg)
feature.SetGeometry(geom)
G = feature.GetGeometryRef()
if G is None:
msg = 'Could not create GeometryRef for file %s' % filename
raise WriteLayerError(msg)
# Store attributes
if store_attributes:
for j, name in enumerate(fields):
actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()
val = data[i][name]
if isinstance(val, numpy.ndarray):
# A singleton of type <type 'numpy.ndarray'> works
# for gdal version 1.6 but fails for version 1.8
# in SetField with error: NotImplementedError:
# Wrong number of arguments for overloaded function
val = float(val)
elif val is None:
val = ''
                    # There is a NaN problem on Windows: NaN values must be
                    # converted to the sentinel _pseudo_inf before writing.
                    # When InaSAFE reads the file, the sentinel is converted
                    # back so that NaN in InaSAFE is a numpy.nan.
                    # See https://github.com/AIFDR/inasafe/issues/269
                    # for more information
if val != val:
val = _pseudo_inf
feature.SetField(actual_field_name, val)
# Save this feature
if lyr.CreateFeature(feature) != 0:
msg = 'Failed to create feature %i in file %s' % (i, filename)
raise WriteLayerError(msg)
feature.Destroy()
# Write keywords if any
write_keywords(self.keywords, base_name + '.keywords')
# FIXME (Ole): Maybe store style_info
def copy(self):
"""Return copy of vector layer
This copy will be equal to self in the sense defined by __eq__
"""
if self.is_polygon_data:
geometry = self.get_geometry(copy=True, as_geometry_objects=True)
else:
geometry = self.get_geometry(copy=True)
return Vector(data=self.get_data(copy=True),
geometry=geometry,
projection=self.get_projection(),
keywords=self.get_keywords())
def get_attribute_names(self):
"""Get available attribute names.
These are the ones that can be used with get_data
"""
return self.data[0].keys()
def get_data(self, attribute=None, index=None, copy=False):
"""Get vector attributes.
:param attribute: Specify an attribute name of which to return data.
:type attribute: str
:param index: Indicates a specific value on which to call the
attribute. Ignored if no attribute is set.
:type index: int
:param copy: Indicate whether to return a pointer to the data,
or a copy of.
:type copy: bool
:raises: GetDataError
:returns: A list where each entry is a dictionary of attributes for one
feature.
        :rtype: list
Note:
Data is returned as a list where each entry is a dictionary of
attributes for one feature. Entries in get_geometry() and
get_data() are related as 1-to-1
If optional argument attribute is specified and a valid name,
then the list of values for that attribute is returned.
        If optional argument index is specified, then that value will
be returned. Any value of index is ignored if attribute is None.
If optional argument copy is True and all attributes are requested,
a copy will be returned. Otherwise a pointer to the data is
returned.
"""
if hasattr(self, 'data'):
if attribute is None:
if copy:
return copy_module.deepcopy(self.data)
else:
return self.data
else:
msg = ('Specified attribute %s does not exist in '
'vector layer %s. Valid names are %s'
'' % (attribute, self, self.data[0].keys()))
verify(attribute in self.data[0], msg)
if index is None:
# Return all values for specified attribute
return [x[attribute] for x in self.data]
else:
# Return value for specified attribute and index
msg = ('Specified index must be either None or '
'an integer. I got %s' % index)
verify(isinstance(index, int), msg)
msg = ('Specified index must lie within the bounds '
'of vector layer %s which is [%i, %i]'
'' % (self, 0, len(self) - 1))
verify(0 <= index < len(self), msg)
return self.data[index][attribute]
else:
msg = 'Vector data instance does not have any attributes'
raise GetDataError(msg)
def get_geometry_type(self):
"""Return geometry type for vector layer
"""
return self.geometry_type
def get_geometry_name(self):
"""Return geometry name for vector layer
"""
return geometry_type_to_string(self.geometry_type)
def get_geometry(self, copy=False, as_geometry_objects=False):
"""Return geometry for vector layer.
Depending on the feature type, geometry is::
            geometry type     output type
            point             list of 2x1 arrays of longitudes and latitudes
            line              list of arrays of coordinates
            polygon           list of arrays of coordinates
Optional boolean argument as_geometry_objects will change the return
value to a list of geometry objects rather than a list of arrays.
This currently only applies to polygon geometries
:param copy: Set to return a copy of the data rather than a pointer.
:type copy: bool
:param as_geometry_objects: Set to return geometry objects rather
than a list of arrays.
:type as_geometry_objects: bool
:raises: InaSAFEError
:returns: A list of geometry objects or arrays.
:rtype: list
"""
if copy:
geometry = copy_module.deepcopy(self.geometry)
else:
geometry = self.geometry
if self.is_polygon_data:
if not as_geometry_objects:
geometry = [p.outer_ring for p in geometry]
else:
if as_geometry_objects:
msg = ('Argument as_geometry_objects can currently '
'be True only for polygon data')
raise InaSAFEError(msg)
return geometry
def get_bounding_box(self):
"""Get bounding box coordinates for vector layer.
Format is [West, South, East, North]
"""
e = self.extent
return [e[0], # West
e[2], # South
e[1], # East
e[3]] # North
def get_extrema(self, attribute=None):
"""Get min and max values from specified attribute
:param attribute: Specify an attribute name of which to return data.
:type attribute: str
:raises: InaSAFEError
:returns: minimum and maximum attribute values
        :rtype: tuple
"""
if attribute is None:
msg = ('Valid attribute name must be specified in get_extrema '
'for vector layers. I got None.')
raise InaSAFEError(msg)
x = self.get_data(attribute)
return min(x), max(x)
def get_topN(self, attribute, N=10):
"""Get top N features
:param attribute: The name of attribute where values are sought
:type attribute: str
:param N: How many
:type N: int
:returns: New vector layer with selected features
"""
# Input checks
        msg = ('Specified attribute must be a string. '
'I got %s' % (type(attribute)))
verify(isinstance(attribute, basestring), msg)
msg = 'Specified attribute was empty'
verify(attribute != '', msg)
msg = 'N must be a positive number. I got %i' % N
verify(N > 0, msg)
# Create list of values for specified attribute
values = self.get_data(attribute)
# Sort and select using Schwarzian transform
A = zip(values, self.data, self.geometry)
A.sort()
# Pick top N and unpack
data, geometry = zip(*A[-N:])[1:]
# Create new Vector instance and return
return Vector(data=data,
projection=self.get_projection(),
geometry=geometry,
keywords=self.get_keywords())
@property
def is_point_data(self):
""" Check whether this is a point
:return: Test result
:rtype: bool
"""
return (
self.is_vector and (
self.geometry_type == ogr.wkbPoint or
self.geometry_type == ogr.wkbPoint25D))
@property
def is_line_data(self):
""" Check whether this is a line
:return: Test result
:rtype: bool
"""
return (
self.is_vector and (
self.geometry_type == ogr.wkbLineString or
self.geometry_type == ogr.wkbLineString25D))
@property
def is_polygon_data(self):
""" Check whether this is a polygon
:return: Test result
:rtype: bool
"""
return (
self.is_vector and (
self.geometry_type == ogr.wkbPolygon or
self.geometry_type == ogr.wkbPolygon25D))
@property
def is_multi_polygon_data(self):
""" Check whether this is multipolygon
:return: Test result
:rtype: bool
"""
return self.is_vector and self.geometry_type == ogr.wkbMultiPolygon
def convert_line_to_points(V, delta):
"""Convert line vector data to point vector data
:param V: Vector layer with line data
:type V: Vector
:param delta: Incremental step to find the points
:type delta: float
:returns: Vector layer with point data and the same attributes as V
:rtype: Vector
"""
msg = 'Input data %s must be line vector data' % V
verify(V.is_line_data, msg)
geometry = V.get_geometry()
data = V.get_data()
N = len(V)
# Calculate centroids for each polygon
points = []
new_data = []
for i in range(N):
c = points_along_line(geometry[i], delta)
# We need to create a data entry for each point.
# FIXME (Ole): What on earth is this?
# pylint: disable=W0621
new_data.extend([data[i] for _ in c])
# pylint: enable=W0621
points.extend(c)
# Create new point vector layer with same attributes and return
V = Vector(data=new_data,
projection=V.get_projection(),
geometry=points,
name='%s_point_data' % V.get_name(),
keywords=V.get_keywords())
return V
def convert_polygons_to_centroids(V):
"""Convert polygon vector data to point vector data
:param V: Vector layer with polygon data
:type V: Vector
:returns: Vector layer with point data and the same attributes as V
:rtype: Vector
"""
msg = 'Input data %s must be polygon vector data' % V
verify(V.is_polygon_data, msg)
geometry = V.get_geometry()
N = len(V)
# Calculate points for each polygon
centroids = []
for i in range(N):
c = calculate_polygon_centroid(geometry[i])
centroids.append(c)
# Create new point vector layer with same attributes and return
V = Vector(data=V.get_data(),
projection=V.get_projection(),
geometry=centroids,
name='%s_centroid_data' % V.get_name(),
keywords=V.get_keywords())
return V
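def _demo_nan_roundtrip(value):
    """Minimal sketch (not part of the original API) of the NaN handling above.

    read_from_file and write_to_file swap NaN for the module sentinel
    _pseudo_inf because NaN cannot be stored reliably on Windows. NaN is the
    only value for which val != val, which is the test both methods use.
    """
    stored = _pseudo_inf if value != value else value              # write direction
    restored = float('nan') if stored == _pseudo_inf else stored   # read direction
    return stored, restored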
|
""" The GuiStateDirector generates the state object from the models.
The GuiStateDirector gets the information from the table and state model and generates state objects. It delegates
the main part of the work to a StateDirectorISIS object.
"""
from __future__ import (absolute_import, division, print_function)
import copy
from sans.state.data import get_data_builder
from sans.user_file.state_director import StateDirectorISIS
class GuiStateDirector(object):
def __init__(self, table_model, state_gui_model, facility):
super(GuiStateDirector, self).__init__()
self._table_model = table_model
self._state_gui_model = state_gui_model
self._facility = facility
def create_state(self, row):
# 1. Get the data settings, such as sample_scatter, etc... and create the data state.
table_index_model = self._table_model.get_table_entry(row)
data_builder = get_data_builder(self._facility)
self._set_data_entry(data_builder.set_sample_scatter, table_index_model.sample_scatter)
self._set_data_entry(data_builder.set_sample_transmission, table_index_model.sample_transmission)
self._set_data_entry(data_builder.set_sample_direct, table_index_model.sample_direct)
self._set_data_entry(data_builder.set_can_scatter, table_index_model.can_scatter)
self._set_data_entry(data_builder.set_can_transmission, table_index_model.can_transmission)
self._set_data_entry(data_builder.set_can_direct, table_index_model.can_direct)
data = data_builder.build()
# 2. Add elements from the options column
state_gui_model = copy.deepcopy(self._state_gui_model)
options_column_model = table_index_model.options_column_model
self._apply_column_options_to_state(options_column_model, state_gui_model)
# 3. Add other columns
output_name = table_index_model.output_name
if output_name:
state_gui_model.output_name = output_name
# 4. Create the rest of the state based on the builder.
user_file_state_director = StateDirectorISIS(data)
settings = copy.deepcopy(state_gui_model.settings)
user_file_state_director.add_state_settings(settings)
return user_file_state_director.construct()
@staticmethod
def _set_data_entry(func, entry):
if entry:
func(entry)
@staticmethod
def _apply_column_options_to_state(options_column_model, state_gui_model):
"""
Apply the column setting of the user to the state for that particular row.
Note if you are extending the options functionality, then you will have to add the property here.
:param options_column_model: the option column model with the row specific settings
:param state_gui_model: the state gui model
"""
options = options_column_model.get_options()
# Here we apply the correction to the state depending on the settings in the options. This is not very nice,
# but currently it is not clear how to solve this differently.
if "WavelengthMin" in options.keys():
state_gui_model.wavelength_min = options["WavelengthMin"]
if "WavelengthMax" in options.keys():
state_gui_model.wavelength_max = options["WavelengthMax"]
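if __name__ == "__main__":
    # Minimal sketch of how row options flow into the GUI state model. The two
    # stub classes below are illustrative stand-ins, not part of the sans
    # framework; only _apply_column_options_to_state from above is exercised.
    class _OptionsStub(object):
        def get_options(self):
            return {"WavelengthMin": 1.5, "WavelengthMax": 12.0}

    class _StateStub(object):
        wavelength_min = None
        wavelength_max = None

    _state = _StateStub()
    GuiStateDirector._apply_column_options_to_state(_OptionsStub(), _state)
    print(_state.wavelength_min, _state.wavelength_max)  # 1.5 12.0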
|
import sys
from ctypes import *
def LoadLibrary():
try:
libcrypto = cdll.LoadLibrary('libcrypto.so')
except:
try:
libcrypto = cdll.LoadLibrary('libeay32.dll')
except:
raise Exception("Couldn't load OpenSSL lib ...")
return libcrypto
class ECC_key:
def __init__(self, pubkey_x = 0, pubkey_y = 0, privkey = 0):
self.curve = 734 # == NID_sect571r1
self.SIZE_ECC_KEY = 72 # With NID_sect571r1
self.libcrypto = LoadLibrary()
if pubkey_x != 0 and pubkey_y != 0:
if self.Check_EC_Key(privkey, pubkey_x, pubkey_y) < 0:
self.pubkey_x = 0
self.pubkey_y = 0
self.privkey = 0
                raise Exception("[OpenSSL] EC key pair check FAIL ...")
else:
self.pubkey_x = pubkey_x
self.pubkey_y = pubkey_y
self.privkey = privkey
else:
self.privkey, self.pubkey_x, self.pubkey_y = self.Get_EC_PairKey()
def Get_EC_PairKey(self):
try:
pub_key_x = self.libcrypto.BN_new()
pub_key_y = self.libcrypto.BN_new()
while 1:
key = self.libcrypto.EC_KEY_new_by_curve_name(self.curve)
if key == 0:
raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...")
if (self.libcrypto.EC_KEY_generate_key(key)) == 0:
raise Exception("[OpenSSL] EC_KEY_generate_key FAIL ...")
if (self.libcrypto.EC_KEY_check_key(key)) == 0:
raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...")
priv_key = self.libcrypto.EC_KEY_get0_private_key(key)
group = self.libcrypto.EC_KEY_get0_group(key)
pub_key = self.libcrypto.EC_KEY_get0_public_key(key)
if (self.libcrypto.EC_POINT_get_affine_coordinates_GFp(group, pub_key, pub_key_x, pub_key_y, 0)) == 0:
raise Exception("[OpenSSL] EC_POINT_get_affine_coordinates_GFp FAIL ...")
privkey = malloc(0, self.SIZE_ECC_KEY)
pubkeyx = malloc(0, self.SIZE_ECC_KEY)
pubkeyy = malloc(0, self.SIZE_ECC_KEY)
self.libcrypto.BN_bn2bin(priv_key,privkey)
privkey = privkey.raw
self.libcrypto.BN_bn2bin(pub_key_x,pubkeyx)
pubkeyx = pubkeyx.raw
self.libcrypto.BN_bn2bin(pub_key_y,pubkeyy)
pubkeyy = pubkeyy.raw
try:
self.Check_EC_Key(privkey, pubkeyx, pubkeyy)
break
except:
self.libcrypto.EC_KEY_free(key)
pass
return privkey, pubkeyx, pubkeyy
finally:
self.libcrypto.EC_KEY_free(key)
self.libcrypto.BN_free(pub_key_x)
self.libcrypto.BN_free(pub_key_y)
def Get_EC_Key(self, pubkey_x, pubkey_y):
try:
ecdh_keybuffer = malloc(0, 32)
other_key = self.libcrypto.EC_KEY_new_by_curve_name(self.curve)
if other_key == 0:
raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...")
other_pub_key_x = self.libcrypto.BN_bin2bn(pubkey_x, self.SIZE_ECC_KEY, 0)
other_pub_key_y = self.libcrypto.BN_bin2bn(pubkey_y, self.SIZE_ECC_KEY, 0)
other_group = self.libcrypto.EC_KEY_get0_group(other_key)
other_pub_key = self.libcrypto.EC_POINT_new(other_group)
if (self.libcrypto.EC_POINT_set_affine_coordinates_GFp(other_group, other_pub_key, other_pub_key_x, other_pub_key_y, 0)) == 0:
raise Exception("[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...")
if (self.libcrypto.EC_KEY_set_public_key(other_key, other_pub_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...")
if (self.libcrypto.EC_KEY_check_key(other_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...")
own_key = self.libcrypto.EC_KEY_new_by_curve_name(self.curve)
if own_key == 0:
raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...")
own_priv_key = self.libcrypto.BN_bin2bn(self.privkey, self.SIZE_ECC_KEY, 0)
if (self.libcrypto.EC_KEY_set_private_key(own_key, own_priv_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...")
# For 64bits
self.libcrypto.ECDH_set_method.argtypes = [c_void_p, c_void_p]
self.libcrypto.ECDH_OpenSSL.restype = c_void_p
#
self.libcrypto.ECDH_set_method(own_key, self.libcrypto.ECDH_OpenSSL())
ecdh_keylen = self.libcrypto.ECDH_compute_key(ecdh_keybuffer, 32, other_pub_key, own_key, 0)
if ecdh_keylen != 32:
raise Exception("[OpenSSL] ECDH keylen FAIL ...")
return ecdh_keybuffer.raw
finally:
self.libcrypto.EC_KEY_free(other_key)
self.libcrypto.BN_free(other_pub_key_x)
self.libcrypto.BN_free(other_pub_key_y)
self.libcrypto.EC_POINT_free(other_pub_key)
self.libcrypto.EC_KEY_free(own_key)
self.libcrypto.BN_free(own_priv_key)
def Check_EC_Key(self, privkey, pubkey_x, pubkey_y):
try:
key = self.libcrypto.EC_KEY_new_by_curve_name(self.curve)
if key == 0:
raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...")
if privkey != 0:
priv_key = self.libcrypto.BN_bin2bn(privkey, self.SIZE_ECC_KEY, 0)
pub_key_x = self.libcrypto.BN_bin2bn(pubkey_x, self.SIZE_ECC_KEY, 0)
pub_key_y = self.libcrypto.BN_bin2bn(pubkey_y, self.SIZE_ECC_KEY, 0)
if privkey != 0:
if (self.libcrypto.EC_KEY_set_private_key(key, priv_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...")
group = self.libcrypto.EC_KEY_get0_group(key)
pub_key = self.libcrypto.EC_POINT_new(group)
if (self.libcrypto.EC_POINT_set_affine_coordinates_GFp(group, pub_key, pub_key_x, pub_key_y, 0)) == 0:
raise Exception("[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...")
if (self.libcrypto.EC_KEY_set_public_key(key, pub_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...")
if (self.libcrypto.EC_KEY_check_key(key)) == 0:
raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...")
return 0
finally:
self.libcrypto.EC_KEY_free(key)
self.libcrypto.BN_free(pub_key_x)
self.libcrypto.BN_free(pub_key_y)
self.libcrypto.EC_POINT_free(pub_key)
if privkey != 0: self.libcrypto.BN_free(priv_key)
def Sign(self, inputb):
try:
size = len(inputb)
buff = malloc(inputb, size)
digest = malloc(0, 64)
md_ctx = self.libcrypto.EVP_MD_CTX_create()
dgst_len = pointer(c_int(0))
siglen = pointer(c_int(0))
sig = malloc(0, 151)
key = self.libcrypto.EC_KEY_new_by_curve_name(self.curve)
if key == 0:
raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...")
priv_key = self.libcrypto.BN_bin2bn(self.privkey, self.SIZE_ECC_KEY, 0)
pub_key_x = self.libcrypto.BN_bin2bn(self.pubkey_x, self.SIZE_ECC_KEY, 0)
pub_key_y = self.libcrypto.BN_bin2bn(self.pubkey_y, self.SIZE_ECC_KEY, 0)
if (self.libcrypto.EC_KEY_set_private_key(key, priv_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...")
group = self.libcrypto.EC_KEY_get0_group(key)
pub_key = self.libcrypto.EC_POINT_new(group)
if (self.libcrypto.EC_POINT_set_affine_coordinates_GFp(group, pub_key, pub_key_x, pub_key_y, 0)) == 0:
raise Exception("[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...")
if (self.libcrypto.EC_KEY_set_public_key(key, pub_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...")
if (self.libcrypto.EC_KEY_check_key(key)) == 0:
raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...")
self.libcrypto.EVP_MD_CTX_init(md_ctx)
# For 64bits
self.libcrypto.EVP_DigestInit.argtypes = [c_void_p, c_void_p]
self.libcrypto.EVP_ecdsa.restype = c_void_p
#
self.libcrypto.EVP_DigestInit(md_ctx, self.libcrypto.EVP_ecdsa())
if (self.libcrypto.EVP_DigestUpdate(md_ctx, buff, size)) == 0:
raise Exception("[OpenSSL] EVP_DigestUpdate FAIL ...")
self.libcrypto.EVP_DigestFinal(md_ctx, digest, dgst_len)
self.libcrypto.ECDSA_sign(0, digest, dgst_len.contents, sig, siglen, key)
if (self.libcrypto.ECDSA_verify(0, digest, dgst_len.contents, sig, siglen.contents, key)) != 1:
raise Exception("[OpenSSL] ECDSA_verify FAIL ...")
return sig.raw
finally:
self.libcrypto.EC_KEY_free(key)
self.libcrypto.BN_free(pub_key_x)
self.libcrypto.BN_free(pub_key_y)
self.libcrypto.BN_free(priv_key)
self.libcrypto.EC_POINT_free(pub_key)
self.libcrypto.EVP_MD_CTX_destroy(md_ctx)
def Check_sign(self, sig, inputb):
try:
bsig = malloc(sig, len(sig))
binputb = malloc(inputb, len(inputb))
digest = malloc(0, 64)
dgst_len = pointer(c_int(0))
md_ctx = self.libcrypto.EVP_MD_CTX_create()
key = self.libcrypto.EC_KEY_new_by_curve_name(self.curve)
if key == 0:
raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...")
pub_key_x = self.libcrypto.BN_bin2bn(self.pubkey_x, self.SIZE_ECC_KEY, 0)
pub_key_y = self.libcrypto.BN_bin2bn(self.pubkey_y, self.SIZE_ECC_KEY, 0)
group = self.libcrypto.EC_KEY_get0_group(key)
pub_key = self.libcrypto.EC_POINT_new(group)
if (self.libcrypto.EC_POINT_set_affine_coordinates_GFp(group, pub_key, pub_key_x, pub_key_y, 0)) == 0:
raise Exception("[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...")
if (self.libcrypto.EC_KEY_set_public_key(key, pub_key)) == 0:
raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...")
if (self.libcrypto.EC_KEY_check_key(key)) == 0:
raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...")
self.libcrypto.EVP_MD_CTX_init(md_ctx)
# For 64bits
self.libcrypto.EVP_DigestInit.argtypes = [c_void_p, c_void_p]
self.libcrypto.EVP_ecdsa.restype = c_void_p
#
self.libcrypto.EVP_DigestInit(md_ctx, self.libcrypto.EVP_ecdsa())
if (self.libcrypto.EVP_DigestUpdate(md_ctx, binputb, len(inputb))) == 0:
raise Exception("[OpenSSL] EVP_DigestUpdate FAIL ...")
self.libcrypto.EVP_DigestFinal(md_ctx, digest, dgst_len)
ret = self.libcrypto.ECDSA_verify(0, digest, dgst_len.contents, bsig, len(sig), key)
if ret == -1:
return False # Fail to Check
else :
if ret == 0:
return False # Bad signature !
else:
return True # Good
return False
finally:
self.libcrypto.EC_KEY_free(key)
self.libcrypto.BN_free(pub_key_x)
self.libcrypto.BN_free(pub_key_y)
self.libcrypto.EC_POINT_free(pub_key)
self.libcrypto.EVP_MD_CTX_destroy(md_ctx)
def rand(size):
libcrypto = LoadLibrary()
buffer = malloc(0, size)
libcrypto.RAND_bytes(buffer, size)
return buffer.raw
def malloc(data, size):
if data != 0:
buffer = create_string_buffer(data, size)
else:
buffer = create_string_buffer(size)
return buffer
class aes:
def __init__(self, key, iv, do): # do == 1 => Encrypt; do == 0 => Decrypt
self.libcrypto = LoadLibrary()
self.ctx = self.libcrypto.EVP_CIPHER_CTX_new()
if do == 1 or do == 0:
k = malloc(key, len(key))
IV = malloc(iv, len(iv))
# For 64bits
self.libcrypto.EVP_CipherInit_ex.argtypes = [ c_void_p, c_void_p, c_void_p, c_void_p, c_void_p, c_void_p]
self.libcrypto.EVP_aes_256_cfb.restype = c_void_p
#
self.libcrypto.EVP_CipherInit_ex(self.ctx, self.libcrypto.EVP_aes_256_cfb(), 0, k, IV, do)
else:
raise Exception("RTFM ...")
def ciphering(self, input):
i = c_int(len(input))
buffer = malloc(0, len(input)+16)
inp = malloc(input,len(input))
if (self.libcrypto.EVP_CipherUpdate(self.ctx, byref(buffer), byref(i), inp, len(input))) == 0:
raise Exception("[OpenSSL] EVP_CipherUpdate FAIL ...")
y = i.value
i.value = 0
if (self.libcrypto.EVP_CipherFinal_ex(self.ctx, byref(buffer,y), byref(i))) == 0:
raise Exception("[OpenSSL] EVP_CipherFinal_ex FAIL ...")
return buffer.raw[0:i.value+y]
def __del__(self):
self.libcrypto.EVP_CIPHER_CTX_cleanup(self.ctx)
self.libcrypto.EVP_CIPHER_CTX_free(self.ctx)
def Hmac(k, m):
key = malloc(k, len(k))
d = malloc(m, len(m))
md = malloc(0, 64)
i = pointer(c_int(0))
libcrypto = LoadLibrary()
# For 64bits
libcrypto.HMAC.argtypes = [ c_void_p, c_void_p, c_int, c_void_p, c_int, c_void_p]
libcrypto.EVP_sha512.restype = c_void_p
#
libcrypto.HMAC(libcrypto.EVP_sha512(), key, len(k), d, len(m), md, i)
return md.raw
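def demo_symmetric():
    # Minimal sketch (not part of the original module) combining the aes and
    # Hmac helpers above: encrypt and decrypt a short message with a random
    # key/IV and compute an HMAC-SHA512 tag. Assumes libcrypto is available.
    key = rand(32)            # 32-byte key for the AES-256-CFB cipher above
    iv = rand(16)             # 16-byte IV
    message = b"hello openssl"
    ciphertext = aes(key, iv, 1).ciphering(message)
    plaintext = aes(key, iv, 0).ciphering(ciphertext)
    assert plaintext == message
    mac = Hmac(key, message)  # 64-byte tag from EVP_sha512
    return ciphertext, mac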
def test():
from binascii import hexlify
from base64 import b64encode, b64decode
print("Generate ECC pair key for Alice and Bob ...\n")
alice = ECC_key()
bob = ECC_key()
print("Alice :")
print("Public key X : %s" % b64encode(alice.pubkey_x).decode())
print("Public key Y : %s" % b64encode(alice.pubkey_y).decode())
print("Private key: %s" % b64encode(alice.privkey).decode())
print("\nBob :")
print("Public key X : %s" % b64encode(bob.pubkey_x).decode())
print("Public key Y : %s" % b64encode(bob.pubkey_y).decode())
print("Private key: %s" % b64encode(bob.privkey).decode())
alice.Check_EC_Key(0, bob.pubkey_x, bob.pubkey_y)
key = alice.Get_EC_Key(bob.pubkey_x, bob.pubkey_y)
key2 = bob.Get_EC_Key(alice.pubkey_x, alice.pubkey_y)
if key != key2:
print("Keys are !=, error !")
sys.exit(1)
print("\nECDH Key : %s" % b64encode(key).decode())
inputb = raw_input('\nInput to Sign : ')
sig = alice.Sign(inputb)
print("\nECDSA Signature : %s" % b64encode(sig).decode())
if ECC_key(alice.pubkey_x, alice.pubkey_y, 0).Check_sign(sig, inputb) is False:
print("Fail to check sign !")
else:
print("Sign Check !")
sys.exit(0)
if __name__ == "__main__":
test()
|
"""
Created on Sun Sep 15 00:37:11 2013
@author: mapologo
Program Assignment for Coursera's Linear and Integer Programming
"""
import numpy as np
def get_values(line, num_type="int"):
"""(str) -> list
Return a list of numbers of type num_type from a line of str values
separated with spaces
>>> get_values("3 4\n")
[3, 4]
>>> get_values("1 -1 0 -1\n")
[1, -1, 0, -1]
>>> get_values("-1.0 3.0 -1.0 -2.0 \n", "float")
[-1.0, 3.0, -1.0, -2.0]
"""
convert = {"int": int, "float": float, "complex": complex}
return [convert[num_type](item) for item in line.split()]
def check_array(array, shape, dtype):
"""(array, tuple, dtype) -> boolean
Check if array is of given shape and dtype.
"""
return isinstance(array, np.ndarray) and array.shape == shape and array.dtype == dtype
class SimplexPivoting():
"""
Simplex pivoting dictionary
m: number of constraints
n: number of variables
B: array of basic indices (m integers)
N: array of non-basic indices (n integers)
b: array of rhs (values of basic variables)
a: matrix of constraints coefficients (m x n)
c: array of objective function coeffcients
z: objective function value
num_iter: number of completed iterations
enter_var: index of entering variable
enter: position of enter_var in N
leave_var: index of leaving variable
leave: position of leave_var in B
    unbounded: True if there is no leaving variable
    optimal: True if there is no entering variable
"""
def check(self):
"""(SimplexPivoting) -> bool
Check Pivot Dictionary parameters.
todo: check dimensions
"""
print(isinstance(self.m, int))
print(isinstance(self.n, int))
print(check_array(self.B, (self.m, ), int))
print(check_array(self.N, (self.n, ), int))
print(check_array(self.b, (self.m, ), float))
print(check_array(self.A, (self.m, self.n), float))
print(isinstance(self.z, float))
print(check_array(self.c, (self.n, ), float))
def read_dfile(self, fdict):
"""(SimplexPivoting, text) -> NoneType
Get dictionary parameters from a fdict text file with the
following format:
[Line 1] m n
[Line 2] B1 B2 ... Bm [list of basic indices m integers]
[Line 3] N1 N2 ... Nn [list of non-basic indices n integers]
[Line 4] b1 .. bm (m floating point numbers)
[Line 5] a11 ... a1n (first row of A matrix)
....
[Line m+4] am1 ... amn (mth row of A matrix)
[Line m+5] z0 c1 .. cn (objective coefficients
(n+1 floating point numbers))
"""
fd = open(fdict, "r")
self.m, self.n = get_values(fd.readline())
self.B = np.array(get_values(fd.readline()))
self.N = np.array(get_values(fd.readline()))
self.b = np.array(get_values(fd.readline(), "float"))
self.A = np.array(get_values(fd.readline(), "float"))
for i in range(self.m - 1):
self.A = np.vstack((self.A, get_values(fd.readline(), "float")))
Z = np.array(get_values(fd.readline(), "float"))
self.z = Z[0]
self.c = Z[1:]
self.unbounded = False
self.optimal = False
fd.close()
def __repr__(self):
"""(SimplexPivoting) -> str
String representation of Simplex Pivoting
"""
s = 'm: {} n: {}\n'.format(self.m, self.n)
s = s + 'B:\n{}\n'.format(self.B)
s = s + 'N:\n{}\n'.format(self.N)
s = s + 'b:\n{}\n'.format(self.b)
s = s + 'A:\n{}\n'.format(self.A)
s = s + 'z:\n{}\n'.format(self.z)
s = s + 'c:\n{}\n'.format(self.c)
return s
def __entering(self):
"""(SimplexPivoting) -> int
Selects and returns the entering variable and set "enter",
its position in N array.
Use Bland's rule to prevent cycling: Choose the
lowest-numbered nonbasic with a positive c coeff.
"""
try:
self.enter_var = self.N[self.c > 0].min()
self.enter = np.where(self.N == self.enter_var)[0][0]
except ValueError:
self.optimal = True
return False
return True
def __leaving(self):
"""(SimplexPivoting) -> int
After entering is executed, selects and returns the leaving
variable and set "leave", its position in B array.
Use Bland's rule to prevent cycling: If the minimum ratio is
shared by several rows, choose the lowest-numbered one of
them.
"""
enter_col = -self.A[:, self.enter]
enter_col = np.ma.masked_array(enter_col, enter_col <= 0)
ratios = self.b / enter_col
if ratios.count():
min_ratios = np.ma.filled(ratios == ratios.min(), False)
# if masked when ratios.min() == 0, ratios == ratios.min()
# return all True
self.leave_var = self.B[min_ratios].min()
self.leave = np.where(self.B == self.leave_var)[0][0]
return True
else:
self.unbounded = True
return False
def pivoting(self):
"""(SimplexPivoting) -> float
Makes one pivoting.
        todo: properly report UNBOUNDED and OPTIMAL as exceptions
"""
if not self.__entering():
return "OPTIMAL"
if not self.__leaving():
return "UNBOUNDED"
pivot = -self.A[self.leave, self.enter]
self.A[self.leave, self.enter] = -1.0
self.b[self.leave] = self.b[self.leave] / pivot
self.A[self.leave, :] = self.A[self.leave, :] / pivot
idxB = np.arange(self.m)
for i in idxB[idxB != self.leave]:
pivot = self.A[i, self.enter]
self.A[i, self.enter] = 0.0
self.A[i, :] = self.A[self.leave, :] * pivot + self.A[i, :]
self.b[i] = self.b[self.leave] * pivot + self.b[i]
pivot = self.c[self.enter]
self.c[self.enter] = 0.0
self.c = self.A[self.leave, :] * pivot + self.c
self.z = self.b[self.leave] * pivot + self.z
# After pivoting exchange entering and leaving variables in
# N and B
self.B[self.leave] = self.enter_var
self.N[self.enter] = self.leave_var
return "STEP"
def output4step_one(self, filename=""):
"""(SimplexPivoting) -> str
Return the output for "Program the Pivot: Step 1"
        If a filename is given, the output is also written to that file.
"""
if not self.unbounded:
out_str = "{}\n{}\n{}".format(self.enter_var,
self.leave_var,
self.z)
else:
out_str = "UNBOUNDED"
if filename:
out = open(filename, "w")
out.write(out_str)
out.close()
return out_str
def iterate(self):
"""(SimplexPivoting) -> str
Iterates over a feasible dictionary
"""
self.num_iter = 0
while True:
state = self.pivoting()
if state == "OPTIMAL" or state == "UNBOUNDED":
break
self.num_iter += 1
return state
def output4step_two(self, filename=""):
"""(SimplexPivoting) -> str
Return the output for "Program the Pivot: Step 2"
        If a filename is given, the output is also written to that file.
"""
if not self.unbounded:
out_str = "{}\n{}".format(self.z,
self.num_iter)
else:
out_str = "UNBOUNDED"
if filename:
out = open(filename, "w")
out.write(out_str)
out.close()
return out_str
def initialize(self):
"""(SimplexPivoting) -> str
Iterates over a non-feasible dictionary
"""
self.num_iter = 0
while True:
state = self.pivoting()
if state == "OPTIMAL" or state == "UNBOUNDED":
break
self.num_iter += 1
return state
def __add_variable(self, index):
"""
"""
pass
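if __name__ == "__main__":
    # A small worked example (not part of the assignment data). The dictionary
    #     x3 = 4 - x1
    #     x4 = 2 - x2
    #     z  = 0 + x1 + x2
    # is written in the read_dfile format documented above. Two Bland pivots
    # (x1 enters / x3 leaves, then x2 enters / x4 leaves) reach the optimum
    # z = 6 in 2 iterations.
    import tempfile
    demo_dict = ("2 2\n"
                 "3 4\n"
                 "1 2\n"
                 "4 2\n"
                 "-1 0\n"
                 "0 -1\n"
                 "0 1 1\n")
    with tempfile.NamedTemporaryFile("w", suffix=".dict", delete=False) as fd:
        fd.write(demo_dict)
        demo_path = fd.name
    sp = SimplexPivoting()
    sp.read_dfile(demo_path)
    state = sp.iterate()
    print("{} z={} iterations={}".format(state, sp.z, sp.num_iter))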
|
'''
Created on 18 nov. 2016
@author: Jordi Marsal
Fast module exponentiation
Two classes to choose from; exp2 performs better for large exponents
'''
class exprap:
def __init__(self, base, exponent, modul, debug=False):
self.base=base
self.exponent=exponent
self.modul=modul
self.debug=debug
self.c = self.doExpRap(base, exponent, modul)
self.printC()
def doExpRap(self, base, exponent, modul):
c1=1
for _ in range(exponent):
c1 = (c1*base) % modul
return c1
def printC(self):
print "##########################"
print (" %i^%i mod %i = %i" % (self.base,self.exponent,self.modul,self.c))
print "##########################"
class exp2:
def __init__(self, base, exponent, modul,prin=True, debug=False):# Base, Exponent, Modul, boolPrint, boolDebug
self.base = base
self.exponent = exponent
self.e = [x for x in bin(exponent)[2:]]
if debug: print self.e
self.modul=modul
self.debug=debug
self.prin=prin
self.c = self.doExp2(base, exponent, modul)
if prin:self.printC()
def doExp2(self, base, exponent, modul):
exp = self.e[::-1]
res=1
if self.debug: print "res ini:"+str(res),"."
for i in exp:
if self.debug: print "i:"+ i
if int(i)==1:
res = (res * base) % modul
if self.debug:
print "base:"+str(base)
print "res:"+str(res),"."
base = base * base
return res
def printC(self):
print "##########################"
print (" %i^%i mod %i = %i" % (self.base,self.exponent,self.modul,self.c))
print "##########################"
def getC(self):
return self.c
e1=exprap(4,13,497)
e2=exp2(4,13,497)
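# Cross-check (not part of the original script): Python's built-in
# three-argument pow() computes the same modular exponentiation, so both
# results above should equal pow(4, 13, 497).
assert e1.c == pow(4, 13, 497)
assert e2.getC() == pow(4, 13, 497)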
|
import sys
from PyKDE4.kdeui import *
from PyKDE4.kio import KFileDialog
from PyKDE4.kdecore import i18n, KAutostart
from PyQt4.QtGui import *
from PyQt4.QtCore import SIGNAL, Qt
from ..configmanager import *
from .. import iomediator, interface, model
from .dialogs import GlobalHotkeyDialog
from . import generalsettings, specialhotkeysettings, enginesettings
class GeneralSettings(QWidget, generalsettings.Ui_Form):
def __init__(self, parent):
QWidget.__init__(self, parent)
generalsettings.Ui_Form.__init__(self)
self.setupUi(self)
self.promptToSaveCheckbox.setChecked(ConfigManager.SETTINGS[PROMPT_TO_SAVE])
self.showTrayCheckbox.setChecked(ConfigManager.SETTINGS[SHOW_TRAY_ICON])
#self.allowKbNavCheckbox.setChecked(ConfigManager.SETTINGS[MENU_TAKES_FOCUS])
self.allowKbNavCheckbox.setVisible(False)
self.sortByUsageCheckbox.setChecked(ConfigManager.SETTINGS[SORT_BY_USAGE_COUNT])
self.enableUndoCheckbox.setChecked(ConfigManager.SETTINGS[UNDO_USING_BACKSPACE])
        self.triggerItemByInitial.setChecked(ConfigManager.SETTINGS[TRIGGER_BY_INITIAL])
def save(self):
ConfigManager.SETTINGS[PROMPT_TO_SAVE] = self.promptToSaveCheckbox.isChecked()
ConfigManager.SETTINGS[SHOW_TRAY_ICON] = self.showTrayCheckbox.isChecked()
#ConfigManager.SETTINGS[MENU_TAKES_FOCUS] = self.allowKbNavCheckbox.isChecked()
ConfigManager.SETTINGS[SORT_BY_USAGE_COUNT] = self.sortByUsageCheckbox.isChecked()
ConfigManager.SETTINGS[UNDO_USING_BACKSPACE] = self.enableUndoCheckbox.isChecked()
ConfigManager.SETTINGS[TRIGGER_BY_INITIAL] = self.triggerItemByInitial.isChecked()
class SpecialHotkeySettings(QWidget, specialhotkeysettings.Ui_Form):
KEY_MAP = GlobalHotkeyDialog.KEY_MAP
REVERSE_KEY_MAP = GlobalHotkeyDialog.REVERSE_KEY_MAP
def __init__(self, parent, configManager):
QWidget.__init__(self, parent)
specialhotkeysettings.Ui_Form.__init__(self)
self.setupUi(self)
self.configManager = configManager
self.showConfigDlg = GlobalHotkeyDialog(parent)
self.toggleMonitorDlg = GlobalHotkeyDialog(parent)
self.useConfigHotkey = self.__loadHotkey(configManager.configHotkey, self.configKeyLabel,
self.showConfigDlg, self.clearConfigButton)
self.useServiceHotkey = self.__loadHotkey(configManager.toggleServiceHotkey, self.monitorKeyLabel,
self.toggleMonitorDlg, self.clearMonitorButton)
def __loadHotkey(self, item, label, dialog, clearButton):
dialog.load(item)
if item.enabled:
# key = str(item.hotKey.encode("utf-8"))
key = item.hotKey
label.setText(item.get_hotkey_string(key, item.modifiers))
clearButton.setEnabled(True)
return True
else:
label.setText(i18n("(None configured)"))
clearButton.setEnabled(False)
return False
def save(self):
configHotkey = self.configManager.configHotkey
toggleHotkey = self.configManager.toggleServiceHotkey
if configHotkey.enabled:
self.configManager.app.hotkey_removed(configHotkey)
configHotkey.enabled = self.useConfigHotkey
if self.useConfigHotkey:
self.showConfigDlg.save(configHotkey)
self.configManager.app.hotkey_created(configHotkey)
if toggleHotkey.enabled:
self.configManager.app.hotkey_removed(toggleHotkey)
toggleHotkey.enabled = self.useServiceHotkey
if self.useServiceHotkey:
self.toggleMonitorDlg.save(toggleHotkey)
self.configManager.app.hotkey_created(toggleHotkey)
# ---- Signal handlers
def on_setConfigButton_pressed(self):
self.showConfigDlg.exec_()
if self.showConfigDlg.result() == QDialog.Accepted:
self.useConfigHotkey = True
key = self.showConfigDlg.key
modifiers = self.showConfigDlg.build_modifiers()
self.configKeyLabel.setText(self.showConfigDlg.targetItem.get_hotkey_string(key, modifiers))
self.clearConfigButton.setEnabled(True)
def on_clearConfigButton_pressed(self):
self.useConfigHotkey = False
self.clearConfigButton.setEnabled(False)
self.configKeyLabel.setText(i18n("(None configured)"))
self.showConfigDlg.reset()
def on_setMonitorButton_pressed(self):
self.toggleMonitorDlg.exec_()
if self.toggleMonitorDlg.result() == QDialog.Accepted:
self.useServiceHotkey = True
key = self.toggleMonitorDlg.key
modifiers = self.toggleMonitorDlg.build_modifiers()
self.monitorKeyLabel.setText(self.toggleMonitorDlg.targetItem.get_hotkey_string(key, modifiers))
self.clearMonitorButton.setEnabled(True)
def on_clearMonitorButton_pressed(self):
self.useServiceHotkey = False
self.clearMonitorButton.setEnabled(False)
self.monitorKeyLabel.setText(i18n("(None configured)"))
self.toggleMonitorDlg.reset()
class EngineSettings(QWidget, enginesettings.Ui_Form):
def __init__(self, parent, configManager):
QWidget.__init__(self, parent)
enginesettings.Ui_Form.__init__(self)
self.setupUi(self)
self.configManager = configManager
        self.path = configManager.userCodeDir
        if configManager.userCodeDir is not None:
            self.folderLabel.setText(configManager.userCodeDir)
            if configManager.userCodeDir in sys.path:
                sys.path.remove(configManager.userCodeDir)
def save(self):
if self.path is not None:
self.configManager.userCodeDir = self.path
sys.path.append(self.path)
def on_browseButton_pressed(self):
path = KFileDialog.getExistingDirectory(self.parentWidget(), i18n("Choose Directory"))
if path != '':
self.path = path
self.folderLabel.setText(self.path)
class SettingsDialog(KPageDialog):
def __init__(self, parent):
KPageDialog.__init__(self, parent)
self.app = parent.topLevelWidget().app # Used by GlobalHotkeyDialog
self.genPage = self.addPage(GeneralSettings(self), i18n("General"))
self.genPage.setIcon(KIcon("preferences-other"))
self.hkPage = self.addPage(SpecialHotkeySettings(self, parent.app.configManager), i18n("Special Hotkeys"))
self.hkPage.setIcon(KIcon("preferences-desktop-keyboard"))
self.ePage = self.addPage(EngineSettings(self, parent.app.configManager), i18n("Script Engine"))
self.ePage.setIcon(KIcon("text-x-script"))
self.setCaption(i18n("Settings"))
def slotButtonClicked(self, button):
if button == KDialog.Ok:
self.genPage.widget().save()
self.hkPage.widget().save()
self.ePage.widget().save()
self.app.configManager.config_altered(True)
self.app.update_notifier_visibility()
KDialog.slotButtonClicked(self, button)
|
from . import polygon2d
def render_svg(obj, filename):
polygons = polygon2d.polygon(obj)
with open(filename, "w") as fp:
box = obj.bounding_box()
box_size = box.size()
fp.write('<svg xmlns="http://www.w3.org/2000/svg" ')
fp.write('width="{}mm" height="{}mm" '.format(box_size.x, box_size.y))
fp.write(
'viewBox="{} {} {} {}">'.format(box.a.x, -box.b.y, box_size.x, box_size.y)
)
fp.write('<style type="text/css">')
fp.write("path{")
fp.write("stroke:#000;")
fp.write("stroke-width:1px;")
fp.write("vector-effect:non-scaling-stroke;")
fp.write("fill:#BBF23C{};")
fp.write("}")
fp.write("</style>")
fp.write('<path d="')
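        # SVG's y axis points downwards, so y coordinates are negated below
        # (and the viewBox origin above uses -box.b.y) to keep the model's
        # orientation.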
for polygon in polygons:
it = reversed(polygon)
x, y = next(it)
fp.write("M{},{}".format(x, -y))
for x, y in it:
fp.write("L{},{}".format(x, -y))
fp.write("L{},{}".format(polygon[-1][0], -polygon[-1][1]))
fp.write('"/>')
fp.write("</svg>")
|
import sys,os,math,collections
from math import *
class Data:
def __init__(self, x,y,idd):
self.X=x
self.Y=y
self.ID=idd
def read_data(filename):
f = open(filename,'r')
L_object=[]
for line in f:
        line = line.strip()
column = line.split()
mydata = Data(float(column[0]),float(column[1]),column[2])
L_object.append(mydata)
return L_object
def shannon(List_object):
labelCounts={}
for mylist in List_object:
        currentLabel = mylist.ID
        if currentLabel not in labelCounts:
            labelCounts[currentLabel] = 0
        labelCounts[currentLabel] += 1
entropy =0.
for key in labelCounts:
prob = float(labelCounts[key])/len(List_object)
entropy -= prob * log(prob,2)
return entropy
if __name__ == "__main__":
infile = "example3_1.txt"
myL=[]
myL=read_data("example3_1.txt")
final_entropy = shannon(myL)
print "The entropy of the system is: ", final_entropy,
|
r"""The sensorgraph subsystem is an embedded event based scripting engine for IOTile devices.
It is designed to allow you to embed a small set automatic actions that should
be run whenever events happen inside the device. Sensorgraph is structured as
a dependency tree where actions are linked by named FIFOs that route data from
inputs -> processing functions -> output sinks.
The sensor-graph language and implementation were designed to facilitate
static analysis of maximum resource needs as well as runtime properties like
power consumption.
The sensor-graph subsystem interacts with the sensor-log and stream-manager
subsystems in the following way:
sensor-graph has rules that respond to events on a device and generate data
in named FIFOs called streams. All of the actual data storage and stream
management on behalf of sensor-graph is handled by the sensor-log subsystem.
The streaming subsystem listens on configured streams to see if it should
package them up and send them to an external user in the form of a `report`.
So the interaction diagram looks like:
    sensor-graph <------XXXX------> stream-manager
                 no direct contact

    generates data in streams        sole purpose in life is to
    according to configured rules    build reports from streams

                 \\                      //
    producer      \\                    //   consumer only
    and consumer   \\                  //    (generates no readings!)

                        sensor-log

            stores all streams and lets you
            inspect their contents using
            stream walkers that automatically
            get updated when new data is available.
The actual work of simulating the functionality of the embedded sensor-graph
engine is performed by the iotile-sensorgraph package that makes it available
in a variety of contexts including a direct command line simulator named
iotile-sgrun as well as an optimizing compiler.
This controller subsystem mixin just wraps the underlying sensorgraph
simulator from iotile-sensorgraph and adds the correct RPC based interface to
it to emulate how you interact with sensor-graph in a physical IOTile based
device.
TODO:
- [ ] Add dump/restore support
- [ ] Add support for logging information on reset
"""
import logging
import struct
import asyncio
import inspect
from collections import deque
from iotile.core.hw.virtual import tile_rpc
from iotile.core.hw.exceptions import RPCErrorCode
from iotile.core.hw.reports import IOTileReading
from iotile.sg import DataStream, SensorGraph
from iotile.sg.sim.executor import RPCExecutor
from iotile.sg.node_descriptor import parse_binary_descriptor, create_binary_descriptor
from iotile.sg import streamer_descriptor
from iotile.sg.exceptions import NodeConnectionError, ProcessingFunctionError, ResourceUsageError, UnresolvedIdentifierError, StreamEmptyError
from ...constants import rpcs, pack_error, Error, ControllerSubsystem, SensorGraphError, SensorLogError
from .controller_system import ControllerSubsystemBase
def _pack_sgerror(short_code):
"""Pack a short error code with the sensorgraph subsystem."""
return pack_error(ControllerSubsystem.SENSOR_GRAPH, short_code)
class StreamerStatus(object):
"""A model representing the state of a streamer resource."""
def __init__(self):
self.last_attempt_time = 0
self.last_success_time = 0
self.last_error = 0
self.last_status = 0
self.attempt_number = 0
self.comm_status = 0
class EmulatedRPCExecutor(RPCExecutor):
def __init__(self, device):
super(EmulatedRPCExecutor, self).__init__()
self.device = device
self.logger = logging.getLogger(__name__)
async def rpc(self, address, rpc_id):
self.logger.debug("Sending rpc from sensorgraph to %d:%04X", address, rpc_id)
result, = await self.device.emulator.await_rpc(address, rpc_id, bytes(), resp_format="L")
return result
class SensorGraphSubsystem(ControllerSubsystemBase):
"""Container for sensor graph state.
There is a distinction between which sensor-graph is saved into persisted
storage vs currently loaded and running. The sensor-graph subsystem runs
a background task that receives inputs to process and processes them.
"""
def __init__(self, sensor_log_system, stream_manager, model, emulator, executor=None):
super(SensorGraphSubsystem, self).__init__(emulator)
self._logger = logging.getLogger(__name__)
self._model = model
self._sensor_log = sensor_log_system.storage
self._allocate_id = sensor_log_system.allocate_id
self._inputs = emulator.create_queue(register=True)
self._stream_manager = stream_manager
self._rsl = sensor_log_system
self._executor = executor
self.graph = SensorGraph(self._sensor_log, model=model, enforce_limits=True)
self.persisted_exists = False
self.persisted_nodes = []
self.persisted_streamers = []
self.persisted_constants = []
self.streamer_acks = {}
self.streamer_status = {}
self.enabled = False
# Clock manager linkage
self.get_timestamp = lambda: 0
async def _reset_vector(self):
"""Background task to initialize this system in the event loop."""
self._logger.debug("sensor_graph subsystem task starting")
# If there is a persistent sgf loaded, send reset information.
self.initialized.set()
while True:
stream, reading = await self._inputs.get()
try:
await process_graph_input(self.graph, stream, reading, self._executor)
self.process_streamers()
except: #pylint:disable=bare-except;This is a background task that should not die
self._logger.exception("Unhandled exception processing sensor_graph input (stream=%s), reading=%s", stream, reading)
finally:
self._inputs.task_done()
def clear_to_reset(self, config_vars):
"""Clear all volatile information across a reset.
The reset behavior is that:
- any persisted sensor_graph is loaded
- if there is a persisted graph found, enabled is set to True
- if there is a persisted graph found, reset readings are pushed
into it.
"""
super(SensorGraphSubsystem, self).clear_to_reset(config_vars)
self.graph.clear()
if not self.persisted_exists:
return
for node in self.persisted_nodes:
self.graph.add_node(node)
for streamer_desc in self.persisted_streamers:
streamer = streamer_descriptor.parse_string_descriptor(streamer_desc)
self.graph.add_streamer(streamer)
# Load in the constants
for stream, reading in self.persisted_constants:
self._sensor_log.push(stream, reading)
self.enabled = True
# Set up all streamers
for index, value in self.streamer_acks.items():
self._seek_streamer(index, value)
#FIXME: queue sending reset readings
def process_input(self, encoded_stream, value):
"""Process or drop a graph input.
        This method asynchronously queues an item to be processed by the
sensorgraph worker task in _reset_vector. It must be called from
inside the emulation loop and returns immediately before the input is
processed.
"""
if not self.enabled:
return
if isinstance(encoded_stream, str):
stream = DataStream.FromString(encoded_stream)
encoded_stream = stream.encode()
elif isinstance(encoded_stream, DataStream):
stream = encoded_stream
encoded_stream = stream.encode()
else:
stream = DataStream.FromEncoded(encoded_stream)
reading = IOTileReading(self.get_timestamp(), encoded_stream, value)
self._inputs.put_nowait((stream, reading))
def _seek_streamer(self, index, value):
"""Complex logic for actually seeking a streamer to a reading_id.
This routine hides all of the gnarly logic of the various edge cases.
In particular, the behavior depends on whether the reading id is found,
and if it is found, whether it belongs to the indicated streamer or not.
        If not, the behavior depends on whether the sought reading is too high
or too low.
"""
highest_id = self._rsl.highest_stored_id()
streamer = self.graph.streamers[index]
if not streamer.walker.buffered:
return _pack_sgerror(SensorLogError.CANNOT_USE_UNBUFFERED_STREAM)
find_type = None
try:
exact = streamer.walker.seek(value, target='id')
if exact:
find_type = 'exact'
else:
find_type = 'other_stream'
except UnresolvedIdentifierError:
if value > highest_id:
find_type = 'too_high'
else:
find_type = 'too_low'
# If we found an exact match, move one beyond it
if find_type == 'exact':
try:
streamer.walker.pop()
except StreamEmptyError:
pass
error = Error.NO_ERROR
elif find_type == 'too_high':
streamer.walker.skip_all()
error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
elif find_type == 'too_low':
streamer.walker.seek(0, target='offset')
error = _pack_sgerror(SensorLogError.NO_MORE_READINGS)
else:
error = _pack_sgerror(SensorLogError.ID_FOUND_FOR_ANOTHER_STREAM)
return error
def acknowledge_streamer(self, index, ack, force):
"""Acknowledge a streamer value as received from the remote side."""
if index >= len(self.graph.streamers):
return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)
old_ack = self.streamer_acks.get(index, 0)
if ack != 0:
if ack <= old_ack and not force:
return _pack_sgerror(SensorGraphError.OLD_ACKNOWLEDGE_UPDATE)
self.streamer_acks[index] = ack
current_ack = self.streamer_acks.get(index, 0)
return self._seek_streamer(index, current_ack)
def _handle_streamer_finished(self, index, succeeded, highest_ack):
"""Callback when a streamer finishes processing."""
self._logger.debug("Rolling back streamer %d after streaming, highest ack from streaming subsystem was %d", index, highest_ack)
self.acknowledge_streamer(index, highest_ack, False)
def process_streamers(self):
"""Check if any streamers should be handed to the stream manager."""
# Check for any triggered streamers and pass them to stream manager
in_progress = self._stream_manager.in_progress()
triggered = self.graph.check_streamers(blacklist=in_progress)
for streamer in triggered:
self._stream_manager.process_streamer(streamer, callback=self._handle_streamer_finished)
def trigger_streamer(self, index):
"""Pass a streamer to the stream manager if it has data."""
self._logger.debug("trigger_streamer RPC called on streamer %d", index)
if index >= len(self.graph.streamers):
return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)
if index in self._stream_manager.in_progress():
return _pack_sgerror(SensorGraphError.STREAM_ALREADY_IN_PROGRESS)
streamer = self.graph.streamers[index]
if not streamer.triggered(manual=True):
return _pack_sgerror(SensorGraphError.STREAMER_HAS_NO_NEW_DATA)
self._logger.debug("calling mark_streamer on streamer %d from trigger_streamer RPC", index)
self.graph.mark_streamer(index)
self.process_streamers()
return Error.NO_ERROR
def count_nodes(self):
"""Count the number of nodes."""
return len(self.graph.nodes)
def persist(self):
"""Trigger saving the current sensorgraph to persistent storage."""
self.persisted_nodes = self.graph.dump_nodes()
self.persisted_streamers = self.graph.dump_streamers()
self.persisted_exists = True
self.persisted_constants = self._sensor_log.dump_constants()
def reset(self):
"""Clear the sensorgraph from RAM and flash."""
self.persisted_exists = False
self.persisted_nodes = []
self.persisted_streamers = []
self.persisted_constants = []
self.graph.clear()
self.streamer_status = {}
def add_node(self, binary_descriptor):
"""Add a node to the sensor_graph using a binary node descriptor.
Args:
binary_descriptor (bytes): An encoded binary node descriptor.
Returns:
int: A packed error code.
"""
try:
node_string = parse_binary_descriptor(binary_descriptor)
except:
self._logger.exception("Error parsing binary node descriptor: %s", binary_descriptor)
return _pack_sgerror(SensorGraphError.INVALID_NODE_STREAM) # FIXME: Actually provide the correct error codes here
try:
self.graph.add_node(node_string)
except NodeConnectionError:
return _pack_sgerror(SensorGraphError.STREAM_NOT_IN_USE)
except ProcessingFunctionError:
return _pack_sgerror(SensorGraphError.INVALID_PROCESSING_FUNCTION)
except ResourceUsageError:
return _pack_sgerror(SensorGraphError.NO_NODE_SPACE_AVAILABLE)
return Error.NO_ERROR
def add_streamer(self, binary_descriptor):
"""Add a streamer to the sensor_graph using a binary streamer descriptor.
Args:
binary_descriptor (bytes): An encoded binary streamer descriptor.
Returns:
int: A packed error code
"""
streamer = streamer_descriptor.parse_binary_descriptor(binary_descriptor)
try:
self.graph.add_streamer(streamer)
self.streamer_status[len(self.graph.streamers) - 1] = StreamerStatus()
return Error.NO_ERROR
except ResourceUsageError:
return _pack_sgerror(SensorGraphError.NO_MORE_STREAMER_RESOURCES)
def inspect_streamer(self, index):
"""Inspect the streamer at the given index."""
if index >= len(self.graph.streamers):
return [_pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED), b'\0'*14]
return [Error.NO_ERROR, streamer_descriptor.create_binary_descriptor(self.graph.streamers[index])]
def inspect_node(self, index):
"""Inspect the graph node at the given index."""
if index >= len(self.graph.nodes):
raise RPCErrorCode(6) #FIXME: use actual error code here for UNKNOWN_ERROR status
return create_binary_descriptor(str(self.graph.nodes[index]))
def query_streamer(self, index):
"""Query the status of the streamer at the given index."""
if index >= len(self.graph.streamers):
return None
info = self.streamer_status[index]
highest_ack = self.streamer_acks.get(index, 0)
return [info.last_attempt_time, info.last_success_time, info.last_error, highest_ack, info.last_status, info.attempt_number, info.comm_status]
class SensorGraphMixin(object):
"""Mixin for an IOTileController that implements the sensor-graph subsystem.
Args:
sensor_log (SensorLog): The rsl subsystem.
        stream_manager (StreamManager): The stream manager subsystem.
model (DeviceModel): A device model containing resource limits about the
emulated device.
"""
def __init__(self, emulator, sensor_log, stream_manager, model):
self.sensor_graph = SensorGraphSubsystem(sensor_log, stream_manager, model, emulator, executor=EmulatedRPCExecutor(self._device))
self._post_config_subsystems.append(self.sensor_graph)
@tile_rpc(*rpcs.SG_COUNT_NODES)
def sg_count_nodes(self):
"""Count the number of nodes in the sensor_graph."""
return [self.sensor_graph.count_nodes()]
@tile_rpc(*rpcs.SG_ADD_NODE)
def sg_add_node(self, descriptor):
"""Add a node to the sensor_graph using its binary descriptor."""
err = self.sensor_graph.add_node(descriptor)
return [err]
@tile_rpc(*rpcs.SG_SET_ONLINE)
def sg_set_online(self, online):
"""Set the sensor-graph online/offline."""
self.sensor_graph.enabled = bool(online)
return [Error.NO_ERROR]
@tile_rpc(*rpcs.SG_GRAPH_INPUT)
def sg_graph_input(self, value, stream_id):
""""Present a graph input to the sensor_graph subsystem."""
self.sensor_graph.process_input(stream_id, value)
return [Error.NO_ERROR]
@tile_rpc(*rpcs.SG_RESET_GRAPH)
def sg_reset_graph(self):
"""Clear the in-memory and persisted graph (if any)."""
self.sensor_graph.reset()
return [Error.NO_ERROR]
@tile_rpc(*rpcs.SG_PERSIST_GRAPH)
def sg_persist_graph(self):
"""Save the current in-memory graph persistently."""
self.sensor_graph.persist()
return [Error.NO_ERROR]
@tile_rpc(*rpcs.SG_INSPECT_GRAPH_NODE)
def sg_inspect_graph_node(self, index):
"""Inspect the given graph node."""
desc = self.sensor_graph.inspect_node(index)
return [desc]
@tile_rpc(*rpcs.SG_ADD_STREAMER)
def sg_add_streamer(self, desc):
"""Add a graph streamer using a binary descriptor."""
if len(desc) == 13:
desc += b'\0'
err = self.sensor_graph.add_streamer(desc)
return [err]
@tile_rpc(*rpcs.SG_INSPECT_STREAMER)
def sg_inspect_streamer(self, index):
"""Inspect a sensorgraph streamer by index."""
return self.sensor_graph.inspect_streamer(index)
@tile_rpc(*rpcs.SG_TRIGGER_STREAMER)
def sg_trigger_streamer(self, index):
"""Manually trigger a streamer."""
err = self.sensor_graph.trigger_streamer(index)
return [err]
@tile_rpc(*rpcs.SG_SEEK_STREAMER)
def sg_seek_streamer(self, index, force, value):
"""Ackowledge a streamer."""
force = bool(force)
err = self.sensor_graph.acknowledge_streamer(index, value, force)
return [err]
@tile_rpc(*rpcs.SG_QUERY_STREAMER)
def sg_query_streamer(self, index):
"""Query the current status of a streamer."""
resp = self.sensor_graph.query_streamer(index)
if resp is None:
return [struct.pack("<L", _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED))]
return [struct.pack("<LLLLBBBx", *resp)]
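    # Byte layout of the packed success response above ("<LLLLBBBx" is 20 bytes, little
    # endian), matching the 7-element list returned by SensorGraphSubsystem.query_streamer():
    #   4B last_attempt_time, 4B last_success_time, 4B last_error, 4B highest_ack,
    #   1B last_status, 1B attempt_number, 1B comm_status, 1B padding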
async def process_graph_input(graph, stream, value, rpc_executor):
"""Process an input through this sensor graph.
    The tick information in value should be correct and is transferred
to all results produced by nodes acting on this tick. This coroutine
is an asyncio compatible version of SensorGraph.process_input()
Args:
stream (DataStream): The stream the input is part of
value (IOTileReading): The value to process
rpc_executor (RPCExecutor): An object capable of executing RPCs
in case we need to do that.
"""
graph.sensor_log.push(stream, value)
# FIXME: This should be specified in our device model
if stream.important:
associated_output = stream.associated_stream()
graph.sensor_log.push(associated_output, value)
to_check = deque([x for x in graph.roots])
while len(to_check) > 0:
node = to_check.popleft()
if node.triggered():
try:
results = node.process(rpc_executor, graph.mark_streamer)
for result in results:
if inspect.iscoroutine(result.value):
result.value = await asyncio.ensure_future(result.value)
result.raw_time = value.raw_time
graph.sensor_log.push(node.stream, result)
            except:
                logging.getLogger(__name__).exception("Unhandled exception in graph node processing function for node %s", str(node))
                continue
# If we generated any outputs, notify our downstream nodes
# so that they are also checked to see if they should run.
if len(results) > 0:
to_check.extend(node.outputs)
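# A minimal usage sketch for the coroutine above (comments only; `graph`, `stream`,
# `reading` and `rpc_executor` are assumed to be an already-built SensorGraph, DataStream,
# IOTileReading and RPCExecutor -- none of them are constructed here):
#
#   import asyncio
#   asyncio.get_event_loop().run_until_complete(
#       process_graph_input(graph, stream, reading, rpc_executor))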
|
from .module_definition import ModuleDefinition
from .link_type import LinkType
class Calls(ModuleDefinition):
@property
def name(self):
return 'Calls'
@property
def contacts_link_type(self):
return LinkType.RELATIONSHIP
@property
def contacts_link_name(self):
return 'calls'
@property
def accounts_link_type(self):
return LinkType.RELATIONSHIP
@property
def accounts_link_name(self):
return 'calls'
|
import lldb
from xnu import *
def _showStructPacking(symbol, prefix, begin_offset=0, typedef=None):
"""
    Recursively parse the field members of a structure.
params : symbol (lldb.SBType) reference to symbol in binary
prefix (string) string to be prefixed for each line of output. Useful for recursive struct parsing.
returns: string containing lines of output.
"""
ctype = "unknown type"
if symbol.GetTypeClass() == lldb.eTypeClassUnion :
ctype = "union"
if symbol.GetTypeClass() == lldb.eTypeClassStruct :
ctype = "struct"
if typedef:
outstr = "[%4d] (%s) (%s) %s { " % (symbol.GetByteSize(), typedef, ctype, symbol.GetName()) + "\n"
else :
outstr = "[%4d] (%s) %s { " % (symbol.GetByteSize(), ctype, symbol.GetName()) + "\n"
numFields = symbol.GetNumberOfFields()
_has_memory_hole = False
    _compact_size = 0 # assuming the struct is perfectly packed
_compact_offset = begin_offset
_previous_bit_offset = 0
for i in range(numFields):
member = symbol.GetFieldAtIndex(i)
m_offset = member.GetOffsetInBytes() + begin_offset
m_offset_bits = member.GetOffsetInBits()
m_type = member.GetType()
m_name = member.GetName()
m_size = m_type.GetByteSize()
warningstr = ""
debugstr = "" # + str((m_size, m_offset , m_offset_bits, _previous_bit_offset, _compact_offset, begin_offset))
if _compact_offset != m_offset and (m_offset_bits - _previous_bit_offset) > m_size*8 :
_has_memory_hole = True
warningstr = " *** Possible memory hole ***"
_compact_offset = m_offset
_compact_offset += m_size
_type_class = m_type.GetTypeClass()
_canonical_type = m_type.GetCanonicalType()
_canonical_type_class = m_type.GetCanonicalType().GetTypeClass()
if _type_class == lldb.eTypeClassTypedef and (_canonical_type_class == lldb.eTypeClassStruct or _canonical_type_class == lldb.eTypeClassUnion) :
outstr += prefix + ("*%4d," % m_offset) + _showStructPacking(_canonical_type, prefix+" ", m_offset, str(m_type)) + warningstr + debugstr + "\n"
elif _type_class == lldb.eTypeClassStruct or _type_class == lldb.eTypeClassUnion :
outstr += prefix + ("*%4d," % m_offset) + _showStructPacking(m_type, prefix+" ", m_offset) + warningstr + debugstr + "\n"
else:
outstr += prefix + ("+%4d,[%4d] (%s) %s" % (m_offset, m_size, m_type.GetName(), m_name)) + warningstr + debugstr + "\n"
if i > 0 :
_previous_bit_offset = m_offset_bits
outstr += prefix + "}"
if _has_memory_hole == True :
outstr += " *** Warning: Struct layout leaves memory hole *** "
return outstr
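# Note on the recursion above (comments only): nested structs/unions are expanded in place by
# re-invoking _showStructPacking with the member's absolute offset as begin_offset, so the
# "+offset" column stays relative to the outermost struct. The "*** Possible memory hole ***"
# marker is a heuristic flag that a field starts later than a perfectly packed layout would
# place it, i.e. the compiler likely inserted alignment padding before it.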
@lldb_command('showstructpacking')
def showStructInfo(cmd_args=None):
"""Show how a structure is packed in the binary. The format is
+<offset>, [<size_of_member>] (<type>) <name>
For example:
(lldb) script lldbmacros.showStructInfo("pollfd")
[ 8] (struct) pollfd {
+ 0,[ 4] (int) fd
+ 4,[ 2] (short) events
+ 6,[ 2] (short) revents
}
    syntax: showstructpacking <type name>
"""
if not cmd_args:
raise ArgumentError("Please provide a type name.")
sym = gettype(cmd_args[0])
    if sym is None:
        print "No such struct found"
        return
if sym.GetTypeClass() == lldb.eTypeClassTypedef:
sym = sym.GetCanonicalType()
if sym.GetTypeClass() != lldb.eTypeClassStruct:
print "%s is not a structure" % cmd_args[0]
else:
print _showStructPacking(sym,"", 0)
|
__author__="aabilio"
__date__ ="$06-may-2011 11:03:38$"
from Descargar import Descargar
from utiles import salir, formatearNombre, printt
import sys
class CSur(object):
'''
    Description of the CSur class, which handles downloading Canal Sur videos
'''
def __init__(self, url=""):
self._URL_recibida = url
def getURL(self):
return self._URL_recibida
def setURL(self, url):
self._URL_recibida = url
url = property(getURL, setURL)
    # Private helper functions used by procesarDescarga(self):
def __descHTML(self, url2down):
        ''' Method that uses the Descargar class to download the HTML '''
D = Descargar(url2down)
return D.descargar()
def __alacarta(self):
        '''Return the URL and NAME of Canal Sur "A la carta" videos'''
printt(u"[INFO] A la carta")
xmlStream = self.__descHTML(self.__descHTML(self._URL_recibida).split("_url_xml_datos=")[1].split("\"")[0])
url = xmlStream.split("<url>")[1].split("<")[0]
ext = "." + url.split(".")[-1]
name = xmlStream.split("<title><![CDATA[")[1].split("]")[0] + ext
return [url, name]
def __modoNormal(self):
        '''Return the URL and NAME of regular Canal Sur videos'''
printt(u"[INFO] Vídeo Normal")
htmlStream = self.__descHTML(self._URL_recibida)
url = "http://www.canalsur.es" + htmlStream.split("flashvars=\"file=")[1].split("&")[0]
ext = "." + url.split(".")[-1]
name = htmlStream.split("<title>")[1].split("<")[0] + ext
return [url, name]
def procesarDescarga(self):
'''
        Does whatever processing is needed to obtain the final URL of the video to download,
        and returns that URL together with the name the downloaded file should get, in the form:
        return [ruta_url, nombre]
        If no name is to be given to the resulting file on disk, or there is no known way to
        obtain one automatically, the following is returned instead:
        return [ruta_url, None]
        and Descargar's download method will then use the default name derived from the URL.
        Both "ruta_url" and "nombre" may be lists (naturally, the name for ruta_url[0]
        must be nombre[0], and so on).
'''
if self._URL_recibida.find("canalsuralacarta.es") != -1: # CSur a la carta:
url, name = self.__alacarta()
elif self._URL_recibida.find("canalsur.es/") != -1: # Vídeos normales
url, name = self.__modoNormal()
        else: # This should never happen
salir(u"[!!!] Error inesperado")
if name:
name = formatearNombre(name)
return [url, name]
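# A minimal usage sketch (comments only; the URL below is made up, and the step that actually
# saves the final file to disk with Descargar is not shown in this module, so it is omitted):
#
#   csur = CSur("http://www.canalsur.es/television/some-video.html")
#   ruta_url, nombre = csur.procesarDescarga()
#   # ruta_url is the direct media URL; nombre is the formatted file name (or None).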
|
import os, sys, math, errno, argparse
pretend = False
verbose = False
def get_media_path(configfile):
with open(configfile, "r") as f:
for line in f:
if (line.strip()).startswith('media_path'):
item, value = (line.strip()).split(" ",2)
return(value)
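# Example (comments only) of the motioneye.conf line get_media_path() looks for; the exact
# path is just illustrative:
#   media_path /var/lib/motioneye
# For that line the function returns "/var/lib/motioneye".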
def check_free_percent(rootfolder):
statv= os.statvfs(rootfolder)
return 100.0 - ( 100.0 * float(statv.f_blocks - statv.f_bfree) / float(statv.f_blocks - statv.f_bfree + statv.f_bavail) )
def check_free_space(rootfolder):
statv= os.statvfs(rootfolder)
return statv.f_bavail * statv.f_frsize / 1024
def check_used_blocks(rootfolder):
statv= os.statvfs(rootfolder)
return float(statv.f_blocks * statv.f_bsize) - float(statv.f_bfree * statv.f_bsize)
def check_space_to_min(rootfolder, minpercent):
statv= os.statvfs(rootfolder)
targetpercent = minpercent - check_free_percent(rootfolder);
if targetpercent > 0:
return float(targetpercent / 100.0) * float(statv.f_blocks * statv.f_frsize / 1024)
else:
return 0.0
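# Worked example (comments only, made-up numbers) for check_space_to_min(): on a filesystem
# whose total capacity f_blocks * f_frsize is 100 GiB but where only 10% is currently free,
# asking for minpercent = 20 gives targetpercent = 10, so the function returns
# (10 / 100.0) * (100 GiB / 1024) = 10,485,760 KiB, i.e. roughly 10 GiB still to be freed.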
def files_by_oldest(rootfolder):
    # Walk the tree and return every file path, sorted by modification time (oldest first).
    return sorted((os.path.join(dirname, filename)
                   for dirname, dirnames, filenames in os.walk(rootfolder)
                   for filename in filenames),
                  key=lambda fn: os.stat(fn).st_mtime)
def check_keepfiles(filename, keeplist):
for flap in keeplist:
if filename.endswith(flap):
return True
return False
def overwrite_with_zero_data(filename):
try:
if (pretend):
if (verbose): print("If this wasn't a dry run", filename, "would be overwritte with zero length data.")
else:
if (verbose): print("Overwriting", filename, "with zero length data.")
f = open(filename, 'w') # Open 'filename' for writing.
f.close() # and then immediately close it, resulting in a zero length file.
return True
except OSError as e:
if e.errno != errno.ENOENT:
return False
def silentremove(filename):
try:
if (pretend):
return True
else:
if os.path.isfile(filename):
os.remove(filename)
elif os.path.isdir(filename):
os.rmdir(filename)
else:
return False
return True
except OSError as e:
if e.errno != errno.ENOENT:
return False
def delete_files_by_oldest(basepath, keepfiles, target_freepercent):
totalsize = 0.0
targetsize = check_space_to_min(basepath,target_freepercent) # get how much actual space needs to be freed based on the target for percentage free
# If we don't need to do anything, don't do anything
if (totalsize >= targetsize):
if (verbose): print("Adequate space remaining, no need to remove any files.")
return True
# If the filesystem is completely full, we're going to have some problems on Copy On Write filesystems. This is part of a workaround.
if (check_free_percent(basepath) == 0.0):
filesystemfull_flag = True
else:
filesystemfull_flag = False
    for fnord in files_by_oldest(basepath):
        # Skip deletion if the file is in the keep files list.
        if (check_keepfiles(fnord,keepfiles)):
            if (verbose): print("Skipping %s" % (fnord))
            continue
        size = os.stat(fnord).st_size / 1024
        # Workaround for Copy On Write filesystems' inability to delete files if there is zero free space.
        if (filesystemfull_flag):
            overwrite_with_zero_data(fnord)
            filesystemfull_flag = False
        # Remove the file
        if (silentremove(fnord)):
            totalsize = totalsize + size
            if (verbose): print("%s\t%0.2f\t%0.2f/%0.2f" % (fnord,size,totalsize,targetsize))
        # Break once we have deleted enough bytes to meet or exceed the target for free space.
        if (totalsize >= targetsize):
            break
return True
def recursive_delete_if_empty(path,keepfiles):
if not os.path.isdir(path):
return False
if check_keepfiles(path,keepfiles):
return False
if all([recursive_delete_if_empty(os.path.join(path, filename),keepfiles)
for filename in os.listdir(path)]):
try:
if (pretend):
if (verbose): print("Intended path:", path)
else:
if (verbose): print("Removing path: ",path)
os.rmdir(path)
return True
except OSError as e:
if e.errno != errno.ENOENT:
return False
else:
return False
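# Behaviour sketch (comments only) for recursive_delete_if_empty(): because the all([...])
# call recurses first, a directory is removed (or merely reported with --dryrun) only when
# every entry below it could itself be removed; any regular file, kept path (check_keepfiles)
# or non-empty subdirectory therefore keeps the whole chain of parent directories alive.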
def main():
motioneye_configfile = "/etc/motioneye/motioneye.conf" # path to motioneye config file
target_freepercent = 20 # target for free disk space in percent
keepfiles = [ ".donotdelete" ] # files that should be kept no matter how old they are
# Parse the command line arguments, if any
parser = argparse.ArgumentParser(
description="Prune motioneye media files until a minimum percentage of disk space is free.",
epilog='''
This program aims to provide an alternative to the built-in Preserve Pictures/Movies feature in Motioneye. Instead of deleting after a period of time, this program deletes files from the Motioneye media path directory - starting with the oldest - to achieve a certain percentage of disk space has been freed.\n\n
After the file deletion task has completed the program then deletes all empty directories, leaving the first-level directories inside the Motioneye media path directory.
'''
)
parser.add_argument("-c", "--config", help="path to motioneye config file (default: %s)" % motioneye_configfile)
parser.add_argument("-f", "--free", type=int, default=20, help="minimum free disk space, percent (default: %0.2f)" % target_freepercent)
parser.add_argument("-v", "--verbose", help="verbose output", action="store_true")
parser.add_argument("-n", "--dryrun", help="perform a trial run with no changes made", action="store_true")
args = parser.parse_args()
if args.config:
motioneye_configfile = args.config
if args.free:
target_freepercent = args.free
if args.verbose:
global verbose
verbose = True
if args.dryrun:
global pretend
pretend = True
basepath = get_media_path(motioneye_configfile) # get the base path from the motioneye config file
targetsize = check_space_to_min(basepath,target_freepercent) # Calculate the amount of disk space to delete to meet the target free percentage
keepfiles.append(basepath)
keepfiles = keepfiles + os.listdir(basepath) # Append directories in basepath to keepfiles list
if (verbose):
print("Motioneye Config Path:", motioneye_configfile)
print("Media Path:",basepath)
print("Keep files:",keepfiles)
print("Target free (percent):",target_freepercent)
print("Space free (percent):",round(check_free_percent(basepath)))
print("Space free (bytes):",check_free_space(basepath))
print("Required deletion to reach target free space:",targetsize)
if (pretend): print("Dry run. No files will be deleted.")
# First we delete files starting with the oldest first until we reach our target percentage free
delete_files_by_oldest(basepath, keepfiles, target_freepercent)
# Next, we'll recursively prune the empty paths
recursive_delete_if_empty(basepath,keepfiles)
return 0
main()
sys.exit(0)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import numpy as np
import pytest
from astropy import units
from astropy.io import fits
from pypeit import specobjs
from pypeit.core import save
from pypeit.tests.tstutils import dummy_fitstbl
from pypeit.spectrographs import util
def data_path(filename):
data_dir = os.path.join(os.path.dirname(__file__), 'files')
return os.path.join(data_dir, filename)
def mk_specobj(flux=5, objid=500):
# specobj
npix = 100
specobj = specobjs.SpecObj((100,100), 0, (0.4,0.6), objtype='science',
spat_pixpos=300)
specobj.boxcar = dict(wave=np.arange(npix)*units.AA, counts=np.ones(npix)*flux)
specobj.optimal = dict(wave=np.arange(npix)*units.AA, counts=np.ones(npix)*flux-0.5)
specobj.objid = objid
specobj.trace_spat = np.arange(npix) / npix
specobj.fwhmfit = np.arange(npix) / npix
# Return
return specobj
def test_save2d_fits():
#settings.dummy_settings()
#fitsdict = arutils.dummy_fitsdict(nfile=1, spectrograph='none', directory=data_path(''))
fitstbl = dummy_fitstbl(directory=data_path(''))
# Kludge
fitstbl.table.remove_column('filename')
fitstbl['filename'] = 'b1.fits.gz'
# Settings
#settings.argflag['run']['directory']['science'] = data_path('')
spectrograph = 'shane_kast_blue'
# Fill with dummy images
dum = np.ones((100,100))
sci_dict = {}
sci_dict[0] = {}
sci_dict[0]['sciframe'] = dum
sci_dict[0]['finalvar'] = dum * 2
sci_dict[0]['finalsky'] = dum + 0.1
sci_dict['meta'] = {}
sci_dict['meta']['vel_corr'] = 0.
sci_dict['meta']['ir_redux'] = False
basename = 'test'
scidx = 5
path = fitstbl['directory'][scidx]
ifile = fitstbl['filename'][scidx]
rawfile = os.path.join(path, ifile)
master_dir = data_path('MF')+'_'+spectrograph
outfile = data_path('') + 'spec2d_{:s}.fits'.format(basename)
# Create a dummy master_key_dict
master_key_dict = dict(frame='', bpm='bpmkey',bias='',arc='',trace='',flat='')
raw_hdr = fits.open(rawfile)[0].header
save.save_2d_images(sci_dict, raw_hdr, spectrograph, master_key_dict, master_dir, outfile)
# Read and test
head0 = fits.getheader(data_path('spec2d_test.fits'))
assert head0['PYPMFDIR'] == master_dir
assert head0['BPMMKEY'] == 'bpm' # See save_2d_images; removes last 3 characters
assert 'PYPEIT' in head0['PIPELINE']
def test_save1d_fits():
""" save1d to FITS and HDF5
"""
# Init
fitstbl = dummy_fitstbl(spectro_name='shane_kast_blue', directory=data_path(''))
sobj = mk_specobj()
specObjs = specobjs.SpecObjs([sobj])
spectrograph = util.load_spectrograph('shane_kast_blue')
# Write to FITS
basename = 'test'
outfile = data_path('') + 'spec1d_{:s}.fits'.format(basename)
save.save_1d_spectra_fits(specObjs, fitstbl[5], spectrograph, outfile)
|