gt
stringclasses 1
value | context
stringlengths 2.49k
119k
|
|---|---|
#!/usr/bin/env python
"""
Auxiliary functions for f2py2e.
Copyright 1999,2000 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) LICENSE.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/07/24 19:01:55 $
Pearu Peterson
"""
# Version string extracted from the CVS revision tag (e.g. "1.65").
__version__ = "$Revision: 1.65 $"[10:-1]
# NOTE: the name is immediately rebound to the f2py __version__ module below.
import __version__
f2py_version = __version__.version
import pprint
import sys
import types
import cfuncs
# Shorthand writers for diagnostics (stderr) and pretty-printing.
errmess=sys.stderr.write
#outmess=sys.stdout.write
show=pprint.pprint
# Global state shared across f2py modules: command-line options
# (e.g. 'verbose'), debug switches (e.g. 'capi'), and whether Fortran
# functions get wrapper subroutines generated.
options={}
debugoptions=[]
wrapfuncs = 1
def outmess(t):
    # Write message t to stdout unless verbosity was disabled via
    # options['verbose'] (defaults to on).
    if options.get('verbose',1):
        sys.stdout.write(t)
def debugcapi(var):
    # True when C/API debugging was requested globally.  The `var`
    # argument is unused; it is accepted so this can serve as a rule
    # predicate alongside the other is*/has* checks.
    return 'capi' in debugoptions
# Predicates over f2py variable-description dictionaries.  Each `var` is a
# dict produced by the crackfortran parser (keys like 'typespec',
# 'dimension', 'attrspec', 'intent').  They return ints/bools usable in
# boolean context.
def _isstring(var):
    # Any character-typed, non-external variable (scalar or array).
    return 'typespec' in var and var['typespec']=='character' and (not isexternal(var))
def isstring(var):
    # Character scalar (excludes arrays of strings).
    return _isstring(var) and not isarray(var)
def ischaracter(var):
    # String without an explicit charselector (length/kind specification).
    return isstring(var) and 'charselector' not in var
def isstringarray(var):
    return isarray(var) and _isstring(var)
def isarrayofstrings(var):
    # leaving out '*' for now so that
    # `character*(*) a(m)` and `character a(m,*)`
    # are treated differently. Luckily `character**` is illegal.
    return isstringarray(var) and var['dimension'][-1]=='(*)'
def isarray(var):
    # Any dimensioned, non-external variable.
    return 'dimension' in var and (not isexternal(var))
def isscalar(var):
    return not (isarray(var) or isstring(var) or isexternal(var))
def iscomplex(var):
    return isscalar(var) and var.get('typespec') in ['complex','double complex']
def islogical(var):
    return isscalar(var) and var.get('typespec')=='logical'
def isinteger(var):
    return isscalar(var) and var.get('typespec')=='integer'
def isreal(var):
    return isscalar(var) and var.get('typespec')=='real'
def get_kind(var):
    """Return the declared kind of ``var``.

    The '*' entry of the variable's 'kindselector' wins over the 'kind'
    entry; None is returned when neither (or no selector) is present.
    """
    selector = var.get('kindselector', {})
    for key in ('*', 'kind'):
        if key in selector:
            return selector[key]
    return None
# Kind-specific scalar predicates.  Kind strings are compared literally;
# negative kinds ('-1'..'-8') are an f2py-internal convention for unsigned
# C integer types -- presumably assigned elsewhere in the parser (not
# standard Fortran); TODO confirm against crackfortran.
def islong_long(var):
    # Scalar integer/logical of kind 8 (C long long).
    if not isscalar(var):
        return 0
    if var.get('typespec') not in ['integer','logical']:
        return 0
    return get_kind(var)=='8'
def isunsigned_char(var):
    if not isscalar(var):
        return 0
    if var.get('typespec') != 'integer':
        return 0
    return get_kind(var)=='-1'
def isunsigned_short(var):
    if not isscalar(var):
        return 0
    if var.get('typespec') != 'integer':
        return 0
    return get_kind(var)=='-2'
def isunsigned(var):
    if not isscalar(var):
        return 0
    if var.get('typespec') != 'integer':
        return 0
    return get_kind(var)=='-4'
def isunsigned_long_long(var):
    if not isscalar(var):
        return 0
    if var.get('typespec') != 'integer':
        return 0
    return get_kind(var)=='-8'
def isdouble(var):
    # Scalar real of kind 8 (C double).
    if not isscalar(var):
        return 0
    if not var.get('typespec')=='real':
        return 0
    return get_kind(var)=='8'
def islong_double(var):
    # Scalar real of kind 16 (C long double).
    if not isscalar(var):
        return 0
    if not var.get('typespec')=='real':
        return 0
    return get_kind(var)=='16'
def islong_complex(var):
    # Complex scalar of kind 32 (pair of long doubles).
    if not iscomplex(var):
        return 0
    return get_kind(var)=='32'
# Array counterparts of the kind-specific predicates above.
def iscomplexarray(var):
    return isarray(var) and var.get('typespec') in ['complex','double complex']
def isint1array(var):
    return isarray(var) and var.get('typespec')=='integer' \
        and get_kind(var)=='1'
def isunsigned_chararray(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='-1'
def isunsigned_shortarray(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='-2'
def isunsignedarray(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='-4'
def isunsigned_long_longarray(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='-8'
def issigned_chararray(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='1'
def issigned_shortarray(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='2'
def issigned_array(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='4'
def issigned_long_longarray(var):
    return isarray(var) and var.get('typespec') in ['integer', 'logical']\
        and get_kind(var)=='8'
def isallocatable(var):
    return 'attrspec' in var and 'allocatable' in var['attrspec']
def ismutable(var):
    # Mutable when dimensioned and not a string (double negation kept
    # verbatim: "has 'dimension' and is not a string").
    return not (not 'dimension' in var or isstring(var))
# Predicates over routine-description dictionaries ('block' holds the
# Fortran block kind: 'module', 'function', 'subroutine', ...).
def ismoduleroutine(rout):
    return 'modulename' in rout
def ismodule(rout):
    return ('block' in rout and 'module'==rout['block'])
def isfunction(rout):
    return ('block' in rout and 'function'==rout['block'])
#def isfunction_wrap(rout):
#    return wrapfuncs and (iscomplexfunction(rout) or isstringfunction(rout)) and (not isexternal(rout))
def isfunction_wrap(rout):
    # Wrap every non-intent(c), non-external function when the global
    # wrapfuncs switch is enabled.
    if isintent_c(rout):
        return 0
    return wrapfuncs and isfunction(rout) and (not isexternal(rout))
def issubroutine(rout):
    return ('block' in rout and 'subroutine'==rout['block'])
def isroutine(rout):
    return isfunction(rout) or issubroutine(rout)
def _function_result_check(rout, predicate):
    # Shared lookup for the is*function predicates below: apply
    # `predicate` to the routine's result variable.  Returns 0 when rout
    # is not a function or its result variable is not in rout['vars'].
    if not isfunction(rout):
        return 0
    if 'result' in rout:
        a = rout['result']
    else:
        # Without an explicit RESULT clause the function name doubles as
        # the result variable name.
        a = rout['name']
    if a in rout['vars']:
        return predicate(rout['vars'][a])
    return 0
def islogicalfunction(rout):
    # Function whose result is a logical scalar.
    return _function_result_check(rout, islogical)
def islong_longfunction(rout):
    # Function whose result is a long long scalar.
    return _function_result_check(rout, islong_long)
def islong_doublefunction(rout):
    # Function whose result is a long double scalar.
    return _function_result_check(rout, islong_double)
def iscomplexfunction(rout):
    # Function whose result is a complex scalar.
    return _function_result_check(rout, iscomplex)
def iscomplexfunction_warn(rout):
    # Like iscomplexfunction, but emits a compiler-compatibility warning
    # when true.
    if iscomplexfunction(rout):
        outmess("""\
**************************************************************
Warning: code with a function returning complex value
may not work correctly with your Fortran compiler.
Run the following test before using it in your applications:
$(f2py install dir)/test-site/{b/runme_scalar,e/runme}
When using GNU gcc/g77 compilers, codes should work correctly.
**************************************************************\n""")
        return 1
    return 0
def isstringfunction(rout):
    # Function whose result is a character scalar.
    return _function_result_check(rout, isstring)
def hasexternals(rout):
    # Routine has a non-empty list of external (callback) arguments.
    return 'externals' in rout and rout['externals']
def isthreadsafe(rout):
    # The user marked the routine with the 'threadsafe' f2py directive.
    return 'f2pyenhancements' in rout and 'threadsafe' in rout['f2pyenhancements']
def hasvariables(rout):
    return 'vars' in rout and rout['vars']
def isoptional(var):
    # Optional attribute, not overridden by 'required', and not hidden.
    return ('attrspec' in var and 'optional' in var['attrspec'] and 'required' not in var['attrspec']) and isintent_nothide(var)
def isexternal(var):
    return ('attrspec' in var and 'external' in var['attrspec'])
def isrequired(var):
    return not isoptional(var) and isintent_nothide(var)
def isintent_in(var):
    """Report whether ``var`` is an input argument.

    A variable with no intent specification defaults to intent(in).
    'hide' and 'inplace' exclude it; an explicit 'in' includes it even
    when combined with 'out'; pure 'out'/'inout'/'outin' exclude it.
    """
    if 'intent' not in var:
        return 1
    intent = var['intent']
    for excluded in ('hide', 'inplace'):
        if excluded in intent:
            return 0
    if 'in' in intent:
        return 1
    for excluded in ('out', 'inout', 'outin'):
        if excluded in intent:
            return 0
    return 1
def isintent_inout(var):
    # inout/outin requested and not overridden by 'in', 'hide' or 'inplace'.
    return 'intent' in var and ('inout' in var['intent'] or 'outin' in var['intent']) and 'in' not in var['intent'] and 'hide' not in var['intent'] and 'inplace' not in var['intent']
def isintent_out(var):
    return 'out' in var.get('intent',[])
def isintent_hide(var):
    # Hidden when marked 'hide' explicitly, or when it is a pure output
    # (out without in/inout/inplace).
    return ('intent' in var and ('hide' in var['intent'] or ('out' in var['intent'] and 'in' not in var['intent'] and (not l_or(isintent_inout,isintent_inplace)(var)))))
def isintent_nothide(var):
    return not isintent_hide(var)
def isintent_c(var):
    return 'c' in var.get('intent',[])
# def isintent_f(var):
#     return not isintent_c(var)
def isintent_cache(var):
    return 'cache' in var.get('intent',[])
def isintent_copy(var):
    return 'copy' in var.get('intent',[])
def isintent_overwrite(var):
    return 'overwrite' in var.get('intent',[])
def isintent_callback(var):
    return 'callback' in var.get('intent',[])
def isintent_inplace(var):
    return 'inplace' in var.get('intent',[])
def isintent_aux(var):
    return 'aux' in var.get('intent',[])
def isintent_aligned4(var):
    return 'aligned4' in var.get('intent',[])
def isintent_aligned8(var):
    return 'aligned8' in var.get('intent',[])
def isintent_aligned16(var):
    return 'aligned16' in var.get('intent',[])
# Maps intent predicate functions to the C macro names emitted into
# generated wrapper code.
isintent_dict = {isintent_in:'INTENT_IN',isintent_inout:'INTENT_INOUT',
                 isintent_out:'INTENT_OUT',isintent_hide:'INTENT_HIDE',
                 isintent_cache:'INTENT_CACHE',
                 isintent_c:'INTENT_C',isoptional:'OPTIONAL',
                 isintent_inplace:'INTENT_INPLACE',
                 isintent_aligned4:'INTENT_ALIGNED4',
                 isintent_aligned8:'INTENT_ALIGNED8',
                 isintent_aligned16:'INTENT_ALIGNED16',
                 }
def isprivate(var):
    return 'attrspec' in var and 'private' in var['attrspec']
def hasinitvalue(var):
    # The parser stores initializer expressions under the '=' key.
    return '=' in var
def hasinitvalueasstring(var):
    # Initializer is a quoted string literal.
    if not hasinitvalue(var):
        return 0
    return var['='][0] in ['"',"'"]
def hasnote(var):
    return 'note' in var
def hasresultnote(rout):
    # A function whose result variable carries a documentation note.
    if not isfunction(rout):
        return 0
    if 'result' in rout:
        a=rout['result']
    else:
        a=rout['name']
    if a in rout['vars']:
        return hasnote(rout['vars'][a])
    return 0
def hascommon(rout):
    return 'common' in rout
def containscommon(rout):
    # Recursively search rout and its nested body blocks for COMMON blocks.
    if hascommon(rout):
        return 1
    if hasbody(rout):
        for b in rout['body']:
            if containscommon(b):
                return 1
    return 0
def containsmodule(block):
    # Recursively search block and its body for a Fortran module.
    if ismodule(block):
        return 1
    if not hasbody(block):
        return 0
    for b in block['body']:
        if containsmodule(b):
            return 1
    return 0
def hasbody(rout):
    return 'body' in rout
def hascallstatement(rout):
    return getcallstatement(rout) is not None
def istrue(var):
    # Constant predicates, usable as rule defaults.
    return 1
def isfalse(var):
    return 0
class F2PYError(Exception):
    """Base exception for fatal f2py errors."""
    pass
class throw_error:
    """Callable that raises :class:`F2PYError` describing the offending
    variable; used as a rule entry that must never actually fire."""
    def __init__(self,mess):
        self.mess = mess
    def __call__(self,var):
        mess = '\n\n var = %s\n Message: %s\n' % (var,self.mess)
        # 'raise F2PYError(mess)' is valid under both Python 2 and 3;
        # the previous 'raise F2PYError, mess' form is Python-2-only.
        raise F2PYError(mess)
def l_and(*f):
    """Return a predicate that is the logical AND of the predicates *f*.

    Implemented as a plain closure instead of the previous eval-built
    lambda (which additionally raised SyntaxError for zero arguments).
    """
    return lambda v, fs=f: all(g(v) for g in fs)
def l_or(*f):
    """Return a predicate that is the logical OR of the predicates *f*."""
    return lambda v, fs=f: any(g(v) for g in fs)
def l_not(f):
    """Return the negation of predicate *f*."""
    return lambda v, g=f: not g(v)
def isdummyroutine(rout):
    """Report whether rout was declared with an empty ``fortranname``
    enhancement, marking it as a dummy (no real Fortran target)."""
    enhancements = rout.get('f2pyenhancements', {})
    if 'fortranname' not in enhancements:
        return 0
    return enhancements['fortranname'] == ''
def getfortranname(rout):
    """Return the Fortran-level name to call for rout.

    Prefers a truthy 'fortranname' f2py enhancement; otherwise falls
    back to rout['name'].  A falsy non-empty-string value is reported
    to stderr before falling back.
    """
    try:
        name = rout['f2pyenhancements']['fortranname']
    except KeyError:
        return rout['name']
    if name == '':
        return rout['name']
    if not name:
        errmess('Failed to use fortranname from %s\n'%(rout['f2pyenhancements']))
        return rout['name']
    return name
def getmultilineblock(rout,blockname,comment=1,counter=0):
    """Return the ``counter``'th multiline block named ``blockname`` from
    rout's f2py enhancements, or None when absent.

    Triple-quote fences are stripped; when ``comment`` is true they are
    replaced by C comments marking the block's start and end.
    """
    try:
        r = rout['f2pyenhancements'].get(blockname)
    except KeyError:
        return
    if not r: return
    # A plain string holds only one block, so higher counters are absent.
    # isinstance/repr replace the Python-2-only type('')/backtick forms.
    if counter>0 and isinstance(r, str):
        return
    if isinstance(r, list):
        if counter>=len(r): return
        r = r[counter]
    if r[:3]=="'''":
        if comment:
            r = '\t/* start ' + blockname + ' multiline ('+repr(counter)+') */\n' + r[3:]
        else:
            r = r[3:]
        if r[-3:]=="'''":
            if comment:
                r = r[:-3] + '\n\t/* end multiline ('+repr(counter)+')*/'
            else:
                r = r[:-3]
        else:
            errmess("%s multiline block should end with `'''`: %s\n" \
                    % (blockname,repr(r)))
    return r
def getcallstatement(rout):
    # User-supplied C call statement, if any.
    return getcallstatement_helper if 0 else getmultilineblock(rout,'callstatement')
def getcallprotoargument(rout,cb_map={}):
    # Build the C prototype argument list for calling the Fortran routine.
    # NOTE(review): mutable default cb_map is only read here, never
    # mutated, so the shared-default pitfall does not apply.
    r = getmultilineblock(rout,'callprotoargument',comment=0)
    if r: return r
    if hascallstatement(rout):
        outmess('warning: callstatement is defined without callprotoargument\n')
        return
    from capi_maps import getctype
    arg_types,arg_types2 = [],[]
    if l_and(isstringfunction,l_not(isfunction_wrap))(rout):
        # Unwrapped string functions take hidden (buffer, length) args.
        arg_types.extend(['char*','size_t'])
    for n in rout['args']:
        var = rout['vars'][n]
        if isintent_callback(var):
            continue
        if n in cb_map:
            ctype = cb_map[n]+'_typedef'
        else:
            ctype = getctype(var)
        if l_and(isintent_c,l_or(isscalar,iscomplex))(var):
            # intent(c) scalars/complex are passed by value.
            pass
        elif isstring(var):
            pass
            #ctype = 'void*'
        else:
            # Everything else is passed by reference, Fortran-style.
            ctype = ctype+'*'
        if isstring(var) or isarrayofstrings(var):
            # Hidden string-length arguments are appended after all others.
            arg_types2.append('size_t')
        arg_types.append(ctype)
    proto_args = ','.join(arg_types+arg_types2)
    if not proto_args:
        proto_args = 'void'
        #print proto_args
    return proto_args
def getusercode(rout):
    return getmultilineblock(rout,'usercode')
def getusercode1(rout):
    # Second usercode block (counter=1), if provided.
    return getmultilineblock(rout,'usercode',counter=1)
def getpymethoddef(rout):
    return getmultilineblock(rout,'pymethoddef')
def getargs(rout):
    """Return ``(args, sortargs)`` for rout.

    ``args`` is the declared argument-name list; ``sortargs`` is the
    same set of names reordered so entries listed in rout['sortvars']
    come first (unknown sortvars entries are ignored).  Without a
    'sortvars' hint both results are the declared list.
    """
    if 'args' not in rout:
        return [], []
    args = rout['args']
    if 'sortvars' not in rout:
        return args, args
    sortargs = [a for a in rout['sortvars'] if a in args]
    sortargs.extend(a for a in args if a not in sortargs)
    return args, sortargs
def getargs2(rout):
    # Like getargs, but prepends intent(aux) variables that are not
    # already among the declared arguments.
    sortargs,args=[],rout.get('args',[])
    auxvars = [a for a in rout['vars'].keys() if isintent_aux(rout['vars'][a])\
               and a not in args]
    args = auxvars + args
    if 'sortvars' in rout:
        for a in rout['sortvars']:
            if a in args: sortargs.append(a)
        for a in args:
            if a not in sortargs:
                sortargs.append(a)
    else: sortargs=auxvars + rout['args']
    return args,sortargs
def getrestdoc(rout):
    """Return leftover multiline documentation attached to a
    'python module' block, or None when there is none."""
    if 'f2pymultilines' not in rout:
        return None
    key = None
    if rout['block'] == 'python module':
        key = (rout['block'], rout['name'])
    return rout['f2pymultilines'].get(key, None)
def gentitle(name):
    """Return an ~80-column C comment banner centered on ``name``."""
    # Floor division keeps the padding width an int under both Python 2
    # and Python 3 (plain '/' would yield a float in Python 3, breaking
    # the string multiplication below).
    l=(80-len(name)-6)//2
    return '/*%s %s %s*/'%(l*'*',name,l*'*')
def flatlist(l):
    # Recursively flatten nested lists into a single flat list; a
    # non-list becomes a one-element list.  (Python 2 idioms:
    # types.ListType and the builtin reduce.)
    if type(l)==types.ListType:
        return reduce(lambda x,y,f=flatlist:x+f(y),l,[])
    return [l]
def stripcomma(s):
    """Drop a single trailing comma from ``s``, if present."""
    return s[:-1] if s.endswith(',') else s
def replace(str,dict,defaultsep=''):
    # Substitute '#key#' patterns in str with values from dict.  Either
    # argument may be a list, in which case a list of results is returned
    # (Python 2: map returns a list).  List values are joined with the
    # separator from dict['separatorsfor'][key], else defaultsep.
    # NOTE: parameters shadow the builtins str/dict (kept for
    # compatibility with existing callers).
    if type(dict)==types.ListType:
        return map(lambda d,f=replace,sep=defaultsep,s=str:f(s,d,sep),dict)
    if type(str)==types.ListType:
        return map(lambda s,f=replace,sep=defaultsep,d=dict:f(s,d,sep),str)
    # Two passes over the keys so substitutions introduced by one value
    # can themselves be substituted.
    for k in 2*dict.keys():
        if k=='separatorsfor':
            continue
        if 'separatorsfor' in dict and k in dict['separatorsfor']:
            sep=dict['separatorsfor'][k]
        else:
            sep=defaultsep
        if type(dict[k])==types.ListType:
            str=str.replace('#%s#'%(k),sep.join(flatlist(dict[k])))
        else:
            str=str.replace('#%s#'%(k),dict[k])
    return str
def dictappend(rd,ar):
    # Merge rule dictionary ar into rd in place (and return rd).
    # Strings are promoted to lists and concatenated; nested dicts merge
    # recursively; keys starting with '_' are skipped; 'separatorsfor'
    # sub-dicts only gain keys absent from rd.
    if type(ar)==types.ListType:
        # A list of dictionaries merges element by element.
        for a in ar:
            rd=dictappend(rd,a)
        return rd
    for k in ar.keys():
        if k[0]=='_':
            continue
        if k in rd:
            if type(rd[k])==types.StringType:
                rd[k]=[rd[k]]
            if type(rd[k])==types.ListType:
                if type(ar[k])==types.ListType:
                    rd[k]=rd[k]+ar[k]
                else:
                    rd[k].append(ar[k])
            elif type(rd[k])==types.DictType:
                if type(ar[k])==types.DictType:
                    if k=='separatorsfor':
                        # Keep rd's separators; only add missing ones.
                        for k1 in ar[k].keys():
                            if k1 not in rd[k]:
                                rd[k][k1]=ar[k][k1]
                    else:
                        rd[k]=dictappend(rd[k],ar[k])
        else:
            rd[k]=ar[k]
    return rd
def applyrules(rules,dict,var={}):
    # Apply a rule dictionary: substitute '#key#' patterns (via replace)
    # in string rules, recurse into list/dict rules, and gate dict rules
    # on predicate keys evaluated against var.  Returns the resulting
    # dictionary of generated text fragments.
    # NOTE(review): parameter `dict` shadows the builtin; `var={}` is a
    # mutable default but is only read here.
    ret={}
    if type(rules)==types.ListType:
        # A list of rule dicts: merge the results, honoring '_break'.
        for r in rules:
            rr=applyrules(r,dict,var)
            ret=dictappend(ret,rr)
            if '_break' in rr:
                break
        return ret
    if '_check' in rules and (not rules['_check'](var)):
        # Guard predicate failed: the whole rule set is skipped.
        return ret
    if 'need' in rules:
        # Register C helper dependencies with cfuncs.
        res = applyrules({'needs':rules['need']},dict,var)
        if 'needs' in res:
            cfuncs.append_needs(res['needs'])
    for k in rules.keys():
        if k=='separatorsfor':
            ret[k]=rules[k]; continue
        if type(rules[k])==types.StringType:
            ret[k]=replace(rules[k],dict)
        elif type(rules[k])==types.ListType:
            ret[k]=[]
            for i in rules[k]:
                ar=applyrules({k:i},dict,var)
                if k in ar:
                    ret[k].append(ar[k])
        elif k[0]=='_':
            continue
        elif type(rules[k])==types.DictType:
            # Predicate-keyed rules: apply only entries whose predicate
            # function accepts var.
            ret[k]=[]
            for k1 in rules[k].keys():
                if type(k1)==types.FunctionType and k1(var):
                    if type(rules[k][k1])==types.ListType:
                        for i in rules[k][k1]:
                            if type(i)==types.DictType:
                                # Nested 'supertext' rule expansion.
                                res=applyrules({'supertext':i},dict,var)
                                if 'supertext' in res:
                                    i=res['supertext']
                                else: i=''
                            ret[k].append(replace(i,dict))
                    else:
                        i=rules[k][k1]
                        if type(i)==types.DictType:
                            res=applyrules({'supertext':i},dict)
                            if 'supertext' in res:
                                i=res['supertext']
                            else: i=''
                        ret[k].append(replace(i,dict))
        else:
            errmess('applyrules: ignoring rule %s.\n'%`rules[k]`)
        # Collapse single-element lists; drop empty ones entirely.
        if type(ret[k])==types.ListType:
            if len(ret[k])==1:
                ret[k]=ret[k][0]
            if ret[k]==[]:
                del ret[k]
    return ret
|
|
import csv
import json
import math
import sys
results = {
'n': {
'2009': 0,
'2010': 0,
'2011': 0,
'2012': 0,
'2013': 0,
'2015': 0,
'2016': 0
},
'demographics': {
'age': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'biological_sex': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
},
'gender_identity': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'gender_alignment': {
'2016': {},
},
'sexual_orientation': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'race': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'relationship_status': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'polyamory': {
'sexuality': {
'2013': {},
#'2015': {},
},
'romantic': {
'2013': {},
#'2015': {},
}
},
'political_views': {
'social': {
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'economic': {
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
}
},
'furry_metadata': {
'furry_status': {
'2015': {},
'2016': {}
},
'partner_is_furry': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
},
'perception_of_fandom': {
'importance_of_sex': {
'self': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'others': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
'public': {
'2009': {},
'2010': {},
'2011': {},
'2012': {},
'2013': {},
'2015': {},
'2016': {}
},
}
},
}
with open(sys.argv[1], 'rb') as f:
"""
Pertinent Columns:
year,furry_status,birthdate,biosex,gender,orientation,race_white,race_black,race_hispanic,race_asian,race_native,religion,politics_social,politics_economic,occupation,education,relationship,partner_is_furry,polyamorous_romantic,polyamorous_sexual,seximportance_overall,seximportance_personal,seximportance_others,seximportance_public
"""
reader = csv.DictReader(f)
for row in reader:
year = row['year']
results['n'][year] += 1
age = row['birthdate']
if age:
age = str(int(math.floor(float(age))))
if age and int(age) > 1900 and int(age) < 2015:
if age in results['demographics']['age'][year]:
results['demographics']['age'][year][age] += 1
else:
results['demographics']['age'][year][age] = 1
biosex = row['biosex']
if biosex and year in results['demographics']['biological_sex']:
if biosex in results['demographics']['biological_sex'][year]:
results['demographics']['biological_sex'][year][biosex] += 1
else:
results['demographics']['biological_sex'][year][biosex] = 1
gender = row['gender']
if gender:
if gender in results['demographics']['gender_identity'][year]:
results['demographics']['gender_identity'][year][gender] += 1
else:
results['demographics']['gender_identity'][year][gender] = 1
gender = row['gender_alignment']
if gender and year in results['demographics']['gender_alignment']:
if gender in results['demographics']['gender_alignment'][year]:
results['demographics']['gender_alignment'][year][gender] += 1
else:
results['demographics']['gender_alignment'][year][gender] = 1
orientation = row['orientation']
if orientation:
if orientation in results['demographics']['sexual_orientation'][year]:
results['demographics']['sexual_orientation'][year][orientation] += 1
else:
results['demographics']['sexual_orientation'][year][orientation] = 1
for i in ['white', 'black', 'hispanic', 'asian', 'native']:
race = row['race_' + i]
if race == 'True':
if i in results['demographics']['race'][year]:
results['demographics']['race'][year][i] += 1
else:
results['demographics']['race'][year][i] = 1
relationship_status = row['relationship']
if relationship_status:
if relationship_status in results['demographics']['relationship_status'][year]:
results['demographics']['relationship_status'][year][relationship_status] += 1
else:
results['demographics']['relationship_status'][year][relationship_status] = 1
if year == '2013':
poly_sexual = row['polyamorous_sexual']
if poly_sexual:
if poly_sexual in results['demographics']['polyamory']['sexuality'][year]:
results['demographics']['polyamory']['sexuality'][year][poly_sexual] += 1
else:
results['demographics']['polyamory']['sexuality'][year][poly_sexual] = 1
poly_romantic = row['polyamorous_romantic']
if poly_romantic:
if poly_romantic in results['demographics']['polyamory']['romantic'][year]:
results['demographics']['polyamory']['romantic'][year][poly_romantic] += 1
else:
results['demographics']['polyamory']['romantic'][year][poly_romantic] = 1
politics_social = row['politics_social']
if politics_social:
if politics_social in results['demographics']['political_views']['social'][year]:
results['demographics']['political_views']['social'][year][politics_social] += 1
else:
results['demographics']['political_views']['social'][year][politics_social] = 1
politics_economic = row['politics_economic']
if politics_economic:
if politics_economic in results['demographics']['political_views']['economic'][year]:
results['demographics']['political_views']['economic'][year][politics_economic] += 1
else:
results['demographics']['political_views']['economic'][year][politics_economic] = 1
if year in ['2015', '2016']:
furry_status = row['furry_status']
if furry_status:
if furry_status in results['furry_metadata']['furry_status'][year]:
results['furry_metadata']['furry_status'][year][furry_status] += 1
else:
results['furry_metadata']['furry_status'][year][furry_status] = 1
partner = row['partner_is_furry']
if partner == '':
partner = 'False'
if partner in results['furry_metadata']['partner_is_furry'][year]:
results['furry_metadata']['partner_is_furry'][year][partner] += 1
else:
results['furry_metadata']['partner_is_furry'][year][partner] = 1
ios = row['seximportance_personal']
if ios:
if ios in results['perception_of_fandom']['importance_of_sex']['self'][year]:
results['perception_of_fandom']['importance_of_sex']['self'][year][ios] += 1
else:
results['perception_of_fandom']['importance_of_sex']['self'][year][ios] = 1
ios = row['seximportance_others']
if ios:
if ios in results['perception_of_fandom']['importance_of_sex']['others'][year]:
results['perception_of_fandom']['importance_of_sex']['others'][year][ios] += 1
else:
results['perception_of_fandom']['importance_of_sex']['others'][year][ios] = 1
ios = row['seximportance_public']
if ios:
if ios in results['perception_of_fandom']['importance_of_sex']['public'][year]:
results['perception_of_fandom']['importance_of_sex']['public'][year][ios] += 1
else:
results['perception_of_fandom']['importance_of_sex']['public'][year][ios] = 1
print json.dumps(results)
# import pprint
# print pprint.pprint(results['demographics']['age']['2013'])
|
|
import base64
import hashlib
import json
import os
import re
import smtplib
import sys
import urllib
from django.core.context_processors import csrf
from django.core.validators import validate_email
from django.db.utils import IntegrityError
from django.http import *
from django.shortcuts import render_to_response
from django.utils.http import urlquote_plus
from django.views.decorators.csrf import csrf_exempt
from multiprocessing import Pool
from browser.utils import *
from core.db.manager import DataHubManager
from inventory.models import *
# Directory containing this module (apparently unused below).
p = os.path.abspath(os.path.dirname(__file__))
'''
@author: Anant Bhardwaj
@date: Feb 12, 2012
'''
# Session keys under which the authenticated user's identity is stored.
kEmail = "SESSION_EMAIL"
kUsername = "SESSION_USERNAME"
# for async calls
pool = Pool(processes=1)
'''
LOGIN/REGISTER/RESET
'''
def is_valid_username (username):
    """Report whether *username* is acceptable: more than three
    characters, all of which are word characters (``\\w``)."""
    try:
        # re.match returns None when the first character is not a word
        # character; .group() then raises AttributeError.  The previous
        # bare except is narrowed to the two failure modes that can occur.
        if len(username) >3 and re.match(r'\w+', username).group() == username:
            return True
    except (AttributeError, TypeError):
        # AttributeError: no match at all; TypeError: non-string input.
        pass
    return False
def login_required (f):
    """Decorator: bounce anonymous visitors to the login page, preserving
    the originally requested path in ``redirect_url``."""
    def wrap (request, *args, **kwargs):
        if kEmail in request.session.keys():
            return f(request, *args, **kwargs)
        redirect_url = urlquote_plus(request.get_full_path())
        return HttpResponseRedirect("/account/login?redirect_url=%s" %(redirect_url))
    # Preserve the wrapped view's metadata for introspection/debugging.
    wrap.__doc__ = f.__doc__
    wrap.__name__ = f.__name__
    return wrap
def login_form (request, redirect_url='/', errors=None):
    """Render the login page with optional error messages."""
    # None default instead of a mutable [] shared across calls.
    if errors is None:
        errors = []
    c = {'redirect_url':redirect_url, 'errors':errors, 'values':request.REQUEST}
    c.update(csrf(request))
    return render_to_response('login.html', c)
def register_form (request, redirect_url='/', errors=None):
    """Render the registration page with optional error messages."""
    if errors is None:
        errors = []
    c = {'redirect_url':redirect_url, 'errors':errors, 'values':request.REQUEST}
    c.update(csrf(request))
    return render_to_response('register.html', c)
def login (request):
    # Handle the login page: POST authenticates against the User model;
    # GET shows the form, or redirects straight through when a session
    # already exists.  On success the username is appended to the
    # redirect URL as ?auth_user=.
    # NOTE(review): passwords are compared as unsalted SHA-1 digests --
    # weak by modern standards; consider Django's auth hashing.
    redirect_url = '/'
    if('redirect_url' in request.GET.keys()):
        redirect_url = urllib.unquote_plus(request.GET['redirect_url'])
        if not redirect_url or redirect_url == '':
            redirect_url = '/'
    if request.method == "POST":
        errors = []
        login_email = ''
        if('redirect_url' in request.POST.keys()):
            redirect_url = urllib.unquote_plus(request.POST['redirect_url'])
        email = None
        try:
            login_id = request.POST["login_id"].lower()
            login_password = hashlib.sha1(request.POST["login_password"]).hexdigest()
            # find the user email in the username, if it's there.
            try:
                validate_email(login_id.lower().strip())
                email = login_id.lower().strip()
            except:
                pass
            user = None
            if email:
                user = User.objects.get(email=login_id, password=login_password)
            else:
                user = User.objects.get(username=login_id, password=login_password)
            clear_session(request)
            request.session[kEmail] = user.email
            request.session[kUsername] = user.username
            redirect_url = redirect_url + urllib.unquote_plus('?auth_user=%s' %(user.username))
            return HttpResponseRedirect(redirect_url)
        except User.DoesNotExist:
            # Distinguish "wrong password" (account exists) from
            # "no such account" for a friendlier message.
            try:
                if email:
                    User.objects.get(email=login_id)
                else:
                    User.objects.get(username=login_id)
                errors.append(
                    'Wrong password. Please try again.<br /><br />'
                    '<a class="blue bold" href="/account/forgot">Click Here</a> '
                    'to reset your password.')
            except User.DoesNotExist:
                errors.append(
                    'Could not find any account associated with login_id: '
                    '%s.<br /><br /><a class="blue bold" '
                    'href="/account/register?redirect_url=%s">Click Here</a> '
                    'to create an account.' %(login_id,
                    urllib.quote_plus(redirect_url)))
            return login_form(
                request, redirect_url = urllib.quote_plus(redirect_url),
                errors = errors)
        except:
            # NOTE(review): bare except hides the real failure cause.
            errors.append('Login failed.')
            return login_form(
                request, redirect_url = urllib.quote_plus(redirect_url),
                errors = errors)
    else:
        # GET: skip the form when the session already names a user.
        try:
            if request.session[kUsername]:
                redirect_url = redirect_url + urllib.unquote_plus('?auth_user=%s' %(request.session[kUsername]))
                return HttpResponseRedirect(redirect_url)
            else:
                return login_form(request, urllib.quote_plus(redirect_url))
        except:
            return login_form(request, urllib.quote_plus(redirect_url))
def register (request):
    # Handle account registration: validate the form, create the backend
    # DataHub user and the Django User row, start the session, and email
    # a verification link asynchronously.  Python 2 'except E, e' syntax.
    redirect_url = '/'
    if('redirect_url' in request.GET.keys()):
        redirect_url = urllib.unquote_plus(request.GET['redirect_url'])
    if request.method == "POST":
        errors = []
        email = ''
        try:
            error = False
            if('redirect_url' in request.POST.keys()):
                redirect_url = urllib.unquote_plus(request.POST['redirect_url'])
            username = request.POST["username"].lower()
            email = request.POST["email"].lower()
            password = request.POST["password"]
            try:
                validate_email(email.strip())
            except:
                errors.append("Invalid Email.")
                error = True
            if(not is_valid_username(username)):
                errors.append("Invalid Username.")
                error = True
            if(password == ""):
                errors.append("Empty Password.")
                error = True
            try:
                user = User.objects.get(username=username)
                errors.append("Username already taken.")
                error = True
            except User.DoesNotExist:
                pass
            if not error:
                # NOTE(review): unsalted SHA-1 password digest -- weak.
                hashed_password = hashlib.sha1(password).hexdigest()
                # create_user may fail if the backend account already
                # exists; the follow-up change_password is the real check.
                try:
                    DataHubManager.create_user(username=username, password=hashed_password)
                except Exception, e:
                    print e
                    pass
                try:
                    DataHubManager.change_password(username=username, password=hashed_password)
                except Exception, e:
                    errors.append(str(e))
                    error = True
            if(error):
                return register_form(request, redirect_url = urllib.quote_plus(redirect_url), errors = errors)
            user = User(username=username, email=email, password=hashed_password)
            user.save()
            clear_session(request)
            request.session[kEmail] = user.email
            request.session[kUsername] = user.username
            encrypted_email = encrypt_text(user.email)
            subject = "Welcome to DataHub"
            msg_body = '''
Dear %s,
Thanks for registering to DataHub.
Please click the link below to start using DataHub:
%s://%s/account/verify/%s
''' % (
                user.email,
                'https' if request.is_secure() else 'http',
                request.get_host(),
                encrypted_email)
            # Send the verification mail off-request via the worker pool.
            pool.apply_async(send_email, [user.email, subject, msg_body])
            redirect_url = redirect_url + urllib.unquote_plus('?auth_user=%s' %(user.username))
            return HttpResponseRedirect(redirect_url)
        except IntegrityError:
            # Unique-email constraint violation on User.save().
            errors.append(
                'Account with the email address <a href="mailto:%s">%s</a> already exists.<br /> <br />Please <a class="blue bold" href="/account/login?login_email=%s">Sign In</a>.'
                % (email, email, urllib.quote_plus(email)))
            return register_form(request, redirect_url = urllib.quote_plus(redirect_url), errors = errors)
        except Exception, e:
            errors.append("Error %s." %(str(e)))
            return register_form(request, redirect_url = urllib.quote_plus(redirect_url), errors = errors)
    else:
        return register_form(request, redirect_url = urllib.quote_plus(redirect_url))
def clear_session (request):
    """Wipe the session, including the auth keys."""
    request.session.flush()
    # flush() normally removes all session data already; the explicit
    # deletes below are kept as a defensive measure.
    for key in (kEmail, kUsername):
        if key in request.session.keys():
            del request.session[key]
def logout (request):
    """Log the user out and render a confirmation page."""
    clear_session(request)
    context = {
        'msg_title': 'Thank you for using DataHub!',
        'msg_body': 'Your have been logged out.<br /><br /><a href="/account/login">Click Here</a> to sign in again.'
    }
    context.update(csrf(request))
    return render_to_response('confirmation.html', context)
def forgot (request):
    # Password-reset request flow: POST emails a reset link containing
    # the user's encrypted email; GET just shows the form.
    if request.method == "POST":
        errors = []
        try:
            user_email = request.POST["email"].lower()
            user = User.objects.get(email=user_email)
            # The reset token is the encrypted email address itself.
            encrypted_email = encrypt_text(user_email)
            subject = "DataHub Password Reset"
            msg_body = '''
Dear %s,
Please click the link below to reset your DataHub password:
%s://%s/account/reset/%s
''' % (
                user.email,
                'https' if request.is_secure() else 'http',
                request.get_host(),
                encrypted_email)
            # Send asynchronously so the request is not blocked on SMTP.
            pool.apply_async(send_email, [user_email, subject, msg_body])
            c = {
                'msg_title': 'DataHub Reset Password',
                'msg_body': 'A link to reset your password has been sent to your email address.'
            }
            c.update(csrf(request))
            return render_to_response('confirmation.html', c)
        except User.DoesNotExist:
            errors.append(
                "Invalid Email Address.")
        except Exception, e:
            errors.append(
                'Error: %s.'
                'Please try again or send an email to '
                '<a href="mailto:datahub@csail.mit.edu">datahub@csail.mit.edu</a>.' %(str(e)))
        # Fall through: re-render the form with the collected errors.
        c = {'errors': errors, 'values': request.POST}
        c.update(csrf(request))
        return render_to_response('forgot.html', c)
    else:
        c = {'values': request.REQUEST}
        c.update(csrf(request))
        return render_to_response('forgot.html', c)
def verify (request, encrypted_email):
    # Email-verification endpoint: the URL token is the user's encrypted
    # email; on success the user is logged in directly.
    errors = []
    c = {'msg_title': 'DataHub Account Verification'}
    try:
        user_email = decrypt_text(encrypted_email)
        user = User.objects.get(email=user_email)
        c.update({
            'msg_body': 'Thanks for verifying your email address!<br /> <br /><a href="/">Click Here</a> to start using DataHub.'
        })
        clear_session(request)
        request.session[kEmail] = user.email
        request.session[kUsername] = user.username
    except:
        # NOTE(review): bare except treats every failure (bad token,
        # unknown user, decryption error) as a wrong verify code.
        errors.append(
            'Wrong verify code in the URL. '
            'Please try again or send an email to '
            '<a href="mailto:datahub@csail.mit.edu">datahub@csail.mit.edu</a>')
    c.update({'errors': errors})
    c.update(csrf(request))
    return render_to_response('confirmation.html', c)
def reset(request, encrypted_email):
    """Reset a user's password from an emailed reset link.

    GET (with a valid code) renders the reset form; POST validates the new
    password, updates the DataHub backend credentials, and saves the new
    hash on the Django user.
    """
    errors = []
    error = False
    if request.method == "POST":
        try:
            user_email = request.POST["user_email"].lower()
            password = request.POST["new_password"]
            password2 = request.POST["new_password2"]
            if password == "":
                errors.append("Empty Password.")
                error = True
            if password2 != password:
                errors.append("Password and Confirm Password don't match.")
                error = True
            if not error:
                # NOTE(review): unsalted SHA-1 is a weak password hash --
                # consider Django's password hashers; kept for compatibility.
                hashed_password = hashlib.sha1(password).hexdigest()
                user = User.objects.get(email=user_email)
                try:
                    # Best effort: the backend account may already exist.
                    DataHubManager.create_user(username=user.username, password=hashed_password)
                except Exception:
                    pass
                try:
                    DataHubManager.change_password(username=user.username, password=hashed_password)
                except Exception as e:  # was "except Exception, e": invalid on Python 3
                    errors.append(str(e))
                    error = True
            if error:
                c = {
                    'user_email': user_email,
                    'encrypted_email': encrypted_email,
                    'errors': errors
                }
                c.update(csrf(request))
                return render_to_response('reset.html', c)
            else:
                # hashed_password and user were already computed above
                # (this branch is only reachable when error is False).
                user.password = hashed_password
                user.save()
                c = {
                    'msg_title': 'DataHub Reset Password',
                    'msg_body': 'Your password has been changed successfully.<br /> <br />'
                                '<a href="/account/login" class="blue bold">Click Here</a>'
                                ' to sign in.'
                }
                c.update(csrf(request))
                return render_to_response('confirmation.html', c)
        except Exception:  # was a bare "except:"
            errors.append(
                'Some unknown error happened. '
                'Please try again or send an email to '
                '<a href="mailto:datahub@csail.mit.edu">datahub@csail.mit.edu</a>')
            c = {'errors': errors}
            c.update(csrf(request))
            return render_to_response('reset.html', c)
    else:
        try:
            user_email = decrypt_text(encrypted_email)
            User.objects.get(email=user_email)
            c = {
                'user_email': user_email,
                'encrypted_email': encrypted_email
            }
            c.update(csrf(request))
            return render_to_response('reset.html', c)
        except Exception:  # was a bare "except:"
            errors.append(
                'Wrong reset code in the URL. '
                'Please try again or send an email to '
                '<a href="mailto:datahub@csail.mit.edu">datahub@csail.mit.edu</a>')
            c = {'msg_title': 'DataHub Reset Password', 'errors': errors}
            c.update(csrf(request))
            return render_to_response('confirmation.html', c)
def get_login(request):
    """Return the username stored in the session, or None if not signed in."""
    try:
        return request.session[kUsername]
    except KeyError:  # was a bare "except:" hiding unrelated failures
        return None
@login_required
# Returns the signed-in user's stored password hash so the JDBC connector
# can authenticate against the DataHub backend.
# NOTE(review): exposing the password hash over HTTP is sensitive -- confirm
# this endpoint must remain, and that @login_required is sufficient here.
def jdbc_password(request):
    login = request.session[kUsername]
    user = User.objects.get(username=login)
    return HttpResponse(user.password)
|
|
from __future__ import print_function
import os
import logging
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
from django.core.files.storage import default_storage
from django.core.files.images import ImageFile
from django.utils import six
from PIL import Image, ExifTags
logger = logging.getLogger(__name__)
def get_file_path(img_file):
    """Best-effort path/name for a path string or file-like object.

    Strings pass through unchanged; objects yield ``.path`` when present,
    otherwise ``.name``; ``None`` input yields ``None``.
    """
    if isinstance(img_file, six.string_types):
        return img_file
    if img_file is None:
        return None
    if hasattr(img_file, 'path'):
        return img_file.path
    return img_file.name
def get_thumb_name(img_file, size):
    """Derive the thumbnail storage name for *img_file* at *size*.

    Thumbnails live in a sibling "<name>_thumbs" directory next to the
    original file; an empty string is returned when there is no source.
    """
    if img_file is None:
        return ''
    directory, basename = os.path.split(get_file_path(img_file))
    thumb = '{}_thumbs/v1_at_{}.jpg'.format(basename, str(size))
    return os.path.join(directory, thumb)
def parse_size(size):
    """Parse a size spec into ``(width, height)``.

    Accepted forms: an int or digit string (width only), 'auto' (no
    constraint), or 'WxH' where either side may be 'auto'/non-numeric.

    Args:
        size: int, digit string, 'auto', or 'WxH' string.

    Returns:
        (int or None, int or None): parsed width and height; None means
        "unconstrained" on that axis.
    """
    w, h = None, None
    if isinstance(size, int) or size.isdigit():
        w = size
    elif size != 'auto':
        s = size.split('x')
        if s[0].isdigit(): w = int(s[0])
        # Guard: a malformed value without 'x' (e.g. 'abc') used to raise
        # IndexError here.
        if len(s) > 1 and s[1].isdigit(): h = int(s[1])
    if w is not None: w = int(w)
    if h is not None: h = int(h)
    return w, h
def lower(a, b):
    """Return the smaller of *a* and *b* (*b* wins ties)."""
    return a if a < b else b
def get_scaled_down_size(current_size, target_size):
    """
    Calculates a scaled size for the image. The result will fit on the target's
    smallest side.

    Args:
        current_size: (width, height) of the source image.
        target_size: (width, height) limits; either entry may be None,
            meaning "unconstrained" on that axis.

    Returns:
        (int, int): the new (width, height). The image is never scaled up.
    """
    target_w, target_h = target_size
    cur_w, cur_h = current_size
    # Scale so the width matches (capped at the current width), preserving
    # the aspect ratio.
    def scale_w():
        w = lower(target_w, cur_w)
        return int(w), int((float(cur_h) / float(cur_w)) * w)
    # Scale so the height matches (capped at the current height), preserving
    # the aspect ratio.
    def scale_h():
        h = lower(target_h, cur_h)
        return int((float(cur_w) / float(cur_h)) * h), int(h)
    # No constraints at all: keep the current size.
    if target_w is None and target_h is None:
        return int(cur_w), int(cur_h)
    elif target_h is None:
        return scale_w()
    elif target_w is None:
        return scale_h()
    # Never scale up: if the target exceeds the image on either axis, or the
    # image already fits entirely, keep the current size.
    elif target_w >= cur_w or target_h >= cur_h:
        return int(cur_w), int(cur_h)
    elif cur_w <= target_w and cur_h <= target_h:
        return int(cur_w), int(cur_h)
    elif cur_w < target_w and cur_h > target_h:
        return int(target_w), int(cur_h)
    elif cur_w > target_w and cur_h < target_h:
        return int(cur_w), int(target_h)
    # tall image
    elif target_h == target_w:
        if cur_h >= cur_w:
            return scale_w()
        else:
            return scale_h()
    elif target_h >= target_w:
        return scale_h()
    # wide image
    elif target_w >= target_h:
        return scale_w()
    # NOTE(review): the branches above appear exhaustive; this is a safety
    # net for a case the author believed impossible.
    raise ValueError('Impossible error: Aron sucks at math. '
                     'target:%s, current:%s' % (str(target_size), str(current_size)))
def get_exif(img):
    """Return the image's EXIF data as a dict ({} when unavailable)."""
    try:
        raw = hasattr(img, '_getexif') and img._getexif()
        if raw:
            return dict(raw.items())
    except (IOError, IndexError):
        # Some images carry corrupt EXIF blocks; treat them as no EXIF.
        pass
    return {}
def resize_image(img_file, size=100, storage=default_storage):
    '''
    The size argument can be in one of two forms: width or widthxheight. "auto"
    is an acceptable value for either. Some examples:
    resize_image(img, 50) - this will set with width to 50, with the height scaled
    accordingly.
    resize_image(img, 'auto') - this won't resize the image at all
    resize_image(img, '50x50') - the width and height will be 50px, causing the
    image to be letterboxed (never stretched).
    resize_image(img, 'autox50') - this will set the height and scale the width

    Returns the thumbnail's storage name, or None when img_file is None.
    The thumbnail is written only if it does not already exist.
    '''
    if img_file is None:
        return
    # Prefer the file's own storage backend when it has one.
    if hasattr(img_file, 'storage'):
        storage = img_file.storage
    thumb_filename = get_thumb_name(img_file, size)
    if storage is None:
        exists = os.path.isfile(thumb_filename)
    else:
        exists = storage.exists(thumb_filename)
    # if the image wasn't already resized, resize it
    if not exists:
        # NOTE(review): assumes img_file is a seekable file-like object here;
        # a bare path string would fail on seek() -- confirm callers.
        img_file.seek(0)
        img = Image.open(img_file)
        # Honor the EXIF Orientation tag so rotated photos come out upright.
        exif = get_exif(img)
        for orientation in ExifTags.TAGS.keys():
            if ExifTags.TAGS[orientation] == 'Orientation':
                break
        if orientation in exif:
            if exif[orientation] == 3:
                img = img.rotate(180, Image.BICUBIC, True)
            elif exif[orientation] == 6:
                img = img.rotate(270, Image.BICUBIC, True)
            elif exif[orientation] == 8:
                img = img.rotate(90, Image.BICUBIC, True)
        current_size = [float(x) for x in img.size]
        target_size = parse_size(size)
        scaled_size = get_scaled_down_size(current_size, target_size)
        # NOTE(review): Image.ANTIALIAS is removed in Pillow 10 (use
        # Image.LANCZOS there) -- confirm the pinned Pillow version.
        img = img.resize(scaled_size, Image.ANTIALIAS)
        # Letterbox onto a white canvas of the full target size; an 'auto'
        # axis (None) falls back to the scaled image's own dimension.
        target_size = list(target_size)
        target_size[0] = target_size[0] or img.size[0]
        target_size[1] = target_size[1] or img.size[1]
        # transparent
        # bg = Image.new('RGBA', target_size)
        # bg.putalpha(Image.new('L', target_size, color=0))
        bg = Image.new('RGB', target_size, color=(255, 255, 255))
        box = (int((target_size[0] - int(img.size[0])) / 2),
               int((target_size[1] - int(img.size[1])) / 2))
        bg.paste(img, box)
        img = bg
        # JPEG output cannot carry alpha; force RGB.
        if img.mode != 'RGB': img = img.convert('RGB')
        if storage is None:
            # Local filesystem: make sure the thumbnail directory exists.
            thumb_dir = '/'.join(thumb_filename.split('/')[:-1])
            try:
                os.makedirs(thumb_dir)
            except OSError:
                pass
            img.save(thumb_filename, 'JPEG', quality=80)
        else:
            image_io = StringIO()
            img.save(image_io, 'JPEG', quality=80)
            storage.save(thumb_filename, ImageFile(image_io))
    return thumb_filename
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
#
import logging
from quantum.db import api as db
from quantum.openstack.common import importutils
from quantum.plugins.cisco.common import cisco_constants as const
from quantum.plugins.cisco.common import cisco_credentials as cred
from quantum.plugins.cisco.common import cisco_exceptions as cexc
from quantum.plugins.cisco.common import cisco_utils as cutil
from quantum.plugins.cisco.db import network_db_v2 as cdb
from quantum.plugins.cisco.db import ucs_db_v2 as udb
from quantum.plugins.cisco.l2device_plugin_base import L2DevicePluginBase
from quantum.plugins.cisco.ucs import cisco_ucs_configuration as conf
LOG = logging.getLogger(__name__)
class UCSVICPlugin(L2DevicePluginBase):
    """UCS Device Plugin.

    Translates Quantum L2 network/port operations into calls on a Cisco UCS
    Manager (UCSM) through the driver configured as conf.UCSM_DRIVER.  Every
    public method expects the target UCSM's IP address in
    kwargs[const.DEVICE_IP]; _set_ucsm() resolves the credentials for it.
    """
    def __init__(self):
        # The driver class is configurable; load it by dotted path.
        self._driver = importutils.import_object(conf.UCSM_DRIVER)
        LOG.debug("Loaded driver %s\n" % conf.UCSM_DRIVER)
        # TODO (Sumit) Make the counter per UCSM
        self._port_profile_counter = 0
    def get_all_networks(self, tenant_id, **kwargs):
        """
        Returns a dictionary containing all
        <network_uuid, network_name> for
        the specified tenant.
        """
        LOG.debug("UCSVICPlugin:get_all_networks() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        networks_list = db.network_list(tenant_id)
        new_networks_list = []
        for network in networks_list:
            new_network_dict = cutil.make_net_dict(network[const.UUID],
                                                   network[const.NETWORKNAME],
                                                   [])
            new_networks_list.append(new_network_dict)
        return new_networks_list
    def create_network(self, tenant_id, net_name, net_id, vlan_name, vlan_id,
                       **kwargs):
        """
        Creates a new Virtual Network, and assigns it
        a symbolic name.  The VLAN is created on the UCSM; DB bookkeeping
        is handled by the core L2 plugin, not here.
        """
        LOG.debug("UCSVICPlugin:create_network() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        self._driver.create_vlan(vlan_name, str(vlan_id), self._ucsm_ip,
                                 self._ucsm_username, self._ucsm_password)
        ports_on_net = []
        new_network_dict = cutil.make_net_dict(net_id,
                                               net_name,
                                               ports_on_net)
        return new_network_dict
    def delete_network(self, tenant_id, net_id, **kwargs):
        """
        Deletes the network with the specified network identifier
        belonging to the specified tenant.
        """
        LOG.debug("UCSVICPlugin:delete_network() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        vlan_binding = cdb.get_vlan_binding(net_id)
        vlan_name = vlan_binding[const.VLANNAME]
        self._driver.delete_vlan(vlan_name, self._ucsm_ip,
                                 self._ucsm_username, self._ucsm_password)
        #Rohit:passing empty network name, might not need fixing
        net_dict = cutil.make_net_dict(net_id,
                                       "",
                                       [])
        return net_dict
    def get_network_details(self, tenant_id, net_id, **kwargs):
        """
        Returns the details (including its ports) of the Virtual Network
        with the specified identifier.
        """
        LOG.debug("UCSVICPlugin:get_network_details() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        network = db.network_get(net_id)
        ports_list = network[const.NETWORKPORTS]
        ports_on_net = []
        for port in ports_list:
            new_port = cutil.make_port_dict(port[const.UUID],
                                            port[const.PORTSTATE],
                                            port[const.NETWORKID],
                                            port[const.INTERFACEID])
            ports_on_net.append(new_port)
        new_network = cutil.make_net_dict(network[const.UUID],
                                          network[const.NETWORKNAME],
                                          ports_on_net)
        return new_network
    def update_network(self, tenant_id, net_id, **kwargs):
        """
        Updates the symbolic name belonging to a particular
        Virtual Network.
        """
        LOG.debug("UCSVICPlugin:update_network() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        network = db.network_get(net_id)
        net_dict = cutil.make_net_dict(network[const.UUID],
                                       network[const.NETWORKNAME],
                                       [])
        return net_dict
    def get_all_ports(self, tenant_id, net_id, **kwargs):
        """
        Retrieves all port identifiers belonging to the
        specified Virtual Network.
        """
        LOG.debug("UCSVICPlugin:get_all_ports() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        network = db.network_get(net_id)
        ports_list = network[const.NETWORKPORTS]
        ports_on_net = []
        for port in ports_list:
            port_binding = udb.get_portbinding(port[const.UUID])
            ports_on_net.append(port_binding)
        return ports_on_net
    def create_port(self, tenant_id, net_id, port_state, port_id, **kwargs):
        """
        Creates a port on the specified Virtual Network.

        Reserves a blade interface on the least-reserved blade and binds a
        freshly created port profile (on the default VLAN) to the port.
        """
        LOG.debug("UCSVICPlugin:create_port() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        qos = None
        ucs_inventory = kwargs[const.UCS_INVENTORY]
        least_rsvd_blade_dict = kwargs[const.LEAST_RSVD_BLADE_DICT]
        chassis_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_CHASSIS]
        blade_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_ID]
        blade_data_dict = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_DATA]
        new_port_profile = self._create_port_profile(tenant_id, net_id,
                                                     port_id,
                                                     conf.DEFAULT_VLAN_NAME,
                                                     conf.DEFAULT_VLAN_ID)
        profile_name = new_port_profile[const.PROFILE_NAME]
        # NOTE(review): called for its blade-reservation side effect; the
        # returned dict is unused.
        rsvd_nic_dict = ucs_inventory.reserve_blade_interface(
            self._ucsm_ip, chassis_id,
            blade_id, blade_data_dict,
            tenant_id, port_id,
            profile_name)
        port_binding = udb.update_portbinding(port_id,
                                              portprofile_name=profile_name,
                                              vlan_name=conf.DEFAULT_VLAN_NAME,
                                              vlan_id=conf.DEFAULT_VLAN_ID,
                                              qos=qos)
        return port_binding
    def delete_port(self, tenant_id, net_id, port_id, **kwargs):
        """
        Deletes a port on a specified Virtual Network,
        if the port contains a remote interface attachment,
        the remote interface should first be un-plugged and
        then the port can be deleted.
        """
        LOG.debug("UCSVICPlugin:delete_port() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        ucs_inventory = kwargs[const.UCS_INVENTORY]
        chassis_id = kwargs[const.CHASSIS_ID]
        blade_id = kwargs[const.BLADE_ID]
        interface_dn = kwargs[const.BLADE_INTF_DN]
        port_binding = udb.get_portbinding(port_id)
        profile_name = port_binding[const.PORTPROFILENAME]
        self._delete_port_profile(port_id, profile_name)
        ucs_inventory.unreserve_blade_interface(self._ucsm_ip, chassis_id,
                                                blade_id, interface_dn)
        return udb.remove_portbinding(port_id)
    def update_port(self, tenant_id, net_id, port_id, **kwargs):
        """
        Updates the state of a port on the specified Virtual Network.

        Currently a no-op beyond selecting the target UCSM (returns None).
        """
        LOG.debug("UCSVICPlugin:update_port() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        pass
    def get_port_details(self, tenant_id, net_id, port_id, **kwargs):
        """
        This method allows the user to retrieve a remote interface
        that is attached to this particular port.
        """
        LOG.debug("UCSVICPlugin:get_port_details() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        port_binding = udb.get_portbinding(port_id)
        return port_binding
    def plug_interface(self, tenant_id, net_id, port_id, remote_interface_id,
                       **kwargs):
        """
        Attaches a remote interface to the specified port on the
        specified Virtual Network, by moving the port profile from the
        old VLAN to the network's VLAN.
        """
        LOG.debug("UCSVICPlugin:plug_interface() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        port_binding = udb.get_portbinding(port_id)
        profile_name = port_binding[const.PORTPROFILENAME]
        old_vlan_name = port_binding[const.VLANNAME]
        new_vlan_name = self._get_vlan_name_for_network(tenant_id, net_id)
        new_vlan_id = self._get_vlan_id_for_network(tenant_id, net_id)
        self._driver.change_vlan_in_profile(profile_name, old_vlan_name,
                                            new_vlan_name, self._ucsm_ip,
                                            self._ucsm_username,
                                            self._ucsm_password)
        return udb.update_portbinding(port_id, vlan_name=new_vlan_name,
                                      vlan_id=new_vlan_id)
    def unplug_interface(self, tenant_id, net_id, port_id, **kwargs):
        """
        Detaches a remote interface from the specified port on the
        specified Virtual Network; the port profile is moved back onto
        the default VLAN.
        """
        LOG.debug("UCSVICPlugin:unplug_interface() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        port_binding = udb.get_portbinding(port_id)
        profile_name = port_binding[const.PORTPROFILENAME]
        old_vlan_name = port_binding[const.VLANNAME]
        new_vlan_name = conf.DEFAULT_VLAN_NAME
        self._driver.change_vlan_in_profile(profile_name, old_vlan_name,
                                            new_vlan_name, self._ucsm_ip,
                                            self._ucsm_username,
                                            self._ucsm_password)
        return udb.update_portbinding(port_id, vlan_name=new_vlan_name,
                                      vlan_id=conf.DEFAULT_VLAN_ID)
    def create_multiport(self, tenant_id, net_id_list, ports_num,
                         port_id_list, **kwargs):
        """
        Creates multiple ports, one for each (port_id, net_id) pair, on
        the specified Virtual Networks.
        """
        LOG.debug("UCSVICPlugin:create_multiport() called\n")
        self._set_ucsm(kwargs[const.DEVICE_IP])
        qos = None
        ucs_inventory = kwargs[const.UCS_INVENTORY]
        least_rsvd_blade_dict = kwargs[const.LEAST_RSVD_BLADE_DICT]
        chassis_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_CHASSIS]
        blade_id = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_ID]
        blade_data_dict = least_rsvd_blade_dict[const.LEAST_RSVD_BLADE_DATA]
        port_binding_list = []
        for port_id, net_id in zip(port_id_list, net_id_list):
            new_port_profile = self._create_port_profile(
                tenant_id, net_id, port_id,
                conf.DEFAULT_VLAN_NAME,
                conf.DEFAULT_VLAN_ID)
            profile_name = new_port_profile[const.PROFILE_NAME]
            # NOTE(review): reservation side effect only; result unused.
            rsvd_nic_dict = ucs_inventory.reserve_blade_interface(
                self._ucsm_ip, chassis_id,
                blade_id, blade_data_dict,
                tenant_id, port_id,
                profile_name)
            port_binding = udb.update_portbinding(
                port_id,
                portprofile_name=profile_name,
                vlan_name=conf.DEFAULT_VLAN_NAME,
                vlan_id=conf.DEFAULT_VLAN_ID,
                qos=qos)
            port_binding_list.append(port_binding)
        return port_binding_list
    def detach_port(self, tenant_id, instance_id, instance_desc, **kwargs):
        """
        Remove the association of the VIF with the dynamic vnic
        """
        LOG.debug("detach_port() called\n")
        port_id = kwargs[const.PORTID]
        kwargs.pop(const.PORTID)
        return self.unplug_interface(tenant_id, None, port_id, **kwargs)
    def _get_profile_name(self, port_id):
        """Returns the port profile name based on the port UUID"""
        profile_name = conf.PROFILE_NAME_PREFIX + cutil.get16ByteUUID(port_id)
        return profile_name
    def _get_vlan_name_for_network(self, tenant_id, network_id):
        """Return the VLAN name as set by the L2 network plugin"""
        vlan_binding = cdb.get_vlan_binding(network_id)
        return vlan_binding[const.VLANNAME]
    def _get_vlan_id_for_network(self, tenant_id, network_id):
        """Return the VLAN id as set by the L2 network plugin"""
        vlan_binding = cdb.get_vlan_binding(network_id)
        return vlan_binding[const.VLANID]
    def _create_port_profile(self, tenant_id, net_id, port_id, vlan_name,
                             vlan_id):
        """Create port profile in UCSM (raises when the UCSM limit is hit)"""
        if self._port_profile_counter >= int(conf.MAX_UCSM_PORT_PROFILES):
            raise cexc.UCSMPortProfileLimit(net_id=net_id, port_id=port_id)
        profile_name = self._get_profile_name(port_id)
        self._driver.create_profile(profile_name, vlan_name, self._ucsm_ip,
                                    self._ucsm_username, self._ucsm_password)
        self._port_profile_counter += 1
        new_port_profile = {const.PROFILE_NAME: profile_name,
                            const.PROFILE_VLAN_NAME: vlan_name,
                            const.PROFILE_VLAN_ID: vlan_id}
        return new_port_profile
    def _delete_port_profile(self, port_id, profile_name):
        """Delete port profile in UCSM"""
        self._driver.delete_profile(profile_name, self._ucsm_ip,
                                    self._ucsm_username, self._ucsm_password)
        self._port_profile_counter -= 1
    def _set_ucsm(self, ucsm_ip):
        """Set the UCSM IP, username, and password"""
        self._ucsm_ip = ucsm_ip
        self._ucsm_username = cred.Store.get_username(conf.UCSM_IP_ADDRESS)
        self._ucsm_password = cred.Store.get_password(conf.UCSM_IP_ADDRESS)
|
|
#!/usr/bin/env python
# Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Parses OWNERS recursively and generates a machine readable component mapping.
OWNERS files are expected to contain a well-formatted pair of tags as shown
below. A presubmit check exists that validates this.
This script finds lines in the OWNERS files such as:
`# TEAM: team@chromium.org` and
`# COMPONENT: Tools>Test>Findit`
and dumps this information into a json file.
Refer to crbug.com/667952
"""
from __future__ import print_function
import json
import optparse
import os
import sys
from owners_file_tags import aggregate_components_from_owners, scrape_owners
# Default source root: two directories above this script (i.e. src/).
_DEFAULT_SRC_LOCATION = os.path.join(
    os.path.dirname(__file__), os.pardir, os.pardir)
# Preamble embedded in the generated JSON under the 'AAA-README' key.
_README = """
This file is generated by src/tools/checkteamtags/extract_components.py
by parsing the contents of OWNERS files throughout the chromium source code and
extracting `# TEAM:` and `# COMPONENT:` tags.
Manual edits of this file will be overwritten by an automated process.
""".splitlines()
def write_results(filename, data):
  """Write *data* to *filename*, defaulting to <CWD>/component_map.json."""
  target = filename or 'component_map.json'
  with open(target, 'w') as output_file:
    output_file.write(data)
def display_stat(stats, root, options):
  """Display coverage statistic.
  The following three values are always displayed:
  - The total number of OWNERS files under directory root and its sub-
    directories.
  - The number of OWNERS files (and its percentage of the total) that have
    component information but no team information.
  - The number of OWNERS files (and its percentage of the total) that have
    both component and team information.
  Optionally, if options.stat_coverage or options.complete_coverage are given,
  the same information will be shown for each depth level.
  (up to the level given by options.stat_coverage, if any).
  Args:
    stats (dict): The statistics in dictionary form as produced by the
      owners_file_tags module.
    root (str): The root directory from which the depth level is calculated.
    options (optparse.Values): The command line options as returned by
      optparse.
  """
  file_total = stats['OWNERS-count']
  print("%d OWNERS files in total." % file_total)
  file_with_component = stats['OWNERS-with-component-only-count']
  file_pct_with_component = "N/A"
  if file_total > 0:
    file_pct_with_component = "{0:.2f}".format(
        100.0 * file_with_component / file_total)
  print('%(file_with_component)d (%(file_pct_with_component)s%%) OWNERS '\
        'files have COMPONENT' % {
            'file_with_component': file_with_component,
            'file_pct_with_component': file_pct_with_component})
  file_with_team_component = stats['OWNERS-with-team-and-component-count']
  file_pct_with_team_component = "N/A"
  if file_total > 0:
    file_pct_with_team_component = "{0:.2f}".format(
        100.0 * file_with_team_component / file_total)
  print('%(file_with_team_component)d (%(file_pct_with_team_component)s%%) '\
        'OWNERS files have TEAM and COMPONENT' % {
            'file_with_team_component': file_with_team_component,
            'file_pct_with_team_component': file_pct_with_team_component})
  print("\nUnder directory %s " % root)
  # number of depth to display, default is max depth under root
  num_output_depth = len(stats['OWNERS-count-by-depth'])
  # BUG FIX: stat_coverage is None when -s is omitted; "None > 0" raises
  # TypeError on Python 3, so test truthiness before comparing.
  if options.stat_coverage and 0 < options.stat_coverage < num_output_depth:
    num_output_depth = options.stat_coverage
  for depth in range(0, num_output_depth):
    file_total_by_depth = stats['OWNERS-count-by-depth'][depth]
    file_with_component_by_depth =\
        stats['OWNERS-with-component-only-count-by-depth'][depth]
    file_pct_with_component_by_depth = "N/A"
    if file_total_by_depth > 0:
      file_pct_with_component_by_depth = "{0:.2f}".format(
          100.0 * file_with_component_by_depth / file_total_by_depth)
    file_with_team_component_by_depth =\
        stats['OWNERS-with-team-and-component-count-by-depth'][depth]
    file_pct_with_team_component_by_depth = "N/A"
    if file_total_by_depth > 0:
      file_pct_with_team_component_by_depth = "{0:.2f}".format(
          100.0 * file_with_team_component_by_depth / file_total_by_depth)
    print('%(file_total_by_depth)d OWNERS files at depth %(depth)d' % {
        'file_total_by_depth': file_total_by_depth,
        'depth': depth
    })
    print('have COMPONENT: %(file_with_component_by_depth)d, '\
          'percentage: %(file_pct_with_component_by_depth)s%%' % {
              'file_with_component_by_depth':
                  file_with_component_by_depth,
              'file_pct_with_component_by_depth':
                  file_pct_with_component_by_depth})
    print('have COMPONENT and TEAM: %(file_with_team_component_by_depth)d,'\
          'percentage: %(file_pct_with_team_component_by_depth)s%%' % {
              'file_with_team_component_by_depth':
                  file_with_team_component_by_depth,
              'file_pct_with_team_component_by_depth':
                  file_pct_with_team_component_by_depth})
def display_missing_info_OWNERS_files(stats, num_output_depth):
  """Print OWNERS files missing both team and component, per depth level.
  Args:
    stats (dict): The statistics in dictionary form as produced by the
      owners_file_tags module.
    num_output_depth (int): number of depth levels to display; values outside
      [0, max depth] fall back to the maximum available depth.
  """
  print("OWNERS files that have missing team and component by depth:")
  max_output_depth = len(stats['OWNERS-count-by-depth'])
  if not 0 <= num_output_depth <= max_output_depth:
    num_output_depth = max_output_depth
  missing_by_depth = stats['OWNERS-missing-info-by-depth']
  for depth in range(num_output_depth):
    print('at depth %(depth)d' % {'depth': depth})
    print(missing_by_depth[depth])
def main(argv):
  """Entry point: scrape OWNERS files and emit the component mapping.

  Prints the JSON mapping to stdout unless -w/--write or -o/--output_file
  is given, in which case it is written to disk.  Returns 0.
  """
  usage = """Usage: python %prog [options] [<root_dir>]
  root_dir specifies the topmost directory to traverse looking for OWNERS
  files, defaults to two levels up from this file's directory.
  i.e. where src/ is expected to be.
  Examples:
  python %prog
  python %prog /b/build/src
  python %prog -v /b/build/src
  python %prog -w /b/build/src
  python %prog -o ~/components.json /b/build/src
  python %prog -c /b/build/src
  python %prog -s 3 /b/build/src
  python %prog -m 2 /b/build/src
  """
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('-w', '--write', action='store_true',
                    help='If no errors occur, write the mappings to disk.')
  parser.add_option('-v', '--verbose', action='store_true',
                    help='Print warnings.')
  parser.add_option('-o', '--output_file', help='Specify file to write the '
                    'mappings to instead of the default: <CWD>/'
                    'component_map.json (implies -w)')
  parser.add_option('-c', '--complete_coverage', action='store_true',
                    help='Print complete coverage statistic')
  parser.add_option('-s', '--stat_coverage', type="int",
                    help='Specify directory depth to display coverage stats')
  parser.add_option('--include-subdirs', action='store_true', default=False,
                    help='List subdirectories without OWNERS file or component '
                    'tag as having same component as parent')
  parser.add_option('-m', '--list_missing_info_by_depth', type="int",
                    help='List OWNERS files that have missing team and '
                    'component information by depth')
  options, args = parser.parse_args(argv[1:])
  root = args[0] if args else _DEFAULT_SRC_LOCATION
  scrape_result = scrape_owners(root, include_subdirs=options.include_subdirs)
  mappings, warnings, stats = aggregate_components_from_owners(scrape_result,
                                                               root)
  if options.verbose:
    for warning in warnings:
      print(warning)
  if options.stat_coverage or options.complete_coverage:
    display_stat(stats, root, options)
  if options.list_missing_info_by_depth:
    display_missing_info_OWNERS_files(stats,
                                      options.list_missing_info_by_depth)
  mappings['AAA-README'] = _README
  serialized = json.dumps(mappings, sort_keys=True, indent=2)
  if options.write or options.output_file:
    write_results(options.output_file, serialized)
  else:
    print(serialized)
  return 0
# Script entry point: exit with main()'s return code.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
|
|
import math
import operator
from datetime import date, datetime
from operator import methodcaller
import pandas as pd
import pandas.testing as tm
import pytest
from pytest import param
import ibis
import ibis.expr.datatypes as dt
import ibis.expr.types as ir
from ibis import literal as L
pytest.importorskip("clickhouse_driver")
@pytest.mark.parametrize(
    ('to_type', 'expected'),
    [
        ('int8', 'CAST(`double_col` AS Int8)'),
        ('int16', 'CAST(`double_col` AS Int16)'),
        ('float', 'CAST(`double_col` AS Float32)'),
        # alltypes.double_col is non-nullable
        (dt.Double(nullable=False), '`double_col`'),
    ],
)
def test_cast_double_col(alltypes, translate, to_type, expected):
    """Casting double_col emits the expected ClickHouse CAST (or a no-op)."""
    casted = alltypes.double_col.cast(to_type)
    assert translate(casted) == expected
@pytest.mark.parametrize(
    ('to_type', 'expected'),
    [
        ('int8', 'CAST(`string_col` AS Int8)'),
        ('int16', 'CAST(`string_col` AS Int16)'),
        (dt.String(nullable=False), '`string_col`'),
        ('timestamp', 'CAST(`string_col` AS DateTime)'),
        ('date', 'CAST(`string_col` AS Date)'),
    ],
)
def test_cast_string_col(alltypes, translate, to_type, expected):
    """Casting string_col emits the expected ClickHouse CAST (or a no-op)."""
    casted = alltypes.string_col.cast(to_type)
    assert translate(casted) == expected
@pytest.mark.parametrize(
    'column',
    [
        'index',
        'Unnamed: 0',
        'id',
        'bool_col',
        'tinyint_col',
        'smallint_col',
        'int_col',
        'bigint_col',
        'float_col',
        'double_col',
        'date_string_col',
        'string_col',
        'timestamp_col',
        'year',
        'month',
    ],
)
def test_noop_cast(alltypes, translate, column):
    """Casting a column to its own type is an identity operation."""
    col = alltypes[column]
    noop = col.cast(col.type())
    assert noop.equals(col)
    assert translate(noop) == f'`{column}`'
def test_timestamp_cast_noop(alltypes, translate):
    """Timestamp cast is a no-op for timestamp_col and a CAST for int_col."""
    target = dt.Timestamp(nullable=False)
    ts_result = alltypes.timestamp_col.cast(target)
    int_result = alltypes.int_col.cast(target)
    for result in (ts_result, int_result):
        assert isinstance(result, ir.TimestampColumn)
    assert translate(ts_result) == '`timestamp_col`'
    assert translate(int_result) == 'CAST(`int_col` AS DateTime)'
def test_timestamp_now(con, translate):
    """ibis.now() translates to ClickHouse's now() function."""
    now_expr = ibis.now()
    assert translate(now_expr) == 'now()'
@pytest.mark.parametrize(
    ('unit', 'expected'),
    [
        ('y', '2009-01-01'),
        param('m', '2009-05-01', marks=pytest.mark.xfail),
        ('d', '2009-05-17'),
        ('w', '2009-05-11'),
        ('h', '2009-05-17 12:00:00'),
        ('minute', '2009-05-17 12:34:00'),
    ],
)
def test_timestamp_truncate(con, translate, unit, expected):
    """Truncating a timestamp yields the expected unit boundary."""
    ts = ibis.timestamp('2009-05-17 12:34:56')
    truncated = ts.truncate(unit)
    assert con.execute(truncated) == pd.Timestamp(expected)
@pytest.mark.parametrize(
    ('func', 'expected'),
    [
        (methodcaller('year'), 2015),
        (methodcaller('month'), 9),
        (methodcaller('day'), 1),
        (methodcaller('hour'), 14),
        (methodcaller('minute'), 48),
        (methodcaller('second'), 5),
    ],
)
def test_simple_datetime_operations(con, func, expected):
    """Datetime component extraction works on whole-second timestamps.

    Sub-second timestamps are rejected by the backend, so the first call
    must raise; the second call checks the extracted component.
    """
    value = ibis.timestamp('2015-09-01 14:48:05.359')
    with pytest.raises(ValueError):
        con.execute(func(value))
    value = ibis.timestamp('2015-09-01 14:48:05')
    # BUG FIX: the comparison result was previously discarded (no assert),
    # so the test could never fail on a wrong component value.
    assert con.execute(func(value)) == expected
@pytest.mark.parametrize(('value', 'expected'), [(0, None), (5.5, 5.5)])
def test_nullifzero(con, value, expected):
    """nullifzero maps zero to NULL and passes other values through."""
    outcome = con.execute(L(value).nullifzero())
    if expected is None:
        assert pd.isnull(outcome)
    else:
        assert outcome == expected
@pytest.mark.parametrize(
    ('expr', 'expected'),
    [
        (L(None).isnull(), True),
        (L(1).isnull(), False),
        (L(None).notnull(), False),
        (L(1).notnull(), True),
    ],
)
def test_isnull_notnull(con, expr, expected):
    """isnull/notnull evaluate correctly for NULL and non-NULL literals."""
    result = con.execute(expr)
    assert result == expected
@pytest.mark.parametrize(
    ('expr', 'expected'),
    [
        (ibis.coalesce(5, None, 4), 5),
        (ibis.coalesce(ibis.NA, 4, ibis.NA), 4),
        (ibis.coalesce(ibis.NA, ibis.NA, 3.14), 3.14),
    ],
)
def test_coalesce(con, expr, expected):
    """coalesce returns its first non-NULL argument."""
    result = con.execute(expr)
    assert result == expected
@pytest.mark.parametrize(
    ('expr', 'expected'),
    [
        (ibis.NA.fillna(5), 5),
        (L(5).fillna(10), 5),
        (L(5).nullif(5), None),
        (L(10).nullif(5), 10),
    ],
)
def test_fillna_nullif(con, expr, expected):
    """fillna replaces NULLs; nullif yields NULL on a matching value."""
    actual = con.execute(expr)
    if expected is None:
        assert pd.isnull(actual)
    else:
        assert actual == expected
@pytest.mark.parametrize(
    ('value', 'expected'),
    [
        (L('foo_bar'), 'String'),
        (L(5), 'UInt8'),
        (L(1.2345), 'Float64'),
        (L(datetime(2015, 9, 1, hour=14, minute=48, second=5)), 'DateTime'),
        (L(date(2015, 9, 1)), 'Date'),
        param(
            ibis.NA,
            'Null',
            marks=pytest.mark.xfail(
                raises=AssertionError,
                reason=(
                    'Client/server version mismatch not handled in the '
                    'clickhouse driver'
                ),
            ),
        ),
    ],
)
def test_typeof(con, value, expected):
    """typeof reports the literal's backing ClickHouse type name."""
    type_name = con.execute(value.typeof())
    assert type_name == expected
@pytest.mark.parametrize(('value', 'expected'), [('foo_bar', 7), ('', 0)])
def test_string_length(con, value, expected):
    """length() counts characters, including for the empty string."""
    result = con.execute(L(value).length())
    assert result == expected
@pytest.mark.parametrize(
    ('op', 'expected'),
    [
        (methodcaller('substr', 0, 3), 'foo'),
        (methodcaller('substr', 4, 3), 'bar'),
        (methodcaller('substr', 1), 'oo_bar'),
    ],
)
def test_string_substring(con, op, expected):
    """substr slices correctly with and without an explicit length."""
    result = con.execute(op(L('foo_bar')))
    assert result == expected
def test_string_column_substring(con, alltypes, translate):
    """Column substr translates to 1-based substring() and executes."""
    without_length = alltypes.string_col.substr(2)
    assert translate(without_length) == 'substring(`string_col`, 2 + 1)'
    assert len(con.execute(without_length))
    with_length = alltypes.string_col.substr(0, 3)
    assert translate(with_length) == 'substring(`string_col`, 0 + 1, 3)'
    assert len(con.execute(with_length))
def test_string_reverse(con):
    """reverse() flips the characters of a string literal."""
    flipped = con.execute(L('foo').reverse())
    assert flipped == 'oof'
def test_string_upper(con):
    """upper() uppercases a string literal."""
    uppered = con.execute(L('foo').upper())
    assert uppered == 'FOO'
def test_string_lower(con):
    """lower() lowercases a string literal."""
    lowered = con.execute(L('FOO').lower())
    assert lowered == 'foo'
# NOTE(review): near-duplicate of test_string_length above; the name has a
# typo ("lenght") but is kept as-is so the pytest test ID stays stable.
def test_string_lenght(con):
    assert con.execute(L('FOO').length()) == 3
@pytest.mark.parametrize(
    ('value', 'op', 'expected'),
    [
        (L('foobar'), methodcaller('contains', 'bar'), True),
        (L('foobar'), methodcaller('contains', 'foo'), True),
        (L('foobar'), methodcaller('contains', 'baz'), False),
        (L('100%'), methodcaller('contains', '%'), True),
        (L('a_b_c'), methodcaller('contains', '_'), True),
    ],
)
def test_string_contains(con, op, value, expected):
    """contains() handles plain needles and LIKE metacharacters (% and _)."""
    result = con.execute(op(value))
    assert result == expected
# TODO: clickhouse-driver escaping bug
def test_re_replace(con, translate):
    """re_replace() on literals: per-character duplication and anchored prefix."""
    doubled = L('Hello, World!').re_replace('.', '\\\\0\\\\0')
    prefixed = L('Hello, World!').re_replace('^', 'here: ')
    assert con.execute(doubled) == 'HHeelllloo,, WWoorrlldd!!'
    assert con.execute(prefixed) == 'here: Hello, World!'
@pytest.mark.parametrize(
    ('value', 'expected'),
    [(L('a'), 0), (L('b'), 1), (L('d'), -1)],  # TODO: what's the expected?
)
def test_find_in_set(con, value, expected, translate):
    """find_in_set() returns a 0-based index, or -1 when absent."""
    expr = value.find_in_set(list('abc'))
    assert con.execute(expr) == expected
def test_string_column_find_in_set(con, alltypes, translate):
    """find_in_set() on a column compiles to indexOf(...) - 1 and executes."""
    expr = alltypes.string_col.find_in_set(list('abc'))
    assert translate(expr) == "indexOf(['a','b','c'], `string_col`) - 1"
    assert len(con.execute(expr))
@pytest.mark.parametrize(
    ('url', 'extract', 'expected'),
    [
        (L('https://www.cloudera.com'), 'HOST', 'www.cloudera.com'),
        (L('https://www.cloudera.com'), 'PROTOCOL', 'https'),
        (
            L('https://www.youtube.com/watch?v=kEuEcWfewf8&t=10'),
            'PATH',
            '/watch',
        ),
        (
            L('https://www.youtube.com/watch?v=kEuEcWfewf8&t=10'),
            'QUERY',
            'v=kEuEcWfewf8&t=10',
        ),
    ],
)
def test_parse_url(con, translate, url, extract, expected):
    """parse_url() extracts the requested URL component."""
    result = con.execute(url.parse_url(extract))
    assert result == expected
def test_parse_url_query_parameter(con, translate):
    """parse_url('QUERY', key) pulls out individual query parameters."""
    url = L('https://www.youtube.com/watch?v=kEuEcWfewf8&t=10')
    assert con.execute(url.parse_url('QUERY', 't')) == '10'
    assert con.execute(url.parse_url('QUERY', 'v')) == 'kEuEcWfewf8'
@pytest.mark.parametrize(
    ('expr', 'expected'),
    [
        (L('foobar').find('bar'), 3),
        (L('foobar').find('baz'), -1),
        (L('foobar').like('%bar'), True),
        (L('foobar').like('foo%'), True),
        (L('foobar').like('%baz%'), False),
        (L('foobar').like(['%bar']), True),
        (L('foobar').like(['foo%']), True),
        (L('foobar').like(['%baz%']), False),
        (L('foobar').like(['%bar', 'foo%']), True),
        (L('foobarfoo').replace('foo', 'H'), 'HbarH'),
    ],
)
def test_string_find_like(con, expr, expected):
    """find(), like() (scalar and list patterns) and replace() on literals."""
    result = con.execute(expr)
    assert result == expected
def test_string_column_like(con, alltypes, translate):
    """like() compiles to LIKE; list patterns are OR-ed together."""
    single = alltypes.string_col.like('foo%')
    assert translate(single) == "`string_col` LIKE 'foo%'"
    assert len(con.execute(single))
    multi = alltypes.string_col.like(['foo%', '%bar'])
    assert (
        translate(multi)
        == "`string_col` LIKE 'foo%' OR `string_col` LIKE '%bar'"
    )
    assert len(con.execute(multi))
def test_string_column_find(con, alltypes, translate):
    """find() compiles to 0-based position() for literal and column needles."""
    col = alltypes.string_col
    by_literal = col.find('a')
    assert translate(by_literal) == "position(`string_col`, 'a') - 1"
    assert len(con.execute(by_literal))
    by_column = col.find(col)
    assert translate(by_column) == "position(`string_col`, `string_col`) - 1"
    assert len(con.execute(by_column))
@pytest.mark.parametrize(
    ('call', 'expected'),
    [
        (methodcaller('log'), 'log(`double_col`)'),
        (methodcaller('log2'), 'log2(`double_col`)'),
        (methodcaller('log10'), 'log10(`double_col`)'),
        (methodcaller('round'), 'round(`double_col`)'),
        (methodcaller('round', 0), 'round(`double_col`, 0)'),
        (methodcaller('round', 2), 'round(`double_col`, 2)'),
        (methodcaller('exp'), 'exp(`double_col`)'),
        (methodcaller('abs'), 'abs(`double_col`)'),
        (methodcaller('ceil'), 'ceil(`double_col`)'),
        (methodcaller('floor'), 'floor(`double_col`)'),
        (methodcaller('sqrt'), 'sqrt(`double_col`)'),
        (
            methodcaller('sign'),
            'intDivOrZero(`double_col`, abs(`double_col`))',
        ),
    ],
)
def test_translate_math_functions(con, alltypes, translate, call, expected):
    """Math methods on a column compile to the matching ClickHouse call."""
    expr = call(alltypes.double_col)
    sql = translate(expr)
    assert sql == expected
    assert len(con.execute(expr))
@pytest.mark.parametrize(
    ('expr', 'expected'),
    [
        pytest.param(L(-5).abs(), 5, id="abs_neg"),
        pytest.param(L(5).abs(), 5, id="abs"),
        pytest.param(L(5.5).round(), 6.0, id="round"),
        pytest.param(L(5.556).round(2), 5.56, id="round_places"),
        pytest.param(L(5.556).ceil(), 6.0, id="ceil"),
        pytest.param(L(5.556).floor(), 5.0, id="floor"),
        pytest.param(L(5.556).sign(), 1, id="sign"),
        pytest.param(L(-5.556).sign(), -1, id="sign_neg"),
        pytest.param(L(0).sign(), 0, id="sign_zero"),
        pytest.param(L(5.556).sqrt(), math.sqrt(5.556), id="sqrt"),
        pytest.param(L(5.556).log(2), math.log(5.556, 2), id="log2_arg"),
        pytest.param(L(5.556).log2(), math.log(5.556, 2), id="log2"),
        pytest.param(L(5.556).log10(), math.log10(5.556), id="log10"),
        # clickhouse has different functions for exp/ln that are faster
        # than the defaults, but less precise
        #
        # we can't use the e() function as it still gives different results
        # from `math.exp`
        pytest.param(
            L(5.556).exp().round(8),
            round(math.exp(5.556), 8),
            id="exp",
        ),
        pytest.param(
            L(5.556).ln().round(7),
            round(math.log(5.556), 7),
            id="ln",
        ),
    ],
)
def test_math_functions(con, expr, expected, translate):
    """Scalar math on literals agrees with the Python math module."""
    result = con.execute(expr)
    assert result == expected
def test_greatest(con, alltypes, translate):
    """greatest() against a scalar and against a second column."""
    vs_scalar = ibis.greatest(alltypes.int_col, 10)
    assert translate(vs_scalar) == "greatest(`int_col`, 10)"
    assert len(con.execute(vs_scalar))
    vs_column = ibis.greatest(alltypes.int_col, alltypes.bigint_col)
    assert translate(vs_column) == "greatest(`int_col`, `bigint_col`)"
    assert len(con.execute(vs_column))
def test_least(con, alltypes, translate):
    """least() against a scalar and against a second column."""
    vs_scalar = ibis.least(alltypes.int_col, 10)
    assert translate(vs_scalar) == "least(`int_col`, 10)"
    assert len(con.execute(vs_scalar))
    vs_column = ibis.least(alltypes.int_col, alltypes.bigint_col)
    assert translate(vs_column) == "least(`int_col`, `bigint_col`)"
    assert len(con.execute(vs_column))
# TODO: clickhouse-driver escaping bug
@pytest.mark.parametrize(
    ('expr', 'expected'),
    [
        (L('abcd').re_search('[a-z]'), True),
        (L('abcd').re_search(r'[\\d]+'), False),
        (L('1222').re_search(r'[\\d]+'), True),
    ],
)
def test_regexp(con, expr, expected):
    """re_search() on literals returns booleans."""
    result = con.execute(expr)
    assert result == expected
@pytest.mark.parametrize(
    ('expr', 'expected'),
    [
        (L('abcd').re_extract('([a-z]+)', 0), 'abcd'),
        # (L('abcd').re_extract('(ab)(cd)', 1), 'cd'),
        # valid group number but no match => empty string
        (L('abcd').re_extract(r'(\\d)', 0), ''),
        # match but not a valid group number => NULL
        # (L('abcd').re_extract('abcd', 3), None),
    ],
)
def test_regexp_extract(con, expr, expected, translate):
    """re_extract() returns the group text, or '' on a valid-group non-match."""
    result = con.execute(expr)
    assert result == expected
def test_column_regexp_extract(con, alltypes, translate):
    """re_extract() compiles to 1-based extractAll indexing and executes."""
    expr = alltypes.string_col.re_extract(r'[\d]+', 3)
    assert translate(expr) == r"extractAll(`string_col`, '[\d]+')[3 + 1]"
    assert len(con.execute(expr))
def test_column_regexp_replace(con, alltypes, translate):
    """re_replace() compiles to replaceRegexpAll and executes."""
    expr = alltypes.string_col.re_replace(r'[\d]+', 'aaa')
    assert translate(expr) == r"replaceRegexpAll(`string_col`, '[\d]+', 'aaa')"
    assert len(con.execute(expr))
def test_numeric_builtins_work(con, alltypes, df, translate):
    """Executing a double column matches the pandas frame (NaNs become 0)."""
    result = alltypes.double_col.execute()
    tm.assert_series_equal(result, df.double_col.fillna(0))
def test_null_column(alltypes, translate):
    """A mutated NULL column executes to an all-None series of full length."""
    nrows = alltypes.count().execute()
    result = alltypes.mutate(na_column=ibis.NA).na_column.execute()
    expected = pd.Series([None] * nrows, name='na_column')
    tm.assert_series_equal(result, expected)
def test_literal_none_to_nullable_colum(alltypes):
    # GH: 2985
    # NOTE(review): the misspelling ('colum') is kept to preserve the
    # historical test id.
    nrows = alltypes.count().execute()
    nullable = ibis.literal(None, dt.String(nullable=True)).name(
        'nullable_string_column'
    )
    result = alltypes.mutate(nullable)['nullable_string_column'].execute()
    expected = pd.Series([None] * nrows, name='nullable_string_column')
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
    ('attr', 'expected'),
    [
        (operator.methodcaller('year'), {2009, 2010}),
        (operator.methodcaller('month'), set(range(1, 13))),
        (operator.methodcaller('day'), set(range(1, 32))),
    ],
)
def test_date_extract_field(db, alltypes, attr, expected):
    """year/month/day extracted from a casted date cover the expected sets."""
    expr = attr(alltypes.timestamp_col.cast('date')).distinct()
    actual = set(expr.execute().astype(int))
    assert actual == expected
def test_timestamp_from_integer(con, alltypes, translate):
    """to_timestamp() on an int column compiles to toDateTime and executes."""
    # timestamp_col has datetime type
    expr = alltypes.int_col.to_timestamp()
    sql = translate(expr)
    assert sql == 'toDateTime(`int_col`)'
    assert len(con.execute(expr))
def test_count_distinct_with_filter(alltypes):
    """nunique(where=...) matches a pandas-side filtered nunique."""
    col = alltypes.string_col
    result = col.nunique(where=col.cast('int64') > 1).execute()
    data = col.execute()
    expected = data[data.astype('int64') > 1].nunique()
    assert result == expected
@pytest.mark.parametrize(
    ('sep', 'where_case', 'expected'),
    [
        (',', None, "arrayStringConcat(groupArray(`string_col`), ',')"),
        ('-', None, "arrayStringConcat(groupArray(`string_col`), '-')"),
        pytest.param(
            ',',
            0,
            (
                "arrayStringConcat(groupArray("
                "CASE WHEN `bool_col` = 0 THEN "
                "`string_col` ELSE Null END), ',')"
            ),
            marks=pytest.mark.xfail(
                reason=(
                    '`where` param needs `Nullable` column '
                    'but the all in testing data is not.'
                    'See also issue #2891'
                )
            ),
        ),
    ],
)
def test_group_concat(alltypes, sep, where_case, expected, translate):
    """group_concat() compiles to arrayStringConcat(groupArray(...))."""
    if where_case is None:
        where = None
    else:
        where = alltypes.bool_col == where_case
    expr = alltypes.string_col.group_concat(sep, where)
    assert translate(expr) == expected
|
|
# Copyright (c) 2014, James Hensman, Max Zwiessele
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from .parameterized import Parameterized
from .param import Param
class Remapping(Parameterized):
    """Base class for re-mapping constrained parameters onto new values.

    Subclasses implement :meth:`mapping` to produce the values that the
    re-mapped parameters should take; :meth:`parameters_changed` pushes
    those values into the highest parent's parameter array and notifies
    the tied parameters' observers.
    """
    def mapping(self):
        """
        The return value of this function gives the values which the re-mapped
        parameters should take. Implement in sub-classes.
        """
        raise NotImplementedError
    def callback(self):
        raise NotImplementedError
    def __str__(self):
        return self.name
    def parameters_changed(self):
        # Ensure all our parameters have the correct value, as specified by
        # our mapping.
        index = self._highest_parent_.constraints[self]
        self._highest_parent_.param_array[index] = self.mapping()
        # Use a plain loop rather than a throwaway list comprehension:
        # this is executed purely for the notification side effect.
        for p in self.tied_parameters:
            p.notify_observers(which=self)
class Fix(Remapping):
    """Remapping marker subclass for fixed parameters; adds no behaviour."""
    pass
class Tie(Parameterized):
    """
    The new parameter tie framework. (under development)

    All the parameters tied together get a new parameter inside the *Tie*
    object. Its value should always be equal to all the tied parameters, and
    its gradient is the sum of all the tied parameters.

    =====Implementation Details=====
    The *Tie* object should only exist on the top of param tree (the highest
    parent).

    self.label_buf:
        It uses a label buffer that has the same length as all the parameters
        (self._highest_parent_.param_array). The buffer keeps track of all the
        tied parameters. All the tied parameters have a label (an integer)
        higher than 0, and the parameters that have the same label are tied
        together.

    self.buf_idx:
        An auxiliary index list for the global index of the tie parameter
        inside the *Tie* object.
    ================================

    TODO:
    * EVERYTHING
    """
    def __init__(self, name='tie'):
        super(Tie, self).__init__(name)
        self.tied_param = None
        # The buffer keeps track of tie status
        self.label_buf = None
        # The global indices of the 'tied' param
        self.buf_idx = None
        # A boolean array indicating non-tied parameters
        self._tie_ = None
    def getTieFlag(self, p=None):
        """Return the boolean non-tied mask, or its slice for parameter *p*."""
        if self.tied_param is None:
            if self._tie_ is None or self._tie_.size != self._highest_parent_.param_array.size:
                # Builtin ``bool`` dtype: the ``np.bool`` alias was deprecated
                # in NumPy 1.20 and removed in 1.24.
                self._tie_ = np.ones((self._highest_parent_.param_array.size,), dtype=bool)
        if p is not None:
            return self._tie_[p._highest_parent_._raveled_index_for(p)]
        return self._tie_
    def _init_labelBuf(self):
        """Lazily allocate the label buffer and the non-tied mask."""
        if self.label_buf is None:
            # Builtin ``int`` dtype: ``np.int`` was removed in NumPy >= 1.24.
            self.label_buf = np.zeros(self._highest_parent_.param_array.shape, dtype=int)
        if self._tie_ is None or self._tie_.size != self._highest_parent_.param_array.size:
            self._tie_ = np.ones((self._highest_parent_.param_array.size,), dtype=bool)
    def _updateTieFlag(self):
        """Recompute the non-tied mask from the label buffer and buf_idx."""
        if self._tie_.size != self.label_buf.size:
            self._tie_ = np.ones((self._highest_parent_.param_array.size,), dtype=bool)
        self._tie_[self.label_buf > 0] = False
        # the internal tie parameters themselves are flagged as "non-tied"
        self._tie_[self.buf_idx] = True
    def add_tied_parameter(self, p, p2=None):
        """
        Tie the list of parameters p together (p2==None) or
        Tie the list of parameters p with the list of parameters p2 (p2!=None)
        """
        self._init_labelBuf()
        if p2 is None:
            idx = self._highest_parent_._raveled_index_for(p)
            val = self._sync_val_group(idx)
            if np.all(self.label_buf[idx] == 0):
                # None of p has been tied before: allocate a new tie
                # parameter and a fresh label.
                tie_idx = self._expandTieParam(1)
                tie_id = self.label_buf.max() + 1
                self.label_buf[tie_idx] = tie_id
            else:
                # Some of p already carry labels: merge those ties into one.
                b = self.label_buf[idx]
                ids = np.unique(b[b > 0])
                tie_id, tie_idx = self._merge_tie_param(ids)
            self._highest_parent_.param_array[tie_idx] = val
            # re-resolve indices: _expandTieParam/_merge_tie_param may have
            # reshuffled the global parameter layout
            idx = self._highest_parent_._raveled_index_for(p)
            self.label_buf[idx] = tie_id
        else:
            # tying p against p2 is not implemented yet (class is WIP)
            pass
        self._updateTieFlag()
    def _merge_tie_param(self, ids):
        """Merge the tie parameters with ids in the list."""
        if len(ids) == 1:
            id_final_idx = self.buf_idx[self.label_buf[self.buf_idx] == ids[0]][0]
            return ids[0], id_final_idx
        # keep the first id, remove the tie params behind the others and
        # relabel their entries
        id_final = ids[0]
        ids_rm = ids[1:]
        label_buf_param = self.label_buf[self.buf_idx]
        idx_param = [np.where(label_buf_param == i)[0][0] for i in ids_rm]
        self._removeTieParam(idx_param)
        [np.put(self.label_buf, np.where(self.label_buf == i), id_final) for i in ids_rm]
        id_final_idx = self.buf_idx[self.label_buf[self.buf_idx] == id_final][0]
        return id_final, id_final_idx
    def _sync_val_group(self, idx):
        """Force the group at *idx* to a common value (their mean)."""
        self._highest_parent_.param_array[idx] = self._highest_parent_.param_array[idx].mean()
        return self._highest_parent_.param_array[idx][0]
    def _expandTieParam(self, num):
        """Expand the tie param with the number of *num* parameters"""
        if self.tied_param is None:
            new_buf = np.empty((num,))
        else:
            new_buf = np.empty((self.tied_param.size + num,))
            new_buf[:self.tied_param.size] = self.tied_param.param_array.copy()
            self.remove_parameter(self.tied_param)
        self.tied_param = Param('tied', new_buf)
        self.add_parameter(self.tied_param)
        buf_idx_new = self._highest_parent_._raveled_index_for(self.tied_param)
        self._expand_label_buf(self.buf_idx, buf_idx_new)
        self.buf_idx = buf_idx_new
        return self.buf_idx[-num:]
    def _removeTieParam(self, idx):
        """idx within tied_param"""
        new_buf = np.empty((self.tied_param.size - len(idx),))
        bool_list = np.ones((self.tied_param.size,), dtype=bool)
        bool_list[idx] = False
        new_buf[:] = self.tied_param.param_array[bool_list]
        self.remove_parameter(self.tied_param)
        self.tied_param = Param('tied', new_buf)
        self.add_parameter(self.tied_param)
        buf_idx_new = self._highest_parent_._raveled_index_for(self.tied_param)
        self._shrink_label_buf(self.buf_idx, buf_idx_new, bool_list)
        self.buf_idx = buf_idx_new
    def _expand_label_buf(self, idx_old, idx_new):
        """Expand label buffer accordingly"""
        if idx_old is None:
            self.label_buf = np.zeros(self._highest_parent_.param_array.shape, dtype=int)
        else:
            bool_old = np.zeros((self.label_buf.size,), dtype=bool)
            bool_old[idx_old] = True
            bool_new = np.zeros((self._highest_parent_.param_array.size,), dtype=bool)
            bool_new[idx_new] = True
            label_buf_new = np.zeros(self._highest_parent_.param_array.shape, dtype=int)
            label_buf_new[np.logical_not(bool_new)] = self.label_buf[np.logical_not(bool_old)]
            label_buf_new[idx_new[:len(idx_old)]] = self.label_buf[idx_old]
            self.label_buf = label_buf_new
    def _shrink_label_buf(self, idx_old, idx_new, bool_list):
        """Shrink the label buffer after tie parameters were removed."""
        bool_old = np.zeros((self.label_buf.size,), dtype=bool)
        bool_old[idx_old] = True
        bool_new = np.zeros((self._highest_parent_.param_array.size,), dtype=bool)
        bool_new[idx_new] = True
        label_buf_new = np.empty(self._highest_parent_.param_array.shape, dtype=int)
        label_buf_new[np.logical_not(bool_new)] = self.label_buf[np.logical_not(bool_old)]
        label_buf_new[idx_new] = self.label_buf[idx_old[bool_list]]
        self.label_buf = label_buf_new
    def _check_change(self):
        """Propagate a changed member of a tie group to the whole group.

        Returns True when any tied value had to be updated.
        """
        changed = False
        if self.tied_param is not None:
            for i in range(self.tied_param.size):
                b0 = self.label_buf == self.label_buf[self.buf_idx[i]]
                b = self._highest_parent_.param_array[b0] != self.tied_param[i]
                # debug tracing left in on purpose -- class is WIP
                if b.sum() == 0:
                    print('XXX')
                    continue
                elif b.sum() == 1:
                    print('!!!')
                    # exactly one member changed: spread its value
                    val = self._highest_parent_.param_array[b0][b][0]
                    self._highest_parent_.param_array[b0] = val
                else:
                    print('@@@')
                    # several changed: fall back to the tie parameter's value
                    self._highest_parent_.param_array[b0] = self.tied_param[i]
                changed = True
        return changed
    def parameters_changed(self):
        # ensure all our parameters have the correct value, as specified by
        # our mapping
        changed = self._check_change()
        if changed:
            self._highest_parent_._trigger_params_changed()
        self.collate_gradient()
    def collate_gradient(self):
        """Sum the gradients of each tie group into the tie parameter."""
        if self.tied_param is not None:
            self.tied_param.gradient = 0.
            [np.put(self.tied_param.gradient, i, self._highest_parent_.gradient[self.label_buf == self.label_buf[self.buf_idx[i]]].sum())
             for i in range(self.tied_param.size)]
    def propagate_val(self):
        """Write each tie parameter's value back to all its group members."""
        if self.tied_param is not None:
            for i in range(self.tied_param.size):
                self._highest_parent_.param_array[self.label_buf == self.label_buf[self.buf_idx[i]]] = self.tied_param[i]
|
|
#
"""
Copyright (c) 2016 World Wide Technology, Inc.
All rights reserved.
Revision history:
28 March 2016 | 1.0 - initial release
29 March 2016 | 1.1 - comments and style modifications
30 March 2016 | 1.2 - documentation update
31 March 2016 | 1.3 - password 'data type' should be password, not string
reformatted debug output
14 June 2016 | 2.0 - cyber5 branch, new F5 icontrol_install_config module
module: F5_connector.py
author: Joel W. King, World Wide Technology
short_description: This Phantom app supports containment actions like 'block ip' or 'unblock ip' on an F5 BIG-IP appliance.
remarks: The appdev tutorial is at https://<phantom IP>/docs/appdev/tutorial
ssh phantom@<phantom IP>
export PYTHONPATH=/opt/phantom/lib/:/opt/phantom/www/
export REQUESTS_CA_BUNDLE=/opt/phantom/etc/cacerts.pem
cd ./app_dev/f5_firewall
touch __init__.py
../compile_app.py -i
python2.7 ./F5_connector.py ./test_jsons/test.json
"""
#
# Phantom App imports
#
import phantom.app as phantom
from phantom.base_connector import BaseConnector
from phantom.action_result import ActionResult
#
# system imports
#
import simplejson as json
import time
import httplib
#
# application imports
#
import icontrol_install_config as iControl # https://github.com/joelwking/ansible-f5/blob/master/icontrol_install_config.py
try:
from F5_connector_consts import * # file name would be ./F5_connector_consts.py
except ImportError:
pass # this is an optional file, used to bring in constants
# ========================================================
# AppConnector
# ========================================================
class F5_Connector(BaseConnector):
    """Phantom AppConnector for an F5 BIG-IP appliance.

    Implements the 'test connectivity', 'block ip' and 'unblock ip' actions
    by driving the BIG-IP iControl REST API through the imported
    icontrol_install_config helper module.
    """
    BANNER = "F5"  # tag prepended to every debug_print message
    HEADER = {"Content-Type": "application/json"}
    def initialize(self):
        """
        This is an optional function that can be implemented by the AppConnector derived class. Since the configuration
        dictionary is already validated by the time this function is called, it's a good place to do any extra initialization
        of any internal modules. This function MUST return a value of either phantom.APP_SUCCESS or phantom.APP_ERROR.
        If this function returns phantom.APP_ERROR, then AppConnector::handle_action will not get called.
        """
        self.debug_print("%s INITIALIZE %s" % (F5_Connector.BANNER, time.asctime()))
        return phantom.APP_SUCCESS
    def finalize(self):
        """
        This function gets called once all the param dictionary elements are looped over and no more handle_action calls
        are left to be made. It gives the AppConnector a chance to loop through all the results that were accumulated by
        multiple handle_action function calls and create any summary if required. Another usage is cleanup, disconnect
        from remote devices etc.
        """
        self.debug_print("%s FINALIZE" % F5_Connector.BANNER)
        return
    def handle_exception(self, exception_object):
        """
        All the code within BaseConnector::_handle_action is within a 'try: except:' clause. Thus if an exception occurs
        during the execution of this code it is caught at a single place. The resulting exception object is passed to the
        AppConnector::handle_exception() to do any cleanup of it's own if required. This exception is then added to the
        connector run result and passed back to spawn, which gets displayed in the Phantom UI.
        """
        self.debug_print("%s HANDLE_EXCEPTION %s" % (F5_Connector.BANNER, exception_object))
        return
    def _test_connectivity(self, param):
        """
        Called when the user depresses the test connectivity button on the Phantom UI.
        Use a basic query to determine if the IP address, username and password is correct,
        curl -k -u admin:redacted -X GET https://192.0.2.1/mgmt/tm/ltm/
        """
        self.debug_print("%s TEST_CONNECTIVITY %s" % (F5_Connector.BANNER, param))
        config = self.get_config()
        host = config.get("device")
        # read-only query of the software image list is a harmless probe
        F5 = iControl.BIG_IP(host=host,
                             username=config.get("username"),
                             password=config.get("password"),
                             uri="/mgmt/tm/sys/software/image",
                             method="GET")
        msg = "test connectivity to %s status_code: " % host
        if F5.genericGET():
            # True is success
            return self.set_status_save_progress(phantom.APP_SUCCESS, msg + "%s %s" % (F5.status_code, httplib.responses[F5.status_code]))
        else:
            # None or False, is a failure based on incorrect IP address, username, passwords
            return self.set_status_save_progress(phantom.APP_ERROR, msg + "%s %s" % (F5.status_code, F5.response))
    def handle_action(self, param):
        """
        This function implements the main functionality of the AppConnector. It gets called for every param dictionary element
        in the parameters array. In it's simplest form it gets the current action identifier and then calls a member function
        of it's own to handle the action. This function is expected to create the results of the action run that get added
        to the connector run. The return value of this function is mostly ignored by the BaseConnector. Instead it will
        just loop over the next param element in the parameters array and call handle_action again.
        We create a case structure in Python to allow for any number of actions to be easily added.
        """
        action_id = self.get_action_identifier()               # action_id determines what function to execute
        self.debug_print("%s HANDLE_ACTION action_id:%s parameters:\n%s" % (F5_Connector.BANNER, action_id, param))
        # dispatch table: action identifier -> handler method
        supported_actions = {"test connectivity": self._test_connectivity,
                             "block ip": self.block_ip,
                             "unblock ip": self.unblock_ip}
        run_action = supported_actions[action_id]
        return run_action(param)
    def unblock_ip(self, param):
        """
        Allow the IP address by deleting the rule which originally blocked the source IP address.
        URL https://10.255.111.100/mgmt/tm/security/firewall/policy/~Common~Phantom_Inbound/rules/sourceIP_8.8.8.8
        """
        config = self.get_config()
        self.debug_print("%s UNBLOCK_IP parameters:\n%s \nconfig:%s" % (F5_Connector.BANNER, param, config))
        action_result = ActionResult(dict(param))              # Add an action result to the App Run
        self.add_action_result(action_result)
        URL = "/mgmt/tm/security/firewall/policy/~%s~%s/rules/%s" % (param["partition"], param["policy"], param["rule name"])
        self.debug_print("%s UNBLOCK_IP URL: %s" % (F5_Connector.BANNER, URL))
        F5 = iControl.BIG_IP(host=config.get("device"),
                             username=config.get("username"),
                             password=config.get("password"),
                             uri=URL,
                             method="DELETE")
        if F5.genericDELETE():
            action_result.set_status(phantom.APP_SUCCESS)
        else:
            action_result.set_status(phantom.APP_ERROR)
        # record the raw device response either way
        action_result.add_data(F5.response)
        self.debug_print("%s UNBLOCK_IP code: %s \nresponse: %s" % (F5_Connector.BANNER, F5.status_code, F5.response))
        return
    def block_ip(self, param):
        """
        Block a source IP address, a simple call to update a security policy in place.
        The firewall policy is called "Phantom_Inbound" which currently is tied to an inbound VIP in the "Common" partition.
        POST
        URL https://10.255.111.100/mgmt/tm/security/firewall/policy/~Common~Phantom_Inbound/rules
        body {"name":"DYNAMIC_BLOCK_NAME","action":"reject","place-after":"first","source":{"addresses":[{"name": "8.8.8.8/32"}"
        """
        config = self.get_config()
        self.debug_print("%s BLOCK_IP parameters:\n%s \nconfig:%s" % (F5_Connector.BANNER, param, config))
        action_result = ActionResult(dict(param))              # Add an action result to the App Run
        self.add_action_result(action_result)
        URL = "/mgmt/tm/security/firewall/policy/~%s~%s/rules" % (param["partition"], param["policy"])
        body = '{"name":"%s","action":"%s","place-after":"first","source":{"addresses":[{"name":"%s/32"}]}}' \
               % (param["rule name"], param["action"], param["source"])
        self.debug_print("%s BLOCK_IP URL: %s \nbody:%s" % (F5_Connector.BANNER, URL, body))
        F5 = iControl.BIG_IP(host=config.get("device"),
                             username=config.get("username"),
                             password=config.get("password"),
                             uri=URL,
                             method="POST")
        if F5.genericPOST(body):
            action_result.set_status(phantom.APP_SUCCESS)
        else:
            action_result.set_status(phantom.APP_ERROR)
        # record the raw device response either way
        action_result.add_data(F5.response)
        self.debug_print("%s BLOCK_IP code: %s \nresponse: %s" % (F5_Connector.BANNER, F5.status_code, F5.response))
        return
# ==========================================================================================
# Logic for testing interactively e.g. python2.7 ./F5_connector.py ./test_jsons/reject.json
# ==========================================================================================
if __name__ == '__main__':
    import sys
    # import pudb                                            # executes a runtime breakpoint and brings up the pudb debugger.
    # pudb.set_trace()
    if (len(sys.argv) < 2):
        # Function-call form of print: a bare `print "..."` statement is a
        # SyntaxError under Python 3 and would break importing this module.
        print("No test json specified as input")
        exit(0)
    with open(sys.argv[1]) as f:                             # input a json file that contains data like the configuration and action parameters,
        in_json = f.read()
        in_json = json.loads(in_json)
    print("%s %s" % (sys.argv[1], json.dumps(in_json, indent=4)))
    connector = F5_Connector()
    connector.print_progress_message = True
    ret_val = connector._handle_action(json.dumps(in_json), None)
    print("%s %s" % (connector.BANNER, json.dumps(json.loads(ret_val), indent=4)))
    exit(0)
|
|
# stdlib
import json
from typing import Dict
from typing import List as TypeList
from typing import Optional
# third party
from google.protobuf.reflection import GeneratedProtocolMessageType
from typing_extensions import final
# relative
from ...... import serialize
from ......proto.grid.messages.setup_messages_pb2 import (
CreateInitialSetUpMessage as CreateInitialSetUpMessage_PB,
)
from ......proto.grid.messages.setup_messages_pb2 import (
GetSetUpMessage as GetSetUpMessage_PB,
)
from ......proto.grid.messages.setup_messages_pb2 import (
GetSetUpResponse as GetSetUpResponse_PB,
)
from ......proto.grid.messages.setup_messages_pb2 import (
UpdateSetupMessage as UpdateSetupMessage_PB,
)
from ......proto.grid.messages.setup_messages_pb2 import (
UpdateSetupResponse as UpdateSetupResponse_PB,
)
from .....common.message import ImmediateSyftMessageWithReply
from .....common.message import ImmediateSyftMessageWithoutReply
from .....common.serde.deserialize import _deserialize
from .....common.serde.serializable import serializable
from .....common.uid import UID
from .....io.address import Address
@serializable()
@final
class GetSetUpMessage(ImmediateSyftMessageWithReply):
    """Request message asking a node for its setup/configuration data."""
    def __init__(
        self,
        address: Address,
        reply_to: Address,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
    def _object2proto(self) -> GetSetUpMessage_PB:
        """Returns a protobuf serialization of self.
        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.
        :return: returns a protobuf object
        :rtype: GetSetUpMessage_PB
        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return GetSetUpMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            reply_to=serialize(self.reply_to),
        )
    @staticmethod
    def _proto2object(
        proto: GetSetUpMessage_PB,
    ) -> "GetSetUpMessage":
        """Creates a GetSetUpMessage from a protobuf
        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.
        :return: returns an instance of GetSetUpMessage
        :rtype: GetSetUpMessage
        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return GetSetUpMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            reply_to=_deserialize(blob=proto.reply_to),
        )
    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type
        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.
        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable. In the metaclass, it calls this method and then
        it takes whatever type is returned from this method and adds an attribute to it
        with the type of this class attached to it. See the MetaSerializable class for
        details.
        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return GetSetUpMessage_PB
@serializable()
@final
class GetSetUpResponse(ImmediateSyftMessageWithoutReply):
    """Response carrying a node's setup data as a dict (JSON-encoded on the wire)."""
    def __init__(
        self,
        address: Address,
        content: Dict,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id)
        # setup payload; serialized via json.dumps in _object2proto
        self.content = content
    def _object2proto(self) -> GetSetUpResponse_PB:
        """Returns a protobuf serialization of self.
        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.
        :return: returns a protobuf object
        :rtype: GetSetUpResponse_PB
        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return GetSetUpResponse_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            content=json.dumps(self.content),
        )
    @staticmethod
    def _proto2object(
        proto: GetSetUpResponse_PB,
    ) -> "GetSetUpResponse":
        """Creates a GetSetUpResponse from a protobuf
        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.
        :return: returns an instance of GetSetUpResponse
        :rtype: GetSetUpResponse
        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return GetSetUpResponse(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            content=json.loads(proto.content),
        )
    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type
        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.
        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable. In the metaclass, it calls this method and then
        it takes whatever type is returned from this method and adds an attribute to it
        with the type of this class attached to it. See the MetaSerializable class for
        details.
        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return GetSetUpResponse_PB
@serializable()
@final
class CreateInitialSetUpMessage(ImmediateSyftMessageWithReply):
    """Request that a freshly deployed domain perform its initial setup.

    Carries the owner account details (name/email/password), the domain
    name and the starting privacy budget. Serialized to and from
    ``CreateInitialSetUpMessage_PB``.
    """

    def __init__(
        self,
        address: Address,
        name: str,
        email: str,
        password: str,
        domain_name: str,
        budget: float,
        reply_to: Address,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
        # Owner credentials and domain configuration, captured verbatim.
        self.name = name
        self.email = email
        self.password = password
        self.domain_name = domain_name
        self.budget = budget

    def _object2proto(self) -> CreateInitialSetUpMessage_PB:
        """Encode this message as a ``CreateInitialSetUpMessage_PB``.

        Internal helper for the Serializable machinery; prefer
        ``serialize(object)`` for public use.

        :return: the protobuf representation of this message.
        :rtype: CreateInitialSetUpMessage_PB
        """
        proto = CreateInitialSetUpMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            reply_to=serialize(self.reply_to),
            name=self.name,
            email=self.email,
            password=self.password,
            domain_name=self.domain_name,
            budget=self.budget,
        )
        return proto

    @staticmethod
    def _proto2object(
        proto: CreateInitialSetUpMessage_PB,
    ) -> "CreateInitialSetUpMessage":
        """Decode a ``CreateInitialSetUpMessage_PB`` back into a message.

        Internal helper; prefer ``syft.deserialize()`` for public use.

        :param proto: the wire-format message to decode.
        :return: the reconstructed CreateInitialSetUpMessage.
        :rtype: CreateInitialSetUpMessage
        """
        return CreateInitialSetUpMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            reply_to=_deserialize(blob=proto.reply_to),
            name=proto.name,
            email=proto.email,
            password=proto.password,
            domain_name=proto.domain_name,
            budget=proto.budget,
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the protobuf type backing this class.

        Also used by the Serializable metaclass to build the reverse
        protobuf-to-class lookup.

        :rtype: GeneratedProtocolMessageType
        """
        return CreateInitialSetUpMessage_PB
@serializable()
@final
class UpdateSetupMessage(ImmediateSyftMessageWithReply):
    """Ask a domain to update its setup/configuration record.

    Carries the domain name, description, contact details, the
    data-access-agreement (DAA) flag and document, plus optional tags.
    Serialized to and from ``UpdateSetupMessage_PB``.
    """

    def __init__(
        self,
        address: Address,
        domain_name: str,
        description: str,
        daa: bool,
        contact: str,
        reply_to: Address,
        daa_document: Optional[bytes] = b"",
        tags: Optional[TypeList] = None,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
        self.domain_name = domain_name
        self.description = description
        self.daa = daa
        self.contact = contact
        self.daa_document = daa_document
        # Default to a fresh list so instances never share mutable state.
        self.tags = [] if tags is None else tags

    def _object2proto(self) -> UpdateSetupMessage_PB:
        """Encode this message as an ``UpdateSetupMessage_PB``.

        Internal helper for the Serializable machinery; prefer
        ``serialize(object)`` for public use.

        :return: the protobuf representation of this message.
        :rtype: UpdateSetupMessage_PB
        """
        return UpdateSetupMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            reply_to=serialize(self.reply_to),
            domain_name=self.domain_name,
            description=self.description,
            daa=self.daa,
            contact=self.contact,
            daa_document=self.daa_document,
            tags=self.tags,
        )

    @staticmethod
    def _proto2object(
        proto: UpdateSetupMessage_PB,
    ) -> "UpdateSetupMessage":
        """Decode an ``UpdateSetupMessage_PB`` back into a message.

        Internal helper; prefer ``syft.deserialize()`` for public use.

        :param proto: the wire-format message to decode.
        :return: the reconstructed UpdateSetupMessage.
        :rtype: UpdateSetupMessage
        """
        return UpdateSetupMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            reply_to=_deserialize(blob=proto.reply_to),
            domain_name=proto.domain_name,
            description=proto.description,
            daa=proto.daa,
            contact=proto.contact,
            daa_document=proto.daa_document,
            # protobuf repeated fields are not plain lists; copy into one.
            tags=list(proto.tags),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the protobuf type backing this class.

        Also used by the Serializable metaclass to build the reverse
        protobuf-to-class lookup.

        :rtype: GeneratedProtocolMessageType
        """
        return UpdateSetupMessage_PB
@serializable()
@final
class UpdateSetupResponse(ImmediateSyftMessageWithoutReply):
    """Response carrying the outcome of a domain-setup update.

    ``content`` is an arbitrary dict; it travels as a JSON string inside
    ``UpdateSetupResponse_PB``.
    """

    def __init__(
        self,
        address: Address,
        content: Dict,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id)
        self.content = content

    def _object2proto(self) -> UpdateSetupResponse_PB:
        """Encode this response as an ``UpdateSetupResponse_PB``.

        Internal helper for the Serializable machinery; prefer
        ``serialize(object)`` for public use.

        :return: the protobuf representation of this response.
        :rtype: UpdateSetupResponse_PB
        """
        encoded_content = json.dumps(self.content)
        return UpdateSetupResponse_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            content=encoded_content,
        )

    @staticmethod
    def _proto2object(
        proto: UpdateSetupResponse_PB,
    ) -> "UpdateSetupResponse":
        """Decode an ``UpdateSetupResponse_PB`` back into a response.

        Internal helper; prefer ``syft.deserialize()`` for public use.

        :param proto: the wire-format message to decode.
        :return: the reconstructed UpdateSetupResponse.
        :rtype: UpdateSetupResponse
        """
        return UpdateSetupResponse(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            content=json.loads(proto.content),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the protobuf type backing this class.

        Also used by the Serializable metaclass to build the reverse
        protobuf-to-class lookup.

        :rtype: GeneratedProtocolMessageType
        """
        return UpdateSetupResponse_PB
|
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <markdowncell>
# BroBeur Studios Blog
#
# This is similar to the artcontrolme script but it deals with BroBeur Studios posts.
# It takes a folder of .meta and .wp/md/html and mashes it together as a json object.
# It also outputs a html file with a random post, and all posts.
# <markdowncell>
# TODO
#
# deal with images.
# need bigger hard drive on local dev system (currently pi running debian)
#
# combine brobeur, artcontrolme, and freshfigure.me/art into one site. Do this by merging all posts and images.
#
# <codecell>
import dominate
from dominate.tags import *
import random
import os
import json
import binascii

# BroBeur Studios blog builder: mashes .meta/.wp post sources into JSON
# and renders one random post (plus a nav of all posts) to index.html.

# Folder holding the .meta / .wp post sources.
bropo = '/home/wcmckee/brobeur-web/posts'
os.chdir(bropo)

# Collect every file name in the posts folder.
wplis = []
for fil in os.listdir(bropo):
    print(fil)
    wplis.append(fil)

# Keep only the metadata files.
metlis = [wp for wp in wplis if '.meta' in wp]

# Parse each .meta file: line order is name, title, date, tags.
alldic = []
for mea in metlis:
    # 'with' closes the handle; also avoids shadowing the builtin 'file'.
    with open(mea, 'r') as meta_file:
        metaf = meta_file.readlines()
    chzdir = {'name': metaf[0].rstrip()}
    chzdir.update({'title': metaf[1].rstrip()})
    chzdir.update({'date': metaf[2].rstrip()})
    chzdir.update({'tags': metaf[3].rstrip()})
    print(chzdir)
    alldic.append(chzdir)

# Dump all post metadata as JSON.
dicjsn = json.dumps(alldic)
with open('/home/wcmckee/visignsys/brobeur.json', 'w') as savdjsn:
    savdjsn.write(dicjsn)

for cha in chzdir:
    print(cha)

for al in alldic:
    print(al)

# Pick a random post index. randrange(len) fixes the off-by-one of the
# original randint(0, len), which could yield an out-of-range index.
tiran = len(alldic)
ranit = random.randrange(tiran)

from random import shuffle
shuffle(alldic)

betjsn = json.dumps(alldic)

randiz = alldic[ranit]
razdiz = list(randiz.values())
randaz = list(randiz.values())

# Derive the post body filename (<name>.wp) from the metadata values.
for itez in randaz:
    print(itez + '.wp')
    str2 = itez.replace("\n", "")
    tafilz = str2 + '.wp'

with open(tafilz, 'r') as bldat:
    blogread = bldat.read()
blogwra = blogread.upper()
blogstr = str(blogwra)

# Random hex token; binascii.hexlify works on Python 2 and 3
# (str.encode('hex') was Python 2 only).
ixran = binascii.hexlify(os.urandom(128))

randiz.update({'text': blogread})
lendiz = len(randiz)

def bugsearch():
    # randiz is keyed by strings, so the original integer indexing raised
    # KeyError; return the metadata values of the chosen post instead.
    return list(randiz.values())

for ranv in randiz.values():
    print(ranv)

from bs4 import BeautifulSoup
# Explicit parser keeps bs4 from emitting the "no parser specified" warning.
soup = BeautifulSoup(blogread, 'html.parser')
print(soup.prettify())
soup.findAll('html')

# Build the HTML page for the chosen post.
doc = dominate.document(title='BroBeur Blog Post')
with doc.head:
    link(rel='stylesheet', href='style.css')
    script(type='text/javascript', src='script.js')
with doc:
    with div(id='header').add(ol()):
        for i in alldic[1]:
            li(a(i.title(), href='/%s.html' % i))
    with div():
        attr(cls='body')
        p(razdiz)
        h1(randaz)
        p(blogread)

# The original line was the syntax error "docre = doc."; render()
# produces the document's HTML string.
docre = doc.render()

brobeurblogpz = '/home/wcmckee/brobeur-blog-post'
os.chdir(brobeurblogpz)

with open('index.json', 'w') as jsnd:
    jsnd.write(str(betjsn))

# Strip non-ASCII characters before writing the page.
yourstring = docre.encode('ascii', 'ignore').decode('ascii')
with open('index.html', 'w') as wriind:
    wriind.write(yourstring)
|
|
from __future__ import absolute_import
from mock import Mock, patch
import responses
from django.http import HttpRequest
from sentry.identity.vsts.provider import VSTSOAuth2CallbackView, VSTSIdentityProvider
from sentry.integrations.vsts.integration import AccountConfigView, AccountForm
from sentry.testutils import TestCase
from six.moves.urllib.parse import parse_qs
from sentry.utils.http import absolute_uri
from sentry.models import Identity, IdentityProvider
from time import time
class TestVSTSOAuthCallbackView(TestCase):
    """Exercises VSTSOAuth2CallbackView's OAuth code-for-token exchange."""

    @responses.activate
    def test_exchange_token(self):
        # Stand-in for the pipeline's redirect URL callback.
        def redirect_url():
            return "https://app.vssps.visualstudio.com/oauth2/authorize"
        view = VSTSOAuth2CallbackView(
            access_token_url="https://app.vssps.visualstudio.com/oauth2/token",
            client_id="vsts-client-id",
            client_secret="vsts-client-secret",
        )
        request = Mock()
        pipeline = Mock()
        pipeline.redirect_url = redirect_url
        # Mock the VSTS token endpoint response.
        responses.add(
            responses.POST,
            "https://app.vssps.visualstudio.com/oauth2/token",
            json={
                "access_token": "xxxxxxxxx",
                "token_type": "jwt-bearer",
                "expires_in": "3599",
                "refresh_token": "zzzzzzzzzz",
            },
        )
        result = view.exchange_token(request, pipeline, "oauth-code")
        # The outgoing request must use the jwt-bearer client-assertion flow.
        mock_request = responses.calls[0].request
        req_params = parse_qs(mock_request.body)
        assert req_params["grant_type"] == ["urn:ietf:params:oauth:grant-type:jwt-bearer"]
        assert req_params["assertion"] == ["oauth-code"]
        assert req_params["redirect_uri"] == ["https://app.vssps.visualstudio.com/oauth2/authorize"]
        assert req_params["client_assertion_type"] == [
            "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"
        ]
        assert req_params["client_assertion"] == ["vsts-client-secret"]
        # The parsed token payload is returned unchanged.
        assert result["access_token"] == "xxxxxxxxx"
        assert result["token_type"] == "jwt-bearer"
        assert result["expires_in"] == "3599"
        assert result["refresh_token"] == "zzzzzzzzzz"
class TestAccountConfigView(TestCase):
    """Tests for AccountConfigView's VSTS account-selection pipeline step."""

    def setUp(self):
        responses.reset()
        # Id of the second account below; also used in the resource-area URL.
        account_id = "1234567-8910"
        self.base_url = "http://sentry2.visualstudio.com/"
        self.accounts = [
            {
                "accountId": "1234567-89",
                "NamespaceId": "00000000-0000-0000-0000-000000000000",
                "accountName": "sentry",
                "OrganizationName": None,
                "AccountType": 0,
                "AccountOwner": "00000000-0000-0000-0000-000000000000",
                "CreatedBy": "00000000-0000-0000-0000-000000000000",
                "CreatedDate": "0001-01-01T00:00:00",
                "AccountStatus": 0,
                "StatusReason": None,
                "LastUpdatedBy": "00000000-0000-0000-0000-000000000000",
                "Properties": {},
            },
            {
                "accountId": account_id,
                "NamespaceId": "00000000-0000-0000-0000-000000000000",
                "accountName": "sentry2",
                "OrganizationName": None,
                "AccountType": 0,
                "AccountOwner": "00000000-0000-0000-0000-000000000000",
                "CreatedBy": "00000000-0000-0000-0000-000000000000",
                "CreatedDate": "0001-01-01T00:00:00",
                "AccountStatus": 0,
                "StatusReason": None,
                "LastUpdatedBy": "00000000-0000-0000-0000-000000000000",
                "Properties": {},
            },
        ]
        # Mock the accounts listing endpoint.
        responses.add(
            responses.GET,
            "https://app.vssps.visualstudio.com/_apis/accounts",
            json={"value": self.accounts, "count": len(self.accounts)},
            status=200,
        )
        # Mock the resource-area lookup used to resolve the account's base URL.
        responses.add(
            responses.GET,
            "https://app.vssps.visualstudio.com/_apis/resourceareas/79134C72-4A58-4B42-976C-04E7115F32BF?hostId=%s&api-preview=5.0-preview.1"
            % account_id,
            json={"locationUrl": self.base_url},
        )

    @responses.activate
    def test_dispatch(self):
        view = AccountConfigView()
        request = HttpRequest()
        # Simulate the user picking the second account in the form.
        request.POST = {"account": "1234567-8910"}
        pipeline = Mock()
        pipeline.state = {
            "accounts": self.accounts,
            "identity": {"data": {"access_token": "123456789"}},
        }
        pipeline.fetch_state = lambda key: pipeline.state[key]
        pipeline.bind_state = lambda name, value: pipeline.state.update({name: value})
        view.dispatch(request, pipeline)
        # The chosen account is bound to pipeline state and the step advances.
        assert pipeline.fetch_state(key="account") == self.accounts[1]
        assert pipeline.next_step.call_count == 1

    @responses.activate
    def test_get_accounts(self):
        view = AccountConfigView()
        accounts = view.get_accounts("access-token", "user-id")
        assert accounts["value"][0]["accountName"] == "sentry"
        assert accounts["value"][1]["accountName"] == "sentry2"

    def test_account_form(self):
        # Form choices should be (accountId, accountName) pairs.
        account_form = AccountForm(self.accounts)
        assert account_form.fields["account"].choices == [
            ("1234567-89", "sentry"),
            ("1234567-8910", "sentry2"),
        ]

    # NOTE(review): "recieved" is a typo for "received"; left as-is since
    # renaming a test method changes what test runners report/select.
    @responses.activate
    @patch("sentry.integrations.vsts.integration.get_user_info")
    @patch("sentry.integrations.vsts.integration.render_to_response")
    def test_no_accounts_recieved(self, mock_render_to_response, mock_get_user_info):
        # Replace the setUp mocks with an empty accounts listing.
        responses.reset()
        responses.add(
            responses.GET,
            "https://app.vssps.visualstudio.com/_apis/accounts",
            json={"value": [], "count": 0},
            status=200,
        )
        view = AccountConfigView()
        request = Mock()
        request.POST = {}
        request.user = self.user
        pipeline = Mock()
        pipeline.fetch_state = lambda key: {"data": {"access_token": "1234567890"}}
        pipeline.organization = self.organization
        view.dispatch(request, pipeline)
        # With no accounts, the view should re-render with the no_accounts flag.
        assert mock_get_user_info.called is True
        assert mock_render_to_response.called is True
        assert mock_render_to_response.call_args[1]["context"] == {"no_accounts": True}
class VstsIdentityProviderTest(TestCase):
    """Tests for VSTSIdentityProvider's refresh-token behaviour."""

    def setUp(self):
        # Persist an identity with an existing token set for the vsts provider.
        self.identity_provider_model = IdentityProvider.objects.create(type="vsts")
        self.identity = Identity.objects.create(
            idp=self.identity_provider_model,
            user=self.user,
            external_id="vsts_id",
            data={
                "access_token": "123456789",
                "token_type": "token_type",
                "expires": 12345678,
                "refresh_token": "n354678",
            },
        )
        self.provider = VSTSIdentityProvider()
        self.client_secret = "12345678"
        # Stub out the secret lookup so no real settings are needed.
        self.provider.get_oauth_client_secret = lambda: self.client_secret

    # NOTE(review): missing the "test_" prefix, so test runners never collect
    # or execute this method — confirm and rename if it is meant to run.
    def get_refresh_token_params(self):
        refresh_token = "wertyui"
        params = self.provider.get_refresh_token_params(refresh_token)
        assert params == {
            "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
            "client_assertion": self.client_secret,
            "grant_type": "refresh_token",
            "assertion": refresh_token,
            "redirect_uri": absolute_uri(self.provider.oauth_redirect_url),
        }

    @responses.activate
    def test_refresh_identity(self):
        refresh_data = {
            "access_token": "access token for this user",
            "token_type": "type of token",
            "expires": 1234567,
            "refresh_token": "new refresh token to use when the token has timed out",
        }
        # Mock the token endpoint to return the refreshed payload.
        responses.add(
            responses.POST, "https://app.vssps.visualstudio.com/oauth2/token", json=refresh_data
        )
        self.provider.refresh_identity(self.identity, redirect_url="redirect_url")
        assert len(responses.calls) == 1
        # The stored identity should now carry the refreshed token data.
        new_identity = Identity.objects.get(id=self.identity.id)
        assert new_identity.data["access_token"] == refresh_data["access_token"]
        assert new_identity.data["token_type"] == refresh_data["token_type"]
        # 'expires' must not be in the future — presumably rewritten to an
        # absolute timestamp by refresh_identity; confirm against the provider.
        assert new_identity.data["expires"] <= int(time())
|
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
import os
from tests.beeswax.impala_beeswax import ImpalaBeeswaxException
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
from tests.common.impala_cluster import ImpalaCluster
from tests.common.skip import *
from tests.util.filesystem_utils import get_fs_path
from subprocess import call
class TestUdfs(ImpalaTestSuite):
    """End-to-end tests for native (.so), IR (.ll) and Hive (.jar) UDFs/UDAs."""

    @classmethod
    def get_workload(cls):
        return 'functional-query'

    @classmethod
    def add_test_dimensions(cls):
        super(TestUdfs, cls).add_test_dimensions()
        # Without limiting the test suite to a single exec option, the tests will fail
        # because the same test case may be executed in parallel with different exec option
        # values leading to conflicting DDL ops.
        cls.TestMatrix.add_dimension(create_single_exec_option_dimension())
        # There is no reason to run these tests using all dimensions.
        cls.TestMatrix.add_dimension(create_uncompressed_text_dimension(cls.get_workload()))

    def test_native_functions(self, vector):
        """Creates native UDFs/UDAs from shared objects and runs the query tests."""
        database = 'native_function_test'
        self.__load_functions(
            self.create_udfs_template, vector, database,
            get_fs_path('/test-warehouse/libTestUdfs.so'))
        self.__load_functions(
            self.create_udas_template, vector, database,
            get_fs_path('/test-warehouse/libudasample.so'))
        self.run_test_case('QueryTest/udf', vector, use_db=database)
        if not IS_S3: # S3 doesn't support INSERT
            self.run_test_case('QueryTest/udf-init-close', vector, use_db=database)
        self.run_test_case('QueryTest/uda', vector, use_db=database)

    def test_ir_functions(self, vector):
        """Creates UDFs from LLVM IR and runs the UDF query tests."""
        database = 'ir_function_test'
        self.__load_functions(
            self.create_udfs_template, vector, database,
            get_fs_path('/test-warehouse/test-udfs.ll'))
        self.run_test_case('QueryTest/udf', vector, use_db=database)
        if not IS_S3: # S3 doesn't support INSERT
            self.run_test_case('QueryTest/udf-init-close', vector, use_db=database)

    def test_udf_errors(self, vector):
        self.run_test_case('QueryTest/udf-errors', vector)

    def test_udf_invalid_symbol(self, vector):
        """IMPALA-1642: Impala crashes if the symbol for a Hive UDF doesn't exist.
        Crashing is non-deterministic so we run the UDF several times."""
        drop_fn_stmt = "drop function if exists default.fn_invalid_symbol(STRING)"
        create_fn_stmt = ("create function default.fn_invalid_symbol(STRING) returns "
            "STRING LOCATION '%s' SYMBOL='not.a.Symbol'" %
            get_fs_path('/test-warehouse/impala-hive-udfs.jar'))
        query = "select default.fn_invalid_symbol('test')"
        self.client.execute(drop_fn_stmt)
        try:
            self.client.execute(create_fn_stmt)
            for _ in xrange(5):
                ex = self.execute_query_expect_failure(self.client, query)
                assert "Unable to find class" in str(ex)
        finally:
            # Always clean up the broken function, even if an assert fired.
            self.client.execute(drop_fn_stmt)

    def test_hive_udfs(self, vector):
        self.run_test_case('QueryTest/load-hive-udfs', vector)
        self.run_test_case('QueryTest/hive-udf', vector)

    def test_libs_with_same_filenames(self, vector):
        self.run_test_case('QueryTest/libs_with_same_filenames', vector)

    def test_udf_update_via_drop(self, vector):
        """Test updating the UDF binary without restarting Impala. Dropping
        the function should remove the binary from the local cache."""
        # Run with sync_ddl to guarantee the drop is processed by all impalads.
        exec_options = vector.get_value('exec_option')
        exec_options['sync_ddl'] = 1
        old_udf = os.path.join(os.environ['IMPALA_HOME'],
            'testdata/udfs/impala-hive-udfs.jar')
        new_udf = os.path.join(os.environ['IMPALA_HOME'],
            'tests/test-hive-udfs/target/test-hive-udfs-1.0.jar')
        udf_dst = get_fs_path('/test-warehouse/impala-hive-udfs2.jar')
        drop_fn_stmt = 'drop function if exists default.udf_update_test_drop()'
        create_fn_stmt = "create function default.udf_update_test_drop() returns string "\
            "LOCATION '" + udf_dst + "' SYMBOL='com.cloudera.impala.TestUpdateUdf'"
        query_stmt = "select default.udf_update_test_drop()"
        # Put the old UDF binary on HDFS, make the UDF in Impala and run it.
        call(["hadoop", "fs", "-put", "-f", old_udf, udf_dst])
        self.execute_query_expect_success(self.client, drop_fn_stmt, exec_options)
        self.execute_query_expect_success(self.client, create_fn_stmt, exec_options)
        self.__run_query_all_impalads(exec_options, query_stmt, ["Old UDF"])
        # Update the binary, drop and create the function again. The new binary should
        # be running.
        call(["hadoop", "fs", "-put", "-f", new_udf, udf_dst])
        self.execute_query_expect_success(self.client, drop_fn_stmt, exec_options)
        self.execute_query_expect_success(self.client, create_fn_stmt, exec_options)
        self.__run_query_all_impalads(exec_options, query_stmt, ["New UDF"])

    def test_udf_update_via_create(self, vector):
        """Test updating the UDF binary without restarting Impala. Creating a new function
        from the library should refresh the cache."""
        # Run with sync_ddl to guarantee the create is processed by all impalads.
        exec_options = vector.get_value('exec_option')
        exec_options['sync_ddl'] = 1
        old_udf = os.path.join(os.environ['IMPALA_HOME'],
            'testdata/udfs/impala-hive-udfs.jar')
        new_udf = os.path.join(os.environ['IMPALA_HOME'],
            'tests/test-hive-udfs/target/test-hive-udfs-1.0.jar')
        udf_dst = get_fs_path('/test-warehouse/impala-hive-udfs3.jar')
        old_function_name = "udf_update_test_create1"
        new_function_name = "udf_update_test_create2"
        drop_fn_template = 'drop function if exists default.%s()'
        self.execute_query_expect_success(
            self.client, drop_fn_template % old_function_name, exec_options)
        self.execute_query_expect_success(
            self.client, drop_fn_template % new_function_name, exec_options)
        create_fn_template = "create function default.%s() returns string "\
            "LOCATION '" + udf_dst + "' SYMBOL='com.cloudera.impala.TestUpdateUdf'"
        query_template = "select default.%s()"
        # Put the old UDF binary on HDFS, make the UDF in Impala and run it.
        call(["hadoop", "fs", "-put", "-f", old_udf, udf_dst])
        self.execute_query_expect_success(
            self.client, create_fn_template % old_function_name, exec_options)
        self.__run_query_all_impalads(
            exec_options, query_template % old_function_name, ["Old UDF"])
        # Update the binary, and create a new function using the binary. The new binary
        # should be running.
        call(["hadoop", "fs", "-put", "-f", new_udf, udf_dst])
        self.execute_query_expect_success(
            self.client, create_fn_template % new_function_name, exec_options)
        self.__run_query_all_impalads(
            exec_options, query_template % new_function_name, ["New UDF"])
        # The old function should use the new library now
        self.__run_query_all_impalads(
            exec_options, query_template % old_function_name, ["New UDF"])

    def test_drop_function_while_running(self, vector):
        """Dropping a UDF while a query uses it must not break the running query."""
        self.client.execute("drop function if exists default.drop_while_running(BIGINT)")
        self.client.execute("create function default.drop_while_running(BIGINT) returns "\
            "BIGINT LOCATION '%s' SYMBOL='Identity'" %
            get_fs_path('/test-warehouse/libTestUdfs.so'))
        query = \
            "select default.drop_while_running(l_orderkey) from tpch.lineitem limit 10000"
        # Run this query asynchronously.
        handle = self.execute_query_async(query, vector.get_value('exec_option'),
            table_format=vector.get_value('table_format'))
        # Fetch some rows from the async query to make sure the UDF is being used
        results = self.client.fetch(query, handle, 1)
        assert results.success
        assert len(results.data) == 1
        # Drop the function while the original query is running.
        self.client.execute("drop function default.drop_while_running(BIGINT)")
        # Fetch the rest of the rows, this should still be able to run the UDF
        results = self.client.fetch(query, handle, -1)
        assert results.success
        assert len(results.data) == 9999

    # Run serially because this will blow the process limit, potentially causing other
    # queries to fail
    @pytest.mark.execute_serially
    def test_mem_limits(self, vector):
        # Set the mem limit high enough that a simple scan can run
        mem_limit = 1024 * 1024
        vector.get_value('exec_option')['mem_limit'] = mem_limit
        try:
            self.run_test_case('QueryTest/udf-mem-limit', vector)
            assert False, "Query was expected to fail"
        except ImpalaBeeswaxException as e:
            self.__check_exception(e)
        try:
            self.run_test_case('QueryTest/uda-mem-limit', vector)
            assert False, "Query was expected to fail"
        except ImpalaBeeswaxException as e:
            self.__check_exception(e)

    def __check_exception(self, e):
        """Re-raise 'e' unless it (or its inner exception) is a mem-limit/cancel error."""
        # The interesting exception message may be in 'e' or in its inner_exception
        # depending on the point of query failure.
        if 'Memory limit exceeded' in str(e) or 'Cancelled' in str(e):
            return
        # BUG FIX: the original tested "'Cancelled' not in", which accepted almost
        # any inner exception message; both patterns must be matched positively,
        # mirroring the check on str(e) above.
        if e.inner_exception is not None\
           and ('Memory limit exceeded' in e.inner_exception.message
                or 'Cancelled' in e.inner_exception.message):
            return
        raise e

    def __run_query_all_impalads(self, exec_options, query, expected):
        """Runs 'query' against every impalad and asserts each returns 'expected'."""
        impala_cluster = ImpalaCluster()
        for impalad in impala_cluster.impalads:
            client = impalad.service.create_beeswax_client()
            result = self.execute_query_expect_success(client, query, exec_options)
            assert result.data == expected

    def __load_functions(self, template, vector, database, location):
        """Executes the ';'-separated DDL in 'template' formatted for the db/library."""
        queries = template.format(database=database, location=location)
        # Split queries and remove empty statements (the comprehension's strip()
        # filter makes a further per-query emptiness check unnecessary).
        queries = [q for q in queries.split(';') if q.strip()]
        exec_options = vector.get_value('exec_option')
        for query in queries:
            result = self.execute_query_expect_success(self.client, query, exec_options)
            assert result is not None

    # Create test UDA functions in {database} from library {location}
    create_udas_template = """
drop function if exists {database}.test_count(int);
drop function if exists {database}.hll(int);
drop function if exists {database}.sum_small_decimal(decimal(9,2));
create database if not exists {database};
create aggregate function {database}.test_count(int) returns bigint
location '{location}' update_fn='CountUpdate';
create aggregate function {database}.hll(int) returns string
location '{location}' update_fn='HllUpdate';
create aggregate function {database}.sum_small_decimal(decimal(9,2))
returns decimal(9,2) location '{location}' update_fn='SumSmallDecimalUpdate';
"""

    # Create test UDF functions in {database} from library {location}
    create_udfs_template = """
drop function if exists {database}.identity(boolean);
drop function if exists {database}.identity(tinyint);
drop function if exists {database}.identity(smallint);
drop function if exists {database}.identity(int);
drop function if exists {database}.identity(bigint);
drop function if exists {database}.identity(float);
drop function if exists {database}.identity(double);
drop function if exists {database}.identity(string);
drop function if exists {database}.identity(timestamp);
drop function if exists {database}.identity(decimal(9,0));
drop function if exists {database}.identity(decimal(18,1));
drop function if exists {database}.identity(decimal(38,10));
drop function if exists {database}.all_types_fn(
    string, boolean, tinyint, smallint, int, bigint, float, double, decimal(2,0));
drop function if exists {database}.no_args();
drop function if exists {database}.var_and(boolean...);
drop function if exists {database}.var_sum(int...);
drop function if exists {database}.var_sum(double...);
drop function if exists {database}.var_sum(string...);
drop function if exists {database}.var_sum(decimal(4,2)...);
drop function if exists {database}.var_sum_multiply(double, int...);
drop function if exists {database}.constant_timestamp();
drop function if exists {database}.validate_arg_type(string);
drop function if exists {database}.count_rows();
drop function if exists {database}.constant_arg(int);
drop function if exists {database}.validate_open(int);
drop function if exists {database}.mem_test(bigint);
drop function if exists {database}.mem_test_leaks(bigint);
drop function if exists {database}.unmangled_symbol();
drop function if exists {database}.four_args(int, int, int, int);
drop function if exists {database}.five_args(int, int, int, int, int);
drop function if exists {database}.six_args(int, int, int, int, int, int);
drop function if exists {database}.seven_args(int, int, int, int, int, int, int);
drop function if exists {database}.eight_args(int, int, int, int, int, int, int, int);
create database if not exists {database};
create function {database}.identity(boolean) returns boolean
location '{location}' symbol='Identity';
create function {database}.identity(tinyint) returns tinyint
location '{location}' symbol='Identity';
create function {database}.identity(smallint) returns smallint
location '{location}' symbol='Identity';
create function {database}.identity(int) returns int
location '{location}' symbol='Identity';
create function {database}.identity(bigint) returns bigint
location '{location}' symbol='Identity';
create function {database}.identity(float) returns float
location '{location}' symbol='Identity';
create function {database}.identity(double) returns double
location '{location}' symbol='Identity';
create function {database}.identity(string) returns string
location '{location}'
symbol='_Z8IdentityPN10impala_udf15FunctionContextERKNS_9StringValE';
create function {database}.identity(timestamp) returns timestamp
location '{location}'
symbol='_Z8IdentityPN10impala_udf15FunctionContextERKNS_12TimestampValE';
create function {database}.identity(decimal(9,0)) returns decimal(9,0)
location '{location}'
symbol='_Z8IdentityPN10impala_udf15FunctionContextERKNS_10DecimalValE';
create function {database}.identity(decimal(18,1)) returns decimal(18,1)
location '{location}'
symbol='_Z8IdentityPN10impala_udf15FunctionContextERKNS_10DecimalValE';
create function {database}.identity(decimal(38,10)) returns decimal(38,10)
location '{location}'
symbol='_Z8IdentityPN10impala_udf15FunctionContextERKNS_10DecimalValE';
create function {database}.all_types_fn(
    string, boolean, tinyint, smallint, int, bigint, float, double, decimal(2,0))
returns int
location '{location}' symbol='AllTypes';
create function {database}.no_args() returns string
location '{location}'
symbol='_Z6NoArgsPN10impala_udf15FunctionContextE';
create function {database}.var_and(boolean...) returns boolean
location '{location}' symbol='VarAnd';
create function {database}.var_sum(int...) returns int
location '{location}' symbol='VarSum';
create function {database}.var_sum(double...) returns double
location '{location}' symbol='VarSum';
create function {database}.var_sum(string...) returns int
location '{location}' symbol='VarSum';
create function {database}.var_sum(decimal(4,2)...) returns decimal(18,2)
location '{location}' symbol='VarSum';
create function {database}.var_sum_multiply(double, int...) returns double
location '{location}'
symbol='_Z14VarSumMultiplyPN10impala_udf15FunctionContextERKNS_9DoubleValEiPKNS_6IntValE';
create function {database}.constant_timestamp() returns timestamp
location '{location}' symbol='ConstantTimestamp';
create function {database}.validate_arg_type(string) returns boolean
location '{location}' symbol='ValidateArgType';
create function {database}.count_rows() returns bigint
location '{location}' symbol='Count' prepare_fn='CountPrepare' close_fn='CountClose';
create function {database}.constant_arg(int) returns int
location '{location}' symbol='ConstantArg' prepare_fn='ConstantArgPrepare' close_fn='ConstantArgClose';
create function {database}.validate_open(int) returns boolean
location '{location}' symbol='ValidateOpen'
prepare_fn='ValidateOpenPrepare' close_fn='ValidateOpenClose';
create function {database}.mem_test(bigint) returns bigint
location '{location}' symbol='MemTest'
prepare_fn='MemTestPrepare' close_fn='MemTestClose';
create function {database}.mem_test_leaks(bigint) returns bigint
location '{location}' symbol='MemTest'
prepare_fn='MemTestPrepare';
-- Regression test for IMPALA-1475
create function {database}.unmangled_symbol() returns bigint
location '{location}' symbol='UnmangledSymbol';
create function {database}.four_args(int, int, int, int) returns int
location '{location}' symbol='FourArgs';
create function {database}.five_args(int, int, int, int, int) returns int
location '{location}' symbol='FiveArgs';
create function {database}.six_args(int, int, int, int, int, int) returns int
location '{location}' symbol='SixArgs';
create function {database}.seven_args(int, int, int, int, int, int, int) returns int
location '{location}' symbol='SevenArgs';
create function {database}.eight_args(int, int, int, int, int, int, int, int) returns int
location '{location}' symbol='EightArgs';
"""
|
|
# coding: utf-8
"""
Gmail
Access Gmail mailboxes including sending user email.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import io
import json
import ssl
import certifi
import logging
import re
# python 2 and python 3 compatibility library
from six import PY3
from six.moves.urllib.parse import urlencode
from .configuration import Configuration
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
    """File-like wrapper around a urllib3 response.

    Snapshots the commonly used fields (``status``, ``reason``, ``data``)
    so callers may mutate them (e.g. decode ``data`` to ``str``) without
    touching the underlying urllib3 object, which stays reachable via
    ``urllib3_response``.
    """

    def __init__(self, resp):
        self.urllib3_response = resp
        # Copy the frequently-accessed fields onto this wrapper.
        for field in ("status", "reason", "data"):
            setattr(self, field, getattr(resp, field))

    def getheaders(self):
        """Return all response headers (delegates to urllib3)."""
        return self.urllib3_response.getheaders()

    def getheader(self, name, default=None):
        """Return one response header, or *default* when absent."""
        return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
    """HTTP client backed by a shared ``urllib3.PoolManager``.

    TLS behaviour (verification, CA bundle, client cert/key) is read from
    the global ``Configuration`` singleton at construction time.
    """
    def __init__(self, pools_size=4, maxsize=4):
        # urllib3.PoolManager will pass all kw parameters to connectionpool
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680
        # maxsize is the number of requests to host that are allowed in parallel
        # ca_certs vs cert_file vs key_file
        # http://stackoverflow.com/a/23957365/2985775
        # cert_reqs
        if Configuration().verify_ssl:
            cert_reqs = ssl.CERT_REQUIRED
        else:
            cert_reqs = ssl.CERT_NONE
        # ca_certs
        if Configuration().ssl_ca_cert:
            ca_certs = Configuration().ssl_ca_cert
        else:
            # if not set certificate file, use Mozilla's root certificates.
            ca_certs = certifi.where()
        # cert_file
        cert_file = Configuration().cert_file
        # key file
        key_file = Configuration().key_file
        # https pool manager
        self.pool_manager = urllib3.PoolManager(
            num_pools=pools_size,
            maxsize=maxsize,
            cert_reqs=cert_reqs,
            ca_certs=ca_certs,
            cert_file=cert_file,
            key_file=key_file
        )
    def request(self, method, url, query_params=None, headers=None,
                body=None, post_params=None, _preload_content=True, _request_timeout=None):
        """
        Perform an HTTP request and return the (wrapped) response.

        Raises ApiException on SSL errors, on non-2xx status codes, and
        when the arguments cannot be mapped to a request body.

        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without
                                 reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of (connection, read) timeouts.
        """
        method = method.upper()
        assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS']
        if post_params and body:
            # body and post_params are mutually exclusive encodings.
            raise ValueError(
                "body parameter cannot be used with post_params parameter."
            )
        post_params = post_params or {}
        headers = headers or {}
        timeout = None
        if _request_timeout:
            # NOTE(review): only int (and py2 long) totals are accepted here;
            # a float timeout falls through with timeout=None — confirm intent.
            if isinstance(_request_timeout, (int, ) if PY3 else (int, long)):
                timeout = urllib3.Timeout(total=_request_timeout)
            elif isinstance(_request_timeout, tuple) and len(_request_timeout) == 2:
                timeout = urllib3.Timeout(connect=_request_timeout[0], read=_request_timeout[1])
        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'application/json'
        try:
            # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
            if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
                if query_params:
                    url += '?' + urlencode(query_params)
                if re.search('json', headers['Content-Type'], re.IGNORECASE):
                    request_body = None
                    if body:
                        request_body = json.dumps(body)
                    r = self.pool_manager.request(method, url,
                                                  body=request_body,
                                                  preload_content=_preload_content,
                                                  timeout=timeout,
                                                  headers=headers)
                elif headers['Content-Type'] == 'application/x-www-form-urlencoded':
                    r = self.pool_manager.request(method, url,
                                                  fields=post_params,
                                                  encode_multipart=False,
                                                  preload_content=_preload_content,
                                                  timeout=timeout,
                                                  headers=headers)
                elif headers['Content-Type'] == 'multipart/form-data':
                    # must del headers['Content-Type'], or the correct Content-Type
                    # which generated by urllib3 will be overwritten.
                    del headers['Content-Type']
                    r = self.pool_manager.request(method, url,
                                                  fields=post_params,
                                                  encode_multipart=True,
                                                  preload_content=_preload_content,
                                                  timeout=timeout,
                                                  headers=headers)
                # Pass a `string` parameter directly in the body to support
                # other content types than Json when `body` argument is provided
                # in serialized form
                elif isinstance(body, str):
                    request_body = body
                    r = self.pool_manager.request(method, url,
                                                  body=request_body,
                                                  preload_content=_preload_content,
                                                  timeout=timeout,
                                                  headers=headers)
                else:
                    # Cannot generate the request from given parameters
                    msg = """Cannot prepare a request message for provided arguments.
                             Please check that your arguments match declared content type."""
                    raise ApiException(status=0, reason=msg)
            # For `GET`, `HEAD`
            else:
                r = self.pool_manager.request(method, url,
                                              fields=query_params,
                                              preload_content=_preload_content,
                                              timeout=timeout,
                                              headers=headers)
        except urllib3.exceptions.SSLError as e:
            msg = "{0}\n{1}".format(type(e).__name__, str(e))
            raise ApiException(status=0, reason=msg)
        if _preload_content:
            r = RESTResponse(r)
            # In the python 3, the response.data is bytes.
            # we need to decode it to string.
            if PY3:
                r.data = r.data.decode('utf8')
            # log response body
            logger.debug("response body: %s", r.data)
        # Statuses 200-205 are treated as success; everything else raises.
        if r.status not in range(200, 206):
            raise ApiException(http_resp=r)
        return r
    def GET(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None):
        """Issue a GET request."""
        return self.request("GET", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)
    def HEAD(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None):
        """Issue a HEAD request."""
        return self.request("HEAD", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)
    def OPTIONS(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
                _request_timeout=None):
        """Issue an OPTIONS request."""
        return self.request("OPTIONS", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def DELETE(self, url, headers=None, query_params=None, body=None, _preload_content=True, _request_timeout=None):
        """Issue a DELETE request."""
        return self.request("DELETE", url,
                            headers=headers,
                            query_params=query_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def POST(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
             _request_timeout=None):
        """Issue a POST request."""
        return self.request("POST", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def PUT(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
            _request_timeout=None):
        """Issue a PUT request."""
        return self.request("PUT", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
    def PATCH(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
              _request_timeout=None):
        """Issue a PATCH request."""
        return self.request("PATCH", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
class ApiException(Exception):
    """Error raised for transport failures and non-success HTTP statuses.

    May be built either from an ``http_resp`` (status/reason/body/headers
    are taken from the response) or from explicit ``status``/``reason``.
    """

    def __init__(self, status=None, reason=None, http_resp=None):
        self.status = http_resp.status if http_resp else status
        self.reason = http_resp.reason if http_resp else reason
        self.body = http_resp.data if http_resp else None
        self.headers = http_resp.getheaders() if http_resp else None

    def __str__(self):
        """Render a multi-line error message with status, reason and,
        when available, the response headers and body."""
        parts = ["({0})\n"
                 "Reason: {1}\n".format(self.status, self.reason)]
        if self.headers:
            parts.append("HTTP response headers: {0}\n".format(self.headers))
        if self.body:
            parts.append("HTTP response body: {0}\n".format(self.body))
        return "".join(parts)
|
|
import json
from importlib import reload
from unittest import TestCase
from unittest.mock import patch, MagicMock
from api_background import dhis2_export
import api_background
from api_background.dhis2_export import put, delete, get, post, NewIdsProvider, get_form_keys_to_data_elements_dict, \
get_dhis2_organisations_codes_to_ids
class Dhis2RequestsWrapperTestCase(TestCase):
    """
    Unit tests for the requests wrapper: each wrapper function must forward
    its arguments verbatim to the corresponding ``requests`` call, and a
    non-2xx response must be logged as an error.
    """
    def setUp(self):
        # Arbitrary kwargs/payloads that the wrappers must pass through unchanged.
        self.kwargs = {"they": "shall", "pass": "ok"}
        self.fake_url = "http://foo"
        self.bar = "bar"
        self.baz = "baz"
    @patch('requests.put')
    def test_put(self, requests_mock):
        self.__mock_ok_response(requests_mock)
        put(self.fake_url, data=self.bar, json=self.baz, **self.kwargs)
        requests_mock.assert_called_once_with(self.fake_url, data=self.bar, json=self.baz, **self.kwargs)
    @patch('requests.post')
    def test_post(self, requests_mock):
        self.__mock_ok_response(requests_mock)
        post(self.fake_url, data=self.bar, json=self.baz, **self.kwargs)
        requests_mock.assert_called_once_with(self.fake_url, data=self.bar, json=self.baz, **self.kwargs)
    @patch('requests.get')
    def test_get(self, requests_mock):
        self.__mock_ok_response(requests_mock)
        get(self.fake_url, params=self.bar, **self.kwargs)
        requests_mock.assert_called_once_with(self.fake_url, params=self.bar, **self.kwargs)
    @patch('requests.delete')
    def test_delete(self, requests_mock):
        self.__mock_ok_response(requests_mock)
        delete(self.fake_url, **self.kwargs)
        requests_mock.assert_called_once_with(self.fake_url, **self.kwargs)
    @patch('requests.Response')
    @patch('requests.get')
    def test_should_report_error_when_error_response(self, requests_mock, response_mock):
        # Error responses must be logged under the 'meerkat_api.dhis2' logger:
        # first the status code, then the response message.
        response_mock.status_code = 999
        response_mock.json.return_value = {"message": "Error 999"}
        requests_mock.return_value = response_mock
        with self.assertLogs('meerkat_api.dhis2', level='ERROR') as cm:
            get(self.fake_url)
        self.assertEqual(cm.output[0], 'ERROR:meerkat_api.dhis2:Request failed with code 999.')
        self.assertTrue("Error 999" in cm.output[1])
    def __mock_ok_response(self, requests_mock):
        # Make the patched requests function return a 200 response.
        # NOTE(review): the positional argument to MagicMock is ``spec``;
        # passing the *string* 'requests.Response' specs the mock against a
        # str instance. Setting status_code still works (spec is not
        # spec_set), but attribute access is restricted — confirm intended.
        response = MagicMock('requests.Response')
        response.status_code = 200
        requests_mock.return_value = response
class NewIdsProviderTestCase(TestCase):
    """
    Unit test for the dhis2 uids provider.

    ``requests.get`` is patched to serve canned id batches; the provider is
    expected to fetch lazily and to buffer each batch until exhausted.
    """
    def setUp(self):
        self.first_batch = {
            "codes": ["a", "b"]
        }
        self.second_batch = {
            "codes": ["c", "d"]
        }
        self.not_used_batch = {
            "codes": ["nope", "nah"]
        }
        response_patch = patch('requests.Response')
        get_patch = patch('requests.get')
        # Register patcher cleanup here; this runs even when a test fails,
        # so no tearDown is needed. (The previous tearDown mistakenly
        # called .stop() on the MagicMock objects returned by start(),
        # which was a silent no-op.)
        self.addCleanup(response_patch.stop)
        self.addCleanup(get_patch.stop)
        self.response_mock = response_patch.start()
        self.get_mock = get_patch.start()
        # Two consecutive .json() calls yield the two batches.
        self.response_mock.json.side_effect = [self.first_batch, self.second_batch]
        self.response_mock.status_code = 200
        self.get_mock.return_value = self.response_mock
    def test_pop_should_lazy_initialize(self):
        """No HTTP call may happen before the first pop()."""
        obj_under_test = NewIdsProvider("http://fake/url/api", ('John', 'random_string'))
        self.assertFalse(self.get_mock.called)
        obj_under_test.pop()
        self.assertTrue(self.get_mock.called)
    def test_pop_should_buffer_ids_lazily(self):
        """Three pops from 2-id batches require exactly two fetches."""
        obj_under_test = NewIdsProvider("http://fake/url/api", ('John', 'random_string'))
        for i in range(3):
            obj_under_test.pop()
        self.assertEqual(self.get_mock.call_count, 2)
    def test_pop_should_return_correct_ids(self):
        """Ids come back batch by batch, each batch in reverse order."""
        obj_under_test = NewIdsProvider("http://fake/url/api", ('John', 'random_string'))
        self.__validate_return_codes(self.first_batch, obj_under_test)
        self.__validate_return_codes(self.second_batch, obj_under_test)
    def __validate_return_codes(self, batch_json, obj_under_test):
        # The provider pops from the end of the batch, hence reversed().
        for expected_id in reversed(batch_json["codes"]):
            actual_id = obj_under_test.pop()
            self.assertEqual(actual_id, expected_id)
class ProgramUpdateTestCase(TestCase):
"""
Unit test for dhis2 program update
"""
def setUp(self):
self.form_config = {"name": "fake_form"}
self.ORGANISATION_UNITS_KEY = 'organisationUnits'
@patch('requests.put')
@patch('requests.get')
@patch('requests.Response')
@patch('requests.Response')
def test_with_program_id_and_with_already_assigned_organisations(self, get_res_mock, put_res_mock, get_mock,
put_mock):
expected_program_id = "existing_program_id"
self.form_config['programId'] = expected_program_id
existing = ["one", "two", "three"]
get_res_mock.status_code = 200
get_res_mock.json.return_value = {self.ORGANISATION_UNITS_KEY: self.ids_jarray(existing)}
get_mock.return_value = get_res_mock
put_res_mock.status_code = 200
put_mock.return_value = put_res_mock
new = ["four", "five"]
# code under test
returned_program_id = dhis2_export.update_program(self.form_config, new)
# assertions
self.assertEqual(expected_program_id, returned_program_id)
put_mock.assert_called_once()
called_url = put_mock.call_args[0][0]
expected_path = '/programs/' + expected_program_id
self.assertTrue(called_url.endswith(expected_path))
actual_org_ids = put_mock.call_args[1]['data'][self.ORGANISATION_UNITS_KEY]
expected_org_ids = self.ids_jarray(existing + new)
self.assertEquals(actual_org_ids, expected_org_ids)
@patch('requests.put')
@patch('requests.get')
@patch('requests.Response')
@patch('requests.Response')
def test_without_program_id_and_with_already_assigned_organisations(self, get_res_mock, put_res_mock, get_mock,
put_mock):
expected_program_id = "to_be_found_program_id"
get_res_mock.json.return_value = {"programs": [{"id": expected_program_id}]}
get_res_mock.status_code = 200
get_mock.return_value = get_res_mock
put_res_mock.status_code = 200
put_mock.return_value = put_res_mock
new = ["four", "five"]
returned_program_id = dhis2_export.update_program(self.form_config, new)
self.assertEqual(expected_program_id, returned_program_id)
put_mock.assert_called_once()
called_url = put_mock.call_args[0][0]
expected_path = '/programs/' + expected_program_id
self.assertTrue(called_url.endswith(expected_path))
@patch('requests.post')
@patch('requests.get')
@patch('requests.Response')
@patch('requests.Response')
def test_create_a_new_program(self, post_res_mock, get_res_mock, get_mock, post_mock):
get_res_mock.json.return_value = {"programs": []}
get_res_mock.status_code = 200
get_mock.return_value = get_res_mock
post_res_mock.status_code = 200
post_mock.return_value = post_res_mock
ids_provder_mock = MagicMock()
expected_program_id = 'generated_id_1'
ids_provder_mock.pop.side_effect = [expected_program_id, "generated_id_2", "generated_id_3"]
dhis2_export.ids = ids_provder_mock
keys_to_dhis2_ids = {}
for i in range(10):
keys_to_dhis2_ids["col" + str(i)] = "dhis2_id" + str(i)
dhis2_export.get_form_keys_to_data_elements_dict = MagicMock(return_value=keys_to_dhis2_ids)
# code under test
new = ["four", "five"]
returned_program_id = dhis2_export.update_program(self.form_config, new)
# assertions
self.assertEqual(expected_program_id, returned_program_id)
program_call_args = post_mock.call_args_list[0]
called_url = program_call_args[0][0]
expected_path = '/programs'
self.assertTrue(called_url.endswith(expected_path))
data_json = json.loads(program_call_args[1]['data'])
send_program_id = data_json['id']
self.assertEqual(expected_program_id, send_program_id)
send_org_units = data_json['organisationUnits']
expected_org_units = self.ids_jarray(new)
self.assertEqual(expected_org_units, send_org_units)
program_stages_call_args = post_mock.call_args_list[1]
called_url = program_stages_call_args[0][0]
expected_path = '/programStages'
self.assertTrue(called_url.endswith(expected_path))
expected_data_json = {
"name": expected_program_id,
"program": {"id": expected_program_id},
"programStageDataElements": [{"dataElement": {"id": dhis2_id}} for dhis2_id in keys_to_dhis2_ids.values()]
}
actual_data_json = json.loads(program_stages_call_args[1]['data'])
self.assertEqual(expected_data_json, actual_data_json)
@staticmethod
def ids_jarray(ids):
return [{"id": id} for id in ids]
class GetFormKeysToDataElementsDictTest(TestCase):
    """Unit test for event capture metadata creation: maps form keys to
    dhis2 data-element ids, creating missing elements and caching results."""
    def setUp(self):
        self.keys = ["test_clinic", "test_region", "test_district"]
        self.dhis2_ids = ["FQ2o8UBlcrS", "M62VHgYT2n0", "uF1DLnZNlWe"]
        # clear module state (cached responses etc.)
        api_background.dhis2_export = reload(api_background.dhis2_export)
    @patch('requests.get')
    @patch('requests.Response', status_code=200)
    @patch('api_background.dhis2_export.__get_keys_from_db')
    def test_returns_valid_json(self, get_keys_mock, response_mock, get_mock):
        """All keys already exist in dhis2: plain key->id mapping returned."""
        response_mock.json.return_value = {
            'dataElements': [{"id": item[0], "displayName": item[1]} for item in zip(self.dhis2_ids, self.keys)]
        }
        get_mock.return_value = response_mock
        get_keys_mock.return_value = self.keys
        actual_value = get_form_keys_to_data_elements_dict("fakeUrl", ('user', 'password'), {"headers": "some"},
                                                           "my_form")
        expected_value = dict(zip(self.keys, self.dhis2_ids))
        self.assertEqual(actual_value, expected_value)
    @patch('requests.get')
    @patch('requests.Response', status_code=200)
    @patch('api_background.dhis2_export.__get_keys_from_db')
    @patch('api_background.dhis2_export.__update_data_elements')
    def test_should_create_non_existing_keys(self, update_mock, get_keys_mock, response_mock, get_mock):
        """A key missing in dhis2 must be created and its new id mapped."""
        response_mock.json.return_value = {
            'dataElements': [{"id": item[0], "displayName": item[1]} for item in zip(self.dhis2_ids, self.keys)]
        }
        get_mock.return_value = response_mock
        get_keys_mock.return_value = self.keys + ['not_present_key']
        update_mock.return_value = 'new_created_dhis2_id'
        actual_value = get_form_keys_to_data_elements_dict("fakeUrl", ('user', 'password'), {"headers": "some"},
                                                           "my_form")
        self.assertTrue('not_present_key' in update_mock.call_args[0])
        expected_value = dict(zip(self.keys + ['not_present_key'], self.dhis2_ids + ['new_created_dhis2_id']))
        self.assertEqual(actual_value, expected_value)
    @patch('requests.get')
    @patch('requests.Response', status_code=200)
    @patch('api_background.dhis2_export.__get_keys_from_db')
    def test_result_should_be_cached(self, get_keys_mock, response_mock, get_mock):
        """Repeated calls for the same form must hit dhis2 only once."""
        response_mock.json.return_value = {
            'dataElements': [{"id": item[0], "displayName": item[1]} for item in zip(self.dhis2_ids, self.keys)]
        }
        get_mock.return_value = response_mock
        get_keys_mock.return_value = self.keys
        for i in range(3):
            get_form_keys_to_data_elements_dict("fakeUrl", ('user', 'password'), {"headers": "some"},
                                                "my_form")
        get_mock.assert_called_once()
class GetOrganisationsTest(TestCase):
    """Unit test for fetching the dhis2 organisation code->id mapping."""
    # Canned organisation unit ids and their corresponding codes.
    dhis2_org_ids = ["FQ2o8UBlcrS", "M62VHgYT2n0", "uF1DLnZNlWe"]
    dhis2_codes = ["code1", "code2", "code3"]
    def setUp(self):
        # clear module state (cached responses etc.)
        api_background.dhis2_export = reload(api_background.dhis2_export)
    def tearDown(self):
        pass
    def get_organisations_mock(*args, **kwargs):
        # Side-effect function for the patched requests.get: serves the
        # full organisation listing for the unpaged URL, and the single
        # organisation's code for .../organisationUnits/<id> URLs.
        response_mock = MagicMock(status_code=200)
        ids = GetOrganisationsTest.dhis2_org_ids
        codes = GetOrganisationsTest.dhis2_codes
        url = args[0]
        if 'paging=False' in url:
            result = {"organisationUnits": [{"id": a_id} for a_id in ids]}
            response_mock.json.return_value = result
        else:
            # side effect fun to return proper responses for given dhis2 ids
            # e.g. get("http://localhost/organisationUnits/FQ2o8UBlcrS").json()
            # should return {"code": "code1"}
            def return_code(*_args, **_kwargs):
                dhis2_org_id = args[0].split('/')[-1]
                code = dict(zip(ids, codes))[dhis2_org_id]
                return {"code": code}
            response_mock.json.side_effect = return_code
        return response_mock
    @patch('requests.get', side_effect=get_organisations_mock)
    def test_should_return_valid_json(self, get_mock):
        actual_value = get_dhis2_organisations_codes_to_ids()
        expected_value = dict(zip(GetOrganisationsTest.dhis2_codes, GetOrganisationsTest.dhis2_org_ids))
        self.assertEqual(actual_value, expected_value)
    @patch('requests.get')
    @patch('requests.Response', status_code=200)
    def test_should_cache_result(self, response_mock, get_mock):
        # 1st call for http:localhost/organisationUnits
        # 2nd call for http:localhost/organisationUnits/id_foo
        response_mock.json.side_effect = [{'organisationUnits': [{"id": "id_foo"}]}, {"code": "code_bar"}]
        get_mock.return_value = response_mock
        get_dhis2_organisations_codes_to_ids()
        get_mock.assert_called()
        get_mock.reset_mock()
        # Second call must be served from the module cache, not HTTP.
        get_dhis2_organisations_codes_to_ids()
        get_mock.assert_not_called()
class PopulateDhis2LocationTest(TestCase):
    """Placeholder suite for dhis2 location population (not implemented)."""
    def setUp(self):
        """No fixtures required yet."""
    def tearDown(self):
        """Nothing to clean up yet."""
    def test_should_publish_locations_with_hierarchy(self):
        """TODO: implement."""
    def test_should_use_already_existing_organisations(self):
        """TODO: implement."""
class CreateDhis2OrganisationTest(TestCase):
    """Placeholder suite for dhis2 organisation creation (not implemented)."""
    def setUp(self):
        """No fixtures required yet."""
    def tearDown(self):
        """Nothing to clean up yet."""
    def test_should_create_dhis2_organisation(self):
        """TODO: implement."""
    def test_should_create_dhis2_organisation_with_default_open_date(self):
        """TODO: implement."""
    def test_should_handle_already_existing_organisation(self):
        """TODO: implement."""
class ProcessFormRecordsTest(TestCase):
    """Placeholder suite for form-record processing (not implemented)."""
    def setUp(self):
        """No fixtures required yet."""
    def tearDown(self):
        """Nothing to clean up yet."""
    def test_send_case_form_data(self):
        """TODO: implement."""
    def test_should_create_a_valid_json_payload(self):
        """TODO: implement."""
    def test_should_send_data_in_batches(self):
        """TODO: implement."""
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import parse_qsl
try:
import simplejson as json
except ImportError:
import json
from libcloud.common.types import ProviderError
from libcloud.compute.drivers.cloudstack import CloudStackNodeDriver, \
CloudStackAffinityGroupType
from libcloud.compute.types import LibcloudError, Provider, InvalidCredsError
from libcloud.compute.types import KeyPairDoesNotExistError
from libcloud.compute.types import NodeState
from libcloud.compute.providers import get_driver
from libcloud.test import unittest
from libcloud.test import MockHttpTestCase
from libcloud.test.compute import TestCaseMixin
from libcloud.test.file_fixtures import ComputeFileFixtures
class CloudStackCommonTestCase(TestCaseMixin):
driver_klass = CloudStackNodeDriver
    def setUp(self):
        """Wire the driver to the mock HTTP transport and reset mock state."""
        self.driver_klass.connectionCls.conn_classes = \
            (None, CloudStackMockHttp)
        self.driver = self.driver_klass('apikey', 'secret',
                                        path='/test/path',
                                        host='api.dummy.com')
        self.driver.path = '/test/path'
        self.driver.type = -1
        CloudStackMockHttp.type = None
        CloudStackMockHttp.fixture_tag = 'default'
        # No waiting between async-job polls during tests.
        self.driver.connection.poll_interval = 0.0
    def test_invalid_credentials(self):
        """Listing nodes with bad credentials raises InvalidCredsError."""
        CloudStackMockHttp.type = 'invalid_credentials'
        driver = self.driver_klass('invalid', 'invalid', path='/test/path',
                                   host='api.dummy.com')
        self.assertRaises(InvalidCredsError, driver.list_nodes)
    def test_import_keypair_from_string_api_error(self):
        """An API error on key import surfaces as ProviderError with message."""
        CloudStackMockHttp.type = 'api_error'
        name = 'test-pair'
        key_material = ''
        expected_msg = 'Public key is invalid'
        # NOTE(review): assertRaisesRegexp is the deprecated py2 spelling;
        # kept for py2.7 compatibility of this py2/py3 codebase.
        self.assertRaisesRegexp(ProviderError, expected_msg,
                                self.driver.import_key_pair_from_string,
                                name=name, key_material=key_material)
    def test_create_node_immediate_failure(self):
        """A deploy request rejected synchronously raises an exception."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        CloudStackMockHttp.fixture_tag = 'deployfail'
        self.assertRaises(
            Exception,
            self.driver.create_node,
            name='node-name', image=image, size=size)
    def test_create_node_delayed_failure(self):
        """A deploy whose async job later fails raises an exception."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        CloudStackMockHttp.fixture_tag = 'deployfail2'
        self.assertRaises(
            Exception,
            self.driver.create_node,
            name='node-name', image=image, size=size)
    def test_create_node_default_location_success(self):
        """Creating a node without a location uses the first zone."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        default_location = self.driver.list_locations()[0]
        node = self.driver.create_node(name='fred',
                                       image=image,
                                       size=size)
        self.assertEqual(node.name, 'fred')
        self.assertEqual(node.public_ips, [])
        self.assertEqual(node.private_ips, ['192.168.1.2'])
        self.assertEqual(node.extra['zone_id'], default_location.id)
    def test_create_node_ex_networks(self):
        """Creating a node with explicit networks attaches one NIC each."""
        CloudStackMockHttp.fixture_tag = 'deploynetworks'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        # Only networks in the node's zone are valid.
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='deploynetworks',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks)
        self.assertEqual(node.name, 'deploynetworks')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(len(node.private_ips), 2)
    def test_create_node_ex_ipaddress(self):
        """ex_ip_address pins the node's first private IP."""
        CloudStackMockHttp.fixture_tag = 'deployip'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        ipaddress = '10.1.0.128'
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='deployip',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks,
                                       ex_ip_address=ipaddress)
        self.assertEqual(node.name, 'deployip')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(node.private_ips[0], ipaddress)
    def test_create_node_ex_rootdisksize(self):
        """ex_rootdisksize (GB) sizes the ROOT volume accordingly."""
        CloudStackMockHttp.fixture_tag = 'rootdisksize'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        volumes = self.driver.list_volumes()
        rootdisksize = '50'
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='rootdisksize',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks,
                                       ex_rootdisksize=rootdisksize)
        self.assertEqual(node.name, 'rootdisksize')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(1, len(volumes))
        self.assertEqual('ROOT-69941', volumes[0].name)
        # 50 GB expressed in bytes.
        self.assertEqual(53687091200, volumes[0].size)
    def test_create_node_ex_start_vm_false(self):
        """ex_start_vm=False leaves the freshly created node STOPPED."""
        CloudStackMockHttp.fixture_tag = 'stoppedvm'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='stopped_vm',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks,
                                       ex_start_vm=False)
        self.assertEqual(node.name, 'stopped_vm')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(node.state, NodeState.STOPPED)
    def test_create_node_ex_security_groups(self):
        """ex_security_groups are recorded in the node's extra data."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        sg = [sg['name'] for sg in self.driver.ex_list_security_groups()]
        CloudStackMockHttp.fixture_tag = 'deploysecuritygroup'
        node = self.driver.create_node(name='test',
                                       location=location,
                                       image=image,
                                       size=size,
                                       ex_security_groups=sg)
        self.assertEqual(node.name, 'test')
        self.assertEqual(node.extra['security_group'], sg)
        self.assertEqual(node.id, 'fc4fd31a-16d3-49db-814a-56b39b9ef986')
    def test_create_node_ex_keyname(self):
        """ex_keyname is recorded as the node's key_name."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        CloudStackMockHttp.fixture_tag = 'deploykeyname'
        node = self.driver.create_node(name='test',
                                       location=location,
                                       image=image,
                                       size=size,
                                       ex_keyname='foobar')
        self.assertEqual(node.name, 'test')
        self.assertEqual(node.extra['key_name'], 'foobar')
    def test_create_node_project(self):
        """A node created inside a project carries the project name."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        project = self.driver.ex_list_projects()[0]
        CloudStackMockHttp.fixture_tag = 'deployproject'
        node = self.driver.create_node(name='test',
                                       location=location,
                                       image=image,
                                       size=size,
                                       project=project)
        self.assertEqual(node.name, 'TestNode')
        self.assertEqual(node.extra['project'], 'Test Project')
    def test_list_images_no_images_available(self):
        """An empty template listing yields an empty image list."""
        CloudStackMockHttp.fixture_tag = 'notemplates'
        images = self.driver.list_images()
        self.assertEqual(0, len(images))
    def test_list_images(self):
        """list_images mirrors the template fixture, id coerced to str."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listTemplates_default.json')
        templates = fixture['listtemplatesresponse']['template']
        images = self.driver.list_images()
        for i, image in enumerate(images):
            # NodeImage expects id to be a string,
            # the CloudStack fixture has an int
            tid = str(templates[i]['id'])
            tname = templates[i]['name']
            self.assertIsInstance(image.driver, CloudStackNodeDriver)
            self.assertEqual(image.id, tid)
            self.assertEqual(image.name, tname)
    def test_ex_list_disk_offerings(self):
        """The single fixture disk offering is returned with name and size."""
        diskOfferings = self.driver.ex_list_disk_offerings()
        self.assertEqual(1, len(diskOfferings))
        diskOffering, = diskOfferings
        self.assertEqual('Disk offer 1', diskOffering.name)
        self.assertEqual(10, diskOffering.size)
    def test_ex_list_networks(self):
        """ex_list_networks mirrors the fixture's network records."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listNetworks_default.json')
        fixture_networks = fixture['listnetworksresponse']['network']
        networks = self.driver.ex_list_networks()
        for i, network in enumerate(networks):
            self.assertEqual(network.id, fixture_networks[i]['id'])
            self.assertEqual(
                network.displaytext, fixture_networks[i]['displaytext'])
            self.assertEqual(network.name, fixture_networks[i]['name'])
            self.assertEqual(
                network.networkofferingid,
                fixture_networks[i]['networkofferingid'])
            self.assertEqual(network.zoneid, fixture_networks[i]['zoneid'])
    def test_ex_list_network_offerings(self):
        """ex_list_network_offerings mirrors the fixture's offerings."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listNetworkOfferings_default.json')
        fixture_networkoffers = \
            fixture['listnetworkofferingsresponse']['networkoffering']
        networkoffers = self.driver.ex_list_network_offerings()
        for i, networkoffer in enumerate(networkoffers):
            self.assertEqual(networkoffer.id, fixture_networkoffers[i]['id'])
            self.assertEqual(networkoffer.name,
                             fixture_networkoffers[i]['name'])
            self.assertEqual(networkoffer.display_text,
                             fixture_networkoffers[i]['displaytext'])
            self.assertEqual(networkoffer.for_vpc,
                             fixture_networkoffers[i]['forvpc'])
            self.assertEqual(networkoffer.guest_ip_type,
                             fixture_networkoffers[i]['guestiptype'])
            self.assertEqual(networkoffer.service_offering_id,
                             fixture_networkoffers[i]['serviceofferingid'])
    def test_ex_create_network(self):
        """ex_create_network returns a network matching the create fixture."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createNetwork_default.json')
        fixture_network = fixture['createnetworkresponse']['network']
        netoffer = self.driver.ex_list_network_offerings()[0]
        location = self.driver.list_locations()[0]
        network = self.driver.ex_create_network(display_text='test',
                                                name='test',
                                                network_offering=netoffer,
                                                location=location,
                                                gateway='10.1.1.1',
                                                netmask='255.255.255.0',
                                                network_domain='cloud.local',
                                                vpc_id="2",
                                                project_id="2")
        self.assertEqual(network.name, fixture_network['name'])
        self.assertEqual(network.displaytext, fixture_network['displaytext'])
        self.assertEqual(network.id, fixture_network['id'])
        self.assertEqual(network.extra['gateway'], fixture_network['gateway'])
        self.assertEqual(network.extra['netmask'], fixture_network['netmask'])
        self.assertEqual(network.networkofferingid,
                         fixture_network['networkofferingid'])
        self.assertEqual(network.extra['vpc_id'], fixture_network['vpcid'])
        self.assertEqual(network.extra['project_id'],
                         fixture_network['projectid'])
def test_ex_delete_network(self):
network = self.driver.ex_list_networks()[0]
result = self.driver.ex_delete_network(network=network)
self.assertTrue(result)
    def test_ex_list_nics(self):
        """NICs listed for a node match the listNics fixture entries."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listNics_default.json')
        fixture_nic = fixture['listnicsresponse']['nic']
        vm = self.driver.list_nodes()[0]
        nics = self.driver.ex_list_nics(vm)
        for i, nic in enumerate(nics):
            self.assertEqual(nic.id, fixture_nic[i]['id'])
            self.assertEqual(nic.network_id,
                             fixture_nic[i]['networkid'])
            self.assertEqual(nic.net_mask,
                             fixture_nic[i]['netmask'])
            self.assertEqual(nic.gateway,
                             fixture_nic[i]['gateway'])
            self.assertEqual(nic.ip_address,
                             fixture_nic[i]['ipaddress'])
            self.assertEqual(nic.is_default,
                             fixture_nic[i]['isdefault'])
            self.assertEqual(nic.mac_address,
                             fixture_nic[i]['macaddress'])
def test_ex_add_nic_to_node(self):
vm = self.driver.list_nodes()[0]
network = self.driver.ex_list_networks()[0]
ip = "10.1.4.123"
result = self.driver.ex_attach_nic_to_node(node=vm, network=network, ip_address=ip)
self.assertTrue(result)
def test_ex_remove_nic_from_node(self):
vm = self.driver.list_nodes()[0]
nic = self.driver.ex_list_nics(node=vm)[0]
result = self.driver.ex_detach_nic_from_node(node=vm, nic=nic)
self.assertTrue(result)
    def test_ex_list_vpc_offerings(self):
        """VPC offerings match the listVPCOfferings fixture."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listVPCOfferings_default.json')
        fixture_vpcoffers = \
            fixture['listvpcofferingsresponse']['vpcoffering']
        vpcoffers = self.driver.ex_list_vpc_offerings()
        for i, vpcoffer in enumerate(vpcoffers):
            self.assertEqual(vpcoffer.id, fixture_vpcoffers[i]['id'])
            self.assertEqual(vpcoffer.name,
                             fixture_vpcoffers[i]['name'])
            self.assertEqual(vpcoffer.display_text,
                             fixture_vpcoffers[i]['displaytext'])
    def test_ex_list_vpcs(self):
        """VPCs match the listVPCs fixture."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listVPCs_default.json')
        fixture_vpcs = fixture['listvpcsresponse']['vpc']
        vpcs = self.driver.ex_list_vpcs()
        for i, vpc in enumerate(vpcs):
            self.assertEqual(vpc.id, fixture_vpcs[i]['id'])
            self.assertEqual(vpc.display_text, fixture_vpcs[i]['displaytext'])
            self.assertEqual(vpc.name, fixture_vpcs[i]['name'])
            self.assertEqual(vpc.vpc_offering_id,
                             fixture_vpcs[i]['vpcofferingid'])
            self.assertEqual(vpc.zone_id, fixture_vpcs[i]['zoneid'])
    def test_ex_list_routers(self):
        """Routers match the listRouters fixture."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listRouters_default.json')
        fixture_routers = fixture['listroutersresponse']['router']
        routers = self.driver.ex_list_routers()
        for i, router in enumerate(routers):
            self.assertEqual(router.id, fixture_routers[i]['id'])
            self.assertEqual(router.name, fixture_routers[i]['name'])
            self.assertEqual(router.state, fixture_routers[i]['state'])
            self.assertEqual(router.public_ip, fixture_routers[i]['publicip'])
            self.assertEqual(router.vpc_id, fixture_routers[i]['vpcid'])
    def test_ex_create_vpc(self):
        """A created VPC gets the id from the createVPC fixture."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createVPC_default.json')
        fixture_vpc = fixture['createvpcresponse']
        vpcoffer = self.driver.ex_list_vpc_offerings()[0]
        vpc = self.driver.ex_create_vpc(cidr='10.1.1.0/16',
                                        display_text='cloud.local',
                                        name='cloud.local',
                                        vpc_offering=vpcoffer,
                                        zone_id="2")
        self.assertEqual(vpc.id, fixture_vpc['id'])
def test_ex_delete_vpc(self):
vpc = self.driver.ex_list_vpcs()[0]
result = self.driver.ex_delete_vpc(vpc=vpc)
self.assertTrue(result)
    def test_ex_create_network_acllist(self):
        """A created ACL list gets the id from the fixture response."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createNetworkACLList_default.json')
        fixture_network_acllist = fixture['createnetworkacllistresponse']
        vpc = self.driver.ex_list_vpcs()[0]
        network_acllist = self.driver.ex_create_network_acllist(
            name='test_acllist',
            vpc_id=vpc.id,
            description='test description')
        self.assertEqual(network_acllist.id, fixture_network_acllist['id'])
def test_ex_list_network_acllist(self):
_, fixture = CloudStackMockHttp()._load_fixture(
'listNetworkACLLists_default.json')
fixture_acllist = \
fixture['listnetworkacllistsresponse']['networkacllist']
acllist = self.driver.ex_list_network_acllists()
for i, acllist in enumerate(acllist):
self.assertEqual(acllist.id,
fixture_acllist[i]['id'])
self.assertEqual(acllist.name,
fixture_acllist[i]['name'])
self.assertEqual(acllist.description,
fixture_acllist[i]['description'])
    def test_ex_create_network_acl(self):
        """A created ACL rule gets the id from the fixture response."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createNetworkACL_default.json')
        fixture_network_acllist = fixture['createnetworkaclresponse']
        acllist = self.driver.ex_list_network_acllists()[0]
        network_acl = self.driver.ex_create_network_acl(
            protocol='test_acllist',
            acl_id=acllist.id,
            cidr_list='',
            start_port='80',
            end_port='80')
        self.assertEqual(network_acl.id, fixture_network_acllist['id'])
    def test_ex_list_projects(self):
        """Projects match the listProjects fixture, including extras."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listProjects_default.json')
        fixture_projects = fixture['listprojectsresponse']['project']
        projects = self.driver.ex_list_projects()
        for i, project in enumerate(projects):
            self.assertEqual(project.id, fixture_projects[i]['id'])
            self.assertEqual(
                project.display_text, fixture_projects[i]['displaytext'])
            self.assertEqual(project.name, fixture_projects[i]['name'])
            self.assertEqual(
                project.extra['domainid'],
                fixture_projects[i]['domainid'])
            self.assertEqual(
                project.extra['cpulimit'],
                fixture_projects[i]['cpulimit'])
            # Note -1 represents unlimited
            self.assertEqual(project.extra['networklimit'], -1)
    def test_create_volume(self):
        """create_volume returns a volume with the requested name and size."""
        volumeName = 'vol-0'
        location = self.driver.list_locations()[0]
        volume = self.driver.create_volume(10, volumeName, location)
        self.assertEqual(volumeName, volume.name)
        self.assertEqual(10, volume.size)
def test_create_volume_no_noncustomized_offering_with_size(self):
"""If the sizes of disk offerings are not configurable and there
are no disk offerings with the requested size, an exception should
be thrown."""
location = self.driver.list_locations()[0]
self.assertRaises(
LibcloudError,
self.driver.create_volume,
'vol-0', location, 11)
    def test_create_volume_with_custom_disk_size_offering(self):
        """Volume creation succeeds when offering sizes are customizable."""
        # Switch the mock backend to the custom-disk-size fixture set.
        CloudStackMockHttp.fixture_tag = 'withcustomdisksize'
        volumeName = 'vol-0'
        location = self.driver.list_locations()[0]
        volume = self.driver.create_volume(10, volumeName, location)
        self.assertEqual(volumeName, volume.name)
    def test_attach_volume(self):
        """A freshly created volume can be attached to a node."""
        node = self.driver.list_nodes()[0]
        volumeName = 'vol-0'
        location = self.driver.list_locations()[0]
        volume = self.driver.create_volume(10, volumeName, location)
        attachReturnVal = self.driver.attach_volume(volume, node)
        self.assertTrue(attachReturnVal)
def test_detach_volume(self):
volumeName = 'gre-test-volume'
location = self.driver.list_locations()[0]
volume = self.driver.create_volume(10, volumeName, location)
res = self.driver.detach_volume(volume)
self.assertTrue(res)
def test_destroy_volume(self):
volumeName = 'gre-test-volume'
location = self.driver.list_locations()[0]
volume = self.driver.create_volume(10, volumeName, location)
res = self.driver.destroy_volume(volume)
self.assertTrue(res)
    def test_list_volumes(self):
        """Exactly one volume is listed and its name matches the fixture."""
        volumes = self.driver.list_volumes()
        self.assertEqual(1, len(volumes))
        self.assertEqual('ROOT-69942', volumes[0].name)
    def test_ex_get_volume(self):
        """A single volume can be fetched by numeric id."""
        volume = self.driver.ex_get_volume(2600)
        self.assertEqual('ROOT-69942', volume.name)
    def test_list_nodes(self):
        """Two nodes are listed with expected id, name and extras."""
        nodes = self.driver.list_nodes()
        self.assertEqual(2, len(nodes))
        self.assertEqual('test', nodes[0].name)
        self.assertEqual('2600', nodes[0].id)
        self.assertEqual([], nodes[0].extra['security_group'])
        self.assertEqual(None, nodes[0].extra['key_name'])
    def test_ex_get_node(self):
        """A single node can be fetched by numeric id."""
        node = self.driver.ex_get_node(2600)
        self.assertEqual('test', node.name)
        self.assertEqual('2600', node.id)
        self.assertEqual([], node.extra['security_group'])
        self.assertEqual(None, node.extra['key_name'])
def test_ex_get_node_doesnt_exist(self):
self.assertRaises(Exception, self.driver.ex_get_node(26), node_id=26)
    def test_list_locations(self):
        """The first location matches the zones fixture."""
        location = self.driver.list_locations()[0]
        self.assertEqual('1', location.id)
        self.assertEqual('Sydney', location.name)
    def test_list_sizes(self):
        """Service offerings are mapped to sizes with name/id/ram intact."""
        sizes = self.driver.list_sizes()
        self.assertEqual('Compute Micro PRD', sizes[0].name)
        self.assertEqual('105', sizes[0].id)
        self.assertEqual(384, sizes[0].ram)
        self.assertEqual('Compute Large PRD', sizes[2].name)
        self.assertEqual('69', sizes[2].id)
        self.assertEqual(6964, sizes[2].ram)
def test_ex_start_node(self):
node = self.driver.list_nodes()[0]
res = node.ex_start()
self.assertEqual('Starting', res)
def test_ex_stop_node(self):
node = self.driver.list_nodes()[0]
res = node.ex_stop()
self.assertEqual('Stopped', res)
def test_destroy_node(self):
node = self.driver.list_nodes()[0]
res = node.destroy()
self.assertTrue(res)
def test_expunge_node(self):
node = self.driver.list_nodes()[0]
res = self.driver.destroy_node(node, ex_expunge=True)
self.assertTrue(res)
def test_reboot_node(self):
node = self.driver.list_nodes()[0]
res = node.reboot()
self.assertTrue(res)
    def test_list_key_pairs(self):
        """Keypairs are listed via both the new and deprecated APIs."""
        keypairs = self.driver.list_key_pairs()
        fingerprint = '00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:' + \
                      '00:00:00:00:00'
        self.assertEqual(keypairs[0].name, 'cs-keypair')
        self.assertEqual(keypairs[0].fingerprint, fingerprint)
        # Test old and deprecated way
        keypairs = self.driver.ex_list_keypairs()
        self.assertEqual(keypairs[0]['name'], 'cs-keypair')
        self.assertEqual(keypairs[0]['fingerprint'], fingerprint)
    def test_list_key_pairs_no_keypair_key(self):
        """An empty list is returned when the response has no keypair key."""
        CloudStackMockHttp.fixture_tag = 'no_keys'
        keypairs = self.driver.list_key_pairs()
        self.assertEqual(keypairs, [])
    def test_get_key_pair(self):
        """A single keypair can be fetched by name."""
        CloudStackMockHttp.fixture_tag = 'get_one'
        key_pair = self.driver.get_key_pair(name='cs-keypair')
        self.assertEqual(key_pair.name, 'cs-keypair')
    def test_get_key_pair_doesnt_exist(self):
        """Fetching an unknown keypair raises KeyPairDoesNotExistError."""
        CloudStackMockHttp.fixture_tag = 'get_one_doesnt_exist'
        self.assertRaises(KeyPairDoesNotExistError, self.driver.get_key_pair,
                          name='does-not-exist')
    def test_create_keypair(self):
        """Keypair creation works via both new and deprecated APIs."""
        key_pair = self.driver.create_key_pair(name='test-keypair')
        self.assertEqual(key_pair.name, 'test-keypair')
        self.assertTrue(key_pair.fingerprint is not None)
        self.assertTrue(key_pair.private_key is not None)
        # Test old and deprecated way
        res = self.driver.ex_create_keypair(name='test-keypair')
        self.assertEqual(res['name'], 'test-keypair')
        self.assertTrue(res['fingerprint'] is not None)
        self.assertTrue(res['privateKey'] is not None)
    def test_import_keypair_from_file(self):
        """A public key file can be imported; fingerprint is computed."""
        fingerprint = 'c4:a1:e5:d4:50:84:a9:4c:6b:22:ee:d6:57:02:b8:15'
        path = os.path.join(os.path.dirname(__file__), 'fixtures',
                            'cloudstack',
                            'dummy_rsa.pub')
        key_pair = self.driver.import_key_pair_from_file('foobar', path)
        self.assertEqual(key_pair.name, 'foobar')
        self.assertEqual(key_pair.fingerprint, fingerprint)
        # Test old and deprecated way
        res = self.driver.ex_import_keypair('foobar', path)
        self.assertEqual(res['keyName'], 'foobar')
        self.assertEqual(res['keyFingerprint'], fingerprint)
    def test_ex_import_keypair_from_string(self):
        """A public key string can be imported; fingerprint is computed."""
        fingerprint = 'c4:a1:e5:d4:50:84:a9:4c:6b:22:ee:d6:57:02:b8:15'
        path = os.path.join(os.path.dirname(__file__), 'fixtures',
                            'cloudstack',
                            'dummy_rsa.pub')
        fh = open(path)
        key_material = fh.read()
        fh.close()
        key_pair = self.driver.import_key_pair_from_string('foobar', key_material=key_material)
        self.assertEqual(key_pair.name, 'foobar')
        self.assertEqual(key_pair.fingerprint, fingerprint)
        # Test old and deprecated way
        res = self.driver.ex_import_keypair_from_string('foobar', key_material=key_material)
        self.assertEqual(res['keyName'], 'foobar')
        self.assertEqual(res['keyFingerprint'], fingerprint)
    def test_delete_key_pair(self):
        """Keypair deletion works via both new and deprecated APIs."""
        key_pair = self.driver.list_key_pairs()[0]
        res = self.driver.delete_key_pair(key_pair=key_pair)
        self.assertTrue(res)
        # Test old and deprecated way
        res = self.driver.ex_delete_keypair(keypair='cs-keypair')
        self.assertTrue(res)
    def test_ex_list_security_groups(self):
        """Two security groups are returned with expected names."""
        groups = self.driver.ex_list_security_groups()
        self.assertEqual(2, len(groups))
        self.assertEqual(groups[0]['name'], 'default')
        self.assertEqual(groups[1]['name'], 'mongodb')
    def test_ex_list_security_groups_no_securitygroup_key(self):
        """An empty list is returned when the response has no group key."""
        CloudStackMockHttp.fixture_tag = 'no_groups'
        groups = self.driver.ex_list_security_groups()
        self.assertEqual(groups, [])
def test_ex_create_security_group(self):
group = self.driver.ex_create_security_group(name='MySG')
self.assertEqual(group['name'], 'MySG')
def test_ex_delete_security_group(self):
res = self.driver.ex_delete_security_group(name='MySG')
self.assertTrue(res)
    def test_ex_authorize_security_group_ingress(self):
        """An authorized ingress rule is echoed back in the response."""
        res = self.driver.ex_authorize_security_group_ingress('test_sg',
                                                              'udp',
                                                              '0.0.0.0/0',
                                                              '0',
                                                              '65535')
        self.assertEqual(res.get('name'), 'test_sg')
        self.assertTrue('ingressrule' in res)
        rules = res['ingressrule']
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertEqual(rule['cidr'], '0.0.0.0/0')
        self.assertEqual(rule['endport'], 65535)
        self.assertEqual(rule['protocol'], 'udp')
        self.assertEqual(rule['startport'], 0)
    def test_ex_create_affinity_group(self):
        """A created affinity group keeps its name and typed group type."""
        res = self.driver.ex_create_affinity_group('MyAG2',
                                                   CloudStackAffinityGroupType('MyAGType'))
        self.assertEqual(res.name, 'MyAG2')
        self.assertIsInstance(res.type, CloudStackAffinityGroupType)
        self.assertEqual(res.type.type, 'MyAGType')
    def test_ex_create_affinity_group_already_exists(self):
        """Creating a duplicate affinity group raises LibcloudError."""
        self.assertRaises(LibcloudError,
                          self.driver.ex_create_affinity_group,
                          'MyAG', CloudStackAffinityGroupType('MyAGType'))
    def test_delete_ex_affinity_group(self):
        """A freshly created affinity group can be deleted."""
        afg = self.driver.ex_create_affinity_group('MyAG3',
                                                   CloudStackAffinityGroupType('MyAGType'))
        res = self.driver.ex_delete_affinity_group(afg)
        self.assertTrue(res)
    def test_ex_update_node_affinity_group(self):
        """Updating a node's affinity groups reflects the assigned group."""
        affinity_group_list = self.driver.ex_list_affinity_groups()
        nodes = self.driver.list_nodes()
        node = self.driver.ex_update_node_affinity_group(nodes[0],
                                                         affinity_group_list)
        self.assertEqual(node.extra['affinity_group'][0],
                         affinity_group_list[0].id)
    def test_ex_list_affinity_groups(self):
        """One affinity group is listed with expected id, name and type."""
        res = self.driver.ex_list_affinity_groups()
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0].id, '11112')
        self.assertEqual(res[0].name, 'MyAG')
        self.assertIsInstance(res[0].type, CloudStackAffinityGroupType)
        self.assertEqual(res[0].type.type, 'MyAGType')
    def test_ex_list_affinity_group_types(self):
        """One affinity group type is listed with the expected type name."""
        res = self.driver.ex_list_affinity_group_types()
        self.assertEqual(len(res), 1)
        self.assertIsInstance(res[0], CloudStackAffinityGroupType)
        self.assertEqual(res[0].type, 'MyAGType')
    def test_ex_list_public_ips(self):
        """The first public IP matches the fixture address and VM binding."""
        ips = self.driver.ex_list_public_ips()
        self.assertEqual(ips[0].address, '1.1.1.116')
        self.assertEqual(ips[0].virtualmachine_id, '2600')
    def test_ex_allocate_public_ip(self):
        """A newly allocated public IP carries the fixture address and id."""
        addr = self.driver.ex_allocate_public_ip()
        self.assertEqual(addr.address, '7.5.6.1')
        self.assertEqual(addr.id, '10987171-8cc9-4d0a-b98f-1698c09ddd2d')
def test_ex_release_public_ip(self):
addresses = self.driver.ex_list_public_ips()
res = self.driver.ex_release_public_ip(addresses[0])
self.assertTrue(res)
    def test_ex_create_port_forwarding_rule(self):
        """A created port-forwarding rule echoes back all its parameters."""
        node = self.driver.list_nodes()[0]
        address = self.driver.ex_list_public_ips()[0]
        private_port = 33
        private_end_port = 34
        public_port = 33
        public_end_port = 34
        openfirewall = True
        protocol = 'TCP'
        rule = self.driver.ex_create_port_forwarding_rule(node,
                                                          address,
                                                          private_port,
                                                          public_port,
                                                          protocol,
                                                          public_end_port,
                                                          private_end_port,
                                                          openfirewall)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertEqual(rule.public_port, public_port)
        self.assertEqual(rule.public_end_port, public_end_port)
        self.assertEqual(rule.private_port, private_port)
        self.assertEqual(rule.private_end_port, private_end_port)
    def test_ex_list_firewall_rules(self):
        """One TCP firewall rule is listed; ICMP fields stay None."""
        rules = self.driver.ex_list_firewall_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertEqual(rule.address.address, '1.1.1.116')
        self.assertEqual(rule.protocol, 'tcp')
        self.assertEqual(rule.cidr_list, '192.168.0.0/16')
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, '33')
        self.assertEqual(rule.end_port, '34')
    def test_ex_list_firewall_rules_icmp(self):
        """One ICMP firewall rule is listed; port fields stay None."""
        CloudStackMockHttp.fixture_tag = 'firewallicmp'
        rules = self.driver.ex_list_firewall_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertEqual(rule.address.address, '1.1.1.116')
        self.assertEqual(rule.protocol, 'icmp')
        self.assertEqual(rule.cidr_list, '192.168.0.0/16')
        self.assertEqual(rule.icmp_code, 0)
        self.assertEqual(rule.icmp_type, 8)
        self.assertIsNone(rule.start_port)
        self.assertIsNone(rule.end_port)
def test_ex_delete_firewall_rule(self):
rules = self.driver.ex_list_firewall_rules()
res = self.driver.ex_delete_firewall_rule(rules[0])
self.assertTrue(res)
    def test_ex_create_firewall_rule(self):
        """A created TCP firewall rule echoes ports; ICMP fields are None."""
        address = self.driver.ex_list_public_ips()[0]
        cidr_list = '192.168.0.0/16'
        protocol = 'TCP'
        start_port = 33
        end_port = 34
        rule = self.driver.ex_create_firewall_rule(address,
                                                   cidr_list,
                                                   protocol,
                                                   start_port=start_port,
                                                   end_port=end_port)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, start_port)
        self.assertEqual(rule.end_port, end_port)
    def test_ex_create_firewall_rule_icmp(self):
        """A created ICMP firewall rule echoes code/type; ports are None."""
        address = self.driver.ex_list_public_ips()[0]
        cidr_list = '192.168.0.0/16'
        protocol = 'icmp'
        icmp_code = 0
        icmp_type = 8
        rule = self.driver.ex_create_firewall_rule(address,
                                                   cidr_list,
                                                   protocol,
                                                   icmp_code=icmp_code,
                                                   icmp_type=icmp_type)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertEqual(rule.icmp_code, 0)
        self.assertEqual(rule.icmp_type, 8)
        self.assertIsNone(rule.start_port)
        self.assertIsNone(rule.end_port)
    def test_ex_list_egress_firewall_rules(self):
        """One egress firewall rule is listed with the fixture fields."""
        rules = self.driver.ex_list_egress_firewall_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertEqual(rule.network_id, '874be2ca-20a7-4360-80e9-7356c0018c0b')
        self.assertEqual(rule.cidr_list, '192.168.0.0/16')
        self.assertEqual(rule.protocol, 'tcp')
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, '80')
        self.assertEqual(rule.end_port, '80')
def test_ex_delete_egress_firewall_rule(self):
rules = self.driver.ex_list_egress_firewall_rules()
res = self.driver.ex_delete_egress_firewall_rule(rules[0])
self.assertTrue(res)
    def test_ex_create_egress_firewall_rule(self):
        """A created egress rule echoes back network, cidr and ports."""
        network_id = '874be2ca-20a7-4360-80e9-7356c0018c0b'
        cidr_list = '192.168.0.0/16'
        protocol = 'TCP'
        start_port = 33
        end_port = 34
        rule = self.driver.ex_create_egress_firewall_rule(
            network_id,
            cidr_list,
            protocol,
            start_port=start_port,
            end_port=end_port)
        self.assertEqual(rule.network_id, network_id)
        self.assertEqual(rule.cidr_list, cidr_list)
        self.assertEqual(rule.protocol, protocol)
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, start_port)
        self.assertEqual(rule.end_port, end_port)
    def test_ex_list_port_forwarding_rules(self):
        """One port-forwarding rule is listed with the fixture fields."""
        rules = self.driver.ex_list_port_forwarding_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertTrue(rule.node)
        self.assertEqual(rule.protocol, 'tcp')
        self.assertEqual(rule.public_port, '33')
        self.assertEqual(rule.public_end_port, '34')
        self.assertEqual(rule.private_port, '33')
        self.assertEqual(rule.private_end_port, '34')
        self.assertEqual(rule.address.address, '1.1.1.116')
    def test_ex_delete_port_forwarding_rule(self):
        """Deleting a node's port-forwarding rule reports success."""
        node = self.driver.list_nodes()[0]
        rule = self.driver.ex_list_port_forwarding_rules()[0]
        res = self.driver.ex_delete_port_forwarding_rule(node, rule)
        self.assertTrue(res)
    def test_node_ex_delete_port_forwarding_rule(self):
        """Deleting a rule via the node helper also updates node.extra."""
        node = self.driver.list_nodes()[0]
        self.assertEqual(len(node.extra['port_forwarding_rules']), 1)
        node.extra['port_forwarding_rules'][0].delete()
        self.assertEqual(len(node.extra['port_forwarding_rules']), 0)
    def test_node_ex_create_port_forwarding_rule(self):
        """Creating a rule via the node helper appends it to node.extra."""
        node = self.driver.list_nodes()[0]
        self.assertEqual(len(node.extra['port_forwarding_rules']), 1)
        address = self.driver.ex_list_public_ips()[0]
        private_port = 33
        private_end_port = 34
        public_port = 33
        public_end_port = 34
        openfirewall = True
        protocol = 'TCP'
        rule = node.ex_create_port_forwarding_rule(address,
                                                   private_port,
                                                   public_port,
                                                   protocol,
                                                   public_end_port,
                                                   private_end_port,
                                                   openfirewall)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertEqual(rule.public_port, public_port)
        self.assertEqual(rule.public_end_port, public_end_port)
        self.assertEqual(rule.private_port, private_port)
        self.assertEqual(rule.private_end_port, private_end_port)
        self.assertEqual(len(node.extra['port_forwarding_rules']), 2)
    def test_ex_list_ip_forwarding_rules(self):
        """One IP forwarding rule is listed with the fixture fields."""
        rules = self.driver.ex_list_ip_forwarding_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertTrue(rule.node)
        self.assertEqual(rule.protocol, 'tcp')
        self.assertEqual(rule.start_port, 33)
        self.assertEqual(rule.end_port, 34)
        self.assertEqual(rule.address.address, '1.1.1.116')
    def test_ex_limits(self):
        """Account resource limits map to the expected keys (-1=unlimited)."""
        limits = self.driver.ex_limits()
        self.assertEqual(limits['max_images'], 20)
        self.assertEqual(limits['max_networks'], 20)
        self.assertEqual(limits['max_public_ips'], -1)
        self.assertEqual(limits['max_vpc'], 20)
        self.assertEqual(limits['max_instances'], 20)
        self.assertEqual(limits['max_projects'], -1)
        self.assertEqual(limits['max_volumes'], 20)
        self.assertEqual(limits['max_snapshots'], 20)
def test_ex_create_tags(self):
node = self.driver.list_nodes()[0]
tags = {'Region': 'Canada'}
resp = self.driver.ex_create_tags([node.id], 'UserVm', tags)
self.assertTrue(resp)
def test_ex_delete_tags(self):
node = self.driver.list_nodes()[0]
tag_keys = ['Region']
resp = self.driver.ex_delete_tags([node.id], 'UserVm', tag_keys)
self.assertTrue(resp)
    def test_list_snapshots(self):
        """Three snapshots are listed; the first matches the fixture."""
        snapshots = self.driver.list_snapshots()
        self.assertEqual(len(snapshots), 3)
        snap = snapshots[0]
        self.assertEqual(snap.id, 188402)
        self.assertEqual(snap.extra['name'], "i-123-87654-VM_ROOT-12344_20140917105548")
        self.assertEqual(snap.extra['volume_id'], 89341)
    def test_create_volume_snapshot(self):
        """A snapshot of the first volume carries the fixture fields."""
        volume = self.driver.list_volumes()[0]
        snapshot = self.driver.create_volume_snapshot(volume)
        self.assertEqual(snapshot.id, 190547)
        self.assertEqual(snapshot.extra['name'], "i-123-87654-VM_ROOT-23456_20140917105548")
        self.assertEqual(snapshot.extra['volume_id'], "fe1ada16-57a0-40ae-b577-01a153690fb4")
def test_destroy_volume_snapshot(self):
snapshot = self.driver.list_snapshots()[0]
resp = self.driver.destroy_volume_snapshot(snapshot)
self.assertTrue(resp)
    def test_ex_create_snapshot_template(self):
        """A template created from a snapshot carries the fixture fields."""
        snapshot = self.driver.list_snapshots()[0]
        template = self.driver.ex_create_snapshot_template(snapshot, "test-libcloud-template", 99)
        self.assertEqual(template.id, '10260')
        self.assertEqual(template.name, "test-libcloud-template")
        self.assertEqual(template.extra['displaytext'], "test-libcloud-template")
        self.assertEqual(template.extra['hypervisor'], "VMware")
        self.assertEqual(template.extra['os'], "Other Linux (64-bit)")
    def test_ex_list_os_types(self):
        """All 146 OS types are listed; the first matches the fixture."""
        os_types = self.driver.ex_list_os_types()
        self.assertEqual(len(os_types), 146)
        self.assertEqual(os_types[0]['id'], 69)
        self.assertEqual(os_types[0]['oscategoryid'], 7)
        self.assertEqual(os_types[0]['description'], "Asianux 3(32-bit)")
    def test_ex_list_vpn_gateways(self):
        """One VPN gateway is listed with the fixture fields."""
        vpn_gateways = self.driver.ex_list_vpn_gateways()
        self.assertEqual(len(vpn_gateways), 1)
        self.assertEqual(vpn_gateways[0].id, 'cffa0cab-d1da-42a7-92f6-41379267a29f')
        self.assertEqual(vpn_gateways[0].account, 'some_account')
        self.assertEqual(vpn_gateways[0].domain, 'some_domain')
        self.assertEqual(vpn_gateways[0].domain_id, '9b397dea-25ef-4c5d-b47d-627eaebe8ed8')
        self.assertEqual(vpn_gateways[0].public_ip, '1.2.3.4')
        self.assertEqual(vpn_gateways[0].vpc_id, '4d25e181-8850-4d52-8ecb-a6f35bbbabde')
    def test_ex_create_vpn_gateway(self):
        """A VPN gateway created for a VPC carries the fixture fields."""
        vpc = self.driver.ex_list_vpcs()[0]
        vpn_gateway = self.driver.ex_create_vpn_gateway(vpc)
        self.assertEqual(vpn_gateway.id, '5ef6794e-cec8-4018-9fef-c4dacbadee14')
        self.assertEqual(vpn_gateway.account, 'some_account')
        self.assertEqual(vpn_gateway.domain, 'some_domain')
        self.assertEqual(vpn_gateway.domain_id, '9b397dea-25ef-4c5d-b47d-627eaebe8ed8')
        self.assertEqual(vpn_gateway.public_ip, '2.3.4.5')
        self.assertEqual(vpn_gateway.vpc_id, vpc.id)
def test_ex_delete_vpn_gateway(self):
vpn_gateway = self.driver.ex_list_vpn_gateways()[0]
self.assertTrue(vpn_gateway.delete())
    def test_ex_list_vpn_customer_gateways(self):
        """One VPN customer gateway is listed with the fixture fields."""
        vpn_customer_gateways = self.driver.ex_list_vpn_customer_gateways()
        self.assertEqual(len(vpn_customer_gateways), 1)
        self.assertEqual(vpn_customer_gateways[0].id, 'ea67eaae-1c2a-4e65-b910-441e77f69bea')
        self.assertEqual(vpn_customer_gateways[0].cidr_list, '10.2.2.0/24')
        self.assertEqual(vpn_customer_gateways[0].esp_policy, '3des-md5')
        self.assertEqual(vpn_customer_gateways[0].gateway, '10.2.2.1')
        self.assertEqual(vpn_customer_gateways[0].ike_policy, '3des-md5')
        self.assertEqual(vpn_customer_gateways[0].ipsec_psk, 'some_psk')
    def test_ex_create_vpn_customer_gateway(self):
        """A created VPN customer gateway echoes back its parameters."""
        vpn_customer_gateway = self.driver.ex_create_vpn_customer_gateway(
            cidr_list='10.0.0.0/24',
            esp_policy='3des-md5',
            gateway='10.0.0.1',
            ike_policy='3des-md5',
            ipsec_psk='ipsecpsk')
        self.assertEqual(vpn_customer_gateway.id, 'cef3c766-116a-4e83-9844-7d08ab7d3fd4')
        self.assertEqual(vpn_customer_gateway.esp_policy, '3des-md5')
        self.assertEqual(vpn_customer_gateway.gateway, '10.0.0.1')
        self.assertEqual(vpn_customer_gateway.ike_policy, '3des-md5')
        self.assertEqual(vpn_customer_gateway.ipsec_psk, 'ipsecpsk')
def test_ex_ex_delete_vpn_customer_gateway(self):
vpn_customer_gateway = self.driver.ex_list_vpn_customer_gateways()[0]
self.assertTrue(vpn_customer_gateway.delete())
    def test_ex_list_vpn_connections(self):
        """One VPN connection is listed with the fixture fields."""
        vpn_connections = self.driver.ex_list_vpn_connections()
        self.assertEqual(len(vpn_connections), 1)
        self.assertEqual(vpn_connections[0].id, '8f482d9a-6cee-453b-9e78-b0e1338ffce9')
        self.assertEqual(vpn_connections[0].passive, False)
        self.assertEqual(vpn_connections[0].vpn_customer_gateway_id, 'ea67eaae-1c2a-4e65-b910-441e77f69bea')
        self.assertEqual(vpn_connections[0].vpn_gateway_id, 'cffa0cab-d1da-42a7-92f6-41379267a29f')
        self.assertEqual(vpn_connections[0].state, 'Connected')
    def test_ex_create_vpn_connection(self):
        """A connection created between gateways carries the fixture fields."""
        vpn_customer_gateway = self.driver.ex_list_vpn_customer_gateways()[0]
        vpn_gateway = self.driver.ex_list_vpn_gateways()[0]
        vpn_connection = self.driver.ex_create_vpn_connection(
            vpn_customer_gateway,
            vpn_gateway)
        self.assertEqual(vpn_connection.id, 'f45c3af8-f909-4f16-9d40-ed4409c575f8')
        self.assertEqual(vpn_connection.passive, False)
        self.assertEqual(vpn_connection.vpn_customer_gateway_id, 'ea67eaae-1c2a-4e65-b910-441e77f69bea')
        self.assertEqual(vpn_connection.vpn_gateway_id, 'cffa0cab-d1da-42a7-92f6-41379267a29f')
        self.assertEqual(vpn_connection.state, 'Connected')
def test_ex_delete_vpn_connection(self):
vpn_connection = self.driver.ex_list_vpn_connections()[0]
self.assertTrue(vpn_connection.delete())
class CloudStackTestCase(CloudStackCommonTestCase, unittest.TestCase):
    """Runs the common CloudStack test suite plus URL-handling checks."""
    def test_driver_instantiation(self):
        """host/port/path are parsed correctly from endpoint URLs."""
        urls = [
            'http://api.exoscale.ch/compute1',  # http, default port
            'https://api.exoscale.ch/compute2',  # https, default port
            'http://api.exoscale.ch:8888/compute3',  # http, custom port
            'https://api.exoscale.ch:8787/compute4',  # https, custom port
            'https://api.test.com/compute/endpoint'  # https, default port
        ]
        expected_values = [
            {'host': 'api.exoscale.ch', 'port': 80, 'path': '/compute1'},
            {'host': 'api.exoscale.ch', 'port': 443, 'path': '/compute2'},
            {'host': 'api.exoscale.ch', 'port': 8888, 'path': '/compute3'},
            {'host': 'api.exoscale.ch', 'port': 8787, 'path': '/compute4'},
            {'host': 'api.test.com', 'port': 443, 'path': '/compute/endpoint'}
        ]
        cls = get_driver(Provider.CLOUDSTACK)
        for url, expected in zip(urls, expected_values):
            driver = cls('key', 'secret', url=url)
            self.assertEqual(driver.host, expected['host'])
            self.assertEqual(driver.path, expected['path'])
            self.assertEqual(driver.connection.port, expected['port'])
    def test_user_must_provide_host_and_path_or_url(self):
        """The driver demands either url= or host+path, but not neither."""
        expected_msg = ('When instantiating CloudStack driver directly '
                        'you also need to provide url or host and path '
                        'argument')
        cls = get_driver(Provider.CLOUDSTACK)
        self.assertRaisesRegexp(Exception, expected_msg, cls,
                                'key', 'secret')
        try:
            cls('key', 'secret', True, 'localhost', '/path')
        except Exception:
            self.fail('host and path provided but driver raised an exception')
        try:
            cls('key', 'secret', url='https://api.exoscale.ch/compute')
        except Exception:
            self.fail('url provided but driver raised an exception')
class CloudStackMockHttp(MockHttpTestCase):
    """Mock HTTP layer that answers CloudStack API calls from JSON fixtures.

    Requests are dispatched by their ``command`` query parameter: if a
    ``_cmd_<command>`` method exists it handles the call, otherwise the
    fixture named ``<command>_<fixture_tag>.json`` is returned verbatim.
    """
    fixtures = ComputeFileFixtures('cloudstack')
    # Class-level suffix tests can flip to select alternate fixture sets.
    fixture_tag = 'default'
    def _load_fixture(self, fixture):
        """Return (raw_body, parsed_json) for a named fixture file."""
        body = self.fixtures.load(fixture)
        return body, json.loads(body)
    def _test_path_invalid_credentials(self, method, url, body, headers):
        """Simulate a 401 for the bad-credentials test path."""
        body = ''
        return (httplib.UNAUTHORIZED, body, {},
                httplib.responses[httplib.UNAUTHORIZED])
    def _test_path_api_error(self, method, url, body, headers):
        """Simulate a CloudStack API error (non-standard 431 status)."""
        body = self.fixtures.load('registerSSHKeyPair_error.json')
        return (431, body, {},
                httplib.responses[httplib.OK])
    def _test_path(self, method, url, body, headers):
        """Validate signing parameters, then dispatch on the API command."""
        url = urlparse.urlparse(url)
        query = dict(parse_qsl(url.query))
        self.assertTrue('apiKey' in query)
        self.assertTrue('command' in query)
        self.assertTrue('response' in query)
        self.assertTrue('signature' in query)
        self.assertTrue(query['response'] == 'json')
        del query['apiKey']
        del query['response']
        del query['signature']
        command = query.pop('command')
        if hasattr(self, '_cmd_' + command):
            # Remaining query params become keyword args for the handler.
            return getattr(self, '_cmd_' + command)(**query)
        else:
            fixture = command + '_' + self.fixture_tag + '.json'
            body, obj = self._load_fixture(fixture)
            return (httplib.OK, body, obj, httplib.responses[httplib.OK])
    def _cmd_queryAsyncJobResult(self, jobid):
        """Answer async-job polling from a per-jobid fixture file."""
        fixture = 'queryAsyncJobResult' + '_' + str(jobid) + '.json'
        body, obj = self._load_fixture(fixture)
        return (httplib.OK, body, obj, httplib.responses[httplib.OK])
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    sys.exit(unittest.main())
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for remote procedure calls using queue
"""
import mock
import mox
from oslo.config import cfg
from cinder import context
from cinder import db
from cinder import exception
from cinder import manager
from cinder import service
from cinder import test
from cinder import wsgi
# Config options used by the service tests below; registered on the global
# CONF so tests can override them via self.flags().
test_service_opts = [
    cfg.StrOpt("fake_manager",
               default="cinder.tests.test_service.FakeManager",
               help="Manager for testing"),
    cfg.StrOpt("test_service_listen",
               default=None,
               help="Host to bind test service to"),
    cfg.IntOpt("test_service_listen_port",
               default=0,
               help="Port number to bind test service to"), ]
CONF = cfg.CONF
CONF.register_opts(test_service_opts)
class FakeManager(manager.Manager):
    """Fake manager for tests."""
    def __init__(self, host=None,
                 db_driver=None, service_name=None):
        # service_name is accepted for interface compatibility but unused.
        super(FakeManager, self).__init__(host=host,
                                          db_driver=db_driver)
    def test_method(self):
        """Marker method used to detect which object handled the call."""
        return 'manager'
class ExtendedService(service.Service):
    """Service subclass whose test_method shadows the manager's."""
    def test_method(self):
        return 'service'
class ServiceManagerTestCase(test.TestCase):
    """Test cases for Services."""
    def test_message_gets_to_manager(self):
        """Unknown service attributes are delegated to the manager."""
        serv = service.Service('test',
                               'test',
                               'test',
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        self.assertEqual(serv.test_method(), 'manager')
    def test_override_manager_method(self):
        """A method defined on the service wins over the manager's."""
        serv = ExtendedService('test',
                               'test',
                               'test',
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        self.assertEqual(serv.test_method(), 'service')
class ServiceFlagsTestCase(test.TestCase):
    """Checks that ``enable_new_services`` controls the initial DB state."""

    def _check_disabled_on_create(self, enable_new_services,
                                  expect_disabled):
        """Create/start/stop a service and verify its DB 'disabled' flag.

        Both tests below were identical except for the flag value and the
        expected result, so the shared body lives here.
        """
        self.flags(enable_new_services=enable_new_services)
        app = service.Service.create(host='foo', binary='cinder-fake')
        app.start()
        app.stop()
        ctxt = context.get_admin_context()
        ref = db.service_get(ctxt, app.service_id)
        # Clean up the row before asserting so a failure doesn't leak it.
        db.service_destroy(ctxt, app.service_id)
        if expect_disabled:
            self.assertTrue(ref['disabled'])
        else:
            self.assertFalse(ref['disabled'])

    def test_service_enabled_on_create_based_on_flag(self):
        self._check_disabled_on_create(enable_new_services=True,
                                       expect_disabled=False)

    def test_service_disabled_on_create_based_on_flag(self):
        self._check_disabled_on_create(enable_new_services=False,
                                       expect_disabled=True)
class ServiceTestCase(test.TestCase):
    """Test cases for Services."""

    def setUp(self):
        super(ServiceTestCase, self).setUp()
        # Replace the db module used by cinder.service with a mox stub so
        # the expectations recorded below drive and verify every db call.
        self.mox.StubOutWithMock(service, 'db')

    def test_create(self):
        host = 'foo'
        binary = 'cinder-fake'
        topic = 'fake'

        # NOTE(vish): Create was moved out of mox replay to make sure that
        # the looping calls are created in StartService.
        app = service.Service.create(host=host, binary=binary, topic=topic)

        self.assertTrue(app)

    def test_report_state_newly_disconnected(self):
        host = 'foo'
        binary = 'bar'
        topic = 'test'
        # Expected payload for the service row that start() will create.
        service_create = {'host': host,
                          'binary': binary,
                          'topic': topic,
                          'report_count': 0,
                          'availability_zone': 'nova'}
        # Row as it would come back from the db (id assigned).
        service_ref = {'host': host,
                       'binary': binary,
                       'topic': topic,
                       'report_count': 0,
                       'availability_zone': 'nova',
                       'id': 1}

        # Recorded call order matters to mox: lookup fails (row missing),
        # the row is created, then the first service_get during
        # report_state raises to simulate a lost db connection.
        service.db.service_get_by_args(mox.IgnoreArg(),
                                       host,
                                       binary).AndRaise(exception.NotFound())
        service.db.service_create(mox.IgnoreArg(),
                                  service_create).AndReturn(service_ref)
        service.db.service_get(mox.IgnoreArg(),
                               mox.IgnoreArg()).AndRaise(Exception())

        self.mox.ReplayAll()
        serv = service.Service(host,
                               binary,
                               topic,
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        serv.report_state()

        # A db failure during report_state flags the model as disconnected.
        self.assertTrue(serv.model_disconnected)

    def test_report_state_newly_connected(self):
        host = 'foo'
        binary = 'bar'
        topic = 'test'
        service_create = {'host': host,
                          'binary': binary,
                          'topic': topic,
                          'report_count': 0,
                          'availability_zone': 'nova'}
        service_ref = {'host': host,
                       'binary': binary,
                       'topic': topic,
                       'report_count': 0,
                       'availability_zone': 'nova',
                       'id': 1}

        # Same setup as above, but this time the db calls succeed and the
        # report_count update goes through.
        service.db.service_get_by_args(mox.IgnoreArg(),
                                       host,
                                       binary).AndRaise(exception.NotFound())
        service.db.service_create(mox.IgnoreArg(),
                                  service_create).AndReturn(service_ref)
        service.db.service_get(mox.IgnoreArg(),
                               service_ref['id']).AndReturn(service_ref)
        service.db.service_update(mox.IgnoreArg(), service_ref['id'],
                                  mox.ContainsKeyValue('report_count', 1))

        self.mox.ReplayAll()
        serv = service.Service(host,
                               binary,
                               topic,
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        # Pretend we previously lost the db; a successful report_state
        # must clear the disconnected flag.
        serv.model_disconnected = True
        serv.report_state()

        self.assertFalse(serv.model_disconnected)

    def test_service_with_long_report_interval(self):
        CONF.set_override('service_down_time', 10)
        CONF.set_override('report_interval', 10)
        service.Service.create(binary="test_service",
                               manager="cinder.tests.test_service.FakeManager")
        # Service.create notices service_down_time <= report_interval and
        # raises it (presumably to 2.5x the report interval -- hence 25;
        # confirm against cinder.service).
        self.assertEqual(CONF.service_down_time, 25)
class TestWSGIService(test.TestCase):
    """Tests for the WSGI service wrapper."""

    def setUp(self):
        super(TestWSGIService, self).setUp()
        # Stub out paste app loading so no real WSGI app is required.
        self.stubs.Set(wsgi.Loader, "load_app", mox.MockAnything())

    def test_service_random_port(self):
        wsgi_service = service.WSGIService("test_service")
        # Configured port 0 means "pick any free port".
        self.assertEqual(0, wsgi_service.port)
        wsgi_service.start()
        # After binding, the OS-assigned ephemeral port is reported.
        self.assertNotEqual(0, wsgi_service.port)
        wsgi_service.stop()
class OSCompatibilityTestCase(test.TestCase):
    def _test_service_launcher(self, fake_os):
        # Note(lpetrut): The cinder-volume service needs to be spawned
        # differently on Windows due to an eventlet bug. For this reason,
        # we must check the process launcher used.
        fake_process_launcher = mock.MagicMock()
        with mock.patch('os.name', fake_os), \
                mock.patch('cinder.service.process_launcher',
                           fake_process_launcher):
            launcher = service.get_launcher()
            if fake_os == 'nt':
                # Windows falls back to the plain (non-forking) launcher.
                self.assertEqual(type(launcher), service.Launcher)
            else:
                self.assertEqual(launcher, fake_process_launcher())

    def test_process_launcher_on_windows(self):
        self._test_service_launcher('nt')

    def test_process_launcher_on_linux(self):
        self._test_service_launcher('posix')
|
|
"""
Example python code to call the 2 mocks correlation function
routines from python. (The codes are written in C)
Author: Manodeep Sinha <manodeep@gmail.com>
Requires: numpy
"""
from __future__ import print_function
from os.path import dirname, abspath, join as pjoin
import time
import numpy as np
from _countpairs_mocks import \
countpairs_rp_pi_mocks as rp_pi_mocks,\
countpairs_theta_mocks as theta_mocks,\
countspheres_vpf_mocks as vpf_mocks, \
countpairs_s_mu_mocks as s_mu_mocks
try:
import pandas as pd
except ImportError:
pd = None
def read_text_file(filename, encoding="utf-8"):
    """
    Read an entire text file and return its contents as a string.

    Under python3 the file is decoded with ``encoding`` (default UTF-8);
    under python2 the builtin ``open`` has no ``encoding`` keyword, so the
    first call raises TypeError and we fall back to a plain read. Uses the
    EAFP (https://docs.python.org/2/glossary.html#term-eafp) principle.
    """
    try:
        # BUG FIX: ``encoding`` must be passed by keyword. As a third
        # positional argument it lands in ``open``'s ``buffering`` slot,
        # which raises TypeError even on python3 -- so the requested
        # encoding was silently never applied.
        with open(filename, 'r', encoding=encoding) as f:
            r = f.read()
    except TypeError:
        # python2: builtin open() does not accept an encoding keyword.
        with open(filename, 'r') as f:
            r = f.read()
    return r
def main():
    """Run the mock clustering statistics -- DD(rp,pi), DD(s,mu),
    w(theta) and the void probability function -- on the bundled Mr19
    mock catalog, printing the first few bins of each result.
    """
    tstart = time.time()
    filename = pjoin(dirname(abspath(__file__)),
                     "../tests/data/", "Mr19_mock_northonly.rdcz.dat")
    # Double-precision calculations
    # (if you want single-prec, just change the following line
    # to dtype = np.float32)
    dtype = np.float64

    # Check if pandas is available - much faster to read in the
    # data through pandas
    t0 = time.time()
    print("Reading in the data...")
    if pd is not None:
        # NOTE(review): with header=None the columns are the integers
        # 0..3, so the {"x","y","z"} keys in this dtype mapping match no
        # column -- confirm whether per-column dtypes are needed here.
        df = pd.read_csv(filename, header=None, engine="c",
                         dtype={"x": dtype, "y": dtype, "z": dtype},
                         delim_whitespace=True)
        ra = np.asarray(df[0], dtype=dtype)
        dec = np.asarray(df[1], dtype=dtype)
        cz = np.asarray(df[2], dtype=dtype)
        weights = np.asarray(df[3], dtype=dtype)
    else:
        ra, dec, cz, weights = np.genfromtxt(filename, dtype=dtype,
                                             unpack=True)
    # One row per weighting scheme, hence the (1, N) shape.
    weights = weights.reshape(1,-1)

    t1 = time.time()
    print("RA min = {0} max = {1}".format(np.min(ra), np.max(ra)))
    print("DEC min = {0} max = {1}".format(np.min(dec), np.max(dec)))
    print("cz min = {0} max = {1}".format(np.min(cz), np.max(cz)))
    print("Done reading the data - time taken = {0:10.1f} seconds"
          .format(t1 - t0))

    print("Beginning Correlation functions calculations")
    nthreads = 4
    pimax = 40.0
    binfile = pjoin(dirname(abspath(__file__)),
                    "../tests/", "bins")
    autocorr = 1
    numbins_to_print = 5
    cosmology = 1

    # --- 2-D redshift-space pair counts, weighted by pair product ---
    print("\nRunning 2-D correlation function xi(rp,pi)")
    results_DDrppi, _ = rp_pi_mocks(autocorr, cosmology, nthreads,
                                    pimax, binfile,
                                    ra, dec, cz, weights1=weights,
                                    output_rpavg=True, verbose=True,
                                    weight_type='pair_product')
    print("\n# ****** DD(rp,pi): first {0} bins ******* "
          .format(numbins_to_print))
    print("# rmin rmax rpavg pi_upper npairs weight_avg")
    print("##########################################################################")
    for ibin in range(numbins_to_print):
        items = results_DDrppi[ibin]
        print("{0:12.4f} {1:12.4f} {2:10.4f} {3:10.1f} {4:10d} {5:12.4f}"
              .format(items[0], items[1], items[2], items[3], items[4], items[5]))
    print("--------------------------------------------------------------------------")

    # --- Same statistic, unweighted, with custom grid refinement ---
    print("\nRunning 2-D correlation function xi(rp,pi) with different bin refinement")
    results_DDrppi, _ = rp_pi_mocks(autocorr, cosmology, nthreads,
                                    pimax, binfile,
                                    ra, dec, cz,
                                    output_rpavg=True,
                                    xbin_refine_factor=3,
                                    ybin_refine_factor=3,
                                    zbin_refine_factor=2,
                                    verbose=True)
    print("\n# ****** DD(rp,pi): first {0} bins ******* "
          .format(numbins_to_print))
    print("# rmin rmax rpavg pi_upper npairs")
    print("###########################################################")
    for ibin in range(numbins_to_print):
        items = results_DDrppi[ibin]
        print("{0:12.4f} {1:12.4f} {2:10.4f} {3:10.1f} {4:10d}"
              .format(items[0], items[1], items[2], items[3], items[4]))
    print("-----------------------------------------------------------")

    # --- DD(s,mu) pair counts ---
    nmu_bins = 10
    mu_max = 1.0

    print("\nRunning 2-D correlation function xi(s,mu)")
    results_DDsmu, _ = s_mu_mocks(autocorr, cosmology, nthreads,
                                  mu_max, nmu_bins, binfile,
                                  ra, dec, cz, weights1=weights,
                                  output_savg=True, verbose=True,
                                  weight_type='pair_product')
    print("\n# ****** DD(s,mu): first {0} bins ******* "
          .format(numbins_to_print))
    print("# smin smax savg mu_upper npairs weight_avg")
    print("##########################################################################")
    for ibin in range(numbins_to_print):
        items = results_DDsmu[ibin]
        print("{0:12.4f} {1:12.4f} {2:10.4f} {3:10.1f} {4:10d} {5:12.4f}"
              .format(items[0], items[1], items[2], items[3], items[4], items[5]))
    print("--------------------------------------------------------------------------")

    # --- Angular correlation function (cross of the catalog with itself) ---
    binfile = pjoin(dirname(abspath(__file__)),
                    "../tests/", "angular_bins")
    print("\nRunning angular correlation function w(theta)")
    results_wtheta, _ = theta_mocks(autocorr, nthreads, binfile,
                                    ra, dec, weights1=weights,
                                    RA2=ra, DEC2=dec, weights2=weights,
                                    output_thetaavg=True, fast_acos=True,
                                    verbose=1, weight_type='pair_product')
    print("\n# ****** wtheta: first {0} bins ******* "
          .format(numbins_to_print))
    print("# thetamin thetamax thetaavg npairs weightavg")
    print("#######################################################################")
    for ibin in range(numbins_to_print):
        items = results_wtheta[ibin]
        print("{0:14.4f} {1:14.4f} {2:14.4f} {3:14d} {4:14.4f}"
              .format(items[0], items[1], items[2], items[3], items[4]))
    print("-----------------------------------------------------------------------")

    # --- Void probability function on pre-computed sphere centers ---
    print("Beginning the VPF")
    # Max. sphere radius of 10 Mpc
    rmax = 10.0
    # 10 bins..so counts in spheres of radius 1, 2, 3, 4...10 Mpc spheres
    nbin = 10
    num_spheres = 10000
    num_pN = 6
    threshold_neighbors = 1  # does not matter since we have the centers
    centers_file = pjoin(dirname(abspath(__file__)),
                         "../tests/data/",
                         "Mr19_centers_xyz_forVPF_rmax_10Mpc.txt")
    results_vpf, _ = vpf_mocks(rmax, nbin, num_spheres, num_pN,
                               threshold_neighbors, centers_file, cosmology,
                               ra, dec, cz, ra, dec, cz, verbose=True)
    print("\n# ****** pN: first {0} bins ******* "
          .format(numbins_to_print))
    # Assemble the header row: radius column plus one column per pN.
    print('# r ', end="")
    for ipn in range(num_pN):
        print(' p{0:0d} '.format(ipn), end="")
    print("")
    print("###########", end="")
    for ipn in range(num_pN):
        print('################', end="")
    print("")
    for ibin in range(numbins_to_print):
        items = results_vpf[ibin]
        print('{0:10.2f} '.format(items[0]), end="")
        for ipn in range(num_pN):
            print(' {0:15.4e}'.format(items[ipn + 1]), end="")
        print("")
    print("-----------------------------------------------------------")
    print("Done with the VPF.")

    tend = time.time()
    print("Done with all the MOCK clustering calculations. Total time \
taken = {0:0.2f} seconds.".format(tend - tstart))
# Allow the example to be run directly as a script.
if __name__ == "__main__":
    main()
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.eager import tape
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import function as tf_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import gradient_descent
class FunctionTest(test.TestCase):
  """Tests for eager-mode `function.defun` and `function.make_defun_op`."""

  def testBasic(self):
    matmul = function.defun(math_ops.matmul)
    t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    sq = matmul(t, t, transpose_a=True)
    sq2 = matmul(sq, t, transpose_a=True)
    self.assertAllEqual(sq.numpy().reshape(-1), [10, 14, 14, 20])
    self.assertAllEqual(sq2.numpy().reshape(-1), [52, 76, 74, 108])

  def testBasicGraphMode(self):
    matmul = function.defun(math_ops.matmul)

    @function.defun
    def sq(a):
      return matmul(a, a)

    t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    out = sq(t)
    self.assertAllEqual(out, math_ops.matmul(t, t).numpy())

  def testNestedInputsGraphMode(self):
    # defun must accept arbitrarily nested structures (namedtuple of dicts).
    matmul = function.defun(math_ops.matmul)

    pair = collections.namedtuple('pair', ['a', 'b'])

    @function.defun
    def a_times_b(inputs):
      return matmul(inputs.a['a'], inputs.b['b'])

    t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    out = a_times_b(pair({'a': t}, {'b': t}))
    self.assertAllEqual(out, math_ops.matmul(t, t).numpy())

  def testGraphModeWithGradients(self):
    v = resource_variable_ops.ResourceVariable(1.0, name='v')

    @function.defun
    def step():
      def inner():
        return v * v

      return backprop.implicit_grad(inner)()[0][0]

    # d(v*v)/dv at v=1 is 2.
    self.assertAllEqual(step(), 2.0)

  def testBasicDefunOpGraphMode(self):
    matmul = function.defun(math_ops.matmul)

    def sq(a):
      return matmul(a, a)

    t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    sq_op = function.make_defun_op(sq, t)

    self.assertEqual(sq_op.output_shapes, tensor_shape.TensorShape([2, 2]))
    out = sq_op(t)
    self.assertAllEqual(out, math_ops.matmul(t, t).numpy())

  def testNestedInputsDefunOpGraphMode(self):
    matmul = function.defun(math_ops.matmul)

    pair = collections.namedtuple('pair', ['a', 'b'])

    def a_times_b(inputs):
      return matmul(inputs.a['a'], inputs.b['b'])

    t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    inputs = pair({'a': t}, {'b': t})
    sq_op = function.make_defun_op(a_times_b, inputs)

    self.assertEqual(sq_op.output_shapes, tensor_shape.TensorShape([2, 2]))
    out = sq_op(inputs)
    self.assertAllEqual(out, math_ops.matmul(t, t).numpy())

  def testNestedOutputDefunOpGraphMode(self):
    matmul = function.defun(math_ops.matmul)

    def sq(a):
      return (matmul(a, a), {'b': constant_op.constant(1.0)})

    t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    sq_op = function.make_defun_op(sq, t)

    # output_shapes/output_dtypes mirror the nested return structure.
    self.assertEqual(sq_op.output_shapes,
                     (tensor_shape.TensorShape([2, 2]),
                      {'b': tensor_shape.TensorShape([])}))
    self.assertEqual(sq_op.output_dtypes,
                     (dtypes.float32, {'b': dtypes.float32}))
    (a, b) = sq_op(t)
    self.assertAllEqual(a, math_ops.matmul(t, t).numpy())
    self.assertAllEqual(b['b'].numpy(), 1.0)

  def testDefunOpGraphModeWithGradients(self):
    v = resource_variable_ops.ResourceVariable(1.0, name='v')

    def step():
      def inner():
        return v * v

      return backprop.implicit_grad(inner)()[0][0]

    step_op = function.make_defun_op(step)
    self.assertEqual(step_op.output_dtypes, dtypes.float32)
    self.assertEqual(step_op.output_shapes, tensor_shape.TensorShape([]))
    self.assertAllEqual(step_op(), 2.0)

  def testDefunOpGraphModeNoneOutput(self):
    def fn(unused_a, unused_b):
      return None

    x = constant_op.constant(1)
    fn_op = function.make_defun_op(fn, x, x)

    # A None return yields None shapes/dtypes/result.
    self.assertEqual(fn_op.output_dtypes, None)
    self.assertEqual(fn_op.output_shapes, None)
    self.assertAllEqual(fn_op(x, x), None)

  def testDefunReadVariable(self):
    v = resource_variable_ops.ResourceVariable(1.0)

    @function.defun
    def f():
      return v.read_value()

    self.assertEqual(1.0, float(f()))

  def testDefunAssignAddVariable(self):
    v = resource_variable_ops.ResourceVariable(1.0)

    @function.defun
    def f():
      v.assign_add(2.0)
      return v.read_value()

    self.assertEqual(3.0, float(f()))

  def testDefunShapeInferenceWithCapturedResourceVariable(self):
    v = resource_variable_ops.ResourceVariable([[1, 2], [3, 4]])

    def f():
      x = constant_op.constant([[1, 2], [3, 4]])
      out = math_ops.matmul(v, x)
      # Shape must be inferred inside the traced function body.
      self.assertEqual(out.get_shape(), tensor_shape.TensorShape([2, 2]))

    compiled = function.defun(f)
    compiled()

  def testDefunShapeInferenceWithCapturedResourceVariableInGraphMode(self):
    with context.graph_mode():
      v = resource_variable_ops.ResourceVariable([[1, 2], [3, 4]])

      def f():
        x = constant_op.constant([[1, 2], [3, 4]])
        out = math_ops.matmul(v, x)
        self.assertEqual(out.get_shape(), tensor_shape.TensorShape([2, 2]))

      compiled = function.defun(f)
      compiled()

  def testDefunShapeInferenceWithCapturedVariableInGraphMode(self):
    with context.graph_mode():
      v = variables.Variable([[1, 2], [3, 4]])

      def f():
        x = constant_op.constant([[1, 2], [3, 4]])
        out = math_ops.matmul(v, x)
        self.assertEqual(out.get_shape(), tensor_shape.TensorShape([2, 2]))

      # Check that shape inference works while creating the defun
      compiled = function.defun(f)
      compiled()

  def testDefunDifferentiable(self):
    v = resource_variable_ops.ResourceVariable(1.0)

    @function.defun
    def f():
      return v * v

    self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)

  def testDefunCanBeDifferentiatedTwice(self):
    v = resource_variable_ops.ResourceVariable(1.0)

    @function.defun
    def f():
      return v * v

    self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)
    # Ensure that v is watched again.
    self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)

  def testGraphModeCaptureVariable(self):
    with context.graph_mode(), self.test_session() as sess:

      class HasAVar(object):

        def __init__(self):
          self.v = resource_variable_ops.ResourceVariable(1.0)

        def call(self):
          return self.v * 2

      o = HasAVar()
      variables.global_variables_initializer().run()
      call = function.defun(o.call)
      op = call()
      self.assertAllEqual(sess.run(op), 2.0)

  def testGraphModeManyFunctions(self):
    with context.graph_mode(), self.test_session():

      @function.defun
      def f(x):
        return x * x

      @function.defun
      def g(x):
        return f(x) + 1

      self.assertAllEqual(g(constant_op.constant(2.0)).eval(), 5.0)

  def testDict(self):

    @function.defun
    def f(x):
      return {'name': x + 1}

    self.assertAllEqual(f(constant_op.constant(1.0))['name'], 2.0)

  def testTensorConversionWithDefun(self):

    @function.defun
    def f(x):
      return math_ops.add(x, constant_op.constant(3))

    self.assertAllEqual(5, f(constant_op.constant(2)))

  def testTensorConversionCall(self):

    @function.defun
    def f(x):
      return math_ops.add(x, constant_op.constant(3))

    @function.defun
    def g(x):
      return f(f(x))

    self.assertAllEqual(8, g(constant_op.constant(2)))

  def testDefunCallBackprop(self):

    @function.defun
    def f(x):
      return math_ops.add(x, x)

    @function.defun
    def g(x):
      return backprop.gradients_function(f, [0])(x)[0]

    self.assertAllEqual(2, g(constant_op.constant(2)))

  def testGraphModeEagerGradError(self):
    with context.graph_mode():
      def f():
        x = variable_scope.get_variable(
            'v', initializer=constant_op.constant(1.0))
        return x * constant_op.constant(2.0)

      # Eager-style implicit gradients are not valid in graph mode.
      with self.assertRaisesRegexp(ValueError,
                                   'No trainable variables were accessed'):
        backprop.implicit_val_and_grad(f)()

  def testDefunCallBackpropUsingSameObjectForMultipleArguments(self):

    @function.defun
    def g(x):
      return backprop.gradients_function(math_ops.multiply, [0, 1])(x, x)

    def np_g(x):
      return [d.numpy() for d in g(x)]

    x = constant_op.constant(1.)
    self.assertAllEqual([1., 1.], np_g(x))
    self.assertAllEqual([1., 1.], np_g(1.))

  def testCallShape(self):

    @function.defun
    def f(x):
      return x + 1

    @function.defun
    def g(x):
      x = f(x)
      # Static shape info must survive the nested defun call.
      self.assertEqual(x.shape.as_list(), [])
      return None

    g(constant_op.constant(1.0))

  def testGradientTensorConversionWithDefun(self):
    three = resource_variable_ops.ResourceVariable(3.0, name='v')

    @function.defun
    def f(x):
      return math_ops.add(x, three)

    def g(x):
      tape.watch_variable(three)
      return f(x)

    g = backprop.implicit_grad(g)(constant_op.constant(1.0))[0][0]
    self.assertAllEqual(g, 1.0)

  def testGradient(self):
    matmul = function.defun(math_ops.matmul)

    def sq(x):
      return matmul(x, x, transpose_a=True)

    t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
    grad_t, = backprop.gradients_function(sq, [0])(t)
    self.assertAllEqual(grad_t, [[6, 6], [14, 14]])

  def testGradientInFunction(self):

    @function.defun
    def f(x):
      return backprop.gradients_function(lambda y: y * y, [0])(x)[0]

    self.assertAllEqual(f(constant_op.constant(1.0)), 2.0)

  def testGradientOfGatherWithDefun(self):
    v = resource_variable_ops.ResourceVariable([0.0, 1.0, 2.0])

    def sum_gather():
      return math_ops.reduce_sum(array_ops.gather(v, [1, 2]))

    grad_fn = backprop.implicit_grad(sum_gather)
    gradient = grad_fn()
    defun_grad_fn = backprop.implicit_grad(function.defun(sum_gather))
    defun_gradient = defun_grad_fn()
    self.assertEqual(len(gradient), len(defun_gradient))

    # Gather gradients are IndexedSlices; compare them field by field.
    gradient = gradient[0][0]
    defun_gradient = defun_gradient[0][0]
    self.assertAllEqual(gradient.values, defun_gradient.values)
    self.assertAllEqual(gradient.indices, defun_gradient.indices)
    self.assertAllEqual(gradient.dense_shape, defun_gradient.dense_shape)

  def testReturningIndexedSlicesWithDefun(self):

    def validate(indexed_slice):
      def f():
        return indexed_slice

      output = function.defun(f)()
      self.assertTrue(isinstance(output, ops.IndexedSlices))
      self.assertAllEqual(indexed_slice.values, output.values)
      self.assertAllEqual(indexed_slice.indices, output.indices)
      self.assertAllEqual(indexed_slice.dense_shape, output.dense_shape)

      self.assertEqual(
          function.make_defun_op(f).output_shapes, indexed_slice.values.shape)

    arg = ops.IndexedSlices(
        values=constant_op.constant([1, 2]),
        indices=constant_op.constant([0, 1]),
        dense_shape=constant_op.constant([2]))
    validate(arg)

    # Also validate the dense_shape=None variant.
    arg = ops.IndexedSlices(
        values=constant_op.constant([1, 2]),
        indices=constant_op.constant([0, 1]),
        dense_shape=None)
    validate(arg)

  def testIndexedSliceAsArgumentWithDefun(self):

    @function.defun
    def f(indexed_slice):
      return indexed_slice

    def validate(arg):
      output = f(arg)
      self.assertTrue(isinstance(output, ops.IndexedSlices))
      self.assertAllEqual(arg.values, output.values)
      self.assertAllEqual(arg.indices, output.indices)
      self.assertAllEqual(arg.dense_shape, output.dense_shape)

    indexed_slice = ops.IndexedSlices(
        values=constant_op.constant([1]),
        indices=constant_op.constant([0]),
        dense_shape=constant_op.constant([1]))
    validate(indexed_slice)

    # Test that `f` works even when `dense_shape` is None.
    indexed_slice = ops.IndexedSlices(
        values=constant_op.constant([1]),
        indices=constant_op.constant([0]),
        dense_shape=None)
    validate(indexed_slice)

  def testFunctionOnDevice(self):
    if not context.context().num_gpus():
      self.skipTest('No GPUs found')

    x = constant_op.constant([1.]).gpu()
    f = function.defun(math_ops.add)
    y = f(x, x).cpu()
    self.assertAllEqual(y, [2.])

  def testFunctionHandlesInputsOnDifferentDevices(self):
    if not context.context().num_gpus():
      self.skipTest('No GPUs found')

    # The Reshape op requires the shape tensor to be placed in host memory.
    reshape = function.defun(array_ops.reshape)
    value = constant_op.constant([1., 2.]).gpu()
    shape = constant_op.constant([2, 1])
    reshaped = reshape(value, shape).cpu()
    self.assertAllEqual(reshaped, [[1], [2]])

  def testFunctionHandlesInputsPlacedOnTheWrongDeviceGracefully(self):
    if not context.context().num_gpus():
      self.skipTest('No GPUs found')

    # The Reshape op requires the shape tensor to be placed in host memory.
    reshape = function.defun(array_ops.reshape)
    value = constant_op.constant([1., 2.])
    shape = constant_op.constant([2, 1]).gpu()
    with self.assertRaises(errors.InvalidArgumentError):
      with ops.device('gpu:0'):
        reshape(value, shape)

  def testDifferentiableFunctionNoneOutputs(self):

    @function.defun
    def my_function(x):
      return x, None

    def wrapper(x):
      return my_function(x)[0]

    g = backprop.gradients_function(wrapper, [0])(constant_op.constant(0.0))
    self.assertAllEqual(g[0], 1.)

  def testNoneOutput(self):

    @function.defun
    def my_function(_):
      return None

    self.assertAllEqual(my_function(1), None)

  def testNestedFunctions(self):
    # TensorFlow function (which is what would be used in TensorFlow graph
    # construction).
    @tf_function.Defun(dtypes.int32, dtypes.int32)
    def add(a, b):
      return math_ops.add(a, b)

    @function.defun
    def add_one(x):
      return add(x, 1)

    self.assertAllEqual(3, add_one(constant_op.constant(2)))

  def testVariableCaptureInNestedFunctions(self):
    v = resource_variable_ops.ResourceVariable(1)

    @function.defun
    def read():
      return v.read_value()

    @function.defun
    def outer():
      return read()

    self.assertEqual(1, int(outer()))

  def testReturnCapturedEagerTensor(self):
    t = constant_op.constant(1)

    @function.defun
    def read():
      return t

    self.assertEqual(1, int(read()))

  def testReturnCapturedGraphTensor(self):
    with context.graph_mode(), self.test_session():
      t = constant_op.constant(1)

      @function.defun
      def read():
        return t

      self.assertEqual(1, int(self.evaluate(read())))

  def testSequenceInputs(self):
    clip_by_global_norm = function.defun(clip_ops.clip_by_global_norm)
    t_list = [constant_op.constant(1.0), constant_op.constant(2.0)]
    clipped_list, global_norm = clip_by_global_norm(t_list,
                                                    constant_op.constant(.2))
    for t in clipped_list:
      self.assertTrue(isinstance(t, ops.Tensor))
    self.assertTrue(isinstance(global_norm, ops.Tensor))

  def testNestedSequenceInputs(self):

    def my_op(inputs):
      a, b, c = inputs
      e, f = b
      g, h = e
      return [a + a, [tuple([f + f, g + g]), h + h], c + c], a + f + g + h + c

    my_eager_op = function.defun(my_op)
    ret = my_eager_op([
        constant_op.constant(1), [(constant_op.constant(2),
                                   constant_op.constant(3)),
                                  constant_op.constant(4)],
        constant_op.constant(5)
    ])
    # Structure (including the inner tuple) must be preserved exactly.
    self.assertEqual(len(ret), 2)
    self.assertAllEqual(ret[0][0], 2)
    self.assertAllEqual(ret[0][1][0][0], 8)
    self.assertAllEqual(ret[0][1][0][1], 4)
    self.assertTrue(isinstance(ret[0][1][0], tuple))
    self.assertAllEqual(ret[0][1][1], 6)
    self.assertAllEqual(ret[0][2], 10)
    self.assertAllEqual(ret[1], 15)

  def testVariableNamesRespectNameScopesWithDefun(self):
    @function.defun
    def create_variable():
      with ops.name_scope('foo'):
        v = resource_variable_ops.ResourceVariable(0.0, name='bar')
      self.assertEqual(v.name, 'foo/bar:0')

    create_variable()

  def testVariableNamesRespectNameScopesWithDefunInGraph(self):
    with context.graph_mode():
      @function.defun
      def create_variable():
        with ops.name_scope('foo'):
          v = resource_variable_ops.ResourceVariable([1.0, 2.0], name='bar')
        self.assertEqual(v.name, 'foo/bar:0')

      with ops.get_default_graph().as_default():
        create_variable()
class AutomaticControlDependenciesTest(test.TestCase):
  """Tests that AutomaticControlDependencies sequences stateful ops."""

  def testBasic(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()
      with function.AutomaticControlDependencies() as c:
        v.assign(v + 1)
        v.assign(2 * v)
        val = v.read_value()
        val = c.mark_as_return(val)
      # Both assigns run, in program order, before the read: (1+1)*2 = 4.
      self.assertAllEqual(val.eval(), 4.0)

  def testCondMustRun(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()
      p = array_ops.placeholder(dtype=dtypes.bool)
      with function.AutomaticControlDependencies() as c:

        def true_fn():
          v.assign(v + 1)
          return 0.0

        def false_fn():
          v.assign(v + 4)
          return 1.0

        # The cond's side effect must be forced even though its result
        # is discarded.
        control_flow_ops.cond(p, true_fn, false_fn)
        val = v.read_value()
        val = c.mark_as_return(val)
      self.assertAllEqual(val.eval(feed_dict={p: False}), 5.0)
      self.assertAllEqual(val.eval(feed_dict={p: True}), 6.0)

  def testCondMustRunSeparateRead(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()
      p = array_ops.placeholder(dtype=dtypes.bool)
      with function.AutomaticControlDependencies() as c:

        def true_fn():
          v.assign(v + 1)
          return 0.0

        def false_fn():
          v.assign(v + 4)
          return 1.0

        control_flow_ops.cond(p, true_fn, false_fn)
        one = constant_op.constant(1.0)
        one = c.mark_as_return(one)
      # Even though `one` never reads v, evaluating the marked return
      # must still trigger the cond's assignment.
      one.eval(feed_dict={p: False})
      self.assertAllEqual(v.read_value().eval(), 5.0)
      one.eval(feed_dict={p: True})
      self.assertAllEqual(v.read_value().eval(), 6.0)

  def testCondNested(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()
      p = array_ops.placeholder(dtype=dtypes.bool)
      q = array_ops.placeholder(dtype=dtypes.bool)
      with function.AutomaticControlDependencies() as c:

        def true_fn():
          v.assign(v + 1, name='true')
          return 1.0

        def false_fn():

          def inner_true_fn():
            v.assign(v * 2, name='false_true')
            return 2.0

          def inner_false_fn():
            v.assign(v * 3, name='false_false')
            return 3.0

          # Side effects inside a nested cond must also be forced.
          control_flow_ops.cond(q, inner_true_fn, inner_false_fn)
          return 1.0

        control_flow_ops.cond(p, true_fn, false_fn)
        with ops.name_scope('final'):
          val = v.read_value()
        val = c.mark_as_return(val)
      # Sequential evals accumulate on the same variable:
      # 1*3=3, 3*2=6, 6+1=7, 7+1=8.
      self.assertAllEqual(val.eval(feed_dict={p: False, q: False}), 3.0)
      self.assertAllEqual(val.eval(feed_dict={p: False, q: True}), 6.0)
      self.assertAllEqual(val.eval(feed_dict={p: True, q: True}), 7.0)
      self.assertAllEqual(val.eval(feed_dict={p: True, q: False}), 8.0)

  def testCondOneBranch(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()
      p = array_ops.placeholder(dtype=dtypes.bool)
      with function.AutomaticControlDependencies() as c:

        def true_fn():
          return 0.0

        def false_fn():
          v.assign(v + 4)
          return 1.0

        control_flow_ops.cond(p, true_fn, false_fn)
        val = v.read_value()
        val = c.mark_as_return(val)
      # Only the false branch mutates v; the second eval takes the
      # (side-effect-free) true branch, so v stays 5.
      self.assertAllEqual(val.eval(feed_dict={p: False}), 5.0)
      self.assertAllEqual(val.eval(feed_dict={p: True}), 5.0)

  def testCondOneBranchUpdateBefore(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()
      p = array_ops.placeholder(dtype=dtypes.bool)
      with function.AutomaticControlDependencies() as c:
        # Unconditional update that must be ordered before the cond.
        v.assign(v * 2)

        def true_fn():
          return 0.0

        def false_fn():
          v.assign(v + 4)
          return 1.0

        control_flow_ops.cond(p, true_fn, false_fn)
        val = v.read_value()
        val = c.mark_as_return(val)
      # eval 1: 1*2=2, +4 -> 6; eval 2: 6*2=12 (true branch, no add).
      self.assertAllEqual(val.eval(feed_dict={p: False}), 6.0)
      self.assertAllEqual(val.eval(feed_dict={p: True}), 12.0)

  def testCondOneBranchUpdateAfter(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()
      p = array_ops.placeholder(dtype=dtypes.bool)
      with function.AutomaticControlDependencies() as c:

        def true_fn():
          return 0.0

        def false_fn():
          v.assign(v + 4)
          return 1.0

        control_flow_ops.cond(p, true_fn, false_fn)
        # Unconditional update ordered after the cond, before the read.
        v.assign(v * 2)
        val = v.read_value()
        val = c.mark_as_return(val)
      # eval 1: (1+4)*2=10; eval 2: 10*2=20.
      self.assertAllEqual(val.eval(feed_dict={p: False}), 10.0)
      self.assertAllEqual(val.eval(feed_dict={p: True}), 20.0)

  def testDecorator(self):
    with context.graph_mode(), self.test_session():
      v = resource_variable_ops.ResourceVariable(1.0)
      variables.global_variables_initializer().run()

      # Decorator form of AutomaticControlDependencies.
      @function.automatic_control_dependencies
      def f():
        v.assign(v + 1)
        v.assign(2 * v)
        return v.read_value()

      self.assertAllEqual(f().eval(), 4.0)

  def testOptimizerInDefun(self):
    def loss(v):
      return v**2

    optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)

    @function.defun
    def train():
      v = resource_variable_ops.ResourceVariable(1.0)
      grad = backprop.implicit_grad(loss)(v)
      optimizer.apply_gradients(grad)
      return v.read_value()

    # One SGD step from v=1 with grad 2v=2 and lr=1 gives v = 1 - 2 = -1.
    value = train()
    self.assertEqual(value.numpy(), -1.0)

  def testOptimizerInDefunWithCapturedVariable(self):
    v = resource_variable_ops.ResourceVariable(1.0)

    def loss():
      return v**2

    optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)

    @function.defun
    def train():
      grad = backprop.implicit_grad(loss)()
      optimizer.apply_gradients(grad)

    train()
    # The update must be visible on the captured (outer) variable.
    self.assertEqual(v.numpy(), -1.0)
# Run the eager-mode test suite when executed directly.
if __name__ == '__main__':
  test.main()
|
|
import collections as col
import sqlite3
def get_2d_tileset_info(db_file):
    """
    Read the single ``tileset_info`` row out of a 2D sqlite tile file.

    Parameters
    ----------
    db_file: str
        The filename of the sqlite db file

    Returns
    -------
    tileset_info: dict
        The stored zoom_step, max_length, assembly, chrom_names,
        chrom_sizes, tile_size, max_zoom and max_width columns, plus the
        derived square data bounds min_pos=[1, 1] and
        max_pos=[max_length, max_length].
    """
    conn = sqlite3.connect(db_file)
    try:
        # Column order here must match the tileset_info table schema.
        row = conn.execute("SELECT * from tileset_info").fetchone()
        return {
            "zoom_step": row[0],
            "max_length": row[1],
            "assembly": row[2],
            "chrom_names": row[3],
            "chrom_sizes": row[4],
            "tile_size": row[5],
            "max_zoom": row[6],
            "max_width": row[7],
            "min_pos": [1, 1],
            "max_pos": [row[1], row[1]],
        }
    finally:
        # Close even when the query raises; the original leaked the
        # connection on any error before close().
        conn.close()
def get_1D_tiles(db_file, zoom, tile_x_pos, numx):
    """
    Retrieve a contiguous set of tiles from a 2D db tile file.

    Parameters
    ----------
    db_file: str
        The filename of the sqlite db file
    zoom: int
        The zoom level
    tile_x_pos: int
        The x position of the first tile
    numx: int
        The width of the block of tiles to retrieve

    Returns
    -------
    tiles: {pos: [entry, ...]}
        A set of tiles, indexed by x position
    """
    tileset_info = get_2d_tileset_info(db_file)

    tile_width = tileset_info["max_width"] / 2 ** zoom
    tile_x_start_pos = tile_width * tile_x_pos
    tile_x_end_pos = tile_x_start_pos + (numx * tile_width)

    # Parameterized query instead of str.format: safer, and lets sqlite
    # reuse the prepared statement.
    query = """
        SELECT fromX, toX, fromY, toY, chrOffset, importance, fields, uid
        FROM intervals,position_index
        WHERE
            intervals.id=position_index.id AND
            zoomLevel <= ? AND
            rToX >= ? AND
            rFromX <= ?
        """
    conn = sqlite3.connect(db_file)
    try:
        rows = conn.cursor().execute(
            query, (zoom, tile_x_start_pos, tile_x_end_pos)
        ).fetchall()
    finally:
        # Close even when the query raises; the original leaked on error.
        conn.close()

    new_rows = col.defaultdict(list)
    for r in rows:
        x_start, x_end, y_start, y_end = r[0], r[1], r[2], r[3]
        try:
            uid = r[7].decode("utf-8")
        except AttributeError:
            # Already a str (TEXT column), not bytes.
            uid = r[7]
        for i in range(tile_x_pos, tile_x_pos + numx):
            tile_x_start = i * tile_width
            tile_x_end = (i + 1) * tile_width
            # Keep every interval that overlaps tile i on the x axis.
            if x_start < tile_x_end and x_end >= tile_x_start:
                new_rows[i].append(
                    {
                        "xStart": x_start,
                        "xEnd": x_end,
                        "yStart": y_start,
                        "yEnd": y_end,
                        "chrOffset": r[4],
                        "importance": r[5],
                        "uid": uid,
                        "fields": r[6].split("\t"),
                    }
                )
    return new_rows
def get_2D_tiles(db_file, zoom, tile_x_pos, tile_y_pos, numx=1, numy=1):
    """
    Retrieve a contiguous set of tiles from a 2D db tile file.

    Parameters
    ----------
    db_file: str
        The filename of the sqlite db file
    zoom: int
        The zoom level
    tile_x_pos: int
        The x position of the first tile
    tile_y_pos: int
        The y position of the first tile
    numx: int
        The width of the block of tiles to retrieve
    numy: int
        The height of the block of tiles to retrieve

    Returns
    -------
    tiles: {(x, y): [entry, ...]}
        A set of tiles, indexed by (x, y) position
    """
    tileset_info = get_2d_tileset_info(db_file)

    # Tiles are square: one width serves both axes.
    tile_width = tileset_info["max_width"] / 2 ** zoom
    tile_x_start_pos = tile_width * tile_x_pos
    tile_x_end_pos = tile_x_start_pos + (numx * tile_width)
    tile_y_start_pos = tile_width * tile_y_pos
    tile_y_end_pos = tile_y_start_pos + (numy * tile_width)

    # Parameterized query instead of str.format: safer, and lets sqlite
    # reuse the prepared statement.
    query = """
        SELECT fromX, toX, fromY, toY, chrOffset, importance, fields, uid
        FROM intervals,position_index
        WHERE
            intervals.id=position_index.id AND
            zoomLevel <= ? AND
            rToX >= ? AND
            rFromX <= ? AND
            rToY >= ? AND
            rFromY <= ?
        """
    conn = sqlite3.connect(db_file)
    try:
        rows = conn.cursor().execute(
            query,
            (zoom, tile_x_start_pos, tile_x_end_pos, tile_y_start_pos, tile_y_end_pos),
        ).fetchall()
    finally:
        # Close even when the query raises; the original leaked on error.
        conn.close()

    new_rows = col.defaultdict(list)
    for r in rows:
        x_start, x_end, y_start, y_end = r[0], r[1], r[2], r[3]
        try:
            uid = r[7].decode("utf-8")
        except AttributeError:
            # Already a str (TEXT column), not bytes.
            uid = r[7]
        for i in range(tile_x_pos, tile_x_pos + numx):
            for j in range(tile_y_pos, tile_y_pos + numy):
                tile_x_start = i * tile_width
                tile_x_end = (i + 1) * tile_width
                tile_y_start = j * tile_width
                tile_y_end = (j + 1) * tile_width

                # Keep intervals overlapping tile (i, j) on both axes.
                if (
                    x_start < tile_x_end
                    and x_end >= tile_x_start
                    and y_start < tile_y_end
                    and y_end >= tile_y_start
                ):
                    # Use the unpacked values consistently (the original
                    # mixed r[0..3] with the locals above).
                    new_rows[(i, j)].append(
                        {
                            "xStart": x_start,
                            "xEnd": x_end,
                            "yStart": y_start,
                            "yEnd": y_end,
                            "chrOffset": r[4],
                            "importance": r[5],
                            "uid": uid,
                            "fields": r[6].split("\t"),
                        }
                    )
    return new_rows
|
|
from __future__ import absolute_import
import re
from sentry.constants import TAG_LABELS
from sentry.tagstore.exceptions import (
TagKeyNotFound,
TagValueNotFound,
GroupTagKeyNotFound,
GroupTagValueNotFound,
)
from sentry.utils.services import Service, raises
# Valid pattern for tag key names
TAG_KEY_RE = re.compile(r"^[a-zA-Z0-9_\.:-]+$")
# Number of tag values to return by default for any query returning the "top"
# values for a tag.
TOP_VALUES_DEFAULT_LIMIT = 9
# These tags are special and are used in pairing with `sentry:{}`
# they should not be allowed to be set via data ingest due to ambiguity
INTERNAL_TAG_KEYS = frozenset(("release", "dist", "user", "filename", "function"))
# TODO(dcramer): pull in enum library
class TagKeyStatus(object):
    # Lifecycle states for a tag key, stored as plain ints.
    VISIBLE = 0
    PENDING_DELETION = 1
    DELETION_IN_PROGRESS = 2
class TagStorage(Service):
    """Abstract service interface for tag storage backends.

    The validation/label helpers below are concrete and shared; every
    method listed in ``__read_methods__`` raises ``NotImplementedError``
    here and must be provided by a concrete backend.
    """

    # Names that make up the read portion of the service interface.
    __read_methods__ = frozenset(
        [
            "get_tag_key",
            "get_tag_keys",
            "get_tag_value",
            "get_tag_values",
            "get_group_tag_key",
            "get_group_tag_keys",
            "get_group_tag_value",
            "get_group_tag_values",
            "get_group_list_tag_value",
            "get_tag_keys_for_projects",
            "get_groups_user_counts",
            "get_group_event_filter",
            "get_group_tag_value_count",
            "get_top_group_tag_values",
            "get_first_release",
            "get_last_release",
            "get_release_tags",
            "get_group_ids_for_users",
            "get_group_tag_values_for_users",
            "get_group_tag_keys_and_top_values",
            "get_tag_value_paginator",
            "get_group_tag_value_paginator",
            "get_tag_value_paginator_for_projects",
            "get_group_tag_value_iter",
            "get_group_tag_value_qs",
            "get_group_seen_values_for_environments",
        ]
    )

    # Full public API: the concrete helpers plus every read method.
    __all__ = (
        frozenset(
            [
                "is_valid_key",
                "is_valid_value",
                "is_reserved_key",
                "prefix_reserved_key",
                "get_standardized_key",
                "get_tag_key_label",
                "get_tag_value_label",
            ]
        )
        | __read_methods__
    )

    def setup_merge(self, grouptagkey_model, grouptagvalue_model):
        # Register tag models so group merges also merge their tag rows.
        from sentry.tasks import merge

        merge.EXTRA_MERGE_MODELS += [grouptagvalue_model, grouptagkey_model]

    def is_valid_key(self, key):
        # Keys are limited to alphanumerics plus "_", ".", ":" and "-".
        return bool(TAG_KEY_RE.match(key))

    def is_valid_value(self, value):
        # Newlines would break storage and display, so reject them.
        return "\n" not in value

    def is_reserved_key(self, key):
        return key in INTERNAL_TAG_KEYS

    def prefix_reserved_key(self, key):
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if self.is_reserved_key(key):
            return u"sentry:{0}".format(key)
        else:
            return key

    def get_standardized_key(self, key):
        # Strip an internal "sentry:" prefix, if present.
        return key.split("sentry:", 1)[-1]

    def get_tag_key_label(self, key):
        return TAG_LABELS.get(key) or key.replace("_", " ").title()

    def get_tag_value_label(self, key, value):
        # "sentry:user" values are stored as "<kind>:<value>"; show only
        # the value part.
        label = value

        if key == "sentry:user" and value:
            if value.startswith("id:"):
                label = value[len("id:") :]
            elif value.startswith("email:"):
                label = value[len("email:") :]
            elif value.startswith("username:"):
                label = value[len("username:") :]
            elif value.startswith("ip:"):
                label = value[len("ip:") :]

        return label

    @raises([TagKeyNotFound])
    def get_tag_key(self, project_id, environment_id, key, status=TagKeyStatus.VISIBLE):
        """
        >>> get_tag_key(1, 2, "key1")
        """
        raise NotImplementedError

    def get_tag_keys(
        self, project_id, environment_id, status=TagKeyStatus.VISIBLE, include_values_seen=False
    ):
        """
        >>> get_tag_keys(1, 2)
        """
        raise NotImplementedError

    def get_tag_keys_for_projects(
        self, projects, environments, start, end, status=TagKeyStatus.VISIBLE
    ):
        """
        >>> get_tag_key([1], [2])
        """
        raise NotImplementedError

    @raises([TagValueNotFound])
    def get_tag_value(self, project_id, environment_id, key, value):
        """
        >>> get_tag_value(1, 2, "key1", "value1")
        """
        raise NotImplementedError

    def get_tag_values(self, project_id, environment_id, key):
        """
        >>> get_tag_values(1, 2, "key1")
        """
        raise NotImplementedError

    @raises([GroupTagKeyNotFound])
    def get_group_tag_key(self, project_id, group_id, environment_id, key):
        """
        >>> get_group_tag_key(1, 2, 3, "key1")
        """
        raise NotImplementedError

    def get_group_tag_keys(self, project_id, group_id, environment_ids, limit=None, keys=None):
        """
        >>> get_group_tag_key(1, 2, [3])
        """
        raise NotImplementedError

    @raises([GroupTagValueNotFound])
    def get_group_tag_value(self, project_id, group_id, environment_id, key, value):
        """
        >>> get_group_tag_value(1, 2, 3, "key1", "value1")
        """
        raise NotImplementedError

    def get_group_tag_values(self, project_id, group_id, environment_id, key):
        """
        >>> get_group_tag_values(1, 2, 3, "key1")
        """
        raise NotImplementedError

    def get_group_list_tag_value(self, project_ids, group_id_list, environment_ids, key, value):
        """
        >>> get_group_tag_value([1, 2], [1, 2, 3, 4, 5], [3], "key1", "value1")
        """
        raise NotImplementedError

    def get_group_event_filter(self, project_id, group_id, environment_ids, tags, start, end):
        """
        >>> get_group_event_filter(1, 2, 3, {'key1': 'value1', 'key2': 'value2'})
        """
        raise NotImplementedError

    def get_tag_value_paginator(
        self, project_id, environment_id, key, query=None, order_by="-last_seen"
    ):
        """
        >>> get_tag_value_paginator(1, 2, 'environment', query='prod')
        """
        raise NotImplementedError

    def get_tag_value_paginator_for_projects(
        self, projects, environments, key, start, end, query=None, order_by="-last_seen"
    ):
        """
        Includes tags and also snuba columns, with the arrayjoin when they are nested.
        Also supports a query parameter to do a substring match on the tag/column values.
        >>> get_tag_value_paginator_for_projects([1], [2], 'environment', query='prod')
        """
        raise NotImplementedError

    def get_group_tag_value_iter(
        self, project_id, group_id, environment_ids, key, callbacks=(), offset=0
    ):
        """
        >>> get_group_tag_value_iter(1, 2, 3, 'environment')
        """
        raise NotImplementedError

    def get_group_tag_value_paginator(
        self, project_id, group_id, environment_ids, key, order_by="-id"
    ):
        """
        >>> get_group_tag_value_paginator(1, 2, 3, 'environment')
        """
        raise NotImplementedError

    def get_group_tag_value_qs(self, project_id, group_id, environment_id, key, value=None):
        """
        >>> get_group_tag_value_qs(1, 2, 3, 'environment')
        """
        raise NotImplementedError

    def get_groups_user_counts(self, project_ids, group_ids, environment_ids, start=None, end=None):
        """
        >>> get_groups_user_counts([1, 2], [2, 3], [4, 5])

        `start` and `end` are only used by the snuba backend
        """
        raise NotImplementedError

    def get_group_tag_value_count(self, project_id, group_id, environment_id, key):
        """
        >>> get_group_tag_value_count(1, 2, 3, 'key1')
        """
        raise NotImplementedError

    def get_top_group_tag_values(
        self, project_id, group_id, environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT
    ):
        """
        >>> get_top_group_tag_values(1, 2, 3, 'key1')
        """
        raise NotImplementedError

    def get_first_release(self, project_id, group_id):
        """
        >>> get_first_release(1, 2)
        """
        raise NotImplementedError

    def get_last_release(self, project_id, group_id):
        """
        >>> get_last_release(1, 2)
        """
        raise NotImplementedError

    def get_release_tags(self, project_ids, environment_id, versions):
        """
        >>> get_release_tags([1, 2], 3, ["1", "2"])
        """
        raise NotImplementedError

    def get_group_ids_for_users(self, project_ids, event_users, limit=100):
        """
        >>> get_group_ids_for_users([1,2], [EventUser(1), EventUser(2)])
        """
        raise NotImplementedError

    def get_group_tag_values_for_users(self, event_users, limit=100):
        """
        >>> get_group_tag_values_for_users([EventUser(1), EventUser(2)])
        """
        raise NotImplementedError

    def get_group_tag_keys_and_top_values(
        self,
        project_id,
        group_id,
        environment_ids,
        keys=None,
        value_limit=TOP_VALUES_DEFAULT_LIMIT,
        **kwargs
    ):
        # only the snuba backend supports multi env, and that overrides this method
        if environment_ids and len(environment_ids) > 1:
            environment_ids = environment_ids[:1]

        # If keys is unspecified, we will grab all tag keys for this group.
        tag_keys = self.get_group_tag_keys(project_id, group_id, environment_ids, keys=keys)

        environment_id = environment_ids[0] if environment_ids else None
        for tk in tag_keys:
            tk.top_values = self.get_top_group_tag_values(
                project_id, group_id, environment_id, tk.key, limit=value_limit
            )
            # Backfill the count lazily when the backend did not set it.
            if tk.count is None:
                tk.count = self.get_group_tag_value_count(
                    project_id, group_id, environment_id, tk.key
                )

        return tag_keys

    def get_group_seen_values_for_environments(
        self, project_ids, group_id_list, environment_ids, start=None, end=None
    ):
        raise NotImplementedError
|
|
#!/usr/bin/env python
"""
Usage:
./top_filters <path_to_a_saved_DBM.pkl> <optional: output path prefix>
Displays the matrix product of the layer 1 and layer 2 weights.
Also displays a grid visualization the connections in more detail.
Row i of the grid corresponds to the second layer hidden unit
with the ith largest filter norm.
Grid cell (i,j) shows the filter for the first layer unit with the
jth largest weight going into the second layer unit for this row.
The cells is surrounded by a colored box.
Its brightness indicates the relative strength of the connection between
the first layer unit and second layer unit, and its color indicates
the sign of that connection (yellow = positive / excitatory,
magenta = negative / inhibitory).
Optionally saves these images as png files prefixed with
the given output path name instead of displaying them.
This can be useful when working over ssh.
"""
from __future__ import print_function
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
import numpy as np
import sys
from theano.compat.six.moves import xrange
from pylearn2.config import yaml_parse
from pylearn2.gui.patch_viewer import PatchViewer
from pylearn2.gui.patch_viewer import make_viewer
from pylearn2.utils import serial
def sort_layer2(W2):
    """Return a copy of ``W2`` with columns sorted by descending L2 norm.

    Parameters
    ----------
    W2 : ndarray
        Second-layer weight matrix, one filter per column.

    Returns
    -------
    ndarray
        A new array with reordered columns; ``W2`` itself is untouched.
    """
    print('Sorting so largest-norm layer 2 weights are plotted at the top')
    norms = np.square(W2).sum(axis=0)
    # Stable argsort on the negated norms reproduces the original
    # ordering exactly: descending norm, ties broken by column index.
    # (The original built this via sorted(zip(-norms, range(...))) and
    # also left a dead `W2 = new` assignment behind.)
    order = np.argsort(-norms, kind='stable')
    # Fancy indexing returns a fresh array, matching the old W2.copy().
    return W2[:, order]
def get_mat_product_viewer(W1, W2):
    """
    Build a viewer for the matrix product of 2 layers.

    Parameters
    ----------
    W1: ndarray
        First hidden layer weights.
    W2: ndarray
        Second hidden layer weights.

    Returns
    -------
    pv: PatchViewer
        Viewer with one patch per second-layer unit (rows of prod.T).
    """
    # Each column of the product is a second-layer unit expressed in
    # input space; transpose so make_viewer gets one example per row.
    prod = np.dot(W1, W2)
    pv = make_viewer(prod.T)
    return pv
def get_connections_viewer(imgs, W1, W2):
    """
    Build a grid viewer of connections between 2 hidden layers.

    Parameters
    ----------
    imgs: ndarray
        Images of weights from the first layer, indexed on axis 0.
    W1: ndarray
        First hidden layer weights.
    W2: ndarray
        Second hidden layer weights.

    Returns
    -------
    pv: PatchViewer
        One row per second-layer unit (largest norms first), one column
        per strongest incoming first-layer filter.
    """
    W2 = sort_layer2(W2)
    N1 = W1.shape[1]
    N = W2.shape[1]
    # Cap the number of rows so the grid stays displayable.
    N = min(N, 100)
    count = get_elements_count(N, N1, W2)
    pv = create_connect_viewer(N, N1, imgs, count, W2)
    return pv
def create_connect_viewer(N, N1, imgs, count, W2):
    """
    Create the patch grid showing connections between layers.

    Parameters
    ----------
    N: int
        Number of rows (second-layer units shown).
    N1: int
        Number of elements in the first layer.
    imgs: ndarray
        Images of weights from the first layer, indexed on axis 0.
    count: int
        Number of first-layer filters to show per row.
    W2: ndarray
        Second hidden layer weights, one unit per column.

    Returns
    -------
    pv: PatchViewer
        N x count grid; each patch border encodes the sign (yellow =
        excitatory, magenta = inhibitory) and relative strength of the
        connection.
    """
    pv = PatchViewer((N, count), imgs.shape[1:3], is_color=imgs.shape[3] == 3)
    for i in range(N):
        # Normalize a fresh array: the original did `w /= ...` on a view
        # of W2 and silently clobbered the caller's weights. Also drops
        # the unused wneg/wpos locals the original computed.
        w = W2[:, i] / np.abs(W2[:, i]).max()
        wa = np.abs(w)
        # Sort by magnitude; the index breaks ties deterministically.
        s = sorted(zip(wa, range(N1), w))
        for j in range(count):
            idx = s[N1 - j - 1][1]
            mag = s[N1 - j - 1][2]
            # Positive weights light the first (yellow) channel,
            # negative the second (magenta).
            act = (mag, 0) if mag > 0 else (0, -mag)
            pv.add_patch(imgs[idx, ...], rescale=True, activation=act)
    return pv
def get_elements_count(N, N1, W2):
    """
    Retrieve the number of elements to show per row.

    Finds the smallest k such that, for the neediest of the N
    second-layer units, the k largest-magnitude incoming weights cover
    at least 90% of that unit's total absolute weight, then clips the
    result to at most min(10, N1).

    Parameters
    ----------
    N: int
        Number of rows (second-layer units considered).
    N1: int
        Number of elements in the first layer.
    W2: ndarray
        Second hidden layer weights, one unit per column.

    Returns
    -------
    int
        Number of filters to display per row.
    """
    thresh = .9
    max_count = 0
    total_counts = 0.
    for i in range(N):
        w = W2[:, i]
        wa = np.abs(w)
        total = wa.sum()
        s = np.asarray(sorted(wa))
        # Grow the tail of the sorted magnitudes until it covers
        # `thresh` of the total mass.
        count = 1
        while s[-count:].sum() < thresh * total:
            count += 1
        if count > max_count:
            max_count = count
        total_counts += count
    ave = total_counts / float(N)
    print('average needed filters', ave)
    count = max_count
    # Fixed: the original printed '\%' (a literal backslash-percent).
    print('It takes', count, 'of', N1, 'elements to account for ',
          (thresh * 100.), '% of the weight in at least one filter')
    lim = 10
    if count > lim:
        count = lim
        print('Only displaying ', count, ' elements though.')
    if count > N1:
        count = N1
    return count
if __name__ == '__main__':
    # Usage: top_filters <model.pkl> [output_prefix]
    if len(sys.argv) == 2:
        _, model_path = sys.argv
        out_prefix = None
    else:
        _, model_path, out_prefix = sys.argv
    model = serial.load(model_path)
    # Only the first two hidden layers are visualized.
    layer_1, layer_2 = model.hidden_layers[0:2]
    W1 = layer_1.get_weights()
    W2 = layer_2.get_weights()
    print(W1.shape)
    print(W2.shape)
    mat_v = get_mat_product_viewer(W1, W2)
    # With a prefix, save to disk (useful over ssh); otherwise display.
    if out_prefix is None:
        mat_v.show()
    else:
        mat_v.save(out_prefix+"_prod.png")
    # Render first-layer weights in input space for the connection grid.
    dataset_yaml_src = model.dataset_yaml_src
    dataset = yaml_parse.load(dataset_yaml_src)
    imgs = dataset.get_weights_view(W1.T)
    conn_v = get_connections_viewer(imgs, W1, W2)
    if out_prefix is None:
        conn_v.show()
    else:
        conn_v.save(out_prefix+".png")
|
|
import sys
import threading
import warnings
import weakref
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.inspect import func_accepts_kwargs
from django.utils.six.moves import range
if six.PY2:
from .weakref_backports import WeakMethod
else:
from weakref import WeakMethod
def _make_id(target):
if hasattr(target, '__func__'):
return (id(target.__self__), id(target.__func__))
return id(target)
NONE_ID = _make_id(None)
# A marker for caching
NO_RECEIVERS = object()
class Signal(object):
    """
    Base class for all signals

    Internal attributes:

        receivers
            { receiverkey (id) : weakref(receiver) }
    """
    def __init__(self, providing_args=None, use_caching=False):
        """
        Create a new signal.

        providing_args
            A list of the arguments this signal can pass along in a send() call.
        """
        self.receivers = []
        if providing_args is None:
            providing_args = []
        self.providing_args = set(providing_args)
        self.lock = threading.Lock()
        self.use_caching = use_caching
        # For convenience we create empty caches even if they are not used.
        # A note about caching: if use_caching is defined, then for each
        # distinct sender we cache the receivers that sender has in
        # 'sender_receivers_cache'. The cache is cleaned when .connect() or
        # .disconnect() is called and populated on send().
        self.sender_receivers_cache = weakref.WeakKeyDictionary() if use_caching else {}
        # Flag set (not under lock) by _remove_receiver when a weakref
        # dies; the actual cleanup happens later under self.lock.
        self._dead_receivers = False

    def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
        """
        Connect receiver to sender for signal.

        Arguments:

            receiver
                A function or an instance method which is to receive signals.
                Receivers must be hashable objects.

                If weak is True, then receiver must be weak referenceable.

                Receivers must be able to accept keyword arguments.

                If a receiver is connected with a dispatch_uid argument, it
                will not be added if another receiver was already connected
                with that dispatch_uid.

            sender
                The sender to which the receiver should respond. Must either be
                a Python object, or None to receive events from any sender.

            weak
                Whether to use weak references to the receiver. By default, the
                module will attempt to use weak references to the receiver
                objects. If this parameter is false, then strong references will
                be used.

            dispatch_uid
                An identifier used to uniquely identify a particular instance of
                a receiver. This will usually be a string, though it may be
                anything hashable.
        """
        from django.conf import settings

        # If DEBUG is on, check that we got a good receiver
        if settings.configured and settings.DEBUG:
            assert callable(receiver), "Signal receivers must be callable."

            # Check for **kwargs
            if not func_accepts_kwargs(receiver):
                raise ValueError("Signal receivers must accept keyword arguments (**kwargs).")

        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        if weak:
            ref = weakref.ref
            receiver_object = receiver
            # Check for bound methods: a plain weakref to a bound method
            # would die immediately, so wrap it in WeakMethod instead.
            if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
                ref = WeakMethod
                receiver_object = receiver.__self__
            if six.PY3:
                receiver = ref(receiver)
                weakref.finalize(receiver_object, self._remove_receiver)
            else:
                receiver = ref(receiver, self._remove_receiver)

        with self.lock:
            self._clear_dead_receivers()
            # Only append if no receiver with this lookup_key exists yet.
            for r_key, _ in self.receivers:
                if r_key == lookup_key:
                    break
            else:
                self.receivers.append((lookup_key, receiver))
            self.sender_receivers_cache.clear()

    def disconnect(self, receiver=None, sender=None, weak=None, dispatch_uid=None):
        """
        Disconnect receiver from sender for signal.

        If weak references are used, disconnect need not be called. The receiver
        will be removed from dispatch automatically.

        Arguments:

            receiver
                The registered receiver to disconnect. May be none if
                dispatch_uid is specified.

            sender
                The registered sender to disconnect

            dispatch_uid
                the unique identifier of the receiver to disconnect
        """
        if weak is not None:
            warnings.warn("Passing `weak` to disconnect has no effect.", RemovedInDjango20Warning, stacklevel=2)
        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        disconnected = False
        with self.lock:
            self._clear_dead_receivers()
            # At most one entry can match: connect() refuses duplicates.
            for index in range(len(self.receivers)):
                (r_key, _) = self.receivers[index]
                if r_key == lookup_key:
                    disconnected = True
                    del self.receivers[index]
                    break
            self.sender_receivers_cache.clear()
        return disconnected

    def has_listeners(self, sender=None):
        # True if at least one live receiver would be called for `sender`.
        return bool(self._live_receivers(sender))

    def send(self, sender, **named):
        """
        Send signal from sender to all connected receivers.

        If any receiver raises an error, the error propagates back through send,
        terminating the dispatch loop. So it's possible that all receivers
        won't be called if an error is raised.

        Arguments:

            sender
                The sender of the signal. Either a specific object or None.

            named
                Named arguments which will be passed to receivers.

        Returns a list of tuple pairs [(receiver, response), ... ].
        """
        responses = []
        if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
            return responses

        for receiver in self._live_receivers(sender):
            response = receiver(signal=self, sender=sender, **named)
            responses.append((receiver, response))
        return responses

    def send_robust(self, sender, **named):
        """
        Send signal from sender to all connected receivers catching errors.

        Arguments:

            sender
                The sender of the signal. Can be any python object (normally one
                registered with a connect if you actually want something to
                occur).

            named
                Named arguments which will be passed to receivers. These
                arguments must be a subset of the argument names defined in
                providing_args.

        Return a list of tuple pairs [(receiver, response), ... ]. May raise
        DispatcherKeyError.

        If any receiver raises an error (specifically any subclass of
        Exception), the error instance is returned as the result for that
        receiver. The traceback is always attached to the error at
        ``__traceback__``.
        """
        responses = []
        if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
            return responses

        # Call each receiver with whatever arguments it can accept.
        # Return a list of tuple pairs [(receiver, response), ... ].
        for receiver in self._live_receivers(sender):
            try:
                response = receiver(signal=self, sender=sender, **named)
            except Exception as err:
                # Python 2 exceptions have no __traceback__; attach it so
                # callers can always inspect where the failure happened.
                if not hasattr(err, '__traceback__'):
                    err.__traceback__ = sys.exc_info()[2]
                responses.append((receiver, err))
            else:
                responses.append((receiver, response))
        return responses

    def _clear_dead_receivers(self):
        # Note: caller is assumed to hold self.lock.
        if self._dead_receivers:
            self._dead_receivers = False
            new_receivers = []
            for r in self.receivers:
                # Keep strong references and still-alive weakrefs.
                if isinstance(r[1], weakref.ReferenceType) and r[1]() is None:
                    continue
                new_receivers.append(r)
            self.receivers = new_receivers

    def _live_receivers(self, sender):
        """
        Filter sequence of receivers to get resolved, live receivers.

        This checks for weak references and resolves them, then returning only
        live receivers.
        """
        receivers = None
        if self.use_caching and not self._dead_receivers:
            receivers = self.sender_receivers_cache.get(sender)
            # We could end up here with NO_RECEIVERS even if we do check this case in
            # .send() prior to calling _live_receivers() due to concurrent .send() call.
            if receivers is NO_RECEIVERS:
                return []
        if receivers is None:
            with self.lock:
                self._clear_dead_receivers()
                senderkey = _make_id(sender)
                receivers = []
                for (receiverkey, r_senderkey), receiver in self.receivers:
                    # Match wildcard registrations (sender=None) and
                    # registrations for this specific sender.
                    if r_senderkey == NONE_ID or r_senderkey == senderkey:
                        receivers.append(receiver)
                if self.use_caching:
                    if not receivers:
                        self.sender_receivers_cache[sender] = NO_RECEIVERS
                    else:
                        # Note, we must cache the weakref versions.
                        self.sender_receivers_cache[sender] = receivers
        non_weak_receivers = []
        for receiver in receivers:
            if isinstance(receiver, weakref.ReferenceType):
                # Dereference the weak reference.
                receiver = receiver()
                if receiver is not None:
                    non_weak_receivers.append(receiver)
            else:
                non_weak_receivers.append(receiver)
        return non_weak_receivers

    def _remove_receiver(self, receiver=None):
        # Mark that the self.receivers list has dead weakrefs. If so, we will
        # clean those up in connect, disconnect and _live_receivers while
        # holding self.lock. Note that doing the cleanup here isn't a good
        # idea, _remove_receiver() will be called as side effect of garbage
        # collection, and so the call can happen while we are already holding
        # self.lock.
        self._dead_receivers = True
def receiver(signal, **kwargs):
    """
    A decorator for connecting receivers to signals. Used by passing in the
    signal (or list of signals) and keyword arguments to connect::

        @receiver(post_save, sender=MyModel)
        def signal_receiver(sender, **kwargs):
            ...

        @receiver([post_save, post_delete], sender=MyModel)
        def signals_receiver(sender, **kwargs):
            ...
    """
    def _decorator(func):
        # Normalize: a single signal behaves like a one-element list.
        signals = signal if isinstance(signal, (list, tuple)) else [signal]
        for sig in signals:
            sig.connect(func, **kwargs)
        return func
    return _decorator
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class AssociationsOperations(object):
"""AssociationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.customproviders.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
    # Pipeline client used to build and send HTTP requests.
    self._client = client
    # msrest-style serializer/deserializer pair for request and
    # response bodies.
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
def _create_or_update_initial(
    self,
    scope,  # type: str
    association_name,  # type: str
    association,  # type: "_models.Association"
    **kwargs  # type: Any
):
    # type: (...) -> "_models.Association"
    """Send the initial (non-polling) PUT of the create-or-update LRO."""
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Association"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2018-09-01-preview"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self._create_or_update_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
        'associationName': self._serialize.url("association_name", association_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(association, 'Association')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    # Both 200 (updated) and 201 (created) carry an Association body.
    if response.status_code == 200:
        deserialized = self._deserialize('Association', pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize('Association', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_create_or_update_initial.metadata = {'url': '/{scope}/providers/Microsoft.CustomProviders/associations/{associationName}'}  # type: ignore
def begin_create_or_update(
    self,
    scope,  # type: str
    association_name,  # type: str
    association,  # type: "_models.Association"
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.Association"]
    """Create or update an association.

    :param scope: The scope of the association. The scope can be any valid REST resource instance.
     For example, use '/subscriptions/{subscription-id}/resourceGroups/{resource-group-
     name}/providers/Microsoft.Compute/virtualMachines/{vm-name}' for a virtual machine resource.
    :type scope: str
    :param association_name: The name of the association.
    :type association_name: str
    :param association: The parameters required to create or update an association.
    :type association: ~azure.mgmt.customproviders.models.Association
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the ARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either Association or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.customproviders.models.Association]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Association"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    # Without a continuation token, kick off the operation with the
    # initial PUT; with one, resume polling an operation already started.
    if cont_token is None:
        raw_result = self._create_or_update_initial(
            scope=scope,
            association_name=association_name,
            association=association,
            cls=lambda x,y,z: x,
            **kwargs
        )

    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Final-state callback: deserialize the terminal response body.
        deserialized = self._deserialize('Association', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
        'associationName': self._serialize.url("association_name", association_name, 'str'),
    }

    if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/{scope}/providers/Microsoft.CustomProviders/associations/{associationName}'}  # type: ignore
    def _delete_initial(
        self,
        scope,  # type: str
        association_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Issue the initial DELETE request of the long-running delete.

        Called by :meth:`begin_delete`; returns ``None`` (or the result of
        the ``cls`` callback) once the service accepts the request.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-09-01-preview"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
            'associationName': self._serialize.url("association_name", association_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202/204 are all acceptable responses for an async delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/{scope}/providers/Microsoft.CustomProviders/associations/{associationName}'}  # type: ignore
    def begin_delete(
        self,
        scope,  # type: str
        association_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Delete an association.

        :param scope: The scope of the association.
        :type scope: str
        :param association_name: The name of the association.
        :type association_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Fresh operation: issue the initial DELETE; a continuation token
        # means we are resuming an LRO that was started elsewhere.
        if cont_token is None:
            raw_result = self._delete_initial(
                scope=scope,
                association_name=association_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These kwargs were consumed by the initial call; drop them so they
        # do not leak into the polling pipeline below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete produces no body; only invoke the custom callback if given.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
            'associationName': self._serialize.url("association_name", association_name, 'str'),
        }
        # Choose the polling strategy: default ARM polling, none, or a custom object.
        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/{scope}/providers/Microsoft.CustomProviders/associations/{associationName}'}  # type: ignore
def get(
self,
scope, # type: str
association_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.Association"
"""Get an association.
:param scope: The scope of the association.
:type scope: str
:param association_name: The name of the association.
:type association_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Association, or the result of cls(response)
:rtype: ~azure.mgmt.customproviders.models.Association
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Association"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-09-01-preview"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'associationName': self._serialize.url("association_name", association_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Association', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/{scope}/providers/Microsoft.CustomProviders/associations/{associationName}'} # type: ignore
    def list_all(
        self,
        scope,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.AssociationsList"]
        """Gets all association for the given scope.

        :param scope: The scope of the association.
        :type scope: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AssociationsList or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.customproviders.models.AssociationsList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AssociationsList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-09-01-preview"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the GET request for one page: the first page expands the
            # URL template; later pages use the service-provided next_link verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_all.metadata['url']  # type: ignore
                path_format_arguments = {
                    'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Deserialize one page; return (link to next page or None, item iterator).
            deserialized = self._deserialize('AssociationsList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch one page, raising an ARM-formatted error on any non-200 status.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/{scope}/providers/Microsoft.CustomProviders/associations'}  # type: ignore
|
|
# -*- coding: utf-8 -*-
import pytest
import mock
usefixtures = pytest.mark.usefixtures
parametrize = pytest.mark.parametrize
@pytest.fixture
def operation():
    """Import and return the ``operation`` module under test."""
    import operation
    return operation
@pytest.fixture
def model():
    """Import and return the ``model`` module (peewee models) under test."""
    import model
    return model
@usefixtures('database')
@parametrize('count, offset, priority, order', [
    (10, 0, 0, 'asc')
])
def test_get_unread_entries_as_dict(
        operation, count, offset, priority, order):
    """Every unread entry should be rendered as a plain dict."""
    entry_dicts = operation.get_unread_entries_as_dict(
        count, offset, priority, order
    )
    assert all(isinstance(entry, dict) for entry in entry_dicts)
@usefixtures('database')
@parametrize('count, offset, priority, order', [
    (5, 0, 0, 'asc'),
    (5, 5, 0, 'asc'),
    (5, 0, 5, 'asc'),
    (5, 0, 0, 'desc')
])
def test_get_unread_entries(operation, model, count, offset, priority, order):
    """Check count, offset, priority and ordering of get_unread_entries."""
    entries = operation.get_unread_entries(count, offset, priority, order)
    # count: never more than requested, and no more than actually available
    available = len(list(
        model.Entry.select().join(model.Feed).where(
            model.Feed.priority >= priority,
            model.Entry.is_read == False
        ).offset(offset).limit(count)
    ))
    expected_len = available if available < count else count
    assert len(entries) == expected_len
    # offset: entries skipped at the head must not reappear in the result
    entries_from_head = operation.get_unread_entries(count, 0, priority, order)
    for skipped in entries_from_head[:offset]:
        assert skipped not in entries
    # priority: every entry comes from a feed with sufficient priority
    assert all(entry.feed.priority >= priority for entry in entries)
    # order: created_date monotonic in the requested direction
    for earlier, later in zip(entries, entries[1:]):
        if order == 'asc':
            assert earlier.created_date <= later.created_date
        else:  # 'desc'
            assert earlier.created_date >= later.created_date
    # is_read: only unread entries are returned
    assert all(entry.is_read is False for entry in entries)
@usefixtures('database')
def test_read_entries(operation, model):
    """read_entries marks the given entries read and leaves the rest unread."""
    unread_entries = list(
        model.Entry.select().where(model.Entry.is_read == False)
    )
    # Mark half of the unread entries as read.  Integer division is
    # required: on Python 3, len(...) / 2 is a float and raises
    # "TypeError: slice indices must be integers" when used as a slice bound.
    if unread_entries:
        read_targets = unread_entries[:len(unread_entries) // 2]
    else:
        read_targets = []
    target_id_list = [e.id for e in read_targets]
    operation.read_entries(target_id_list)
    # all target entries are changed to be read
    assert all(map(
        lambda e: e.is_read is True,
        model.Entry.select().where(model.Entry.id << target_id_list)
    ))
    # number of read entries
    assert model.Entry.select().where(
        model.Entry.is_read == False
    ).count() == len(unread_entries) - len(read_targets)
@usefixtures('database')
@parametrize('changed', [
    ({'is_read': True, 'is_liked': True, 'is_disliked': True}),
    ({'is_read': False, 'is_liked': False, 'is_disliked': False})
])
def test_update_entry(operation, model, changed):
    """update_entry applies every changed field to the stored entry."""
    entry = model.Entry.select().first()
    if entry:
        operation.update_entry(entry.id, changed)
        for field, expected in changed.items():
            assert getattr(model.Entry.get(id=entry.id), field) == expected
@usefixtures('database')
def test_get_feeds_as_dict(operation):
    """Feeds are returned as plain dicts."""
    feed_dicts = operation.get_feeds_as_dict()
    assert all(isinstance(feed, dict) for feed in feed_dicts)
@usefixtures('database')
def test_get_feeds(operation, model):
    """get_feeds returns every feed, newest first."""
    feeds = operation.get_feeds()
    # number of all feeds
    assert len(feeds) == model.Feed.select().count()
    # feeds order: created_date must be non-increasing
    for newer, older in zip(feeds, feeds[1:]):
        assert newer.created_date >= older.created_date
@usefixtures('database')
def test_feed_exists(operation, model):
    """feed_exists is True for a stored URL and False for an unknown one."""
    feed = model.Feed.select().first()
    if feed is not None:
        assert operation.feed_exists(feed.url)
    assert not operation.feed_exists('http://notexists.example.com/feed')
@usefixtures('database')
def test_add_feed(operation, model):
    """add_feed parses the URL and stores exactly one new feed."""
    feeds_before = model.Feed.select().count()
    url = 'http://new.example.com/feed'
    with mock.patch('feedparser.parse') as parse_mock:
        operation.add_feed(url)
        assert parse_mock.call_args[0][0] == url
    assert model.Feed.select().count() == feeds_before + 1
@usefixtures('database')
@parametrize('changed', [
    ({'priority': 0, 'is_disabled': True}),
    ({'priority': 5, 'is_disabled': False})
])
def test_update_feed(operation, model, changed):
    """update_feed applies every changed field to the stored feed."""
    feed = model.Feed.select().first()
    operation.update_feed(feed.id, changed)
    for field, expected in changed.items():
        assert getattr(model.Feed.get(id=feed.id), field) == expected
@usefixtures('database')
def test_fetch_entries(operation, model):
    """fetch_entries builds Entry objects from the parsed feed data."""
    raw_feed = mock.Mock()
    raw_feed.entries = [mock.Mock()]
    feed = model.Feed(title='test', url='http://new.example.com/feed')
    with mock.patch('feedparser.parse', return_value=raw_feed) as parse_mock:
        entries = operation.fetch_entries(feed)
        assert parse_mock.call_args[0][0] == feed.url
        first_entry = entries[0]
        raw_entry = raw_feed.entries[0]
        assert first_entry.title == raw_entry.title
        assert first_entry.url == raw_entry.link
        assert first_entry.feed is feed
@usefixtures('database')
def test_entry_exists(operation, model):
    """entry_exists becomes True once the entry has been saved."""
    feed = model.Feed.select().first()
    if feed:
        new_entry = model.Entry(
            title='test',
            url='http://new.example.com/entry',
            feed=feed
        )
        # not stored yet -> must not be reported as existing
        assert not operation.entry_exists(new_entry)
        new_entry.save()
        # stored -> must be reported as existing
        assert operation.entry_exists(new_entry)
@usefixtures('database')
def test_filter_new_entries(operation, model):
    """filter_new_entries keeps only entries that are not stored yet."""
    entries = list(model.Entry.select())
    feed = model.Feed.select().first()
    new_entries = []
    if feed:
        for index in range(10):
            new_entries.append(model.Entry(
                title='test',
                url='http://new.example.com/entry/{0}'.format(index),
                feed=feed
            ))
        entries.extend(new_entries)
    filtered = operation.filter_new_entries(entries)
    assert len(filtered) == len(new_entries)
    assert all(entry in filtered for entry in new_entries)
@usefixtures('database')
def test_add_entry(operation, model):
    """add_entry stores exactly one new entry."""
    entries_before = model.Entry.select().count()
    feed = model.Feed.select().first()
    new_entry = model.Entry(
        title='test',
        url='http://new.example.com/entry',
        feed=feed
    )
    operation.add_entry(new_entry)
    assert model.Entry.select().count() == entries_before + 1
@usefixtures('database')
def test_get_enabled_feeds(operation):
    """get_enabled_feeds returns no disabled feeds."""
    feeds = operation.get_enabled_feeds()
    assert all(feed.is_disabled == False for feed in feeds)
|
|
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from command import PagedCommand
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import glob
import itertools
import os
import sys
import StringIO
from color import Coloring
class Status(PagedCommand):
  # `repo status` subcommand.  NOTE: this is Python 2 era code (StringIO
  # module, iterator .next()); do not modernize constructs piecemeal.
  common = True
  helpSummary = "Show the working tree status"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
'%prog' compares the working tree to the staging area (aka index),
and the most recent commit on this branch (HEAD), in each project
specified. A summary is displayed, one line per file where there
is a difference between these three states.
The -j/--jobs option can be used to run multiple status queries
in parallel.
The -o/--orphans option can be used to show objects that are in
the working directory, but not associated with a repo project.
This includes unmanaged top-level files and directories, but also
includes deeper items. For example, if dir/subdir/proj1 and
dir/subdir/proj2 are repo projects, dir/subdir/proj3 will be shown
if it is not known to repo.
Status Display
--------------
The status display is organized into three columns of information,
for example if the file 'subcmds/status.py' is modified in the
project 'repo' on branch 'devwork':
 project repo/ branch devwork
 -m subcmds/status.py
The first column explains how the staging area (index) differs from
the last commit (HEAD). Its values are always displayed in upper
case and have the following meanings:
 -: no difference
 A: added (not in HEAD, in index )
 M: modified ( in HEAD, in index, different content )
 D: deleted ( in HEAD, not in index )
 R: renamed (not in HEAD, in index, path changed )
 C: copied (not in HEAD, in index, copied from another)
 T: mode changed ( in HEAD, in index, same content )
 U: unmerged; conflict resolution required
The second column explains how the working directory differs from
the index. Its values are always displayed in lower case and have
the following meanings:
 -: new / unknown (not in index, in work tree )
 m: modified ( in index, in work tree, modified )
 d: deleted ( in index, not in work tree )
"""
  def _Options(self, p):
    # -j runs per-project status checks concurrently; -o additionally scans
    # for files/directories not owned by any repo project.
    p.add_option('-j', '--jobs',
                 dest='jobs', action='store', type='int', default=2,
                 help="number of projects to check simultaneously")
    p.add_option('-o', '--orphans',
                 dest='orphans', action='store_true',
                 help="include objects in working directory outside of repo projects")
  def _StatusHelper(self, project, clean_counter, sem, output):
    """Obtains the status for a specific project.

    Obtains the status for a project, redirecting the output to
    the specified object. It will release the semaphore
    when done.

    Args:
      project: Project to get status of.
      clean_counter: Counter for clean projects.
      sem: Semaphore, will call release() when complete.
      output: Where to output the status.
    """
    try:
      state = project.PrintWorkTreeStatus(output)
      if state == 'CLEAN':
        # clean_counter is an itertools.count(); next() tallies one more
        # clean project.
        clean_counter.next()
    finally:
      sem.release()
  def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
    """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'"""
    status_header = ' --\t'
    for item in dirs:
      if not os.path.isdir(item):
        # A plain file at this level is always an orphan.
        outstring.write(''.join([status_header, item]))
        continue
      if item in proj_dirs:
        # The directory is itself a project: not an orphan, don't descend.
        continue
      if item in proj_dirs_parents:
        # The directory contains projects somewhere below: recurse to find
        # orphans among its children (including dotfiles).
        self._FindOrphans(glob.glob('%s/.*' % item) + \
            glob.glob('%s/*' % item), \
            proj_dirs, proj_dirs_parents, outstring)
        continue
      outstring.write(''.join([status_header, item, '/']))
  def Execute(self, opt, args):
    all_projects = self.GetProjects(args)
    counter = itertools.count()
    if opt.jobs == 1:
      # Sequential path: print each project's status straight to stdout.
      for project in all_projects:
        state = project.PrintWorkTreeStatus()
        if state == 'CLEAN':
          counter.next()
    else:
      # Parallel path: each worker writes into its own buffer; buffers are
      # dumped in project order afterwards, so output stays deterministic.
      sem = _threading.Semaphore(opt.jobs)
      threads_and_output = []
      for project in all_projects:
        sem.acquire()
        class BufList(StringIO.StringIO):
          def dump(self, ostream):
            for entry in self.buflist:
              ostream.write(entry)
        output = BufList()
        t = _threading.Thread(target=self._StatusHelper,
                              args=(project, counter, sem, output))
        threads_and_output.append((t, output))
        t.daemon = True
        t.start()
      for (t, output) in threads_and_output:
        t.join()
        output.dump(sys.stdout)
        output.close()
    if len(all_projects) == counter.next():
      print('nothing to commit (working directory clean)')
    if opt.orphans:
      proj_dirs = set()
      proj_dirs_parents = set()
      # Collect every project path and all of its ancestor directories.
      for project in self.GetProjects(None, missing_ok=True):
        proj_dirs.add(project.relpath)
        (head, _tail) = os.path.split(project.relpath)
        while head != "":
          proj_dirs_parents.add(head)
          (head, _tail) = os.path.split(head)
      proj_dirs.add('.repo')
      class StatusColoring(Coloring):
        def __init__(self, config):
          Coloring.__init__(self, config, 'status')
          self.project = self.printer('header', attr = 'bold')
          self.untracked = self.printer('untracked', fg = 'red')
      orig_path = os.getcwd()
      try:
        # Orphan scan is relative to the top of the repo client checkout.
        os.chdir(self.manifest.topdir)
        outstring = StringIO.StringIO()
        self._FindOrphans(glob.glob('.*') + \
            glob.glob('*'), \
            proj_dirs, proj_dirs_parents, outstring)
        if outstring.buflist:
          output = StatusColoring(self.manifest.globalConfig)
          output.project('Objects not within a project (orphans)')
          output.nl()
          for entry in outstring.buflist:
            output.untracked(entry)
            output.nl()
        else:
          print('No orphan files or directories')
        outstring.close()
      finally:
        # Restore CWD.
        os.chdir(orig_path)
|
|
# -*- coding: utf-8 -*-
"""
treebeard.ns_tree
-----------------
Nested Sets Tree.
:copyright: 2008 by Gustavo Picon
:license: Apache License 2.0
An implementation of Nested Sets trees for Django 1.0+, as described by
`Joe Celko`_ in `Trees and Hierarchies in SQL for Smarties`_.
Nested sets have very efficient reads at the cost of high maintenance on
write/delete operations.
.. _`Joe Celko`: http://www.celko.com/
.. _`Trees and Hierarchies in SQL for Smarties`:
http://www.elsevier.com/wps/find/bookdescription.cws_home/702605/description
"""
import operator
from django.db.models import Q
from django.core import serializers
from django.db import models, transaction, connection
from treebeard.models import Node
from treebeard.exceptions import InvalidMoveToDescendant, PathOverflow
class NS_NodeQuerySet(models.query.QuerySet):
    """
    Custom queryset for the tree node manager.

    Needed only for the customized delete method.
    """
    def delete(self, removed_ranges=None):
        """
        Custom delete method, will remove all descendant nodes to ensure a
        consistent tree (no orphans)

        :returns: ``None``
        """
        if removed_ranges is not None:
            # we already know the children, let's call the default django
            # delete method and let it handle the removal of the user's
            # foreign keys...
            super(NS_NodeQuerySet, self).delete()
            cursor = connection.cursor()
            # Now closing the gap (Celko's trees book, page 62)
            # We do this for every gap that was left in the tree when the nodes
            # were removed. If many nodes were removed, we're going to update
            # the same nodes over and over again. This would be probably
            # cheaper precalculating the gapsize per intervals, or just do a
            # complete reordering of the tree (uses COUNT)...
            # Ranges are processed in reverse (right-to-left) so closing one
            # gap does not shift the lft/rgt values of gaps not yet closed.
            for tree_id, drop_lft, drop_rgt in sorted(removed_ranges,
                                                      reverse=True):
                sql, params = self.model._get_close_gap_sql(drop_lft, drop_rgt,
                                                            tree_id)
                cursor.execute(sql, params)
        else:
            # we'll have to manually run through all the nodes that are going
            # to be deleted and remove nodes from the list if an ancestor is
            # already getting removed, since that would be redundant
            removed = {}
            for node in self.order_by('tree_id', 'lft'):
                found = False
                for rid, rnode in removed.items():
                    if node.is_descendant_of(rnode):
                        found = True
                        break
                if not found:
                    removed[node.id] = node
            # ok, got the minimal list of nodes to remove...
            # we must also remove their descendants
            toremove = []
            ranges = []
            for id, node in removed.items():
                toremove.append(
                    Q(lft__range=(node.lft, node.rgt))&Q(tree_id=node.tree_id))
                ranges.append((node.tree_id, node.lft, node.rgt))
            if toremove:
                # NOTE(review): relies on the Python 2 builtin ``reduce``;
                # on Python 3 this would need functools.reduce.
                self.model.objects.filter(
                    reduce(operator.or_, toremove)).delete(
                        removed_ranges=ranges)
        transaction.commit_unless_managed()
class NS_NodeManager(models.Manager):
    """Custom manager for nodes."""
    def get_query_set(self):
        """Return the tree-aware :class:`NS_NodeQuerySet` as the default queryset."""
        queryset_class = NS_NodeQuerySet
        return queryset_class(self.model)
class NS_Node(Node):
"""
Abstract model to create your own Nested Sets Trees.
.. attribute:: node_order_by
Attribute: a list of model fields that will be used for node
ordering. When enabled, all tree operations will assume this ordering.
Example::
node_order_by = ['field1', 'field2', 'field3']
.. attribute:: depth
``PositiveIntegerField``, depth of a node in the tree. A root node
has a depth of *1*.
.. attribute:: lft
``PositiveIntegerField``
.. attribute:: rgt
``PositiveIntegerField``
.. attribute:: tree_id
``PositiveIntegerField``
.. warning::
Be very careful if you add a ``Meta`` class in your
:class:`ns_tree.NS_Node` subclass.
You must add an ordering attribute with two elements on it::
class Meta:
ordering = ['tree_id', 'lft']
If you don't, the tree won't work, since :class:`ns_tree.NS_Node`
completely depends on this attribute.
"""
node_order_by = []
lft = models.PositiveIntegerField(db_index=True)
rgt = models.PositiveIntegerField(db_index=True)
tree_id = models.PositiveIntegerField(db_index=True)
depth = models.PositiveIntegerField(db_index=True)
objects = NS_NodeManager()
    @classmethod
    def add_root(cls, **kwargs):
        """
        Adds a root node to the tree.

        See: :meth:`treebeard.Node.add_root`
        """
        # do we have a root node already?
        last_root = cls.get_last_root_node()
        if last_root and last_root.node_order_by:
            # there are root nodes and node_order_by has been set
            # delegate sorted insertion to add_sibling
            return last_root.add_sibling('sorted-sibling', **kwargs)
        if last_root:
            # adding the new root node as the last one
            #newtree_id = last_root.tree_id + 100
            newtree_id = last_root.tree_id + 1
        else:
            # adding the first root node
            #newtree_id = 100
            newtree_id = 1
        # creating the new object
        newobj = cls(**kwargs)
        newobj.depth = 1
        newobj.tree_id = newtree_id
        # a brand-new root is a leaf occupying the interval (1, 2)
        newobj.lft = 1
        #newobj.rgt = 400000000
        newobj.rgt = 2
        # saving the instance before returning it
        newobj.save()
        transaction.commit_unless_managed()
        return newobj
@classmethod
def _move_right(cls, tree_id, rgt, lftmove=False, incdec=2):
if lftmove:
lftop = '>='
else:
lftop = '>'
sql = 'UPDATE %(table)s ' \
' SET lft = CASE WHEN lft %(lftop)s %(parent_rgt)d ' \
' THEN lft %(incdec)+d ' \
' ELSE lft END, ' \
' rgt = CASE WHEN rgt >= %(parent_rgt)d ' \
' THEN rgt %(incdec)+d ' \
' ELSE rgt END ' \
' WHERE rgt >= %(parent_rgt)d AND ' \
' tree_id = %(tree_id)s' % {
'table': connection.ops.quote_name(cls._meta.db_table),
'parent_rgt': rgt,
'tree_id': tree_id,
'lftop': lftop,
'incdec': incdec}
return sql, []
@classmethod
def _move_tree_right(cls, tree_id):
sql = 'UPDATE %(table)s ' \
' SET tree_id = tree_id+1 ' \
' WHERE tree_id >= %(tree_id)d' % {
'table': connection.ops.quote_name(cls._meta.db_table),
'tree_id': tree_id}
return sql, []
def add_child(self, **kwargs):
"""
Adds a child to the node.
See: :meth:`treebeard.Node.add_child`
"""
if not self.is_leaf():
# there are child nodes, delegate insertion to add_sibling
if self.node_order_by:
pos = 'sorted-sibling'
else:
pos = 'last-sibling'
last_child = self.get_last_child()
tmp = self.__class__.objects.get(pk=self.id)
last_child._cached_parent_obj = self
return last_child.add_sibling(pos, **kwargs)
# we're adding the first child of this node
sql, params = self.__class__._move_right(self.tree_id, self.rgt, False,
2)
# creating a new object
newobj = self.__class__(**kwargs)
newobj.tree_id = self.tree_id
newobj.depth = self.depth + 1
newobj.lft = self.lft+1
newobj.rgt = self.lft+2
# this is just to update the cache
self.rgt = self.rgt+2
newobj._cached_parent_obj = self
cursor = connection.cursor()
cursor.execute(sql, params)
# saving the instance before returning it
newobj.save()
transaction.commit_unless_managed()
return newobj
    def add_sibling(self, pos=None, **kwargs):
        """
        Adds a new node as a sibling to the current node object.

        See: :meth:`treebeard.Node.add_sibling`
        """
        pos = self._fix_add_sibling_opts(pos)
        # creating a new object
        newobj = self.__class__(**kwargs)
        newobj.depth = self.depth
        sql = None
        target = self
        if target.is_root():
            # Root siblings are whole trees: the new node always occupies
            # interval (1, 2) in some tree_id slot.
            newobj.lft = 1
            newobj.rgt = 2
            if pos == 'sorted-sibling':
                siblings = list(target.get_sorted_pos_queryset(
                    target.get_siblings(), newobj))
                if siblings:
                    pos = 'left'
                    target = siblings[0]
                else:
                    pos = 'last-sibling'
            last_root = target.__class__.get_last_root_node()
            if pos == 'last-sibling' \
                    or (pos == 'right' and target == last_root):
                newobj.tree_id = last_root.tree_id + 1
            else:
                # Insert between existing trees: shift the trees at/after the
                # chosen slot one position to the right.
                newpos = {'first-sibling': 1,
                          'left': target.tree_id,
                          'right': target.tree_id + 1}[pos]
                sql, params = target.__class__._move_tree_right(newpos)
                newobj.tree_id = newpos
        else:
            newobj.tree_id = target.tree_id
            if pos == 'sorted-sibling':
                siblings = list(target.get_sorted_pos_queryset(
                    target.get_siblings(), newobj))
                if siblings:
                    pos = 'left'
                    target = siblings[0]
                else:
                    pos = 'last-sibling'
            if pos in ('left', 'right', 'first-sibling'):
                # Normalize 'right'/'left' into canonical positions so only
                # 'first-sibling'/'left'/'last-sibling' reach the
                # gap-opening code below.
                siblings = list(target.get_siblings())
                if pos == 'right':
                    if target == siblings[-1]:
                        pos = 'last-sibling'
                    else:
                        pos = 'left'
                        found = False
                        for node in siblings:
                            if found:
                                target = node
                                break
                            elif node == target:
                                found = True
                if pos == 'left':
                    if target == siblings[0]:
                        pos = 'first-sibling'
                if pos == 'first-sibling':
                    target = siblings[0]
            move_right = self.__class__._move_right
            if pos == 'last-sibling':
                newpos = target.get_parent().rgt
                sql, params = move_right(target.tree_id, newpos, False, 2)
            elif pos == 'first-sibling':
                newpos = target.lft
                sql, params = move_right(target.tree_id, newpos-1, False, 2)
            elif pos == 'left':
                newpos = target.lft
                sql, params = move_right(target.tree_id, newpos, True, 2)
            newobj.lft = newpos
            newobj.rgt = newpos + 1
        # saving the instance before returning it
        if sql:
            cursor = connection.cursor()
            cursor.execute(sql, params)
        newobj.save()
        transaction.commit_unless_managed()
        return newobj
    def move(self, target, pos=None):
        """
        Moves the current node and all it's descendants to a new position
        relative to another node.

        See: :meth:`treebeard.Node.move`
        """
        pos = self._fix_move_opts(pos)
        cls = self.__class__
        stmts = []
        parent = None
        if pos in ('first-child', 'last-child', 'sorted-child'):
            # moving to a child
            if target.is_leaf():
                parent = target
                pos = 'last-child'
            else:
                # Rewrite child positions as sibling positions relative to
                # the target's last child.
                target = target.get_last_child()
                pos = {'first-child': 'first-sibling',
                       'last-child': 'last-sibling',
                       'sorted-child': 'sorted-sibling'}[pos]
        if target.is_descendant_of(self):
            raise InvalidMoveToDescendant("Can't move node to a descendant.")
        if self == target and (
              (pos == 'left') or \
              (pos in ('right', 'last-sibling') and \
                target == target.get_last_sibling()) or \
              (pos == 'first-sibling' and \
                target == target.get_first_sibling())):
            # special cases, not actually moving the node so no need to UPDATE
            return
        if pos == 'sorted-sibling':
            siblings = list(target.get_sorted_pos_queryset(
                target.get_siblings(), self))
            if siblings:
                pos = 'left'
                target = siblings[0]
            else:
                pos = 'last-sibling'
        if pos in ('left', 'right', 'first-sibling'):
            # Normalize positions exactly as in add_sibling.
            siblings = list(target.get_siblings())
            if pos == 'right':
                if target == siblings[-1]:
                    pos = 'last-sibling'
                else:
                    pos = 'left'
                    found = False
                    for node in siblings:
                        if found:
                            target = node
                            break
                        elif node == target:
                            found = True
            if pos == 'left':
                if target == siblings[0]:
                    pos = 'first-sibling'
            if pos == 'first-sibling':
                target = siblings[0]
        # ok let's move this
        cursor = connection.cursor()
        move_right = cls._move_right
        gap = self.rgt - self.lft + 1
        sql = None
        target_tree = target.tree_id
        # first make a hole
        if pos == 'last-child':
            newpos = parent.rgt
            sql, params = move_right(target.tree_id, newpos, False, gap)
        elif target.is_root():
            newpos = 1
            if pos == 'last-sibling':
                target_tree = target.get_siblings().reverse()[0].tree_id + 1
            elif pos == 'first-sibling':
                target_tree = 1
                sql, params = cls._move_tree_right(1)
            elif pos == 'left':
                sql, params = cls._move_tree_right(target.tree_id)
        else:
            if pos == 'last-sibling':
                newpos = target.get_parent().rgt
                sql, params = move_right(target.tree_id, newpos, False, gap)
            elif pos == 'first-sibling':
                newpos = target.lft
                sql, params = move_right(target.tree_id, newpos-1, False, gap)
            elif pos == 'left':
                newpos = target.lft
                sql, params = move_right(target.tree_id, newpos, True, gap)
        if sql:
            cursor.execute(sql, params)
        # we reload 'self' because lft/rgt may have changed
        fromobj = cls.objects.get(pk=self.id)
        depthdiff = target.depth - fromobj.depth
        if parent:
            depthdiff += 1
        # move the tree to the hole
        sql = "UPDATE %(table)s " \
              " SET tree_id = %(target_tree)d, " \
              " lft = lft + %(jump)d , " \
              " rgt = rgt + %(jump)d , " \
              " depth = depth + %(depthdiff)d " \
              " WHERE tree_id = %(from_tree)d AND " \
              " lft BETWEEN %(fromlft)d AND %(fromrgt)d" % {
                  'table': connection.ops.quote_name(cls._meta.db_table),
                  'from_tree': fromobj.tree_id,
                  'target_tree': target_tree,
                  'jump': newpos - fromobj.lft,
                  'depthdiff': depthdiff,
                  'fromlft': fromobj.lft,
                  'fromrgt': fromobj.rgt}
        cursor.execute(sql, [])
        # close the gap
        sql, params = cls._get_close_gap_sql(fromobj.lft,
                                             fromobj.rgt, fromobj.tree_id)
        cursor.execute(sql, params)
        transaction.commit_unless_managed()
@classmethod
def _get_close_gap_sql(cls, drop_lft, drop_rgt, tree_id):
    """
    Build the raw SQL (and empty params list) that shifts lft/rgt values
    left to close the hole left by a removed/moved subtree in ``tree_id``.
    """
    # interpolated values are all internal integers / a quoted table name,
    # so string interpolation is safe here
    interpolation = {
        'table': connection.ops.quote_name(cls._meta.db_table),
        'gapsize': drop_rgt - drop_lft + 1,
        'drop_lft': drop_lft,
        'tree_id': tree_id,
    }
    sql = 'UPDATE %(table)s ' \
          ' SET lft = CASE ' \
          ' WHEN lft > %(drop_lft)d ' \
          ' THEN lft - %(gapsize)d ' \
          ' ELSE lft END, ' \
          ' rgt = CASE ' \
          ' WHEN rgt > %(drop_lft)d ' \
          ' THEN rgt - %(gapsize)d ' \
          ' ELSE rgt END ' \
          ' WHERE (lft > %(drop_lft)d ' \
          ' OR rgt > %(drop_lft)d) AND '\
          ' tree_id=%(tree_id)d' % interpolation
    return sql, []
@classmethod
def load_bulk(cls, bulk_data, parent=None, keep_ids=False):
    """
    Loads a list/dictionary structure to the tree.
    See: :meth:`treebeard.Node.move`
    """
    # iterative preorder traversal; the stack holds (parent id, raw node)
    # pairs still waiting to be inserted
    added = []
    root_parent_id = parent.id if parent else None
    pending = [(root_parent_id, raw) for raw in bulk_data[::-1]]
    while pending:
        parent_id, raw = pending.pop()
        # shallow copy of the data structure so it doesn't persist...
        attrs = raw['data'].copy()
        if keep_ids:
            attrs['id'] = raw['id']
        if parent_id:
            parent = cls.objects.get(pk=parent_id)
            new_node = parent.add_child(**attrs)
        else:
            new_node = cls.add_root(**attrs)
        added.append(new_node.id)
        if 'children' in raw:
            # queue the children with the freshly created node as their parent
            pending.extend([(new_node.id, child)
                            for child in raw['children'][::-1]])
    transaction.commit_unless_managed()
    return added
def get_children(self):
    """
    :returns: A queryset of all the node's children
    See: :meth:`treebeard.Node.get_children`
    """
    # children are exactly the descendants one level below this node
    descendants = self.get_descendants()
    return descendants.filter(depth=self.depth + 1)
def get_depth(self):
    """
    :returns: the depth (level) of the node
    See: :meth:`treebeard.Node.get_depth`
    """
    # depth is stored denormalized on the row, no query needed
    return self.depth
def is_leaf(self):
    """
    :returns: True if the node is a leaf node (else, returns False)
    See: :meth:`treebeard.Node.is_leaf`
    """
    # a leaf's nested-set interval encloses nothing: rgt immediately follows lft
    return self.lft + 1 == self.rgt
def get_root(self):
    """
    :returns: the root node for the current node object.
    See: :meth:`treebeard.Node.get_root`
    """
    # a root always has lft == 1; skip the query when we already are one
    if self.lft == 1:
        return self
    return self.__class__.objects.get(tree_id=self.tree_id, lft=1)
def get_siblings(self):
    """
    :returns: A queryset of all the node's siblings, including the node
    itself.
    See: :meth:`treebeard.Node.get_siblings`
    """
    if self.lft != 1:
        # non-root: siblings are our parent's children
        return self.get_parent(True).get_children()
    # root nodes are siblings of every other root
    return self.get_root_nodes()
@classmethod
def dump_bulk(cls, parent=None, keep_ids=True):
    """
    Dumps a tree branch to a python data structure.
    See: :meth:`treebeard.Node.dump_bulk`
    """
    qset = cls.get_tree(parent)
    ret = []
    lnk = {}
    for pyobj in qset:
        serobj = serializers.serialize('python', [pyobj])[0]
        # django's serializer stores the attributes in 'fields'
        fields = serobj['fields']
        depth = fields['depth']
        # the tree bookkeeping columns will be useless in load_bulk
        for key in ('lft', 'rgt', 'depth', 'tree_id'):
            del fields[key]
        if 'id' in fields:
            # this happens immediately after a load_bulk
            del fields['id']
        newobj = {'data': fields}
        if keep_ids:
            newobj['id'] = serobj['pk']
        if (not parent and depth == 1) or \
                (parent and depth == parent.depth):
            # top of the dumped branch
            ret.append(newobj)
        else:
            # attach under the previously-seen serialized parent
            parentser = lnk[pyobj.get_parent().id]
            parentser.setdefault('children', []).append(newobj)
        lnk[pyobj.id] = newobj
    return ret
@classmethod
def get_tree(cls, parent=None):
    """
    :returns: A *queryset* of nodes ordered as DFS, including the parent.
    If no parent is given, all trees are returned.
    See: :meth:`treebeard.Node.get_tree`
    .. note::
    This method returns a queryset.
    """
    if parent is None:
        # no parent: the whole forest
        return cls.objects.all()
    if parent.is_leaf():
        # a leaf's "tree" is just itself
        return cls.objects.filter(pk=parent.id)
    return cls.objects.filter(tree_id=parent.tree_id,
                              lft__range=(parent.lft, parent.rgt - 1))
def get_descendants(self):
    """
    :returns: A queryset of all the node's descendants as DFS, doesn't
    include the node itself
    See: :meth:`treebeard.Node.get_descendants`
    """
    model = self.__class__
    if self.is_leaf():
        return model.objects.none()
    # the node's own tree minus the node itself
    return model.get_tree(self).exclude(pk=self.id)
def get_descendant_count(self):
    """
    :returns: the number of descendants of a node, as an int.
    See: :meth:`treebeard.Node.get_descendant_count`
    """
    # In a nested-sets tree every descendant contributes one (lft, rgt)
    # pair inside our own interval, so the count is half the slots between
    # our lft and rgt.  Use floor division: under Python 3 true division
    # (`/ 2`) would return a float for what is semantically a count.
    return (self.rgt - self.lft - 1) // 2
def get_ancestors(self):
    """
    :returns: A queryset containing the current node object's ancestors,
    starting by the root node and descending to the parent.
    See: :meth:`treebeard.Node.get_ancestors`
    """
    if not self.is_root():
        # ancestors are the nodes whose interval strictly encloses ours
        return self.__class__.objects.filter(tree_id=self.tree_id,
                                             lft__lt=self.lft,
                                             rgt__gt=self.rgt)
    return self.__class__.objects.none()
def is_descendant_of(self, node):
    """
    :returns: ``True`` if the node if a descendant of another node given
    as an argument, else, returns ``False``
    See: :meth:`treebeard.Node.is_descendant_of`
    """
    # we descend from `node` iff we share its tree and our (lft, rgt)
    # interval is strictly contained inside its interval
    if self.tree_id != node.tree_id:
        return False
    return node.lft < self.lft and self.rgt < node.rgt
def get_parent(self, update=False):
    """
    :returns: the parent node of the current node object.
    Caches the result in the object itself to help in loops.
    See: :meth:`treebeard.Node.get_parent`
    """
    if self.is_root():
        return
    if update:
        # drop any stale cached parent before recomputing
        try:
            del self._cached_parent_obj
        except AttributeError:
            pass
    else:
        try:
            return self._cached_parent_obj
        except AttributeError:
            pass
    # the parent is our closest (deepest) ancestor
    self._cached_parent_obj = self.get_ancestors().reverse()[0]
    return self._cached_parent_obj
@classmethod
def get_root_nodes(cls):
    """
    :returns: A queryset containing the root nodes in the tree.
    Example::
    MyNodeModel.get_root_nodes()
    """
    # every root starts its own interval, so its lft is always 1
    roots = cls.objects.filter(lft=1)
    return roots
class Meta:
    """
    Abstract model.
    """
    # abstract: Django creates no table for this model; concrete
    # subclasses provide the actual storage.
    abstract = True
    # By changing the ordering, assume that lots of things will break,
    # at least you'll want to check the first/last/prev/next methods.
    # This ordering assumes you want something... TREEISH
    # PROTIP: don't change this
    # PROTIP2: Set the ordering property again if you add a Meta in
    # your subclass
    ordering = ['tree_id', 'lft']
|
|
#
# Copyright (c) 2018 SUNET
# Copyright (c) 2013, 2014, 2016, 2017 NORDUnet A/S
# Copyright 2012 Roland Hedberg. All rights reserved.
# All rights reserved.
#
# See the file eduid-IdP/LICENSE.txt for license statement.
#
# Author : Fredrik Thulin <fredrik@thulin.net>
# Roland Hedberg
#
import logging
import time
import warnings
from collections import deque
from threading import Lock
from typing import Any, Deque, Dict, List, Mapping, Optional, Tuple, Union, cast
from eduid_userdb.db import BaseDB
from eduid_userdb.exceptions import EduIDDBError
from eduid_userdb.idp import IdPUserDb
from eduid_webapp.idp.sso_session import SSOSession, SSOSessionId
# Length of a hex-encoded SHA-1 digest: 160 bits -> 20 bytes -> 40 hex characters.
_SHA1_HEXENCODED_SIZE = 160 // 8 * 2
# TODO: Rename to logger
module_logger = logging.getLogger(__name__)
class NoOpLock(object):
    """
    Stand-in for ``threading.Lock`` that does nothing.

    Lets calling code hold "a lock" unconditionally instead of sprinkling
    ``if self.lock:`` checks everywhere.
    """

    def __init__(self) -> None:
        pass

    # noinspection PyUnusedLocal
    def acquire(self, _block: bool = True) -> bool:
        """
        Pretend to take the lock; always succeeds immediately.
        :param _block: whether to block or not (ignored; kept for Lock API compatibility)
        """
        return True

    def release(self) -> None:
        """Pretend to release the lock."""
        pass
class ExpiringCacheMem:
    """
    Simplistic implementation of a cache that removes entrys as they become too old.
    This implementation invokes garbage collecting on every addition of data. This
    is believed to be a pragmatic approach for small to medium sites. For a large
    site with e.g. load balancers causing uneven traffic patterns, this might not
    work that well and the use of an external cache such as memcache is recommended.
    :param name: name of cache as string, only used for debugging
    :param logger: logging logger instance
    :param ttl: data time to live in this cache, as seconds (integer)
    :param lock: threading.Lock compatible locking instance
    """

    def __init__(self, name: str, logger: Optional[logging.Logger], ttl: int, lock: Optional[Lock] = None):
        self.logger = logger
        self.ttl = ttl
        self.name = name
        # cached entries, key -> value
        self._data: Dict[SSOSessionId, Any] = {}
        # (insertion timestamp, key) pairs, oldest first; drives expiry in _purge_expired
        self._ages: Deque[Tuple[float, SSOSessionId]] = deque()
        self.lock = lock
        if self.lock is None:
            self.lock = cast(Lock, NoOpLock())  # intentionally lie to mypy
        if self.logger is not None:
            warnings.warn('Object logger deprecated, using module_logger', DeprecationWarning)

    def add(self, key: SSOSessionId, info: Any, now: Optional[int] = None) -> None:
        """
        Add entry to the cache.
        Ability to supply current time is only meant for test cases!
        :param key: Lookup key for entry
        :param info: Value to be stored for 'key'
        :param now: Current time - do not use unless testing!
        """
        self._data[key] = info
        # record when this entry shall be purged
        _now = now
        if _now is None:
            _now = int(time.time())
        self._ages.append((_now, key))
        # garbage-collect on every add: drop everything older than now - ttl
        self._purge_expired(_now - self.ttl)

    def _purge_expired(self, timestamp: int) -> None:
        """
        Purge expired records.
        :param timestamp: Purge any entrys older than this (integer)
        """
        if not self.lock or not self.lock.acquire(False):
            # if we don't get the lock, don't worry about it and just skip purging
            return None
        try:
            # purge any expired records. self._ages have the _data entries listed with oldest first.
            while True:
                try:
                    (_exp_ts, _exp_key) = self._ages.popleft()
                except IndexError:
                    # age queue exhausted, nothing left to purge
                    break
                if _exp_ts > timestamp:
                    # entry not expired - reinsert in queue and end purging
                    self._ages.appendleft((_exp_ts, _exp_key))
                    break
                module_logger.debug(
                    'Purged {!s} cache entry {!s} seconds over limit : {!s}'.format(
                        self.name, timestamp - _exp_ts, _exp_key
                    )
                )
                self.delete(_exp_key)
        finally:
            self.lock.release()

    def get(self, key: SSOSessionId) -> Optional[Mapping[str, Any]]:
        """
        Fetch data from cache based on `key'.
        :param key: hash key to use for lookup
        :returns: Any data found matching `key', or None.
        """
        return self._data.get(key)

    def update(self, key: SSOSessionId, info: Any) -> None:
        """
        Update an entry in the cache.
        :param key: Lookup key for entry
        :param info: Value to be stored for 'key'
        :return: None
        """
        # NOTE: does not refresh the entry's position in the age queue
        self._data[key] = info

    def delete(self, key: SSOSessionId) -> bool:
        """
        Delete an item from the cache.
        :param key: hash key to delete
        :return: True on success
        """
        try:
            del self._data[key]
            return True
        except KeyError:
            module_logger.debug('Failed deleting key {!r} from {!s} cache (entry did not exist)'.format(key, self.name))
            return False

    def items(self) -> Any:
        """
        Return all items from cache.
        """
        # returns the live internal dict, not a copy
        return self._data
class SSOSessionCacheError(EduIDDBError):
    """Exception for SSO session cache errors (specialisation of EduIDDBError)."""
    pass
class SSOSessionCache(BaseDB):
    """MongoDB-backed store for SSO sessions (default collection 'sso_sessions')."""

    def __init__(self, db_uri: str, ttl: int, db_name: str = 'eduid_idp', collection: str = 'sso_sessions'):
        super().__init__(db_uri, db_name, collection=collection)
        # Remove messages older than created_ts + ttl
        # (TTL index: MongoDB auto-expires documents; session_id is unique)
        indexes = {
            'auto-discard': {'key': [('created_ts', 1)], 'expireAfterSeconds': ttl},
            'unique-session-id': {'key': [('session_id', 1)], 'unique': True},
        }
        self.setup_indexes(indexes)

    def remove_session(self, session: SSOSession) -> Union[int, bool]:
        """
        Remove entries when SLO is executed.
        :return: number of removed records on success, False on failure
        """
        # NOTE(review): Collection.remove() is deprecated in pymongo 3 and removed
        # in pymongo 4 (delete_many + result.deleted_count is the replacement) —
        # verify the pinned pymongo version before upgrading.
        res = self._coll.remove({'session_id': session.session_id}, w='majority')
        try:
            return int(res['n'])  # number of deleted records
        except (KeyError, TypeError):
            module_logger.warning(f'Remove session {repr(session.session_id)} failed, result: {repr(res)}')
            return False

    def save(self, session: SSOSession) -> None:
        """
        Add a new SSO session to the cache, or update an existing one.
        The mapping of uid -> user (and data) is used when a user visits another SP before
        the SSO session expires, and the mapping of user -> uid is used if the user requests
        logout (SLO).
        """
        # upsert keyed on the session's _id so repeated saves update in place
        result = self._coll.replace_one({'_id': session._id}, session.to_dict(), upsert=True)
        module_logger.debug(f'Updated SSO session {session} in the db: {result}')
        return None

    def get_session(self, sid: SSOSessionId, userdb: IdPUserDb) -> Optional[SSOSession]:
        """
        Lookup an SSO session using the session id (same `sid' previously used with add_session).
        :param sid: Unique session identifier as string
        :param userdb: Database to use to initialise session.idp_user
        :return: The session, if found
        """
        try:
            res = self._coll.find_one({'session_id': sid})
        except KeyError:
            # NOTE(review): find_one is not documented to raise KeyError; this
            # handler may be vestigial — confirm before removing.
            module_logger.debug(f'Failed looking up SSO session with id={repr(sid)}')
            raise
        if not res:
            return None
        return SSOSession.from_dict(res, userdb)

    def get_sessions_for_user(self, eppn: str, userdb: IdPUserDb) -> List[SSOSession]:
        """
        Lookup all SSO session ids for a given user. Used in SLO with SOAP binding.
        :param eppn: The eppn to look for
        :return: A list with zero or more SSO sessions
        """
        entrys = self._coll.find({'username': eppn})
        res = [SSOSession.from_dict(this, userdb) for this in entrys]
        return res
|
|
'''
Created on Sep 13, 2011
@author: Mark V Systems Limited
(c) Copyright 2011 Mark V Systems Limited, All rights reserved.
'''
import os
from arelle import ViewFile
from lxml import etree
from arelle.RenderingResolver import resolveAxesStructure, RENDER_UNITS_PER_CHAR
from arelle.ViewFile import HTML, XML
from arelle.ModelObject import ModelObject
from arelle.ModelFormulaObject import Aspect, aspectModels, aspectRuleAspects, aspectModelAspect, aspectStr
from arelle.FormulaEvaluator import aspectMatches
from arelle.FunctionXs import xsString
from arelle.ModelInstanceObject import ModelDimensionValue
from arelle.ModelValue import QName
from arelle.ModelXbrl import DEFAULT
from arelle.ModelRenderingObject import (ModelClosedDefinitionNode, ModelEuAxisCoord, ModelFilterDefinitionNode,
OPEN_ASPECT_ENTRY_SURROGATE)
from arelle.PrototypeInstanceObject import FactPrototype
# change tableModel for namespace needed for consistency suite
'''
from arelle.XbrlConst import (tableModelMMDD as tableModelNamespace,
tableModelMMDDQName as tableModelQName)
'''
from arelle import XbrlConst
from arelle.XmlUtil import innerTextList, child, elementFragmentIdentifier, addQnameValue
from collections import defaultdict
# shared empty-collection sentinels (treated as immutable by convention,
# used to avoid re-allocating empties in the rendering loops below)
emptySet = set()
emptyList = []
def viewRenderedGrid(modelXbrl, outfile, lang=None, viewTblELR=None, sourceView=None, diffToFile=False, cssExtras=""):
    """
    Entry point: render modelXbrl's tables to *outfile*.

    When *sourceView* is given, its table ELR and axis-ordering options are
    copied onto the new view; when *diffToFile* is set, the rendering is
    validated against the existing *outfile* instead of overwriting it.
    """
    modelXbrl.modelManager.showStatus(_("saving rendering"))
    view = ViewRenderedGrid(modelXbrl, outfile, lang, cssExtras)
    if sourceView is not None:
        # mirror the interactive (GUI) view's current selections
        viewTblELR = sourceView.tblELR
        view.ignoreDimValidity.set(sourceView.ignoreDimValidity.get())
        view.xAxisChildrenFirst.set(sourceView.xAxisChildrenFirst.get())
        view.yAxisChildrenFirst.set(sourceView.yAxisChildrenFirst.get())
    view.view(viewTblELR)
    if diffToFile and outfile:
        # compare-mode: validate against the existing file, write nothing
        from arelle.ValidateInfoset import validateRenderingInfoset
        validateRenderingInfoset(modelXbrl, outfile, view.xmlDoc)
        view.close(noWrite=True)
    else:
        view.close()
    modelXbrl.modelManager.showStatus(_("rendering saved to {0}").format(outfile), clearAfter=5000)
class ViewRenderedGrid(ViewFile.View):
def __init__(self, modelXbrl, outfile, lang, cssExtras):
    """Initialise the file-based grid view and its rendering options."""
    # find table model namespace based on table namespace
    self.tableModelNamespace = XbrlConst.tableModel
    for xsdNs in modelXbrl.namespaceDocs.keys():
        if xsdNs in (XbrlConst.tableMMDD, XbrlConst.table, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011):
            # derive the model namespace from whichever table spec version is in use
            self.tableModelNamespace = xsdNs + "/model"
            break
    super(ViewRenderedGrid, self).__init__(modelXbrl, outfile,
                                           'tableModel xmlns="{0}"'.format(self.tableModelNamespace),
                                           lang,
                                           style="rendering",
                                           cssExtras=cssExtras)

    # minimal stand-in for a tkinter BooleanVar (set/get), so the same
    # option API works without a GUI
    class nonTkBooleanVar():
        def __init__(self, value=True):
            self.value = value

        def set(self, value):
            self.value = value

        def get(self):
            return self.value

    # context menu boolean vars (non-tkinter stand-ins, see above)
    self.ignoreDimValidity = nonTkBooleanVar(value=True)
    self.xAxisChildrenFirst = nonTkBooleanVar(value=True)
    self.yAxisChildrenFirst = nonTkBooleanVar(value=False)
def tableModelQName(self, localName):
    """Return *localName* as a Clark-notation name in the table model namespace."""
    ns = self.tableModelNamespace
    return '{' + ns + '}' + localName
def viewReloadDueToMenuAction(self, *args):
    # re-render with the current option settings; *args absorbs menu-event arguments
    self.view()
def view(self, viewTblELR=None):
    """
    Render every table (or just *viewTblELR*) of the model.

    For each table, iterates the z-axis "discriminator" combinations,
    re-resolving the axis structure and emitting either xhtml rows
    (self.type == HTML) or the table-model infoset (self.type == XML)
    on each pass.
    """
    if viewTblELR is not None:
        tblELRs = (viewTblELR,)
    else:
        # all table linkroles in the model
        tblELRs = self.modelXbrl.relationshipSet("Table-rendering").linkRoleUris
    if self.type == XML:
        self.tblElt.append(etree.Comment("Entry point file: {0}".format(self.modelXbrl.modelDocument.basename)))
    for tblELR in tblELRs:
        self.zOrdinateChoices = {}
        for discriminator in range(1, 65535):
            # each table z production
            tblAxisRelSet, xTopStructuralNode, yTopStructuralNode, zTopStructuralNode = resolveAxesStructure(self, tblELR)
            self.hasTableFilters = bool(self.modelTable.filterRelationships)
            self.zStrNodesWithChoices = []
            if tblAxisRelSet and self.tblElt is not None:
                tableLabel = (self.modelTable.genLabel(lang=self.lang, strip=True) or # use table label, if any
                              self.roledefinition)
                if self.type == HTML: # table on each Z
                    # each Z is a separate table in the outer table
                    zTableRow = etree.SubElement(self.tblElt, "{http://www.w3.org/1999/xhtml}tr")
                    zRowCell = etree.SubElement(zTableRow, "{http://www.w3.org/1999/xhtml}td")
                    zCellTable = etree.SubElement(zRowCell, "{http://www.w3.org/1999/xhtml}table",
                                                  attrib={"border":"1", "cellspacing":"0", "cellpadding":"4", "style":"font-size:8pt;"})
                    self.rowElts = [etree.SubElement(zCellTable, "{http://www.w3.org/1999/xhtml}tr")
                                    for r in range(self.dataFirstRow + self.dataRows - 1)]
                    etree.SubElement(self.rowElts[0], "{http://www.w3.org/1999/xhtml}th",
                                     attrib={"class":"tableHdr",
                                             "style":"max-width:100em;",
                                             "colspan": str(self.dataFirstCol - 1),
                                             "rowspan": str(self.dataFirstRow - 1)}
                                     ).text = tableLabel
                elif self.type == XML:
                    self.structuralNodeModelElements = []
                    if discriminator == 1:
                        # headers structure only build once for table
                        tableSetElt = etree.SubElement(self.tblElt, self.tableModelQName("tableSet"))
                        tableSetElt.append(etree.Comment("TableSet linkbase file: {0}, line {1}".format(self.modelTable.modelDocument.basename, self.modelTable.sourceline)))
                        tableSetElt.append(etree.Comment("TableSet namespace: {0}".format(self.modelTable.namespaceURI)))
                        tableSetElt.append(etree.Comment("TableSet linkrole: {0}".format(tblELR)))
                        etree.SubElement(tableSetElt, self.tableModelQName("label")
                                         ).text = tableLabel
                        zAspectStructuralNodes = defaultdict(set)
                        tableElt = etree.SubElement(tableSetElt, self.tableModelQName("table"))
                        self.groupElts = {}
                        self.headerElts = {}
                        self.headerCells = defaultdict(list) # order #: (breakdownNode, xml element)
                        for axis in ("z", "y", "x"):
                            breakdownNodes = self.breakdownNodes.get(axis)
                            if breakdownNodes:
                                hdrsElt = etree.SubElement(tableElt, self.tableModelQName("headers"),
                                                           attrib={"axis": axis})
                                for brkdownNode in self.breakdownNodes.get(axis):
                                    groupElt = etree.SubElement(hdrsElt, self.tableModelQName("group"))
                                    groupElt.append(etree.Comment("Breakdown node file: {0}, line {1}".format(brkdownNode.modelDocument.basename, brkdownNode.sourceline)))
                                    label = brkdownNode.genLabel(lang=self.lang, strip=True)
                                    if label:
                                        etree.SubElement(groupElt, self.tableModelQName("label")).text=label
                                    self.groupElts[brkdownNode] = groupElt
                                    # HF TODO omit header if zero cardinality on breakdown
                                    self.headerElts[brkdownNode] = etree.SubElement(groupElt, self.tableModelQName("header"))
                            else:
                                tableElt.append(etree.Comment("No breakdown group for \"{0}\" axis".format(axis)))
                        self.zAxis(1, zTopStructuralNode, zAspectStructuralNodes, True)
                    # tableElt persists as a local across discriminator iterations
                    self.cellsParentElt = tableElt
                    self.cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName("cells"),
                                                           attrib={"axis": "z"})
                    self.cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName("cells"),
                                                           attrib={"axis": "y"})
                    ''' move into body cells, for entry row-by-row
                    self.cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName("cells"),
                                                           attrib={"axis": "x"})
                    '''
                # rows/cols only on firstTime for infoset XML, but on each time for xhtml
                zAspectStructuralNodes = defaultdict(set)
                self.zAxis(1, zTopStructuralNode, zAspectStructuralNodes, False)
                xStructuralNodes = []
                if self.type == HTML or (xTopStructuralNode and xTopStructuralNode.childStructuralNodes):
                    self.xAxis(self.dataFirstCol, self.colHdrTopRow, self.colHdrTopRow + self.colHdrRows - 1,
                               xTopStructuralNode, xStructuralNodes, self.xAxisChildrenFirst.get(), True, True)
                if self.type == HTML: # table/tr goes by row
                    self.yAxisByRow(1, self.dataFirstRow,
                                    yTopStructuralNode, self.yAxisChildrenFirst.get(), True, True)
                elif self.type == XML: # infoset goes by col of row header
                    if yTopStructuralNode and yTopStructuralNode.childStructuralNodes: # no row header element if no rows
                        self.yAxisByCol(1, self.dataFirstRow,
                                        yTopStructuralNode, self.yAxisChildrenFirst.get(), True, True)
                    # add header cells to header elements
                    for position, breakdownCellElts in sorted(self.headerCells.items()):
                        for breakdownNode, headerCell in breakdownCellElts:
                            self.headerElts[breakdownNode].append(headerCell)
                    for structuralNode,modelElt in self.structuralNodeModelElements: # must do after elements are all arragned
                        modelElt.addprevious(etree.Comment("{0}: label {1}, file {2}, line {3}"
                                                           .format(structuralNode.definitionNode.localName,
                                                                   structuralNode.definitionNode.xlinkLabel,
                                                                   structuralNode.definitionNode.modelDocument.basename,
                                                                   structuralNode.definitionNode.sourceline)))
                        if structuralNode.definitionNode.get('value'):
                            modelElt.addprevious(etree.Comment(" @value {0}".format(structuralNode.definitionNode.get('value'))))
                        for aspect in sorted(structuralNode.aspectsCovered(), key=lambda a: aspectStr(a)):
                            if structuralNode.hasAspect(aspect) and aspect not in (Aspect.DIMENSIONS, Aspect.OMIT_DIMENSIONS):
                                aspectValue = structuralNode.aspectValue(aspect)
                                if aspectValue is None: aspectValue = "(bound dynamically)"
                                modelElt.addprevious(etree.Comment(" aspect {0}: {1}".format(aspectStr(aspect), xsString(None,None,aspectValue))))
                        for varName, varValue in structuralNode.variables.items():
                            modelElt.addprevious(etree.Comment(" variable ${0}: {1}".format(varName, varValue)))
                    for headerElt in self.headerElts.values(): # remove empty header elements
                        if not any(e is not None for e in headerElt.iterchildren()):
                            if headerElt.getparent() is not None:
                                headerElt.getparent().remove(headerElt)
                self.bodyCells(self.dataFirstRow, yTopStructuralNode, xStructuralNodes, zAspectStructuralNodes, self.yAxisChildrenFirst.get())
            # find next choice structural node
            moreDiscriminators = False
            for zStrNodeWithChoices in self.zStrNodesWithChoices:
                currentIndex = zStrNodeWithChoices.choiceNodeIndex + 1
                if currentIndex < len(zStrNodeWithChoices.choiceStructuralNodes):
                    zStrNodeWithChoices.choiceNodeIndex = currentIndex
                    self.zOrdinateChoices[zStrNodeWithChoices.definitionNode] = currentIndex
                    moreDiscriminators = True
                    break
                else:
                    zStrNodeWithChoices.choiceNodeIndex = 0
                    self.zOrdinateChoices[zStrNodeWithChoices.definitionNode] = 0
                    # continue incrementing next outermore z choices index
            if not moreDiscriminators:
                break
def zAxis(self, row, zStructuralNode, zAspectStructuralNodes, discriminatorsTable):
    """
    Render one level of the z (page) axis and recurse into child nodes.

    zAspectStructuralNodes accumulates, per aspect, the structural nodes
    constraining the z dimension; discriminatorsTable is True on the first
    pass that builds the XML header structure.
    """
    if zStructuralNode is not None:
        label = zStructuralNode.header(lang=self.lang)
        choiceLabel = None
        effectiveStructuralNode = zStructuralNode
        if zStructuralNode.choiceStructuralNodes: # same as combo box selection in GUI mode
            if not discriminatorsTable:
                self.zStrNodesWithChoices.insert(0, zStructuralNode) # iteration from last is first
            try:
                effectiveStructuralNode = zStructuralNode.choiceStructuralNodes[zStructuralNode.choiceNodeIndex]
                choiceLabel = effectiveStructuralNode.header(lang=self.lang)
                if not label and choiceLabel:
                    label = choiceLabel # no header for choice
                    choiceLabel = None
            except KeyError:
                pass
        if choiceLabel:
            # split the data columns between the node label and its choice label
            if self.dataCols > 3:
                zLabelSpan = 2
            else:
                zLabelSpan = 1
            zChoiceLabelSpan = self.dataCols - zLabelSpan
        else:
            zLabelSpan = self.dataCols
        if self.type == HTML:
            etree.SubElement(self.rowElts[row-1], "{http://www.w3.org/1999/xhtml}th",
                             attrib={"class":"zAxisHdr",
                                     "style":"max-width:200pt;text-align:left;border-bottom:.5pt solid windowtext",
                                     "colspan": str(zLabelSpan)} # "2"}
                             ).text = label
            if choiceLabel:
                etree.SubElement(self.rowElts[row-1], "{http://www.w3.org/1999/xhtml}th",
                                 attrib={"class":"zAxisHdr",
                                         "style":"max-width:200pt;text-align:left;border-bottom:.5pt solid windowtext",
                                         "colspan": str(zChoiceLabelSpan)} # "2"}
                                 ).text = choiceLabel
        elif self.type == XML:
            # headers element built for first pass on z axis
            if discriminatorsTable:
                brkdownNode = zStructuralNode.breakdownNode
                if zStructuralNode.choiceStructuralNodes: # same as combo box selection in GUI mode
                    # hdrElt.set("label", label)
                    if discriminatorsTable:
                        def zSpan(zNode, startNode=False):
                            # NOTE(review): the elif below tests the *outer*
                            # zStructuralNode, not the zNode being visited —
                            # looks suspicious for nested z nodes; confirm
                            # against upstream before changing.
                            if startNode:
                                thisSpan = 0
                            elif zStructuralNode.choiceStructuralNodes:
                                thisSpan = len(zStructuralNode.choiceStructuralNodes)
                            else:
                                thisSpan = 1
                            return sum(zSpan(z) for z in zNode.childStructuralNodes) + thisSpan
                        span = zSpan(zStructuralNode, True)
                        # one header cell per combo-box choice
                        for i, choiceStructuralNode in enumerate(zStructuralNode.choiceStructuralNodes):
                            choiceLabel = choiceStructuralNode.header(lang=self.lang)
                            cellElt = etree.Element(self.tableModelQName("cell"),
                                                    attrib={"span": str(span)} if span > 1 else None)
                            self.headerCells[i].append((brkdownNode, cellElt))
                            # self.structuralNodeModelElements.append((zStructuralNode, cellElt))
                            elt = etree.SubElement(cellElt, self.tableModelQName("label"))
                            if choiceLabel:
                                elt.text = choiceLabel
                    #else: # choiceLabel from above
                    #    etree.SubElement(hdrElt, self.tableModelQName("label")
                    #                     ).text = choiceLabel
                else: # no combo choices, single label
                    cellElt = etree.Element(self.tableModelQName("cell"))
                    self.headerCells[0].append((brkdownNode, cellElt))
                    # self.structuralNodeModelElements.append((zStructuralNode, cellElt))
                    elt = etree.SubElement(cellElt, self.tableModelQName("label"))
                    if label:
                        elt.text = label
        # record which structural nodes constrain each aspect on the z axis
        for aspect in aspectModels[self.aspectModel]:
            if effectiveStructuralNode.hasAspect(aspect, inherit=True): #implies inheriting from other z axes
                if aspect == Aspect.DIMENSIONS:
                    for dim in (effectiveStructuralNode.aspectValue(Aspect.DIMENSIONS, inherit=True) or emptyList):
                        zAspectStructuralNodes[dim].add(effectiveStructuralNode)
                else:
                    zAspectStructuralNodes[aspect].add(effectiveStructuralNode)
        for zStructuralNode in zStructuralNode.childStructuralNodes:
            self.zAxis(row + 1, zStructuralNode, zAspectStructuralNodes, discriminatorsTable)
def xAxis(self, leftCol, topRow, rowBelow, xParentStructuralNode, xStructuralNodes, childrenFirst, renderNow, atTop):
    """
    Recursively render the x (column) axis headers.

    Walks xParentStructuralNode's children, laying out header cells between
    topRow and rowBelow starting at leftCol, and appends each non-abstract
    (fact-bearing) node to xStructuralNodes in column order.  With renderNow
    False the call only measures; childrenFirst controls whether nested
    columns precede their parent totals.

    :returns: (rightCol, parentRow, widthToSpanParent, noDescendants) used by
        the recursive caller to position and span the parent's own cell.
    """
    if xParentStructuralNode is not None:
        parentRow = rowBelow
        noDescendants = True
        rightCol = leftCol
        widthToSpanParent = 0
        for xStructuralNode in xParentStructuralNode.childStructuralNodes:
            noDescendants = False
            # measure (or, when childrenFirst, also render) the subtree first
            rightCol, row, width, leafNode = self.xAxis(leftCol, topRow + 1, rowBelow, xStructuralNode, xStructuralNodes, # nested items before totals
                                                        childrenFirst, childrenFirst, False)
            if row - 1 < parentRow:
                parentRow = row - 1
            nonAbstract = not xStructuralNode.isAbstract
            if nonAbstract:
                width += 100 # width for this label
            widthToSpanParent += width
            if childrenFirst:
                thisCol = rightCol
            else:
                thisCol = leftCol
            if renderNow:
                label = xStructuralNode.header(lang=self.lang,
                                               returnGenLabel=isinstance(xStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)))
                columnspan = rightCol - leftCol
                if columnspan > 0 and nonAbstract: columnspan += 1
                elt = None
                if self.type == HTML:
                    if rightCol == self.dataFirstCol + self.dataCols - 1:
                        edgeBorder = "border-right:.5pt solid windowtext;"
                    else:
                        edgeBorder = ""
                    attrib = {"class":"xAxisHdr",
                              "style":"text-align:center;max-width:{0}pt;{1}".format(width,edgeBorder)}
                    if columnspan > 1:
                        attrib["colspan"] = str(columnspan)
                    if leafNode and row > topRow:
                        rowspan = row - topRow + 1
                        if rowspan > 1:
                            attrib["rowspan"] = str(rowspan)
                    elt = etree.Element("{http://www.w3.org/1999/xhtml}th",
                                        attrib=attrib)
                    self.rowElts[topRow-1].insert(leftCol,elt)
                elif (self.type == XML and # is leaf or no sub-breakdown cardinality
                      # TBD: determine why following clause is needed
                      (True or xStructuralNode.childStructuralNodes is None or columnspan > 0)): # ignore no-breakdown situation
                    brkdownNode = xStructuralNode.breakdownNode
                    cellElt = etree.Element(self.tableModelQName("cell"),
                                            attrib={"span": str(columnspan)} if columnspan > 1 else None)
                    self.headerCells[thisCol].append((brkdownNode, cellElt))
                    elt = etree.SubElement(cellElt, self.tableModelQName("label"))
                    if nonAbstract or (leafNode and row > topRow):
                        # pad remaining header rows with rollup cells
                        for rollUpCol in range(topRow - self.colHdrTopRow + 1, self.colHdrRows - 1):
                            rollUpElt = etree.Element(self.tableModelQName("cell"),
                                                      attrib={"rollup":"true"})
                            # BUG FIX: append the rollup cell itself; the previous
                            # code re-appended the already-added label cell
                            # (cellElt), leaving rollUpElt unused and duplicating
                            # the label cell in headerCells.
                            self.headerCells[thisCol].append((brkdownNode, rollUpElt))
                    for i, role in enumerate(self.colHdrNonStdRoles):
                        roleLabel = xStructuralNode.header(role=role, lang=self.lang, recurseParent=False) # infoset does not move parent label to descendant
                        if roleLabel is not None:
                            cellElt.append(etree.Comment("Label role: {0}, lang {1}"
                                                         .format(os.path.basename(role), self.lang)))
                            labelElt = etree.SubElement(cellElt, self.tableModelQName("label"),
                                                        #attrib={"role": role,
                                                        #        "lang": self.lang}
                                                        )
                            labelElt.text = roleLabel
                    # emit the aspect constraints bound on this node
                    for aspect in sorted(xStructuralNode.aspectsCovered(), key=lambda a: aspectStr(a)):
                        if xStructuralNode.hasAspect(aspect) and aspect not in (Aspect.DIMENSIONS, Aspect.OMIT_DIMENSIONS):
                            aspectValue = xStructuralNode.aspectValue(aspect)
                            if aspectValue is None: aspectValue = "(bound dynamically)"
                            if isinstance(aspectValue, ModelObject): # typed dimension value
                                aspectValue = innerTextList(aspectValue)
                            if isinstance(aspectValue, QName) and aspectValue.prefix is None: # may be dynamic
                                try:
                                    aspectValue = self.modelXbrl.qnameConcepts[aspectValue].qname # usually has a prefix
                                except KeyError:
                                    pass
                            aspElt = etree.SubElement(cellElt, self.tableModelQName("constraint"))
                            etree.SubElement(aspElt, self.tableModelQName("aspect")
                                             ).text = aspectStr(aspect)
                            etree.SubElement(aspElt, self.tableModelQName("value")
                                             ).text = xsString(None,None,addQnameValue(self.xmlDoc, aspectValue))
                if elt is not None:
                    elt.text = label if bool(label) and label != OPEN_ASPECT_ENTRY_SURROGATE else "\u00A0" # produces &nbsp;
                if nonAbstract:
                    if columnspan > 1 and rowBelow > topRow: # add spanned left leg portion one row down
                        if self.type == HTML:
                            attrib= {"class":"xAxisSpanLeg",
                                     "rowspan": str(rowBelow - row)}
                            if edgeBorder:
                                attrib["style"] = edgeBorder
                            elt = etree.Element("{http://www.w3.org/1999/xhtml}th",
                                                attrib=attrib)
                            elt.text = "\u00A0"
                            if childrenFirst:
                                self.rowElts[topRow].append(elt)
                            else:
                                self.rowElts[topRow].insert(leftCol,elt)
                    if self.type == HTML:
                        # extra header rows for non-standard label roles
                        for i, role in enumerate(self.colHdrNonStdRoles):
                            elt = etree.Element("{http://www.w3.org/1999/xhtml}th",
                                                attrib={"class":"xAxisHdr",
                                                        "style":"text-align:center;max-width:100pt;{0}".format(edgeBorder)})
                            self.rowElts[self.dataFirstRow - 1 - len(self.colHdrNonStdRoles) + i].insert(thisCol,elt)
                            elt.text = xStructuralNode.header(role=role, lang=self.lang) or "\u00A0"
                    xStructuralNodes.append(xStructuralNode)
            if nonAbstract:
                rightCol += 1
            if renderNow and not childrenFirst:
                self.xAxis(leftCol + (1 if nonAbstract else 0), topRow + 1, rowBelow, xStructuralNode, xStructuralNodes, childrenFirst, True, False) # render on this pass
            leftCol = rightCol
        return (rightCol, parentRow, widthToSpanParent, noDescendants)
    def yAxisByRow(self, leftCol, row, yParentStructuralNode, childrenFirst, renderNow, atLeft):
        """Recursively emit the row (y-axis) header cells of the rendered table.

        Descends the structural-node tree under *yParentStructuralNode*,
        inserting one <th> per labeled node into ``self.rowElts`` (so this is
        the HTML layout path; the XML path is ``yAxisByCol``).  ``leftCol`` is
        the header column for this nesting depth and ``row`` the next free
        table row.  When *childrenFirst* is true, child rows are rendered
        above their parent (totals below); otherwise the parent row precedes
        its children.  Returns ``(nestedBottomRow, row)`` — the bottom-most
        row consumed by this subtree and the next free row.
        NOTE(review): returns None when *yParentStructuralNode* is None;
        callers appear to always pass a real node — confirm.
        """
        if yParentStructuralNode is not None:
            nestedBottomRow = row
            for yStructuralNode in yParentStructuralNode.childStructuralNodes:
                # First recurse without rendering (renderNow=childrenFirst) just to
                # measure how many rows the subtree occupies (nestRow/nextRow).
                nestRow, nextRow = self.yAxisByRow(leftCol + 1, row, yStructuralNode, # nested items before totals
                                                   childrenFirst, childrenFirst, False)
                isAbstract = (yStructuralNode.isAbstract or
                              (yStructuralNode.childStructuralNodes and
                               not isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord))))
                isNonAbstract = not isAbstract
                isLabeled = yStructuralNode.isLabeled
                topRow = row
                #print ( "row {0} topRow {1} nxtRow {2} col {3} renderNow {4} label {5}".format(row, topRow, nextRow, leftCol, renderNow, label))
                if renderNow and isLabeled:
                    label = yStructuralNode.header(lang=self.lang,
                                                   returnGenLabel=isinstance(yStructuralNode.definitionNode, ModelClosedDefinitionNode),
                                                   recurseParent=not isinstance(yStructuralNode.definitionNode, ModelFilterDefinitionNode))
                    columnspan = self.rowHdrCols - leftCol + 1 if isNonAbstract or nextRow == row else 1
                    if childrenFirst and isNonAbstract and nextRow > row:
                        # children already occupy rows above: emit the vertical "span arm"
                        # cell that bridges them to this (total) row.
                        elt = etree.Element("{http://www.w3.org/1999/xhtml}th",
                                            attrib={"class":"yAxisSpanArm",
                                                    "style":"text-align:center;min-width:2em;",
                                                    "rowspan": str(nextRow - topRow)}
                                            )
                        insertPosition = self.rowElts[nextRow-1].__len__()
                        self.rowElts[row - 1].insert(insertPosition, elt)
                        elt.text = "\u00A0"
                        hdrRow = nextRow # put nested stuff on bottom row
                        row = nextRow # nested header still goes on this row
                    else:
                        hdrRow = row
                    # provide top or bottom borders
                    edgeBorder = ""
                    if childrenFirst:
                        if hdrRow == self.dataFirstRow:
                            edgeBorder = "border-top:.5pt solid windowtext;"
                    else:
                        if hdrRow == len(self.rowElts):
                            edgeBorder = "border-bottom:.5pt solid windowtext;"
                    depth = yStructuralNode.depth
                    attrib = {"style":"text-align:{0};max-width:{1}em;{2}".format(
                                            self.langAlign if isNonAbstract or nestRow == hdrRow else "center",
                                            # this is a wrap length max width in characters
                                            self.rowHdrColWidth[depth] if isAbstract else
                                            self.rowHdrWrapLength - sum(self.rowHdrColWidth[0:depth]),
                                            edgeBorder),
                              "colspan": str(columnspan)}
                    if label == OPEN_ASPECT_ENTRY_SURROGATE: # entry of dimension
                        attrib["style"] += ";background:#fff" # override for white background
                    if isAbstract:
                        attrib["rowspan"] = str(nestRow - hdrRow)
                        attrib["class"] = "yAxisHdrAbstractChildrenFirst" if childrenFirst else "yAxisHdrAbstract"
                    elif nestRow > hdrRow:
                        attrib["class"] = "yAxisHdrWithLeg"
                    elif childrenFirst:
                        attrib["class"] = "yAxisHdrWithChildrenFirst"
                    else:
                        attrib["class"] = "yAxisHdr"
                    elt = etree.Element("{http://www.w3.org/1999/xhtml}th",
                                        attrib=attrib
                                        )
                    elt.text = label if bool(label) and label != OPEN_ASPECT_ENTRY_SURROGATE else "\u00A0"
                    if isNonAbstract:
                        self.rowElts[hdrRow-1].append(elt)
                        if not childrenFirst and nestRow > hdrRow: # add spanned left leg portion one row down
                            etree.SubElement(self.rowElts[hdrRow],
                                             "{http://www.w3.org/1999/xhtml}th",
                                             attrib={"class":"yAxisSpanLeg",
                                                     "style":"text-align:center;max-width:{0}pt;{1}".format(RENDER_UNITS_PER_CHAR, edgeBorder),
                                                     "rowspan": str(nestRow - hdrRow)}
                                             ).text = "\u00A0"
                        hdrClass = "yAxisHdr" if not childrenFirst else "yAxisHdrWithChildrenFirst"
                        # one extra header cell per configured non-standard label role
                        for i, role in enumerate(self.rowHdrNonStdRoles):
                            hdr = yStructuralNode.header(role=role, lang=self.lang)
                            etree.SubElement(self.rowElts[hdrRow - 1],
                                             "{http://www.w3.org/1999/xhtml}th",
                                             attrib={"class":hdrClass,
                                                     "style":"text-align:left;max-width:100pt;{0}".format(edgeBorder)}
                                             ).text = hdr or "\u00A0"
                        '''
                        if self.rowHdrDocCol:
                            docCol = self.dataFirstCol - 1 - self.rowHdrCodeCol
                            doc = yStructuralNode.header(role="http://www.xbrl.org/2008/role/documentation")
                            etree.SubElement(self.rowElts[hdrRow - 1],
                                             "{http://www.w3.org/1999/xhtml}th",
                                             attrib={"class":hdrClass,
                                                     "style":"text-align:left;max-width:100pt;{0}".format(edgeBorder)}
                                             ).text = doc or "\u00A0"
                        if self.rowHdrCodeCol:
                            codeCol = self.dataFirstCol - 1
                            code = yStructuralNode.header(role="http://www.eurofiling.info/role/2010/coordinate-code")
                            etree.SubElement(self.rowElts[hdrRow - 1],
                                             "{http://www.w3.org/1999/xhtml}th",
                                             attrib={"class":hdrClass,
                                                     "style":"text-align:center;max-width:40pt;{0}".format(edgeBorder)}
                                             ).text = code or "\u00A0"
                        # gridBorder(self.gridRowHdr, leftCol, self.dataFirstRow - 1, BOTTOMBORDER)
                        '''
                    else:
                        self.rowElts[hdrRow-1].insert(leftCol - 1, elt)
                if isNonAbstract:
                    row += 1
                elif childrenFirst:
                    row = nextRow
                if nestRow > nestedBottomRow:
                    nestedBottomRow = nestRow + (isNonAbstract and not childrenFirst)
                if row > nestedBottomRow:
                    nestedBottomRow = row
                #if renderNow and not childrenFirst:
                #    dummy, row = self.yAxis(leftCol + 1, row, yAxisHdrObj, childrenFirst, True, False) # render on this pass
                if not childrenFirst:
                    dummy, row = self.yAxisByRow(leftCol + 1, row, yStructuralNode, childrenFirst, renderNow, False) # render on this pass
            return (nestedBottomRow, row)
    def yAxisByCol(self, leftCol, row, yParentStructuralNode, childrenFirst, renderNow, atTop):
        """Recursively emit the row (y-axis) headers for the XML table-model output.

        Mirrors ``yAxisByRow`` but writes table-model ``cell``/``label``/
        ``constraint`` elements into ``self.headerCells[leftCol]`` instead of
        HTML <th> cells.  Returns ``(nestedBottomRow, row)``.
        NOTE(review): returns None when *yParentStructuralNode* is None —
        callers appear to always pass a real node; confirm.
        """
        if yParentStructuralNode is not None:
            nestedBottomRow = row
            for yStructuralNode in yParentStructuralNode.childStructuralNodes:
                # measuring pass first (renderNow=childrenFirst) to size the subtree
                nestRow, nextRow = self.yAxisByCol(leftCol + 1, row, yStructuralNode, # nested items before totals
                                                   childrenFirst, childrenFirst, False)
                isAbstract = (yStructuralNode.isAbstract or
                              (yStructuralNode.childStructuralNodes and
                               not isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord))))
                isNonAbstract = not isAbstract
                isLabeled = yStructuralNode.isLabeled
                topRow = row
                if childrenFirst and isNonAbstract:
                    row = nextRow
                #print ( "thisCol {0} leftCol {1} rightCol {2} topRow{3} renderNow {4} label {5}".format(thisCol, leftCol, rightCol, topRow, renderNow, label))
                if renderNow and isLabeled:
                    label = yStructuralNode.header(lang=self.lang,
                                                   returnGenLabel=isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)),
                                                   recurseParent=not isinstance(yStructuralNode.definitionNode, ModelFilterDefinitionNode))
                    brkdownNode = yStructuralNode.breakdownNode
                    rowspan= nestRow - row + 1
                    cellElt = etree.Element(self.tableModelQName("cell"),
                                            attrib={"span": str(rowspan)} if rowspan > 1 else None)
                    elt = etree.SubElement(cellElt, self.tableModelQName("label"))
                    elt.text = label if label != OPEN_ASPECT_ENTRY_SURROGATE else ""
                    self.headerCells[leftCol].append((brkdownNode, cellElt))
                    # self.structuralNodeModelElements.append((yStructuralNode, cellElt))
                    # pad remaining header columns with roll-up placeholder cells
                    for rollUpCol in range(leftCol, self.rowHdrCols - 1):
                        rollUpElt = etree.Element(self.tableModelQName("cell"),
                                                  attrib={"rollup":"true"})
                        self.headerCells[leftCol].append((brkdownNode, rollUpElt))
                    #if isNonAbstract:
                    i = -1 # for case where no enumeration takes place
                    for i, role in enumerate(self.rowHdrNonStdRoles):
                        roleLabel = yStructuralNode.header(role=role, lang=self.lang, recurseParent=False)
                        if roleLabel is not None:
                            cellElt.append(etree.Comment("Label role: {0}, lang {1}"
                                                         .format(os.path.basename(role), self.lang)))
                            labelElt = etree.SubElement(cellElt, self.tableModelQName("label"),
                                                        #attrib={"role":role,
                                                        #        "lang":self.lang}
                                                        ).text = roleLabel
                            self.headerCells[leftCol].append((brkdownNode, cellElt))
                    # one <constraint> (aspect + value) per covered, reportable aspect
                    for aspect in sorted(yStructuralNode.aspectsCovered(), key=lambda a: aspectStr(a)):
                        if yStructuralNode.hasAspect(aspect) and aspect not in (Aspect.DIMENSIONS, Aspect.OMIT_DIMENSIONS):
                            aspectValue = yStructuralNode.aspectValue(aspect)
                            if aspectValue is None: aspectValue = "(bound dynamically)"
                            if isinstance(aspectValue, ModelObject): # typed dimension value
                                aspectValue = innerTextList(aspectValue)
                            if isinstance(aspectValue, QName) and aspectValue.prefix is None: # may be dynamic
                                try:
                                    aspectValue = self.modelXbrl.qnameConcepts[aspectValue].qname # usually has a prefix
                                except KeyError:
                                    pass
                            if isinstance(aspectValue, str) and aspectValue.startswith(OPEN_ASPECT_ENTRY_SURROGATE):
                                continue # not an aspect, position for a new entry
                            elt = etree.SubElement(cellElt, self.tableModelQName("constraint"))
                            etree.SubElement(elt, self.tableModelQName("aspect")
                                             ).text = aspectStr(aspect)
                            etree.SubElement(elt, self.tableModelQName("value")
                                             ).text = xsString(None,None,addQnameValue(self.xmlDoc, aspectValue))
                    '''
                    if self.rowHdrDocCol:
                        labelElt = etree.SubElement(cellElt, self.tableModelQName("label"),
                                                    attrib={"span": str(rowspan)} if rowspan > 1 else None)
                        elt.text = yStructuralNode.header(role="http://www.xbrl.org/2008/role/documentation",
                                                          lang=self.lang)
                        self.rowHdrElts[self.rowHdrCols - 1].append(elt)
                    if self.rowHdrCodeCol:
                        elt = etree.Element(self.tableModelQName("label"),
                                            attrib={"span": str(rowspan)} if rowspan > 1 else None)
                        elt.text = yStructuralNode.header(role="http://www.eurofiling.info/role/2010/coordinate-code",
                                                          lang=self.lang)
                        self.rowHdrElts[self.rowHdrCols - 1 + self.rowHdrDocCol].append(elt)
                    '''
                if isNonAbstract:
                    row += 1
                elif childrenFirst:
                    row = nextRow
                if nestRow > nestedBottomRow:
                    nestedBottomRow = nestRow + (isNonAbstract and not childrenFirst)
                if row > nestedBottomRow:
                    nestedBottomRow = row
                #if renderNow and not childrenFirst:
                #    dummy, row = self.yAxis(leftCol + 1, row, yStructuralNode, childrenFirst, True, False) # render on this pass
                if not childrenFirst:
                    dummy, row = self.yAxisByCol(leftCol + 1, row, yStructuralNode, childrenFirst, renderNow, False) # render on this pass
            return (nestedBottomRow, row)
    def bodyCells(self, row, yParentStructuralNode, xStructuralNodes, zAspectStructuralNodes, yChildrenFirst):
        """Emit the data cells: one cell per (y structural node, x structural node).

        Walks the y-axis tree; for each non-abstract, labeled y node it crosses
        every x node, assembles the cell's aspect values (x, y and z combined),
        finds the single matching fact in the instance (if any), and writes
        either an HTML <td> or a table-model <cell> depending on ``self.type``.
        Returns the next free row index.
        """
        if yParentStructuralNode is not None:
            dimDefaults = self.modelXbrl.qnameDimensionDefaults
            for yStructuralNode in yParentStructuralNode.childStructuralNodes:
                if yChildrenFirst:
                    row = self.bodyCells(row, yStructuralNode, xStructuralNodes, zAspectStructuralNodes, yChildrenFirst)
                if not (yStructuralNode.isAbstract or
                        (yStructuralNode.childStructuralNodes and
                         not isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)))) and yStructuralNode.isLabeled:
                    if self.type == XML:
                        if self.breakdownNodes.get("x"):
                            cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName("cells"),
                                                              attrib={"axis": "x"})
                        else:
                            cellsParentElt = self.cellsParentElt
                    isEntryPrototype = yStructuralNode.isEntryPrototype(default=False) # row to enter open aspects
                    # collect this row's aspect values, keyed by aspect (dimensions keyed by QName)
                    yAspectStructuralNodes = defaultdict(set)
                    for aspect in aspectModels[self.aspectModel]:
                        if yStructuralNode.hasAspect(aspect):
                            if aspect == Aspect.DIMENSIONS:
                                for dim in (yStructuralNode.aspectValue(Aspect.DIMENSIONS) or emptyList):
                                    yAspectStructuralNodes[dim].add(yStructuralNode)
                            else:
                                yAspectStructuralNodes[aspect].add(yStructuralNode)
                    yTagSelectors = yStructuralNode.tagSelectors
                    # data for columns of rows
                    ignoreDimValidity = self.ignoreDimValidity.get()
                    for i, xStructuralNode in enumerate(xStructuralNodes):
                        xAspectStructuralNodes = defaultdict(set)
                        for aspect in aspectModels[self.aspectModel]:
                            if xStructuralNode.hasAspect(aspect):
                                if aspect == Aspect.DIMENSIONS:
                                    for dim in (xStructuralNode.aspectValue(Aspect.DIMENSIONS) or emptyList):
                                        xAspectStructuralNodes[dim].add(xStructuralNode)
                                else:
                                    xAspectStructuralNodes[aspect].add(xStructuralNode)
                        cellTagSelectors = yTagSelectors | xStructuralNode.tagSelectors
                        cellAspectValues = {}
                        matchableAspects = set()
                        # merge x, y and z aspects into one constraint set for this cell
                        for aspect in _DICT_SET(xAspectStructuralNodes.keys()) | _DICT_SET(yAspectStructuralNodes.keys()) | _DICT_SET(zAspectStructuralNodes.keys()):
                            aspectValue = xStructuralNode.inheritedAspectValue(yStructuralNode,
                                                                               self, aspect, cellTagSelectors,
                                                                               xAspectStructuralNodes, yAspectStructuralNodes, zAspectStructuralNodes)
                            # value is None for a dimension whose value is to be not reported in this slice
                            if (isinstance(aspect, _INT) or  # not a dimension
                                dimDefaults.get(aspect) != aspectValue or # explicit dim defaulted will equal the value
                                aspectValue is not None): # typed dim absent will be none
                                cellAspectValues[aspect] = aspectValue
                                matchableAspects.add(aspectModelAspect.get(aspect,aspect)) #filterable aspect from rule aspect
                        cellDefaultedDims = _DICT_SET(dimDefaults) - _DICT_SET(cellAspectValues.keys())
                        priItemQname = cellAspectValues.get(Aspect.CONCEPT)
                        concept = self.modelXbrl.qnameConcepts.get(priItemQname)
                        conceptNotAbstract = concept is None or not concept.isAbstract
                        from arelle.ValidateXbrlDimensions import isFactDimensionallyValid
                        fact = None
                        value = None
                        objectId = None
                        justify = None
                        fp = FactPrototype(self, cellAspectValues)
                        if conceptNotAbstract:
                            # reduce set of matchable facts to those with pri item qname and have dimension aspects
                            facts = self.modelXbrl.factsByQname[priItemQname] if priItemQname else self.modelXbrl.factsInInstance
                            if self.hasTableFilters:
                                facts = self.modelTable.filterFacts(self.rendrCntx, facts)
                            for aspect in matchableAspects:  # trim down facts with explicit dimensions match or just present
                                if isinstance(aspect, QName):
                                    aspectValue = cellAspectValues.get(aspect, None)
                                    if isinstance(aspectValue, ModelDimensionValue):
                                        if aspectValue.isExplicit:
                                            dimMemQname = aspectValue.memberQname # match facts with this explicit value
                                        else:
                                            dimMemQname = None  # match facts that report this dimension
                                    elif isinstance(aspectValue, QName):
                                        dimMemQname = aspectValue  # match facts that have this explicit value
                                    elif aspectValue is None:  # match typed dims that don't report this value
                                        dimMemQname = DEFAULT
                                    else:
                                        dimMemQname = None # match facts that report this dimension
                                    facts = facts & self.modelXbrl.factsByDimMemQname(aspect, dimMemQname)
                            # first fact matching every aspect (and with defaulted dims absent) wins
                            for fact in facts:
                                if (all(aspectMatches(self.rendrCntx, fact, fp, aspect)
                                        for aspect in matchableAspects) and
                                    all(fact.context.dimMemberQname(dim,includeDefaults=True) in (dimDefaults[dim], None)
                                        for dim in cellDefaultedDims) and
                                    len(fp.context.qnameDims) == len(fact.context.qnameDims)):
                                    if yStructuralNode.hasValueExpression(xStructuralNode):
                                        value = yStructuralNode.evalValueExpression(fact, xStructuralNode)
                                    else:
                                        value = fact.effectiveValue
                                    justify = "right" if fact.isNumeric else "left"
                                    break
                        if justify is None:
                            justify = "right" if fp.isNumeric else "left"
                        if conceptNotAbstract:
                            if self.type == XML:
                                cellsParentElt.append(etree.Comment("Cell concept {0}: segDims {1}, scenDims {2}"
                                                                    .format(fp.qname,
                                                                            ', '.join("({}={})".format(dimVal.dimensionQname, dimVal.memberQname)
                                                                                      for dimVal in sorted(fp.context.segDimVals.values(), key=lambda d: d.dimensionQname)),
                                                                            ', '.join("({}={})".format(dimVal.dimensionQname, dimVal.memberQname)
                                                                                      for dimVal in sorted(fp.context.scenDimVals.values(), key=lambda d: d.dimensionQname)),
                                                                            )))
                            if value is not None or ignoreDimValidity or isFactDimensionallyValid(self, fp) or isEntryPrototype:
                                if self.type == HTML:
                                    etree.SubElement(self.rowElts[row - 1],
                                                     "{http://www.w3.org/1999/xhtml}td",
                                                     attrib={"class":"cell",
                                                             "style":"text-align:{0};width:8em".format(justify)}
                                                     ).text = value or "\u00A0"
                                elif self.type == XML:
                                    if value is not None and fact is not None:
                                        cellsParentElt.append(etree.Comment("{0}: context {1}, value {2}, file {3}, line {4}"
                                                                            .format(fact.qname,
                                                                                    fact.contextID,
                                                                                    value[:32], # no more than 32 characters
                                                                                    fact.modelDocument.basename,
                                                                                    fact.sourceline)))
                                    elif fact is not None:
                                        cellsParentElt.append(etree.Comment("Fact was not matched {0}: context {1}, value {2}, file {3}, line {4}, aspects not matched: {5}, dimensions expected to have been defaulted: {6}"
                                                                            .format(fact.qname,
                                                                                    fact.contextID,
                                                                                    fact.effectiveValue[:32],
                                                                                    fact.modelDocument.basename,
                                                                                    fact.sourceline,
                                                                                    ', '.join(str(aspect)
                                                                                              for aspect in matchableAspects
                                                                                              if not aspectMatches(self.rendrCntx, fact, fp, aspect)),
                                                                                    ', '.join(str(dim)
                                                                                              for dim in cellDefaultedDims
                                                                                              if fact.context.dimMemberQname(dim,includeDefaults=True) not in (dimDefaults[dim], None))
                                                                                    )))
                                    cellElt = etree.SubElement(cellsParentElt, self.tableModelQName("cell"))
                                    if value is not None and fact is not None:
                                        etree.SubElement(cellElt, self.tableModelQName("fact")
                                                         ).text = '{}#{}'.format(fact.modelDocument.basename,
                                                                                 elementFragmentIdentifier(fact))
                            else:
                                # dimensionally invalid cell: blocked
                                if self.type == HTML:
                                    etree.SubElement(self.rowElts[row - 1],
                                                     "{http://www.w3.org/1999/xhtml}td",
                                                     attrib={"class":"blockedCell",
                                                             "style":"text-align:{0};width:8em".format(justify)}
                                                     ).text = "\u00A0\u00A0"
                                elif self.type == XML:
                                    etree.SubElement(cellsParentElt, self.tableModelQName("cell"),
                                                     attrib={"blocked":"true"})
                        else: # concept is abstract
                            if self.type == HTML:
                                etree.SubElement(self.rowElts[row - 1],
                                                 "{http://www.w3.org/1999/xhtml}td",
                                                 attrib={"class":"abstractCell",
                                                         "style":"text-align:{0};width:8em".format(justify)}
                                                 ).text = "\u00A0\u00A0"
                            elif self.type == XML:
                                etree.SubElement(cellsParentElt, self.tableModelQName("cell"),
                                                 attrib={"abstract":"true"})
                        fp.clear()  # dereference
                    row += 1
                if not yChildrenFirst:
                    row = self.bodyCells(row, yStructuralNode, xStructuralNodes, zAspectStructuralNodes, yChildrenFirst)
        return row
|
|
from functools import reduce
import numpy as np
import numpy.core.umath as umath
import numpy.core.fromnumeric as fromnumeric
from numpy.testing import (
assert_, assert_raises, assert_equal,
)
from numpy.ma import (
MaskType, MaskedArray, absolute, add, all, allclose, allequal, alltrue,
arange, arccos, arcsin, arctan, arctan2, array, average, choose,
concatenate, conjugate, cos, cosh, count, divide, equal, exp, filled,
getmask, greater, greater_equal, inner, isMaskedArray, less,
less_equal, log, log10, make_mask, masked, masked_array, masked_equal,
masked_greater, masked_greater_equal, masked_inside, masked_less,
masked_less_equal, masked_not_equal, masked_outside,
masked_print_option, masked_values, masked_where, maximum, minimum,
multiply, nomask, nonzero, not_equal, ones, outer, product, put, ravel,
repeat, resize, shape, sin, sinh, sometrue, sort, sqrt, subtract, sum,
take, tan, tanh, transpose, where, zeros,
)
from numpy.compat import pickle
pi = np.pi
def eq(v, w, msg=''):
    """Return the numpy.ma.allclose comparison of *v* and *w*.

    On mismatch, print a diagnostic containing *msg* and both operands,
    then return the (falsy) comparison result.
    """
    ok = allclose(v, w)
    if ok:
        return ok
    print(f'Not eq:{msg}\n{v}\n----{w}')
    return ok
class TestMa:
def setup(self):
x = np.array([1., 1., 1., -2., pi/2.0, 4., 5., -10., 10., 1., 2., 3.])
y = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.])
a10 = 10.
m1 = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
m2 = [0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1]
xm = array(x, mask=m1)
ym = array(y, mask=m2)
z = np.array([-.5, 0., .5, .8])
zm = array(z, mask=[0, 1, 0, 0])
xf = np.where(m1, 1e+20, x)
s = x.shape
xm.set_fill_value(1e+20)
self.d = (x, y, a10, m1, m2, xm, ym, z, zm, xf, s)
    def test_testBasic1d(self):
        """Basic creation/properties of a 1-D masked array vs. its plain source."""
        # Test of basic array creation and properties in 1 dimension.
        (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
        assert_(not isMaskedArray(x))
        assert_(isMaskedArray(xm))
        assert_equal(shape(xm), s)
        assert_equal(xm.shape, s)
        assert_equal(xm.dtype, x.dtype)
        assert_equal(xm.size, reduce(lambda x, y:x * y, s))
        # count() excludes masked entries; sum(m1) is the number masked
        assert_equal(count(xm), len(m1) - reduce(lambda x, y:x + y, m1))
        assert_(eq(xm, xf))
        assert_(eq(filled(xm, 1.e20), xf))
        assert_(eq(x, xm))
    def test_testBasic2d(self):
        """Basic creation/properties after reshaping the fixture arrays to 2-D."""
        # Test of basic array creation and properties in 2 dimensions.
        for s in [(4, 3), (6, 2)]:
            (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
            # in-place shape assignment mutates the shared fixture arrays
            x.shape = s
            y.shape = s
            xm.shape = s
            ym.shape = s
            xf.shape = s
            assert_(not isMaskedArray(x))
            assert_(isMaskedArray(xm))
            assert_equal(shape(xm), s)
            assert_equal(xm.shape, s)
            assert_equal(xm.size, reduce(lambda x, y:x * y, s))
            assert_equal(count(xm),
                         len(m1) - reduce(lambda x, y:x + y, m1))
            assert_(eq(xm, xf))
            assert_(eq(filled(xm, 1.e20), xf))
            assert_(eq(x, xm))
            # rebuild self.d so the next iteration (and later tests) see 1-D data
            self.setup()
    def test_testArithmetic(self):
        """Elementwise arithmetic on masked arrays matches plain ndarray results."""
        # Test of basic arithmetic.
        (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
        a2d = array([[1, 2], [0, 4]])
        a2dm = masked_array(a2d, [[0, 0], [1, 0]])
        # eq/allclose only compares unmasked positions, so these hold
        assert_(eq(a2d * a2d, a2d * a2dm))
        assert_(eq(a2d + a2d, a2d + a2dm))
        assert_(eq(a2d - a2d, a2d - a2dm))
        for s in [(12,), (4, 3), (2, 6)]:
            x = x.reshape(s)
            y = y.reshape(s)
            xm = xm.reshape(s)
            ym = ym.reshape(s)
            xf = xf.reshape(s)
            assert_(eq(-x, -xm))
            assert_(eq(x + y, xm + ym))
            assert_(eq(x - y, xm - ym))
            assert_(eq(x * y, xm * ym))
            # y contains zeros; divisions would warn without errstate
            with np.errstate(divide='ignore', invalid='ignore'):
                assert_(eq(x / y, xm / ym))
            assert_(eq(a10 + y, a10 + ym))
            assert_(eq(a10 - y, a10 - ym))
            assert_(eq(a10 * y, a10 * ym))
            with np.errstate(divide='ignore', invalid='ignore'):
                assert_(eq(a10 / y, a10 / ym))
            assert_(eq(x + a10, xm + a10))
            assert_(eq(x - a10, xm - a10))
            assert_(eq(x * a10, xm * a10))
            assert_(eq(x / a10, xm / a10))
            assert_(eq(x ** 2, xm ** 2))
            assert_(eq(abs(x) ** 2.5, abs(xm) ** 2.5))
            assert_(eq(x ** y, xm ** ym))
            assert_(eq(np.add(x, y), add(xm, ym)))
            assert_(eq(np.subtract(x, y), subtract(xm, ym)))
            assert_(eq(np.multiply(x, y), multiply(xm, ym)))
            with np.errstate(divide='ignore', invalid='ignore'):
                assert_(eq(np.divide(x, y), divide(xm, ym)))
def test_testMixedArithmetic(self):
na = np.array([1])
ma = array([1])
assert_(isinstance(na + ma, MaskedArray))
assert_(isinstance(ma + na, MaskedArray))
    def test_testUfuncs1(self):
        """numpy.ma ufunc wrappers agree with the plain numpy ufuncs on unmasked data."""
        # Test various functions such as sin, cos.
        (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
        assert_(eq(np.cos(x), cos(xm)))
        assert_(eq(np.cosh(x), cosh(xm)))
        assert_(eq(np.sin(x), sin(xm)))
        assert_(eq(np.sinh(x), sinh(xm)))
        assert_(eq(np.tan(x), tan(xm)))
        assert_(eq(np.tanh(x), tanh(xm)))
        # x contains negatives/zeros: suppress sqrt/log warnings for the plain side
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_(eq(np.sqrt(abs(x)), sqrt(xm)))
            assert_(eq(np.log(abs(x)), log(xm)))
            assert_(eq(np.log10(abs(x)), log10(xm)))
        assert_(eq(np.exp(x), exp(xm)))
        assert_(eq(np.arcsin(z), arcsin(zm)))
        assert_(eq(np.arccos(z), arccos(zm)))
        assert_(eq(np.arctan(z), arctan(zm)))
        assert_(eq(np.arctan2(x, y), arctan2(xm, ym)))
        assert_(eq(np.absolute(x), absolute(xm)))
        assert_(eq(np.equal(x, y), equal(xm, ym)))
        assert_(eq(np.not_equal(x, y), not_equal(xm, ym)))
        assert_(eq(np.less(x, y), less(xm, ym)))
        assert_(eq(np.greater(x, y), greater(xm, ym)))
        assert_(eq(np.less_equal(x, y), less_equal(xm, ym)))
        assert_(eq(np.greater_equal(x, y), greater_equal(xm, ym)))
        assert_(eq(np.conjugate(x), conjugate(xm)))
        assert_(eq(np.concatenate((x, y)), concatenate((xm, ym))))
        assert_(eq(np.concatenate((x, y)), concatenate((x, y))))
        assert_(eq(np.concatenate((x, y)), concatenate((xm, y))))
        assert_(eq(np.concatenate((x, y, x)), concatenate((x, ym, x))))
    def test_xtestCount(self):
        """count() returns intp totals of unmasked elements, per-axis or overall."""
        # Test count
        ott = array([0., 1., 2., 3.], mask=[1, 0, 0, 0])
        assert_(count(ott).dtype.type is np.intp)
        assert_equal(3, count(ott))
        assert_equal(1, count(1))
        assert_(eq(0, array(1, mask=[1])))
        ott = ott.reshape((2, 2))
        assert_(count(ott).dtype.type is np.intp)
        # with an axis argument the result is an array, not a scalar
        assert_(isinstance(count(ott, 0), np.ndarray))
        assert_(count(ott).dtype.type is np.intp)
        assert_(eq(3, count(ott)))
        assert_(getmask(count(ott, 0)) is nomask)
        assert_(eq([1, 2], count(ott, 0)))
    def test_testMinMax(self):
        """minimum/maximum.reduce on a masked array match builtin min/max on the data."""
        # Test minimum and maximum.
        (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
        xr = np.ravel(x)  # max doesn't work if shaped
        xmr = ravel(xm)
        # true because of careful selection of data: masked slots never hold the extremes
        assert_(eq(max(xr), maximum.reduce(xmr)))
        assert_(eq(min(xr), minimum.reduce(xmr)))
    def test_testAddSumProd(self):
        """add/sum/product on masked arrays match the plain-numpy equivalents."""
        # Test add, sum, product.
        (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
        assert_(eq(np.add.reduce(x), add.reduce(x)))
        assert_(eq(np.add.accumulate(x), add.accumulate(x)))
        assert_(eq(4, sum(array(4), axis=0)))
        assert_(eq(4, sum(array(4), axis=0)))
        assert_(eq(np.sum(x, axis=0), sum(x, axis=0)))
        # masked entries contribute 0 to sum, 1 to product
        assert_(eq(np.sum(filled(xm, 0), axis=0), sum(xm, axis=0)))
        assert_(eq(np.sum(x, 0), sum(x, 0)))
        assert_(eq(np.product(x, axis=0), product(x, axis=0)))
        assert_(eq(np.product(x, 0), product(x, 0)))
        assert_(eq(np.product(filled(xm, 1), axis=0),
                   product(xm, axis=0)))
        # NOTE(review): s comes from self.d and is (12,), so this branch is
        # never taken for the default fixture — confirm intent
        if len(s) > 1:
            assert_(eq(np.concatenate((x, y), 1),
                       concatenate((xm, ym), 1)))
            assert_(eq(np.add.reduce(x, 1), add.reduce(x, 1)))
            assert_(eq(np.sum(x, 1), sum(x, 1)))
            assert_(eq(np.product(x, 1), product(x, 1)))
    def test_testCI(self):
        """Conversions (str/repr/sort) and item/slice get/set, including masked assignment."""
        # Test of conversions and indexing
        x1 = np.array([1, 2, 4, 3])
        x2 = array(x1, mask=[1, 0, 0, 0])
        x3 = array(x1, mask=[0, 1, 0, 1])
        x4 = array(x1)
        # test conversion to strings
        str(x2) # raises?
        repr(x2) # raises?
        assert_(eq(np.sort(x1), sort(x2, fill_value=0)))
        # tests of indexing
        assert_(type(x2[1]) is type(x1[1]))
        assert_(x1[1] == x2[1])
        assert_(x2[0] is masked)
        assert_(eq(x1[2], x2[2]))
        assert_(eq(x1[2:5], x2[2:5]))
        assert_(eq(x1[:], x2[:]))
        assert_(eq(x1[1:], x3[1:]))
        # parallel mutation of plain and masked arrays must stay equal
        x1[2] = 9
        x2[2] = 9
        assert_(eq(x1, x2))
        x1[1:3] = 99
        x2[1:3] = 99
        assert_(eq(x1, x2))
        x2[1] = masked
        assert_(eq(x1, x2))
        x2[1:3] = masked
        assert_(eq(x1, x2))
        x2[:] = x1
        x2[1] = masked
        assert_(allequal(getmask(x2), array([0, 1, 0, 0])))
        x3[:] = masked_array([1, 2, 3, 4], [0, 1, 1, 0])
        assert_(allequal(getmask(x3), array([0, 1, 1, 0])))
        x4[:] = masked_array([1, 2, 3, 4], [0, 1, 1, 0])
        assert_(allequal(getmask(x4), array([0, 1, 1, 0])))
        assert_(allequal(x4, array([1, 2, 3, 4])))
        x1 = np.arange(5) * 1.0
        x2 = masked_values(x1, 3.0)
        assert_(eq(x1, x2))
        assert_(allequal(array([0, 0, 0, 1, 0], MaskType), x2.mask))
        assert_(eq(3.0, x2.fill_value))
        # object-dtype arrays: scalar access returns the stored Python object
        x1 = array([1, 'hello', 2, 3], object)
        x2 = np.array([1, 'hello', 2, 3], object)
        s1 = x1[1]
        s2 = x2[1]
        assert_equal(type(s2), str)
        assert_equal(type(s1), str)
        assert_equal(s1, s2)
        assert_(x1[1:1].shape == (0,))
    def test_testCopySize(self):
        """Copy vs. share semantics of make_mask/array, plus resize and repeat."""
        # Tests of some subtle points of copying and sizing.
        n = [0, 0, 1, 0, 0]
        m = make_mask(n)
        m2 = make_mask(m)
        # make_mask on an existing mask returns it unchanged (shared)
        assert_(m is m2)
        m3 = make_mask(m, copy=True)
        assert_(m is not m3)
        x1 = np.arange(5)
        y1 = array(x1, mask=m)
        # data is copied by default, the mask object is shared
        assert_(y1._data is not x1)
        assert_(allequal(x1, y1._data))
        assert_(y1._mask is m)
        y1a = array(y1, copy=0)
        # For copy=False, one might expect that the array would just
        # passed on, i.e., that it would be "is" instead of "==".
        # See gh-4043 for discussion.
        assert_(y1a._mask.__array_interface__ ==
                y1._mask.__array_interface__)
        y2 = array(x1, mask=m3, copy=0)
        assert_(y2._mask is m3)
        assert_(y2[2] is masked)
        # assigning through unmasks in place, mutating the shared mask
        y2[2] = 9
        assert_(y2[2] is not masked)
        assert_(y2._mask is m3)
        assert_(allequal(y2.mask, 0))
        y2a = array(x1, mask=m, copy=1)
        assert_(y2a._mask is not m)
        assert_(y2a[2] is masked)
        y2a[2] = 9
        assert_(y2a[2] is not masked)
        assert_(y2a._mask is not m)
        assert_(allequal(y2a.mask, 0))
        y3 = array(x1 * 1.0, mask=m)
        assert_(filled(y3).dtype is (x1 * 1.0).dtype)
        x4 = arange(4)
        x4[2] = masked
        y4 = resize(x4, (8,))
        # resize tiles both the data and the mask
        assert_(eq(concatenate([x4, x4]), y4))
        assert_(eq(getmask(y4), [0, 0, 1, 0, 0, 0, 1, 0]))
        y5 = repeat(x4, (2, 2, 2, 2), axis=0)
        assert_(eq(y5, [0, 0, 1, 1, 2, 2, 3, 3]))
        y6 = repeat(x4, 2, axis=0)
        assert_(eq(y5, y6))
    def test_testPut(self):
        """Fancy-index assignment and put() unmask the stored positions."""
        # Test of put
        d = arange(5)
        n = [0, 0, 0, 1, 1]
        m = make_mask(n)
        m2 = m.copy()
        x = array(d, mask=m)
        assert_(x[3] is masked)
        assert_(x[4] is masked)
        x[[1, 4]] = [10, 40]
        # fancy assignment keeps the shared mask object but clears slot 4
        assert_(x._mask is m)
        assert_(x[3] is masked)
        assert_(x[4] is not masked)
        assert_(eq(x, [0, 10, 2, -1, 40]))
        x = array(d, mask=m2, copy=True)
        x.put([0, 1, 2], [-1, 100, 200])
        # put() replaces the mask rather than updating the original object
        assert_(x._mask is not m2)
        assert_(x[3] is masked)
        assert_(x[4] is masked)
        assert_(eq(x, [-1, 100, 200, 0, 0]))
    def test_testPut2(self):
        """Slice assignment from a masked source propagates the source mask,
        both on the array itself and through a slice view."""
        # Test of put
        d = arange(5)
        x = array(d, mask=[0, 0, 0, 0, 0])
        z = array([10, 40], mask=[1, 0])
        assert_(x[2] is not masked)
        assert_(x[3] is not masked)
        x[2:4] = z
        assert_(x[2] is masked)
        assert_(x[3] is not masked)
        assert_(eq(x, [0, 1, 10, 40, 4]))
        d = arange(5)
        x = array(d, mask=[0, 0, 0, 0, 0])
        y = x[2:4]
        z = array([10, 40], mask=[1, 0])
        assert_(x[2] is not masked)
        assert_(x[3] is not masked)
        # assigning through the view y must also mask/update the base array x
        y[:] = z
        assert_(y[0] is masked)
        assert_(y[1] is not masked)
        assert_(eq(y, [10, 40]))
        assert_(x[2] is masked)
        assert_(x[3] is not masked)
        assert_(eq(x, [0, 1, 10, 40, 4]))
    def test_testMaPut(self):
        """put() with a masked-array source stores both values and mask at the indices."""
        (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
        m = [1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1]
        i = np.nonzero(m)[0]
        put(ym, i, zm)
        assert_(all(take(ym, i, axis=0) == zm))
    def test_testOddFeatures(self):
        """Grab-bag of behaviors: flat/complex views, where(), masked_* helpers, choose().

        NOTE(review): placeholder values such as 99/199 in the expected lists sit
        at masked positions; eq/allclose appears to treat masked slots as equal,
        so only the unmasked entries are actually compared — confirm.
        """
        # Test of other odd features
        x = arange(20)
        x = x.reshape(4, 5)
        x.flat[5] = 12
        assert_(x[1, 0] == 12)
        z = x + 10j * x
        assert_(eq(z.real, x))
        assert_(eq(z.imag, 10 * x))
        assert_(eq((z * conjugate(z)).real, 101 * x * x))
        z.imag[...] = 0.0
        x = arange(10)
        x[3] = masked
        assert_(str(x[3]) == str(masked))
        c = x >= 8
        assert_(count(where(c, masked, masked)) == 0)
        assert_(shape(where(c, masked, masked)) == c.shape)
        z = where(c, x, masked)
        assert_(z.dtype is x.dtype)
        assert_(z[3] is masked)
        assert_(z[4] is masked)
        assert_(z[7] is masked)
        assert_(z[8] is not masked)
        assert_(z[9] is not masked)
        assert_(eq(x, z))
        z = where(c, masked, x)
        assert_(z.dtype is x.dtype)
        assert_(z[3] is masked)
        assert_(z[4] is not masked)
        assert_(z[7] is not masked)
        assert_(z[8] is masked)
        assert_(z[9] is masked)
        # masked_where(c, x) is equivalent to where(c, masked, x)
        z = masked_where(c, x)
        assert_(z.dtype is x.dtype)
        assert_(z[3] is masked)
        assert_(z[4] is not masked)
        assert_(z[7] is not masked)
        assert_(z[8] is masked)
        assert_(z[9] is masked)
        assert_(eq(x, z))
        x = array([1., 2., 3., 4., 5.])
        c = array([1, 1, 1, 0, 0])
        x[2] = masked
        z = where(c, x, -x)
        assert_(eq(z, [1., 2., 0., -4., -5]))
        c[0] = masked
        z = where(c, x, -x)
        assert_(eq(z, [1., 2., 0., -4., -5]))
        assert_(z[0] is masked)
        assert_(z[1] is not masked)
        assert_(z[2] is masked)
        # each masked_<cond> helper equals masked_where with the matching predicate
        assert_(eq(masked_where(greater(x, 2), x), masked_greater(x, 2)))
        assert_(eq(masked_where(greater_equal(x, 2), x),
                   masked_greater_equal(x, 2)))
        assert_(eq(masked_where(less(x, 2), x), masked_less(x, 2)))
        assert_(eq(masked_where(less_equal(x, 2), x), masked_less_equal(x, 2)))
        assert_(eq(masked_where(not_equal(x, 2), x), masked_not_equal(x, 2)))
        assert_(eq(masked_where(equal(x, 2), x), masked_equal(x, 2)))
        assert_(eq(masked_where(not_equal(x, 2), x), masked_not_equal(x, 2)))
        assert_(eq(masked_inside(list(range(5)), 1, 3), [0, 199, 199, 199, 4]))
        assert_(eq(masked_outside(list(range(5)), 1, 3), [199, 1, 2, 3, 199]))
        assert_(eq(masked_inside(array(list(range(5)),
                                       mask=[1, 0, 0, 0, 0]), 1, 3).mask,
                   [1, 1, 1, 1, 0]))
        assert_(eq(masked_outside(array(list(range(5)),
                                        mask=[0, 1, 0, 0, 0]), 1, 3).mask,
                   [1, 1, 0, 0, 1]))
        assert_(eq(masked_equal(array(list(range(5)),
                                      mask=[1, 0, 0, 0, 0]), 2).mask,
                   [1, 0, 1, 0, 0]))
        assert_(eq(masked_not_equal(array([2, 2, 1, 2, 1],
                                          mask=[1, 0, 0, 0, 0]), 2).mask,
                   [1, 0, 1, 0, 1]))
        assert_(eq(masked_where([1, 1, 0, 0, 0], [1, 2, 3, 4, 5]),
                   [99, 99, 3, 4, 5]))
        atest = ones((10, 10, 10), dtype=np.float32)
        btest = zeros(atest.shape, MaskType)
        ctest = masked_where(btest, atest)
        assert_(eq(atest, ctest))
        z = choose(c, (-x, x))
        assert_(eq(z, [1., 2., 0., -4., -5]))
        assert_(z[0] is masked)
        assert_(z[1] is not masked)
        assert_(z[2] is masked)
        x = arange(6)
        x[5] = masked
        y = arange(6) * 10
        y[2] = masked
        c = array([1, 1, 1, 0, 0, 0], mask=[1, 0, 0, 0, 0, 0])
        cm = c.filled(1)
        z = where(c, x, y)
        zm = where(cm, x, y)
        assert_(eq(z, zm))
        assert_(getmask(zm) is nomask)
        assert_(eq(zm, [0, 1, 2, 30, 40, 50]))
        z = where(c, masked, 1)
        assert_(eq(z, [99, 99, 99, 1, 1, 1]))
        z = where(c, 1, masked)
        assert_(eq(z, [99, 1, 1, 99, 99, 99]))
    def test_testMinMax2(self):
        """Elementwise minimum/maximum and their reduce over masked data."""
        # Test of minimum, maximum.
        assert_(eq(minimum([1, 2, 3], [4, 0, 9]), [1, 0, 3]))
        assert_(eq(maximum([1, 2, 3], [4, 0, 9]), [4, 2, 9]))
        x = arange(5)
        y = arange(5) - 2
        x[3] = masked
        y[0] = masked
        assert_(eq(minimum(x, y), where(less(x, y), x, y)))
        assert_(eq(maximum(x, y), where(greater(x, y), x, y)))
        # reduce skips the masked entries (3 in x), so extremes are 0 and 4
        assert_(minimum.reduce(x) == 0)
        assert_(maximum.reduce(x) == 4)
    def test_testTakeTransposeInnerOuter(self):
        """take/transpose/inner/outer on masked arrays match the filled plain results."""
        # Test of take, transpose, inner, outer products
        x = arange(24)
        y = np.arange(24)
        x[5:6] = masked
        x = x.reshape(2, 3, 4)
        y = y.reshape(2, 3, 4)
        assert_(eq(np.transpose(y, (2, 0, 1)), transpose(x, (2, 0, 1))))
        assert_(eq(np.take(y, (2, 0, 1), 1), take(x, (2, 0, 1), 1)))
        # inner/outer treat masked entries as 0, hence the filled(..., 0) reference
        assert_(eq(np.inner(filled(x, 0), filled(y, 0)),
                   inner(x, y)))
        assert_(eq(np.outer(filled(x, 0), filled(y, 0)),
                   outer(x, y)))
        y = array(['abc', 1, 'def', 2, 3], object)
        y[2] = masked
        t = take(y, [0, 3, 4])
        assert_(t[0] == 'abc')
        assert_(t[1] == 2)
        assert_(t[2] == 3)
def test_testInplace(self):
# Test of inplace operations and rich comparisons
y = arange(10)
x = arange(10)
xm = arange(10)
xm[2] = masked
x += 1
assert_(eq(x, y + 1))
xm += 1
assert_(eq(x, y + 1))
x = arange(10)
xm = arange(10)
xm[2] = masked
x -= 1
assert_(eq(x, y - 1))
xm -= 1
assert_(eq(xm, y - 1))
x = arange(10) * 1.0
xm = arange(10) * 1.0
xm[2] = masked
x *= 2.0
assert_(eq(x, y * 2))
xm *= 2.0
assert_(eq(xm, y * 2))
x = arange(10) * 2
xm = arange(10)
xm[2] = masked
x //= 2
assert_(eq(x, y))
xm //= 2
assert_(eq(x, y))
x = arange(10) * 1.0
xm = arange(10) * 1.0
xm[2] = masked
x /= 2.0
assert_(eq(x, y / 2.0))
xm /= arange(10)
assert_(eq(xm, ones((10,))))
x = arange(10).astype(np.float32)
xm = arange(10)
xm[2] = masked
x += 1.
assert_(eq(x, y + 1.))
def test_testPickle(self):
    # Test of pickling
    x = arange(12)
    x[4:10:2] = masked
    x = x.reshape(4, 3)
    # Round-trip through every supported pickle protocol; data and mask
    # must survive unchanged.
    for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
        s = pickle.dumps(x, protocol=proto)
        y = pickle.loads(s)
        assert_(eq(x, y))
def test_testMasked(self):
    # Test of masked element
    xx = arange(6)
    xx[1] = masked
    # The masked singleton prints as '--' and indexing a masked slot
    # returns the singleton itself; filled() substitutes the fill value.
    assert_(str(masked) == '--')
    assert_(xx[1] is masked)
    assert_equal(filled(xx[1], 0), 0)
def test_testAverage1(self):
    # Test of average.
    ott = array([0., 1., 2., 3.], mask=[1, 0, 0, 0])
    # Masked element 0. is excluded: mean of [1, 2, 3] is 2.0.
    assert_(eq(2.0, average(ott, axis=0)))
    assert_(eq(2.0, average(ott, weights=[1., 1., 2., 1.])))
    # returned=True also yields the sum of weights of unmasked entries.
    result, wts = average(ott, weights=[1., 1., 2., 1.], returned=True)
    assert_(eq(2.0, result))
    assert_(wts == 4.0)
    # Fully masked input averages to the masked singleton.
    ott[:] = masked
    assert_(average(ott, axis=0) is masked)
    # 2-d case with an entirely-masked column.
    ott = array([0., 1., 2., 3.], mask=[1, 0, 0, 0])
    ott = ott.reshape(2, 2)
    ott[:, 1] = masked
    assert_(eq(average(ott, axis=0), [2.0, 0.0]))
    assert_(average(ott, axis=1)[0] is masked)
    assert_(eq([2., 0.], average(ott, axis=0)))
    result, wts = average(ott, axis=0, returned=True)
    assert_(eq(wts, [1., 0.]))
def test_testAverage2(self):
    # More tests of average.
    w1 = [0, 1, 1, 1, 1, 0]
    w2 = [[0, 1, 1, 1, 1, 0], [1, 0, 0, 0, 0, 1]]
    x = arange(6)
    # Unweighted and weighted averages of an unmasked 1-d array.
    assert_(allclose(average(x, axis=0), 2.5))
    assert_(allclose(average(x, axis=0, weights=w1), 2.5))
    # 2-d array: flattened, per-axis and weighted averages.
    y = array([arange(6), 2.0 * arange(6)])
    assert_(allclose(average(y, None),
                     np.add.reduce(np.arange(6)) * 3. / 12.))
    assert_(allclose(average(y, axis=0), np.arange(6) * 3. / 2.))
    assert_(allclose(average(y, axis=1),
                     [average(x, axis=0), average(x, axis=0)*2.0]))
    assert_(allclose(average(y, None, weights=w2), 20. / 6.))
    assert_(allclose(average(y, axis=0, weights=w2),
                     [0., 1., 2., 3., 4., 10.]))
    assert_(allclose(average(y, axis=1),
                     [average(x, axis=0), average(x, axis=0)*2.0]))
    # Masked variants: m1 masks nothing, m4 masks everything,
    # m5 leaves only the first element.
    m1 = zeros(6)
    m2 = [0, 0, 1, 1, 0, 0]
    m3 = [[0, 0, 1, 1, 0, 0], [0, 1, 1, 1, 1, 0]]
    m4 = ones(6)
    m5 = [0, 1, 1, 1, 1, 1]
    assert_(allclose(average(masked_array(x, m1), axis=0), 2.5))
    assert_(allclose(average(masked_array(x, m2), axis=0), 2.5))
    assert_(average(masked_array(x, m4), axis=0) is masked)
    assert_equal(average(masked_array(x, m5), axis=0), 0.0)
    assert_equal(count(average(masked_array(x, m4), axis=0)), 0)
    z = masked_array(y, m3)
    assert_(allclose(average(z, None), 20. / 6.))
    assert_(allclose(average(z, axis=0),
                     [0., 1., 99., 99., 4.0, 7.5]))
    assert_(allclose(average(z, axis=1), [2.5, 5.0]))
    assert_(allclose(average(z, axis=0, weights=w2),
                     [0., 1., 99., 99., 4.0, 10.0]))
    # returned=True: the weights output always matches the result shape.
    a = arange(6)
    b = arange(6) * 3
    r1, w1 = average([[a, b], [b, a]], axis=1, returned=True)
    assert_equal(shape(r1), shape(w1))
    assert_equal(r1.shape, w1.shape)
    r2, w2 = average(ones((2, 2, 3)), axis=0, weights=[3, 1], returned=True)
    assert_equal(shape(w2), shape(r2))
    r2, w2 = average(ones((2, 2, 3)), returned=True)
    assert_equal(shape(w2), shape(r2))
    r2, w2 = average(ones((2, 2, 3)), weights=ones((2, 2, 3)), returned=True)
    assert_(shape(w2) == shape(r2))
    # 2-d masked average along each axis and flattened.
    a2d = array([[1, 2], [0, 4]], float)
    a2dm = masked_array(a2d, [[0, 0], [1, 0]])
    a2da = average(a2d, axis=0)
    assert_(eq(a2da, [0.5, 3.0]))
    a2dma = average(a2dm, axis=0)
    assert_(eq(a2dma, [1.0, 3.0]))
    a2dma = average(a2dm, axis=None)
    assert_(eq(a2dma, 7. / 3.))
    a2dma = average(a2dm, axis=1)
    assert_(eq(a2dma, [1.5, 4.0]))
def test_testToPython(self):
    # Conversion of size-1 masked arrays to Python scalars; multi-element
    # arrays (or arrays whose truth is ambiguous) must raise.
    assert_equal(1, int(array(1)))
    assert_equal(1.0, float(array(1)))
    assert_equal(1, int(array([[[1]]])))
    assert_equal(1.0, float(array([[1]])))
    assert_raises(TypeError, float, array([1, 1]))
    assert_raises(ValueError, bool, array([0, 1]))
    assert_raises(ValueError, bool, array([0, 0], mask=[0, 1]))
def test_testScalarArithmetic(self):
    # Arithmetic with 0-d masked arrays: masks must propagate through
    # unary and binary operations on the scalar.
    xm = array(0, mask=1)
    # TODO/FIXME: find out why the following raises a warning in r8247
    with np.errstate(divide='ignore'):
        assert_((1 / array(0)).mask)
    assert_((1 + xm).mask)
    assert_((-xm).mask)
    assert_((-xm).mask)
    assert_(maximum(xm, xm).mask)
    assert_(minimum(xm, xm).mask)
    # filled() preserves dtype; str() of a masked scalar uses the
    # configured masked_print_option.
    assert_(xm.filled().dtype is xm._data.dtype)
    x = array(0, mask=0)
    assert_(x.filled() == x._data)
    assert_equal(str(xm), str(masked_print_option))
def test_testArrayMethods(self):
    # Each ndarray-style method on an (unmasked) masked array must agree
    # with the same method called on the underlying ._data ndarray.
    a = array([1, 3, 2])
    assert_(eq(a.any(), a._data.any()))
    assert_(eq(a.all(), a._data.all()))
    assert_(eq(a.argmax(), a._data.argmax()))
    assert_(eq(a.argmin(), a._data.argmin()))
    assert_(eq(a.choose(0, 1, 2, 3, 4),
               a._data.choose(0, 1, 2, 3, 4)))
    assert_(eq(a.compress([1, 0, 1]), a._data.compress([1, 0, 1])))
    assert_(eq(a.conj(), a._data.conj()))
    assert_(eq(a.conjugate(), a._data.conjugate()))
    m = array([[1, 2], [3, 4]])
    assert_(eq(m.diagonal(), m._data.diagonal()))
    assert_(eq(a.sum(), a._data.sum()))
    assert_(eq(a.take([1, 2]), a._data.take([1, 2])))
    assert_(eq(m.transpose(), m._data.transpose()))
def test_testArrayAttributes(self):
    # A 1-d masked array reports ndim == 1 like a plain ndarray.
    a = array([1, 3, 2])
    assert_equal(a.ndim, 1)
def test_testAPI(self):
    # Every public attribute of np.ndarray must also exist on
    # MaskedArray (API parity check).
    assert_(not [m for m in dir(np.ndarray)
                 if m not in dir(MaskedArray) and
                 not m.startswith('_')])
def test_testSingleElementSubscript(self):
    # Scalar indexing yields 0-d results, whether or not the element
    # (or its neighbours) is masked.
    a = array([1, 3, 2])
    b = array([1, 3, 2], mask=[1, 0, 1])
    assert_equal(a[0].shape, ())
    assert_equal(b[0].shape, ())
    assert_equal(b[1].shape, ())
def test_assignment_by_condition(self):
    # Test for gh-18951
    a = array([1, 2, 3, 4], mask=[1, 0, 1, 0])
    c = a >= 3
    # Assigning through a masked boolean index must not unmask entries
    # whose condition value was itself masked.
    a[c] = 5
    assert_(a[2] is masked)
def test_assignment_by_condition_2(self):
    # gh-19721
    a = masked_array([0, 1], mask=[False, False])
    b = masked_array([0, 1], mask=[True, True])
    mask = a < 1
    # Copying a[mask] into b[mask] must also copy the (unmasked) mask
    # state for the selected positions.
    b[mask] = a[mask]
    expected_mask = [False, True]
    assert_equal(b.mask, expected_mask)
class TestUfuncs:
    """Checks masked-array ufuncs against their plain numpy counterparts."""
    def setup(self):
        # Two masked operands whose masks differ in the first two slots,
        # so both unary and binary mask propagation are exercised.
        # NOTE(review): plain `setup` relies on legacy pytest/nose-style
        # collection — confirm the test runner still honours it.
        self.d = (array([1.0, 0, -1, pi / 2] * 2, mask=[0, 1] + [0] * 6),
                  array([1.0, 0, -1, pi / 2] * 2, mask=[1, 0] + [0] * 6),)
    def test_testUfuncRegression(self):
        # Ufuncs whose invalid-value warnings are expected and ignored.
        f_invalid_ignore = [
            'sqrt', 'arctanh', 'arcsin', 'arccos',
            'arccosh', 'arctanh', 'log', 'log10', 'divide',
            'true_divide', 'floor_divide', 'remainder', 'fmod']
        for f in ['sqrt', 'log', 'log10', 'exp', 'conjugate',
                  'sin', 'cos', 'tan',
                  'arcsin', 'arccos', 'arctan',
                  'sinh', 'cosh', 'tanh',
                  'arcsinh',
                  'arccosh',
                  'arctanh',
                  'absolute', 'fabs', 'negative',
                  'floor', 'ceil',
                  'logical_not',
                  'add', 'subtract', 'multiply',
                  'divide', 'true_divide', 'floor_divide',
                  'remainder', 'fmod', 'hypot', 'arctan2',
                  'equal', 'not_equal', 'less_equal', 'greater_equal',
                  'less', 'greater',
                  'logical_and', 'logical_or', 'logical_xor']:
            # Look the ufunc up in umath first, falling back to
            # fromnumeric for names not implemented there.
            try:
                uf = getattr(umath, f)
            except AttributeError:
                uf = getattr(fromnumeric, f)
            mf = getattr(np.ma, f)
            # Slice off as many operands as the ufunc takes.
            args = self.d[:uf.nin]
            with np.errstate():
                if f in f_invalid_ignore:
                    np.seterr(invalid='ignore')
                if f in ['arctanh', 'log', 'log10']:
                    np.seterr(divide='ignore')
                ur = uf(*args)
                mr = mf(*args)
            # Same filled values and same resulting mask.
            assert_(eq(ur.filled(0), mr.filled(0), f))
            assert_(eqmask(ur.mask, mr.mask))
    def test_reduce(self):
        # Reductions over the first fixture operand (index 1 is masked).
        a = self.d[0]
        assert_(not alltrue(a, axis=0))
        assert_(sometrue(a, axis=0))
        assert_equal(sum(a[:3], axis=0), 0)
        assert_equal(product(a, axis=0), 0)
    def test_minmax(self):
        # min/max of a masked_where() array skip the masked region a < 5.
        a = arange(1, 13).reshape(3, 4)
        amask = masked_where(a < 5, a)
        assert_equal(amask.max(), a.max())
        assert_equal(amask.min(), 5)
        assert_((amask.max(0) == a.max(0)).all())
        assert_((amask.min(0) == [5, 6, 7, 8]).all())
        # The first row is fully masked, so row-wise min/max are masked.
        assert_(amask.max(1)[0].mask)
        assert_(amask.min(1)[0].mask)
    def test_nonzero(self):
        # nonzero() ignores masked entries for every numeric dtype code.
        for t in "?bhilqpBHILQPfdgFDGO":
            x = array([1, 0, 2, 0], mask=[0, 0, 1, 1])
            assert_(eq(nonzero(x), [0]))
class TestArrayMethods:
    """Checks MaskedArray methods against compressed/filled references."""
    def setup(self):
        # Fixed 36-element float fixture, viewed as 1-d (x), 6x6 (X) and
        # 3x2x2x3 (XX), plus masked versions of each sharing mask m.
        # NOTE(review): plain `setup` relies on legacy pytest/nose-style
        # collection — confirm the test runner still honours it.
        x = np.array([8.375, 7.545, 8.828, 8.5, 1.757, 5.928,
                      8.43, 7.78, 9.865, 5.878, 8.979, 4.732,
                      3.012, 6.022, 5.095, 3.116, 5.238, 3.957,
                      6.04, 9.63, 7.712, 3.382, 4.489, 6.479,
                      7.189, 9.645, 5.395, 4.961, 9.894, 2.893,
                      7.357, 9.828, 6.272, 3.758, 6.693, 0.993])
        X = x.reshape(6, 6)
        XX = x.reshape(3, 2, 2, 3)
        m = np.array([0, 1, 0, 1, 0, 0,
                      1, 0, 1, 1, 0, 1,
                      0, 0, 0, 1, 0, 1,
                      0, 0, 0, 1, 1, 1,
                      1, 0, 0, 1, 0, 0,
                      0, 0, 1, 0, 1, 0])
        mx = array(data=x, mask=m)
        mX = array(data=X, mask=m.reshape(X.shape))
        mXX = array(data=XX, mask=m.reshape(XX.shape))
        self.d = (x, X, XX, m, mx, mX, mXX)
    def test_trace(self):
        (x, X, XX, m, mx, mX, mXX,) = self.d
        mXdiag = mX.diagonal()
        # trace == sum of the unmasked diagonal entries.
        assert_equal(mX.trace(), mX.diagonal().compressed().sum())
        assert_(eq(mX.trace(),
                   X.trace() - sum(mXdiag.mask * X.diagonal(),
                                   axis=0)))
    def test_clip(self):
        (x, X, XX, m, mx, mX, mXX,) = self.d
        # clip() preserves the mask and clips the underlying data.
        clipped = mx.clip(2, 8)
        assert_(eq(clipped.mask, mx.mask))
        assert_(eq(clipped._data, x.clip(2, 8)))
        assert_(eq(clipped._data, mx._data.clip(2, 8)))
    def test_ptp(self):
        (x, X, XX, m, mx, mX, mXX,) = self.d
        (n, m) = X.shape
        assert_equal(mx.ptp(), mx.compressed().ptp())
        # Compare per-axis ptp with a manual compressed-per-row/column loop.
        # NOTE(review): np.float_ is removed in NumPy 2.0 (use np.float64).
        rows = np.zeros(n, np.float_)
        cols = np.zeros(m, np.float_)
        for k in range(m):
            cols[k] = mX[:, k].compressed().ptp()
        for k in range(n):
            rows[k] = mX[k].compressed().ptp()
        assert_(eq(mX.ptp(0), cols))
        assert_(eq(mX.ptp(1), rows))
    def test_swapaxes(self):
        (x, X, XX, m, mx, mX, mXX,) = self.d
        mXswapped = mX.swapaxes(0, 1)
        assert_(eq(mXswapped[-1], mX[:, -1]))
        mXXswapped = mXX.swapaxes(0, 2)
        assert_equal(mXXswapped.shape, (2, 2, 3, 3))
    def test_cumprod(self):
        (x, X, XX, m, mx, mX, mXX,) = self.d
        # cumprod treats masked entries as 1 (multiplicative identity).
        mXcp = mX.cumprod(0)
        assert_(eq(mXcp._data, mX.filled(1).cumprod(0)))
        mXcp = mX.cumprod(1)
        assert_(eq(mXcp._data, mX.filled(1).cumprod(1)))
    def test_cumsum(self):
        (x, X, XX, m, mx, mX, mXX,) = self.d
        # cumsum treats masked entries as 0 (additive identity).
        mXcp = mX.cumsum(0)
        assert_(eq(mXcp._data, mX.filled(0).cumsum(0)))
        mXcp = mX.cumsum(1)
        assert_(eq(mXcp._data, mX.filled(0).cumsum(1)))
    def test_varstd(self):
        (x, X, XX, m, mx, mX, mXX,) = self.d
        # var/std over masked arrays match var/std of the compressed data.
        assert_(eq(mX.var(axis=None), mX.compressed().var()))
        assert_(eq(mX.std(axis=None), mX.compressed().std()))
        assert_(eq(mXX.var(axis=3).shape, XX.var(axis=3).shape))
        assert_(eq(mX.var().shape, X.var().shape))
        (mXvar0, mXvar1) = (mX.var(axis=0), mX.var(axis=1))
        for k in range(6):
            assert_(eq(mXvar1[k], mX[k].compressed().var()))
            assert_(eq(mXvar0[k], mX[:, k].compressed().var()))
            assert_(eq(np.sqrt(mXvar0[k]),
                       mX[:, k].compressed().std()))
def eqmask(m1, m2):
    """Return True when the two mask values are equivalent.

    Masks are equivalent when both are ``nomask``, or when neither is and
    they compare element-wise equal.
    """
    if m1 is nomask or m2 is nomask:
        # nomask is a singleton, so identity decides both "both nomask"
        # and "only one is nomask".
        return m1 is m2
    return (m1 == m2).all()
|
|
# The content of this file was contributed by leppton
# (http://mail.python.org/pipermail/patches/2006-November/020942.html) to
# ctypes project, under MIT License.
# This example shows how to use ctypes module to read all
# function names from dll export directory
import os
if os.name != "nt":
raise Exception("Wrong OS")
import ctypes as ctypes
import ctypes.wintypes as wintypes
def convert_cdef_to_pydef(line):
    """convert_cdef_to_pydef(line_from_c_header_file) -> python_tuple_string

    'DWORD var_name[LENGTH];' -> '("var_name", DWORD*LENGTH)'
    doesn't work for all valid c/c++ declarations"""
    # Keep only the text before ';' and expect exactly "<type> <name>".
    tokens = line[:line.find(';')].split()
    if len(tokens) != 2:
        return None
    ctype, ident = tokens
    bracket = ident.find('[')
    if bracket == -1:
        # Plain scalar field.
        return '("%s", %s)' % (ident, ctype)
    # Array field: split off the bracketed length expression.
    length = ident[bracket + 1:-1]
    return '("%s", %s*%s)' % (ident[:bracket], ctype, length)
def convert_cdef_to_structure(cdef, name, data_dict=ctypes.__dict__):
    """convert_cdef_to_structure(struct_definition_from_c_header_file)
    -> python class derived from ctypes.Structure
    limited support for c/c++ syntax"""
    # Translate each declaration line into a '("name", TYPE)' entry and
    # assemble a python list literal out of them.
    entries = []
    for decl in cdef.split('\n'):
        entry = convert_cdef_to_pydef(decl)
        if entry is not None:
            entries.append(' ' * 4 + entry + ',\n')
    fields_src = '[\n' + ''.join(entries) + ']\n'
    class ret_val(ctypes.Structure):
        # Evaluate the field list with the supplied type namespace
        # (ctypes types plus any extra constants).
        _fields_ = eval(fields_src, data_dict)
    ret_val.__name__ = name
    ret_val.__module__ = None
    return ret_val
# struct definitions we need to read dll file export table
winnt = (
('IMAGE_DOS_HEADER', """\
WORD e_magic;
WORD e_cblp;
WORD e_cp;
WORD e_crlc;
WORD e_cparhdr;
WORD e_minalloc;
WORD e_maxalloc;
WORD e_ss;
WORD e_sp;
WORD e_csum;
WORD e_ip;
WORD e_cs;
WORD e_lfarlc;
WORD e_ovno;
WORD e_res[4];
WORD e_oemid;
WORD e_oeminfo;
WORD e_res2[10];
LONG e_lfanew;
"""),
('IMAGE_FILE_HEADER', """\
WORD Machine;
WORD NumberOfSections;
DWORD TimeDateStamp;
DWORD PointerToSymbolTable;
DWORD NumberOfSymbols;
WORD SizeOfOptionalHeader;
WORD Characteristics;
"""),
('IMAGE_DATA_DIRECTORY', """\
DWORD VirtualAddress;
DWORD Size;
"""),
('IMAGE_OPTIONAL_HEADER32', """\
WORD Magic;
BYTE MajorLinkerVersion;
BYTE MinorLinkerVersion;
DWORD SizeOfCode;
DWORD SizeOfInitializedData;
DWORD SizeOfUninitializedData;
DWORD AddressOfEntryPoint;
DWORD BaseOfCode;
DWORD BaseOfData;
DWORD ImageBase;
DWORD SectionAlignment;
DWORD FileAlignment;
WORD MajorOperatingSystemVersion;
WORD MinorOperatingSystemVersion;
WORD MajorImageVersion;
WORD MinorImageVersion;
WORD MajorSubsystemVersion;
WORD MinorSubsystemVersion;
DWORD Win32VersionValue;
DWORD SizeOfImage;
DWORD SizeOfHeaders;
DWORD CheckSum;
WORD Subsystem;
WORD DllCharacteristics;
DWORD SizeOfStackReserve;
DWORD SizeOfStackCommit;
DWORD SizeOfHeapReserve;
DWORD SizeOfHeapCommit;
DWORD LoaderFlags;
DWORD NumberOfRvaAndSizes;
IMAGE_DATA_DIRECTORY DataDirectory[IMAGE_NUMBEROF_DIRECTORY_ENTRIES];
""",
{'IMAGE_NUMBEROF_DIRECTORY_ENTRIES': 16}),
('IMAGE_NT_HEADERS', """\
DWORD Signature;
IMAGE_FILE_HEADER FileHeader;
IMAGE_OPTIONAL_HEADER32 OptionalHeader;
"""),
('IMAGE_EXPORT_DIRECTORY', """\
DWORD Characteristics;
DWORD TimeDateStamp;
WORD MajorVersion;
WORD MinorVersion;
DWORD Name;
DWORD Base;
DWORD NumberOfFunctions;
DWORD NumberOfNames;
DWORD AddressOfFunctions;
DWORD AddressOfNames;
DWORD AddressOfNameOrdinals;
"""),
)
# Construct python ctypes.Structures from above definitions
data_dict = dict(wintypes.__dict__)
for definition in winnt:
    name = definition[0]
    def_str = definition[1]
    # Optional third element carries extra integer constants (e.g. array
    # lengths) needed while evaluating the field list.
    if len(definition) == 3:
        data_dict.update(definition[2])
    type_ = convert_cdef_to_structure(def_str, name, data_dict)
    # Register both the structure and its POINTER type under the module
    # globals, so later code can refer to IMAGE_... / PIMAGE_... directly.
    # Also keep them in data_dict so later definitions can embed them.
    data_dict[name] = type_
    globals()[name] = type_
    ptype = ctypes.POINTER(type_)
    pname = 'P' + name
    data_dict[pname] = ptype
    globals()[pname] = ptype
del data_dict
del winnt
class DllException(Exception):
    """Raised for any failure while loading or parsing a dll export table."""
    pass
def read_export_table(dll_name, mmap=False, use_kernel=False):
    """
    read_export_table(dll_name [,mmap=False [,use_kernel=False]]])
    -> list of exported names
    default is to load dll into memory: dll sections are aligned to
    page boundaries, dll entry points is called, etc...
    with mmap=True dll file image is mapped to memory, Relative Virtual
    Addresses (RVAs) must be mapped to real addresses manually
    with use_kernel=True direct kernel32.dll calls are used,
    instead of python mmap module
    see http://www.windowsitlibrary.com/Content/356/11/1.html
    for details on Portable Executable (PE) file format
    """
    # --- obtain a base address for the dll image ---
    if not mmap:
        # Full load: the loader maps sections at their virtual addresses,
        # so RVAs can be added to the handle directly.
        dll = ctypes.cdll.LoadLibrary(dll_name)
        if dll is None:
            raise DllException("Cant load dll")
        base_addr = dll._handle
    else:
        if not use_kernel:
            # NOTE(review): the file is opened in text mode; 'rb' looks
            # like the correct mode for mmap-ing a binary — confirm.
            fileH = open(dll_name)
            if fileH is None:
                raise DllException("Cant load dll")
            import mmap
            m = mmap.mmap(fileH.fileno(), 0, None, mmap.ACCESS_READ)
            # id(m)+8 sucks, is there better way?
            # (CPython-implementation-specific: digs the buffer pointer
            # out of the mmap object's memory layout.)
            base_addr = ctypes.cast(id(m) + 8, ctypes.POINTER(ctypes.c_int))[0]
        else:
            # Map the file via kernel32 CreateFileA/CreateFileMapping/
            # MapViewOfFile instead of the python mmap module.
            kernel32 = ctypes.windll.kernel32
            if kernel32 is None:
                raise DllException("cant load kernel")
            fileH = kernel32.CreateFileA(dll_name, 0x00120089, 1, 0, 3, 0, 0)
            if fileH == 0:
                raise DllException(
                    "Cant open, errcode = %d" %
                    kernel32.GetLastError())
            mapH = kernel32.CreateFileMappingW(fileH, 0, 0x8000002, 0, 0, 0)
            if mapH == 0:
                raise DllException(
                    "Cant mmap, errocode = %d" %
                    kernel32.GetLastError())
            base_addr = ctypes.windll.kernel32.MapViewOfFile(
                mapH, 0x4, 0, 0, 0)
            if base_addr == 0:
                raise DllException(
                    "Cant mmap(2), errocode = %d" %
                    kernel32.GetLastError())
    # dbghelp provides ImageNtHeader/ImageRvaToVa for PE parsing.
    dbghelp = ctypes.windll.dbghelp
    if dbghelp is None:
        raise DllException("dbghelp.dll not installed")
    pimage_nt_header = dbghelp.ImageNtHeader(base_addr)
    if pimage_nt_header == 0:
        raise DllException("Cant find IMAGE_NT_HEADER")
    # Functions like dbghelp.ImageNtHeader above have no type information
    # let's make one prototype for extra buzz
    # PVOID ImageRvaToVa(PIMAGE_NT_HEADERS NtHeaders, PVOID Base,
    #                    ULONG Rva, PIMAGE_SECTION_HEADER* LastRvaSection)
    # we use integers instead of pointers, coz integers are better
    # for pointer arithmetic
    prototype = ctypes.WINFUNCTYPE(
        ctypes.c_int,
        ctypes.c_int,
        ctypes.c_int,
        ctypes.c_int,
        ctypes.c_int)
    paramflags = (
        (1, "NtHeaders", pimage_nt_header),
        (1, "Base", base_addr),
        (1, "Rva"),
        (1, "LastRvaSection", 0))
    ImageRvaToVa = prototype(('ImageRvaToVa', dbghelp), paramflags)
    def cast_rva(rva, type_):
        # Convert an RVA to a virtual address and cast it to `type_`.
        # In mmap mode, RVAs past the headers must be translated through
        # the section table via ImageRvaToVa.
        va = base_addr + rva
        if mmap and va > pimage_nt_header:
            va = ImageRvaToVa(Rva=rva)
            if va == 0:
                raise DllException("ImageRvaToVa failed")
        return ctypes.cast(va, type_)
    # --- walk DOS header -> NT headers -> optional header ---
    if not mmap:
        dos_header = cast_rva(0, PIMAGE_DOS_HEADER)[0]
        if dos_header.e_magic != 0x5A4D:  # 'MZ'
            raise DllException("IMAGE_DOS_HEADER.e_magic error")
        nt_header = cast_rva(dos_header.e_lfanew, PIMAGE_NT_HEADERS)[0]
    else:
        nt_header = ctypes.cast(pimage_nt_header, PIMAGE_NT_HEADERS)[0]
    if nt_header.Signature != 0x00004550:  # 'PE\0\0'
        raise DllException("IMAGE_NT_HEADERS.Signature error")
    opt_header = nt_header.OptionalHeader
    if opt_header.Magic != 0x010b:  # PE32 (32-bit) only
        raise DllException("IMAGE_OPTIONAL_HEADERS32.Magic error")
    # --- read the export directory (DataDirectory[0]) ---
    ret_val = []
    exports_dd = opt_header.DataDirectory[0]
    # NOTE(review): `exports_dd != 0` compares a Structure instance with an
    # int, which is always True; the intent was probably to test
    # exports_dd.VirtualAddress != 0 — confirm before relying on it.
    if opt_header.NumberOfRvaAndSizes > 0 or exports_dd != 0:
        export_dir = cast_rva(
            exports_dd.VirtualAddress,
            PIMAGE_EXPORT_DIRECTORY)[0]
        nNames = export_dir.NumberOfNames
        if nNames > 0:
            # AddressOfNames is an array of RVAs to NUL-terminated names.
            PNamesType = ctypes.POINTER(ctypes.c_int * nNames)
            names = cast_rva(export_dir.AddressOfNames, PNamesType)[0]
            for rva in names:
                name = cast_rva(rva, ctypes.c_char_p).value
                ret_val.append(name)
    # --- release mapping resources (load-library mode leaks the handle) ---
    if mmap:
        if use_kernel:
            kernel32.UnmapViewOfFile(base_addr)
            kernel32.CloseHandle(mapH)
            kernel32.CloseHandle(fileH)
        else:
            m.close()
            fileH.close()
    return ret_val
if __name__ == '__main__':
    import sys
    # Usage: print every name exported by the given dll.
    if len(sys.argv) != 2:
        print('usage: %s dll_file_name' % sys.argv[0])
        # Exit nonzero on a usage error (the original exited with 0 and
        # carried a duplicated commented-out call, removed here).
        sys.exit(1)
    names = read_export_table(sys.argv[1], mmap=False, use_kernel=False)
    for name in names:
        print(name)
|
|
from __future__ import unicode_literals
import datetime
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
F, Aggregate, Avg, Count, DecimalField, DurationField, FloatField, Func,
IntegerField, Max, Min, Sum, Value,
)
from django.test import TestCase, ignore_warnings
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango110Warning
from .models import Author, Book, Publisher, Store
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
    # Shared fixture for all aggregation tests: 9 authors, 5 publishers,
    # 6 books and 3 stores with m2m links between them.
    # Authors (ages are aggregated by many tests below).
    cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
    cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
    cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
    cls.a4 = Author.objects.create(name='James Bennett', age=29)
    cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
    cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
    cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
    cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
    cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
    # Symmetric-looking friendship graph (self-referential m2m).
    cls.a1.friends.add(cls.a2, cls.a4)
    cls.a2.friends.add(cls.a1, cls.a7)
    cls.a4.friends.add(cls.a1)
    cls.a5.friends.add(cls.a6, cls.a7)
    cls.a6.friends.add(cls.a5, cls.a7)
    cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
    cls.a8.friends.add(cls.a9)
    cls.a9.friends.add(cls.a8)
    # Publishers; only p1/p2 have a duration (used by Avg/Sum duration tests).
    cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1))
    cls.p2 = Publisher.objects.create(name='Sams', num_awards=1, duration=datetime.timedelta(days=2))
    cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
    cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
    cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
    # Books with ratings/pages/prices used as aggregation targets.
    cls.b1 = Book.objects.create(
        isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
        pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
        pubdate=datetime.date(2007, 12, 6)
    )
    cls.b2 = Book.objects.create(
        isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
        pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
        pubdate=datetime.date(2008, 3, 3)
    )
    cls.b3 = Book.objects.create(
        isbn='159059996', name='Practical Django Projects',
        pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
        pubdate=datetime.date(2008, 6, 23)
    )
    cls.b4 = Book.objects.create(
        isbn='013235613', name='Python Web Development with Django',
        pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
        pubdate=datetime.date(2008, 11, 3)
    )
    cls.b5 = Book.objects.create(
        isbn='013790395', name='Artificial Intelligence: A Modern Approach',
        pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
        pubdate=datetime.date(1995, 1, 15)
    )
    cls.b6 = Book.objects.create(
        isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
        pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
        pubdate=datetime.date(1991, 10, 15)
    )
    # Book <-> author m2m links.
    cls.b1.authors.add(cls.a1, cls.a2)
    cls.b2.authors.add(cls.a3)
    cls.b3.authors.add(cls.a4)
    cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
    cls.b5.authors.add(cls.a8, cls.a9)
    cls.b6.authors.add(cls.a8)
    # Stores carrying overlapping subsets of the books.
    s1 = Store.objects.create(
        name='Amazon.com',
        original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
        friday_night_closing=datetime.time(23, 59, 59)
    )
    s2 = Store.objects.create(
        name='Books.com',
        original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
        friday_night_closing=datetime.time(23, 59, 59)
    )
    s3 = Store.objects.create(
        name="Mamma and Pappa's Books",
        original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
        friday_night_closing=datetime.time(21, 30)
    )
    s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
    s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
    s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
    # aggregate() with no expressions returns an empty dict.
    self.assertEqual(Author.objects.all().aggregate(), {})
def test_single_aggregate(self):
    # A single positional aggregate gets the default alias "<field>__avg".
    vals = Author.objects.aggregate(Avg("age"))
    self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
    # Several aggregates in one call are merged into one result dict.
    vals = Author.objects.aggregate(Sum("age"), Avg("age"))
    self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
def test_filter_aggregate(self):
    # Aggregation runs over the filtered queryset only.
    vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["age__sum"], 254)
def test_related_aggregate(self):
    # Aggregates may traverse m2m, reverse m2m and FK relations.
    vals = Author.objects.aggregate(Avg("friends__age"))
    self.assertEqual(len(vals), 1)
    self.assertAlmostEqual(vals["friends__age__avg"], 34.07, places=2)
    vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
    self.assertEqual(len(vals), 1)
    self.assertAlmostEqual(vals["authors__age__avg"], 38.2857, places=2)
    vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["book__rating__avg"], 4.0)
    vals = Book.objects.aggregate(Sum("publisher__num_awards"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["publisher__num_awards__sum"], 30)
    vals = Publisher.objects.aggregate(Sum("book__price"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["book__price__sum"], Decimal("270.27"))
def test_aggregate_multi_join(self):
    # Aggregates can span more than one join (store -> book -> author,
    # author -> book -> publisher).
    vals = Store.objects.aggregate(Max("books__authors__age"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["books__authors__age__max"], 57)
    vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["book__publisher__num_awards__min"], 1)
def test_aggregate_alias(self):
    # A keyword argument names the aggregate in the result dict.
    vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
    self.assertEqual(len(vals), 1)
    self.assertAlmostEqual(vals["amazon_mean"], 4.08, places=2)
def test_annotate_basic(self):
    # annotate() with no arguments is a no-op on the queryset contents.
    self.assertQuerysetEqual(
        Book.objects.annotate().order_by('pk'), [
            "The Definitive Guide to Django: Web Development Done Right",
            "Sams Teach Yourself Django in 24 Hours",
            "Practical Django Projects",
            "Python Web Development with Django",
            "Artificial Intelligence: A Modern Approach",
            "Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
        ],
        lambda b: b.name
    )
    # An aliased annotation becomes an attribute on each instance.
    books = Book.objects.annotate(mean_age=Avg("authors__age"))
    b = books.get(pk=self.b1.pk)
    self.assertEqual(
        b.name,
        'The Definitive Guide to Django: Web Development Done Right'
    )
    self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
    # Annotations survive defer(): the deferred field still loads lazily.
    qs = Book.objects.annotate(
        page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
    rows = [
        (1, "159059725", 447, "The Definitive Guide to Django: Web Development Done Right")
    ]
    self.assertQuerysetEqual(
        qs.order_by('pk'), rows,
        lambda r: (r.id, r.isbn, r.page_sum, r.name)
    )
def test_annotate_defer_select_related(self):
    # Annotations also combine with select_related() + defer().
    qs = Book.objects.select_related('contact').annotate(
        page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
    rows = [
        (1, "159059725", 447, "Adrian Holovaty",
         "The Definitive Guide to Django: Web Development Done Right")
    ]
    self.assertQuerysetEqual(
        qs.order_by('pk'), rows,
        lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name)
    )
def test_annotate_m2m(self):
    # Annotating across an m2m; unaliased annotations get default names.
    books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
    self.assertQuerysetEqual(
        books, [
            ('Artificial Intelligence: A Modern Approach', 51.5),
            ('Practical Django Projects', 29.0),
            ('Python Web Development with Django', Approximate(30.3, places=1)),
            ('Sams Teach Yourself Django in 24 Hours', 45.0)
        ],
        lambda b: (b.name, b.authors__age__avg),
    )
    # Count over the m2m gives the number of authors per book.
    books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
    self.assertQuerysetEqual(
        books, [
            ('Artificial Intelligence: A Modern Approach', 2),
            ('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
            ('Practical Django Projects', 1),
            ('Python Web Development with Django', 3),
            ('Sams Teach Yourself Django in 24 Hours', 1),
            ('The Definitive Guide to Django: Web Development Done Right', 2)
        ],
        lambda b: (b.name, b.num_authors)
    )
def test_backwards_m2m_annotate(self):
    # Annotating through the reverse m2m (author -> book).
    authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
    self.assertQuerysetEqual(
        authors, [
            ('Adrian Holovaty', 4.5),
            ('Brad Dayley', 3.0),
            ('Jacob Kaplan-Moss', 4.5),
            ('James Bennett', 4.0),
            ('Paul Bissex', 4.0),
            ('Stuart Russell', 4.0)
        ],
        lambda a: (a.name, a.book__rating__avg)
    )
    authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
    self.assertQuerysetEqual(
        authors, [
            ('Adrian Holovaty', 1),
            ('Brad Dayley', 1),
            ('Jacob Kaplan-Moss', 1),
            ('James Bennett', 1),
            ('Jeffrey Forcier', 1),
            ('Paul Bissex', 1),
            ('Peter Norvig', 2),
            ('Stuart Russell', 1),
            ('Wesley J. Chun', 1)
        ],
        lambda a: (a.name, a.num_books)
    )
def test_reverse_fkey_annotate(self):
    # Forward FK annotation: each book sees its publisher's awards.
    books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
    self.assertQuerysetEqual(
        books, [
            ('Artificial Intelligence: A Modern Approach', 7),
            ('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
            ('Practical Django Projects', 3),
            ('Python Web Development with Django', 7),
            ('Sams Teach Yourself Django in 24 Hours', 1),
            ('The Definitive Guide to Django: Web Development Done Right', 3)
        ],
        lambda b: (b.name, b.publisher__num_awards__sum)
    )
    # Reverse FK annotation: publishers with no books yield None.
    publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
    self.assertQuerysetEqual(
        publishers, [
            ('Apress', Decimal("59.69")),
            ("Jonno's House of Books", None),
            ('Morgan Kaufmann', Decimal("75.00")),
            ('Prentice Hall', Decimal("112.49")),
            ('Sams', Decimal("23.09"))
        ],
        lambda p: (p.name, p.book__price__sum)
    )
def test_annotate_values(self):
    # annotate() then values(): the annotation appears alongside all fields.
    books = list(Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values())
    self.assertEqual(
        books, [
            {
                "contact_id": 1,
                "id": 1,
                "isbn": "159059725",
                "mean_age": 34.5,
                "name": "The Definitive Guide to Django: Web Development Done Right",
                "pages": 447,
                "price": Approximate(Decimal("30")),
                "pubdate": datetime.date(2007, 12, 6),
                "publisher_id": 1,
                "rating": 4.5,
            }
        ]
    )
    # values() restricted to selected fields still includes the annotation
    # when it is named explicitly.
    books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
    self.assertEqual(
        list(books), [
            {
                "pk": 1,
                "isbn": "159059725",
                "mean_age": 34.5,
            }
        ]
    )
    # values("name") excludes the annotation from the output dicts.
    books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values("name")
    self.assertEqual(
        list(books), [
            {
                "name": "The Definitive Guide to Django: Web Development Done Right"
            }
        ]
    )
    # values() before annotate(): annotation is appended to every row.
    books = Book.objects.filter(pk=self.b1.pk).values().annotate(mean_age=Avg('authors__age'))
    self.assertEqual(
        list(books), [
            {
                "contact_id": 1,
                "id": 1,
                "isbn": "159059725",
                "mean_age": 34.5,
                "name": "The Definitive Guide to Django: Web Development Done Right",
                "pages": 447,
                "price": Approximate(Decimal("30")),
                "pubdate": datetime.date(2007, 12, 6),
                "publisher_id": 1,
                "rating": 4.5,
            }
        ]
    )
    # values() before annotate() groups by the selected field (rating).
    books = Book.objects.values("rating").annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")).order_by("rating")
    self.assertEqual(
        list(books), [
            {
                "rating": 3.0,
                "n_authors": 1,
                "mean_age": 45.0,
            },
            {
                "rating": 4.0,
                "n_authors": 6,
                "mean_age": Approximate(37.16, places=1)
            },
            {
                "rating": 4.5,
                "n_authors": 2,
                "mean_age": 34.5,
            },
            {
                "rating": 5.0,
                "n_authors": 1,
                "mean_age": 57.0,
            }
        ]
    )
    # Self-referential m2m annotation; authors without friends get None.
    authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
    self.assertEqual(len(authors), 9)
    self.assertQuerysetEqual(
        authors, [
            ('Adrian Holovaty', 32.0),
            ('Brad Dayley', None),
            ('Jacob Kaplan-Moss', 29.5),
            ('James Bennett', 34.0),
            ('Jeffrey Forcier', 27.0),
            ('Paul Bissex', 31.0),
            ('Peter Norvig', 46.0),
            ('Stuart Russell', 57.0),
            ('Wesley J. Chun', Approximate(33.66, places=1))
        ],
        lambda a: (a.name, a.friends__age__avg)
    )
def test_count(self):
    # Count of all ratings vs. count of distinct rating values.
    vals = Book.objects.aggregate(Count("rating"))
    self.assertEqual(vals, {"rating__count": 6})
    vals = Book.objects.aggregate(Count("rating", distinct=True))
    self.assertEqual(vals, {"rating__count": 4})
def test_fkey_aggregate(self):
    # Counting a relation's pk explicitly equals counting the relation.
    explicit = list(Author.objects.annotate(Count('book__id')))
    implicit = list(Author.objects.annotate(Count('book')))
    self.assertEqual(explicit, implicit)
def test_annotate_ordering(self):
    # Ordering by an annotation, ascending then descending.
    books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
    self.assertEqual(
        list(books), [
            {
                "rating": 4.5,
                "oldest": 35,
            },
            {
                "rating": 3.0,
                "oldest": 45
            },
            {
                "rating": 4.0,
                "oldest": 57,
            },
            {
                "rating": 5.0,
                "oldest": 57,
            }
        ]
    )
    books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
    self.assertEqual(
        list(books), [
            {
                "rating": 5.0,
                "oldest": 57,
            },
            {
                "rating": 4.0,
                "oldest": 57,
            },
            {
                "rating": 3.0,
                "oldest": 45,
            },
            {
                "rating": 4.5,
                "oldest": 35,
            }
        ]
    )
def test_aggregate_annotation(self):
    # Aggregating over a previously-annotated value (avg authors per book).
    vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
    self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
    # Avg over DurationField: mean of 1 and 2 days is 1.5 days
    # (NULL durations are excluded by the aggregate).
    self.assertEqual(
        Publisher.objects.aggregate(Avg('duration', output_field=DurationField())),
        {'duration__avg': datetime.timedelta(days=1, hours=12)}
    )
def test_sum_duration_field(self):
    # Sum over DurationField: 1 day + 2 days (NULLs ignored).
    self.assertEqual(
        Publisher.objects.aggregate(Sum('duration', output_field=DurationField())),
        {'duration__sum': datetime.timedelta(days=3)}
    )
def test_sum_distinct_aggregate(self):
    """
    Sum on a distinct() QuerySet should aggregate only the distinct items.
    """
    # a8 appears twice (books 5 and 6), so the raw count is 3 but the
    # distinct author set has 2 members.
    authors = Author.objects.filter(book__in=[5, 6])
    self.assertEqual(authors.count(), 3)
    distinct_authors = authors.distinct()
    self.assertEqual(distinct_authors.count(), 2)
    # Selected author ages are 57 and 46
    age_sum = distinct_authors.aggregate(Sum('age'))
    self.assertEqual(age_sum['age__sum'], 103)
    def test_filtering(self):
        """Filtering on annotations (__gt, __range, __in, __isnull) and the
        significance of filter/annotate ordering in the query chain."""
        p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
        Book.objects.create(
            name='ExpensiveBook1',
            pages=1,
            isbn='111',
            rating=3.5,
            price=Decimal("1000"),
            publisher=p,
            contact_id=1,
            pubdate=datetime.date(2008, 12, 1)
        )
        Book.objects.create(
            name='ExpensiveBook2',
            pages=1,
            isbn='222',
            rating=4.0,
            price=Decimal("1000"),
            publisher=p,
            contact_id=1,
            pubdate=datetime.date(2008, 12, 2)
        )
        Book.objects.create(
            name='ExpensiveBook3',
            pages=1,
            isbn='333',
            rating=4.5,
            price=Decimal("35"),
            publisher=p,
            contact_id=1,
            pubdate=datetime.date(2008, 12, 3)
        )
        publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Prentice Hall",
                "Expensive Publisher",
            ],
            lambda p: p.name,
        )
        publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Apress",
                "Sams",
                "Prentice Hall",
                "Expensive Publisher",
            ],
            lambda p: p.name
        )
        # annotate-then-filter: the price filter applies to the full count
        publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Prentice Hall",
                "Expensive Publisher",
            ],
            lambda p: p.name,
        )
        # filter-then-annotate: only books matching the filter are counted
        publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
            ],
            lambda p: p.name
        )
        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Sams",
                "Prentice Hall",
                "Morgan Kaufmann",
                "Expensive Publisher",
            ],
            lambda p: p.name
        )
        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Sams",
                "Prentice Hall",
                "Morgan Kaufmann",
            ],
            lambda p: p.name
        )
        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Sams",
                "Morgan Kaufmann",
                "Expensive Publisher",
            ],
            lambda p: p.name,
        )
        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
        self.assertEqual(len(publishers), 0)
    def test_annotation(self):
        """Annotations over related fields can be aggregated and filtered on."""
        vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
        self.assertEqual(vals, {"friends__id__count": 2})
        books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk")
        self.assertQuerysetEqual(
            books, [
                "The Definitive Guide to Django: Web Development Done Right",
                "Artificial Intelligence: A Modern Approach",
            ],
            lambda b: b.name
        )
        authors = Author.objects.annotate(num_friends=Count("friends__id", distinct=True)).filter(num_friends=0).order_by("pk")
        self.assertQuerysetEqual(
            authors, [
                "Brad Dayley",
            ],
            lambda a: a.name
        )
        publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Prentice Hall",
            ],
            lambda p: p.name
        )
        publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1)
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
            ],
            lambda p: p.name
        )
        books = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1)
        self.assertQuerysetEqual(
            books, [
                "Artificial Intelligence: A Modern Approach",
            ],
            lambda b: b.name
        )
    def test_more_aggregation(self):
        """Chaining annotate + filter + aggregate in a single query."""
        a = Author.objects.get(name__contains='Norvig')
        b = Book.objects.get(name__contains='Done Right')
        b.authors.add(a)
        b.save()
        vals = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1).aggregate(Avg("rating"))
        self.assertEqual(vals, {"rating__avg": 4.25})
    def test_even_more_aggregate(self):
        """Annotate + exclude + order_by + values over dates, and Max/Min over time fields."""
        publishers = Publisher.objects.annotate(
            earliest_book=Min("book__pubdate"),
        ).exclude(earliest_book=None).order_by("earliest_book").values(
            'earliest_book',
            'num_awards',
            'id',
            'name',
        )
        self.assertEqual(
            list(publishers), [
                {
                    'earliest_book': datetime.date(1991, 10, 15),
                    'num_awards': 9,
                    'id': 4,
                    'name': 'Morgan Kaufmann'
                },
                {
                    'earliest_book': datetime.date(1995, 1, 15),
                    'num_awards': 7,
                    'id': 3,
                    'name': 'Prentice Hall'
                },
                {
                    'earliest_book': datetime.date(2007, 12, 6),
                    'num_awards': 3,
                    'id': 1,
                    'name': 'Apress'
                },
                {
                    'earliest_book': datetime.date(2008, 3, 3),
                    'num_awards': 1,
                    'id': 2,
                    'name': 'Sams'
                }
            ]
        )
        vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
        self.assertEqual(
            vals,
            {
                "friday_night_closing__max": datetime.time(23, 59, 59),
                "original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
            }
        )
    def test_annotate_values_list(self):
        """values_list() can select annotation aliases, including flat=True."""
        books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("pk", "isbn", "mean_age")
        self.assertEqual(
            list(books), [
                (1, "159059725", 34.5),
            ]
        )
        books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("isbn")
        self.assertEqual(
            list(books), [
                ('159059725',)
            ]
        )
        books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
        self.assertEqual(
            list(books), [
                (34.5,)
            ]
        )
        books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age", flat=True)
        self.assertEqual(list(books), [34.5])
        # values_list before annotate: the annotation is grouped per price
        books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
        self.assertEqual(
            list(books), [
                (Decimal("29.69"), 2),
                (Decimal('23.09'), 1),
                (Decimal('30'), 1),
                (Decimal('75'), 1),
                (Decimal('82.8'), 1),
            ]
        )
    def test_dates_with_aggregation(self):
        """
        Test that .dates() returns a distinct set of dates when applied to a
        QuerySet with aggregation.
        Refs #18056. Previously, .dates() would return distinct (date_kind,
        aggregation) sets, in this case (year, num_authors), so 2008 would be
        returned twice because there are books from 2008 with a different
        number of authors.
        """
        dates = Book.objects.annotate(num_authors=Count("authors")).dates('pubdate', 'year')
        # Each year appears exactly once regardless of the annotation's values.
        self.assertQuerysetEqual(
            dates, [
                "datetime.date(1991, 1, 1)",
                "datetime.date(1995, 1, 1)",
                "datetime.date(2007, 1, 1)",
                "datetime.date(2008, 1, 1)"
            ]
        )
    def test_values_aggregation(self):
        """Aggregating over a values() queryset (refs #20782)."""
        # Refs #20782
        max_rating = Book.objects.values('rating').aggregate(max_rating=Max('rating'))
        self.assertEqual(max_rating['max_rating'], 5)
        max_books_per_rating = Book.objects.values('rating').annotate(
            books_per_rating=Count('id')
        ).aggregate(Max('books_per_rating'))
        self.assertEqual(
            max_books_per_rating,
            {'books_per_rating__max': 3})
    def test_ticket17424(self):
        """
        Check that doing exclude() on a foreign model after annotate()
        doesn't crash.
        """
        all_books = list(Book.objects.values_list('pk', flat=True).order_by('pk'))
        annotated_books = Book.objects.order_by('pk').annotate(one=Count("id"))
        # The value doesn't matter, we just need any negative
        # constraint on a related model that's a noop.
        excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
        # Try to generate query tree
        str(excluded_books.query)
        self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk)
        # Check internal state
        self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
        self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
    def test_ticket12886(self):
        """
        Check that aggregation over sliced queryset works correctly.
        """
        qs = Book.objects.all().order_by('-rating')[0:3]
        vals = qs.aggregate(average_top3_rating=Avg('rating'))['average_top3_rating']
        self.assertAlmostEqual(vals, 4.5, places=2)
    def test_ticket11881(self):
        """
        Check that subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE
        or select_related() stuff.
        """
        qs = Book.objects.all().select_for_update().order_by(
            'pk').select_related('publisher').annotate(max_pk=Max('pk'))
        with CaptureQueriesContext(connection) as captured_queries:
            qs.aggregate(avg_pk=Avg('max_pk'))
        self.assertEqual(len(captured_queries), 1)
        qstr = captured_queries[0]['sql'].lower()
        self.assertNotIn('for update', qstr)
        forced_ordering = connection.ops.force_no_ordering()
        if forced_ordering:
            # If the backend needs to force an ordering we make sure it's
            # the only "ORDER BY" clause present in the query.
            self.assertEqual(
                re.findall(r'order by (\w+)', qstr),
                [', '.join(f[1][0] for f in forced_ordering).lower()]
            )
        else:
            self.assertNotIn('order by', qstr)
        # select_related('publisher') must not leak a JOIN into the subquery.
        self.assertEqual(qstr.count(' join '), 0)
    def test_decimal_max_digits_has_no_effect(self):
        """Sum over a DecimalField may exceed the field's max_digits."""
        Book.objects.all().delete()
        a1 = Author.objects.first()
        p1 = Publisher.objects.first()
        thedate = timezone.now()
        for i in range(10):
            Book.objects.create(
                isbn="abcde{}".format(i), name="none", pages=10, rating=4.0,
                price=9999.98, contact=a1, publisher=p1, pubdate=thedate)
        book = Book.objects.aggregate(price_sum=Sum('price'))
        self.assertEqual(book['price_sum'], Decimal("99999.80"))
    def test_nonaggregate_aggregation_throws(self):
        """aggregate() rejects expressions that are not aggregates."""
        with six.assertRaisesRegex(self, TypeError, 'fail is not an aggregate expression'):
            Book.objects.aggregate(fail=F('price'))
    def test_nonfield_annotation(self):
        """Aggregates accept raw values / Value() expressions, not just field names."""
        book = Book.objects.annotate(val=Max(Value(2, output_field=IntegerField()))).first()
        self.assertEqual(book.val, 2)
        book = Book.objects.annotate(val=Max(Value(2), output_field=IntegerField())).first()
        self.assertEqual(book.val, 2)
        book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
        self.assertEqual(book.val, 2)
    def test_missing_output_field_raises_error(self):
        """Aggregating a raw value without output_field is a FieldError."""
        with six.assertRaisesRegex(self, FieldError, 'Cannot resolve expression type, unknown output_field'):
            Book.objects.annotate(val=Max(2)).first()
    def test_annotation_expressions(self):
        """Sum(F(..) + F(..)) and Sum(..) + Sum(..) produce the same annotations."""
        authors = Author.objects.annotate(combined_ages=Sum(F('age') + F('friends__age'))).order_by('name')
        authors2 = Author.objects.annotate(combined_ages=Sum('age') + Sum('friends__age')).order_by('name')
        for qs in (authors, authors2):
            self.assertEqual(len(qs), 9)
            self.assertQuerysetEqual(
                qs, [
                    ('Adrian Holovaty', 132),
                    ('Brad Dayley', None),
                    ('Jacob Kaplan-Moss', 129),
                    ('James Bennett', 63),
                    ('Jeffrey Forcier', 128),
                    ('Paul Bissex', 120),
                    ('Peter Norvig', 103),
                    ('Stuart Russell', 103),
                    ('Wesley J. Chun', 176)
                ],
                lambda a: (a.name, a.combined_ages)
            )
    def test_aggregation_expressions(self):
        """Arithmetic between aggregates (Sum / Count) matches Avg up to int truncation."""
        a1 = Author.objects.aggregate(av_age=Sum('age') / Count('*'))
        a2 = Author.objects.aggregate(av_age=Sum('age') / Count('age'))
        a3 = Author.objects.aggregate(av_age=Avg('age'))
        self.assertEqual(a1, {'av_age': 37})
        self.assertEqual(a2, {'av_age': 37})
        self.assertEqual(a3, {'av_age': Approximate(37.4, places=1)})
    def test_avg_decimal_field(self):
        """Avg over a DecimalField returns a float."""
        v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price')))['avg_price']
        self.assertIsInstance(v, float)
        self.assertEqual(v, Approximate(47.39, places=2))
    def test_order_of_precedence(self):
        """Parentheses in aggregate expressions change evaluation order as expected."""
        p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price') + 2) * 3)
        self.assertEqual(p1, {'avg_price': Approximate(148.18, places=2)})
        p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg('price') + 2 * 3)
        self.assertEqual(p2, {'avg_price': Approximate(53.39, places=2)})
    def test_combine_different_types(self):
        """Mixing field types in one expression requires an explicit output_field."""
        with six.assertRaisesRegex(self, FieldError, 'Expression contains mixed types. You must set output_field'):
            Book.objects.annotate(sums=Sum('rating') + Sum('pages') + Sum('price')).get(pk=self.b4.pk)
        b1 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
                                   output_field=IntegerField())).get(pk=self.b4.pk)
        self.assertEqual(b1.sums, 383)
        b2 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
                                   output_field=FloatField())).get(pk=self.b4.pk)
        self.assertEqual(b2.sums, 383.69)
        b3 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
                                   output_field=DecimalField())).get(pk=self.b4.pk)
        self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
    def test_complex_aggregations_require_kwarg(self):
        """Composite aggregate expressions must be passed with an alias keyword."""
        with six.assertRaisesRegex(self, TypeError, 'Complex annotations require an alias'):
            Author.objects.annotate(Sum(F('age') + F('friends__age')))
        with six.assertRaisesRegex(self, TypeError, 'Complex aggregates require an alias'):
            Author.objects.aggregate(Sum('age') / Count('age'))
        with six.assertRaisesRegex(self, TypeError, 'Complex aggregates require an alias'):
            Author.objects.aggregate(Sum(1))
    def test_aggregate_over_complex_annotation(self):
        """Aggregates may reference a complex annotation alias, alone or combined."""
        qs = Author.objects.annotate(
            combined_ages=Sum(F('age') + F('friends__age')))
        age = qs.aggregate(max_combined_age=Max('combined_ages'))
        self.assertEqual(age['max_combined_age'], 176)
        age = qs.aggregate(max_combined_age_doubled=Max('combined_ages') * 2)
        self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
        age = qs.aggregate(
            max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'))
        self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
        age = qs.aggregate(
            max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
            sum_combined_age=Sum('combined_ages'))
        self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
        self.assertEqual(age['sum_combined_age'], 954)
        age = qs.aggregate(
            max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
            sum_combined_age_doubled=Sum('combined_ages') + Sum('combined_ages'))
        self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
        self.assertEqual(age['sum_combined_age_doubled'], 954 * 2)
    def test_values_annotation_with_expression(self):
        """F() references inside annotations on a values() queryset join the GROUP BY."""
        # ensure the F() is promoted to the group by clause
        qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age'))
        a = qs.get(name="Adrian Holovaty")
        self.assertEqual(a['another_age'], 68)
        qs = qs.annotate(friend_count=Count('friends'))
        a = qs.get(name="Adrian Holovaty")
        self.assertEqual(a['friend_count'], 2)
        qs = qs.annotate(combined_age=Sum('age') + F('friends__age')).filter(
            name="Adrian Holovaty").order_by('-combined_age')
        self.assertEqual(
            list(qs), [
                {
                    "name": 'Adrian Holovaty',
                    "another_age": 68,
                    "friend_count": 1,
                    "combined_age": 69
                },
                {
                    "name": 'Adrian Holovaty',
                    "another_age": 68,
                    "friend_count": 1,
                    "combined_age": 63
                }
            ]
        )
        vals = qs.values('name', 'combined_age')
        self.assertEqual(
            list(vals), [
                {
                    "name": 'Adrian Holovaty',
                    "combined_age": 69
                },
                {
                    "name": 'Adrian Holovaty',
                    "combined_age": 63
                }
            ]
        )
    def test_annotate_values_aggregate(self):
        """Summing over an F() alias equals summing over the underlying field."""
        alias_age = Author.objects.annotate(
            age_alias=F('age')
        ).values(
            'age_alias',
        ).aggregate(sum_age=Sum('age_alias'))
        age = Author.objects.values('age').aggregate(sum_age=Sum('age'))
        self.assertEqual(alias_age['sum_age'], age['sum_age'])
    def test_annotate_over_annotate(self):
        """An aggregate over an F() annotation equals the direct aggregate."""
        author = Author.objects.annotate(
            age_alias=F('age')
        ).annotate(
            sum_age=Sum('age_alias')
        ).get(name="Adrian Holovaty")
        other_author = Author.objects.annotate(
            sum_age=Sum('age')
        ).get(name="Adrian Holovaty")
        self.assertEqual(author.sum_age, other_author.sum_age)
    def test_annotated_aggregate_over_annotated_aggregate(self):
        """Nesting an aggregate over another aggregate's alias is rejected."""
        with self.assertRaisesMessage(FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"):
            Book.objects.annotate(Max('id')).annotate(Sum('id__max'))

        class MyMax(Max):
            # Custom Max that silently drops all but its first source expression.
            def as_sql(self, compiler, connection):
                self.set_source_expressions(self.get_source_expressions()[0:1])
                return super(MyMax, self).as_sql(compiler, connection)
        with self.assertRaisesMessage(FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"):
            Book.objects.annotate(Max('id')).annotate(my_max=MyMax('id__max', 'price'))
    def test_multi_arg_aggregate(self):
        """Multi-argument aggregates require an alias but are otherwise allowed."""
        class MyMax(Max):
            # Custom Max that silently drops all but its first source expression.
            def as_sql(self, compiler, connection):
                self.set_source_expressions(self.get_source_expressions()[0:1])
                return super(MyMax, self).as_sql(compiler, connection)
        with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'):
            Book.objects.aggregate(MyMax('pages', 'price'))
        with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'):
            Book.objects.annotate(MyMax('pages', 'price'))
        Book.objects.aggregate(max_field=MyMax('pages', 'price'))
    def test_add_implementation(self):
        """Vendor-specific as_<vendor> hooks can override aggregate SQL rendering."""
        class MySum(Sum):
            pass
        # test completely changing how the output is rendered
        def lower_case_function_override(self, compiler, connection):
            sql, params = compiler.compile(self.source_expressions[0])
            substitutions = dict(function=self.function.lower(), expressions=sql)
            substitutions.update(self.extra)
            return self.template % substitutions, params
        setattr(MySum, 'as_' + connection.vendor, lower_case_function_override)
        qs = Book.objects.annotate(
            sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
        )
        self.assertEqual(str(qs.query).count('sum('), 1)
        b1 = qs.get(pk=self.b4.pk)
        self.assertEqual(b1.sums, 383)
        # test changing the dict and delegating
        def lower_case_function_super(self, compiler, connection):
            self.extra['function'] = self.function.lower()
            return super(MySum, self).as_sql(compiler, connection)
        setattr(MySum, 'as_' + connection.vendor, lower_case_function_super)
        qs = Book.objects.annotate(
            sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
        )
        self.assertEqual(str(qs.query).count('sum('), 1)
        b1 = qs.get(pk=self.b4.pk)
        self.assertEqual(b1.sums, 383)
        # test overriding all parts of the template
        def be_evil(self, compiler, connection):
            substitutions = dict(function='MAX', expressions='2')
            substitutions.update(self.extra)
            return self.template % substitutions, ()
        setattr(MySum, 'as_' + connection.vendor, be_evil)
        qs = Book.objects.annotate(
            sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
        )
        self.assertEqual(str(qs.query).count('MAX('), 1)
        b1 = qs.get(pk=self.b4.pk)
        self.assertEqual(b1.sums, 2)
    def test_complex_values_aggregation(self):
        """Composite aggregate expressions over values() querysets."""
        max_rating = Book.objects.values('rating').aggregate(
            double_max_rating=Max('rating') + Max('rating'))
        self.assertEqual(max_rating['double_max_rating'], 5 * 2)
        max_books_per_rating = Book.objects.values('rating').annotate(
            books_per_rating=Count('id') + 5
        ).aggregate(Max('books_per_rating'))
        self.assertEqual(
            max_books_per_rating,
            {'books_per_rating__max': 3 + 5})
    def test_expression_on_aggregation(self):
        """Plain Func expressions can wrap aggregates and mix with F() references."""
        # Create a plain expression
        class Greatest(Func):
            function = 'GREATEST'

            def as_sqlite(self, compiler, connection):
                # SQLite has no GREATEST; its MAX is the variadic equivalent.
                return super(Greatest, self).as_sql(compiler, connection, function='MAX')
        qs = Publisher.objects.annotate(
            price_or_median=Greatest(Avg('book__rating'), Avg('book__price'))
        ).filter(price_or_median__gte=F('num_awards')).order_by('num_awards')
        self.assertQuerysetEqual(
            qs, [1, 3, 7, 9], lambda v: v.num_awards)
        qs2 = Publisher.objects.annotate(
            rating_or_num_awards=Greatest(Avg('book__rating'), F('num_awards'),
                                          output_field=FloatField())
        ).filter(rating_or_num_awards__gt=F('num_awards')).order_by('num_awards')
        self.assertQuerysetEqual(
            qs2, [1, 3], lambda v: v.num_awards)
    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_backwards_compatibility(self):
        """Legacy sql.aggregates-style Aggregate subclasses still plug in via add_to_query()."""
        from django.db.models.sql import aggregates as sql_aggregates

        class SqlNewSum(sql_aggregates.Aggregate):
            sql_function = 'SUM'

        class NewSum(Aggregate):
            name = 'Sum'

            def add_to_query(self, query, alias, col, source, is_summary):
                klass = SqlNewSum
                aggregate = klass(
                    col, source=source, is_summary=is_summary, **self.extra)
                query.annotations[alias] = aggregate
        qs = Author.objects.values('name').annotate(another_age=NewSum('age') + F('age'))
        a = qs.get(name="Adrian Holovaty")
        self.assertEqual(a['another_age'], 68)
|
|
from __future__ import division, absolute_import
import os
import itertools
import random
import traceback
import warnings
import numpy as np
from scipy import spatial, sparse
from . import utils
from . import geometry
from . import graphics
class Network(dict):
    '''
    A network as defined by MiniPNM is essentially a graph and a coordinate
    array coupled together.
    __init__ must *somehow* append coordinate (x,y,z) and connectivity (tails &
    heads) arrays to the network. Other attributes may be added if required,
    but the aforementioned 5 are mandatory. They are packaged into properties
    for convenience.
    As a general rule, aside from methods and properties, these objects
    should essentially be *just* dictionaries. In order to save the data and
    recover it later, it should suffice to call `network.items()` and write
    the output to a file.
    Lastly, for the sake of communicability, there is a reference of choice for
    terminology:
    http://en.wikipedia.org/wiki/Glossary_of_graph_theory
    '''
    # (N, 3) vertex coordinates, backed by the 'x', 'y', 'z' dict entries
    points = utils.property_from(['x','y','z'])
    # (E, 2) edge index pairs, backed by the 'tails', 'heads' dict entries
    pairs = utils.property_from(['tails','heads'], dtype=int, default=[])
    filename = None
    @classmethod
    def load(cls, dict_):
        # Rebuild a network from a plain dict without invoking __init__
        # (subclass __init__ signatures vary and may do heavy work).
        inst = cls.__new__(cls)
        inst.update(dict_)
        return inst
    @property
    def order(self):
        # graph-theory "order": number of vertices
        return len(self.points)
    @property
    def size(self):
        # graph-theory "size": number of edges
        return len(self.pairs)
    @property
    def coords(self):
        # (3, N) transposed view of points: x-row, y-row, z-row
        return self.points.T
    @property
    def midpoints(self):
        # midpoint of every edge
        tails, heads = self.points[self.pairs.T]
        return tails + (heads - tails)/2
    @property
    def spans(self):
        # head - tail displacement vector of every edge
        tails, heads = self.points[self.pairs.T]
        return heads - tails
    @property
    def lengths(self):
        # Euclidean length of every edge
        return np.linalg.norm(self.spans, axis=1).astype('float32')
    @property
    def diagonals(self):
        # identity-like sparse diagonal matrix (one entry per vertex)
        return sparse.diags(np.ones(self.order), 0)
    @property
    def adjacency_matrix(self):
        # COO adjacency matrix built from (data, (row, col)) triplets;
        # every stored entry is 1.
        tails, heads = self.pairs.T
        ijk = np.ones_like(tails), (heads, tails)
        return sparse.coo_matrix(ijk, shape=(self.order, self.order), dtype=float)
    @property
    def labels(self):
        # connected-component label of every vertex
        return sparse.csgraph.connected_components(self.adjacency_matrix)[1]
    @property
    def clusters(self):
        # number of connected components
        return sparse.csgraph.connected_components(self.adjacency_matrix)[0]
    @property
    def bbox(self):
        # extent of the axis-aligned bounding box of the vertices
        return self.points.max(axis=0) - self.points.min(axis=0)
    @property
    def centroid(self):
        # geometric center of the bounding box (not the mean of the points)
        return self.points.min(axis=0) + self.bbox/2.
    @property
    def indexes(self):
        return np.arange(self.order)
    def system(self, cvalues=1, units=None):
        '''
        Returns a matrix representing a system of equations
        '''
        # Builds a Laplacian-style matrix: conductance-weighted adjacency
        # plus a diagonal carrying the negated row sums.
        if units is not None:
            cvalues = cvalues(units)
        A = self.adjacency_matrix.astype(float)
        A.data *= cvalues
        D = self.diagonals.astype(float)
        D.data *= -A.sum(axis=1).A1
        return (A + D) * (1 if units is None else units)
    def boundary(self):
        # Boolean mask marking the vertices on the convex hull.
        all_points = self.indexes
        boundary_points = spatial.ConvexHull(geometry.drop_coplanar(self.points)).vertices
        return np.in1d(all_points, boundary_points).astype(bool)
    def save(self, filename):
        # NOTE(review): `minipnm` is never imported in this module, so this
        # raises NameError as written; presumably the top-level package's
        # save_vtp was intended -- TODO confirm and import it.
        minipnm.save_vtp(self, filename)
        self.filename = filename
    def merge(self, other, axis=2, spacing=None, centering=False, stitch=False):
        # Combine two networks into a new one; `stitch` is currently unused.
        new = Network()
        # alignment along a common centroid
        if centering:
            center_distance = other.centroid - self.centroid
            center_distance[axis] = 0 # we take care of this one differently
            shifted_points = other.points - center_distance
        else:
            shifted_points = other.points
        # the distance between the max for base and min for other should
        # equal to spacing. rearranging, it gives us the required offset
        if spacing is not None:
            offset = other.coords[axis].min() \
                    - self.coords[axis].max() - spacing
            shifted_points.T[axis] -= offset
        new.points = np.vstack([self.points, shifted_points])
        # push the connectivity array by the number of already existing vertices
        new.pairs = np.vstack([self.pairs, other.pairs+self.order])
        # merge the rest
        for key in set(itertools.chain(self.keys(),other.keys())) - \
            {'x','y','z','heads','tails'}:
            # missing arrays are padded with -1 so lengths stay consistent
            values_self = self.get(key, -np.ones(self.order))
            values_other = other.get(key, -np.ones(other.order))
            new[key] = np.hstack([values_self, values_other])
        return new
    def cut(self, mask, values=None, bijective=False, directed=True):
        '''
        returns id of throats where the the tail is masked and the head is not.
        (ie: True for sources).
        for convenience, if a value array is given, the corresponding values
        are returned instead of indices
        the bijective condition, if enabled, drops any edges that are not
        one-to-one
        if directed is set to false, the method will ignore which sides of the
        mask are true and which are false. default is to consider throats
        where the mask is a selection and we want throats reaching out of it
        '''
        imask = self.indexes[np.array(mask).nonzero()]
        # NOTE(review): `pairs` is declared as ['tails','heads'], so this
        # unpacking names the first column `heads` -- the names look swapped
        # relative to the docstring; TODO confirm the intended direction.
        heads, tails = self.pairs.T
        if directed:
            pair_mask = np.in1d(heads, imask) & ~np.in1d(tails, imask)
        else:
            # undirected: keep edges that straddle the mask in either direction
            pair_mask = np.in1d(heads, imask) == ~np.in1d(tails, imask)
        if values is None:
            return pair_mask
        else:
            tails, heads = values[self.pairs[pair_mask]].T
        if not bijective:
            return tails, heads
        # keep only endpoints that appear exactly once on each side
        valid = (np.bincount(tails)[tails]==1) & (np.bincount(heads)[heads]==1)
        return tails[valid], heads[valid]
    def prune(self, inaccessible, remove_pores=True):
        '''
        the update calls have some probability of messing things up if the
        network.order somehow ends up being equal to the network.size
        '''
        # NOTE(review): on Python 3, self.keys() is a live view, so `old_keys`
        # tracks later mutations and the final symmetric difference is always
        # empty -- TODO confirm whether list(self.keys()) was intended.
        old_order, old_size, old_keys = self.order, self.size, self.keys()
        accessible = self.indexes[~inaccessible.flatten()]
        good_heads = np.in1d(self['heads'], accessible)
        good_tails = np.in1d(self['tails'], accessible)
        valid = good_heads & good_tails
        if len(self.pairs) > 0:
            self.pairs = self.pairs[valid]
        # drop per-edge arrays in lockstep with the removed edges
        self.update({key:array[valid] for key,array in self.data() if array.size==old_size})
        if not remove_pores:
            return
        # remove the unwanted pores
        if len(self.points) > 0:
            self.points = self.points[accessible]
        # now we need to shift throat indexes accordingly
        if len(self.pairs) > 0:
            hs, ts = self.pairs.T
            # old index -> new compacted index
            mapping = np.zeros(inaccessible.size, dtype=int)
            mapping[accessible] = self.indexes
            self.pairs = np.vstack([mapping[hs], mapping[ts]]).T
        # drop per-vertex arrays in lockstep with the removed vertices
        self.update({key:array[accessible] for key,array in self.data() if array.size==old_order})
        left_out = set(self.keys()) ^ set(old_keys)
        if any(left_out):
            warnings.warn("{}".format(left_out)) # make more verbose
    def copy(self):
        return self.load(self)
    def split(self, mask):
        # two complementary subnetworks: inside the mask and outside it
        subnetwork_1 = self - mask
        subnetwork_2 = self - ~mask
        return subnetwork_1, subnetwork_2
    def data(self):
        '''
        data is a mask on dict.items that returns non-essential stored arrays
        '''
        for key, value in self.items():
            if key not in ('x','y','z','heads','tails'):
                yield key, value
    def __repr__(self):
        return self.__class__.__name__+str(self.size)
    def __str__(self):
        # tabular listing of the non-essential arrays plus order/size summary
        entries = [self.__class__.__name__]
        for key, value in sorted(self.data()):
            entries.append('{:<15}: {:<10}: {:<15}'.format(
                key,
                str(value.dtype),
                str(value.shape),))
        return '<'+'\n\t'.join(entries)+\
               '\nOrder: {self.order}, Size: {self.size}>'.format(**locals())
    def __add__(self, other):
        # stack the other network flush against this one, centered
        return self.merge(other, spacing=0, centering=True)
    def __or__(self, other):
        # merge in place (no repositioning)
        return self.merge(other)
    def __sub__(self, inaccessible):
        # return a copy with the masked edges pruned (vertices kept)
        new = self.copy()
        new.prune(inaccessible, remove_pores=False)
        return new
    def render(self, *args, **kwargs):
        # If a scene is supplied, defer playback to the caller; otherwise
        # create one and play immediately.
        wait = True
        if 'scene' in kwargs:
            scene = kwargs.pop('scene')
        else:
            scene = graphics.Scene()
            wait = False
        scene.add_actors(self.actors(*args, **kwargs))
        if not wait:
            scene.play()
    def actors(self, values=None, offset=[0, 0, 0], **kwargs):
        # `values` may be a dict key, an array, or None; resolve accordingly.
        try:
            # to load as if given a key
            values = self[values]
        except KeyError:
            # show error, but plot anyway (fail gracefully?)
            if values:
                traceback.print_exc()
            values = None
        except TypeError:
            # probably an array, but make sure it fits!
            assert np.array(values).shape[-1] == self.order
        finally:
            wires = graphics.Wires(self.points+offset, self.pairs, values, **kwargs)
        return [wires]
    def plot(self, *values):
        # Cycle colormaps for successive value arrays on a flat network.
        rotation = itertools.cycle(['Blues', 'hot', 'summer', 'copper', 'rainbow'])
        if 0 not in self.bbox:
            raise NotImplementedError('Only usable by 1D or 2D networks')
        # the axis with zero extent becomes the canvas for the plotted values
        canvas = 'xyz'[self.bbox.tolist().index(0)]
        self[canvas] *= 0
        scene = graphics.Scene()
        self.render(scene=scene)
        for arr, cmap in zip(values, rotation):
            self[canvas][:] = arr
            self.render(scene=scene, cmap=cmap)
        scene.play()
class Cubic(Network):
    """Cubic lattice network: one vertex per voxel of `shape`, with
    face-adjacent voxels connected; every edge is stored in both
    directions (tails/heads and heads/tails)."""
    @classmethod
    def from_source(cls, im, spacing=None):
        """Build a Cubic network shaped like image `im`, storing its values
        under the 'source' key."""
        network = cls(im.shape, spacing)
        network['source'] = im.ravel()
        return network
    def __init__(self, shape, spacing=None, bbox=None):
        # one point per voxel index of the (at-least-3D) array
        arr = np.atleast_3d(np.empty(shape))
        self.points = np.array([i for i,v in np.ndenumerate(arr)], dtype=float)
        if bbox is not None:
            # derive spacing from the requested bounding box, guarding
            # against zero-extent axes
            spacing = bbox / np.where( self.bbox==0, 1, self.bbox )
        if spacing is not None:
            self.points *= spacing
        if any(self.bbox==0):
            # shift up to mid for 0-dimensional planes
            if bbox is None:
                bbox = self.bbox
            self.points += np.where( self.bbox==0, np.array(bbox)/2., 0 )
        # connect each voxel to its successor along every axis, both ways
        I = np.arange(arr.size).reshape(arr.shape)
        tails, heads = [], []
        for T,H in [
            (I[:,:,:-1], I[:,:,1:]),
            (I[:,:-1], I[:,1:]),
            (I[:-1], I[1:]),
            ]:
            tails.extend(T.flat)
            tails.extend(H.flat)
            heads.extend(H.flat)
            heads.extend(T.flat)
        self.pairs = np.vstack([tails, heads]).T
    def asarray(self, values=None):
        """Map `values` back onto the lattice grid and return the (squeezed) array."""
        # infer the grid pitch from the unique coordinates along each axis
        spacing = map(np.diff, map(np.unique, self.coords))
        min_spacing = [min(a) if len(a) else 1.0 for a in spacing]
        # quantize the coordinates back to integer grid indices
        points = (self.points / min_spacing).astype(int)
        points -= points.min(axis=0)
        bbox = (self.bbox / min_spacing + 1).astype(int)
        actual_indexes = np.ravel_multi_index(points.T, bbox)
        array = np.zeros(bbox)
        if values is not None:
            array.flat[actual_indexes] = values.ravel()
        return array.squeeze()
class Delaunay(Network):
    """Network whose connectivity is the Delaunay graph of its points
    (derived from the Voronoi diagram's ridge_points)."""
    @classmethod
    def random(cls, npoints):
        """Build a Delaunay network from `npoints` uniform random points in the unit cube."""
        points = np.random.rand(npoints,3)
        return cls(points)
    def __init__(self, points, mask=None):
        # BUG FIX: `mask` was accepted but never forwarded, so the optional
        # index remapping in edges_from_points silently never happened.
        self.pairs = self.edges_from_points(points, mask)
        self.points = np.atleast_2d(points)
    @staticmethod
    def edges_from_points(points, mask=None, directed=True):
        """Return the (E, 2) edge index array of the Delaunay graph of `points`.

        If `mask` is given, edge endpoint indices are mapped through it
        (presumably translating local to global indices -- TODO confirm).
        If `directed`, each undirected edge is emitted in both orientations.
        """
        noncoplanar = geometry.drop_coplanar(points)
        edges = np.vstack(spatial.Voronoi(noncoplanar).ridge_points)
        if mask is not None:
            # BUG FIX: `if mask:` raises "truth value is ambiguous" for
            # ndarray masks; test against None explicitly instead.
            edges = mask[edges]
        if directed:
            edges = np.vstack([edges, np.fliplr(edges)])
        return edges
class Radial(Network):
    '''
    Takes in points, sphere radii, and returns a fleshed out network consisting
    of spheres and cylinders.
    If connectivity pairs aren't specified, default is Delaunay tessellation.
    Pruning follows to ensure there are no collisions regardless.
    '''
    @classmethod
    def from_cubic_topology(cls, *args, **kwargs):
        """Build a Radial network reusing a Cubic network's points and pairs;
        the Cubic 'source' values become the sphere radii."""
        topology = t = Cubic.from_source(*args, **kwargs)
        # renamed local (was `geometry`, shadowing the module-level import)
        radial = Radial(t.points, t['source'], t.pairs, prune=False)
        return radial
    def __init__(self, centers, radii, pairs=None, prune=True, f=2):
        # sphere/cylinder helpers store their geometric arrays on this network
        self.spheres = geometry.Spheres(self)
        self.cylinders = geometry.Cylinders(self)
        self.spheres.centers = centers
        # broadcast a scalar (or per-point) radii spec to one radius per sphere
        self.spheres.radii = np.ones(self.order, dtype=float)*radii
        if pairs is None:
            pairs = Delaunay.edges_from_points(self.points)
        self.pairs = np.atleast_2d(pairs)
        # `f` is forwarded to cylinder generation -- presumably a sizing
        # factor; TODO confirm against geometry.Cylinders.generate
        self.cylinders.generate(f)
        if prune is True:
            self.prune_colliding()
    @property
    def bbox(self):
        """Bounding box sized to contain the sphere surfaces, not just the centers."""
        minima = (self.coords - self['sphere_radii']).min(axis=1)
        maxima = (self.coords + self['sphere_radii']).max(axis=1)
        return maxima - minima
    def prune_colliding(self):
        """Drop every cylinder (edge + geometry arrays) that intersects a sphere."""
        for center, radius in zip(self.spheres.centers, self.spheres.radii):
            safe = ~self.cylinders.intersecting(center, radius)
            # remove the offending pairs and their cylinder geometry in lockstep
            self.pairs = self.pairs[safe]
            self.cylinders.midpoints = self.cylinders.midpoints[safe]
            self.cylinders.spans = self.cylinders.spans[safe]
            self.cylinders.radii = self.cylinders.radii[safe]
    def rasterize(self, resolution=20):
        """Voxelize the sphere pack onto a Cubic grid whose 'source' holds the
        index of the covering sphere (-1 where uncovered)."""
        offset = (self.coords - self['sphere_radii']).min(axis=1)
        scale = np.true_divide(self.bbox.max(), resolution-1)
        # BUG FIX: np.zeros requires an integral shape on modern NumPy;
        # np.ceil(...) + 1 previously left `dims` as a float array.
        dims = (np.ceil(self.bbox/scale) + 1).astype(int)
        im = np.zeros(dims)-1
        raster = Cubic.from_source(im)
        raster.points *= scale
        raster.points += offset
        for i, center in enumerate(self.points):
            distance = np.linalg.norm(raster.points-center, axis=1)
            equivalent = distance < self['sphere_radii'][i]
            raster['source'][equivalent] = i
        return raster
    def actors(self, saturation_history=None):
        """Build render actors: translucent shells + tubes, plus optional
        fill-level spheres for a saturation state."""
        shells = graphics.Spheres(self.points, self['sphere_radii'], color=(1,1,1), alpha=0.4)
        tubes = graphics.Tubes(self.cylinders.midpoints, self.cylinders.spans, self['cylinder_radii'])
        if saturation_history is None:
            return [shells, tubes]
        # radius of a sphere holding `saturation * capacity` worth of volume
        capacity = 4./3. * np.pi * self['sphere_radii']**3
        fill_radii = (capacity * saturation_history * 3./4. / np.pi)**(1./3.)
        history = graphics.Spheres(self.points, fill_radii, color=(0,0,1))
        return [shells, tubes, history]
    def porosity(self):
        """Fraction of the bounding box carved out by spheres and cylinders."""
        total_volume = np.prod(self.bbox)
        hollowed_out = self.spheres.volumes.sum() + self.cylinders.volumes.sum()/2 # double counting of cylinders
        return hollowed_out/total_volume
    def translate(self, x=0, y=0, z=0):
        """Shift the network by the given fractions of its own bounding box."""
        self.points += self.bbox * [x, y, z]
        self.cylinders.midpoints += self.bbox * [x, y, z]
|
|
"""Contains the base scenario class."""
from flow.core.params import InitialConfig
from flow.core.params import TrafficLightParams
from flow.core.params import SumoCarFollowingParams
from flow.core.params import SumoLaneChangeParams
import time
import xml.etree.ElementTree as ElementTree
from lxml import etree
from collections import defaultdict
try:
# Import serializable if rllab is installed
from rllab.core.serializable import Serializable
except ImportError:
Serializable = object
# default sumo probability value TODO (ak): remove
DEFAULT_PROBABILITY = 0
# default sumo vehicle length value (in meters) TODO (ak): remove
DEFAULT_LENGTH = 5
# default sumo vehicle class value TODO (ak): remove
DEFAULT_VCLASS = 0
class Scenario(Serializable):
    """Base scenario class.

    Initializes a new scenario. Scenarios are used to specify features of
    a network, including the positions of nodes, properties of the edges
    and junctions connecting these nodes, properties of vehicles and
    traffic lights, and other features as well. These features can later be
    acquired from this class via a plethora of get methods (see
    documentation).

    This class uses network specific features to generate the necessary network
    configuration files needed to initialize a simulation instance. The methods
    of this class are called by the base scenario class.

    The network files can be created in one of three ways:

    * Custom networks can be generated by defining the properties of the
      network's directed graph. This is done by defining the nodes and edges
      properties using the ``specify_nodes`` and ``specify_edges`` methods,
      respectively, as well as other properties via methods including
      ``specify_types``, ``specify_connections``, etc... For more on this,
      see the tutorial on creating custom scenarios or refer to some of the
      available scenarios.

    * Scenario data can be collected from an OpenStreetMap (.osm) file. The
      .osm file is specified in the NetParams object. For example:

        >>> from flow.core.params import NetParams
        >>> net_params = NetParams(osm_path='/path/to/osm_file.osm')

      In this case, no ``specify_nodes`` and ``specify_edges`` methods are
      needed. However, a ``specify_routes`` method is still needed to specify
      the appropriate routes vehicles can traverse in the network.

    * Scenario data can be collected from a sumo-specific network (.net.xml)
      file. This file is specified in the NetParams object. For example:

        >>> from flow.core.params import NetParams
        >>> net_params = NetParams(template='/path/to/template')

      In this case, no ``specify_nodes`` and ``specify_edges`` methods are
      needed. However, a ``specify_routes`` method is still needed to specify
      the appropriate routes vehicles can traverse in the network.

    This class can be instantiated once and reused in multiple experiments.
    Note that this function stores all the relevant parameters. The
    generate() function still needs to be called separately.

    Attributes
    ----------
    orig_name : str
        the variable provided under the `name` parameter to this object upon
        instantiation
    name : str
        the variable provided under the `name` parameter to this object upon
        instantiation, appended with a timestamp variable. This timestamp is
        meant to differentiate generated scenario files during parallelism
    vehicles : flow.core.params.VehicleParams
        vehicle specific parameters, used to specify the types and number of
        vehicles at the start of a simulation
    net_params : flow.core.params.NetParams
        network specific parameters, used primarily to identify properties of a
        network such as the lengths of edges and the number of lanes in each
        edge. This attribute is very network-specific, and should contain the
        variables denoted by the `ADDITIONAL_NET_PARAMS` dict in each scenario
        class file
    initial_config : flow.core.params.InitialConfig
        specifies parameters that affect the positioning of vehicle in the
        network at the start of a simulation. For more, see flow/core/params.py
    traffic_lights : flow.core.params.TrafficLightParams
        used to describe the positions and types of traffic lights in the
        network. For more, see flow/core/params.py
    nodes : list of dict or None
        list of nodes that are assigned to the scenario via the `specify_nodes`
        method. All nodes in this variable are expected to have the following
        properties:

        * **name**: a unique identifier for the node
        * **x**: x-coordinate of the node, in meters
        * **y**: y-coordinate of the node, in meters

        If the scenario is meant to generate the network from an OpenStreetMap
        or template file, this variable is set to None
    edges : list of dict or None
        edges that are assigned to the scenario via the `specify_edges` method.
        This include the shape, position, and properties of all edges in the
        network. These properties include the following mandatory properties:

        * **id**: name of the edge
        * **from**: name of the node the edge starts from
        * **to**: the name of the node the edges ends at
        * **length**: length of the edge

        In addition, either the following properties need to be specifically
        defined or a **type** variable property must be defined with equivalent
        attributes in `self.types`:

        * **numLanes**: the number of lanes on the edge
        * **speed**: the speed limit for vehicles on the edge

        Moreover, the following attributes may optionally be available:

        * **shape**: the positions of intermediary nodes used to define the
          shape of an edge. If no shape is specified, then the edge will appear
          as a straight line.

        Note that, if the scenario is meant to generate the network from an
        OpenStreetMap or template file, this variable is set to None
    types : list of dict or None
        A variable used to ease the definition of the properties of various
        edges. Each element in the list consists of a dict consisting of the
        following property:

        * **id**: name of the edge type. Edges in the `self.edges` attribute
          with a similar value under the "type" key will adopt the properties
          of other components of this list, such as "speed" and "numLanes".

        If the type variable is None, then no types are available within the
        scenario. Furthermore, a proper example of this variable being used can
        be found under `specify_types` in flow/scenarios/loop.py.

        Note that, if the scenario is meant to generate the network from an
        OpenStreetMap or template file, this variable is set to None
    connections : list of dict or None
        A variable used to describe how any specific node's incoming and
        outgoing edges/lane pairs are connected. If no connections are
        specified, sumo generates default connections.

        If the connections attribute is set to None, then the connections
        within the network will be specified by the simulator.

        Note that, if the scenario is meant to generate the network from an
        OpenStreetMap or template file, this variable is set to None
    routes : dict
        A variable whose keys are the starting edge of a specific route, and
        whose values are the list of edges a vehicle is meant to traverse
        starting from that edge. These are only applied at the start of a
        simulation; vehicles are allowed to reroute within the environment
        immediately afterwards.
    edge_starts : list of (str, float)
        a list of tuples in which the first element of the tuple is the name of
        the edge/intersection/internal_link, and the second value is the
        distance of the link from some global reference, i.e. [(link_0, pos_0),
        (link_1, pos_1), ...]
    internal_edge_starts : list of (str, float)
        A variable similar to `edge_starts` but for junctions within the
        network. If no junctions are available, this variable will return the
        default variable: `[(':', -1)]` needed by sumo simulations.
    intersection_edge_starts : list of (str, float)
        A variable similar to `edge_starts` but for intersections within
        the network. This variable will be deprecated in future releases.

    Example
    -------
    The following examples are derived from the `LoopScenario` Scenario class
    located in flow/scenarios/loop.py, and should serve as an example of the
    types of outputs to be expected from the different variables of a scenario
    class.

    First of all, the ring road scenario class can be instantiated by running
    the following commands (note if this is unclear please refer to Tutorial
    1):

        >>> from flow.scenarios import LoopScenario
        >>> from flow.core.params import NetParams, VehicleParams
        >>>
        >>> scenario = LoopScenario(
        >>>     name='test',
        >>>     vehicles=VehicleParams(),
        >>>     net_params=NetParams(
        >>>         additional_params={
        >>>             'length': 230,
        >>>             'lanes': 1,
        >>>             'speed_limit': 30,
        >>>             'resolution': 40,
        >>>         }
        >>>     )
        >>> )

    The various attributes then look as follows:

        >>> print(scenario.nodes)
        >>> [{'id': 'bottom', 'x': '0', 'y': '-36.60563691113593'},
        >>>  {'id': 'right', 'x': '36.60563691113593', 'y': '0'},
        >>>  {'id': 'top', 'x': '0', 'y': '36.60563691113593'},
        >>>  {'id': 'left', 'x': '-36.60563691113593', 'y': '0'}]

        >>> print(scenario.edges)
        >>> [
        >>>  {'id': 'bottom',
        >>>   'type': 'edgeType',
        >>>   'from': 'bottom',
        >>>   'to': 'right',
        >>>   'length': '57.5',
        >>>   'shape': '0.00,-36.61 1.47,-36.58 2.95,-36.49 4.41,-36.34 '
        >>>            '5.87,-36.13 7.32,-35.87 8.76,-35.54 10.18,-35.16 '
        >>>            '11.59,-34.72 12.98,-34.23 14.35,-33.68 15.69,-33.07 '
        >>>            '17.01,-32.41 18.30,-31.70 19.56,-30.94 20.79,-30.13 '
        >>>            '21.99,-29.26 23.15,-28.35 24.27,-27.40 25.36,-26.40 '
        >>>            '26.40,-25.36 27.40,-24.27 28.35,-23.15 29.26,-21.99 '
        >>>            '30.13,-20.79 30.94,-19.56 31.70,-18.30 32.41,-17.01 '
        >>>            '33.07,-15.69 33.68,-14.35 34.23,-12.98 34.72,-11.59 '
        >>>            '35.16,-10.18 35.54,-8.76 35.87,-7.32 36.13,-5.87 '
        >>>            '36.34,-4.41 36.49,-2.95 36.58,-1.47 36.61,0.00'
        >>>  },
        >>>  {'id': 'right',
        >>>   'type': 'edgeType',
        >>>   'from': 'right',
        >>>   'to': 'top',
        >>>   'length': '57.5',
        >>>   'shape': '36.61,0.00 36.58,1.47 36.49,2.95 36.34,4.41 36.13,5.87 '
        >>>            '35.87,7.32 35.54,8.76 35.16,10.18 34.72,11.59 '
        >>>            '34.23,12.98 33.68,14.35 33.07,15.69 32.41,17.01 '
        >>>            '31.70,18.30 30.94,19.56 30.13,20.79 29.26,21.99 '
        >>>            '28.35,23.15 27.40,24.27 26.40,25.36 25.36,26.40 '
        >>>            '24.27,27.40 23.15,28.35 21.99,29.26 20.79,30.13 '
        >>>            '19.56,30.94 18.30,31.70 17.01,32.41 15.69,33.07 '
        >>>            '14.35,33.68 12.98,34.23 11.59,34.72 10.18,35.16 '
        >>>            '8.76,35.54 7.32,35.87 5.87,36.13 4.41,36.34 2.95,36.49 '
        >>>            '1.47,36.58 0.00,36.61'
        >>>  },
        >>>  {'id': 'top',
        >>>   'type': 'edgeType',
        >>>   'from': 'top',
        >>>   'to': 'left',
        >>>   'length': '57.5',
        >>>   'shape': '0.00,36.61 -1.47,36.58 -2.95,36.49 -4.41,36.34 '
        >>>            '-5.87,36.13 -7.32,35.87 -8.76,35.54 -10.18,35.16 '
        >>>            '-11.59,34.72 -12.98,34.23 -14.35,33.68 -15.69,33.07 '
        >>>            '-17.01,32.41 -18.30,31.70 -19.56,30.94 -20.79,30.13 '
        >>>            '-21.99,29.26 -23.15,28.35 -24.27,27.40 -25.36,26.40 '
        >>>            '-26.40,25.36 -27.40,24.27 -28.35,23.15 -29.26,21.99 '
        >>>            '-30.13,20.79 -30.94,19.56 -31.70,18.30 -32.41,17.01 '
        >>>            '-33.07,15.69 -33.68,14.35 -34.23,12.98 -34.72,11.59 '
        >>>            '-35.16,10.18 -35.54,8.76 -35.87,7.32 -36.13,5.87 '
        >>>            '-36.34,4.41 -36.49,2.95 -36.58,1.47 -36.61,0.00'
        >>>  },
        >>>  {'id': 'left',
        >>>   'type': 'edgeType',
        >>>   'from': 'left',
        >>>   'to': 'bottom',
        >>>   'length': '57.5',
        >>>   'shape': '-36.61,0.00 -36.58,-1.47 -36.49,-2.95 -36.34,-4.41 '
        >>>            '-36.13,-5.87 -35.87,-7.32 -35.54,-8.76 -35.16,-10.18 '
        >>>            '-34.72,-11.59 -34.23,-12.98 -33.68,-14.35 '
        >>>            '-33.07,-15.69 -32.41,-17.01 -31.70,-18.30 '
        >>>            '-30.94,-19.56 -30.13,-20.79 -29.26,-21.99 '
        >>>            '-28.35,-23.15 -27.40,-24.27 -26.40,-25.36 '
        >>>            '-25.36,-26.40 -24.27,-27.40 -23.15,-28.35 '
        >>>            '-21.99,-29.26 -20.79,-30.13 -19.56,-30.94 '
        >>>            '-18.30,-31.70 -17.01,-32.41 -15.69,-33.07 '
        >>>            '-14.35,-33.68 -12.98,-34.23 -11.59,-34.72 '
        >>>            '-10.18,-35.16 -8.76,-35.54 -7.32,-35.87 -5.87,-36.13 '
        >>>            '-4.41,-36.34 -2.95,-36.49 -1.47,-36.58 -0.00,-36.61'
        >>>  }
        >>> ]

        >>> print(scenario.types)
        >>> [{'id': 'edgeType', 'numLanes': '1', 'speed': '30'}]

        >>> print(scenario.connections)
        >>> None

        >>> print(scenario.routes)
        >>> {
        >>>     'top': ['top', 'left', 'bottom', 'right'],
        >>>     'left': ['left', 'bottom', 'right', 'top'],
        >>>     'bottom': ['bottom', 'right', 'top', 'left'],
        >>>     'right': ['right', 'top', 'left', 'bottom']
        >>> }

        >>> print(scenario.edge_starts)
        >>> [('bottom', 0), ('right', 57.5), ('top', 115.0), ('left', 172.5)]

    Finally, the loop scenario does not contain any junctions or intersections,
    and as a result the `internal_edge_starts` and `intersection_edge_starts`
    attributes are both set to None. For an example of a network with junctions
    and intersections, please refer to: flow/scenarios/figure_eight.py.

        >>> print(scenario.internal_edge_starts)
        >>> [(':', -1)]

        >>> print(scenario.intersection_edge_starts)
        >>> []
    """

    def __init__(self,
                 name,
                 vehicles,
                 net_params,
                 initial_config=InitialConfig(),
                 traffic_lights=TrafficLightParams()):
        """Instantiate the base scenario class.

        Parameters
        ----------
        name : str
            A tag associated with the scenario
        vehicles : flow.core.params.VehicleParams
            see flow/core/params.py
        net_params : flow.core.params.NetParams
            see flow/core/params.py
        initial_config : flow.core.params.InitialConfig
            see flow/core/params.py
        traffic_lights : flow.core.params.TrafficLightParams
            see flow/core/params.py
        """
        # Invoke serializable if using rllab
        if Serializable is not object:
            Serializable.quick_init(self, locals())

        self.orig_name = name  # To avoid repeated concatenation upon reset
        # timestamp suffix differentiates generated files across parallel runs
        self.name = name + time.strftime('_%Y%m%d-%H%M%S') + str(time.time())

        self.vehicles = vehicles
        self.net_params = net_params
        self.initial_config = initial_config
        self.traffic_lights = traffic_lights

        # specify routes vehicles can take
        self.routes = self.specify_routes(net_params)

        if net_params.template is None and net_params.osm_path is None:
            # specify the attributes of the nodes
            self.nodes = self.specify_nodes(net_params)
            # collect the attributes of each edge
            self.edges = self.specify_edges(net_params)
            # specify the types attributes (default is None)
            self.types = self.specify_types(net_params)
            # specify the connection attributes (default is None)
            self.connections = self.specify_connections(net_params)

        # this is to be used if file paths other than the the network geometry
        # file is specified
        elif type(net_params.template) is dict:
            if 'rou' in net_params.template:
                veh, rou = self._vehicle_infos(net_params.template['rou'])

                vtypes = self._vehicle_type(net_params.template.get('vtype'))
                cf = self._get_cf_params(vtypes)
                lc = self._get_lc_params(vtypes)

                # add the vehicle types to the VehicleParams object
                for t in vtypes:
                    vehicles.add(veh_id=t, car_following_params=cf[t],
                                 lane_change_params=lc[t], num_vehicles=0)

                # add the routes of the vehicles that will be departed later
                # under the name of the vehicle. This will later be identified
                # by k.vehicles._add_departed
                self.routes = rou

                # vehicles to be added with different departure times
                self.template_vehicles = veh

            self.types = None
            self.nodes = None
            self.edges = None
            self.connections = None

        # osm_path or template as type str
        else:
            self.nodes = None
            self.edges = None
            self.types = None
            self.connections = None

        # optional parameters, used to get positions from some global reference
        self.edge_starts = self.specify_edge_starts()
        self.internal_edge_starts = self.specify_internal_edge_starts()
        self.intersection_edge_starts = []  # this will be deprecated

    # TODO: convert to property
    def specify_edge_starts(self):
        """Define edge starts for road sections in the network.

        This is meant to provide some global reference frame for the road
        edges in the network.

        By default, the edge starts are specified from the network
        configuration file. Note that, the values are arbitrary but do not
        allow the positions of any two edges to overlap, thereby making them
        compatible with all starting position methods for vehicles.

        Returns
        -------
        list of (str, float)
            list of edge names and starting positions,
            ex: [(edge0, pos0), (edge1, pos1), ...]
        """
        return None

    # TODO: convert to property
    def specify_internal_edge_starts(self):
        """Define the edge starts for internal edge nodes.

        This is meant to provide some global reference frame for the internal
        edges in the network.

        These edges are the result of finite-length connections between road
        sections. This method does not need to be specified if "no-internal-
        links" is set to True in net_params.

        By default, all internal edge starts are given a position of -1. This
        may be overridden; however, in general we do not worry about internal
        edges and junctions in large networks.

        Returns
        -------
        list of (str, float)
            list of internal junction names and starting positions,
            ex: [(internal0, pos0), (internal1, pos1), ...]
        """
        return [(':', -1)]

    # TODO: convert to property
    def specify_nodes(self, net_params):
        """Specify the attributes of nodes in the network.

        Parameters
        ----------
        net_params : flow.core.params.NetParams
            see flow/core/params.py

        Returns
        -------
        list of dict
            A list of node attributes (a separate dict for each node). Nodes
            attributes must include:

            * id {string} -- name of the node
            * x {float} -- x coordinate of the node
            * y {float} -- y coordinate of the node

            Other attributes may also be specified. See:
            http://sumo.dlr.de/wiki/Networks/Building_Networks_from_own_XML-descriptions#Node_Descriptions
        """
        raise NotImplementedError

    # TODO: convert to property
    def specify_edges(self, net_params):
        """Specify the attributes of edges connecting pairs on nodes.

        Parameters
        ----------
        net_params : flow.core.params.NetParams
            see flow/core/params.py

        Returns
        -------
        list of dict
            A list of edges attributes (a separate dict for each edge). Edge
            attributes must include:

            * id {string} -- name of the edge
            * from {string} -- name of node the directed edge starts from
            * to {string} -- name of the node the directed edge ends at

            In addition, the attributes must contain at least one of the
            following:

            * "numLanes" {int} and "speed" {float} -- the number of lanes and
              speed limit of the edge, respectively
            * type {string} -- a type identifier for the edge, which can be
              used if several edges are supposed to possess the same number of
              lanes, speed limits, etc...

            Other attributes may also be specified. See:
            http://sumo.dlr.de/wiki/Networks/Building_Networks_from_own_XML-descriptions#Edge_Descriptions
        """
        raise NotImplementedError

    # TODO: convert to property
    def specify_types(self, net_params):
        """Specify the attributes of various edge types (if any exist).

        Parameters
        ----------
        net_params : flow.core.params.NetParams
            see flow/core/params.py

        Returns
        -------
        list of dict
            A list of type attributes for specific groups of edges. If none are
            specified, no .typ.xml file is created.

            For information on type attributes, see:
            http://sumo.dlr.de/wiki/Networks/Building_Networks_from_own_XML-descriptions#Type_Descriptions
        """
        return None

    # TODO: convert to property
    def specify_connections(self, net_params):
        """Specify the attributes of connections.

        These attributes are used to describe how any specific node's incoming
        and outgoing edges/lane pairs are connected. If no connections are
        specified, sumo generates default connections.

        Parameters
        ----------
        net_params : flow.core.params.NetParams
            see flow/core/params.py

        Returns
        -------
        list of dict
            A list of connection attributes. If none are specified, no .con.xml
            file is created.

            For information on type attributes, see:
            http://sumo.dlr.de/wiki/Networks/Building_Networks_from_own_XML-descriptions#Connection_Descriptions
        """
        return None

    # TODO: convert to property
    def specify_routes(self, net_params):
        """Specify the routes vehicles can take starting from any edge.

        Routes can be specified in one of three ways:

        * In this case of deterministic routes (as is the case in the ring road
          scenario), the routes can be specified as dictionary where the key
          element represents the starting edge and the element is a single list
          of edges the vehicle must traverse, with the first edge corresponding
          to the edge the vehicle begins on. Note that the edges must be
          connected for the route to be valid.

          For example (from flow/scenarios/loop.py):

            >>> def specify_routes(self, net_params):
            >>>     return {
            >>>         "top": ["top", "left", "bottom", "right"],
            >>>         "left": ["left", "bottom", "right", "top"],
            >>>         "bottom": ["bottom", "right", "top", "left"],
            >>>         "right": ["right", "top", "left", "bottom"]
            >>>     }

        * Alternatively, if the routes are meant to be stochastic, each element
          can consist of a list of (route, probability) tuples, where the first
          element in the tuple is one of the routes a vehicle can take from a
          specific starting edge, and the second element is the probability
          that vehicles will choose that route. Note that, in this case, the
          sum of probability values for each dictionary key must sum up to one.

          For example, if we were to imagine the edge "right" in the ring road
          examples where split into two edges, "right_0" and "right_1", the
          routes for vehicles in this network in the probabilistic setting can
          be:

            >>> def specify_routes(self, net_params):
            >>>     return {
            >>>         "top": [
            >>>             (["top", "left", "bottom", "right_0"], 0.9),
            >>>             (["top", "left", "bottom", "right_1"], 0.1)
            >>>         ],
            >>>         "left": [
            >>>             (["left", "bottom", "right_0", "top"], 0.3),
            >>>             (["left", "bottom", "right_1", "top"], 0.7)
            >>>         ],
            >>>         "bottom": [
            >>>             (["bottom", "right_0", "top", "left"], 0.5),
            >>>             (["bottom", "right_1", "top", "left"], 0.5)
            >>>         ],
            >>>         "right_0": [
            >>>             (["right_0", "top", "left", "bottom"], 1)
            >>>         ],
            >>>         "right_1": [
            >>>             (["right_1", "top", "left", "bottom"], 1)
            >>>         ]
            >>>     }

        * Finally, if you would like to assign a specific starting edge and
          route to a vehicle with a specific ID, you can do so by adding an
          element into the dictionary whose key is the name of the vehicle and
          whose content is the list of edges the vehicle is meant to traverse
          as soon as it is introduced to the network.

          As an example, assume we have 4 vehicles named 'human_0', 'human_1',
          'human_2', and 'human_3' in the original ring road. Then, an
          appropriate definition of the routes may look something like:

            >>> def specify_routes(self, net_params):
            >>>     return {
            >>>         "human_0": ["top", "left", "bottom", "right"],
            >>>         "human_1": ["left", "bottom", "right", "top"],
            >>>         "human_2": ["bottom", "right", "top", "left"],
            >>>         "human_3": ["right", "top", "left", "bottom"]
            >>>     }

          **Note**: This feature is experimental, and may not always work as
          expected (for example if the starting positions and routes of a
          specific vehicle do not match).

        The `define_routes` method is optional, and need not be defined. If it
        is not implemented, vehicles that enter a network are assigned routes
        consisting solely on their current edges, and exit the network once
        they reach the end of their edge. Routes, however, can be reassigned
        during simulation via a routing controller (see
        flow/controllers/routing_controllers.py).

        Parameters
        ----------
        net_params : flow.core.params.NetParams
            see flow/core/params.py

        Returns
        -------
        dict
            Key = name of the starting edge
            Element = list of edges a vehicle starting from this edge must
            traverse *OR* a list of (route, probability) tuples for each
            starting edge
        """
        return None

    @staticmethod
    def gen_custom_start_pos(cls, net_params, initial_config, num_vehicles):
        """Generate a user defined set of starting positions.

        NOTE(review): despite the @staticmethod decorator, the first argument
        is the scenario kernel (named `cls`), which callers pass explicitly.

        Parameters
        ----------
        cls : flow.core.kernel.scenario.KernelScenario
            flow scenario kernel, with all the relevant methods implemented
        net_params : flow.core.params.NetParams
            network-specific parameters
        initial_config : flow.core.params.InitialConfig
            see flow/core/params.py
        num_vehicles : int
            number of vehicles to be placed on the network

        Returns
        -------
        list of tuple (float, float)
            list of start positions [(edge0, pos0), (edge1, pos1), ...]
        list of int
            list of start lanes
        list of float
            list of start speeds
        """
        raise NotImplementedError

    @staticmethod
    def _vehicle_infos(file_names):
        """Import of vehicle from a configuration file.

        This is a utility function for computing vehicle information. It
        imports a network configuration file, and returns the information on
        the vehicle and add it into the Vehicle object.

        Parameters
        ----------
        file_names : list of str
            path to the xml file to load

        Returns
        -------
        dict <dict>
            * Key = id of the vehicle
            * Element = dict of departure speed, vehicle type, depart Position,
              depart edges
        """
        # this is meant to deal with the case that there is only one rou file
        if isinstance(file_names, str):
            file_names = [file_names]

        vehicle_data = dict()
        routes_data = dict()
        type_data = defaultdict(int)

        for filename in file_names:
            # import the .net.xml file containing all edge/type data
            # (recover=True lets slightly malformed XML still be parsed)
            parser = etree.XMLParser(recover=True)
            tree = ElementTree.parse(filename, parser=parser)
            root = tree.getroot()

            # collect the departure properties and routes and vehicles whose
            # properties are instantiated within the .rou.xml file. This will
            # only apply if such data is within the file (it is not implemented
            # by scenarios in Flow).
            for vehicle in root.findall('vehicle'):
                # collect the edges the vehicle is meant to traverse
                route = vehicle.find('route')
                route_edges = route.attrib["edges"].split(' ')

                # collect the names of each vehicle type and number of vehicles
                # of each type
                type_vehicle = vehicle.attrib['type']
                type_data[type_vehicle] += 1

                vehicle_data[vehicle.attrib['id']] = {
                    'departSpeed': vehicle.attrib['departSpeed'],
                    'depart': vehicle.attrib['depart'],
                    'typeID': type_vehicle,
                    'departPos': vehicle.attrib['departPos'],
                }

                routes_data[vehicle.attrib['id']] = route_edges

            # collect the edges the vehicle is meant to traverse for the given
            # sets of routes that are not associated with individual vehicles
            for route in root.findall('route'):
                route_edges = route.attrib["edges"].split(' ')
                routes_data[route.attrib['id']] = route_edges

        return vehicle_data, routes_data

    @staticmethod
    def _vehicle_type(filename):
        """Import vehicle type data from a *.add.xml file.

        This is a utility function for outputting all the type of vehicle.

        Parameters
        ----------
        filename : str
            path to the vtypes.add.xml file to load

        Returns
        -------
        dict or None
            the key is the vehicle_type id and the value is a dict with the
            type of the vehicle, depart edges, depart Speed, departPos. If no
            filename is provided, this method returns None as well.
        """
        if filename is None:
            return None

        parser = etree.XMLParser(recover=True)
        tree = ElementTree.parse(filename, parser=parser)
        root = tree.getroot()

        veh_type = {}

        # this hack is meant to support the LuST scenario and Flow scenarios
        root = [root] if len(root.findall('vTypeDistribution')) == 0 \
            else root.findall('vTypeDistribution')

        for r in root:
            for vtype in r.findall('vType'):
                # TODO: make for everything
                veh_type[vtype.attrib['id']] = {
                    'vClass': vtype.attrib.get('vClass', DEFAULT_VCLASS),
                    'accel': vtype.attrib['accel'],
                    'decel': vtype.attrib['decel'],
                    'sigma': vtype.attrib['sigma'],
                    'length': vtype.attrib.get('length', DEFAULT_LENGTH),
                    'minGap': vtype.attrib['minGap'],
                    'maxSpeed': vtype.attrib['maxSpeed'],
                    'probability': vtype.attrib.get(
                        'probability', DEFAULT_PROBABILITY),
                    'speedDev': vtype.attrib['speedDev']
                }

        return veh_type

    @staticmethod
    def _get_cf_params(vtypes):
        """Return the car-following sumo params from vtypes."""
        ret = {}
        for typ in vtypes:
            # TODO: add vClass
            ret[typ] = SumoCarFollowingParams(
                speed_mode='all_checks',
                accel=float(vtypes[typ]['accel']),
                decel=float(vtypes[typ]['decel']),
                sigma=float(vtypes[typ]['sigma']),
                length=float(vtypes[typ]['length']),
                min_gap=float(vtypes[typ]['minGap']),
                max_speed=float(vtypes[typ]['maxSpeed']),
                probability=float(vtypes[typ]['probability']),
                speed_dev=float(vtypes[typ]['speedDev'])
            )

        return ret

    @staticmethod
    def _get_lc_params(vtypes):
        """Return the lane change sumo params from vtypes."""
        ret = {}
        for typ in vtypes:
            ret[typ] = SumoLaneChangeParams(lane_change_mode=1621)

        return ret

    def __str__(self):
        """Return the name of the scenario and the number of vehicles."""
        return 'Scenario ' + self.name + ' with ' + \
            str(self.vehicles.num_vehicles) + ' vehicles.'
|
|
import datetime
from pprint import pprint
from bitmovin import Bitmovin, Encoding, S3Output, H264CodecConfiguration, \
AACCodecConfiguration, H264Profile, StreamInput, SelectionMode, Stream, EncodingOutput, ACLEntry, \
ACLPermission, FMP4Muxing, MuxingStream, CloudRegion, DashManifest, FMP4Representation, FMP4RepresentationType, \
Period, VideoAdaptationSet, AudioAdaptationSet, TSMuxing, HlsManifest, AudioMedia, VariantStream, S3Input
from bitmovin.errors import BitmovinError
from bitmovin.resources.models.encodings.conditions import Condition
# Bitmovin account credentials
API_KEY = '<INSERT_YOUR_API_KEY>'

# S3 bucket holding the source file
S3_INPUT_ACCESSKEY = '<INSERT_YOUR_ACCESS_KEY>'
S3_INPUT_SECRETKEY = '<INSERT_YOUR_SECRET_KEY>'
S3_INPUT_BUCKETNAME = '<INSERT_YOUR_BUCKET_NAME>'
S3_INPUT_PATH = 'path/to/input/file.mp4'

# S3 bucket that will receive the muxings and manifests
S3_OUTPUT_ACCESSKEY = '<INSERT_YOUR_ACCESS_KEY>'
S3_OUTPUT_SECRETKEY = '<INSERT_YOUR_SECRET_KEY>'
S3_OUTPUT_BUCKETNAME = '<INSERT_YOUR_BUCKET_NAME>'

# timestamp component embedded in the output path so that repeated runs do
# not overwrite each other's results
date_component = str(datetime.datetime.now()).replace(' ', '_').replace(':', '-').split('.')[0].replace('_', '__')
OUTPUT_BASE_PATH = 'your/output/base/path/{}/'.format(date_component)

# Please set here the encoding profiles. You can modify height, bitrate and fps.
encoding_profiles_h264 = [
    dict(height=240, bitrate=400, fps=None, profile=H264Profile.HIGH),
    dict(height=360, bitrate=800, fps=None, profile=H264Profile.HIGH),
    dict(height=480, bitrate=1200, fps=None, profile=H264Profile.HIGH),
    dict(height=720, bitrate=2400, fps=None, profile=H264Profile.HIGH),
    dict(height=1080, bitrate=4800, fps=None, profile=H264Profile.HIGH),
]
def main():
    """Encode one input into an H264/AAC ladder and publish DASH + HLS manifests.

    Workflow: create S3 input/output resources, build one H264 codec
    configuration per entry of ``encoding_profiles_h264`` plus a single AAC
    configuration, create conditioned streams, add FMP4 (DASH) and TS (HLS)
    muxings, start the encoding and wait for it, then generate and start both
    manifests.  Exits the process with -1 if the encoding or a manifest fails.
    """
    bitmovin = Bitmovin(api_key=API_KEY)
    # Create an S3 input. This resource is then used as base bucket for the input file.
    s3_input = S3Input(access_key=S3_INPUT_ACCESSKEY,
                       secret_key=S3_INPUT_SECRETKEY,
                       bucket_name=S3_INPUT_BUCKETNAME,
                       name='S3 Input')
    s3_input = bitmovin.inputs.S3.create(s3_input).resource
    # Create an S3 Output. This will be used as target bucket for the muxings, sprites and manifests
    s3_output = S3Output(access_key=S3_OUTPUT_ACCESSKEY,
                         secret_key=S3_OUTPUT_SECRETKEY,
                         bucket_name=S3_OUTPUT_BUCKETNAME,
                         name='S3 Output')
    s3_output = bitmovin.outputs.S3.create(s3_output).resource
    # Create an Encoding. This will run in AWS_EU_WEST_1. This is the base entity used to configure the encoding.
    encoding = Encoding(name='Encoding with video/audio and stream condition',
                        cloud_region=CloudRegion.AWS_EU_WEST_1)
    encoding = bitmovin.encodings.Encoding.create(encoding).resource
    encoding_configs = []
    # Iterate over all encoding profiles and create the H264 configuration with the defined height and bitrate.
    for idx, _ in enumerate(encoding_profiles_h264):
        profile_h264 = encoding_profiles_h264[idx]
        encoding_config = dict(profile_h264=profile_h264)
        h264_codec = H264CodecConfiguration(
            name='H264 Codec {}p {}k Configuration'.format(profile_h264.get('height'),
                                                           profile_h264.get('bitrate')),
            bitrate=profile_h264.get('bitrate') * 1000,
            height=profile_h264.get('height'),
            profile=profile_h264.get('profile'),
            rate=profile_h264.get("fps"))
        encoding_config['h264_codec'] = bitmovin.codecConfigurations.H264.create(h264_codec).resource
        encoding_configs.append(encoding_config)
    # Also the AAC configuration has to be created, which will be later on used to create the streams.
    audio_codec_configuration = AACCodecConfiguration(name='AAC Codec Configuration',
                                                      bitrate=128000,
                                                      rate=48000)
    audio_codec_configuration = bitmovin.codecConfigurations.AAC.create(audio_codec_configuration).resource
    video_input_stream = StreamInput(input_id=s3_input.id,
                                     input_path=S3_INPUT_PATH,
                                     selection_mode=SelectionMode.VIDEO_RELATIVE,
                                     position=0)
    audio_input_stream = StreamInput(input_id=s3_input.id,
                                     input_path=S3_INPUT_PATH,
                                     selection_mode=SelectionMode.AUDIO_RELATIVE,
                                     position=0)
    # With the configurations and the input file streams are now created and muxed later on.
    for encoding_config in encoding_configs:
        encoding_profile = encoding_config.get("profile_h264")
        # Only produce the rendition when the input is at least as tall as the
        # target height (skips upscaled qualities).
        video_stream_condition = Condition(attribute="HEIGHT", operator=">=", value=str(encoding_profile.get('height')))
        video_stream_h264 = Stream(codec_configuration_id=encoding_config.get("h264_codec").id,
                                   input_streams=[video_input_stream],
                                   conditions=video_stream_condition,
                                   name='Stream H264 {}p_{}k'.format(encoding_profile.get('height'),
                                                                     encoding_profile.get('bitrate')))
        encoding_config['h264_stream'] = bitmovin.encodings.Stream.create(object_=video_stream_h264,
                                                                          encoding_id=encoding.id).resource
    # Only create the audio stream when the input actually has an audio track.
    audio_stream_condition = Condition(attribute="INPUTSTREAM", operator="==", value="TRUE")
    audio_stream = Stream(codec_configuration_id=audio_codec_configuration.id,
                          input_streams=[audio_input_stream],
                          conditions=audio_stream_condition,
                          name='Audio Stream')
    audio_stream = bitmovin.encodings.Stream.create(object_=audio_stream, encoding_id=encoding.id).resource
    acl_entry = ACLEntry(permission=ACLPermission.PUBLIC_READ)
    # Create FMP4 muxings which are later used for the DASH manifest. The current settings will set a segment length
    # of 4 seconds.
    for encoding_config in encoding_configs:
        encoding_profile = encoding_config.get("profile_h264")
        video_muxing_stream_h264 = MuxingStream(encoding_config.get("h264_stream").id)
        video_muxing_output_h264 = EncodingOutput(output_id=s3_output.id,
                                                  output_path=OUTPUT_BASE_PATH + 'video/h264/dash/{}p_{}k/'.format(
                                                      encoding_profile.get('height'),
                                                      encoding_profile.get('bitrate')),
                                                  acl=[acl_entry])
        video_muxing_h264 = FMP4Muxing(segment_length=4,
                                       segment_naming='seg_%number%.m4s',
                                       init_segment_name='init.mp4',
                                       streams=[video_muxing_stream_h264],
                                       outputs=[video_muxing_output_h264],
                                       name='FMP4 H264 Muxing {}p_{}k'.format(encoding_profile.get('height'),
                                                                              encoding_profile.get('bitrate')))
        encoding_config['h264_muxing'] = bitmovin.encodings.Muxing.FMP4.create(object_=video_muxing_h264,
                                                                               encoding_id=encoding.id).resource
        video_ts_muxing_output_h264 = EncodingOutput(output_id=s3_output.id,
                                                     output_path=OUTPUT_BASE_PATH + 'video/h264/hls/{}p_{}k/'.format(
                                                         encoding_profile.get('height'),
                                                         encoding_profile.get('bitrate')),
                                                     acl=[acl_entry])
        # Fix: this resource is a TSMuxing but was previously labelled
        # 'FMP4 H264 Muxing ...' (copy-paste from the FMP4 block above).
        video_ts_muxing_h264 = TSMuxing(segment_length=4,
                                        segment_naming='seg_%number%.ts',
                                        streams=[video_muxing_stream_h264],
                                        outputs=[video_ts_muxing_output_h264],
                                        name='TS H264 Muxing {}p_{}k'.format(encoding_profile.get('height'),
                                                                             encoding_profile.get('bitrate')))
        encoding_config['h264_ts_muxing'] = bitmovin.encodings.Muxing.TS.create(object_=video_ts_muxing_h264,
                                                                                encoding_id=encoding.id).resource
    audio_muxing_stream = MuxingStream(audio_stream.id)
    # FMP4 audio muxing (used by the DASH manifest)
    audio_muxing_output = EncodingOutput(output_id=s3_output.id,
                                         output_path=OUTPUT_BASE_PATH + "audio/dash/",
                                         acl=[acl_entry])
    audio_muxing = FMP4Muxing(segment_length=4,
                              segment_naming='seg_%number%.m4s',
                              init_segment_name='init.mp4',
                              streams=[audio_muxing_stream],
                              outputs=[audio_muxing_output],
                              name='Audio Dash Muxing')
    audio_muxing = bitmovin.encodings.Muxing.FMP4.create(object_=audio_muxing,
                                                         encoding_id=encoding.id).resource
    # TS audio muxing (used by the HLS manifest)
    hls_audio_muxing_output = EncodingOutput(output_id=s3_output.id,
                                             output_path=OUTPUT_BASE_PATH + "audio/hls/",
                                             acl=[acl_entry])
    hls_audio_muxing = TSMuxing(segment_length=4,
                                segment_naming='seg_%number%.ts',
                                streams=[audio_muxing_stream],
                                outputs=[hls_audio_muxing_output],
                                name='Audio TS Muxing')
    hls_audio_muxing = bitmovin.encodings.Muxing.TS.create(object_=hls_audio_muxing,
                                                           encoding_id=encoding.id).resource
    bitmovin.encodings.Encoding.start(encoding_id=encoding.id)
    try:
        bitmovin.encodings.Encoding.wait_until_finished(encoding_id=encoding.id)
    except BitmovinError as bitmovin_error:
        print("Exception occurred while waiting for encoding to finish:")
        pprint(bitmovin_error)
        exit(-1)
    # Specify the output for manifest which will be in the OUTPUT_BASE_PATH.
    manifest_output = EncodingOutput(output_id=s3_output.id,
                                     output_path=OUTPUT_BASE_PATH,
                                     acl=[acl_entry])
    # Create a DASH H264 manifest and add one period with an adaptation set for audio and video
    dash_manifest_h264 = DashManifest(manifest_name='stream.mpd',
                                      outputs=[manifest_output],
                                      name='DASH H264 Manifest')
    dash_manifest_h264 = bitmovin.manifests.DASH.create(dash_manifest_h264).resource
    period_h264 = Period()
    period_h264 = bitmovin.manifests.DASH.add_period(object_=period_h264, manifest_id=dash_manifest_h264.id).resource
    video_adaptation_set_h264 = VideoAdaptationSet()
    video_adaptation_set_h264 = bitmovin.manifests.DASH.add_video_adaptation_set(object_=video_adaptation_set_h264,
                                                                                 manifest_id=dash_manifest_h264.id,
                                                                                 period_id=period_h264.id).resource
    audio_adaptation_set_h264 = AudioAdaptationSet(lang='en')
    audio_adaptation_set_h264 = bitmovin.manifests.DASH.add_audio_adaptation_set(object_=audio_adaptation_set_h264,
                                                                                 manifest_id=dash_manifest_h264.id,
                                                                                 period_id=period_h264.id).resource
    fmp4_representation_audio = FMP4Representation(FMP4RepresentationType.TEMPLATE,
                                                   encoding_id=encoding.id,
                                                   muxing_id=audio_muxing.id,
                                                   segment_path="audio/dash/")
    bitmovin.manifests.DASH.add_fmp4_representation(object_=fmp4_representation_audio,
                                                    manifest_id=dash_manifest_h264.id,
                                                    period_id=period_h264.id,
                                                    adaptationset_id=audio_adaptation_set_h264.id)
    # Add all representations to the video adaptation set
    for encoding_config in encoding_configs:
        encoding_profile = encoding_config.get("profile_h264")
        muxing = encoding_config.get('h264_muxing')
        fmp4_representation = FMP4Representation(FMP4RepresentationType.TEMPLATE,
                                                 encoding_id=encoding.id,
                                                 muxing_id=muxing.id,
                                                 segment_path='video/h264/dash/{}p_{}k/'.format(
                                                     encoding_profile.get('height'),
                                                     encoding_profile.get('bitrate')))
        encoding_config['h264_dash'] = bitmovin.manifests.DASH.add_fmp4_representation(
            object_=fmp4_representation,
            manifest_id=dash_manifest_h264.id,
            period_id=period_h264.id,
            adaptationset_id=video_adaptation_set_h264.id
        ).resource
    bitmovin.manifests.DASH.start(manifest_id=dash_manifest_h264.id)
    # Create a HLS H264 manifest and add one period with an adaptation set for audio and video
    hls_manifest = HlsManifest(manifest_name='stream.m3u8',
                               outputs=[manifest_output],
                               name='HLS H264 Manifest')
    hls_manifest = bitmovin.manifests.HLS.create(object_=hls_manifest).resource
    hls_audio_media = AudioMedia(name='en', group_id='audio_group',
                                 segment_path="audio/hls/",
                                 encoding_id=encoding.id,
                                 stream_id=audio_stream.id,
                                 muxing_id=hls_audio_muxing.id,
                                 language='en',
                                 uri="audio.m3u8")
    bitmovin.manifests.HLS.AudioMedia.create(manifest_id=hls_manifest.id,
                                             object_=hls_audio_media)
    # Append one variant stream per video rendition to the HLS manifest.
    for encoding_config in encoding_configs:
        encoding_profile = encoding_config.get("profile_h264")
        video_stream_h264 = encoding_config.get("h264_stream")
        ts_muxing = encoding_config.get('h264_ts_muxing')
        hls_variant_stream = VariantStream(audio="audio_group",
                                           segment_path='video/h264/hls/{}p_{}k/'.format(
                                               encoding_profile.get('height'),
                                               encoding_profile.get('bitrate')),
                                           uri='video_{}p_{}.m3u8'.format(
                                               encoding_profile.get('height'),
                                               encoding_profile.get('bitrate')),
                                           encoding_id=encoding.id,
                                           stream_id=video_stream_h264.id,
                                           muxing_id=ts_muxing.id)
        bitmovin.manifests.HLS.VariantStream.create(manifest_id=hls_manifest.id,
                                                    object_=hls_variant_stream)
    bitmovin.manifests.HLS.start(manifest_id=hls_manifest.id)
    try:
        bitmovin.manifests.DASH.wait_until_finished(manifest_id=dash_manifest_h264.id)
    except BitmovinError as bitmovin_error:
        print("Exception occurred while waiting for manifest creation to finish: {}".format(bitmovin_error))
        exit(-1)
    try:
        bitmovin.manifests.HLS.wait_until_finished(manifest_id=hls_manifest.id)
    except BitmovinError as bitmovin_error:
        print("Exception occurred while waiting for manifest creation to finish: {}".format(bitmovin_error))
        exit(-1)
if __name__ == '__main__':
    # Allow running this example directly as a script.
    main()
|
|
"""
This bootstrap module contains code for ensuring that the astropy_helpers
package will be importable by the time the setup.py script runs. It also
includes some workarounds to ensure that a recent-enough version of setuptools
is being used for the installation.
This module should be the first thing imported in the setup.py of distributions
that make use of the utilities in astropy_helpers. If the distribution ships
with its own copy of astropy_helpers, this module will first attempt to import
from the shipped copy. However, it will also check PyPI to see if there are
any bug-fix releases on top of the current version that may be useful to get
past platform-specific bugs that have been fixed. When running setup.py, use
the ``--offline`` command-line option to disable the auto-upgrade checks.
When this module is imported or otherwise executed it automatically calls a
main function that attempts to read the project's setup.cfg file, which it
checks for a configuration section called ``[ah_bootstrap]``. The presence of
that section, and options therein, determine the next step taken: If it
contains an option called ``auto_use`` with a value of ``True``, it will
automatically call the main function of this module called
`use_astropy_helpers` (see that function's docstring for full details).
Otherwise no further action is taken and by default the system-installed version
of astropy-helpers will be used (however, ``ah_bootstrap.use_astropy_helpers``
may be called manually from within the setup.py script).
This behavior can also be controlled using the ``--auto-use`` and
``--no-auto-use`` command-line flags. For clarity, an alias for
``--no-auto-use`` is ``--use-system-astropy-helpers``, and we recommend using
the latter if needed.
Additional options in the ``[ah_bootstrap]`` section of setup.cfg have the same
names as the arguments to `use_astropy_helpers`, and can be used to configure
the bootstrap script when ``auto_use = True``.
See https://github.com/astropy/astropy-helpers for more details, and for the
latest version of this module.
"""
import contextlib
import errno
import io
import locale
import os
import re
import subprocess as sp
import sys
from distutils import log
from distutils.debug import DEBUG
from configparser import ConfigParser, RawConfigParser
import pkg_resources
from setuptools import Distribution
from setuptools.package_index import PackageIndex
# This is the minimum Python version required for astropy-helpers
__minimum_python_version__ = (3, 5)
# TODO: Maybe enable checking for a specific version of astropy_helpers?
# Names the helpers package is known by on PyPI and as an import, respectively.
DIST_NAME = 'astropy-helpers'
PACKAGE_NAME = 'astropy_helpers'
# If set, auto-installed versions must be strictly below this (exclusive bound).
UPPER_VERSION_EXCLUSIVE = None
# Defaults for other options
DOWNLOAD_IF_NEEDED = True
INDEX_URL = 'https://pypi.python.org/simple'
USE_GIT = True
OFFLINE = False
AUTO_UPGRADE = True
# A list of all the configuration options and their required types
CFG_OPTIONS = [
    ('auto_use', bool), ('path', str), ('download_if_needed', bool),
    ('index_url', str), ('use_git', bool), ('offline', bool),
    ('auto_upgrade', bool)
]
# Start off by parsing the setup.cfg file
SETUP_CFG = ConfigParser()
if os.path.exists('setup.cfg'):
    try:
        SETUP_CFG.read('setup.cfg')
    except Exception as e:
        if DEBUG:
            raise
        # NOTE(review): ``_err_help_msg`` is not defined at this point in the
        # module (presumably it appears further down the file); if this
        # handler ever runs during import it will raise NameError -- confirm
        # the definition order.
        log.error(
            "Error reading setup.cfg: {0!r}\n{1} will not be "
            "automatically bootstrapped and package installation may fail."
            "\n{2}".format(e, PACKAGE_NAME, _err_help_msg))
# We used package_name in the package template for a while instead of name
if SETUP_CFG.has_option('metadata', 'name'):
    parent_package = SETUP_CFG.get('metadata', 'name')
elif SETUP_CFG.has_option('metadata', 'package_name'):
    parent_package = SETUP_CFG.get('metadata', 'package_name')
else:
    parent_package = None
# Enforce the project's own python_requires (if declared), then the minimum
# Python version needed by astropy-helpers itself.
if SETUP_CFG.has_option('options', 'python_requires'):
    python_requires = SETUP_CFG.get('options', 'python_requires')
    # The python_requires key has a syntax that can be parsed by SpecifierSet
    # in the packaging package. However, we don't want to have to depend on that
    # package, so instead we can use setuptools (which bundles packaging). We
    # have to add 'python' to parse it with Requirement.
    from pkg_resources import Requirement
    req = Requirement.parse('python' + python_requires)
    # We want the Python version as a string, which we can get from the platform module
    import platform
    # strip off trailing '+' in case this is a dev install of python
    python_version = platform.python_version().strip('+')
    # allow pre-releases to count as 'new enough'
    if not req.specifier.contains(python_version, True):
        if parent_package is None:
            message = "ERROR: Python {} is required by this package\n".format(req.specifier)
        else:
            message = "ERROR: Python {} is required by {}\n".format(req.specifier, parent_package)
        sys.stderr.write(message)
        sys.exit(1)
if sys.version_info < __minimum_python_version__:
    if parent_package is None:
        message = "ERROR: Python {} or later is required by astropy-helpers\n".format(
            __minimum_python_version__)
    else:
        message = "ERROR: Python {} or later is required by astropy-helpers for {}\n".format(
            __minimum_python_version__, parent_package)
    sys.stderr.write(message)
    sys.exit(1)
# Accepted types for the bootstrap ``path`` option (decoded to str later).
_str_types = (str, bytes)
# What follows are several import statements meant to deal with install-time
# issues with either missing or misbehaving packages (including making sure
# setuptools itself is installed):
# Check that setuptools 30.3 or later is present
from distutils.version import LooseVersion
try:
    import setuptools
    assert LooseVersion(setuptools.__version__) >= LooseVersion('30.3')
except (ImportError, AssertionError):
    sys.stderr.write("ERROR: setuptools 30.3 or later is required by astropy-helpers\n")
    sys.exit(1)
# typing as a dependency for 1.6.1+ Sphinx causes issues when imported after
# initializing submodule with ah_bootstrap.py
# See discussion and references in
# https://github.com/astropy/astropy-helpers/issues/302
try:
    import typing   # noqa
except ImportError:
    pass
# Note: The following import is required as a workaround to
# https://github.com/astropy/astropy-helpers/issues/89; if we don't import this
# module now, it will get cleaned up after `run_setup` is called, but that will
# later cause the TemporaryDirectory class defined in it to stop working when
# used later on by setuptools
try:
    import setuptools.py31compat   # noqa
except ImportError:
    pass
# matplotlib can cause problems if it is imported from within a call of
# run_setup(), because in some circumstances it will try to write to the user's
# home directory, resulting in a SandboxViolation. See
# https://github.com/matplotlib/matplotlib/pull/4165
# Making sure matplotlib, if it is available, is imported early in the setup
# process can mitigate this (note importing matplotlib.pyplot has the same
# issue)
try:
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot
except:
    # Deliberately bare except: matplotlib is an optional pre-import
    # workaround, so *any* failure here must be ignored.
    pass
# End compatibility imports...
class _Bootstrapper(object):
"""
Bootstrapper implementation. See ``use_astropy_helpers`` for parameter
documentation.
"""
def __init__(self, path=None, index_url=None, use_git=None, offline=None,
download_if_needed=None, auto_upgrade=None):
if path is None:
path = PACKAGE_NAME
if not (isinstance(path, _str_types) or path is False):
raise TypeError('path must be a string or False')
if not isinstance(path, str):
fs_encoding = sys.getfilesystemencoding()
path = path.decode(fs_encoding) # path to unicode
self.path = path
# Set other option attributes, using defaults where necessary
self.index_url = index_url if index_url is not None else INDEX_URL
self.offline = offline if offline is not None else OFFLINE
# If offline=True, override download and auto-upgrade
if self.offline:
download_if_needed = False
auto_upgrade = False
self.download = (download_if_needed
if download_if_needed is not None
else DOWNLOAD_IF_NEEDED)
self.auto_upgrade = (auto_upgrade
if auto_upgrade is not None else AUTO_UPGRADE)
# If this is a release then the .git directory will not exist so we
# should not use git.
git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git'))
if use_git is None and not git_dir_exists:
use_git = False
self.use_git = use_git if use_git is not None else USE_GIT
# Declared as False by default--later we check if astropy-helpers can be
# upgraded from PyPI, but only if not using a source distribution (as in
# the case of import from a git submodule)
self.is_submodule = False
@classmethod
def main(cls, argv=None):
if argv is None:
argv = sys.argv
config = cls.parse_config()
config.update(cls.parse_command_line(argv))
auto_use = config.pop('auto_use', False)
bootstrapper = cls(**config)
if auto_use:
# Run the bootstrapper, otherwise the setup.py is using the old
# use_astropy_helpers() interface, in which case it will run the
# bootstrapper manually after reconfiguring it.
bootstrapper.run()
return bootstrapper
@classmethod
def parse_config(cls):
if not SETUP_CFG.has_section('ah_bootstrap'):
return {}
config = {}
for option, type_ in CFG_OPTIONS:
if not SETUP_CFG.has_option('ah_bootstrap', option):
continue
if type_ is bool:
value = SETUP_CFG.getboolean('ah_bootstrap', option)
else:
value = SETUP_CFG.get('ah_bootstrap', option)
config[option] = value
return config
@classmethod
def parse_command_line(cls, argv=None):
if argv is None:
argv = sys.argv
config = {}
# For now we just pop recognized ah_bootstrap options out of the
# arg list. This is imperfect; in the unlikely case that a setup.py
# custom command or even custom Distribution class defines an argument
# of the same name then we will break that. However there's a catch22
# here that we can't just do full argument parsing right here, because
# we don't yet know *how* to parse all possible command-line arguments.
if '--no-git' in argv:
config['use_git'] = False
argv.remove('--no-git')
if '--offline' in argv:
config['offline'] = True
argv.remove('--offline')
if '--auto-use' in argv:
config['auto_use'] = True
argv.remove('--auto-use')
if '--no-auto-use' in argv:
config['auto_use'] = False
argv.remove('--no-auto-use')
if '--use-system-astropy-helpers' in argv:
config['auto_use'] = False
argv.remove('--use-system-astropy-helpers')
return config
    def run(self):
        """Locate (or fetch) astropy_helpers and activate it on ``sys.path``.

        Tries each source in order -- local directory/submodule, local archive
        file, then the package index -- and activates the first distribution
        found.  Raises ``_AHBootstrapSystemExit`` if no source succeeds.
        """
        strategies = ['local_directory', 'local_file', 'index']
        dist = None
        # First, remove any previously imported versions of astropy_helpers;
        # this is necessary for nested installs where one package's installer
        # is installing another package via setuptools.sandbox.run_setup, as in
        # the case of setup_requires
        for key in list(sys.modules):
            try:
                if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'):
                    del sys.modules[key]
            except AttributeError:
                # Sometimes mysterious non-string things can turn up in
                # sys.modules
                continue
        # Check to see if the path is a submodule
        self.is_submodule = self._check_submodule()
        # Try each strategy; the for/else raises only when *none* produced a
        # distribution.
        for strategy in strategies:
            method = getattr(self, 'get_{0}_dist'.format(strategy))
            dist = method()
            if dist is not None:
                break
        else:
            raise _AHBootstrapSystemExit(
                "No source found for the {0!r} package; {0} must be "
                "available and importable as a prerequisite to building "
                "or installing this package.".format(PACKAGE_NAME))
        # This is a bit hacky, but if astropy_helpers was loaded from a
        # directory/submodule its Distribution object gets a "precedence" of
        # "DEVELOP_DIST". However, in other cases it gets a precedence of
        # "EGG_DIST". However, when activating the distribution it will only be
        # placed early on sys.path if it is treated as an EGG_DIST, so always
        # do that
        dist = dist.clone(precedence=pkg_resources.EGG_DIST)
        # Otherwise we found a version of astropy-helpers, so we're done
        # Just activate the found distribution on sys.path--if we did a
        # download this usually happens automatically but it doesn't hurt to
        # do it again
        # Note: Adding the dist to the global working set also activates it
        # (makes it importable on sys.path) by default.
        try:
            pkg_resources.working_set.add(dist, replace=True)
        except TypeError:
            # Some (much) older versions of setuptools do not have the
            # replace=True option here. These versions are old enough that all
            # bets may be off anyways, but it's easy enough to work around just
            # in case...
            if dist.key in pkg_resources.working_set.by_key:
                del pkg_resources.working_set.by_key[dist.key]
            pkg_resources.working_set.add(dist)
@property
def config(self):
"""
A `dict` containing the options this `_Bootstrapper` was configured
with.
"""
return dict((optname, getattr(self, optname))
for optname, _ in CFG_OPTIONS if hasattr(self, optname))
def get_local_directory_dist(self):
"""
Handle importing a vendored package from a subdirectory of the source
distribution.
"""
if not os.path.isdir(self.path):
return
log.info('Attempting to import astropy_helpers from {0} {1!r}'.format(
'submodule' if self.is_submodule else 'directory',
self.path))
dist = self._directory_import()
if dist is None:
log.warn(
'The requested path {0!r} for importing {1} does not '
'exist, or does not contain a copy of the {1} '
'package.'.format(self.path, PACKAGE_NAME))
elif self.auto_upgrade and not self.is_submodule:
# A version of astropy-helpers was found on the available path, but
# check to see if a bugfix release is available on PyPI
upgrade = self._do_upgrade(dist)
if upgrade is not None:
dist = upgrade
return dist
def get_local_file_dist(self):
"""
Handle importing from a source archive; this also uses setup_requires
but points easy_install directly to the source archive.
"""
if not os.path.isfile(self.path):
return
log.info('Attempting to unpack and import astropy_helpers from '
'{0!r}'.format(self.path))
try:
dist = self._do_download(find_links=[self.path])
except Exception as e:
if DEBUG:
raise
log.warn(
'Failed to import {0} from the specified archive {1!r}: '
'{2}'.format(PACKAGE_NAME, self.path, str(e)))
dist = None
if dist is not None and self.auto_upgrade:
# A version of astropy-helpers was found on the available path, but
# check to see if a bugfix release is available on PyPI
upgrade = self._do_upgrade(dist)
if upgrade is not None:
dist = upgrade
return dist
def get_index_dist(self):
if not self.download:
log.warn('Downloading {0!r} disabled.'.format(DIST_NAME))
return None
log.warn(
"Downloading {0!r}; run setup.py with the --offline option to "
"force offline installation.".format(DIST_NAME))
try:
dist = self._do_download()
except Exception as e:
if DEBUG:
raise
log.warn(
'Failed to download and/or install {0!r} from {1!r}:\n'
'{2}'.format(DIST_NAME, self.index_url, str(e)))
dist = None
# No need to run auto-upgrade here since we've already presumably
# gotten the most up-to-date version from the package index
return dist
    def _directory_import(self):
        """
        Import astropy_helpers from the given path, which will be added to
        sys.path.

        Returns the `pkg_resources.Distribution` found at the path (running
        ``setup.py egg_info`` first when no egg-info/dist-info exists yet),
        or `None` if no distribution could be found there.
        """
        path = os.path.abspath(self.path)
        # Use an empty WorkingSet rather than the main
        # pkg_resources.working_set, since on older versions of setuptools this
        # will invoke a VersionConflict when trying to install an upgrade
        ws = pkg_resources.WorkingSet([])
        ws.add_entry(path)
        dist = ws.by_key.get(DIST_NAME)
        if dist is None:
            # We didn't find an egg-info/dist-info in the given path, but if a
            # setup.py exists we can generate it
            setup_py = os.path.join(path, 'setup.py')
            if os.path.isfile(setup_py):
                # We use subprocess instead of run_setup from setuptools to
                # avoid segmentation faults - see the following for more details:
                # https://github.com/cython/cython/issues/2104
                sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path)
                for dist in pkg_resources.find_distributions(path, True):
                    # There should be only one...
                    return dist
        return dist
    def _do_download(self, version='', find_links=None):
        """Install astropy-helpers via setuptools' ``setup_requires`` machinery.

        When *find_links* is given, easy_install is pointed directly at those
        locations and the index is disabled (``allow_hosts=''``); otherwise the
        configured index URL is used.  *version* pins an exact release.
        Returns the installed distribution from the global working set, or
        `None` if it did not end up there.
        """
        if find_links:
            allow_hosts = ''
            index_url = None
        else:
            allow_hosts = None
            index_url = self.index_url
        # Annoyingly, setuptools will not handle other arguments to
        # Distribution (such as options) before handling setup_requires, so it
        # is not straightforward to programmatically augment the arguments which
        # are passed to easy_install
        class _Distribution(Distribution):
            # Injects the closed-over find_links/index_url/allow_hosts into
            # the easy_install option dict at lookup time.
            def get_option_dict(self, command_name):
                opts = Distribution.get_option_dict(self, command_name)
                if command_name == 'easy_install':
                    if find_links is not None:
                        opts['find_links'] = ('setup script', find_links)
                    if index_url is not None:
                        opts['index_url'] = ('setup script', index_url)
                    if allow_hosts is not None:
                        opts['allow_hosts'] = ('setup script', allow_hosts)
                return opts
        if version:
            req = '{0}=={1}'.format(DIST_NAME, version)
        else:
            if UPPER_VERSION_EXCLUSIVE is None:
                req = DIST_NAME
            else:
                req = '{0}<{1}'.format(DIST_NAME, UPPER_VERSION_EXCLUSIVE)
        attrs = {'setup_requires': [req]}
        # NOTE: we need to parse the config file (e.g. setup.cfg) to make sure
        # it honours the options set in the [easy_install] section, and we need
        # to explicitly fetch the requirement eggs as setup_requires does not
        # get honored in recent versions of setuptools:
        # https://github.com/pypa/setuptools/issues/1273
        try:
            context = _verbose if DEBUG else _silence
            with context():
                dist = _Distribution(attrs=attrs)
                try:
                    dist.parse_config_files(ignore_option_errors=True)
                    dist.fetch_build_eggs(req)
                except TypeError:
                    # On older versions of setuptools, ignore_option_errors
                    # doesn't exist, and the above two lines are not needed
                    # so we can just continue
                    pass
            # If the setup_requires succeeded it will have added the new dist to
            # the main working_set
            return pkg_resources.working_set.by_key.get(DIST_NAME)
        except Exception as e:
            if DEBUG:
                raise
            msg = 'Error retrieving {0} from {1}:\n{2}'
            if find_links:
                source = find_links[0]
            elif index_url != INDEX_URL:
                source = index_url
            else:
                source = 'PyPI'
            raise Exception(msg.format(DIST_NAME, source, repr(e)))
def _do_upgrade(self, dist):
# Build up a requirement for a higher bugfix release but a lower minor
# release (so API compatibility is guaranteed)
next_version = _next_version(dist.parsed_version)
req = pkg_resources.Requirement.parse(
'{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version))
package_index = PackageIndex(index_url=self.index_url)
upgrade = package_index.obtain(req)
if upgrade is not None:
return self._do_download(version=upgrade.version)
def _check_submodule(self):
"""
Check if the given path is a git submodule.
See the docstrings for ``_check_submodule_using_git`` and
``_check_submodule_no_git`` for further details.
"""
if (self.path is None or
(os.path.exists(self.path) and not os.path.isdir(self.path))):
return False
if self.use_git:
return self._check_submodule_using_git()
else:
return self._check_submodule_no_git()
    def _check_submodule_using_git(self):
        """
        Check if the given path is a git submodule. If so, attempt to initialize
        and/or update the submodule if needed.

        This function makes calls to the ``git`` command in subprocesses. The
        ``_check_submodule_no_git`` option uses pure Python to check if the given
        path looks like a git submodule, but it cannot perform updates.

        Returns `True` when ``git submodule status`` recognizes ``self.path``
        as a submodule, `False` otherwise (including when git is missing).
        """
        cmd = ['git', 'submodule', 'status', '--', self.path]
        try:
            log.info('Running `{0}`; use the --no-git option to disable git '
                     'commands'.format(' '.join(cmd)))
            returncode, stdout, stderr = run_cmd(cmd)
        except _CommandNotFound:
            # The git command simply wasn't found; this is most likely the
            # case on user systems that don't have git and are simply
            # trying to install the package from PyPI or a source
            # distribution. Silently ignore this case and simply don't try
            # to use submodules
            return False
        stderr = stderr.strip()
        if returncode != 0 and stderr:
            # Unfortunately the return code alone cannot be relied on, as
            # earlier versions of git returned 0 even if the requested submodule
            # does not exist
            # This is a warning that occurs in perl (from running git submodule)
            # which only occurs with a malformatted locale setting which can
            # happen sometimes on OSX. See again
            # https://github.com/astropy/astropy/issues/2749
            perl_warning = ('perl: warning: Falling back to the standard locale '
                            '("C").')
            if not stderr.strip().endswith(perl_warning):
                # Some other unknown error condition occurred
                log.warn('git submodule command failed '
                         'unexpectedly:\n{0}'.format(stderr))
                return False
        # Output of `git submodule status` is as follows:
        #
        # 1: Status indicator: '-' for submodule is uninitialized, '+' if
        # submodule is initialized but is not at the commit currently indicated
        # in .gitmodules (and thus needs to be updated), or 'U' if the
        # submodule is in an unstable state (i.e. has merge conflicts)
        #
        # 2. SHA-1 hash of the current commit of the submodule (we don't really
        # need this information but it's useful for checking that the output is
        # correct)
        #
        # 3. The output of `git describe` for the submodule's current commit
        # hash (this includes for example what branches the commit is on) but
        # only if the submodule is initialized. We ignore this information for
        # now
        # NOTE(review): ``[+-U ]`` parses as the character *range* '+'..'U',
        # not the literal set {'+', '-', 'U', ' '}; the range happens to cover
        # all four needed characters, but '-' should arguably be escaped or
        # placed last -- confirm before tightening.
        _git_submodule_status_re = re.compile(
            r'^(?P<status>[+-U ])(?P<commit>[0-9a-f]{40}) '
            r'(?P<submodule>\S+)( .*)?$')
        # The stdout should only contain one line--the status of the
        # requested submodule
        m = _git_submodule_status_re.match(stdout)
        if m:
            # Yes, the path *is* a git submodule
            self._update_submodule(m.group('submodule'), m.group('status'))
            return True
        else:
            log.warn(
                'Unexpected output from `git submodule status`:\n{0}\n'
                'Will attempt import from {1!r} regardless.'.format(
                    stdout, self.path))
            return False
def _check_submodule_no_git(self):
"""
Like ``_check_submodule_using_git``, but simply parses the .gitmodules file
to determine if the supplied path is a git submodule, and does not exec any
subprocesses.
This can only determine if a path is a submodule--it does not perform
updates, etc. This function may need to be updated if the format of the
.gitmodules file is changed between git versions.
"""
gitmodules_path = os.path.abspath('.gitmodules')
if not os.path.isfile(gitmodules_path):
return False
# This is a minimal reader for gitconfig-style files. It handles a few of
# the quirks that make gitconfig files incompatible with ConfigParser-style
# files, but does not support the full gitconfig syntax (just enough
# needed to read a .gitmodules file).
gitmodules_fileobj = io.StringIO()
# Must use io.open for cross-Python-compatible behavior wrt unicode
with io.open(gitmodules_path) as f:
for line in f:
# gitconfig files are more flexible with leading whitespace; just
# go ahead and remove it
line = line.lstrip()
# comments can start with either # or ;
if line and line[0] in (':', ';'):
continue
gitmodules_fileobj.write(line)
gitmodules_fileobj.seek(0)
cfg = RawConfigParser()
try:
cfg.readfp(gitmodules_fileobj)
except Exception as exc:
log.warn('Malformatted .gitmodules file: {0}\n'
'{1} cannot be assumed to be a git submodule.'.format(
exc, self.path))
return False
for section in cfg.sections():
if not cfg.has_option(section, 'path'):
continue
submodule_path = cfg.get(section, 'path').rstrip(os.sep)
if submodule_path == self.path.rstrip(os.sep):
return True
return False
def _update_submodule(self, submodule, status):
    """Initialize or update the git submodule at path ``submodule``.

    ``status`` is the one-character state flag reported by
    ``git submodule status``: ``' '`` (up to date), ``'-'``
    (uninitialized), ``'+'`` (initialized but out of date), or ``'U'``
    (unresolved merge conflicts).  Failures while running git are
    reported via ``log.warn`` and otherwise ignored; ``'-'`` in
    --offline mode and ``'U'`` abort with ``_AHBootstrapSystemExit``.
    """
    if status == ' ':
        # The submodule is up to date; no action necessary
        return
    elif status == '-':
        if self.offline:
            raise _AHBootstrapSystemExit(
                "Cannot initialize the {0} submodule in --offline mode; "
                "this requires being able to clone the submodule from an "
                "online repository.".format(submodule))
        cmd = ['update', '--init']
        action = 'Initializing'
    elif status == '+':
        cmd = ['update']
        action = 'Updating'
        if self.offline:
            # Update to the recorded commit without hitting the network.
            cmd.append('--no-fetch')
    elif status == 'U':
        raise _AHBootstrapSystemExit(
            'Error: Submodule {0} contains unresolved merge conflicts. '
            'Please complete or abandon any changes in the submodule so that '
            'it is in a usable state, then try again.'.format(submodule))
    else:
        # Unknown status flag: warn and fall back to using the submodule
        # in whatever state it is currently in.
        log.warn('Unknown status {0!r} for git submodule {1!r}. Will '
                 'attempt to use the submodule as-is, but try to ensure '
                 'that the submodule is in a clean state and contains no '
                 'conflicts or errors.\n{2}'.format(status, submodule,
                                                    _err_help_msg))
        return

    err_msg = None
    cmd = ['git', 'submodule'] + cmd + ['--', submodule]
    log.warn('{0} {1} submodule with: `{2}`'.format(
        action, submodule, ' '.join(cmd)))

    try:
        log.info('Running `{0}`; use the --no-git option to disable git '
                 'commands'.format(' '.join(cmd)))
        returncode, stdout, stderr = run_cmd(cmd)
    except OSError as e:
        err_msg = str(e)
    else:
        if returncode != 0:
            err_msg = stderr

    if err_msg is not None:
        # Non-fatal: report the failure and continue with the submodule
        # in its current state.
        log.warn('An unexpected error occurred updating the git submodule '
                 '{0!r}:\n{1}\n{2}'.format(submodule, err_msg,
                                           _err_help_msg))
class _CommandNotFound(OSError):
"""
An exception raised when a command run with run_cmd is not found on the
system.
"""
def run_cmd(cmd):
    """
    Run a command in a subprocess, given as a list of command-line
    arguments.

    Returns a ``(returncode, stdout, stderr)`` tuple, where stdout and
    stderr are decoded to text using the default locale encoding (with
    'latin1' as a fallback).

    Raises ``_CommandNotFound`` if the executable does not exist, or
    ``_AHBootstrapSystemExit`` for any other ``OSError`` from ``Popen``
    (unless DEBUG is set, in which case the original error propagates).
    """
    try:
        p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
        # XXX: May block if either stdout or stderr fill their buffers;
        # however for the commands this is currently used for that is
        # unlikely (they should have very brief output)
        stdout, stderr = p.communicate()
    except OSError as e:
        if DEBUG:
            raise

        if e.errno == errno.ENOENT:
            msg = 'Command not found: `{0}`'.format(' '.join(cmd))
            raise _CommandNotFound(msg, cmd)
        else:
            raise _AHBootstrapSystemExit(
                'An unexpected error occurred when running the '
                '`{0}` command:\n{1}'.format(' '.join(cmd), str(e)))

    # Can fail if the default locale is not configured properly. See
    # https://github.com/astropy/astropy/issues/2749. For the purposes under
    # consideration 'latin1' is an acceptable fallback.
    try:
        stdio_encoding = locale.getdefaultlocale()[1] or 'latin1'
    except ValueError:
        # Due to an OSX oddity locale.getdefaultlocale() can also crash
        # depending on the user's locale/language settings. See:
        # http://bugs.python.org/issue18378
        stdio_encoding = 'latin1'

    # Unlikely to fail at this point but even then let's be flexible
    if not isinstance(stdout, str):
        stdout = stdout.decode(stdio_encoding, 'replace')

    if not isinstance(stderr, str):
        stderr = stderr.decode(stdio_encoding, 'replace')

    return (p.returncode, stdout, stderr)
def _next_version(version):
"""
Given a parsed version from pkg_resources.parse_version, returns a new
version string with the next minor version.
Examples
========
>>> _next_version(pkg_resources.parse_version('1.2.3'))
'1.3.0'
"""
if hasattr(version, 'base_version'):
# New version parsing from setuptools >= 8.0
if version.base_version:
parts = version.base_version.split('.')
else:
parts = []
else:
parts = []
for part in version:
if part.startswith('*'):
break
parts.append(part)
parts = [int(p) for p in parts]
if len(parts) < 3:
parts += [0] * (3 - len(parts))
major, minor, micro = parts[:3]
return '{0}.{1}.{2}'.format(major, minor + 1, 0)
class _DummyFile(object):
"""A noop writeable object."""
errors = '' # Required for Python 3.x
encoding = 'utf-8'
def write(self, s):
pass
def flush(self):
pass
@contextlib.contextmanager
def _verbose():
yield
@contextlib.contextmanager
def _silence():
    """A context manager that silences sys.stdout and sys.stderr.

    The real streams are restored when the block exits, whether or not an
    exception was raised inside it (using try/finally instead of the
    previous manual restore-on-both-paths bookkeeping; behavior is
    identical, including on exceptions).
    """
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    sys.stdout = _DummyFile()
    sys.stderr = _DummyFile()

    try:
        yield
    finally:
        # Restore the real streams so that exception handling and any
        # subsequent output work normally.
        sys.stdout = old_stdout
        sys.stderr = old_stderr
# Guidance appended to fatal bootstrap errors (see _AHBootstrapSystemExit
# and the submodule-update warnings above).
_err_help_msg = """
If the problem persists consider installing astropy_helpers manually using pip
(`pip install astropy_helpers`) or by manually downloading the source archive,
extracting it, and installing by running `python setup.py install` from the
root of the extracted source code.
"""
class _AHBootstrapSystemExit(SystemExit):
    """``SystemExit`` that appends the bootstrap troubleshooting help text
    (``_err_help_msg``) to whatever message it is constructed with."""

    def __init__(self, *args):
        if args:
            msg = args[0]
        else:
            msg = 'An unknown problem occurred bootstrapping astropy_helpers.'

        msg += '\n' + _err_help_msg

        super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:])
# Module-level singleton built at import time from the command-line
# configuration; rebuilt by use_astropy_helpers() when kwargs are supplied.
BOOTSTRAPPER = _Bootstrapper.main()
def use_astropy_helpers(**kwargs):
    """
    Ensure that the `astropy_helpers` module is available and is importable.
    This supports automatic submodule initialization if astropy_helpers is
    included in a project as a git submodule, or will download it from PyPI if
    necessary.

    Parameters
    ----------

    path : str or None, optional
        A filesystem path relative to the root of the project's source code
        that should be added to `sys.path` so that `astropy_helpers` can be
        imported from that path.

        If the path is a git submodule it will automatically be initialized
        and/or updated.

        The path may also be to a ``.tar.gz`` archive of the astropy_helpers
        source distribution.  In this case the archive is automatically
        unpacked and made temporarily available on `sys.path` as a ``.egg``
        archive.

        If `None` skip straight to downloading.

    download_if_needed : bool, optional
        If the provided filesystem path is not found an attempt will be made to
        download astropy_helpers from PyPI.  It will then be made temporarily
        available on `sys.path` as a ``.egg`` archive (using the
        ``setup_requires`` feature of setuptools.  If the ``--offline`` option
        is given at the command line the value of this argument is overridden
        to `False`.

    index_url : str, optional
        If provided, use a different URL for the Python package index than the
        main PyPI server.

    use_git : bool, optional
        If `False` no git commands will be used--this effectively disables
        support for git submodules. If the ``--no-git`` option is given at the
        command line the value of this argument is overridden to `False`.

    auto_upgrade : bool, optional
        By default, when installing a package from a non-development source
        distribution ah_boostrap will try to automatically check for patch
        releases to astropy-helpers on PyPI and use the patched version over
        any bundled versions.  Setting this to `False` will disable that
        functionality. If the ``--offline`` option is given at the command line
        the value of this argument is overridden to `False`.

    offline : bool, optional
        If `False` disable all actions that require an internet connection,
        including downloading packages from the package index and fetching
        updates to any git submodule.  Defaults to `True`.
    """
    global BOOTSTRAPPER

    # Start from the command-line-derived configuration and layer the
    # caller's keyword overrides on top of it.
    config = BOOTSTRAPPER.config
    config.update(**kwargs)

    # Create a new bootstrapper with the updated configuration and run it
    BOOTSTRAPPER = _Bootstrapper(**config)
    BOOTSTRAPPER.run()
|
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from code import Code
from model import PropertyType, Type
import cpp_util
import model
import schema_util
import sys
import util_cc_helper
class CCGenerator(object):
    """Facade that produces .cc source for a namespace via ``_Generator``."""

    def __init__(self, type_generator, cpp_namespace):
        self._type_generator = type_generator
        self._cpp_namespace = cpp_namespace

    def Generate(self, namespace):
        """Generate the .cc Code object for |namespace|."""
        generator = _Generator(namespace,
                               self._type_generator,
                               self._cpp_namespace)
        return generator.Generate()
class _Generator(object):
"""A .cc generator for a namespace.
"""
def __init__(self, namespace, cpp_type_generator, cpp_namespace):
    """Stores the schema namespace, type helper, and output C++ namespace."""
    self._namespace = namespace
    self._type_helper = cpp_type_generator
    self._cpp_namespace = cpp_namespace
    # C++ namespace name derived from the schema namespace by the helper.
    self._target_namespace = (
        self._type_helper.GetCppNamespaceName(self._namespace))
    # Helper for emitting calls into the shared util_cc_helper runtime.
    self._util_cc_helper = (
        util_cc_helper.UtilCCHelper(self._type_helper))
def Generate(self):
    """Generates a Code object with the .cc for a single namespace.

    Emission order: license/header boilerplate and #includes, namespace
    opening, then Properties, Types, Functions, and Events sections (each
    only when non-empty), then namespace closing.
    """
    c = Code()
    (c.Append(cpp_util.CHROMIUM_LICENSE)
      .Append()
      .Append(cpp_util.GENERATED_FILE_MESSAGE % self._namespace.source_file)
      .Append()
      .Append(self._util_cc_helper.GetIncludePath())
      .Append('#include "base/logging.h"')
      .Append('#include "base/strings/string_number_conversions.h"')
      .Append('#include "%s/%s.h"' %
              (self._namespace.source_file_dir, self._namespace.unix_name))
      .Cblock(self._type_helper.GenerateIncludes(include_soft=True))
      .Append()
      .Concat(cpp_util.OpenNamespace(self._cpp_namespace))
      .Cblock(self._type_helper.GetNamespaceStart())
    )
    # Constant definitions for namespace-level properties.
    if self._namespace.properties:
        (c.Append('//')
          .Append('// Properties')
          .Append('//')
          .Append()
        )
        for property in self._namespace.properties.values():
            property_code = self._type_helper.GeneratePropertyValues(
                property,
                'const %(type)s %(name)s = %(value)s;',
                nodoc=True)
            if property_code:
                c.Cblock(property_code)
    # Definitions for all top-level types.
    if self._namespace.types:
        (c.Append('//')
          .Append('// Types')
          .Append('//')
          .Append()
          .Cblock(self._GenerateTypes(None, self._namespace.types.values()))
        )
    # Params/Results structs for each function.
    if self._namespace.functions:
        (c.Append('//')
          .Append('// Functions')
          .Append('//')
          .Append()
        )
        for function in self._namespace.functions.values():
            c.Cblock(self._GenerateFunction(function))
    # Argument-creation helpers for each event.
    if self._namespace.events:
        (c.Append('//')
          .Append('// Events')
          .Append('//')
          .Append()
        )
        for event in self._namespace.events.values():
            c.Cblock(self._GenerateEvent(event))
    (c.Concat(self._type_helper.GetNamespaceEnd())
      .Cblock(cpp_util.CloseNamespace(self._cpp_namespace))
    )
    return c
def _GenerateType(self, cpp_namespace, type_):
    """Generates the function definitions for a type.

    Dispatches on |type_.property_type|; for OBJECT/CHOICES types this
    emits constructors/destructors plus Populate/FromValue/ToValue as
    dictated by the type's origin; for ENUM types it emits the
    to/from-string converters.
    """
    classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
    c = Code()

    if type_.functions:
        # Wrap functions within types in the type's namespace.
        (c.Append('namespace %s {' % classname)
          .Append())
        for function in type_.functions.values():
            c.Cblock(self._GenerateFunction(function))
        c.Append('} // namespace %s' % classname)
    elif type_.property_type == PropertyType.ARRAY:
        # Arrays delegate to their element type's definitions.
        c.Cblock(self._GenerateType(cpp_namespace, type_.item_type))
    elif (type_.property_type == PropertyType.OBJECT or
          type_.property_type == PropertyType.CHOICES):
        if cpp_namespace is None:
            classname_in_namespace = classname
        else:
            classname_in_namespace = '%s::%s' % (cpp_namespace, classname)

        if type_.property_type == PropertyType.OBJECT:
            c.Cblock(self._GeneratePropertyFunctions(classname_in_namespace,
                                                     type_.properties.values()))
        else:
            c.Cblock(self._GenerateTypes(classname_in_namespace, type_.choices))

        (c.Append('%s::%s()' % (classname_in_namespace, classname))
          .Cblock(self._GenerateInitializersAndBody(type_))
          .Append('%s::~%s() {}' % (classname_in_namespace, classname))
          .Append()
        )
        # Deserialization only for types that arrive from JSON...
        if type_.origin.from_json:
            c.Cblock(self._GenerateTypePopulate(classname_in_namespace, type_))
            if cpp_namespace is None:  # only generate for top-level types
                c.Cblock(self._GenerateTypeFromValue(classname_in_namespace, type_))
        # ...and serialization only for types sent back to the client.
        if type_.origin.from_client:
            c.Cblock(self._GenerateTypeToValue(classname_in_namespace, type_))
    elif type_.property_type == PropertyType.ENUM:
        (c.Cblock(self._GenerateEnumToString(cpp_namespace, type_))
          .Cblock(self._GenerateEnumFromString(cpp_namespace, type_))
        )

    return c
def _GenerateInitializersAndBody(self, type_):
    """Builds the C++ constructor initializer list (plus empty body) for
    |type_|: required fundamental members get explicit zero/false/NULL
    initializers; everything else default-constructs itself.
    """
    items = []
    for prop in type_.properties.values():
        if prop.optional:
            # Optional members are wrappers/pointers with sane defaults.
            continue

        t = prop.type_
        if t.property_type == PropertyType.INTEGER:
            items.append('%s(0)' % prop.unix_name)
        elif t.property_type == PropertyType.DOUBLE:
            items.append('%s(0.0)' % prop.unix_name)
        elif t.property_type == PropertyType.BOOLEAN:
            items.append('%s(false)' % prop.unix_name)
        elif t.property_type == PropertyType.BINARY:
            items.append('%s(NULL)' % prop.unix_name)
        elif (t.property_type == PropertyType.ANY or
              t.property_type == PropertyType.ARRAY or
              t.property_type == PropertyType.CHOICES or
              t.property_type == PropertyType.ENUM or
              t.property_type == PropertyType.OBJECT or
              t.property_type == PropertyType.FUNCTION or
              t.property_type == PropertyType.REF or
              t.property_type == PropertyType.STRING):
            # TODO(miket): It would be nice to initialize CHOICES and ENUM, but we
            # don't presently have the semantics to indicate which one of a set
            # should be the default.
            continue
        else:
            # Unknown PropertyType: fail loudly rather than emit bad C++.
            raise TypeError(t)

    if items:
        s = ': %s' % (', '.join(items))
    else:
        s = ''
    s = s + ' {}'
    return Code().Append(s)
def _GenerateTypePopulate(self, cpp_namespace, type_):
    """Generates the function for populating a type given a pointer to it.

    E.g for type "Foo", generates Foo::Populate().  Handles CHOICES types
    (try each choice by base::Value type) and OBJECT types (populate each
    property from the dictionary, plus any additional_properties).
    """
    classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
    c = Code()
    (c.Append('// static')
      .Append('bool %(namespace)s::Populate(')
      .Sblock(' const base::Value& value, %(name)s* out) {')
    )
    if type_.property_type == PropertyType.CHOICES:
        # Try each choice in turn; the first whose Value type matches wins.
        for choice in type_.choices:
            value_type = cpp_util.GetValueType(self._type_helper.FollowRef(choice))
            (c.Sblock('if (value.IsType(%s)) {' % value_type)
              .Concat(self._GeneratePopulateVariableFromValue(
                  choice,
                  '(&value)',
                  'out->as_%s' % choice.unix_name,
                  'false',
                  is_ptr=True))
              .Append('return true;')
              .Eblock('}')
            )
        c.Append('return false;')
    elif type_.property_type == PropertyType.OBJECT:
        (c.Append('if (!value.IsType(base::Value::TYPE_DICTIONARY))')
          .Append(' return false;')
        )
        if type_.properties or type_.additional_properties is not None:
            c.Append('const base::DictionaryValue* dict = '
                     'static_cast<const base::DictionaryValue*>(&value);')
        # Reset all properties first so failed parses leave defaults.
        for prop in type_.properties.values():
            c.Concat(self._InitializePropertyToDefault(prop, 'out'))
        for prop in type_.properties.values():
            c.Concat(self._GenerateTypePopulateProperty(prop, 'dict', 'out'))
        if type_.additional_properties is not None:
            if type_.additional_properties.property_type == PropertyType.ANY:
                # ANY additional properties: copy the whole dictionary.
                c.Append('out->additional_properties.MergeDictionary(dict);')
            else:
                cpp_type = self._type_helper.GetCppType(type_.additional_properties,
                                                        is_in_container=True)
                (c.Append('for (base::DictionaryValue::Iterator it(*dict);')
                  .Sblock(' !it.IsAtEnd(); it.Advance()) {')
                  .Append('%s tmp;' % cpp_type)
                  .Concat(self._GeneratePopulateVariableFromValue(
                      type_.additional_properties,
                      '(&it.value())',
                      'tmp',
                      'false'))
                  .Append('out->additional_properties[it.key()] = tmp;')
                  .Eblock('}')
                )
        c.Append('return true;')
    (c.Eblock('}')
      .Substitute({'namespace': cpp_namespace, 'name': classname}))
    return c
def _GenerateTypePopulateProperty(self, prop, src, dst):
    """Generate the code to populate a single property in a type.

    src: base::DictionaryValue*
    dst: Type*

    Optional properties are populated only if present (optional enums fall
    back to the generated NONE value); required properties fail the
    Populate() with `return false` when missing.
    """
    c = Code()
    value_var = prop.unix_name + '_value'
    c.Append('const base::Value* %(value_var)s = NULL;')
    if prop.optional:
        (c.Sblock(
            'if (%(src)s->GetWithoutPathExpansion("%(key)s", &%(value_var)s)) {')
          .Concat(self._GeneratePopulatePropertyFromValue(
              prop, value_var, dst, 'false')))
        underlying_type = self._type_helper.FollowRef(prop.type_)
        if underlying_type.property_type == PropertyType.ENUM:
            # Absent optional enums are represented by the NONE enum value.
            (c.Append('} else {')
              .Append('%%(dst)s->%%(name)s = %s;' %
                      self._type_helper.GetEnumNoneValue(prop.type_)))
        c.Eblock('}')
    else:
        (c.Append(
            'if (!%(src)s->GetWithoutPathExpansion("%(key)s", &%(value_var)s))')
          .Append(' return false;')
          .Concat(self._GeneratePopulatePropertyFromValue(
              prop, value_var, dst, 'false'))
        )
    c.Append()
    c.Substitute({
        'value_var': value_var,
        'key': prop.name,
        'src': src,
        'dst': dst,
        'name': prop.unix_name
    })
    return c
def _GenerateTypeFromValue(self, cpp_namespace, type_):
    """Generates the static FromValue() factory which wraps Populate()."""
    classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
    c = Code()
    c.Append('// static')
    c.Append('scoped_ptr<%s> %s::FromValue(const base::Value& value) {' % (
        classname, cpp_namespace))
    c.Append(' scoped_ptr<%s> out(new %s());' % (classname, classname))
    c.Append(' if (!Populate(value, out.get()))')
    c.Append(' return scoped_ptr<%s>();' % classname)
    c.Append(' return out.Pass();')
    c.Append('}')
    return c
def _GenerateTypeToValue(self, cpp_namespace, type_):
    """Generates a function that serializes the type into a base::Value.

    E.g. for type "Foo" generates Foo::ToValue()
    """
    property_type = type_.property_type
    if property_type == PropertyType.OBJECT:
        return self._GenerateObjectTypeToValue(cpp_namespace, type_)
    if property_type == PropertyType.CHOICES:
        return self._GenerateChoiceTypeToValue(cpp_namespace, type_)
    raise ValueError("Unsupported property type %s" % type_.type_)
def _GenerateObjectTypeToValue(self, cpp_namespace, type_):
    """Generates a function that serializes an object-representing type
    into a base::DictionaryValue.

    Optional properties are emitted behind a presence check (enum NONE
    check for enums, pointer check otherwise); additional_properties are
    merged or iterated depending on their type.
    """
    c = Code()
    (c.Sblock('scoped_ptr<base::DictionaryValue> %s::ToValue() const {' %
              cpp_namespace)
      .Append('scoped_ptr<base::DictionaryValue> value('
              'new base::DictionaryValue());')
      .Append()
    )

    for prop in type_.properties.values():
        if prop.optional:
            # Optional enum values are generated with a NONE enum value.
            underlying_type = self._type_helper.FollowRef(prop.type_)
            if underlying_type.property_type == PropertyType.ENUM:
                c.Sblock('if (%s != %s) {' %
                         (prop.unix_name,
                          self._type_helper.GetEnumNoneValue(prop.type_)))
            else:
                c.Sblock('if (%s.get()) {' % prop.unix_name)

        # ANY is a base::Value which is abstract and cannot be a direct member, so
        # it will always be a pointer.
        is_ptr = prop.optional or prop.type_.property_type == PropertyType.ANY
        c.Append('value->SetWithoutPathExpansion("%s", %s);' % (
            prop.name,
            self._CreateValueFromType(prop.type_,
                                      'this->%s' % prop.unix_name,
                                      is_ptr=is_ptr)))

        if prop.optional:
            # (stray trailing semicolon removed from the Eblock call below)
            c.Eblock('}')

    if type_.additional_properties is not None:
        if type_.additional_properties.property_type == PropertyType.ANY:
            c.Append('value->MergeDictionary(&additional_properties);')
        else:
            # Non-copyable types will be wrapped in a linked_ptr for inclusion in
            # maps, so we need to unwrap them.
            needs_unwrap = (
                not self._type_helper.IsCopyable(type_.additional_properties))
            cpp_type = self._type_helper.GetCppType(type_.additional_properties,
                                                    is_in_container=True)
            (c.Sblock('for (std::map<std::string, %s>::const_iterator it =' %
                      cpp_util.PadForGenerics(cpp_type))
              .Append(' additional_properties.begin();')
              .Append(' it != additional_properties.end(); ++it) {')
              .Append('value->SetWithoutPathExpansion(it->first, %s);' %
                      self._CreateValueFromType(
                          type_.additional_properties,
                          '%sit->second' % ('*' if needs_unwrap else '')))
              .Eblock('}')
            )

    return (c.Append()
             .Append('return value.Pass();')
             .Eblock('}'))
def _GenerateChoiceTypeToValue(self, cpp_namespace, type_):
    """Generates a function that serializes a choice-representing type
    into a base::Value.

    Exactly one choice member must be set; the generated code DCHECKs
    both that no two choices are set and that at least one is.
    """
    c = Code()
    c.Sblock('scoped_ptr<base::Value> %s::ToValue() const {' % cpp_namespace)
    # (stray trailing Python semicolon removed from the line below)
    c.Append('scoped_ptr<base::Value> result;')
    for choice in type_.choices:
        choice_var = 'as_%s' % choice.unix_name
        (c.Sblock('if (%s) {' % choice_var)
          .Append('DCHECK(!result) << "Cannot set multiple choices for %s";' %
                  type_.unix_name)
          .Append('result.reset(%s);' %
                  self._CreateValueFromType(choice, '*%s' % choice_var))
          .Eblock('}')
        )
    (c.Append('DCHECK(result) << "Must set at least one choice for %s";' %
              type_.unix_name)
      .Append('return result.Pass();')
      .Eblock('}')
    )
    return c
def _GenerateFunction(self, function):
    """Generates the definitions for function structs (Params/Results)."""
    c = Code()

    # TODO(kalman): use function.unix_name not Classname.
    function_namespace = cpp_util.Classname(function.name)
    # Windows has a #define for SendMessage, so to avoid any issues, we need
    # to not use the name.  (This was previously a misplaced bare string
    # literal acting as a no-op statement; converted to a real comment.)
    if function_namespace == 'SendMessage':
        function_namespace = 'PassMessage'
    (c.Append('namespace %s {' % function_namespace)
      .Append()
    )

    # Params::Populate function
    if function.params:
        c.Concat(self._GeneratePropertyFunctions('Params', function.params))
        (c.Append('Params::Params() {}')
          .Append('Params::~Params() {}')
          .Append()
          .Cblock(self._GenerateFunctionParamsCreate(function))
        )

    # Results::Create function
    if function.callback:
        c.Concat(self._GenerateCreateCallbackArguments('Results',
                                                       function.callback))

    c.Append('} // namespace %s' % function_namespace)
    return c
def _GenerateEvent(self, event):
    """Generates the argument-creation helpers for an event, wrapped in a
    namespace named after the event."""
    # TODO(kalman): use event.unix_name not Classname.
    namespace_name = cpp_util.Classname(event.name)
    code = Code()
    code.Append('namespace %s {' % namespace_name)
    code.Append()
    code.Cblock(self._GenerateCreateCallbackArguments(None, event))
    code.Append('} // namespace %s' % namespace_name)
    return code
def _CreateValueFromType(self, type_, var, is_ptr=False):
    """Creates a base::Value given a type. Generated code passes ownership
    to caller.

    var: variable or variable*

    E.g for std::string, generate base::Value::CreateStringValue(var)
    """
    underlying_type = self._type_helper.FollowRef(type_)
    if (underlying_type.property_type == PropertyType.CHOICES or
        underlying_type.property_type == PropertyType.OBJECT):
        # Generated types serialize themselves via their own ToValue().
        if is_ptr:
            return '(%s)->ToValue().release()' % var
        else:
            return '(%s).ToValue().release()' % var
    elif (underlying_type.property_type == PropertyType.ANY or
          underlying_type.property_type == PropertyType.FUNCTION):
        # ANY/FUNCTION members are stored as base::Values; deep-copy them.
        if is_ptr:
            vardot = '(%s)->' % var
        else:
            vardot = '(%s).' % var
        return '%sDeepCopy()' % vardot
    elif underlying_type.property_type == PropertyType.ENUM:
        return 'base::Value::CreateStringValue(ToString(%s))' % var
    elif underlying_type.property_type == PropertyType.BINARY:
        if is_ptr:
            vardot = var + '->'
        else:
            vardot = var + '.'
        return ('base::BinaryValue::CreateWithCopiedBuffer(%sdata(), %ssize())' %
                (vardot, vardot))
    elif underlying_type.property_type == PropertyType.ARRAY:
        # Arrays delegate to the shared util_cc_helper conversion.
        return '%s.release()' % self._util_cc_helper.CreateValueFromArray(
            underlying_type,
            var,
            is_ptr)
    elif underlying_type.property_type.is_fundamental:
        if is_ptr:
            var = '*%s' % var
        if underlying_type.property_type == PropertyType.STRING:
            return 'new base::StringValue(%s)' % var
        else:
            return 'new base::FundamentalValue(%s)' % var
    else:
        raise NotImplementedError('Conversion of %s to base::Value not '
                                  'implemented' % repr(type_.type_))
def _GenerateParamsCheck(self, function, var):
    """Generates a check for the correct number of arguments when creating
    Params.

    Emits a size guard on the args list |var|: an exact-count check when
    all params are required, an upper bound when none are, and a range
    check otherwise.
    """
    c = Code()
    num_required = 0
    for param in function.params:
        if not param.optional:
            num_required += 1
    if num_required == len(function.params):
        c.Append('if (%(var)s.GetSize() != %(total)d)')
    elif not num_required:
        c.Append('if (%(var)s.GetSize() > %(total)d)')
    else:
        c.Append('if (%(var)s.GetSize() < %(required)d'
                 ' || %(var)s.GetSize() > %(total)d)')
    c.Append(' return scoped_ptr<Params>();')
    c.Substitute({
        'var': var,
        'required': num_required,
        'total': len(function.params),
    })
    return c
def _GenerateFunctionParamsCreate(self, function):
    """Generate function to create an instance of Params. The generated
    function takes a base::ListValue of arguments.

    E.g for function "Bar", generate Bar::Params::Create()
    """
    c = Code()
    (c.Append('// static')
      .Sblock('scoped_ptr<Params> '
              'Params::Create(const base::ListValue& args) {')
      .Concat(self._GenerateParamsCheck(function, 'args'))
      .Append('scoped_ptr<Params> params(new Params());')
    )
    # Reset all params to defaults before populating any of them.
    for param in function.params:
        c.Concat(self._InitializePropertyToDefault(param, 'params'))

    for i, param in enumerate(function.params):
        # Any failure will cause this function to return. If any argument is
        # incorrect or missing, those following it are not processed. Note that
        # for optional arguments, we allow missing arguments and proceed because
        # there may be other arguments following it.
        failure_value = 'scoped_ptr<Params>()'
        c.Append()
        value_var = param.unix_name + '_value'
        (c.Append('const base::Value* %(value_var)s = NULL;')
          .Append('if (args.Get(%(i)s, &%(value_var)s) &&')
          .Sblock(' !%(value_var)s->IsType(base::Value::TYPE_NULL)) {')
          .Concat(self._GeneratePopulatePropertyFromValue(
              param, value_var, 'params', failure_value))
          .Eblock('}')
        )
        # Required arguments that are missing/null abort Params creation.
        if not param.optional:
            (c.Sblock('else {')
              .Append('return %s;' % failure_value)
              .Eblock('}')
            )
        c.Substitute({'value_var': value_var, 'i': i})
    (c.Append()
      .Append('return params.Pass();')
      .Eblock('}')
      .Append()
    )
    return c
def _GeneratePopulatePropertyFromValue(self,
                                       prop,
                                       src_var,
                                       dst_class_var,
                                       failure_value):
    """Generates code to populate property |prop| of |dst_class_var| (a
    pointer) from a Value*. See |_GeneratePopulateVariableFromValue| for
    semantics.
    """
    dst_member = '%s->%s' % (dst_class_var, prop.unix_name)
    return self._GeneratePopulateVariableFromValue(prop.type_,
                                                   src_var,
                                                   dst_member,
                                                   failure_value,
                                                   is_ptr=prop.optional)
def _GeneratePopulateVariableFromValue(self,
                                       type_,
                                       src_var,
                                       dst_var,
                                       failure_value,
                                       is_ptr=False):
    """Generates code to populate a variable |dst_var| of type |type_| from a
    Value* at |src_var|. The Value* is assumed to be non-NULL. In the generated
    code, if |dst_var| fails to be populated then Populate will return
    |failure_value|.

    Dispatches on the dereferenced property type; |is_ptr| selects the
    scoped_ptr-wrapped form of the destination where applicable.
    """
    c = Code()
    c.Sblock('{')

    underlying_type = self._type_helper.FollowRef(type_)

    if underlying_type.property_type.is_fundamental:
        if is_ptr:
            # Parse into a temporary, then heap-allocate into the scoped_ptr.
            (c.Append('%(cpp_type)s temp;')
              .Append('if (!%s)' % cpp_util.GetAsFundamentalValue(
                  self._type_helper.FollowRef(type_), src_var, '&temp'))
              .Append(' return %(failure_value)s;')
              .Append('%(dst_var)s.reset(new %(cpp_type)s(temp));')
            )
        else:
            (c.Append('if (!%s)' % cpp_util.GetAsFundamentalValue(
                  self._type_helper.FollowRef(type_),
                  src_var,
                  '&%s' % dst_var))
              .Append(' return %(failure_value)s;')
            )
    elif underlying_type.property_type == PropertyType.OBJECT:
        if is_ptr:
            (c.Append('const base::DictionaryValue* dictionary = NULL;')
              .Append('if (!%(src_var)s->GetAsDictionary(&dictionary))')
              .Append(' return %(failure_value)s;')
              .Append('scoped_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
              .Append('if (!%(cpp_type)s::Populate(*dictionary, temp.get()))')
              .Append(' return %(failure_value)s;')
              .Append('%(dst_var)s = temp.Pass();')
            )
        else:
            (c.Append('const base::DictionaryValue* dictionary = NULL;')
              .Append('if (!%(src_var)s->GetAsDictionary(&dictionary))')
              .Append(' return %(failure_value)s;')
              .Append('if (!%(cpp_type)s::Populate(*dictionary, &%(dst_var)s))')
              .Append(' return %(failure_value)s;')
            )
    elif underlying_type.property_type == PropertyType.FUNCTION:
        # Functions carry no data; represent them by an empty dictionary.
        if is_ptr:
            c.Append('%(dst_var)s.reset(new base::DictionaryValue());')
    elif underlying_type.property_type == PropertyType.ANY:
        c.Append('%(dst_var)s.reset(%(src_var)s->DeepCopy());')
    elif underlying_type.property_type == PropertyType.ARRAY:
        # util_cc_helper deals with optional and required arrays
        (c.Append('const base::ListValue* list = NULL;')
          .Append('if (!%(src_var)s->GetAsList(&list))')
          .Append(' return %(failure_value)s;'))
        item_type = underlying_type.item_type
        if item_type.property_type == PropertyType.ENUM:
            # Enum arrays need string->enum conversion per element.
            c.Concat(self._GenerateListValueToEnumArrayConversion(
                item_type,
                'list',
                dst_var,
                failure_value,
                is_ptr=is_ptr))
        else:
            (c.Append('if (!%s)' % self._util_cc_helper.PopulateArrayFromList(
                  underlying_type,
                  'list',
                  dst_var,
                  is_ptr))
              .Append(' return %(failure_value)s;')
            )
    elif underlying_type.property_type == PropertyType.CHOICES:
        if is_ptr:
            (c.Append('scoped_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
              .Append('if (!%(cpp_type)s::Populate(*%(src_var)s, temp.get()))')
              .Append(' return %(failure_value)s;')
              .Append('%(dst_var)s = temp.Pass();')
            )
        else:
            (c.Append('if (!%(cpp_type)s::Populate(*%(src_var)s, &%(dst_var)s))')
              .Append(' return %(failure_value)s;')
            )
    elif underlying_type.property_type == PropertyType.ENUM:
        c.Concat(self._GenerateStringToEnumConversion(type_,
                                                      src_var,
                                                      dst_var,
                                                      failure_value))
    elif underlying_type.property_type == PropertyType.BINARY:
        (c.Append('if (!%(src_var)s->IsType(%(value_type)s))')
          .Append(' return %(failure_value)s;')
          .Append('const base::BinaryValue* binary_value =')
          .Append(' static_cast<const base::BinaryValue*>(%(src_var)s);')
        )
        if is_ptr:
            (c.Append('%(dst_var)s.reset(')
              .Append(' new std::string(binary_value->GetBuffer(),')
              .Append(' binary_value->GetSize()));')
            )
        else:
            (c.Append('%(dst_var)s.assign(binary_value->GetBuffer(),')
              .Append(' binary_value->GetSize());')
            )
    else:
        raise NotImplementedError(type_)

    sub = {
        'cpp_type': self._type_helper.GetCppType(type_),
        'src_var': src_var,
        'dst_var': dst_var,
        'failure_value': failure_value,
    }

    # ANY/CHOICES have no single base::Value type to check against.
    if underlying_type.property_type not in (PropertyType.ANY,
                                             PropertyType.CHOICES):
        sub['value_type'] = cpp_util.GetValueType(underlying_type)

    return c.Eblock('}').Substitute(sub)
def _GenerateListValueToEnumArrayConversion(self,
                                            item_type,
                                            src_var,
                                            dst_var,
                                            failure_value,
                                            is_ptr=False):
    """Returns Code that converts a ListValue of string constants from
    |src_var| into an array of enums of |type_| in |dst_var|. On failure,
    returns |failure_value|.

    When |is_ptr| the destination is a scoped_ptr<std::vector<...>> which
    is reset to a fresh vector before being filled.
    """
    c = Code()
    accessor = '.'
    if is_ptr:
        accessor = '->'
        cpp_type = self._type_helper.GetCppType(item_type, is_in_container=True)
        c.Append('%s.reset(new std::vector<%s>);' %
                 (dst_var, cpp_util.PadForGenerics(cpp_type)))
    (c.Sblock('for (base::ListValue::const_iterator it = %s->begin(); '
              'it != %s->end(); ++it) {' % (src_var, src_var))
      .Append('%s tmp;' % self._type_helper.GetCppType(item_type))
      .Concat(self._GenerateStringToEnumConversion(item_type,
                                                   '(*it)',
                                                   'tmp',
                                                   failure_value))
      .Append('%s%spush_back(tmp);' % (dst_var, accessor))
      .Eblock('}')
    )
    return c
def _GenerateStringToEnumConversion(self,
                                    type_,
                                    src_var,
                                    dst_var,
                                    failure_value):
    """Returns Code that converts a string type in |src_var| to an enum with
    type |type_| in |dst_var|. In the generated code, if |src_var| is not
    a valid enum name then the function will return |failure_value|.

    The generated code calls the Parse<Enum>() function emitted by
    _GenerateEnumFromString and treats the NONE value as a parse failure.
    """
    c = Code()
    enum_as_string = '%s_as_string' % type_.unix_name
    (c.Append('std::string %s;' % enum_as_string)
      .Append('if (!%s->GetAsString(&%s))' % (src_var, enum_as_string))
      .Append(' return %s;' % failure_value)
      .Append('%s = Parse%s(%s);' % (dst_var,
                                     self._type_helper.GetCppType(type_),
                                     enum_as_string))
      .Append('if (%s == %s)' % (dst_var,
                                 self._type_helper.GetEnumNoneValue(type_)))
      .Append(' return %s;' % failure_value)
    )
    return c
def _GeneratePropertyFunctions(self, namespace, params):
    """Generates the member functions for a list of parameters."""
    param_types = (param.type_ for param in params)
    return self._GenerateTypes(namespace, param_types)
def _GenerateTypes(self, namespace, types):
    """Generates the member functions for a list of types."""
    code = Code()
    for current_type in types:
        code.Cblock(self._GenerateType(namespace, current_type))
    return code
def _GenerateEnumToString(self, cpp_namespace, type_):
"""Generates ToString() which gets the string representation of an enum.
"""
c = Code()
classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
if cpp_namespace is not None:
c.Append('// static')
maybe_namespace = '' if cpp_namespace is None else '%s::' % cpp_namespace
c.Sblock('std::string %sToString(%s enum_param) {' %
(maybe_namespace, classname))
c.Sblock('switch (enum_param) {')
for enum_value in self._type_helper.FollowRef(type_).enum_values:
(c.Append('case %s: ' % self._type_helper.GetEnumValue(type_, enum_value))
.Append(' return "%s";' % enum_value))
(c.Append('case %s:' % self._type_helper.GetEnumNoneValue(type_))
.Append(' return "";')
.Eblock('}')
.Append('NOTREACHED();')
.Append('return "";')
.Eblock('}')
)
return c
def _GenerateEnumFromString(self, cpp_namespace, type_):
"""Generates FromClassNameString() which gets an enum from its string
representation.
"""
c = Code()
classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
if cpp_namespace is not None:
c.Append('// static')
maybe_namespace = '' if cpp_namespace is None else '%s::' % cpp_namespace
c.Sblock('%s%s %sParse%s(const std::string& enum_string) {' %
(maybe_namespace, classname, maybe_namespace, classname))
for i, enum_value in enumerate(
self._type_helper.FollowRef(type_).enum_values):
# This is broken up into all ifs with no else ifs because we get
# "fatal error C1061: compiler limit : blocks nested too deeply"
# on Windows.
(c.Append('if (enum_string == "%s")' % enum_value)
.Append(' return %s;' %
self._type_helper.GetEnumValue(type_, enum_value)))
(c.Append('return %s;' % self._type_helper.GetEnumNoneValue(type_))
.Eblock('}')
)
return c
  def _GenerateCreateCallbackArguments(self, function_scope, callback):
    """Generate all functions to create Value parameters for a callback.

    E.g for function "Bar", generate Bar::Results::Create
    E.g for event "Baz", generate Baz::Create

    function_scope: the function scope path, e.g. Foo::Bar for the function
        Foo::Bar::Baz(). May be None if there is no function scope.
    callback: the Function object we are creating callback arguments for.
    """
    c = Code()
    params = callback.params
    # Emit the member functions for each parameter type before the Create()
    # definition that uses them.
    c.Concat(self._GeneratePropertyFunctions(function_scope, params))
    # The %(...)s placeholders below are resolved only by the Substitute()
    # call at the end, so declaration_list can be assembled afterwards.
    (c.Sblock('scoped_ptr<base::ListValue> %(function_scope)s'
              'Create(%(declaration_list)s) {')
      .Append('scoped_ptr<base::ListValue> create_results('
              'new base::ListValue());')
    )
    declaration_list = []
    # One pass per parameter: collect its C++ declaration for the signature
    # and append one create_results->Append(...) line for the body.
    for param in params:
      declaration_list.append(cpp_util.GetParameterDeclaration(
          param, self._type_helper.GetCppType(param.type_)))
      c.Append('create_results->Append(%s);' %
               self._CreateValueFromType(param.type_, param.unix_name))
    c.Append('return create_results.Pass();')
    c.Eblock('}')
    c.Substitute({
        'function_scope': ('%s::' % function_scope) if function_scope else '',
        'declaration_list': ', '.join(declaration_list),
        'param_names': ', '.join(param.unix_name for param in params)
    })
    return c
def _InitializePropertyToDefault(self, prop, dst):
"""Initialize a model.Property to its default value inside an object.
E.g for optional enum "state", generate dst->state = STATE_NONE;
dst: Type*
"""
c = Code()
underlying_type = self._type_helper.FollowRef(prop.type_)
if (underlying_type.property_type == PropertyType.ENUM and
prop.optional):
c.Append('%s->%s = %s;' % (
dst,
prop.unix_name,
self._type_helper.GetEnumNoneValue(prop.type_)))
return c
|
|
# Copyright (c) 2010 Cloud.com, Inc
# Copyright (c) 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A Hyper-V Nova Compute driver.
"""
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.virt import driver
from nova.virt.hyperv import hostops
from nova.virt.hyperv import livemigrationops
from nova.virt.hyperv import migrationops
from nova.virt.hyperv import rdpconsoleops
from nova.virt.hyperv import snapshotops
from nova.virt.hyperv import vmops
from nova.virt.hyperv import volumeops
LOG = logging.getLogger(__name__)
class HyperVDriver(driver.ComputeDriver):
    """Nova compute driver for Microsoft Hyper-V.

    A thin adapter between the Nova driver interface and the Hyper-V
    operation helpers (vmops, volumeops, hostops, snapshotops,
    livemigrationops, migrationops, rdpconsoleops); every call is
    delegated to one of those helpers.
    """

    def __init__(self, virtapi):
        super(HyperVDriver, self).__init__(virtapi)

        self._hostops = hostops.HostOps()
        self._volumeops = volumeops.VolumeOps()
        self._vmops = vmops.VMOps()
        self._snapshotops = snapshotops.SnapshotOps()
        self._livemigrationops = livemigrationops.LiveMigrationOps()
        self._migrationops = migrationops.MigrationOps()
        self._rdpconsoleops = rdpconsoleops.RDPConsoleOps()

    def init_host(self, host):
        """No host-side initialization is required for Hyper-V."""
        pass

    def list_instances(self):
        return self._vmops.list_instances()

    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info=None, block_device_info=None):
        self._vmops.spawn(context, instance, image_meta, injected_files,
                          admin_password, network_info, block_device_info)

    def reboot(self, context, instance, network_info, reboot_type,
               block_device_info=None, bad_volumes_callback=None):
        # block_device_info and bad_volumes_callback are not used by the
        # Hyper-V reboot implementation.
        self._vmops.reboot(instance, network_info, reboot_type)

    def destroy(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True):
        self._vmops.destroy(instance, network_info, block_device_info,
                            destroy_disks)

    def cleanup(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True):
        """Cleanup after instance being destroyed by Hypervisor."""
        pass

    def get_info(self, instance):
        return self._vmops.get_info(instance)

    def attach_volume(self, context, connection_info, instance, mountpoint,
                      disk_bus=None, device_type=None, encryption=None):
        return self._volumeops.attach_volume(connection_info,
                                             instance['name'])

    def detach_volume(self, connection_info, instance, mountpoint,
                      encryption=None):
        return self._volumeops.detach_volume(connection_info,
                                             instance['name'])

    def get_volume_connector(self, instance):
        return self._volumeops.get_volume_connector(instance)

    def get_available_resource(self, nodename):
        return self._hostops.get_available_resource()

    def get_host_stats(self, refresh=False):
        return self._hostops.get_host_stats(refresh)

    def host_power_action(self, host, action):
        return self._hostops.host_power_action(host, action)

    def snapshot(self, context, instance, name, update_task_state):
        self._snapshotops.snapshot(context, instance, name, update_task_state)

    def pause(self, instance):
        self._vmops.pause(instance)

    def unpause(self, instance):
        self._vmops.unpause(instance)

    def suspend(self, instance):
        self._vmops.suspend(instance)

    def resume(self, context, instance, network_info, block_device_info=None):
        self._vmops.resume(instance)

    def power_off(self, instance):
        self._vmops.power_off(instance)

    def power_on(self, context, instance, network_info,
                 block_device_info=None):
        self._vmops.power_on(instance)

    def live_migration(self, context, instance_ref, dest, post_method,
                       recover_method, block_migration=False,
                       migrate_data=None):
        self._livemigrationops.live_migration(context, instance_ref, dest,
                                              post_method, recover_method,
                                              block_migration, migrate_data)

    def rollback_live_migration_at_destination(self, context, instance,
                                               network_info,
                                               block_device_info):
        # Rolling back at the destination simply destroys the half-migrated
        # instance there.
        self.destroy(context, instance, network_info, block_device_info)

    def pre_live_migration(self, context, instance, block_device_info,
                           network_info, disk, migrate_data=None):
        self._livemigrationops.pre_live_migration(context, instance,
                                                  block_device_info,
                                                  network_info)

    def post_live_migration_at_destination(self, ctxt, instance_ref,
                                           network_info,
                                           block_migr=False,
                                           block_device_info=None):
        self._livemigrationops.post_live_migration_at_destination(
            ctxt,
            instance_ref,
            network_info,
            block_migr)

    def check_can_live_migrate_destination(self, ctxt, instance_ref,
                                           src_compute_info, dst_compute_info,
                                           block_migration=False,
                                           disk_over_commit=False):
        return self._livemigrationops.check_can_live_migrate_destination(
            ctxt, instance_ref, src_compute_info, dst_compute_info,
            block_migration, disk_over_commit)

    def check_can_live_migrate_destination_cleanup(self, ctxt,
                                                   dest_check_data):
        self._livemigrationops.check_can_live_migrate_destination_cleanup(
            ctxt, dest_check_data)

    def check_can_live_migrate_source(self, ctxt, instance_ref,
                                      dest_check_data):
        return self._livemigrationops.check_can_live_migrate_source(
            ctxt, instance_ref, dest_check_data)

    def plug_vifs(self, instance, network_info):
        """Plug VIFs into networks."""
        msg = _("VIF plugging is not supported by the Hyper-V driver.")
        raise NotImplementedError(msg)

    def unplug_vifs(self, instance, network_info):
        """Unplug VIFs from networks."""
        msg = _("VIF unplugging is not supported by the Hyper-V driver.")
        raise NotImplementedError(msg)

    def ensure_filtering_rules_for_instance(self, instance_ref, network_info):
        LOG.debug(_("ensure_filtering_rules_for_instance called"),
                  instance=instance_ref)

    def unfilter_instance(self, instance, network_info):
        LOG.debug(_("unfilter_instance called"), instance=instance)

    def migrate_disk_and_power_off(self, context, instance, dest,
                                   flavor, network_info,
                                   block_device_info=None):
        return self._migrationops.migrate_disk_and_power_off(context,
                                                             instance, dest,
                                                             flavor,
                                                             network_info,
                                                             block_device_info)

    def confirm_migration(self, migration, instance, network_info):
        self._migrationops.confirm_migration(migration, instance, network_info)

    def finish_revert_migration(self, context, instance, network_info,
                                block_device_info=None, power_on=True):
        self._migrationops.finish_revert_migration(context, instance,
                                                   network_info,
                                                   block_device_info, power_on)

    def rename_virtualmachine(self, context, instance):
        # Rename is intentionally not implemented; the message is wrapped
        # with _() for consistency with every other log call in this class.
        LOG.info(_("Doesn't actually call the rename method"))

    def finish_migration(self, context, migration, instance, disk_info,
                         network_info, image_meta, resize_instance=False,
                         block_device_info=None, power_on=True):
        self._migrationops.finish_migration(context, migration, instance,
                                            disk_info, network_info,
                                            image_meta, resize_instance,
                                            block_device_info, power_on)

    def get_host_ip_addr(self):
        return self._hostops.get_host_ip_addr()

    def get_rdp_console(self, context, instance):
        return self._rdpconsoleops.get_rdp_console(instance)
|
|
"""
Support for repeating alerts when conditions are met.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/alert/
"""
import asyncio
from datetime import datetime, timedelta
import logging
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_MESSAGE, DOMAIN as DOMAIN_NOTIFY)
from homeassistant.const import (
CONF_ENTITY_ID, STATE_IDLE, CONF_NAME, CONF_STATE, STATE_ON, STATE_OFF,
SERVICE_TURN_ON, SERVICE_TURN_OFF, SERVICE_TOGGLE, ATTR_ENTITY_ID)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers import service, event
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

DOMAIN = 'alert'
# Alerts are exposed as alert.<object_id> entities.
ENTITY_ID_FORMAT = DOMAIN + '.{}'

# Per-alert configuration keys.
CONF_CAN_ACK = 'can_acknowledge'
CONF_NOTIFIERS = 'notifiers'
CONF_REPEAT = 'repeat'
CONF_SKIP_FIRST = 'skip_first'
CONF_ALERT_MESSAGE = 'message'
CONF_DONE_MESSAGE = 'done_message'

DEFAULT_CAN_ACK = True
DEFAULT_SKIP_FIRST = False

# Schema for one alert entry: which entity/state to watch, how often to
# repeat the notification (in minutes), and which notify services to use.
ALERT_SCHEMA = vol.Schema({
    vol.Required(CONF_NAME): cv.string,
    vol.Required(CONF_ENTITY_ID): cv.entity_id,
    vol.Required(CONF_STATE, default=STATE_ON): cv.string,
    vol.Required(CONF_REPEAT): vol.All(cv.ensure_list, [vol.Coerce(float)]),
    vol.Required(CONF_CAN_ACK, default=DEFAULT_CAN_ACK): cv.boolean,
    vol.Required(CONF_SKIP_FIRST, default=DEFAULT_SKIP_FIRST): cv.boolean,
    vol.Optional(CONF_ALERT_MESSAGE): cv.template,
    vol.Optional(CONF_DONE_MESSAGE): cv.template,
    vol.Required(CONF_NOTIFIERS): cv.ensure_list})

# Top-level config: a mapping of slug -> alert entry under the 'alert' domain.
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        cv.slug: ALERT_SCHEMA,
    }),
}, extra=vol.ALLOW_EXTRA)

# Schema shared by the turn_on/turn_off/toggle services.
ALERT_SERVICE_SCHEMA = vol.Schema({
    vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
})
def is_on(hass, entity_id):
    """Return if the alert is firing and not acknowledged."""
    firing_unacked = hass.states.is_state(entity_id, STATE_ON)
    return firing_unacked
async def async_setup(hass, config):
    """Set up the Alert component.

    Builds one Alert entity per configured entry, registers the
    turn_on/turn_off/toggle services, and pushes the initial state of
    every alert into the state machine. Returns False when no alerts
    are configured.
    """
    entities = []

    for object_id, cfg in config[DOMAIN].items():
        if not cfg:
            cfg = {}

        name = cfg.get(CONF_NAME)
        watched_entity_id = cfg.get(CONF_ENTITY_ID)
        alert_state = cfg.get(CONF_STATE)
        repeat = cfg.get(CONF_REPEAT)
        skip_first = cfg.get(CONF_SKIP_FIRST)
        message_template = cfg.get(CONF_ALERT_MESSAGE)
        done_message_template = cfg.get(CONF_DONE_MESSAGE)
        notifiers = cfg.get(CONF_NOTIFIERS)
        can_ack = cfg.get(CONF_CAN_ACK)

        entities.append(Alert(hass, object_id, name,
                              watched_entity_id, alert_state, repeat,
                              skip_first, message_template,
                              done_message_template, notifiers,
                              can_ack))

    if not entities:
        return False

    async def async_handle_alert_service(service_call):
        """Handle calls to alert services."""
        # Resolve the target entity_ids to the matching Alert objects.
        alert_ids = service.extract_entity_ids(hass, service_call)

        for alert_id in alert_ids:
            for alert in entities:
                if alert.entity_id != alert_id:
                    continue

                alert.async_set_context(service_call.context)
                # For alerts, turn_on == reset/unacknowledge and
                # turn_off == acknowledge (see Alert.async_turn_on/off).
                if service_call.service == SERVICE_TURN_ON:
                    await alert.async_turn_on()
                elif service_call.service == SERVICE_TOGGLE:
                    await alert.async_toggle()
                else:
                    await alert.async_turn_off()

    # Setup service calls
    hass.services.async_register(
        DOMAIN, SERVICE_TURN_OFF, async_handle_alert_service,
        schema=ALERT_SERVICE_SCHEMA)
    hass.services.async_register(
        DOMAIN, SERVICE_TURN_ON, async_handle_alert_service,
        schema=ALERT_SERVICE_SCHEMA)
    hass.services.async_register(
        DOMAIN, SERVICE_TOGGLE, async_handle_alert_service,
        schema=ALERT_SERVICE_SCHEMA)

    # Publish the initial (idle) state of every alert.
    tasks = [alert.async_update_ha_state() for alert in entities]
    if tasks:
        await asyncio.wait(tasks, loop=hass.loop)

    return True
class Alert(ToggleEntity):
    """Representation of an alert.

    Watches one entity; while that entity is in the configured state the
    alert "fires", repeatedly sending notifications until the state goes
    away or the alert is acknowledged (turn_off).
    """

    def __init__(self, hass, entity_id, name, watched_entity_id,
                 state, repeat, skip_first, message_template,
                 done_message_template, notifiers, can_ack):
        """Initialize the alert."""
        self.hass = hass
        self._name = name
        self._alert_state = state
        self._skip_first = skip_first

        self._message_template = message_template
        if self._message_template is not None:
            self._message_template.hass = hass

        self._done_message_template = done_message_template
        if self._done_message_template is not None:
            self._done_message_template.hass = hass

        self._notifiers = notifiers
        self._can_ack = can_ack

        # Repeat intervals are configured in minutes.
        self._delay = [timedelta(minutes=val) for val in repeat]
        self._next_delay = 0

        self._firing = False
        self._ack = False
        self._cancel = None
        self._send_done_message = False
        self.entity_id = ENTITY_ID_FORMAT.format(entity_id)

        event.async_track_state_change(
            hass, watched_entity_id, self.watched_entity_change)

    @property
    def name(self):
        """Return the name of the alert."""
        return self._name

    @property
    def should_poll(self):
        """HASS need not poll these entities."""
        return False

    @property
    def state(self):
        """Return the alert status."""
        if self._firing:
            if self._ack:
                return STATE_OFF
            return STATE_ON
        return STATE_IDLE

    @property
    def hidden(self):
        """Hide the alert when it is not firing."""
        return not self._can_ack or not self._firing

    async def watched_entity_change(self, entity, from_state, to_state):
        """Determine if the alert should start or stop."""
        _LOGGER.debug("Watched entity (%s) has changed", entity)
        # BUGFIX: the watched entity can be removed from the state machine,
        # in which case to_state is None; dereferencing it would raise.
        if to_state is None:
            return
        if to_state.state == self._alert_state and not self._firing:
            await self.begin_alerting()
        if to_state.state != self._alert_state and self._firing:
            await self.end_alerting()

    async def begin_alerting(self):
        """Begin the alert procedures."""
        _LOGGER.debug("Beginning Alert: %s", self._name)
        self._ack = False
        self._firing = True
        self._next_delay = 0

        # skip_first suppresses the immediate notification; the first one
        # is then sent after the first repeat delay.
        if not self._skip_first:
            await self._notify()
        else:
            await self._schedule_notify()

        self.async_schedule_update_ha_state()

    async def end_alerting(self):
        """End the alert procedures."""
        _LOGGER.debug("Ending Alert: %s", self._name)
        self._cancel()
        self._ack = False
        self._firing = False
        if self._send_done_message:
            await self._notify_done_message()
        self.async_schedule_update_ha_state()

    async def _schedule_notify(self):
        """Schedule a notification."""
        delay = self._delay[self._next_delay]
        next_msg = datetime.now() + delay
        self._cancel = \
            event.async_track_point_in_time(self.hass, self._notify, next_msg)
        # Stay on the last configured delay once the list is exhausted.
        self._next_delay = min(self._next_delay + 1, len(self._delay) - 1)

    async def _notify(self, *args):
        """Send the alert notification."""
        if not self._firing:
            return

        # Acknowledged alerts keep rescheduling but stop notifying.
        if not self._ack:
            _LOGGER.info("Alerting: %s", self._name)
            self._send_done_message = True

            if self._message_template is not None:
                message = self._message_template.async_render()
            else:
                message = self._name

            await self._send_notification_message(message)
        await self._schedule_notify()

    async def _notify_done_message(self, *args):
        """Send notification of complete alert."""
        _LOGGER.info("Alerting: %s", self._done_message_template)
        self._send_done_message = False

        if self._done_message_template is None:
            return

        message = self._done_message_template.async_render()
        await self._send_notification_message(message)

    async def _send_notification_message(self, message):
        # Fan the message out to every configured notify service.
        for target in self._notifiers:
            await self.hass.services.async_call(
                DOMAIN_NOTIFY, target, {ATTR_MESSAGE: message})

    async def async_turn_on(self, **kwargs):
        """Async Unacknowledge alert."""
        _LOGGER.debug("Reset Alert: %s", self._name)
        self._ack = False
        await self.async_update_ha_state()

    async def async_turn_off(self, **kwargs):
        """Async Acknowledge alert."""
        _LOGGER.debug("Acknowledged Alert: %s", self._name)
        self._ack = True
        await self.async_update_ha_state()

    async def async_toggle(self, **kwargs):
        """Async toggle alert."""
        if self._ack:
            return await self.async_turn_on()
        return await self.async_turn_off()
|
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
from django.contrib.sites.models import Site
from django.contrib.auth.models import AnonymousUser, Group
from cms.api import create_page
from cms.menu import get_visible_pages
from cms.models import Page
from cms.models import ACCESS_DESCENDANTS, ACCESS_CHILDREN, ACCESS_PAGE
from cms.models import ACCESS_PAGE_AND_CHILDREN, ACCESS_PAGE_AND_DESCENDANTS
from cms.models.permissionmodels import GlobalPagePermission, PagePermission
from cms.test_utils.testcases import SettingsOverrideTestCase
from cms.utils.compat.dj import get_user_model, user_related_name
from menus.menu_pool import menu_pool
# Public test cases of this module; the shared ViewPermissionTests base
# class is deliberately not exported.
__all__ = [
    'ViewPermissionTreeBugTests',
    'ViewPermissionComplexMenuAllNodesTests'
]
class ViewPermissionTests(SettingsOverrideTestCase):
    """
    Test various combinations of view permissions pages and menus
    Focus on the different grant types and inheritance options of grant on

    Given the tree:

        |- Page_a
        |- Page_b
        | |- Page_b_a
        | |- Page_b_b
        | | |- Page_b_b_a
        | | | |- Page_b_b_a_a
        | | |- Page_b_b_b
        | | |- Page_b_b_c
        | |- Page_b_c
        | |- Page_b_d
        | | |- Page_b_d_a
        | | |- Page_b_d_b
        | | |- Page_b_d_c
        |- Page_c
        | |- Page_c_a
        | |- Page_c_b
        |- Page_d
        | |- Page_d_a
        | |- Page_d_b
        | |- Page_d_c
    """
    # One group per grant_on ACCESS_* type exercised by the subclasses.
    GROUPNAME_1 = 'group_b_ACCESS_PAGE_AND_CHILDREN'
    GROUPNAME_2 = 'group_b_b_ACCESS_CHILDREN'
    GROUPNAME_3 = 'group_b_ACCESS_PAGE_AND_DESCENDANTS'
    GROUPNAME_4 = 'group_b_b_ACCESS_DESCENDANTS'
    GROUPNAME_5 = 'group_d_ACCESS_PAGE'

    def setUp(self):
        # An unsaved Site with pk=1 is enough for get_visible_pages().
        self.site = Site()
        self.site.pk = 1
        super(ViewPermissionTests, self).setUp()

    def tearDown(self):
        super(ViewPermissionTests, self).tearDown()

    def _setup_tree_pages(self):
        """Create the page tree from the class docstring and return the
        reloaded Page objects."""
        stdkwargs = {
            'template': 'nav_playground.html',
            'language': 'en',
            'published': True,
            'in_navigation': True,
        }
        page_a = create_page("page_a", **stdkwargs)  # first page slug is /
        page_b = create_page("page_b", **stdkwargs)
        page_c = create_page("page_c", **stdkwargs)
        page_d = create_page("page_d", **stdkwargs)

        page_b_a = create_page("page_b_a", parent=page_b, **stdkwargs)
        page_b_b = create_page("page_b_b", parent=page_b, **stdkwargs)
        page_b_b_a = create_page("page_b_b_a", parent=page_b_b, **stdkwargs)
        page_b_b_b = create_page("page_b_b_b", parent=page_b_b, **stdkwargs)
        page_b_b_c = create_page("page_b_b_c", parent=page_b_b, **stdkwargs)
        page_b_b_a_a = create_page("page_b_b_a_a", parent=page_b_b_a, **stdkwargs)

        page_b_c = create_page("page_b_c", parent=page_b, **stdkwargs)
        page_b_d = create_page("page_b_d", parent=page_b, **stdkwargs)
        page_b_d_a = create_page("page_b_d_a", parent=page_b_d, **stdkwargs)
        page_b_d_b = create_page("page_b_d_b", parent=page_b_d, **stdkwargs)
        page_b_d_c = create_page("page_b_d_c", parent=page_b_d, **stdkwargs)

        page_c_a = create_page("page_c_a", parent=page_c, **stdkwargs)
        page_c_b = create_page("page_c_b", parent=page_c, **stdkwargs)

        page_d_a = create_page("page_d_a", parent=page_d, **stdkwargs)
        page_d_b = create_page("page_d_b", parent=page_d, **stdkwargs)
        page_d_c = create_page("page_d_c", parent=page_d, **stdkwargs)
        page_d_d = create_page("page_d_d", parent=page_d, **stdkwargs)

        pages = [
            page_a,
            page_b,
            page_b_a,
            page_b_b,
            page_b_b_a,
            page_b_b_a_a,
            page_b_b_b,
            page_b_b_c,
            page_b_c,
            page_b_d,
            page_b_d_a,
            page_b_d_b,
            page_b_d_c,
            page_c,
            page_c_a,
            page_c_b,
            page_d,
            page_d_a,
            page_d_b,
            page_d_c,
            page_d_d,
        ]
        # Reload so tree attributes (lft/rght/...) reflect the final tree.
        return [page.reload() for page in pages]

    def _setup_user_groups(self):
        """
        Setup a group for every grant on ACCESS TYPE
        """
        userdata = [
            ('user_1', True, self.GROUPNAME_1),
            ('user_1_nostaff', False, self.GROUPNAME_1),
            ('user_2', True, self.GROUPNAME_2),
            ('user_2_nostaff', False, self.GROUPNAME_2),
            ('user_3', True, self.GROUPNAME_3),
            ('user_3_nostaff', False, self.GROUPNAME_3),
            ('user_4', True, self.GROUPNAME_4),
            ('user_4_nostaff', False, self.GROUPNAME_4),
            ('user_5', True, self.GROUPNAME_5),
            ('user_5_nostaff', False, self.GROUPNAME_5),
            ('user_staff', True, None),
        ]
        default_users_count = get_user_model().objects.all().count()
        for username, is_staff, groupname in userdata:
            user = self._create_user(username, is_staff)
            if groupname:
                group, _ = Group.objects.get_or_create(name=groupname)
                user_set = getattr(group, user_related_name)
                user_set.add(user)
                group.save()
        self.assertEqual(11, get_user_model().objects.all().count() - default_users_count)

    def _setup_view_restrictions(self):
        """
        Setup a view restriction with every type of the grant_on ACCESS_*
        """
        data = [("page_b", self.GROUPNAME_1, ACCESS_PAGE_AND_CHILDREN),
                ("page_b_b", self.GROUPNAME_2, ACCESS_CHILDREN),
                ("page_b", self.GROUPNAME_3, ACCESS_PAGE_AND_DESCENDANTS),
                ("page_b_b", self.GROUPNAME_4, ACCESS_DESCENDANTS),
                ("page_d", self.GROUPNAME_5, ACCESS_PAGE),
        ]
        for title, groupname, inherit in data:
            page = Page.objects.drafts().get(title_set__title=title)
            group = Group.objects.get(name__iexact=groupname)
            PagePermission.objects.create(can_view=True, group=group, page=page, grant_on=inherit)
        self.assertEqual(5, PagePermission.objects.all().count())
        self.assertEqual(0, GlobalPagePermission.objects.all().count())

    def assertPageFound(self, url, client=None):
        """Assert that GETting *url* yields a 200 response."""
        if not client:
            client = self.client
        response = client.get(url)
        self.assertEqual(response.status_code, 200)

    def assertPageNotFound(self, url, client=None):
        """Assert that GETting *url* yields a 404 response."""
        if not client:
            client = self.client
        response = client.get(url)
        self.assertEqual(response.status_code, 404)

    def assertViewAllowed(self, page, user):
        request = self.get_request(user, page)
        self.assertTrue(page.has_view_permission(request))

    def assertViewNotAllowed(self, page, user):
        request = self.get_request(user, page)
        self.assertFalse(page.has_view_permission(request))

    def _page_in_menu(self, page, user):
        """Return True when *page*'s URL occurs in the menu rendered for
        *user*. Shared by assertInMenu/assertNotInMenu."""
        request = self.get_request(user, page)
        nodes = menu_pool.get_nodes(request)
        target_url = page.get_absolute_url()
        return any(node.get_absolute_url() == target_url for node in nodes)

    def assertInMenu(self, page, user):
        self.assertTrue(self._page_in_menu(page, user))

    def assertNotInMenu(self, page, user):
        self.assertFalse(self._page_in_menu(page, user))

    def assertNodeMemberships(self, visible_page_ids, restricted_pages, public_page_ids):
        """
        test all visible page ids are either in_public and not in_restricted
        or not in_public and in_restricted
        """
        for page_id in visible_page_ids:
            in_restricted = page_id in restricted_pages
            in_public = page_id in public_page_ids
            # Exactly one of the two memberships must hold (XOR).
            self.assertTrue(in_public != in_restricted,
                            msg="page_id %s in_public: %s, in_restricted: %s" % (page_id, in_public, in_restricted))

    def assertGrantedVisibility(self, all_pages, expected_granted_pages, username=None):
        """
        helper function to check the expected_granted_pages are
        not in the restricted_pages list and
        all visible pages are in the expected_granted_pages
        """
        # log the user in if present
        user = None
        if username is not None:
            if get_user_model().USERNAME_FIELD == 'email':
                username = username + '@django-cms.org'

            query = dict()
            query[get_user_model().USERNAME_FIELD + '__iexact'] = username
            user = get_user_model().objects.get(**query)
        request = self.get_request(user)
        visible_page_ids = get_visible_pages(request, all_pages, self.site)
        public_page_ids = Page.objects.drafts().filter(title_set__title__in=expected_granted_pages).values_list('id',
                                                                                                                flat=True)
        self.assertEqual(len(visible_page_ids), len(expected_granted_pages))
        restricted_pages = Page.objects.public().exclude(title_set__title__in=expected_granted_pages).values_list('id',
                                                                                                                  flat=True)
        self.assertNodeMemberships(visible_page_ids, restricted_pages, public_page_ids)

    def get_request(self, user=None, page=None):
        # see tests/menu.py line 753
        # NOTE: this returns a dynamically-built class whose class
        # attributes act as the request attributes; callers only read them.
        path = "/"
        if page:
            path = page.get_absolute_url()
        attrs = {
            'user': user or AnonymousUser(),
            'REQUEST': {},
            'GET': {},
            'path': path,
            'session': {},
        }
        return type('Request', (object,), attrs)

    def get_url_dict(self, pages, language='en'):
        """Map absolute URL -> page for convenient lookups in the tests."""
        return dict((page.get_absolute_url(language=language), page) for page in pages)
class ViewPermissionComplexMenuAllNodesTests(ViewPermissionTests):
    """
    Test CMS_PUBLIC_FOR=all group access and menu nodes rendering
    """
    settings_overrides = {
        'CMS_PERMISSION': True,
        'CMS_PUBLIC_FOR': 'all',
    }

    def _get_user(self, base_username):
        """Return the test user *base_username*, regardless of whether the
        configured user model authenticates by username or by email."""
        user_model = get_user_model()
        if user_model.USERNAME_FIELD == 'email':
            return user_model.objects.get(
                email='%s@django-cms.org' % base_username)
        return user_model.objects.get(username=base_username)

    def test_public_pages_anonymous_norestrictions(self):
        """
        All pages are visible to an anonymous user
        """
        all_pages = self._setup_tree_pages()
        request = self.get_request()
        visible_page_ids = get_visible_pages(request, all_pages, self.site)
        self.assertEqual(len(all_pages), len(visible_page_ids))
        nodes = menu_pool.get_nodes(request)
        self.assertEqual(len(nodes), len(all_pages))

    def test_public_menu_anonymous_user(self):
        """
        Anonymous user should only see the pages in the rendered menu
        that have no permissions assigned,directly or indirectly
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        granted = ['page_a',
                   'page_c',
                   'page_c_a',
                   'page_c_b',
                   'page_d_a',
                   'page_d_b',
                   'page_d_c',
                   'page_d_d'
        ]
        self.assertGrantedVisibility(all_pages, granted)
        urls = self.get_url_dict(all_pages)
        user = AnonymousUser()
        request = self.get_request(user, urls['/en/'])
        nodes = menu_pool.get_nodes(request)
        self.assertEqual(len(nodes), 4)
        self.assertInMenu(urls["/en/"], user)
        self.assertInMenu(urls["/en/page_c/"], user)
        self.assertInMenu(urls["/en/page_c/page_c_a/"], user)
        self.assertInMenu(urls["/en/page_c/page_c_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/"], user)
        self.assertNotInMenu(urls["/en/page_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertNotInMenu(urls["/en/page_d/"], user)

    def test_menu_access_page_and_children_group_1(self):
        """
        simulate behaviour of group b member
        group_b_ACCESS_PAGE_AND_CHILDREN to page_b
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        granted = ['page_a',
                   'page_c',
                   'page_c_a',
                   'page_c_b',
                   # group_1
                   'page_b',  # page_id b has page_id and children restricted - group 1
                   'page_b_a',
                   'page_b_b',  # page_id b_b children restricted - group 2
                   'page_b_c',
                   'page_b_d',
                   # not restricted
                   'page_d_a',
                   'page_d_b',
                   'page_d_c',
                   'page_d_d'
        ]
        urls = self.get_url_dict(all_pages)
        user = self._get_user('user_1')
        self.assertGrantedVisibility(all_pages, granted, username='user_1')
        self.assertViewAllowed(urls["/en/page_b/"], user)
        self.assertInMenu(urls["/en/page_b/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/"], user)
        self.assertInMenu(urls["/en/page_b/page_b_b/"], user)
        # descendant
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertNotInMenu(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        # group 5
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertNotInMenu(urls["/en/page_d/"], user)
        # should be public as only page_d is restricted
        self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
        self.assertNotInMenu(urls["/en/page_d/page_d_a/"], user)

    def test_menu_access_children_group_2(self):
        """
        simulate behaviour of group 2 member
        GROUPNAME_2 = 'group_b_b_ACCESS_CHILDREN'
        to page_b_b
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        granted = [
            'page_a',
            'page_c',
            'page_c_a',
            'page_c_b',
            'page_b_b_a',
            'page_b_b_b',
            'page_b_b_c',
            # not restricted
            'page_d_a',
            'page_d_b',
            'page_d_c',
            'page_d_d',
        ]
        self.assertGrantedVisibility(all_pages, granted, username='user_2')
        urls = self.get_url_dict(all_pages)
        user = self._get_user('user_2')
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)

    def test_menu_access_page_and_descendants_group_3(self):
        """
        simulate behaviour of group 3 member
        group_b_ACCESS_PAGE_AND_DESCENDANTS to page_b
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        granted = ['page_a',
                   'page_b',
                   'page_b_a',
                   'page_b_b',
                   'page_b_b_a',
                   'page_b_b_a_a',
                   'page_b_b_b',
                   'page_b_b_c',
                   'page_b_c',
                   'page_b_d',
                   'page_b_d_a',
                   'page_b_d_b',
                   'page_b_d_c',
                   'page_c',
                   'page_c_a',
                   'page_c_b',
                   'page_d_a',
                   'page_d_b',
                   'page_d_c',
                   'page_d_d',
        ]
        self.assertGrantedVisibility(all_pages, granted, username='user_3')
        urls = self.get_url_dict(all_pages)
        user = self._get_user('user_3')
        self.assertViewAllowed(urls["/en/page_b/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_d/page_b_d_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)

    def test_menu_access_descendants_group_4(self):
        """
        simulate behaviour of group 4 member
        group_b_b_ACCESS_DESCENDANTS to page_b_b
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        granted = ['page_a',
                   'page_b_b_a',
                   'page_b_b_a_a',
                   'page_b_b_b',
                   'page_b_b_c',
                   'page_c',
                   'page_c_a',
                   'page_c_b',
                   'page_d_a',
                   'page_d_b',
                   'page_d_c',
                   'page_d_d',
        ]
        self.assertGrantedVisibility(all_pages, granted, username='user_4')
        urls = self.get_url_dict(all_pages)
        user = self._get_user('user_4')
        self.assertViewNotAllowed(urls["/en/page_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)

    def test_menu_access_page_group_5(self):
        """
        simulate behaviour of group b member
        group_d_ACCESS_PAGE to page_d
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        granted = ['page_a',
                   'page_c',
                   'page_c_a',
                   'page_c_b',
                   'page_d',
                   'page_d_a',
                   'page_d_b',
                   'page_d_c',
                   'page_d_d',
        ]
        self.assertGrantedVisibility(all_pages, granted, username='user_5')
        urls = self.get_url_dict(all_pages)
        user = self._get_user('user_5')
        # call /
        self.assertViewNotAllowed(urls["/en/page_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertViewAllowed(urls["/en/page_d/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
class ViewPermissionTreeBugTests(ViewPermissionTests):
    """Test issue 1113
    https://github.com/divio/django-cms/issues/1113
    Wrong view permission calculation in PagePermission.objects.for_page
    grant_on=ACCESS_PAGE_AND_CHILDREN or ACCESS_PAGE_AND_DESCENDANTS to page 6
    Test if this affects the menu entries and page visibility
    """
    # Permission checks enabled; unrestricted pages are public to everyone.
    settings_overrides = {
        'CMS_PERMISSION': True,
        'CMS_PUBLIC_FOR': 'all',
    }
    GROUPNAME_6 = 'group_6_ACCESS_PAGE'

    def _setup_pages(self):
        """Create and return the test page tree.

        Tree structure::

            |- Page_1
            |  |- Page_2
            |  |  |- Page_3
            |  |  |  |- Page_4   (false positive)
            |  |- Page_5
            |  |  |- Page_6      (group 6 page access)
        """
        stdkwargs = {
            'template': 'nav_playground.html',
            'language': 'en',
            'published': True,
            'in_navigation': True,
        }
        page_1 = create_page("page_1", **stdkwargs) # first page slug is /
        page_2 = create_page("page_2", parent=page_1, **stdkwargs)
        page_3 = create_page("page_3", parent=page_2, **stdkwargs)
        page_4 = create_page("page_4", parent=page_3, **stdkwargs)
        page_5 = create_page("page_5", parent=page_1, **stdkwargs)
        page_6 = create_page("page_6", parent=page_5, **stdkwargs)
        return [page_1,
                page_2,
                page_3,
                page_4,
                page_5,
                page_6,
                ]

    def _setup_user(self):
        """Create user_6 and add it to the GROUPNAME_6 group."""
        user = self._create_user('user_6', True)
        group = Group.objects.create(name=self.GROUPNAME_6)
        # user_related_name is the Group reverse accessor for the active
        # user model (module-level name defined elsewhere in this file).
        user_set = getattr(group, user_related_name)
        user_set.add(user)
        group.save()

    def _setup_permviewbug(self):
        """
        Setup group_6_ACCESS_PAGE view restriction
        """
        # Grant BOTH children- and descendants-scoped view permission on
        # page_6 -- the combination that triggered issue 1113.
        page = Page.objects.drafts().get(title_set__title="page_6")
        group = Group.objects.get(name__iexact=self.GROUPNAME_6)
        PagePermission.objects.create(can_view=True, group=group, page=page, grant_on=ACCESS_PAGE_AND_CHILDREN)
        PagePermission.objects.create(can_view=True, group=group, page=page, grant_on=ACCESS_PAGE_AND_DESCENDANTS)

    def test_pageforbug(self):
        """Permissions must attach only to page_6 and visibility must follow."""
        all_pages = self._setup_pages()
        self._setup_user()
        self._setup_permviewbug()
        # for_page must not report permissions for unrelated pages
        # (the bug produced false positives like Page_4).
        for page in all_pages:
            perm = PagePermission.objects.for_page(page=page)
            # only page_6 has a permission assigned
            if page.get_title() == 'page_6':
                self.assertEqual(len(perm), 2)
            else:
                msg = "Permission wrong at page %s" % (page.get_title())
                self.assertEqual(len(perm), 0, msg)
        granted = ['page_1',
                   'page_2',
                   'page_3',
                   'page_4',
                   'page_5',
                   ]
        urls = self.get_url_dict(all_pages)
        user = AnonymousUser()
        # anonymous doesn't see page_6
        self.assertGrantedVisibility(all_pages, granted)
        self.assertViewAllowed(urls["/en/page_2/page_3/page_4/"], user)
        self.assertViewAllowed(urls["/en/page_5/"], user)
        self.assertViewNotAllowed(urls["/en/page_5/page_6/"], user)
        # group member
        granted = ['page_1',
                   'page_2',
                   'page_3',
                   'page_4',
                   'page_5',
                   'page_6',
                   ]
        self.assertGrantedVisibility(all_pages, granted, username='user_6')
        if get_user_model().USERNAME_FIELD == 'email':
            user = get_user_model().objects.get(email='user_6@django-cms.org')
        else:
            user = get_user_model().objects.get(username='user_6')
        url = "/en/page_2/page_3/page_4/"
        self.assertViewAllowed(urls[url], user)
        url = "/en/page_5/page_6/"
        self.assertViewAllowed(urls[url], user)
|
|
import xml.dom.minidom as xml
import tempfile, os, codecs, sys, re, shutil, yaml
class TempFile(object):
    """Context manager that writes into a temporary file and replaces
    ``finalTargetFilePath`` with it on a clean exit.

    If the ``with`` body raises, the temporary file is discarded and the
    existing target file is left untouched.
    """

    def __init__(self, finalTargetFilePath):
        fd, tmp_path = tempfile.mkstemp()
        self.temppath = tmp_path
        self.tmpfo = os.fdopen(fd, 'wb')
        self.targetPath = finalTargetFilePath

    def __enter__(self):
        # Hand back a UTF-8 transcoding wrapper around the temp file object.
        return codecs.EncodedFile(self.tmpfo, 'utf-8', 'utf-8')

    def __exit__(self, exc_type, exc_value, traceback):
        self.tmpfo.close()
        if exc_type:
            # Failed write: drop the temp file, keep the old target intact.
            os.unlink(self.temppath)
        else:
            # Clean write: swap the finished temp file into place.
            if os.path.exists(self.targetPath):
                os.remove(self.targetPath)
            shutil.move(self.temppath, self.targetPath)
        return None
class AndroidManifestInst(object):
    """In-memory editor for an AndroidManifest.xml DOM.

    Wraps ``xml.dom.minidom`` with helpers to query and patch the manifest:
    package name, permissions, SDK levels, activities, meta-data and the
    chameleon-specific placeholder replacement.
    """

    def __init__(self, path=None):
        """Parse *path*, or start from a minimal skeleton when path is None."""
        if path is None:
            self._initEmptyManifest()
        else:
            self.doc = xml.parse(path)

    def getPermissions(self):
        """Return all <uses-permission> elements directly under <manifest>."""
        parentNode = self.doc.documentElement
        return AndroidManifestInst._getChildrenNS(parentNode, 'uses-permission')

    def getPkgName(self):
        """Return the manifest 'package' attribute ('' when unset)."""
        return self._rootNode.getAttribute('package')

    def getPkgVersionName(self):
        """Return the 'android:versionName' attribute ('' when unset)."""
        return self._rootNode.getAttribute('android:versionName')

    def replace(self, cfg):
        """Expand chameleon:replace placeholders on every element using *cfg*."""
        AndroidManifestInst._walkElementNode(self._rootNode,
                                             lambda node: replaceNodeAttr(node, cfg))

    def replaceTargetSDK(self, target):
        """Set android:targetSdkVersion, creating <uses-sdk> when absent."""
        children = AndroidManifestInst._getChildrenNS(self._rootNode, 'uses-sdk')
        if len(children) == 0:
            # BUGFIX: the element must be named 'uses-sdk'; it was created
            # as 'use-sdk', so later lookups for 'uses-sdk' never found it.
            targetNode = self.doc.createElement('uses-sdk')
            targetNode.setAttribute('android:minSdkVersion', '7')
            self._rootNode.appendChild(targetNode)
        else:
            targetNode = children[0]
        targetNode.setAttribute('android:targetSdkVersion', target)

    def replaceApplication(self):
        """Point android:name of <application> at the Chameleon application."""
        self._applicationNode.setAttribute('android:name', 'prj.chameleon.channelapi.ChameleonApplication')

    def replaceEntryActivity(self, orientation, channel, oldPkgName):
        """Rename the launcher activity to <pkg>.ChameleonMainActivity.

        Raises RuntimeError when no MAIN/LAUNCHER activity exists.  For the
        'lenovo' channel the intent filter is rewritten as well.
        """
        entryActivityNode = self._findEntryActivity()
        if entryActivityNode is None:
            raise RuntimeError('Fail to find the start entry')
        oldEntry = entryActivityNode.getAttribute('android:name')
        if oldEntry.startswith('.'):
            oldEntry = oldPkgName + oldEntry
        # NOTE(review): assumes oldEntry contains a '.' -- rfind returns -1
        # for dotless names and would mangle the result; verify inputs.
        newEntry = oldEntry[0:oldEntry.rfind('.')] + ".ChameleonMainActivity"
        entryActivityNode.setAttribute('android:name', newEntry)
        if channel == 'lenovo':
            _addLenovoMainAction(self.doc, entryActivityNode)

    #TODO add SplashScreenActivity
    def addSplashScreenActivity(self, orientation):
        """Append the Chameleon splash-screen <activity> to <application>."""
        splashActivity = self.doc.createElement('activity')
        _fillSplashScreenActivity(self.doc, splashActivity, orientation)
        self._applicationNode.appendChild(splashActivity)

    def merge(self, that):
        """Merge permissions and application components from *that* manifest."""
        self._mergePermissions(that)
        self._mergeActivity(that)

    def setElement(self, parent, tag, attrs, valueAttrs):
        """Find-or-create <tag> (matched by *attrs*) under the *parent* path
        and set both the matching attributes and *valueAttrs* on it."""
        parentNode = self.doc.documentElement
        for p in parent:
            parentNode = AndroidManifestInst._getChildNS(parentNode, p)
            # ROBUSTNESS: fail with a clear error on a broken path instead
            # of an AttributeError on the next loop iteration.
            if parentNode is None:
                raise RuntimeError(u'fail to find element %s/%s' % (parent, tag))
        childNode = AndroidManifestInst._getChildNS(parentNode, tag, attrs)
        if childNode is None:
            childNode = self.doc.createElement(tag)
            parentNode.appendChild(childNode)
        for (name, value) in attrs:
            childNode.setAttribute(name, value)
        for (name, value) in valueAttrs:
            childNode.setAttribute(name, value)

    def setPkgName(self, pkgName):
        """Set the manifest 'package' attribute."""
        root = self.doc.documentElement
        root.setAttribute('package', pkgName)

    def createElement(self, parentPath, tag, attrs = None):
        """Create a new <tag> under the element addressed by *parentPath*.

        Path items are plain tag names or (tag, attrs) tuples for an
        attribute-qualified lookup.
        """
        parentNode = self.doc.documentElement
        for p in parentPath:
            if type(p) is tuple:
                if len(p) != 2:
                    # BUGFIX: typo 'tule' -> 'tuple' in the error message.
                    raise RuntimeError(u'the path tuple must be (tag, attr)')
                # BUGFIX: the helper is _getChildNS; the old '_getChildNs'
                # raised AttributeError on every tuple path element.
                parentNode = AndroidManifestInst._getChildNS(parentNode, p[0], p[1])
                if parentNode is None:
                    raise RuntimeError(u'Fail to find the path ' + repr(p))
            elif type(p) is str:
                parentNode = AndroidManifestInst._getChildNS(parentNode, p)
        childNode = self.doc.createElement(tag)
        # BUGFIX: minidom nodes have appendChild, not list-style append().
        parentNode.appendChild(childNode)
        if attrs is not None:
            for name, value in attrs:
                childNode.setAttribute(name, value)

    def setIcon(self, iconname):
        """Set android:icon on <application> to @drawable/<iconname>."""
        self.setElement([],
                        'application',
                        [],
                        [('android:icon', '@drawable/'+iconname)])

    def setMetaData(self, name, value):
        """Set (or create) an application-level <meta-data name=value>."""
        self.setElement(['application'],
                        'meta-data',
                        [('android:name', name)],
                        [('android:value', value)])

    def addAdditionalInfo(self, yamlPath):
        """Apply sdkInfo/versionInfo settings from a YAML config file.

        Returns 1 when the file does not exist (legacy error code),
        otherwise None.
        """
        if not os.path.exists(yamlPath):
            return 1
        # BUGFIX: close the config file; the descriptor used to leak.
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input; this is assumed to be a local build config file.
        with open(yamlPath, 'r') as fd:
            addInfo = yaml.load(fd)
        children = AndroidManifestInst._getChildrenNS(self._rootNode, 'uses-sdk')
        if len(children) == 0:
            targetNode = self.doc.createElement('uses-sdk')
            self._rootNode.appendChild(targetNode)
        else:
            targetNode = children[0]
        for (key, value) in addInfo['sdkInfo'].items():
            targetNode.setAttribute('android:'+key, value)
        root = self.doc.documentElement
        for (key, value) in addInfo['versionInfo'].items():
            root.setAttribute('android:'+key, value)

    def _findEntryActivity(self):
        """Return the <activity> with the MAIN/LAUNCHER intent filter, or None."""
        activityNodes = AndroidManifestInst._getChildrenNS(self._applicationNode,
                                                           'activity')
        for n in activityNodes:
            intentNode = AndroidManifestInst._getChildNS(n, 'intent-filter')
            if intentNode is not None:
                # BUGFIX(cleanup): removed a dead unqualified 'action' lookup
                # whose result was immediately overwritten.
                actionNode = AndroidManifestInst._getChildNS(
                    intentNode, 'action',
                    [('android:name', 'android.intent.action.MAIN')])
                categoryNode = AndroidManifestInst._getChildNS(
                    intentNode, 'category',
                    [('android:name', 'android.intent.category.LAUNCHER')])
                if actionNode is not None and categoryNode is not None:
                    return n
        return None

    def _mergeActivity(self, that):
        # Move every element child of the other manifest's <application>
        # into ours.  NOTE(review): nodes are re-parented across documents
        # without importNode -- minidom tolerates this, but confirm intent.
        toMerge = [x for x in that._applicationNode.childNodes
                   if x.nodeType == x.ELEMENT_NODE]
        for m in toMerge:
            self._applicationNode.appendChild(m)

    def _mergePermissions(self, that):
        # Copy only permissions we do not already declare.
        thatPerm = that.getPermissions()
        myPerm = self.getPermissions()
        myNowPermission = [x.getAttribute('android:name') for x in myPerm]
        toAddPerm = [x for x in thatPerm
                     if x.getAttribute('android:name') not in myNowPermission]
        for p in toAddPerm:
            self._rootNode.appendChild(p)

    def _initEmptyManifest(self):
        """Build a skeleton <manifest><application/></manifest> document."""
        self.doc = xml.getDOMImplementation().createDocument(None, 'manifest', None)
        root = self.doc.documentElement
        root.setAttribute('xmlns:android', 'http://schemas.android.com/apk/res/android')
        root.setAttribute('package', 'prj.chameleon.entry')
        applicationDoc = self.doc.createElement('application')
        root.appendChild(applicationDoc)

    def _getChildElement(self, parentNode, tag):
        # NOTE(review): incomplete and apparently unused -- the expression
        # below has no effect and the method always returns None.  Kept
        # as-is for interface compatibility; TODO finish or remove.
        if type(tag) is tuple:
            parentNode.childNodes

    def dump(self, path = None):
        """Return pretty-printed XML, or write the document to *path*."""
        if path is None:
            return self.doc.toprettyxml()
        # BUGFIX: close the output file deterministically.
        with codecs.open(path, 'w', 'utf8') as f:
            return self.doc.writexml(f, indent="\t")

    def safeDump(self, path):
        """Write the document via a temp file so a failed dump cannot
        truncate an existing target file."""
        with TempFile(path) as f:
            return f.write(self.doc.toxml('utf-8'))

    def fullQualifyName(self, pkgName):
        """Rewrite relative component names under <application> so they are
        fully qualified with *pkgName*."""
        appNode = self._applicationNode
        for node in appNode.childNodes:
            if node.nodeType == node.ELEMENT_NODE:
                if node.tagName in ["activity", "service", "provider", 'receiver']:
                    self._fullQualifyName(node, 'android:name', pkgName)
                elif node.tagName == 'activity-alias':
                    self._fullQualifyName(node, 'android:name', pkgName)
                    self._fullQualifyName(node, 'android:targetActivity', pkgName)

    def _fullQualifyName(self, node, attrId, pkgName):
        val = node.getAttribute(attrId)
        # BUGFIX: minidom's getAttribute returns '' (never None) for a
        # missing attribute, so the old `is None` guard never fired and
        # nameless components were given a bogus '<pkg>.' name.
        if not val:
            return
        if val.startswith(pkgName):
            pass
        elif val.startswith('.'):
            node.setAttribute(attrId, pkgName+val)
        elif val.find('.') == -1:
            node.setAttribute(attrId, pkgName+'.'+val)
        else:
            # Already qualified with some other package: leave untouched.
            pass

    @property
    def _rootNode(self):
        # The <manifest> element.
        return self.doc.documentElement

    @property
    def _applicationNode(self):
        # The single <application> child of <manifest>.
        return AndroidManifestInst._getChildNS(self._rootNode, 'application')

    @staticmethod
    def _getChildNS(node, tag, attrs=None):
        """Return the first child element matching tag (and attrs), or None."""
        for n in node.childNodes:
            if AndroidManifestInst._matchNode(n, tag, attrs):
                return n
        return None

    @staticmethod
    def _getChildrenNS(node, tag, attrs = None):
        """Return all child elements matching tag (and attrs)."""
        return [x for x in node.childNodes if
                AndroidManifestInst._matchNode(x, tag, attrs)]

    @staticmethod
    def _matchNode(node, tag, attrs):
        return AndroidManifestInst._matchTag(node, tag) and AndroidManifestInst._matchAttr(node, attrs)

    @staticmethod
    def _matchTag(node, tag):
        return node.nodeType == node.ELEMENT_NODE and node.tagName == tag

    @staticmethod
    def _matchAttr(node, attrs):
        # attrs is a list of (name, expected_value) pairs; None matches all.
        if attrs is None:
            return True
        for attr in attrs:
            if node.getAttribute(attr[0]) != attr[1]:
                return False
        return True

    @staticmethod
    def _walkElementNode(parentNode, func):
        """Apply *func* to parentNode and, recursively, every child element."""
        func(parentNode)
        for n in parentNode.childNodes:
            if n.nodeType == n.ELEMENT_NODE:
                AndroidManifestInst._walkElementNode(n, func)
def parseReplaceVal(val):
    """Split 'k1=v1;;k2=v2' into [['k1', 'v1'], ['k2', 'v2']]."""
    return [entry.split('=') for entry in val.split(';;')]


REPLACE_RE = re.compile('%(.+?)%')


def replaceNodeAttr(node, cfg):
    """Expand the node's chameleon:replace spec into real attributes.

    Each ``name=value`` pair in the spec becomes an attribute on *node*,
    with ``%key%`` placeholders substituted from *cfg* (a key missing from
    cfg falls back to the bare key name).  The spec attribute itself is
    removed afterwards.  Nodes without a spec are left untouched.
    """
    spec = node.getAttribute("chameleon:replace")
    if len(spec) == 0:
        return

    def substitute(match):
        key = match.group(1)
        value = cfg.get(key)
        return key if value is None else str(value)

    for name, raw in parseReplaceVal(spec):
        node.setAttribute(name, REPLACE_RE.sub(substitute, str(raw)))
    node.removeAttribute("chameleon:replace")
# TODO add splash attribute rm intent-filter MAIN LAUNCHER
def _fillSplashScreenActivity(doc, splashActivity, orientation):
splashActivity.setAttribute('android:name', 'prj.chameleon.channelapi.SplashScreenActivity')
if orientation is not None:
splashActivity.setAttribute('android:screenOrientation', orientation)
splashActivity.setAttribute('android:noHistory', "true")
splashActivity.setAttribute('android:stateNotNeeded', "true")
splashActivity.setAttribute('android:launchMode', "singleTask")
splashActivity.setAttribute('android:theme', "@android:style/Theme.NoTitleBar.Fullscreen")
# TODO add lenovo main action
def _addLenovoMainAction(doc, entryActivityNode):
    """Swap the MAIN/LAUNCHER intent filter for Lenovo's custom entry action.

    Removes the standard android.intent.action.MAIN action and LAUNCHER
    category from the activity's intent filter and installs
    lenovoid.MAIN / android.intent.category.DEFAULT instead.
    """
    intentNode = AndroidManifestInst._getChildNS(entryActivityNode, 'intent-filter')
    oldAction = AndroidManifestInst._getChildNS(
        intentNode, 'action', [('android:name', 'android.intent.action.MAIN')])
    oldCategory = AndroidManifestInst._getChildNS(
        intentNode, 'category', [('android:name', 'android.intent.category.LAUNCHER')])
    intentNode.removeChild(oldAction)
    intentNode.removeChild(oldCategory)
    newAction = doc.createElement('action')
    intentNode.appendChild(newAction)
    newAction.setAttribute('android:name', "lenovoid.MAIN")
    newCategory = doc.createElement('category')
    intentNode.appendChild(newCategory)
    newCategory.setAttribute('android:name', "android.intent.category.DEFAULT")
|
|
# -*- coding: utf-8 -*-
from datetime import date
from django.conf import settings
from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager
from django.contrib.auth.models import PermissionsMixin
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
from django.forms.utils import flatatt
from django.utils import timezone
from django.utils.formats import date_format
from django.utils.html import format_html
from django.utils.translation import ugettext_lazy as _
class UserManager(BaseUserManager):
    """Manager for the e-mail-authenticated ``User`` model."""

    use_in_migrations = True

    def _create_user(self, email, password=None, **extra_fields):
        """Create, save and return a user; raise ValueError on empty email.

        Without a password the account gets an unusable one, so it cannot
        log in with password authentication.
        """
        if not email:
            raise ValueError("The given email must be set")
        user = self.model(email=self.normalize_email(email), **extra_fields)
        if password:
            user.set_password(password)
        else:
            user.set_unusable_password()
        user.save()
        return user

    def create_user(self, email, password=None, **extra_fields):
        """Create a regular (non-staff, non-superuser) account."""
        extra_fields.setdefault('is_staff', False)
        extra_fields.setdefault('is_superuser', False)
        return self._create_user(email, password, **extra_fields)

    def create_superuser(self, email, password=None, **extra_fields):
        """Create a superuser; both privilege flags must remain True."""
        extra_fields.setdefault('is_staff', True)
        extra_fields.setdefault('is_superuser', True)
        if extra_fields['is_staff'] is not True:
            raise ValueError("Superuser must have is_staff=True.")
        if extra_fields['is_superuser'] is not True:
            raise ValueError("Superuser must have is_superuser=True.")
        return self._create_user(email, password, **extra_fields)
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model authenticated by a unique e-mail address."""

    class Meta:
        verbose_name = _("user")
        verbose_name_plural = _("users")

    # Authentication is keyed on the unique e-mail; no other required fields.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []
    objects = UserManager()

    email = models.EmailField(
        unique=True,
        verbose_name=_("e-mail address"),
    )
    display_name = models.CharField(
        max_length=30,
        blank=True,
        verbose_name=_("display name"),
    )
    first_name = models.CharField(
        max_length=30,
        blank=True,
        verbose_name=_("first name"),
    )
    last_name = models.CharField(
        max_length=30,
        blank=True,
        verbose_name=_("last name"),
    )
    is_staff = models.BooleanField(
        verbose_name=_("staff status"),
        default=False,
    )
    is_active = models.BooleanField(
        default=True,
        verbose_name=_("active"),
    )
    date_joined = models.DateTimeField(
        verbose_name=_("date joined"),
        default=timezone.now,
    )
    # upload_to uses a strftime pattern: pictures are bucketed by year.
    picture = models.ImageField(
        upload_to='%Y/users/pictures/',
        blank=True,
        verbose_name=_("picture"),
    )
    birth_date = models.DateField(
        blank=True, null=True,
        verbose_name=_("birth date"),
    )
    short_description = models.CharField(
        max_length=255,
        blank=True,
        verbose_name=_("short description"),
    )
    bragging_rights = models.TextField(
        blank=True,
        verbose_name=_("bragging rights"),
    )
    website = models.URLField(
        blank=True,
        verbose_name=_("website"),
    )
    website_name = models.CharField(
        max_length=50,
        blank=True,
        verbose_name=_("website name"),
    )

    def __str__(self):
        return self.email

    def get_full_name(self):
        """Return 'first last', stripped to '' when both parts are blank."""
        return "{} {}".format(self.first_name, self.last_name).strip()

    def get_short_name(self):
        """Return the best available short label for the user."""
        return self.display_name or self.get_full_name() or _("Anonymous")
    get_short_name.short_description = _("display name")

    def get_website_link(self, attrs=None):
        """Return a safe <a> tag for the user's website, or None when unset.

        *attrs* maps extra HTML attributes (default: open in a new tab);
        the link text falls back to the raw URL when no name is set.
        NOTE(review): a caller-supplied attrs dict is mutated ('href' is
        added) -- consider copying it.
        """
        if not self.website:
            return None
        if attrs is None:
            attrs = {'target': '_blank'}
        attrs['href'] = self.website
        website_name = self.website_name or self.website
        return format_html('<a{}>{}</a>', flatatt(attrs), website_name)
    get_website_link.short_description = _("website")

    def get_age(self):
        """Return the age in whole years, or None without a birth date."""
        bday = self.birth_date
        if not bday:
            return None
        today = date.today()
        # The boolean subtracts 1 if this year's birthday is still ahead.
        return today.year - bday.year - (
            (today.month, today.day) < (bday.month, bday.day))
class Tag(models.Model):
    """A label applied to articles, with denormalized usage counters."""

    class Meta:
        verbose_name = _("tag")
        verbose_name_plural = _("tags")

    name = models.CharField(
        max_length=50,
        unique=True,
        verbose_name=_("name"),
    )
    slug = models.SlugField(
        max_length=50,
        unique=True,
        verbose_name=_("slug")
    )
    description = models.TextField(
        blank=True,
        verbose_name=_("description"),
    )
    # Denormalized counters -- presumably maintained elsewhere (signals or
    # batch jobs); nothing in this model updates them. TODO confirm.
    article_count = models.PositiveSmallIntegerField(
        default=0,
        verbose_name=_("article count")
    )
    article_view_count = models.PositiveIntegerField(
        default=0,
        verbose_name=_("article view count"),
    )

    def __str__(self):
        return self.name
class Article(models.Model):
    """A blog article with tags, file-based content and view tracking."""

    class Meta:
        verbose_name = _("article")
        verbose_name_plural = _("articles")

    title = models.CharField(
        max_length=200,
        verbose_name=_("title"),
    )
    slug = models.SlugField(
        max_length=100,
        unique=True,
        verbose_name=_("slug"),
    )
    # Author is nullable so articles survive account deletion is NOT the
    # case here: PROTECT forbids deleting a user who authored articles.
    author = models.ForeignKey(
        settings.AUTH_USER_MODEL, models.PROTECT,
        blank=True, null=True,
        verbose_name=_("author"),
    )
    pub_date = models.DateTimeField(
        blank=True, null=True,
        verbose_name=_("publication date"),
    )
    update_date = models.DateTimeField(
        blank=True, null=True,
        # BUGFIX: the label was copy-pasted from is_online; this field is
        # the update timestamp, not the online flag.
        verbose_name=_("update date")
    )
    update_summary = models.TextField(
        blank=True,
        verbose_name=_("update summary"),
    )
    is_online = models.BooleanField(
        default=False,
        verbose_name=_("is online"),
    )
    teaser = models.TextField(
        verbose_name=_("teaser"),
    )
    # Article body lives in an uploaded file, not a text column.
    content = models.FileField(
        upload_to='%Y/articles/',
        verbose_name=_("content")
    )
    tags = models.ManyToManyField(
        Tag,
        verbose_name=_("tags"),
    )
    illustration = models.ImageField(
        upload_to='%Y/illustrations/',
        blank=True,
        verbose_name=_("illustration"),
    )
    illustration_credit = models.CharField(
        max_length=50,
        blank=True,
        verbose_name=_("illustration credit"),
    )
    illustration_credit_url = models.URLField(
        blank=True,
        verbose_name=_("illustration credit link")
    )
    view_count = models.PositiveIntegerField(
        default=0,
        verbose_name=_("view count"),
    )
    subscribed_users = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        related_name='article_subscriptions',
        verbose_name=_("subscribed users"),
    )

    def __str__(self):
        return self.slug

    def clean(self):
        """Normalize publication/update timestamps before validation ends.

        - Going online without a publication date stamps it with now.
        - An update date that does not postdate publication is discarded.
        """
        super().clean()
        if self.is_online and not self.pub_date:
            self.pub_date = timezone.now()
        if self.update_date:
            if not self.pub_date or self.pub_date >= self.update_date:
                self.update_date = None

    def get_last_modified(self):
        """Return the update date if set, otherwise the publication date."""
        return self.update_date or self.pub_date
class Reaction(models.Model):
    """A reader's reaction and/or comment on an article, with vote counts."""

    class Meta:
        verbose_name = _("reaction")
        verbose_name_plural = _("reactions")

    # Closed set of selectable reactions.
    REACTION_CHOICES = [
        ('must_read', _("Must read")),
        ('interesting', _("Interesting")),
        ('fuzzy', _("Fuzzy")),
        ('outdated', _("Outdated")),
        ('mistaking', _("Mistaking")),
    ]

    article = models.ForeignKey(
        Article, models.CASCADE,
        verbose_name=_("article"),
    )
    reaction = models.CharField(
        max_length=20,
        blank=True,
        choices=REACTION_CHOICES,
        verbose_name=_("reaction"),
    )
    comment = models.TextField(
        blank=True,
        verbose_name=_("comment"),
    )
    # SET_NULL: reactions are kept if their author's account is deleted.
    author = models.ForeignKey(
        settings.AUTH_USER_MODEL, models.SET_NULL,
        blank=True, null=True,
        verbose_name=_("author"),
    )
    posted_on = models.DateTimeField(
        auto_now_add=True,
        verbose_name=_("posted on"),
    )
    # Tri-state flag; None presumably means "not yet reviewed" -- confirm.
    # NOTE(review): NullBooleanField is deprecated in modern Django in
    # favour of BooleanField(null=True); left unchanged here.
    is_valid = models.NullBooleanField(
        verbose_name=_("is valid"),
    )
    up_votes = models.PositiveSmallIntegerField(
        default=0,
        verbose_name=_("up votes"),
    )
    down_votes = models.PositiveSmallIntegerField(
        default=0,
        verbose_name=_("down votes"),
    )

    def __str__(self):
        return '#{}'.format(self.pk)

    def clean(self):
        """Reject submissions that carry neither a reaction nor a comment."""
        super().clean()
        if not self.reaction and not self.comment:
            raise ValidationError(
                _("Please submit at least your reaction or a comment.")
            )
class Log(models.Model):
    """Audit log entry tying a user/IP action to an arbitrary target object."""

    class Meta:
        verbose_name = _("log")
        verbose_name_plural = _("logs")

    # Closed set of loggable actions.
    ACTION_CHOICES = [
        ('view_article', _("viewed article")),
        ('comment_article', _("commented article")),
        ('upvote_comment', _("upvoted comment")),
        ('downvote_comment', _("downvoted comment")),
        ('report_comment', _("reported comment")),
    ]

    ip_address = models.GenericIPAddressField(
        verbose_name=_("ip address"),
    )
    # SET_NULL: log rows outlive deleted user accounts.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL, models.SET_NULL,
        blank=True, null=True,
        verbose_name=_("user"),
    )
    action = models.CharField(
        max_length=50,
        choices=ACTION_CHOICES,
        verbose_name=_("action"),
    )
    when = models.DateTimeField(
        auto_now_add=True,
        verbose_name=_("when"),
    )
    # Generic relation to the object the action was performed on.
    target_type = models.ForeignKey(
        ContentType, models.CASCADE,
        verbose_name=_("target type"),
    )
    target_id = models.PositiveSmallIntegerField(
        verbose_name=_("target"),
    )
    target = GenericForeignKey('target_type', 'target_id')
    comment = models.TextField(
        blank=True,
        verbose_name=_("comment"),
    )

    def __str__(self):
        """Render e.g. 'user@x viewed article "some-slug" on Jan. 1, 2020'."""
        message = _("{client} {action} \"{target}\" on {when}")
        # NOTE(review): get_client_info is not defined on this model in this
        # file -- verify it is provided elsewhere, or __str__ will raise.
        # BUGFIX(cleanup): dropped the unused target_type= format argument;
        # the template never references it, so str.format ignored it.
        return message.format(
            client=self.get_client_info(),
            action=self.get_action_display(),
            target=self.target,
            when=date_format(self.when, 'DATETIME_FORMAT'),
        )
|
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUDomainFIPAclTemplateEntriesFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUDomainFIPAclTemplate(NURESTObject):
""" Represents a DomainFIPAclTemplate in the VSD
Notes:
Defines the template for an Domain Floating IP ACL
"""
__rest_name__ = "egressdomainfloatingipacltemplate"
__resource_name__ = "egressdomainfloatingipacltemplates"
## Constants
CONST_POLICY_STATE_DRAFT = "DRAFT"
CONST_POLICY_STATE_LIVE = "LIVE"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_PRIORITY_TYPE_NONE = "NONE"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
CONST_PRIORITY_TYPE_TOP = "TOP"
CONST_PRIORITY_TYPE_BOTTOM = "BOTTOM"
def __init__(self, **kwargs):
""" Initializes a DomainFIPAclTemplate instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> domainfipacltemplate = NUDomainFIPAclTemplate(id=u'xxxx-xxx-xxx-xxx', name=u'DomainFIPAclTemplate')
>>> domainfipacltemplate = NUDomainFIPAclTemplate(data=my_dict)
"""
super(NUDomainFIPAclTemplate, self).__init__()
# Read/Write Attributes
self._name = None
self._last_updated_by = None
self._last_updated_date = None
self._active = None
self._default_allow_ip = None
self._default_allow_non_ip = None
self._description = None
self._embedded_metadata = None
self._entity_scope = None
self._entries = None
self._policy_state = None
self._creation_date = None
self._priority = None
self._priority_type = None
self._associated_live_entity_id = None
self._auto_generate_priority = None
self._owner = None
self._external_id = None
self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="active", remote_name="active", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="default_allow_ip", remote_name="defaultAllowIP", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="default_allow_non_ip", remote_name="defaultAllowNonIP", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="entries", remote_name="entries", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="policy_state", remote_name="policyState", attribute_type=str, is_required=False, is_unique=False, choices=[u'DRAFT', u'LIVE'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="priority_type", remote_name="priorityType", attribute_type=str, is_required=False, is_unique=False, choices=[u'BOTTOM', u'NONE', u'TOP'])
self.expose_attribute(local_name="associated_live_entity_id", remote_name="associatedLiveEntityID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="auto_generate_priority", remote_name="autoGeneratePriority", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.domain_fip_acl_template_entries = NUDomainFIPAclTemplateEntriesFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def name(self):
""" Get name value.
Notes:
The name of the entity
"""
return self._name
@name.setter
def name(self, value):
""" Set name value.
Notes:
The name of the entity
"""
self._name = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def active(self):
""" Get active value.
Notes:
If enabled, it means that this ACL or QOS entry is active
"""
return self._active
@active.setter
def active(self, value):
""" Set active value.
Notes:
If enabled, it means that this ACL or QOS entry is active
"""
self._active = value
@property
def default_allow_ip(self):
""" Get default_allow_ip value.
Notes:
If enabled a default ACL of Allow All is added as the last entry in the list of ACL entries
This attribute is named `defaultAllowIP` in VSD API.
"""
return self._default_allow_ip
@default_allow_ip.setter
def default_allow_ip(self, value):
""" Set default_allow_ip value.
Notes:
If enabled a default ACL of Allow All is added as the last entry in the list of ACL entries
This attribute is named `defaultAllowIP` in VSD API.
"""
self._default_allow_ip = value
@property
def default_allow_non_ip(self):
""" Get default_allow_non_ip value.
Notes:
If enabled, non ip traffic will be dropped
This attribute is named `defaultAllowNonIP` in VSD API.
"""
return self._default_allow_non_ip
@default_allow_non_ip.setter
def default_allow_non_ip(self, value):
""" Set default_allow_non_ip value.
Notes:
If enabled, non ip traffic will be dropped
This attribute is named `defaultAllowNonIP` in VSD API.
"""
self._default_allow_non_ip = value
@property
def description(self):
""" Get description value.
Notes:
A description of the entity
"""
return self._description
@description.setter
def description(self, value):
""" Set description value.
Notes:
A description of the entity
"""
self._description = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
    """Get the entity_scope value.

    Notes:
        Specify if scope of entity is Data center or Enterprise level.
        This attribute is named `entityScope` in the VSD API.
    """
    return self._entity_scope

@entity_scope.setter
def entity_scope(self, value):
    """Set the entity_scope value.

    Notes:
        Specify if scope of entity is Data center or Enterprise level.
        This attribute is named `entityScope` in the VSD API.
    """
    self._entity_scope = value
@property
def entries(self):
    """Get the entries value.

    Notes:
        List of Egress Domain ACL entries associated with this ACL.
    """
    return self._entries

@entries.setter
def entries(self, value):
    """Set the entries value.

    Notes:
        List of Egress Domain ACL entries associated with this ACL.
    """
    self._entries = value
@property
def policy_state(self):
    """Get the policy_state value.

    Notes:
        State of the policy.
        This attribute is named `policyState` in the VSD API.
    """
    return self._policy_state

@policy_state.setter
def policy_state(self, value):
    """Set the policy_state value.

    Notes:
        State of the policy.
        This attribute is named `policyState` in the VSD API.
    """
    self._policy_state = value
@property
def creation_date(self):
    """Get the creation_date value.

    Notes:
        Time stamp when this object was created.
        This attribute is named `creationDate` in the VSD API.
    """
    return self._creation_date

@creation_date.setter
def creation_date(self, value):
    """Set the creation_date value.

    Notes:
        Time stamp when this object was created.
        This attribute is named `creationDate` in the VSD API.
    """
    self._creation_date = value
@property
def priority(self):
    """Get the priority value.

    Notes:
        The priority of the ACL entry that determines the order of entries.
    """
    return self._priority

@priority.setter
def priority(self, value):
    """Set the priority value.

    Notes:
        The priority of the ACL entry that determines the order of entries.
    """
    self._priority = value
@property
def priority_type(self):
    """Get the priority_type value.

    Notes:
        This attribute is named `priorityType` in the VSD API.
        (No further description is provided by the API specification.)
    """
    return self._priority_type

@priority_type.setter
def priority_type(self, value):
    """Set the priority_type value.

    Notes:
        This attribute is named `priorityType` in the VSD API.
        (No further description is provided by the API specification.)
    """
    self._priority_type = value
@property
def associated_live_entity_id(self):
    """Get the associated_live_entity_id value.

    Notes:
        ID of the associated live entity.
        This attribute is named `associatedLiveEntityID` in the VSD API.
    """
    return self._associated_live_entity_id

@associated_live_entity_id.setter
def associated_live_entity_id(self, value):
    """Set the associated_live_entity_id value.

    Notes:
        ID of the associated live entity.
        This attribute is named `associatedLiveEntityID` in the VSD API.
    """
    self._associated_live_entity_id = value
@property
def auto_generate_priority(self):
    """Get the auto_generate_priority value.

    Notes:
        If enabled, entries priority will be randomly generated within the
        allowed range.
        This attribute is named `autoGeneratePriority` in the VSD API.
    """
    return self._auto_generate_priority

@auto_generate_priority.setter
def auto_generate_priority(self, value):
    """Set the auto_generate_priority value.

    Notes:
        If enabled, entries priority will be randomly generated within the
        allowed range.
        This attribute is named `autoGeneratePriority` in the VSD API.
    """
    self._auto_generate_priority = value
@property
def owner(self):
    """Get the owner value.

    Notes:
        Identifies the user that has created this object.
    """
    return self._owner

@owner.setter
def owner(self, value):
    """Set the owner value.

    Notes:
        Identifies the user that has created this object.
    """
    self._owner = value
@property
def external_id(self):
    """Get the external_id value.

    Notes:
        External object ID. Used for integration with third party systems.
        This attribute is named `externalID` in the VSD API.
    """
    return self._external_id

@external_id.setter
def external_id(self, value):
    """Set the external_id value.

    Notes:
        External object ID. Used for integration with third party systems.
        This attribute is named `externalID` in the VSD API.
    """
    self._external_id = value
|
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A collection of projection utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
from lighthouse.geometry import sampling
def inv_depths(start_depth, end_depth, num_depths):
    """Returns reversed, sorted inverse-interpolated depths.

    Args:
        start_depth: The first depth.
        end_depth: The last depth.
        num_depths: Total number of depths to create; start_depth and
            end_depth are always included and the remaining depths are
            interpolated between them in inverse-depth space.

    Returns:
        The depths sorted in descending order (furthest first), which is
        convenient for back-to-front compositing.
    """
    # Interpolate uniformly in inverse depth, then invert back.
    inverse_depths = tf.linspace(1.0 / end_depth, 1.0 / start_depth, num_depths)
    return 1.0 / inverse_depths
def pixel2cam(depth, pixel_coords, intrinsics, is_homogeneous=True):
    """Transforms coordinates in the pixel frame to the camera frame.

    Args:
        depth: [batch, height, width] depth map
        pixel_coords: homogeneous pixel coordinates [batch, 3, height, width]
        intrinsics: camera intrinsics [batch, 3, 3]
        is_homogeneous: if True, append a row of ones so the result is in
            homogeneous coordinates

    Returns:
        Coords in the camera frame [batch, 3 (4 if homogeneous), height, width]
    """
    # Derived from code written by Tinghui Zhou and Shubham Tulsiani
    batch = tf.shape(depth)[0]
    height = tf.shape(depth)[1]
    width = tf.shape(depth)[2]
    # Flatten the spatial dims so back-projection is one batched matmul.
    depth = tf.reshape(depth, [batch, 1, -1])
    pixel_coords = tf.reshape(pixel_coords, [batch, 3, -1])
    # Back-project: X_cam = (K^-1 * x_pixel) * depth
    cam_coords = tf.matmul(tf.matrix_inverse(intrinsics), pixel_coords) * depth
    if is_homogeneous:
        ones = tf.ones([batch, 1, height * width])
        cam_coords = tf.concat([cam_coords, ones], axis=1)
    cam_coords = tf.reshape(cam_coords, [batch, -1, height, width])
    return cam_coords
def cam2pixel(cam_coords, proj):
    """Transforms coordinates in a camera frame to the pixel frame.

    Args:
        cam_coords: homogeneous camera coordinates [batch, 4, height, width]
        proj: projection matrix [batch, 4, 4]

    Returns:
        Pixel coordinates projected from the camera frame
        [batch, height, width, 2]
    """
    # Derived from code written by Tinghui Zhou and Shubham Tulsiani
    batch = tf.shape(cam_coords)[0]
    height = tf.shape(cam_coords)[2]
    width = tf.shape(cam_coords)[3]
    cam_coords = tf.reshape(cam_coords, [batch, 4, -1])
    unnormalized_pixel_coords = tf.matmul(proj, cam_coords)
    x_u = tf.slice(unnormalized_pixel_coords, [0, 0, 0], [-1, 1, -1])
    y_u = tf.slice(unnormalized_pixel_coords, [0, 1, 0], [-1, 1, -1])
    z_u = tf.slice(unnormalized_pixel_coords, [0, 2, 0], [-1, 1, -1])
    # Perspective divide; epsilon guards against division by zero depth.
    x_n = x_u / (z_u + 1e-10)
    y_n = y_u / (z_u + 1e-10)
    pixel_coords = tf.concat([x_n, y_n], axis=1)
    pixel_coords = tf.reshape(pixel_coords, [batch, 2, height, width])
    # Move the coordinate channel last: [batch, height, width, 2].
    return tf.transpose(pixel_coords, perm=[0, 2, 3, 1])
def mpi_resample_cube(mpi, tgt, intrinsics, depth_planes, side_length,
                      cube_res):
    """Resample MPI onto cube centered at target point.

    Args:
        mpi: [B,H,W,D,C], input MPI
        tgt: [B,3], [x,y,z] coordinates for cube center (in reference/mpi frame)
        intrinsics: [B,3,3], MPI reference camera intrinsics
        depth_planes: [D] depth values for MPI planes
        side_length: metric side length of cube
        cube_res: resolution of each cube dimension

    Returns:
        resampled: [B, cube_res, cube_res, cube_res, C]
        coords_proj: the MPI sampling coordinates used for the gather
            (batch index + pixel x/y + fractional depth-plane index)
    """
    batch_size = tf.shape(mpi)[0]
    num_depths = tf.shape(mpi)[3]
    # compute MPI world coordinates
    # NOTE(review): this value is overwritten below before any use (dead store).
    intrinsics_tile = tf.tile(intrinsics, [num_depths, 1, 1])
    # create cube coordinates: a regular grid of side `side_length`
    b_vals = tf.to_float(tf.range(batch_size))
    x_vals = tf.linspace(-side_length / 2.0, side_length / 2.0, cube_res)
    y_vals = tf.linspace(-side_length / 2.0, side_length / 2.0, cube_res)
    z_vals = tf.linspace(side_length / 2.0, -side_length / 2.0, cube_res)
    b, y, x, z = tf.meshgrid(b_vals, y_vals, x_vals, z_vals, indexing='ij')
    # shift the grid so it is centered on the target point
    x = x + tgt[:, 0, tf.newaxis, tf.newaxis, tf.newaxis]
    y = y + tgt[:, 1, tf.newaxis, tf.newaxis, tf.newaxis]
    z = z + tgt[:, 2, tf.newaxis, tf.newaxis, tf.newaxis]
    ones = tf.ones_like(x)
    coords = tf.stack([x, y, z, ones], axis=1)
    coords_r = tf.reshape(
        tf.transpose(coords, [0, 4, 1, 2, 3]),
        [batch_size * cube_res, 4, cube_res, cube_res])
    # store elements with negative z vals for projection
    bad_inds = tf.less(z, 0.0)
    # project into reference camera to transform coordinates into MPI indices
    filler = tf.constant([0.0, 0.0, 0.0, 1.0], shape=[1, 1, 4])
    filler = tf.tile(filler, [batch_size * cube_res, 1, 1])
    intrinsics_tile = tf.tile(intrinsics, [cube_res, 1, 1])
    # build a 4x4 projection from the 3x3 intrinsics (pad with zeros + filler)
    intrinsics_tile_4 = tf.concat(
        [intrinsics_tile,
         tf.zeros([batch_size * cube_res, 3, 1])], axis=2)
    intrinsics_tile_4 = tf.concat([intrinsics_tile_4, filler], axis=1)
    coords_proj = cam2pixel(coords_r, intrinsics_tile_4)
    # map metric depth to a fractional MPI plane index in inverse-depth space
    coords_depths = tf.transpose(coords_r[:, 2:3, :, :], [0, 2, 3, 1])
    coords_depth_inds = (tf.to_float(num_depths) - 1) * (
        (1.0 / coords_depths) -
        (1.0 / depth_planes[0])) / ((1.0 / depth_planes[-1]) -
                                    (1.0 / depth_planes[0]))
    coords_proj = tf.concat([coords_proj, coords_depth_inds], axis=3)
    coords_proj = tf.transpose(
        tf.reshape(coords_proj, [batch_size, cube_res, cube_res, cube_res, 3]),
        [0, 2, 3, 1, 4])
    # prepend batch indices so the gather has full [b, x, y, d] coordinates
    coords_proj = tf.concat([b[:, :, :, :, tf.newaxis], coords_proj], axis=4)
    # trilinear interpolation gather from MPI
    # interpolate pre-multiplied RGBAs, then un-pre-multiply
    mpi_alpha = mpi[Ellipsis, -1:]
    mpi_channels_p = mpi[Ellipsis, :-1] * mpi_alpha
    mpi_p = tf.concat([mpi_channels_p, mpi_alpha], axis=-1)
    resampled_p = sampling.trilerp_gather(mpi_p, coords_proj, bad_inds)
    resampled_alpha = tf.clip_by_value(resampled_p[Ellipsis, -1:], 0.0, 1.0)
    # epsilon avoids division by zero where alpha interpolates to ~0
    resampled_channels = resampled_p[Ellipsis, :-1] / (resampled_alpha + 1e-8)
    resampled = tf.concat([resampled_channels, resampled_alpha], axis=-1)
    return resampled, coords_proj
def spherical_cubevol_resample(vol, env2ref, cube_center, side_length, n_phi,
                               n_theta, n_r):
    """Resample cube volume onto spherical coordinates centered at target point.

    Args:
        vol: [B,H,W,D,C], input volume
        env2ref: [B,4,4], relative pose transformation (transform env to ref)
        cube_center: [B,3], [x,y,z] coordinates for center of cube volume
        side_length: side length of cube
        n_phi: number of samples along vertical spherical coordinate dim
        n_theta: number of samples along horizontal spherical coordinate dim
        n_r: number of samples along radius spherical coordinate dim

    Returns:
        resampled: [B, n_phi, n_theta, n_r, C]
        r_vals: the radii sampled, ordered from max_r down to min_r
    """
    batch_size = tf.shape(vol)[0]
    height = tf.shape(vol)[1]
    cube_res = tf.to_float(height)
    # create spherical coordinates
    b_vals = tf.to_float(tf.range(batch_size))
    phi_vals = tf.linspace(0.0, np.pi, n_phi)
    theta_vals = tf.linspace(1.5 * np.pi, -0.5 * np.pi, n_theta)
    # compute radii to use: span the distances from the env origin
    # (translation of env2ref) to all cube voxel centers
    x_vals = tf.linspace(-side_length / 2.0, side_length / 2.0,
                         tf.to_int32(cube_res))
    y_vals = tf.linspace(-side_length / 2.0, side_length / 2.0,
                         tf.to_int32(cube_res))
    z_vals = tf.linspace(side_length / 2.0, -side_length / 2.0,
                         tf.to_int32(cube_res))
    y_c, x_c, z_c = tf.meshgrid(y_vals, x_vals, z_vals, indexing='ij')
    x_c = x_c + cube_center[:, 0, tf.newaxis, tf.newaxis, tf.newaxis]
    y_c = y_c + cube_center[:, 1, tf.newaxis, tf.newaxis, tf.newaxis]
    z_c = z_c + cube_center[:, 2, tf.newaxis, tf.newaxis, tf.newaxis]
    cube_coords = tf.stack([x_c, y_c, z_c], axis=4)
    min_r = tf.reduce_min(
        tf.norm(
            cube_coords -
            env2ref[:, :3, 3][:, tf.newaxis, tf.newaxis, tf.newaxis, :],
            axis=4),
        axis=[0, 1, 2, 3])  # side_length / cube_res
    max_r = tf.reduce_max(
        tf.norm(
            cube_coords -
            env2ref[:, :3, 3][:, tf.newaxis, tf.newaxis, tf.newaxis, :],
            axis=4),
        axis=[0, 1, 2, 3])
    # radii descend from max_r to min_r (outermost shell first)
    r_vals = tf.linspace(max_r, min_r, n_r)
    b, phi, theta, r = tf.meshgrid(
        b_vals, phi_vals, theta_vals, r_vals,
        indexing='ij')  # currently in env frame
    # transform spherical coordinates into cartesian
    # (currently in env frame, z points forwards)
    x = r * tf.cos(theta) * tf.sin(phi)
    z = r * tf.sin(theta) * tf.sin(phi)
    y = r * tf.cos(phi)
    # transform coordinates into ref frame
    sphere_coords = tf.stack([x, y, z, tf.ones_like(x)], axis=-1)[Ellipsis, tf.newaxis]
    sphere_coords_ref = tfmm(env2ref, sphere_coords)
    x = sphere_coords_ref[Ellipsis, 0, 0]
    y = sphere_coords_ref[Ellipsis, 1, 0]
    z = sphere_coords_ref[Ellipsis, 2, 0]
    # transform metric coordinates into (fractional) vol voxel indices;
    # y and z are negated because the voxel grid runs top-down/front-back
    x_inds = (x - cube_center[:, 0, tf.newaxis, tf.newaxis, tf.newaxis] +
              side_length / 2.0) * ((cube_res - 1) / side_length)
    y_inds = -(y - cube_center[:, 1, tf.newaxis, tf.newaxis, tf.newaxis] -
               side_length / 2.0) * ((cube_res - 1) / side_length)
    z_inds = -(z - cube_center[:, 2, tf.newaxis, tf.newaxis, tf.newaxis] -
               side_length / 2.0) * ((cube_res - 1) / side_length)
    sphere_coords_inds = tf.stack([b, x_inds, y_inds, z_inds], axis=-1)
    # trilinear interpolation gather from volume
    # interpolate pre-multiplied RGBAs, then un-pre-multiply
    vol_alpha = tf.clip_by_value(vol[Ellipsis, -1:], 0.0, 1.0)
    vol_channels_p = vol[Ellipsis, :-1] * vol_alpha
    vol_p = tf.concat([vol_channels_p, vol_alpha], axis=-1)
    resampled_p = sampling.trilerp_gather(vol_p, sphere_coords_inds)
    resampled_alpha = resampled_p[Ellipsis, -1:]
    # epsilon avoids division by zero where alpha interpolates to ~0
    resampled_channels = resampled_p[Ellipsis, :-1] / (resampled_alpha + 1e-8)
    resampled = tf.concat([resampled_channels, resampled_alpha], axis=-1)
    return resampled, r_vals
def over_composite(rgbas):
    """Combines RGBA layers using the over operation.

    Layers are composited from back to front (back is index 0 along the
    depth axis, axis 3) with the standard over operation.

    Args:
        rgbas: [batch, H, W, D, 4] tensor of RGBA layers, ordered back to
            front along axis 3.

    Returns:
        [batch, H, W, 4] tensor: composited RGB concatenated with the
        accumulated alpha.
    """
    alphas = rgbas[:, :, :, :, -1:]
    colors = rgbas[:, :, :, :, :-1]
    # Per-layer contribution = own alpha times the product of (1 - alpha)
    # of every layer in front of it (exclusive reversed cumprod). The
    # epsilon keeps the cumprod gradient stable when alpha is exactly 1.
    transmittance = tf.cumprod(
        1.0 - alphas + 1.0e-8, axis=3, exclusive=True, reverse=True) * alphas
    output = tf.reduce_sum(transmittance * colors, axis=3)
    accum_alpha = tf.reduce_sum(transmittance, axis=3)
    return tf.concat([output, accum_alpha], axis=3)
def interleave_shells(shells, radii):
    """Order spherical shell tensors out-to-in (descending radius) along axis 3."""
    order = tf.argsort(radii, direction='DESCENDING')
    return tf.gather(shells, order, axis=3)
# To complete this codebase, you must copy lines 6-191 from
# https://github.com/Fyusion/LLFF/blob/master/llff/math/mpi_math.py
# to here. Some incomplete function stubs are provided to suppress python lint
# errors.
def tfmm(a_mat, b_mat):
    """Redefined tensorflow matrix multiply (broken).

    Stub standing in for the full LLFF implementation; as written it simply
    delegates to tf.linalg.matmul on the two operands.
    """
    return tf.linalg.matmul(a_mat, b_mat)
|
|
import numpy as np
import matplotlib.pyplot as pylab
from matplotlib.widgets import Slider
pylab.rcParams['image.interpolation'] = 'sinc'
#==================================================
# display tools
#==================================================
class KeyHandler(object):
    '''
    Main drawing class for the interactive seismic viewer.

    Instances are connected as matplotlib key_press_event callbacks; the
    arrow keys page through ensembles and adjust the display clip.

    NOTE: uses Python 2 print statements; this module is Python 2 only.
    '''
    def __init__(self, fig, ax, dataset, kwargs):
        self.fig = fig
        self.ax = ax
        self.kwargs = kwargs
        self.dataset = dataset
        self.start = 0  # index of the currently displayed ensemble key
        # default 'wiggle' to False when absent or falsy
        try:
            assert kwargs['wiggle']
        except:
            kwargs['wiggle'] = False
        if kwargs['primary'] == None:
            # no primary key: display the whole dataset as a single panel
            self.slice = self.dataset
        else:
            # unique primary-key values define the ensembles;
            # 'step' subsamples them
            keys = np.unique(dataset[kwargs['primary']])
            self.keys = keys[::kwargs['step']]
            self.nkeys = self.keys.size
            self.ensemble()
        # display clip: explicit 'clip' kwarg wins, otherwise use
        # 4x the median absolute trace amplitude
        if 'clip' in kwargs and kwargs['clip'] != 0:
            self.clip = kwargs['clip']
        else:
            self.clip = np.median(np.abs(self.dataset['trace']))*4.0
        if kwargs['wiggle'] == True: self.clip /= 8.0
        print 'PySeis Seismic Viewer'
        print 'type "h" for help'
        self.fig.tight_layout()
        self.draw()
    def __call__(self, e):
        # matplotlib key_press_event handler
        print e.xdata, e.ydata
        if e.key == "right":
            self.start += 1
            self.ensemble()
        elif e.key == "left":
            self.start -= 1
            self.ensemble()
        elif e.key == "up":
            # hotter: smaller clip -> higher displayed contrast
            self.clip /= 1.1
            print self.clip
        elif e.key == "down":
            # colder: larger clip -> lower displayed contrast
            self.clip *= 1.1
            print self.clip
        elif e.key == "h":
            print "right arrow: next gather"
            print "left arrow: last gather"
            print "up arrow: hotter"
            print "down arrow: colder"
            print "clip=", self.clip
        else:
            return  # ignore unmapped keys without redrawing
        self.draw()
    def draw(self):
        # dispatch to wiggle or raster display; fall back to raster when
        # 'wiggle' is missing from kwargs
        try:
            if self.kwargs['wiggle'] == True: self.wiggle()
            else: self.ximage()
        except KeyError: self.ximage()
    def ximage(self):
        # variable-density (raster) display of the current slice
        self.ax.cla()
        self.im = self.ax.imshow(self.slice['trace'].T, aspect='auto', cmap='RdGy', vmax =self.clip, vmin=-1*self.clip)
        try:
            self.ax.set_title('%s = %d' %(self.kwargs['primary'], self.keys[self.start]))
        except AttributeError:
            pass  # no primary key: self.keys was never set
        self.fig.tight_layout()
        self.fig.canvas.draw()
    def wiggle(self, scale=0.05):
        # wiggle-trace display with variable-area fill of positive lobes
        self.ax.cla()
        frame = self.slice
        ns = frame['ns'][0]
        nt = frame.size
        scalar = scale*frame.size/(frame.size*self.clip) #scales the trace amplitudes relative to the number of traces
        frame['trace'][:,-1] = np.nan #set the very last value to nan. this is a lazy way to prevent wrapping
        vals = frame['trace'].ravel() #flat view of the 2d array.
        vect = np.arange(vals.size).astype(np.float) #flat index array, for correctly locating zero crossings in the flat view
        crossing = np.where(np.diff(np.signbit(vals)))[0] #index before zero crossing
        #use linear interpolation to find the zero crossing, i.e. y = mx + c.
        x1= vals[crossing]
        x2 = vals[crossing+1]
        y1 = vect[crossing]
        y2 = vect[crossing+1]
        m = (y2 - y1)/(x2-x1)
        c = y1 - m*x1
        #tack these values onto the end of the existing data
        x = np.hstack([vals, np.zeros_like(c)])
        y = np.hstack([vect, c])
        #resort the data
        order = np.argsort(y)
        #shift from amplitudes to plotting coordinates
        x_shift, y = y[order].__divmod__(ns)
        self.ax.plot(x[order] *scalar + x_shift + 1, y, 'k')
        # mask negative lobes so only the positive lobes get filled
        x[x<0] = np.nan
        x = x[order] *scalar + x_shift + 1
        self.ax.fill(x,y, 'k', aa=True)
        self.ax.set_xlim([0,nt])
        self.ax.set_ylim([ns,0])
        try:
            self.ax.set_title('%s = %d' %(self.kwargs['primary'], self.keys[self.start]))
        except AttributeError:
            pass  # no primary key: self.keys was never set
        self.fig.tight_layout()
        self.fig.canvas.draw()
    def ensemble(self):
        # select traces whose primary-key value matches the current key;
        # wrap back to the first ensemble when stepping past the end
        try:
            self.slice = self.dataset[self.dataset[self.kwargs['primary']] == self.keys[self.start]]
        except IndexError:
            self.start = 0
def display(dataset, **kwargs):
    '''
    Iterate through a dataset with the left and right arrow keys.

    parameters required:
        primary key
        secondary key
        step size
    '''
    fig = pylab.figure()
    ax = fig.add_subplot(111)
    handler = KeyHandler(fig, ax, dataset, kwargs)
    fig.canvas.mpl_connect('key_press_event', handler)
    return fig
def scan(dataset):
    '''
    Scans dataset and generates a printed list of all the header ranges.

    Needs to be reformated so it returns a data object such as a dictionary.
    NOTE: uses Python 2 print statements; this module is Python 2 only.
    '''
    print " %0-35s: %0-15s %s" %('key', 'min', 'max')
    print "========================================="
    # np.result_type(dataset).descr yields (name, format) pairs for the fields
    for key in np.result_type(dataset).descr:
        a = np.amin(dataset[key[0]])
        b = np.amax(dataset[key[0]])
        # only report fields that are populated (both range endpoints non-zero)
        if (a != 0) and (b != 0):
            print "%0-35s %0-15.3f %.3f" %(key, a, b)
    print "========================================="
#~ def build_vels(times, velocities, ns=1000, dt=0.001):
#~ '''builds a full velocity trace from a list of vels and times'''
#~ tx = np.linspace(dt, dt*ns, ns)
#~ vels = np.interp(tx, times, velocities)
#~ vels = np.pad(vels, (100,100), 'reflect')
#~ vels = np.convolve(np.ones(100.0)/100.0, vels, mode='same')
#~ vels = vels[100:-100]
#~ return vels
def cp(workspace, **params):
    '''Identity processing step: return the workspace unchanged.'''
    return workspace
def agc(workspace, **params):
    '''
    Automatic gain control: normalise each trace by a moving average of
    its absolute amplitude, then scale the whole panel to unit maximum.

    inputs:
        workspace: object with a `data` mapping containing a 2-D 'trace' array
        window: AGC window length in samples (keyword, default 100)

    returns:
        the modified workspace.data (traces are changed in place)
    '''
    # params.get replaces the original bare `except:`, which silently
    # swallowed every error (not just a missing 'window' key).
    window = params.get('window', 100)
    vec = np.ones(window, 'f')
    # moving average of absolute amplitude along each trace
    func = np.apply_along_axis(lambda m: np.convolve(np.abs(m), vec, mode='same'), axis=-1, arr=workspace.data['trace'])
    workspace.data['trace'] /= func
    # zero out NaN/inf produced by division where the envelope is zero
    workspace.data['trace'][~np.isfinite(workspace.data['trace'])] = 0
    workspace.data['trace'] /= np.amax(np.abs(workspace.data['trace']))
    return workspace.data
def ricker(f, length=0.512, dt=0.001):
t = np.linspace(-length/2, (length-dt)/2, length/dt)
y = (1.0 - 2.0*(np.pi**2)*(f**2)*(t**2)) * np.exp(-(np.pi**2)*(f**2)*(t**2))
y = np.around(y, 10)
inds = np.nonzero(y)[0]
return y[np.amin(inds):np.amax(inds)]
def conv(workspace, wavelet):
    '''Convolve every trace in the workspace with `wavelet` (same-length output).'''
    convolve_trace = lambda trace: np.convolve(trace, wavelet, mode='same')
    workspace['trace'] = np.apply_along_axis(convolve_trace, axis=-1, arr=workspace['trace'])
    return workspace
def fx(workspace, **params):
    '''
    Display an F-X (frequency vs. CDP) amplitude spectrum of the workspace.

    params must contain 'ns' (samples per trace) and 'dt' (sample
    interval in seconds).
    NOTE: uses a Python 2 print statement; this module is Python 2 only.
    '''
    f = np.abs(np.fft.rfft(workspace['trace'], axis=-1))
    # normalise each trace spectrum by its mean amplitude
    correction = np.mean(np.abs(f), axis=-1).reshape(-1,1)
    f /= correction
    # convert to dB and reverse the frequency axis for display
    f = 20.0*np.log10(f)[:,::-1]
    freq = np.fft.rfftfreq(params['ns'], params['dt'])
    print params['ns'], params['dt']
    # axis extents: CDP range horizontally, frequency range vertically
    hmin = np.amin(workspace['cdp'])
    hmax = np.amax(workspace['cdp'])
    vmin = np.amin(freq)
    vmax = np.amax(freq)
    extent=[hmin,hmax,vmin,vmax]
    pylab.imshow(f.T, aspect='auto', extent=extent)
def db(data):
    '''Convert a linear amplitude to decibels (20 * log10).'''
    return np.log10(data) * 20.0
def slice(workspace, **params):
    '''Re-cast the workspace to an SU dtype with `ns` samples per trace.

    params must contain 'ns'. NOTE: shadows the builtin `slice`.
    '''
    ns = params['ns']
    workspace = workspace.astype(typeSU(ns)).copy()
    workspace['ns'] = ns
    return workspace
def build_mask(data, keys):
    '''Return a boolean mask marking elements of `data` equal to any of `keys`.

    inputs:
        data: numpy array of header values
        keys: iterable of values to match

    returns:
        boolean numpy array, True where data matches one of the keys
    '''
    # Start from an all-False mask. The original used `data == False`,
    # which elementwise marks every zero-valued entry True regardless of
    # the requested keys.
    mask = np.zeros(np.shape(data), dtype=bool)
    for key in keys:
        mask = mask | (data == key)
    return mask
import numpy as np
# Seismic Unix (SU) 240-byte trace header layout, one field per header word.
# Byte-offset comments on the later fields are from the SU documentation.
# NOTE(review): this re-import of numpy duplicates the module-level import.
su_header_dtype = np.dtype([
    ('tracl', np.int32),
    ('tracr', np.int32),
    ('fldr', np.int32),
    ('tracf', np.int32),
    ('ep', np.int32),
    ('cdp', np.int32),
    ('cdpt', np.int32),
    ('trid', np.int16),
    ('nvs', np.int16),
    ('nhs', np.int16),
    ('duse', np.int16),
    ('offset', np.int32),
    ('gelev', np.int32),
    ('selev', np.int32),
    ('sdepth', np.int32),
    ('gdel', np.int32),
    ('sdel', np.int32),
    ('swdep', np.int32),
    ('gwdep', np.int32),
    ('scalel', np.int16),
    ('scalco', np.int16),
    # source/group coordinates
    ('sx', np.int32),
    ('sy', np.int32),
    ('gx', np.int32),
    ('gy', np.int32),
    ('counit', np.int16),
    ('wevel', np.int16),
    ('swevel', np.int16),
    ('sut', np.int16),
    ('gut', np.int16),
    ('sstat', np.int16),
    ('gstat', np.int16),
    ('tstat', np.int16),
    ('laga', np.int16),
    ('lagb', np.int16),
    ('delrt', np.int16),
    ('muts', np.int16),
    ('mute', np.int16),
    # ns/dt: samples per trace and sample interval (microseconds)
    ('ns', np.uint16),
    ('dt', np.uint16),
    ('gain', np.int16),
    ('igc', np.int16),
    ('igi', np.int16),
    ('corr', np.int16),
    ('sfs', np.int16),
    ('sfe', np.int16),
    ('slen', np.int16),
    ('styp', np.int16),
    ('stas', np.int16),
    ('stae', np.int16),
    ('tatyp', np.int16),
    ('afilf', np.int16),
    ('afils', np.int16),
    ('nofilf', np.int16),
    ('nofils', np.int16),
    ('lcf', np.int16),
    ('hcf', np.int16),
    ('lcs', np.int16),
    ('hcs', np.int16),
    ('year', np.int16),
    ('day', np.int16),
    ('hour', np.int16),
    ('minute', np.int16),
    ('sec', np.int16),
    ('timebas', np.int16),
    ('trwf', np.int16),
    ('grnors', np.int16),
    ('grnofr', np.int16),
    ('grnlof', np.int16),
    ('gaps', np.int16),
    ('otrav', np.int16), #179,180
    ('d1', np.float32), #181,184
    ('f1', np.float32), #185,188
    ('d2', np.float32), #189,192
    ('f2', np.float32), #193, 196
    ('ShotPoint', np.int32), #197,200
    ('unscale', np.int16), #201, 204
    ('TraceValueMeasurementUnit', np.int16),
    ('TransductionConstantMantissa', np.int32),
    ('TransductionConstantPower', np.int16),
    ('TransductionUnit', np.int16),
    ('TraceIdentifier', np.int16),
    ('ScalarTraceHeader', np.int16),
    ('SourceType', np.int16),
    ('SourceEnergyDirectionMantissa', np.int32),
    ('SourceEnergyDirectionExponent', np.int16),
    ('SourceMeasurementMantissa', np.int32),
    ('SourceMeasurementExponent', np.int16),
    ('SourceMeasurementUnit', np.int16),
    ('UnassignedInt1', np.int32),
    ('ns1', np.int32),
    ])
def typeSU(ns):
    '''Return the full SU trace dtype: header fields plus an ns-sample trace.'''
    fields = su_header_dtype.descr + [('trace', ('<f4', ns))]
    return np.dtype(fields)
def readSUheader(filename):
    '''Read the first SU trace header from `filename`.

    returns:
        a 1-element numpy record array with dtype `su_header_dtype`
    '''
    # Context manager avoids leaking the file handle; np.frombuffer
    # replaces np.fromstring, which was deprecated and later removed.
    with open(filename, 'rb') as f:
        raw = f.read()
    return np.frombuffer(raw, dtype=su_header_dtype, count=1)
def read(filename=None):
    '''Read SU data from `filename`, or from stdin when no filename is given.

    returns:
        a numpy record array of SU traces (see readData)
    '''
    # Local import: this module never imports sys at top level, so the
    # original stdin branch raised NameError.
    import sys
    if filename is None:
        raw = sys.stdin.read()
    else:
        # context manager avoids leaking the file handle
        with open(filename, 'rb') as f:
            raw = f.read()
    return readData(raw)
def readData(raw):
    '''Parse a raw SU byte string into a numpy record array of traces.

    The sample count `ns` is read from the first trace header and is
    assumed constant across the whole file.
    '''
    # np.frombuffer replaces the removed np.fromstring; .copy() restores
    # fromstring's behaviour of returning a writable array, since callers
    # modify the traces in place.
    su_header = np.frombuffer(raw, dtype=su_header_dtype, count=1)
    ns = su_header['ns'][0]
    file_dtype = typeSU(ns)
    data = np.frombuffer(raw, dtype=file_dtype).copy()
    return data
def write(data, filename=None):
    '''Write an SU record array to `filename`, or to stdout when no
    filename is given.'''
    # Local import: this module never imports sys at top level, so the
    # original stdout branch raised NameError.
    import sys
    if filename is None:
        data.tofile(sys.stdout)
    else:
        data.tofile(filename)
|
|
from math import isnan
import itertools
import numpy as np
from AnyQt.QtWidgets import QTableView, QItemDelegate, QHeaderView
from AnyQt.QtGui import QColor, QPen, QBrush
from AnyQt.QtCore import Qt, QAbstractTableModel, QModelIndex, \
QItemSelectionModel, QItemSelection, QSize
from Orange.data import Table, Variable, ContinuousVariable, DiscreteVariable
from Orange.misc import DistMatrix
from Orange.widgets import widget, gui
from Orange.widgets.data.owtable import ranges
from Orange.widgets.gui import OrangeUserRole
from Orange.widgets.settings import Setting, ContextSetting, ContextHandler
from Orange.widgets.utils.colorpalette import ContinuousPaletteGenerator
from Orange.widgets.utils.itemmodels import VariableListModel
class DistanceMatrixModel(QAbstractTableModel):
    """Qt table model serving a square distance matrix with colored cells
    and (optionally) colored, labeled headers."""

    def __init__(self):
        super().__init__()
        self.distances = None
        self.fact = 70
        self.labels = None
        self.colors = None       # per-cell saturation values in [0, 170]
        self.variable = None     # variable used for label coloring, if any
        self.values = None       # that variable's column values
        self.label_colors = None
        self.zero_diag = True    # hide the diagonal when it is all ~0

    def set_data(self, distances):
        """Set the distance matrix and precompute per-cell color saturation."""
        self.beginResetModel()
        self.distances = distances
        if distances is None:
            # begin/endResetModel must stay paired; the original returned
            # early here and left the model stuck mid-reset.
            self.endResetModel()
            return
        span = distances.max()
        # Scale distances into [0, 170] for the HSV saturation channel.
        # `np.int` was removed in NumPy 1.24; use the builtin int dtype.
        self.colors = \
            (distances * (170 / span if span > 1e-10 else 0)).astype(int)
        self.zero_diag = all(distances.diagonal() < 1e-6)
        self.endResetModel()

    def set_labels(self, labels, variable=None, values=None):
        """Set header labels and, for a continuous variable, label colors."""
        self.beginResetModel()
        self.labels = labels
        self.variable = variable
        self.values = values
        if isinstance(variable, ContinuousVariable):
            palette = ContinuousPaletteGenerator(*variable.colors)
            off, m = values.min(), values.max()
            # fact is False (== 0) when all values are equal, mapping every
            # value onto the palette's first color.
            fact = off != m and 1 / (m - off)
            self.label_colors = [palette[x] if not isnan(x) else Qt.lightGray
                                 for x in (values - off) * fact]
        else:
            self.label_colors = None
        self.endResetModel()

    def dimension(self, parent=None):
        """Row and column count: the matrix is square."""
        if parent and parent.isValid() or self.distances is None:
            return 0
        return len(self.distances)

    columnCount = rowCount = dimension

    def color_for_label(self, ind, light=100):
        """Brush for the header label at `ind`, derived from the variable."""
        color = Qt.lightGray
        if isinstance(self.variable, ContinuousVariable):
            color = self.label_colors[ind].lighter(light)
        elif isinstance(self.variable, DiscreteVariable):
            value = self.values[ind]
            if not isnan(value):
                color = QColor(*self.variable.colors[int(value)])
        return QBrush(color)

    def color_for_cell(self, row, col):
        """Brush for a matrix cell: green hue, saturation from the distance."""
        return QBrush(QColor.fromHsv(120, self.colors[row, col], 255))

    def data(self, index, role=Qt.DisplayRole):
        if role == Qt.TextAlignmentRole:
            return Qt.AlignRight | Qt.AlignVCenter
        row, col = index.row(), index.column()
        if self.distances is None:
            return
        if role == TableBorderItem.BorderColorRole:
            return self.color_for_label(col), self.color_for_label(row)
        if row == col and self.zero_diag:
            # blank out the (trivial) diagonal, keeping a light background
            if role == Qt.BackgroundColorRole and self.variable:
                return self.color_for_label(row, 200)
            return
        if role == Qt.DisplayRole:
            return "{:.3f}".format(self.distances[row, col])
        if role == Qt.BackgroundColorRole:
            return self.color_for_cell(row, col)

    def headerData(self, ind, orientation, role):
        if not self.labels:
            return
        if role == Qt.DisplayRole and ind < len(self.labels):
            return self.labels[ind]
        # On some systems, Qt doesn't respect the following role in the header
        if role == Qt.BackgroundRole:
            return self.color_for_label(ind, 200)
class TableBorderItem(QItemDelegate):
    """Item delegate that draws colored left/top borders on matrix cells."""
    # Custom data role carrying a (vertical, horizontal) border color pair.
    BorderColorRole = next(OrangeUserRole)
    def paint(self, painter, option, index):
        super().paint(painter, option, index)
        colors = index.data(self.BorderColorRole)
        vcolor, hcolor = colors or (None, None)
        if vcolor is not None or hcolor is not None:
            painter.save()
            x1, y1, x2, y2 = option.rect.getCoords()
            if vcolor is not None:
                # left edge of the cell
                painter.setPen(
                    QPen(QBrush(vcolor), 1, Qt.SolidLine, Qt.RoundCap))
                painter.drawLine(x1, y1, x1, y2)
            if hcolor is not None:
                # top edge of the cell
                painter.setPen(
                    QPen(QBrush(hcolor), 1, Qt.SolidLine, Qt.RoundCap))
                painter.drawLine(x1, y1, x2, y1)
            painter.restore()
class SymmetricSelectionModel(QItemSelectionModel):
    """Selection model that keeps the selection symmetric about the
    diagonal: selecting any cell selects the full cross-product of the
    touched row/column indices."""
    def select(self, selection, flags):
        if isinstance(selection, QModelIndex):
            selection = QItemSelection(selection, selection)
        model = self.model()
        indexes = selection.indexes()
        # all row/column indices touched by the incoming selection
        sel_inds = {ind.row() for ind in indexes} | \
                   {ind.column() for ind in indexes}
        if flags == QItemSelectionModel.ClearAndSelect:
            selected = set()
        else:
            # selection is symmetric, so rows alone identify the items
            selected = {ind.row() for ind in self.selectedIndexes()}
        if flags & QItemSelectionModel.Select:
            selected |= sel_inds
        elif flags & QItemSelectionModel.Deselect:
            selected -= sel_inds
        # rebuild the full symmetric selection from the index set
        new_selection = QItemSelection()
        regions = list(ranges(sorted(selected)))
        for r_start, r_end in regions:
            for c_start, c_end in regions:
                top_left = model.index(r_start, c_start)
                bottom_right = model.index(r_end - 1, c_end - 1)
                new_selection.select(top_left, bottom_right)
        QItemSelectionModel.select(self, new_selection,
                                   QItemSelectionModel.ClearAndSelect)
    def selected_items(self):
        # the selection is symmetric, so the set of rows identifies items
        return list({ind.row() for ind in self.selectedIndexes()})
    def set_selected_items(self, inds):
        # select the diagonal cells; select() expands to the full blocks
        index = self.model().index
        selection = QItemSelection()
        for i in inds:
            selection.select(index(i, i), index(i, i))
        self.select(selection, QItemSelectionModel.ClearAndSelect)
class DistanceMatrixContextHandler(ContextHandler):
    """Context handler matching stored settings to a distance matrix by its
    dimension and available annotation names."""
    @staticmethod
    def _var_names(annotations):
        # normalize annotation entries to plain names
        return [a.name if isinstance(a, Variable) else a for a in annotations]
    def new_context(self, matrix, annotations):
        context = super().new_context()
        context.dim = matrix.shape[0]
        context.annotations = self._var_names(annotations)
        # default annotation is the second entry ("Enumeration")
        context.annotation = context.annotations[1]
        context.selection = []
        return context
    # noinspection PyMethodOverriding
    def match(self, context, matrix, annotations):
        # 0 = no match; 1 = partial (dim + annotation present);
        # 2 = perfect (identical annotation list)
        annotations = self._var_names(annotations)
        if context.dim != matrix.shape[0] or \
                context.annotation not in annotations:
            return 0
        return 1 + (context.annotations == annotations)
    def settings_from_widget(self, widget, *args):
        # snapshot the widget's current annotation and selection
        context = widget.current_context
        if context is not None:
            context.annotation = widget.annot_combo.currentText()
            context.selection = widget.tableview.selectionModel().selected_items()
    def settings_to_widget(self, widget, *args):
        # restore annotation choice and selection from the context
        context = widget.current_context
        widget.annotation_idx = context.annotations.index(context.annotation)
        widget.tableview.selectionModel().set_selected_items(context.selection)
class OWDistanceMatrix(widget.OWWidget):
    """Widget that shows a distance matrix in a table view.

    A symmetric row/column selection can be sent downstream as a
    submatrix of the distances and, when the input has row items,
    as the corresponding data table.
    """

    name = "Distance Matrix"
    description = "View distance matrix."
    icon = "icons/DistanceMatrix.svg"
    priority = 200

    inputs = [("Distances", DistMatrix, "set_distances")]
    outputs = [("Distances", DistMatrix),
               ("Table", Table)]

    settingsHandler = DistanceMatrixContextHandler()
    # Commit the selection automatically on every change.
    auto_commit = Setting(True)
    # Index into annot_combo's model: 0 = no labels, 1 = enumeration;
    # further entries depend on the input (attribute names / variables).
    annotation_idx = ContextSetting(1)
    selection = ContextSetting([])

    want_control_area = False

    def __init__(self):
        super().__init__()
        self.distances = None
        self.items = None

        self.tablemodel = DistanceMatrixModel()
        view = self.tableview = QTableView()
        view.setEditTriggers(QTableView.NoEditTriggers)
        view.setItemDelegate(TableBorderItem())
        view.setModel(self.tablemodel)
        view.setShowGrid(False)
        for header in (view.horizontalHeader(), view.verticalHeader()):
            header.setSectionResizeMode(QHeaderView.ResizeToContents)
            header.setHighlightSections(True)
            header.setSectionsClickable(False)
        view.verticalHeader().setDefaultAlignment(
            Qt.AlignRight | Qt.AlignVCenter)
        # Selection is kept symmetric: selecting (i, j) selects (j, i).
        selmodel = SymmetricSelectionModel(view.model(), view)
        view.setSelectionModel(selmodel)
        view.setSelectionBehavior(QTableView.SelectItems)
        self.mainArea.layout().addWidget(view)

        settings_box = gui.hBox(self.mainArea)
        self.annot_combo = gui.comboBox(
            settings_box, self, "annotation_idx", label="Labels: ",
            orientation=Qt.Horizontal,
            callback=self._invalidate_annotations, contentsLength=12)
        self.annot_combo.setModel(VariableListModel())
        # Placeholder entries; replaced in set_distances().
        # NOTE(review): set_distances() uses "Enumerate" for the same entry
        # ("Enumeration" here) -- confirm which label is intended.
        self.annot_combo.model()[:] = ["None", "Enumeration"]
        gui.rubber(settings_box)
        settings_box.layout().addWidget(self.report_button)
        gui.separator(settings_box, 40)
        acb = gui.auto_commit(settings_box, self, "auto_commit",
                              "Send Selected", "Send Automatically", box=None)
        acb.setFixedWidth(200)
        # Signal must be connected after self.commit is redirected
        selmodel.selectionChanged.connect(self.commit)

    def sizeHint(self):
        return QSize(800, 500)

    def set_distances(self, distances):
        """Input handler: show `distances` and rebuild the label choices."""
        self.closeContext()
        self.distances = distances
        self.tablemodel.set_data(self.distances)
        self.selection = []
        self.tableview.selectionModel().set_selected_items([])

        # False (not None) when there is no input; only the truthiness of
        # `items` is relied upon below.
        self.items = items = distances is not None and distances.row_items

        annotations = ["None", "Enumerate"]
        self.annotation_idx = 1
        if items and not distances.axis:
            # Distances between columns: offer attribute names as labels.
            annotations.append("Attribute names")
            self.annotation_idx = 2
        elif isinstance(items, list) and \
                all(isinstance(item, Variable) for item in items):
            annotations.append("Name")
            self.annotation_idx = 2
        elif isinstance(items, Table):
            # Offer every attribute and meta as a labelling variable;
            # default to the class variable when there is one.
            annotations.extend(
                itertools.chain(items.domain, items.domain.metas))
            if items.domain.class_var:
                self.annotation_idx = 2 + len(items.domain.attributes)
        self.annot_combo.model()[:] = annotations

        if items:
            self.openContext(distances, annotations)
        self._update_labels()
        self.tableview.resizeColumnsToContents()
        self.commit()

    def _invalidate_annotations(self):
        if self.distances is not None:
            self._update_labels()

    def _update_labels(self):
        """Refresh header labels (and label colors) from annotation_idx."""
        var = column = None
        if self.annotation_idx == 0:
            labels = None
        elif self.annotation_idx == 1:
            labels = [str(i + 1) for i in range(self.distances.shape[0])]
        elif self.annot_combo.model()[self.annotation_idx] == "Attribute names":
            attr = self.distances.row_items.domain.attributes
            labels = [str(attr[i]) for i in range(self.distances.shape[0])]
        elif self.annotation_idx == 2 and \
                isinstance(self.items, widget.AttributeList):
            labels = [v.name for v in self.items]
        elif isinstance(self.items, Table):
            var = self.annot_combo.model()[self.annotation_idx]
            column, _ = self.items.get_column_view(var)
            labels = [var.repr_val(value) for value in column]
        # Re-labelling resets the view's selection; save and restore it.
        saved_selection = self.tableview.selectionModel().selected_items()
        self.tablemodel.set_labels(labels, var, column)
        if labels:
            self.tableview.horizontalHeader().show()
            self.tableview.verticalHeader().show()
        else:
            self.tableview.horizontalHeader().hide()
            self.tableview.verticalHeader().hide()
        self.tableview.resizeColumnsToContents()
        self.tableview.selectionModel().set_selected_items(saved_selection)

    def commit(self):
        """Send the selected submatrix (and matching data rows) downstream."""
        sub_table = sub_distances = None
        if self.distances is not None:
            inds = self.tableview.selectionModel().selected_items()
            if inds:
                sub_distances = self.distances.submatrix(inds)
                if self.distances.axis and isinstance(self.items, Table):
                    sub_table = self.items[inds]
        self.send("Distances", sub_distances)
        self.send("Table", sub_table)

    def send_report(self):
        """Write the (colored) matrix into the report as an HTML table."""
        if self.distances is None:
            return
        model = self.tablemodel
        dim = self.distances.shape[0]
        col_cell = model.color_for_cell

        def _rgb(brush):
            return "rgb({}, {}, {})".format(*brush.color().getRgb())

        if model.labels:
            col_label = model.color_for_label
            label_colors = [_rgb(col_label(i)) for i in range(dim)]
            self.report_raw('<table style="border-collapse:collapse">')
            self.report_raw("<tr><td></td>")
            self.report_raw("".join(
                '<td style="background-color: {}">{}</td>'.format(*cv)
                for cv in zip(label_colors, model.labels)))
            self.report_raw("</tr>")
            for i in range(dim):
                self.report_raw("<tr>")
                self.report_raw(
                    '<td style="background-color: {}">{}</td>'.
                    format(label_colors[i], model.labels[i]))
                self.report_raw(
                    "".join(
                        '<td style="background-color: {};'
                        'border-top:1px solid {}; border-left:1px solid {};">'
                        '{:.3f}</td>'.format(
                            _rgb(col_cell(i, j)),
                            label_colors[i], label_colors[j],
                            self.distances[i, j])
                        for j in range(dim)))
                self.report_raw("</tr>")
            self.report_raw("</table>")
        else:
            self.report_raw('<table>')
            for i in range(dim):
                self.report_raw(
                    "<tr>" +
                    "".join('<td style="background-color: {}">{:.3f}</td>'.
                            format(_rgb(col_cell(i, j)), self.distances[i, j])
                            for j in range(dim)) +
                    "</tr>")
            self.report_raw("</table>")
|
|
"""Config flow to configure the Netgear integration."""
import logging
from urllib.parse import urlparse
from pynetgear import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_USER
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from .const import (
CONF_CONSIDER_HOME,
DEFAULT_CONSIDER_HOME,
DEFAULT_NAME,
DOMAIN,
MODELS_V2,
ORBI_PORT,
)
from .errors import CannotLoginException
from .router import get_api
_LOGGER = logging.getLogger(__name__)
def _discovery_schema_with_defaults(discovery_info):
    """Schema for SSDP-discovered devices: only the shared credential fields
    (host/port/ssl are already known from discovery)."""
    shared_fields = _ordered_shared_schema(discovery_info)
    return vol.Schema(shared_fields)
def _user_schema_with_defaults(user_input):
    """Schema for manual setup: connection fields plus the shared
    credential fields, seeded with any previously entered values."""
    host_default = user_input.get(CONF_HOST, "")
    port_default = user_input.get(CONF_PORT, DEFAULT_PORT)
    ssl_default = user_input.get(CONF_SSL, False)
    schema = {
        vol.Optional(CONF_HOST, default=host_default): str,
        vol.Optional(CONF_PORT, default=port_default): int,
        vol.Optional(CONF_SSL, default=ssl_default): bool,
    }
    schema.update(_ordered_shared_schema(user_input))
    return vol.Schema(schema)
def _ordered_shared_schema(schema_input):
    """Credential fields common to the discovery and manual-setup forms."""
    username_default = schema_input.get(CONF_USERNAME, "")
    password_default = schema_input.get(CONF_PASSWORD, "")
    return {
        vol.Optional(CONF_USERNAME, default=username_default): str,
        vol.Required(CONF_PASSWORD, default=password_default): str,
    }
class OptionsFlowHandler(config_entries.OptionsFlow):
    """Options for the component."""

    def __init__(self, config_entry: config_entries.ConfigEntry) -> None:
        """Init object."""
        self.config_entry = config_entry

    async def async_step_init(self, user_input=None):
        """Manage the options."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)
        # Pre-fill with the currently stored value, falling back to the
        # package default (seconds).
        current_consider_home = self.config_entry.options.get(
            CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds()
        )
        settings_schema = vol.Schema(
            {
                vol.Optional(
                    CONF_CONSIDER_HOME, default=current_consider_home
                ): int,
            }
        )
        return self.async_show_form(step_id="init", data_schema=settings_schema)
class NetgearFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow."""

    VERSION = 1

    def __init__(self):
        """Initialize the netgear config flow."""
        # Placeholders double as form description values and as fallback
        # defaults when the user leaves a field empty.
        self.placeholders = {
            CONF_HOST: DEFAULT_HOST,
            CONF_PORT: DEFAULT_PORT,
            CONF_USERNAME: DEFAULT_USER,
            CONF_SSL: False,
        }
        # True once SSDP discovery has pre-filled the connection details;
        # switches the form to the credentials-only schema.
        self.discovered = False

    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: config_entries.ConfigEntry,
    ) -> OptionsFlowHandler:
        """Get the options flow."""
        return OptionsFlowHandler(config_entry)

    async def _show_setup_form(self, user_input=None, errors=None):
        """Show the setup form to the user."""
        if not user_input:
            user_input = {}
        # Discovered devices only need credentials; manual setup also asks
        # for host/port/ssl.
        if self.discovered:
            data_schema = _discovery_schema_with_defaults(user_input)
        else:
            data_schema = _user_schema_with_defaults(user_input)
        return self.async_show_form(
            step_id="user",
            data_schema=data_schema,
            errors=errors or {},
            description_placeholders=self.placeholders,
        )

    async def async_step_import(self, user_input=None):
        """Import a config entry."""
        return await self.async_step_user(user_input)

    async def async_step_ssdp(self, discovery_info: dict) -> FlowResult:
        """Initialize flow from ssdp."""
        updated_data = {}

        device_url = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION])
        if device_url.hostname:
            updated_data[CONF_HOST] = device_url.hostname
        if device_url.scheme == "https":
            updated_data[CONF_SSL] = True
        else:
            updated_data[CONF_SSL] = False

        _LOGGER.debug("Netgear ssdp discovery info: %s", discovery_info)

        # Abort if this serial is already configured, but refresh its
        # stored host/ssl data first.
        await self.async_set_unique_id(discovery_info[ssdp.ATTR_UPNP_SERIAL])
        self._abort_if_unique_id_configured(updates=updated_data)

        updated_data[CONF_PORT] = DEFAULT_PORT
        # V2 (Orbi) models use a different SOAP port.
        for model in MODELS_V2:
            if discovery_info.get(ssdp.ATTR_UPNP_MODEL_NUMBER, "").startswith(
                model
            ) or discovery_info.get(ssdp.ATTR_UPNP_MODEL_NAME, "").startswith(model):
                updated_data[CONF_PORT] = ORBI_PORT

        self.placeholders.update(updated_data)
        self.discovered = True

        return await self.async_step_user()

    async def async_step_user(self, user_input=None):
        """Handle a flow initiated by the user."""
        errors = {}

        if user_input is None:
            return await self._show_setup_form()

        host = user_input.get(CONF_HOST, self.placeholders[CONF_HOST])
        port = user_input.get(CONF_PORT, self.placeholders[CONF_PORT])
        ssl = user_input.get(CONF_SSL, self.placeholders[CONF_SSL])
        username = user_input.get(CONF_USERNAME, self.placeholders[CONF_USERNAME])
        password = user_input[CONF_PASSWORD]
        if not username:
            username = self.placeholders[CONF_USERNAME]

        # Open connection and check authentication
        try:
            api = await self.hass.async_add_executor_job(
                get_api, password, host, username, port, ssl
            )
        except CannotLoginException:
            errors["base"] = "config"

        if errors:
            return await self._show_setup_form(user_input, errors)

        # Check if already configured
        info = await self.hass.async_add_executor_job(api.get_info)
        await self.async_set_unique_id(info["SerialNumber"], raise_on_progress=False)
        self._abort_if_unique_id_configured()

        config_data = {
            CONF_USERNAME: username,
            CONF_PASSWORD: password,
            CONF_HOST: host,
            CONF_PORT: port,
            CONF_SSL: ssl,
        }

        # Entry title: "<model> - <device name>" when both are known,
        # otherwise just the model (or the package default name).
        if info.get("ModelName") is not None and info.get("DeviceName") is not None:
            name = f"{info['ModelName']} - {info['DeviceName']}"
        else:
            name = info.get("ModelName", DEFAULT_NAME)

        return self.async_create_entry(
            title=name,
            data=config_data,
        )
|
|
#!/usr/bin/env python
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line tools for authenticating via OAuth 2.0
Do the OAuth 2.0 Web Server dance for a command line application. Stores the
generated credentials in a common file that is used by other example apps in
the same directory.
"""
from __future__ import print_function
__all__ = ['argparser', 'run_flow', 'run', 'message_if_missing']
import logging
import socket
import sys
from six.moves import BaseHTTPServer
from six.moves import urllib
from six.moves import input
from oauth2client import client
from oauth2client import util
# The %s placeholder is filled with the expected client_secrets.json path
# by message_if_missing() below.
_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console <https://code.google.com/apis/console>.
"""
def _CreateArgumentParser():
try:
import argparse
except ImportError:
return None
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--auth_host_name', default='localhost',
help='Hostname when running a local web server.')
parser.add_argument('--noauth_local_webserver', action='store_true',
default=False, help='Do not run a local web server.')
parser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
nargs='*', help='Port web server should listen on.')
parser.add_argument('--logging_level', default='ERROR',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
help='Set the logging level of detail.')
return parser
# argparser is an ArgumentParser that contains command-line options expected
# by tools.run(). Pass it in as part of the 'parents' argument to your own
# ArgumentParser.
argparser = _CreateArgumentParser()
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
    """A server to handle OAuth 2.0 redirects back to localhost.

    Waits for a single request and parses the query parameters
    into query_params and then stops serving.
    """

    # Overwritten by ClientRedirectHandler.do_GET with the parsed query
    # string of the single redirect request.
    query_params = {}
class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """A handler for OAuth 2.0 redirects back to localhost.

    Waits for a single request and parses the query parameters
    into the servers query_params and then stops serving.
    """

    def do_GET(self):
        """Handle a GET request.

        Parses the query parameters and prints a message
        if the flow has completed. Note that we can't detect
        if an error occurred.
        """
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        # Everything after the first '?' (the whole path if there is none).
        raw_query = self.path.split('?', 1)[-1]
        self.server.query_params = dict(urllib.parse.parse_qsl(raw_query))
        for fragment in (
                b"<html><head><title>Authentication Status</title></head>",
                b"<body><p>The authentication flow has completed.</p>",
                b"</body></html>"):
            self.wfile.write(fragment)

    def log_message(self, format, *args):
        """Do not log messages to stdout while running as command line program."""
@util.positional(3)
def run_flow(flow, storage, flags, http=None):
    """Core code for a command-line application.

    The ``run()`` function is called from your application and runs
    through all the steps to obtain credentials. It takes a ``Flow``
    argument and attempts to open an authorization server page in the
    user's default web browser. The server asks the user to grant your
    application access to the user's data. If the user grants access,
    the ``run()`` function returns new credentials. The new credentials
    are also stored in the ``storage`` argument, which updates the file
    associated with the ``Storage`` object.

    It presumes it is run from a command-line application and supports the
    following flags:

    ``--auth_host_name`` (string, default: ``localhost``)
       Host name to use when running a local web server to handle
       redirects during OAuth authorization.

    ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
       Port to use when running a local web server to handle redirects
       during OAuth authorization. Repeat this option to specify a list
       of values.

    ``--[no]auth_local_webserver`` (boolean, default: ``True``)
       Run a local web server to handle redirects during OAuth authorization.

    The tools module defines an ``ArgumentParser`` the already contains the flag
    definitions that ``run()`` requires. You can pass that ``ArgumentParser`` to your
    ``ArgumentParser`` constructor::

        parser = argparse.ArgumentParser(description=__doc__,
                                         formatter_class=argparse.RawDescriptionHelpFormatter,
                                         parents=[tools.argparser])
        flags = parser.parse_args(argv)

    Args:
        flow: Flow, an OAuth 2.0 Flow to step through.
        storage: Storage, a ``Storage`` to store the credential in.
        flags: ``argparse.Namespace``, The command-line flags. This is the
            object returned from calling ``parse_args()`` on
            ``argparse.ArgumentParser`` as described above.
        http: An instance of ``httplib2.Http.request`` or something that
            acts like it.

    Returns:
        Credentials, the obtained credential.
    """
    logging.getLogger().setLevel(getattr(logging, flags.logging_level))
    if not flags.noauth_local_webserver:
        success = False
        port_number = 0
        # Try each configured port until one can be bound.
        for port in flags.auth_host_port:
            port_number = port
            try:
                httpd = ClientRedirectServer((flags.auth_host_name, port),
                                             ClientRedirectHandler)
            except socket.error:
                pass
            else:
                success = True
                break
        flags.noauth_local_webserver = not success
        if not success:
            # Fix: the message used to hard-code "port 8080 or port 9090",
            # which matched neither the defaults (8080, 8090) nor any
            # user-supplied --auth_host_port values. Report the real ports.
            attempted_ports = ', '.join(str(p) for p in flags.auth_host_port)
            print('Failed to start a local webserver listening on port(s)')
            print('%s. Please check your firewall settings and locally' %
                  attempted_ports)
            print('running programs that may be blocking or using those ports.')
            print()
            print('Falling back to --noauth_local_webserver and continuing with')
            print('authorization.')
            print()

    if not flags.noauth_local_webserver:
        oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number)
    else:
        # Out-of-band flow: the user pastes the verification code manually.
        oauth_callback = client.OOB_CALLBACK_URN
    flow.redirect_uri = oauth_callback
    authorize_url = flow.step1_get_authorize_url()

    if not flags.noauth_local_webserver:
        import webbrowser
        webbrowser.open(authorize_url, new=1, autoraise=True)
        print('Your browser has been opened to visit:')
        print()
        print('    ' + authorize_url)
        print()
        print('If your browser is on a different machine then exit and re-run this')
        print('application with the command-line parameter ')
        print()
        print('  --noauth_local_webserver')
        print()
    else:
        print('Go to the following link in your browser:')
        print()
        print('    ' + authorize_url)
        print()

    code = None
    if not flags.noauth_local_webserver:
        # Block until the browser hits the local redirect URI.
        httpd.handle_request()
        if 'error' in httpd.query_params:
            sys.exit('Authentication request was rejected.')
        if 'code' in httpd.query_params:
            code = httpd.query_params['code']
        else:
            print('Failed to find "code" in the query parameters of the redirect.')
            sys.exit('Try running with --noauth_local_webserver.')
    else:
        code = input('Enter verification code: ').strip()

    try:
        credential = flow.step2_exchange(code, http=http)
    except client.FlowExchangeError as e:
        sys.exit('Authentication has failed: %s' % e)

    storage.put(credential)
    credential.set_store(storage)
    print('Authentication successful.')
    return credential
def message_if_missing(filename):
    """Helpful message to display if the CLIENT_SECRETS file is missing.

    ``filename`` is interpolated into the warning template so the user
    knows exactly which path to populate.
    """
    return _CLIENT_SECRETS_MESSAGE % (filename,)
# Backwards compatibility: ``tools.run`` was the gflags-based predecessor of
# run_flow(). If the optional old_run module (which requires gflags) is
# importable, re-export its run()/FLAGS; otherwise install a stub that
# directs callers to run_flow().
try:
    from oauth2client.old_run import run
    from oauth2client.old_run import FLAGS
except ImportError:
    def run(*args, **kwargs):
        raise NotImplementedError(
            'The gflags library must be installed to use tools.run(). '
            'Please install gflags or preferrably switch to using '
            'tools.run_flow().')
|
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import logging
import os
import unittest
from monty.serialization import loadfn
from pymatgen.core.structure import Molecule
from pymatgen.io.qchem.inputs import QCInput
from pymatgen.util.testing import PymatgenTest
__author__ = "Brandon Wood, Samuel Blau, Shyam Dwaraknath, Julian Self"
__copyright__ = "Copyright 2018, The Materials Project"
__version__ = "0.1"
__email__ = "b.wood@berkeley.edu"
__credits__ = "Xiaohui Qu"
logger = logging.getLogger(__name__)
class TestQCInput(PymatgenTest):
    # TODO: def setUpClass(cls): add fixtures for things that show up
    # over and over again
    def test_molecule_template(self):
        """molecule_template() renders a Molecule as a $molecule block."""
        species = ["C", "O"]
        coords = [
            [-9.5782000000, 0.6241500000, 0.0000000000],
            [-7.5827400000, 0.5127000000, -0.0000000000],
        ]
        mol = Molecule(species=species, coords=coords)
        molecule_test = QCInput.molecule_template(mol)
        molecule_actual = """$molecule
0 1
C -9.5782000000 0.6241500000 0.0000000000
O -7.5827400000 0.5127000000 -0.0000000000
$end"""
        self.assertEqual(molecule_actual, molecule_test)
# TODO improve this test maybe add ordered dicts
def test_rem_template(self):
rem_params = {
"job_type": "opt",
"method": "wb97m-v",
"basis": "def2-qzvppd",
"max_scf_cycles": 300,
"gen_scfman": "true",
}
rem_test = QCInput.rem_template(rem_params).split("\n")
rem_actual_list = [
"$rem",
" job_type = opt",
" method = wb97m-v",
" basis = def2-qzvppd",
" max_scf_cycles = 300",
" gen_scfman = true",
"$end",
]
for i_rem in rem_actual_list:
self.assertIn(i_rem, rem_test)
def test_opt_template(self):
opt_params = {
"CONSTRAINT": ["tors 2 3 4 5 25.0", "bend 2 1 4 110.0"],
"FIXED": ["x y 2 4 5"],
"DUMMY": ["M 2 3 4 5"],
"CONNECT": ["4 3 2 3 5 6"],
}
opt_test = QCInput.opt_template(opt_params).split("\n")
opt_actual_list = [
"$opt",
"CONSTRAINT",
" tors 2 3 4 5 25.0",
" bend 2 1 4 110.0",
"ENDCONSTRAINT",
"FIXED",
" x y 2 4 5",
"ENDFIXED",
"DUMMY",
" M 2 3 4 5",
"ENDDUMMY",
"CONNECT",
" 4 3 2 3 5 6",
"ENDCONNECT",
"$end",
]
for i_opt in opt_actual_list:
self.assertIn(i_opt, opt_test)
    def test_pcm_template(self):
        """pcm_template() renders key/value pairs as a $pcm block."""
        pcm_params = {"theory": "cpcm"}
        pcm_test = QCInput.pcm_template(pcm_params)
        pcm_actual = """$pcm
theory cpcm
$end"""
        self.assertEqual(pcm_actual, pcm_test)
    def test_solvent_template(self):
        """solvent_template() renders key/value pairs as a $solvent block."""
        solvent_params = {"dielectric": "5.0"}
        solvent_test = QCInput.solvent_template(solvent_params)
        solvent_actual = """$solvent
dielectric 5.0
$end"""
        self.assertEqual(solvent_actual, solvent_test)
    def test_smx_template(self):
        """smx_template() renders key/value pairs as a $smx block."""
        smx_params = {"solvent": "water"}
        smx_test = QCInput.smx_template(smx_params)
        smx_actual = """$smx
solvent water
$end"""
        self.assertEqual(smx_actual, smx_test)
    def test_find_sections(self):
        """find_sections() lists the $section names present, in order."""
        str_single_job_input = """$molecule
0 1
S -0.00250959 -0.05817469 -0.02921636
C 1.70755408 -0.03033788 -0.01382912
H 2.24317221 -0.05215019 0.92026728
C 2.21976393 0.01718014 -1.27293235
H 3.27786220 0.04082146 -1.48539646
C 1.20867399 0.04478540 -2.27007793
H 1.40292257 0.10591684 -3.33110912
C -0.05341046 0.01577217 -1.74839343
C -1.32843436 0.03545064 -2.45531187
C -1.55195156 0.08743920 -3.80184635
H -0.75245172 0.10267657 -4.52817967
C -2.93293778 0.08408786 -4.13352169
H -3.31125108 0.11340328 -5.14405819
C -3.73173288 0.02741365 -3.03412864
H -4.80776535 0.00535688 -2.99564645
S -2.81590978 -0.00516172 -1.58990580
$end
$rem
job_type = opt
method = wb97m-v
basis = def2-tzvppd
gen_scfman = true
geom_opt_max_cycles = 75
max_scf_cycles = 300
scf_algorithm = diis
scf_guess = sad
sym_ignore = true
symmetry = false
thresh = 14
$end
$opt
CONSTRAINT
tors 6 8 9 10 0.0
ENDCONSTRAINT
$end
"""
        sections_test = QCInput.find_sections(str_single_job_input)
        section_actual = ["molecule", "rem", "opt"]
        self.assertEqual(section_actual, sections_test)
    def test_read_molecule(self):
        """read_molecule() parses a $molecule block into a Molecule."""
        str_molecule = """$molecule
0 1
C -9.5782000000 0.6241500000 0.0000000000
O -7.5827400000 0.5127000000 -0.0000000000
$end"""
        molecule_test = QCInput.read_molecule(str_molecule)
        species = ["C", "O"]
        coords = [
            [-9.5782000000, 0.6241500000, 0.0000000000],
            [-7.5827400000, 0.5127000000, -0.0000000000],
        ]
        molecule_actual = Molecule(species, coords)
        self.assertEqual(molecule_actual, molecule_test)
    def test_read_rem(self):
        """read_rem() parses $rem lines with or without '=' separators,
        ignoring text outside the section."""
        str_rem = """Trying to break you!
$rem
job_type opt
method wb97m-v
basis def2-qzvppd
max_scf_cycles 300
gen_scfman = true
$end"""
        rem_test = QCInput.read_rem(str_rem)
        rem_actual = {
            "job_type": "opt",
            "method": "wb97m-v",
            "basis": "def2-qzvppd",
            "max_scf_cycles": "300",
            "gen_scfman": "true",
        }
        self.assertDictEqual(rem_actual, rem_test)
    def test_read_only_rem(self):
        """read_rem() returns only the $rem section even when other
        sections ($pcm, $solvent) are present."""
        str_rem = """Trying to break you!
$rem
job_type opt
method wb97m-v
basis def2-qzvppd
max_scf_cycles 300
gen_scfman = true
$end
$pcm
heavypoints 194
hpoints 194
radii uff
theory cpcm
vdwscale 1.1
$end
$solvent
dielectric 10.0
$end
"""
        rem_test = QCInput.read_rem(str_rem)
        rem_actual = {
            "job_type": "opt",
            "method": "wb97m-v",
            "basis": "def2-qzvppd",
            "max_scf_cycles": "300",
            "gen_scfman": "true",
        }
        self.assertDictEqual(rem_actual, rem_test)
    def test_read_opt(self):
        """read_opt() parses each $opt subsection into a list of lines."""
        str_opt = """$opt
CONSTRAINT
tors 2 3 4 5 25.0
bend 2 1 4 110.0
ENDCONSTRAINT
FIXED
x y 2 4 5
ENDFIXED
DUMMY
M 2 3 4 5
ENDDUMMY
CONNECT
4 3 2 3 5 6
ENDCONNECT
$end"""
        opt_test = QCInput.read_opt(str_opt)
        opt_actual = {
            "CONSTRAINT": ["tors 2 3 4 5 25.0", "bend 2 1 4 110.0"],
            "FIXED": ["x y 2 4 5"],
            "DUMMY": ["M 2 3 4 5"],
            "CONNECT": ["4 3 2 3 5 6"],
        }
        self.assertDictEqual(opt_actual, opt_test)
def test__str__(self):
species = ["C", "O"]
coords = [
[-9.5782000000, 0.6241500000, 0.0000000000],
[-7.5827400000, 0.5127000000, -0.0000000000],
]
molecule = Molecule(species=species, coords=coords)
rem = {
"jobtype": "opt",
"method": "wb97m-v",
"basis": "def2-qzvppd",
"max_scf_cycles": "300",
"gen_scfman": "true",
}
str_test = QCInput(molecule=molecule, rem=rem).__str__().split("\n")
str_actual_list = [
"$molecule",
" 0 1",
" C -9.5782000000 0.6241500000 0.0000000000",
" O -7.5827400000 0.5127000000 -0.0000000000",
"$end",
"$rem",
" job_type = opt",
" method = wb97m-v",
" basis = def2-qzvppd",
" max_scf_cycles = 300",
" gen_scfman = true",
"$end",
]
for i_str in str_actual_list:
self.assertIn(i_str, str_test)
    def test_from_string(self):
        """from_string() parses molecule, rem (with 'jobtype' normalized to
        'job_type') and opt sections from a single-job input."""
        string = """$molecule
0 1
S -0.00250959 -0.05817469 -0.02921636
C 1.70755408 -0.03033788 -0.01382912
H 2.24317221 -0.05215019 0.92026728
C 2.21976393 0.01718014 -1.27293235
H 3.27786220 0.04082146 -1.48539646
C 1.20867399 0.04478540 -2.27007793
H 1.40292257 0.10591684 -3.33110912
C -0.05341046 0.01577217 -1.74839343
C -1.32843436 0.03545064 -2.45531187
C -1.55195156 0.08743920 -3.80184635
H -0.75245172 0.10267657 -4.52817967
C -2.93293778 0.08408786 -4.13352169
H -3.31125108 0.11340328 -5.14405819
C -3.73173288 0.02741365 -3.03412864
H -4.80776535 0.00535688 -2.99564645
S -2.81590978 -0.00516172 -1.58990580
$end
$rem
jobtype = opt
method = wb97m-v
basis = def2-tzvppd
gen_scfman = true
geom_opt_max_cycles = 75
max_scf_cycles = 300
scf_algorithm = diis
scf_guess = sad
sym_ignore = true
symmetry = false
thresh = 14
$end
$opt
CONSTRAINT
tors 6 8 9 10 0.0
ENDCONSTRAINT
$end
"""
        qcinput_test = QCInput.from_string(string)
        species = [
            "S",
            "C",
            "H",
            "C",
            "H",
            "C",
            "H",
            "C",
            "C",
            "C",
            "H",
            "C",
            "H",
            "C",
            "H",
            "S",
        ]
        coords = [
            [-0.00250959, -0.05817469, -0.02921636],
            [1.70755408, -0.03033788, -0.01382912],
            [2.24317221, -0.05215019, 0.92026728],
            [2.21976393, 0.01718014, -1.27293235],
            [3.27786220, 0.04082146, -1.48539646],
            [1.20867399, 0.04478540, -2.27007793],
            [1.40292257, 0.10591684, -3.33110912],
            [-0.05341046, 0.01577217, -1.74839343],
            [-1.32843436, 0.03545064, -2.45531187],
            [-1.55195156, 0.08743920, -3.80184635],
            [-0.75245172, 0.10267657, -4.52817967],
            [-2.93293778, 0.08408786, -4.13352169],
            [-3.31125108, 0.11340328, -5.14405819],
            [-3.73173288, 0.02741365, -3.03412864],
            [-4.80776535, 0.00535688, -2.99564645],
            [-2.81590978, -0.00516172, -1.58990580],
        ]
        molecule_actual = Molecule(species, coords)
        self.assertEqual(molecule_actual, qcinput_test.molecule)
        rem_actual = {
            "job_type": "opt",
            "method": "wb97m-v",
            "basis": "def2-tzvppd",
            "gen_scfman": "true",
            "geom_opt_max_cycles": "75",
            "max_scf_cycles": "300",
            "scf_algorithm": "diis",
            "scf_guess": "sad",
            "sym_ignore": "true",
            "symmetry": "false",
            "thresh": "14",
        }
        self.assertDictEqual(rem_actual, qcinput_test.rem)
        opt_actual = {"CONSTRAINT": ["tors 6 8 9 10 0.0"]}
        self.assertDictEqual(opt_actual, qcinput_test.opt)
# TODO this test needs an update, the assertion doesn't differentiate between the different rem sections
def test_multi_job_string(self):
species = [
"S",
"C",
"H",
"C",
"H",
"C",
"H",
"C",
"C",
"C",
"H",
"C",
"H",
"C",
"H",
"S",
]
coords = [
[-0.00250959, -0.05817469, -0.02921636],
[1.70755408, -0.03033788, -0.01382912],
[2.24317221, -0.05215019, 0.92026728],
[2.21976393, 0.01718014, -1.27293235],
[3.27786220, 0.04082146, -1.48539646],
[1.20867399, 0.04478540, -2.27007793],
[1.40292257, 0.10591684, -3.33110912],
[-0.05341046, 0.01577217, -1.74839343],
[-1.32843436, 0.03545064, -2.45531187],
[-1.55195156, 0.08743920, -3.80184635],
[-0.75245172, 0.10267657, -4.52817967],
[-2.93293778, 0.08408786, -4.13352169],
[-3.31125108, 0.11340328, -5.14405819],
[-3.73173288, 0.02741365, -3.03412864],
[-4.80776535, 0.00535688, -2.99564645],
[-2.81590978, -0.00516172, -1.58990580],
]
molecule_1 = Molecule(species, coords)
rem_1 = {
"jobtype": "opt",
"method": "wb97m-v",
"basis": "def2-tzvppd",
"gen_scfman": "true",
"geom_opt_max_cycles": "75",
"max_scf_cycles": "300",
"scf_algorithm": "diis",
"scf_guess": "sad",
"sym_ignore": "true",
"symmetry": "false",
"thresh": "14",
}
opt_1 = {"CONSTRAINT": ["tors 6 8 9 10 0.0"]}
job_1 = QCInput(molecule=molecule_1, rem=rem_1, opt=opt_1)
molecule_2 = "read"
rem_2 = {
"jobtype": "sp",
"method": "wb97m-v",
"basis": "def2-tzvppd",
"gen_scfman": "true",
"geom_opt_max_cycles": "75",
"max_scf_cycles": "300",
"scf_algorithm": "diis",
"scf_guess": "read",
"sym_ignore": "true",
"symmetry": "false",
"thresh": "14",
}
job_2 = QCInput(molecule=molecule_2, rem=rem_2)
job_list = [job_1, job_2]
multi_job_str_test = QCInput.multi_job_string(job_list=job_list).split("\n")
multi_job_str_actual_list = [
"$molecule",
" 0 1",
" S -0.0025095900 -0.0581746900 -0.0292163600",
" C 1.7075540800 -0.0303378800 -0.0138291200",
" H 2.2431722100 -0.0521501900 0.9202672800",
" C 2.2197639300 0.0171801400 -1.2729323500",
" H 3.2778622000 0.0408214600 -1.4853964600",
" C 1.2086739900 0.0447854000 -2.2700779300",
" H 1.4029225700 0.1059168400 -3.3311091200",
" C -0.0534104600 0.0157721700 -1.7483934300",
" C -1.3284343600 0.0354506400 -2.4553118700",
" C -1.5519515600 0.0874392000 -3.8018463500",
" H -0.7524517200 0.1026765700 -4.5281796700",
" C -2.9329377800 0.0840878600 -4.1335216900",
" H -3.3112510800 0.1134032800 -5.1440581900",
" C -3.7317328800 0.0274136500 -3.0341286400",
" H -4.8077653500 0.0053568800 -2.9956464500",
" S -2.8159097800 -0.0051617200 -1.5899058000",
"$end",
"$rem",
" job_type = opt",
" method = wb97m-v",
" basis = def2-tzvppd",
" gen_scfman = true",
" geom_opt_max_cycles = 75",
" max_scf_cycles = 300",
" scf_algorithm = diis",
" scf_guess = sad",
" sym_ignore = true",
" symmetry = false",
" thresh = 14",
"$end",
"$opt",
"CONSTRAINT",
" tors 6 8 9 10 0.0",
"ENDCONSTRAINT",
"$end",
"@@@",
"$molecule",
" read",
"$end",
"$rem",
" job_type = opt",
" method = wb97m-v",
" basis = def2-tzvppd",
" gen_scfman = true",
" geom_opt_max_cycles = 75",
" max_scf_cycles = 300",
" scf_algorithm = diis",
" scf_guess = sad",
" sym_ignore = true",
" symmetry = false",
" thresh = 14",
"$end",
]
for i_str in multi_job_str_actual_list:
self.assertIn(i_str, multi_job_str_test)
def test_from_multi_jobs_file(self):
job_list_test = QCInput.from_multi_jobs_file(
os.path.join(PymatgenTest.TEST_FILES_DIR, "qchem", "pt_n2_wb97mv_0.0.in")
)
species = [
"S",
"C",
"H",
"C",
"H",
"C",
"H",
"C",
"C",
"C",
"H",
"C",
"H",
"C",
"H",
"S",
]
coords = [
[-0.00250959, -0.05817469, -0.02921636],
[1.70755408, -0.03033788, -0.01382912],
[2.24317221, -0.05215019, 0.92026728],
[2.21976393, 0.01718014, -1.27293235],
[3.27786220, 0.04082146, -1.48539646],
[1.20867399, 0.04478540, -2.27007793],
[1.40292257, 0.10591684, -3.33110912],
[-0.05341046, 0.01577217, -1.74839343],
[-1.32843436, 0.03545064, -2.45531187],
[-1.55195156, 0.08743920, -3.80184635],
[-0.75245172, 0.10267657, -4.52817967],
[-2.93293778, 0.08408786, -4.13352169],
[-3.31125108, 0.11340328, -5.14405819],
[-3.73173288, 0.02741365, -3.03412864],
[-4.80776535, 0.00535688, -2.99564645],
[-2.81590978, -0.00516172, -1.58990580],
]
molecule_1_actual = Molecule(species, coords)
rem_1_actual = {
"job_type": "opt",
"method": "wb97m-v",
"basis": "def2-tzvppd",
"gen_scfman": "true",
"geom_opt_max_cycles": "75",
"max_scf_cycles": "300",
"scf_algorithm": "diis",
"scf_guess": "sad",
"sym_ignore": "true",
"symmetry": "false",
"thresh": "14",
}
opt_1_actual = {"CONSTRAINT": ["tors 6 8 9 10 0.0"]}
self.assertEqual(molecule_1_actual, job_list_test[0].molecule)
self.assertEqual(rem_1_actual, job_list_test[0].rem)
self.assertEqual(opt_1_actual, job_list_test[0].opt)
molecule_2_actual = "read"
rem_2_actual = {
"job_type": "sp",
"method": "wb97m-v",
"basis": "def2-tzvppd",
"gen_scfman": "true",
"geom_opt_max_cycles": "75",
"max_scf_cycles": "300",
"scf_algorithm": "diis",
"scf_guess": "read",
"sym_ignore": "true",
"symmetry": "false",
"thresh": "14",
}
self.assertEqual(molecule_2_actual, job_list_test[1].molecule)
self.assertEqual(rem_2_actual, job_list_test[1].rem)
    def test_read_pcm(self):
        """read_pcm() parses space-separated $pcm key/value lines."""
        str_pcm = """I'm once again trying to break you!
$pcm
theory cpcm
radii uff
vdwscale 1.1
$end"""
        pcm_test = QCInput.read_pcm(str_pcm)
        pcm_actual = {"theory": "cpcm", "radii": "uff", "vdwscale": "1.1"}
        self.assertDictEqual(pcm_actual, pcm_test)
    def test_read_bad_pcm(self):
        """read_pcm() yields an empty dict for malformed ('=') $pcm lines."""
        str_pcm = """I'm once again trying to break you!
$pcm
theory = cpcm
radii = uff
vdwscale = 1.1
$end"""
        pcm_test = QCInput.read_pcm(str_pcm)
        pcm_actual = {}
        self.assertDictEqual(pcm_actual, pcm_test)
    def test_read_solvent(self):
        """read_solvent() parses space-separated $solvent key/value lines."""
        str_solvent = """Once again, I'm trying to break you!
$solvent
dielectric 5.0
$end"""
        solvent_test = QCInput.read_solvent(str_solvent)
        solvent_actual = {
            "dielectric": "5.0",
        }
        self.assertDictEqual(solvent_actual, solvent_test)
    def test_read_bad_solvent(self):
        """read_solvent() yields an empty dict for malformed ('=') lines."""
        str_solvent = """Once again, I'm trying to break you!
$solvent
dielectric = 5.0
$end"""
        solvent_test = QCInput.read_solvent(str_solvent)
        solvent_actual = {}
        self.assertDictEqual(solvent_actual, solvent_test)
    def test_read_smx(self):
        """read_smx() parses space-separated $smx key/value lines."""
        str_smx = """Once again, I'm trying to break you!
$smx
solvent water
$end"""
        smx_test = QCInput.read_smx(str_smx)
        smx_actual = {
            "solvent": "water",
        }
        self.assertDictEqual(smx_actual, smx_test)
    def test_read_bad_smx(self):
        """read_smx() yields an empty dict when no well-formed $smx section
        is present (this fixture deliberately mislabels it $solvent and
        uses '=' separators)."""
        str_smx = """Once again, I'm trying to break you!
$solvent
solvent = water
$end"""
        smx_test = QCInput.read_smx(str_smx)
        smx_actual = {}
        self.assertDictEqual(smx_actual, smx_test)
def test_read_negative(self):
    """Round-trip check: an input with a negatively charged molecule must
    parse and re-serialize to exactly the same text."""
    str_molecule = """$molecule
-1 1
S -1.1516880000 0.8568110000 -0.0787470000
S 1.1527500000 -0.8580450000 -0.0786430000
O -1.6523520000 1.8607750000 -1.0252100000
O -0.9052880000 1.2448490000 1.3156410000
O 0.9072410000 -1.2461780000 1.3158760000
O 1.6543670000 -1.8616640000 -1.0249090000
C -2.5841130000 -0.3746500000 0.0297340000
C 2.5833220000 0.3755850000 0.0296900000
F -3.6480730000 0.2204040000 0.6112110000
F -2.2609850000 -1.4531020000 0.7616580000
F -2.9656640000 -0.7966010000 -1.1900330000
F 3.6467050000 -0.2152590000 0.6163310000
F 2.2560700000 1.4560310000 0.7568190000
F 2.9672080000 0.7933560000 -1.1908790000
N -0.0001900000 -0.0016540000 -0.8250640000
$end
$rem
job_type = opt
basis = 6-311++g*
max_scf_cycles = 200
gen_scfman = true
scf_algorithm = diis
method = wb97xd
geom_opt_max_cycles = 200
$end
"""
    # str(qcinp) must reproduce the original text byte-for-byte.
    qcinp = QCInput.from_string(str_molecule)
    self.assertEqual(str_molecule, str(qcinp))
def test_read_plots(self):
    """Round-trip check: an input containing a $plots section must parse and
    re-serialize to exactly the same text."""
    str_molecule = """$molecule
0 2
O 1.6159947668 0.3522275191 0.3343192028
O -0.5921658045 1.4368355787 1.2632324885
C 0.4160355545 -0.4617433561 0.2180766834
C -0.7655230468 0.4776728409 0.1826587618
C 2.8437090411 -0.3853724291 0.0935770045
C -1.7918488579 2.2003569978 1.5593659974
H 0.4649228147 -1.0347597878 -0.7097270414
H 3.6714833661 0.3051154983 0.2509025369
H 2.8395611019 -0.7401009356 -0.9372741555
H -2.1017802975 2.7482577804 0.6678359687
H -1.5445030956 2.8894960726 2.3658396091
Mg 1.2856817013 1.9249743897 1.4285694502
$end
$rem
job_type = sp
basis = def2-tzvppd
max_scf_cycles = 200
gen_scfman = true
xc_grid = 3
scf_algorithm = gdm
resp_charges = true
symmetry = false
sym_ignore = true
method = wb97xv
solvent_method = smd
ideriv = 1
thresh = 14
scf_guess_always = true
plots = true
make_cube_files = true
$end
$smx
solvent thf
$end
$plots
grid_spacing 0.05
total_density 0
$end
"""
    # str(qcinp) must reproduce the original text byte-for-byte.
    qcinp = QCInput.from_string(str_molecule)
    self.assertEqual(str_molecule, str(qcinp))
def test_write_file_from_OptSet(self):
    """Write an OptSet-generated input to disk and compare it, section by
    section, against a reference file.

    Fix: the temporary ``test.qin`` was previously removed only after all
    assertions passed, so a failing assertion leaked the file into the test
    directory.  The cleanup now runs in a ``finally`` block, and the repeated
    ``os.path.join(os.path.dirname(__file__), ...)`` is hoisted.
    """
    from pymatgen.io.qchem.sets import OptSet
    test_dir = os.path.dirname(__file__)
    odd_dict = loadfn(os.path.join(test_dir, "odd.json"))
    odd_mol = odd_dict["spec"]["_tasks"][0]["molecule"]
    qcinp = OptSet(odd_mol)
    test_path = os.path.join(test_dir, "test.qin")
    try:
        qcinp.write_file(test_path)
        test_dict = QCInput.from_file(test_path).as_dict()
        test_ref_dict = QCInput.from_file(os.path.join(test_dir, "test_ref.qin")).as_dict()
        for key in test_dict:
            self.assertEqual(test_dict[key], test_ref_dict[key])
    finally:
        # Always clean up the generated file, even on assertion failure.
        if os.path.exists(test_path):
            os.remove(test_path)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
|
"""Weight initializer."""
from __future__ import absolute_import, print_function
import re
import logging
import warnings
import json
import numpy as np
from .base import string_types
from .ndarray import NDArray, load
from . import random
from . import registry
# inherit str for backward compatibility
class InitDesc(str):
    """Descriptor for the initialization pattern.

    A ``str`` subclass: the descriptor *is* the variable name, with the
    variable's attributes and a fallback initializer attached to it.

    Parameter
    ---------
    name : str
        Name of variable.
    attrs : dict of str to str
        Attributes of this variable taken from ``Symbol.attr_dict``.
    global_init : Initializer
        Global initializer to fallback to.
    """
    def __new__(cls, name, attrs=None, global_init=None):
        obj = super(InitDesc, cls).__new__(cls, name)
        # Falsy attrs (None or {}) are replaced by a fresh empty dict.
        obj.attrs = attrs or {}
        obj.global_init = global_init
        return obj
class Initializer(object):
    """The base class of an initializer.

    Subclasses implement ``_init_weight`` (and optionally the other
    ``_init_*`` hooks); ``__call__`` dispatches to them based on the
    suffix of the parameter name being initialized.
    """
    def __init__(self, **kwargs):
        # Stored verbatim so dumps() can serialize the configuration.
        self._kwargs = kwargs
    def dumps(self):
        """Saves the initializer to string

        Returns
        -------
        str
            JSON formatted string that describes the initializer.

        Examples
        --------
        >>> # Create initializer and retrieve its parameters
        ...
        >>> init = mx.init.Normal(0.5)
        >>> init.dumps()
        '["normal", {"sigma": 0.5}]'
        >>> init = mx.init.Xavier(factor_type="in", magnitude=2.34)
        >>> init.dumps()
        '["xavier", {"rnd_type": "uniform", "magnitude": 2.34, "factor_type": "in"}]'
        """
        return json.dumps([self.__class__.__name__.lower(), self._kwargs])
    def __call__(self, desc, arr):
        """Initialize an array

        Parameters
        ----------
        desc : InitDesc
            Initialization pattern descriptor.
        arr : NDArray
            The array to be initialized.
        """
        if not isinstance(desc, InitDesc):
            # Plain string names go through the deprecated dispatch path.
            self._legacy_init(desc, arr)
            return
        if desc.global_init is None:
            desc.global_init = self
        init = desc.attrs.get('__init__', "")
        if init:
            # when calling Variable initializer
            create(init)._init_weight(desc, arr)
        else:
            # register nnvm::FSetInputVariableAttrs in the backend for new patterns
            # don't add new cases here.
            if desc.endswith('weight'):
                self._init_weight(desc, arr)
            elif desc.endswith('bias'):
                self._init_bias(desc, arr)
            elif desc.endswith('gamma'):
                self._init_gamma(desc, arr)
            elif desc.endswith('beta'):
                self._init_beta(desc, arr)
            else:
                self._init_default(desc, arr)
    def _legacy_init(self, name, arr):
        """Legacy initialization method.

        Parameters
        ----------
        name : str
            Name of corresponding NDArray.
        arr : NDArray
            NDArray to be initialized.
        """
        warnings.warn(
            "\033[91mCalling initializer with init(str, NDArray) has been deprecated." \
            "please use init(mx.init.InitDesc(...), NDArray) instead.\033[0m",
            DeprecationWarning, stacklevel=3)
        if not isinstance(name, string_types):
            raise TypeError('name must be string')
        if not isinstance(arr, NDArray):
            raise TypeError('arr must be NDArray')
        # Order matters: the more specific startswith checks come before the
        # generic suffix checks.
        if name.startswith('upsampling'):
            self._init_bilinear(name, arr)
        elif name.startswith('stn_loc') and name.endswith('weight'):
            self._init_zero(name, arr)
        elif name.startswith('stn_loc') and name.endswith('bias'):
            self._init_loc_bias(name, arr)
        elif name.endswith('bias'):
            self._init_bias(name, arr)
        elif name.endswith('gamma'):
            self._init_gamma(name, arr)
        elif name.endswith('beta'):
            self._init_beta(name, arr)
        elif name.endswith('weight'):
            self._init_weight(name, arr)
        elif name.endswith("moving_mean"):
            self._init_zero(name, arr)
        elif name.endswith("moving_var"):
            self._init_one(name, arr)
        elif name.endswith("moving_inv_var"):
            self._init_zero(name, arr)
        elif name.endswith("moving_avg"):
            self._init_zero(name, arr)
        else:
            self._init_default(name, arr)
    def _init_bilinear(self, _, arr):
        # Fill an upsampling weight with bilinear interpolation kernels
        # (assumes a 4-D weight; indexes shape[2]/shape[3] — TODO confirm NCHW).
        weight = np.zeros(np.prod(arr.shape), dtype='float32')
        shape = arr.shape
        f = np.ceil(shape[3] / 2.)
        c = (2 * f - 1 - f % 2) / (2. * f)
        for i in range(np.prod(shape)):
            x = i % shape[3]
            # NOTE(review): under Python 3 this `/` is float division, making
            # `y` fractional for most i — `//` looks intended; confirm.
            y = (i / shape[3]) % shape[2]
            weight[i] = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
        arr[:] = weight.reshape(shape)
    def _init_loc_bias(self, _, arr):
        # Identity affine transform for a spatial-transformer localization bias.
        shape = arr.shape
        assert(shape[0] == 6)
        arr[:] = np.array([1.0, 0, 0, 0, 1.0, 0])
    def _init_zero(self, _, arr):
        arr[:] = 0.0
    def _init_one(self, _, arr):
        arr[:] = 1.0
    def _init_bias(self, _, arr):
        arr[:] = 0.0
    def _init_gamma(self, _, arr):
        arr[:] = 1.0
    def _init_beta(self, _, arr):
        arr[:] = 0.0
    def _init_weight(self, name, arr):
        """Abstract method to Initialize weight."""
        raise NotImplementedError("Must override it")
    def _init_default(self, name, _):
        raise ValueError(
            'Unknown initialization pattern for %s. ' \
            'Default initialization is now limited to '\
            '"weight", "bias", "gamma" (1.0), and "beta" (0.0).' \
            'Please use mx.sym.Variable(init=mx.init.*) to set initialization pattern' % name)
# pylint: disable=invalid-name
# Module-level registry helpers: initializers register/alias/instantiate by
# their lower-cased class name (e.g. create('xavier')).
_register = registry.get_register_func(Initializer, 'initializer')
alias = registry.get_alias_func(Initializer, 'initializer')
create = registry.get_create_func(Initializer, 'initializer')
# pylint: enable=invalid-name
def register(klass):
    """Registers a custom initializer.

    Custom initializers can be created by extending `mx.init.Initializer` and implementing the
    required functions like `_init_weight` and `_init_bias`. The created initializer must be
    registered using `mx.init.register` before it can be called by name.

    Parameters
    ----------
    klass : class
        A subclass of `mx.init.Initializer` that needs to be registered as a custom initializer.

    Example
    -------
    >>> # Create and register a custom initializer that
    ... # initializes weights to 0.1 and biases to 1.
    ...
    >>> @mx.init.register
    ... @alias('myinit')
    ... class CustomInit(mx.init.Initializer):
    ...   def __init__(self):
    ...     super(CustomInit, self).__init__()
    ...   def _init_weight(self, _, arr):
    ...     arr[:] = 0.1
    ...   def _init_bias(self, _, arr):
    ...     arr[:] = 1
    ...
    >>> # Module is an instance of 'mxnet.module.Module'
    ...
    >>> module.init_params("custominit")
    >>> # module.init_params("myinit")
    >>> # module.init_params(CustomInit())
    """
    # Thin wrapper over the generic registry; kept as a named function so
    # the docstring above is discoverable.
    return _register(klass)
class Load(object):
    """Initializes variables by loading data from file or dict.

    **Note** Load will drop ``arg:`` or ``aux:`` from name and
    initialize the variables that match with the prefix dropped.

    Parameters
    ----------
    param: str or dict of str->`NDArray`
        Parameter file or dict mapping name to NDArray.
    default_init: Initializer
        Default initializer when name is not found in `param`.
    verbose: bool
        Flag for enabling logging of source when initializing.
    """
    def __init__(self, param, default_init=None, verbose=False):
        # Fix: accept any string type for the file path (`str`/`unicode` on
        # Python 2), consistent with FusedRNN's use of `string_types` below.
        if isinstance(param, string_types):
            param = load(param)
        assert isinstance(param, dict)
        self.param = {}
        for name, arr in param.items():
            # Strip the 'arg:'/'aux:' prefixes used in saved checkpoints.
            if name.startswith('arg:') or name.startswith('aux:'):
                self.param[name[4:]] = arr
            else:
                self.param[name] = arr
        self.default_init = default_init
        self.verbose = verbose
    def __call__(self, name, arr):
        if name in self.param:
            # Shapes must match exactly: loading cannot reshape parameters.
            assert arr.shape == self.param[name].shape, \
                'Parameter %s cannot be initialized from loading. '%name + \
                'Shape mismatch, target %s vs loaded %s'%(str(arr.shape),
                                                          self.param[name].shape)
            arr[:] = self.param[name]
            if self.verbose:
                logging.info('Initialized %s by loading', name)
        else:
            assert self.default_init is not None, \
                "Cannot Initialize %s. Not found in loaded param "%name + \
                "and no default Initializer is provided."
            self.default_init(name, arr)
            if self.verbose:
                logging.info('Initialized %s by default', name)
class Mixed(object):
    """Initialize parameters using multiple initializers.

    Parameters
    ----------
    patterns: list of str
        List of regular expressions matching parameter names.
    initializers: list of Initializer
        List of initializers corresponding to `patterns`.

    Example
    -------
    >>> # Given 'module', an instance of 'mxnet.module.Module', initialize biases to zero
    ... # and every other parameter to random values with uniform distribution.
    ...
    >>> init = mx.initializer.Mixed(['bias', '.*'], [mx.init.Zero(), mx.init.Uniform(0.1)])
    >>> module.init_params(init)
    >>>
    >>> for dictionary in module.get_params():
    ...     for key in dictionary:
    ...         print(key)
    ...         print(dictionary[key].asnumpy())
    ...
    fullyconnected1_weight
    [[ 0.0097627   0.01856892  0.04303787]]
    fullyconnected1_bias
    [ 0.]
    """
    def __init__(self, patterns, initializers):
        assert len(patterns) == len(initializers)
        self.map = list(zip([re.compile(p) for p in patterns], initializers))
    def __call__(self, name, arr):
        # First matching pattern wins; later patterns are ignored.
        for prog, init in self.map:
            if prog.match(name):
                init(name, arr)
                return
        # Bug fix: the original concatenated two literals containing '%s' but
        # never interpolated `name`, so the raw placeholder leaked into the
        # message (and 'at the and' was a typo for 'at the end').
        raise ValueError(
            'Parameter name %s did not match any pattern. Consider '
            'adding a ".*" pattern at the end with a default Initializer.' % name)
@register
@alias("zeros")
class Zero(Initializer):
    """Fills every weight with the constant 0.

    Example
    -------
    >>> # Given 'module', an instance of 'mxnet.module.Module', initialize weights to zero.
    ...
    >>> init = mx.initializer.Zero()
    >>> module.init_params(init)
    >>> for dictionary in module.get_params():
    ...     for key in dictionary:
    ...         print(key)
    ...         print(dictionary[key].asnumpy())
    ...
    fullyconnected0_weight
    [[ 0.  0.  0.]]
    """
    def __init__(self):
        super(Zero, self).__init__()
    def _init_weight(self, _, arr):
        # Broadcast the scalar over the entire array in place.
        arr[:] = 0
@register
@alias("ones")
class One(Initializer):
    """Fills every weight with the constant 1.

    Example
    -------
    >>> # Given 'module', an instance of 'mxnet.module.Module', initialize weights to one.
    ...
    >>> init = mx.initializer.One()
    >>> module.init_params(init)
    >>> for dictionary in module.get_params():
    ...     for key in dictionary:
    ...         print(key)
    ...         print(dictionary[key].asnumpy())
    ...
    fullyconnected0_weight
    [[ 1.  1.  1.]]
    """
    def __init__(self):
        super(One, self).__init__()
    def _init_weight(self, _, arr):
        # Broadcast the scalar over the entire array in place.
        arr[:] = 1
@register
class Constant(Initializer):
    """Fills every weight with a single user-supplied scalar.

    Parameters
    ----------
    value : float
        Fill value.
    """
    def __init__(self, value):
        # The value is also recorded in _kwargs so dumps() round-trips it.
        super(Constant, self).__init__(value=value)
        self.value = value
    def _init_weight(self, _, arr):
        # Broadcast the scalar over the entire array in place.
        arr[:] = self.value
@register
class Uniform(Initializer):
    """Fills weights with values drawn uniformly from [-`scale`, `scale`].

    Parameters
    ----------
    scale : float, optional
        The bound on the range of the generated random values.
        Values are generated from the range [-`scale`, `scale`].
        Default scale is 0.07.

    Example
    -------
    >>> # Given 'module', an instance of 'mxnet.module.Module', initialize weights
    >>> # to random values uniformly sampled between -0.1 and 0.1.
    ...
    >>> init = mx.init.Uniform(0.1)
    >>> module.init_params(init)
    >>> for dictionary in module.get_params():
    ...     for key in dictionary:
    ...         print(key)
    ...         print(dictionary[key].asnumpy())
    ...
    fullyconnected0_weight
    [[ 0.01360891 -0.02144304  0.08511933]]
    """
    def __init__(self, scale=0.07):
        super(Uniform, self).__init__(scale=scale)
        self.scale = scale
    def _init_weight(self, _, arr):
        # Sample directly into the target array on its device.
        random.uniform(-self.scale, self.scale, out=arr)
@register
class Normal(Initializer):
    """Fills weights with samples from N(0, `sigma`^2).

    Parameters
    ----------
    sigma : float, optional
        Standard deviation of the normal distribution.
        Default standard deviation is 0.01.

    Example
    -------
    >>> # Given 'module', an instance of 'mxnet.module.Module', initialize weights
    >>> # to random values sampled from a normal distribution.
    ...
    >>> init = mx.init.Normal(0.5)
    >>> module.init_params(init)
    >>> for dictionary in module.get_params():
    ...     for key in dictionary:
    ...         print(key)
    ...         print(dictionary[key].asnumpy())
    ...
    fullyconnected0_weight
    [[-0.3214761  -0.12660924  0.53789419]]
    """
    def __init__(self, sigma=0.01):
        super(Normal, self).__init__(sigma=sigma)
        self.sigma = sigma
    def _init_weight(self, _, arr):
        # Sample directly into the target array on its device.
        random.normal(0, self.sigma, out=arr)
@register
class Orthogonal(Initializer):
    """Initialize weight as orthogonal matrix.

    This initializer implements *Exact solutions to the nonlinear dynamics of
    learning in deep linear neural networks*, available at
    https://arxiv.org/abs/1312.6120.

    Parameters
    ----------
    scale : float optional
        Scaling factor of weight.

    rand_type: string optional
        Use "uniform" or "normal" random number to initialize weight.
    """
    def __init__(self, scale=1.414, rand_type="uniform"):
        super(Orthogonal, self).__init__(scale=scale, rand_type=rand_type)
        self.scale = scale
        self.rand_type = rand_type
    def _init_weight(self, _, arr):
        # Treat the weight as a (nout, nin) matrix by flattening the
        # trailing axes.
        nout = arr.shape[0]
        nin = np.prod(arr.shape[1:])
        if self.rand_type == "uniform":
            tmp = np.random.uniform(-1.0, 1.0, (nout, nin))
        elif self.rand_type == "normal":
            tmp = np.random.normal(0.0, 1.0, (nout, nin))
        else:
            # Bug fix: an unrecognized rand_type previously fell through and
            # crashed below with an opaque UnboundLocalError on `tmp`.
            raise ValueError("Unknown rand_type %s" % self.rand_type)
        # SVD factors have orthonormal rows/columns; pick whichever factor
        # has the requested (nout, nin) shape.
        u, _, v = np.linalg.svd(tmp, full_matrices=False)  # pylint: disable=invalid-name
        if u.shape == tmp.shape:
            res = u
        else:
            res = v
        res = self.scale * res.reshape(arr.shape)
        arr[:] = res
@register
class Xavier(Initializer):
    """Returns an initializer performing "Xavier" initialization for weights.

    This initializer is designed to keep the scale of gradients roughly the same
    in all layers.

    By default, `rnd_type` is ``'uniform'`` and `factor_type` is ``'avg'``,
    the initializer fills the weights with random numbers in the range
    of :math:`[-c, c]`, where :math:`c = \\sqrt{\\frac{3.}{0.5 * (n_{in} + n_{out})}}`.
    :math:`n_{in}` is the number of neurons feeding into weights, and :math:`n_{out}` is
    the number of neurons the result is fed to.

    If `rnd_type` is ``'uniform'`` and `factor_type` is ``'in'``,
    the :math:`c = \\sqrt{\\frac{3.}{n_{in}}}`.
    Similarly when `factor_type` is ``'out'``, the :math:`c = \\sqrt{\\frac{3.}{n_{out}}}`.

    If `rnd_type` is ``'gaussian'`` and `factor_type` is ``'avg'``,
    the initializer fills the weights with numbers from normal distribution with
    a standard deviation of :math:`\\sqrt{\\frac{3.}{0.5 * (n_{in} + n_{out})}}`.

    Parameters
    ----------
    rnd_type: str, optional
        Random generator type, can be ``'gaussian'`` or ``'uniform'``.

    factor_type: str, optional
        Can be ``'avg'``, ``'in'``, or ``'out'``.

    magnitude: float, optional
        Scale of random number.
    """
    def __init__(self, rnd_type="uniform", factor_type="avg", magnitude=3):
        super(Xavier, self).__init__(rnd_type=rnd_type, factor_type=factor_type,
                                     magnitude=magnitude)
        self.rnd_type = rnd_type
        self.factor_type = factor_type
        self.magnitude = float(magnitude)
    def _init_weight(self, _, arr):
        shape = arr.shape
        # Axes beyond the first two count as spatial extent for fan
        # computation (e.g. kernel height/width of a convolution).
        spatial = 1.
        if len(shape) > 2:
            spatial = np.prod(shape[2:])
        fan_in = shape[1] * spatial
        fan_out = shape[0] * spatial
        if self.factor_type == "avg":
            factor = (fan_in + fan_out) / 2.0
        elif self.factor_type == "in":
            factor = fan_in
        elif self.factor_type == "out":
            factor = fan_out
        else:
            raise ValueError("Incorrect factor type")
        scale = np.sqrt(self.magnitude / factor)
        if self.rnd_type == "uniform":
            random.uniform(-scale, scale, out=arr)
        elif self.rnd_type == "gaussian":
            random.normal(0, scale, out=arr)
        else:
            raise ValueError("Unknown random type")
@register
class MSRAPrelu(Xavier):
    """Initialize the weight according to a MSRA paper.

    This initializer implements *Delving Deep into Rectifiers: Surpassing
    Human-Level Performance on ImageNet Classification*, available at
    https://arxiv.org/abs/1502.01852.

    This initializer is proposed for initialization related to ReLu activation,
    it maked some changes on top of Xavier method.

    Parameters
    ----------
    factor_type: str, optional
        Can be ``'avg'``, ``'in'``, or ``'out'``.

    slope: float, optional
        initial slope of any PReLU (or similar) nonlinearities.
    """
    def __init__(self, factor_type="avg", slope=0.25):
        # Gaussian Xavier with magnitude 2 / (1 + slope^2), per He et al.
        magnitude = 2. / (1 + slope ** 2)
        super(MSRAPrelu, self).__init__("gaussian", factor_type, magnitude)
        # Record only the user-facing arguments for dumps().
        self._kwargs = {'factor_type': factor_type, 'slope': slope}
@register
class Bilinear(Initializer):
    """Initialize weight for upsampling layers."""
    def __init__(self):
        super(Bilinear, self).__init__()
    def _init_weight(self, _, arr):
        # Fill the weight with bilinear interpolation kernels.
        # Assumes a 4-D weight; indexes shape[2]/shape[3] — TODO confirm NCHW.
        weight = np.zeros(np.prod(arr.shape), dtype='float32')
        shape = arr.shape
        f = np.ceil(shape[3] / 2.)
        c = (2 * f - 1 - f % 2) / (2. * f)
        for i in range(np.prod(shape)):
            x = i % shape[3]
            # Bug fix: use floor division so `y` stays an integral row index
            # under Python 3; plain `/` yields a fractional `y` there and
            # produces wrong interpolation weights.  `//` is identical to
            # the original `/` on Python 2 ints.
            y = (i // shape[3]) % shape[2]
            weight[i] = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
        arr[:] = weight.reshape(shape)
@register
class LSTMBias(Initializer):
    """Initialize all bias of an LSTMCell to 0.0 except for
    the forget gate whose bias is set to custom value.

    Parameters
    ----------
    forget_bias: float, bias for the forget gate.
        Jozefowicz et al. 2015 recommends setting this to 1.0.
    """
    def __init__(self, forget_bias):
        super(LSTMBias, self).__init__(forget_bias=forget_bias)
        self.forget_bias = forget_bias
    def _init_weight(self, name, arr):
        # Zero everything first, then overwrite the forget-gate slice.
        arr[:] = 0.0
        # In LSTMCell the forget gate is the second of the four gate
        # sections of the bias vector.
        num_hidden = int(arr.shape[0] / 4)
        arr[num_hidden:2 * num_hidden] = self.forget_bias
@register
class FusedRNN(Initializer):
    """Initialize parameters for fused rnn layers.

    Parameters
    ----------
    init : Initializer
        intializer applied to unpacked weights. Fall back to global
        initializer if None.
    num_hidden : int
        should be the same with arguments passed to FusedRNNCell.
    num_layers : int
        should be the same with arguments passed to FusedRNNCell.
    mode : str
        should be the same with arguments passed to FusedRNNCell.
    bidirectional : bool
        should be the same with arguments passed to FusedRNNCell.
    forget_bias : float
        should be the same with arguments passed to FusedRNNCell.
    """
    def __init__(self, init, num_hidden, num_layers, mode, bidirectional=False, forget_bias=1.0):
        # `init` may arrive as the JSON string produced by Initializer.dumps();
        # recreate the concrete initializer from the registry in that case.
        if isinstance(init, string_types):
            klass, kwargs = json.loads(init)
            init = _INITIALIZER_REGISTRY[klass.lower()](**kwargs)
        super(FusedRNN, self).__init__(init=init.dumps() if init is not None else None,
                                       num_hidden=num_hidden, num_layers=num_layers, mode=mode,
                                       bidirectional=bidirectional, forget_bias=forget_bias)
        self._init = init
        self._num_hidden = num_hidden
        self._num_layers = num_layers
        self._mode = mode
        self._bidirectional = bidirectional
        self._forget_bias = forget_bias
    def _init_weight(self, desc, arr):
        # Unpack the fused parameter blob into named pieces, initialize each
        # piece individually, then repack into `arr`.
        from .rnn import rnn_cell
        # Throw-away cell used only for its pack/unpack layout logic.
        cell = rnn_cell.FusedRNNCell(self._num_hidden, self._num_layers,
                                     self._mode, self._bidirectional,
                                     forget_bias=self._forget_bias, prefix='')
        args = cell.unpack_weights({'parameters': arr})
        for name in args:
            arg_desc = InitDesc(name, global_init=desc.global_init)
            # for lstm bias, we use a custom initializer
            # which adds a bias to the forget gate
            if self._mode == 'lstm' and name.endswith("_f_bias"):
                args[name][:] = self._forget_bias
            elif self._init is None:
                desc.global_init(arg_desc, args[name])
            else:
                self._init(arg_desc, args[name])
        arr[:] = cell.pack_weights(args)['parameters']
|
|
"""
Utilities to manipulate graphs (vertices and edges, not control flow graphs).
Convention:
'vertices' is a set of vertices (or a dict with vertices as keys);
'edges' is a dict mapping vertices to a list of edges with its source.
Note that we can usually use 'edges' as the set of 'vertices' too.
"""
from rpython.tool.identity_dict import identity_dict
class Edge:
    """A directed edge from `source` to `target`."""
    def __init__(self, source, target):
        self.source = source
        self.target = target
    def __repr__(self):
        return '{!r} -> {!r}'.format(self.source, self.target)
def make_edge_dict(edge_list):
    """Put a list of edges in the official dict format: every vertex (source
    or target) maps to the list of edges leaving it."""
    edges = {}
    for e in edge_list:
        edges.setdefault(e.source, []).append(e)
        # Targets get an entry too, so 'edges' doubles as the vertex set.
        edges.setdefault(e.target, [])
    return edges
def depth_first_search(root, vertices, edges):
    """Return the list of ('start', v) / ('stop', v) events produced by a
    recursive DFS from `root`, restricted to `vertices`."""
    visited = {}
    events = []
    def walk(vertex):
        events.append(('start', vertex))
        visited[vertex] = True
        for edge in edges[vertex]:
            succ = edge.target
            if succ in vertices and succ not in visited:
                walk(succ)
        events.append(('stop', vertex))
    walk(root)
    return events
def vertices_reachable_from(root, vertices, edges):
    """Yield every vertex reachable from `root` (including `root` itself)."""
    for event, vertex in depth_first_search(root, vertices, edges):
        if event == 'start':
            yield vertex
def strong_components(vertices, edges):
    """Enumerates the strongly connected components of a graph. Each one is
    a set of vertices where any vertex can be reached from any other vertex by
    following the edges. In a tree, all strongly connected components are
    sets of size 1; larger sets are unions of cycles.
    """
    # Path-based SCC detection: DFS from each unvisited vertex, track the
    # earliest-discovered candidate root of each vertex's component, and pop
    # a finished component off `stack` when a vertex turns out to be its
    # own root.
    component_root = {}    # vertex -> provisional root of its component
    discovery_time = {}    # vertex -> DFS discovery index
    remaining = vertices.copy()
    stack = []
    for root in vertices:
        if root in remaining:
            for event, v in depth_first_search(root, remaining, edges):
                if event == 'start':
                    del remaining[v]
                    discovery_time[v] = len(discovery_time)
                    component_root[v] = v
                    stack.append(v)
                else: # event == 'stop'
                    # On exit, adopt the earliest-discovered root reachable
                    # through any outgoing edge.
                    vroot = v
                    for edge in edges[v]:
                        w = edge.target
                        if w in component_root:
                            wroot = component_root[w]
                            if discovery_time[wroot] < discovery_time[vroot]:
                                vroot = wroot
                    if vroot == v:
                        # v roots a finished component: everything pushed on
                        # the stack since v belongs to it.
                        component = {}
                        while True:
                            w = stack.pop()
                            del component_root[w]
                            component[w] = True
                            if w == v:
                                break
                        yield component
                    else:
                        component_root[v] = vroot
def all_cycles(root, vertices, edges):
    """Enumerates cycles. Each cycle is a list of edges.
    This may not give strictly all cycles if they are many intermixed cycles.
    """
    stackpos = {}     # vertex -> index into `edgestack` when first entered;
                      # None once the vertex is fully processed
    edgestack = []    # edges of the current DFS path, root to here
    result = []
    def visit(v):
        if v not in stackpos:
            stackpos[v] = len(edgestack)
            for edge in edges[v]:
                if edge.target in vertices:
                    edgestack.append(edge)
                    visit(edge.target)
                    edgestack.pop()
            # Mark done: later hits on v are no longer back-edges.
            stackpos[v] = None
        else:
            if stackpos[v] is not None: # back-edge
                # The tail of the edge stack, from v's entry point to here,
                # closes a cycle.
                result.append(edgestack[stackpos[v]:])
    visit(root)
    return result
def find_roots(vertices, edges):
    """Find roots, i.e. a minimal set of vertices such that all other
    vertices are reachable from them."""
    rep = {}      # maps all vertices to a random representing vertex
                  # from the same strongly connected component
    for component in strong_components(vertices, edges):
        random_vertex, _ = component.popitem()
        rep[random_vertex] = random_vertex
        for v in component:
            rep[v] = random_vertex
    # Start with one representative per component, then discard every
    # representative whose component is entered by a cross-component edge.
    roots = set(rep.values())
    for v in vertices:
        v1 = rep[v]
        for edge in edges[v]:
            try:
                v2 = rep[edge.target]
                if v1 is not v2:      # cross-component edge: no root is needed
                    roots.remove(v2)  # in the target component
            except KeyError:
                pass
            # NOTE(review): the except also swallows the KeyError raised by
            # roots.remove() when v2 was already discarded — presumably
            # intentional, since a component can be entered more than once;
            # confirm.
    return roots
def compute_depths(roots, vertices, edges):
    """The 'depth' of a vertex is its minimal distance from any root.
    Computed with a breadth-first sweep starting from all roots at once."""
    depths = {}
    for root in roots:
        depths[root] = 0
    frontier = list(roots)
    level = 0
    while frontier:
        level += 1
        next_frontier = []
        for vertex in frontier:
            for edge in edges[vertex]:
                succ = edge.target
                # First time we reach a vertex is, by BFS, its minimal depth.
                if succ in vertices and succ not in depths:
                    depths[succ] = level
                    next_frontier.append(succ)
        frontier = next_frontier
    return depths
def is_acyclic(vertices, edges):
    """True if the graph restricted to `vertices` contains no cycle."""
    class CycleFound(Exception):
        pass
    def visit(vertex):
        # `visiting` holds the current DFS stack; reaching a vertex that is
        # already on it means we followed a back-edge, i.e. found a cycle.
        visiting[vertex] = True
        for edge in edges[vertex]:
            succ = edge.target
            if succ in visiting:
                raise CycleFound
            if succ in unvisited:
                del unvisited[succ]
                visit(succ)
        del visiting[vertex]
    unvisited = vertices.copy()
    try:
        # Restart the DFS until every vertex has been covered.
        while unvisited:
            visiting = {}
            visit(unvisited.popitem()[0])
    except CycleFound:
        return False
    return True
def break_cycles(vertices, edges):
    """Enumerates a reasonably minimal set of edges that must be removed to
    make the graph acyclic."""
    # Dead code: unconditionally skipped below.  Kept for reference only;
    # see break_cycles_v() for the vertex-based replacement.
    import py; py.test.skip("break_cycles() is not used any more")
    # the approach is as follows: starting from each root, find some set
    # of cycles using a simple depth-first search. Then break the
    # edge that is part of the most cycles. Repeat.
    remaining_edges = edges.copy()
    progress = True
    roots_finished = set()
    while progress:
        roots = list(find_roots(vertices, remaining_edges))
        #print '%d inital roots' % (len(roots,))
        progress = False
        for root in roots:
            if root in roots_finished:
                continue
            cycles = all_cycles(root, vertices, remaining_edges)
            if not cycles:
                roots_finished.add(root)
                continue
            #print 'from root %r: %d cycles' % (root, len(cycles))
            # Weight each edge by how many of the found cycles it belongs to.
            allcycles = identity_dict()
            edge2cycles = {}
            for cycle in cycles:
                allcycles[cycle] = cycle
                for edge in cycle:
                    edge2cycles.setdefault(edge, []).append(cycle)
            edge_weights = {}
            for edge, cycle in edge2cycles.iteritems():
                edge_weights[edge] = len(cycle)
            # Greedily remove the heaviest edge until no cycle remains.
            while allcycles:
                max_weight = 0
                max_edge = None
                for edge, weight in edge_weights.iteritems():
                    if weight > max_weight:
                        max_edge = edge
                        max_weight = weight
                if max_edge is None:
                    break
                # kill this edge
                yield max_edge
                progress = True
                # unregister all cycles that have just been broken
                for broken_cycle in edge2cycles[max_edge]:
                    broken_cycle = allcycles.pop(broken_cycle, ())
                    for edge in broken_cycle:
                        edge_weights[edge] -= 1
                # Rebuild the source's edge list without the removed edge.
                lst = remaining_edges[max_edge.source][:]
                lst.remove(max_edge)
                remaining_edges[max_edge.source] = lst
    assert is_acyclic(vertices, remaining_edges)
def break_cycles_v(vertices, edges):
    """Enumerates a reasonably minimal set of vertices that must be removed to
    make the graph acyclic.

    Yields the removed vertices one at a time."""
    # Consider where each cycle should be broken -- we go for the idea
    # that it is often better to break it as far as possible from the
    # cycle's entry point, so that the stack check occurs as late as
    # possible.  For the distance we use a global "depth" computed as
    # the distance from the roots.  The algo below is:
    #  - get a list of cycles
    #  - let maxdepth(cycle) = max(depth(vertex) for vertex in cycle)
    #  - sort the list of cycles by their maxdepth, nearest first
    #  - for each cycle in the list, if the cycle is not broken yet,
    #      remove the vertex with the largest depth
    #  - repeat the whole procedure until no more cycles are found.
    # Ordering the cycles themselves nearest first maximizes the chances
    # that when breaking a nearby cycle - which must be broken in any
    # case - we remove a vertex and break some further cycles by chance.
    v_depths = vertices
    progress = True
    roots_finished = set()
    while progress:
        roots = list(find_roots(v_depths, edges))
        if v_depths is vertices:     # first time only
            v_depths = compute_depths(roots, vertices, edges)
            assert len(v_depths) == len(vertices) # ...so far.  We remove
            # from v_depths the vertices at which we choose to break cycles
        #print '%d inital roots' % (len(roots,))
        progress = False
        for root in roots:
            if root in roots_finished:
                continue
            cycles = all_cycles(root, v_depths, edges)
            if not cycles:
                # Remember cycle-free roots so later passes skip them.
                roots_finished.add(root)
                continue
            #print 'from root %r: %d cycles' % (root, len(cycles))
            # compute the "depth" of each cycles: how far it goes from any root
            allcycles = []
            for cycle in cycles:
                cycledepth = max([v_depths[edge.source] for edge in cycle])
                allcycles.append((cycledepth, cycle))
            allcycles.sort()
            # consider all cycles starting from the ones with smallest depth
            for _, cycle in allcycles:
                try:
                    choices = [(v_depths[edge.source], edge.source)
                               for edge in cycle]
                except KeyError:
                    pass    # this cycle was already broken
                else:
                    # break this cycle by removing the furthest vertex
                    max_depth, max_vertex = max(choices)
                    del v_depths[max_vertex]
                    yield max_vertex
                    progress = True
    assert is_acyclic(v_depths, edges)
def show_graph(vertices, edges):
    """Debug helper: render the graph with Graphviz and pop up a viewer."""
    from rpython.translator.tool.graphpage import GraphPage, DotGen
    class MathGraphPage(GraphPage):
        def compute(self):
            # Assign stable dot node names, then emit nodes and their edges.
            dotgen = DotGen('mathgraph')
            names = {}
            for i, v in enumerate(vertices):
                names[v] = 'node%d' % i
            for i, v in enumerate(vertices):
                dotgen.emit_node(names[v], label=str(v))
                for edge in edges[v]:
                    dotgen.emit_edge(names[edge.source], names[edge.target])
            self.source = dotgen.generate(target=None)
    MathGraphPage().display()
|
|
"""
A test spanning all the capabilities of all the serializers.
This class defines sample data and a dynamically generated
test case that is capable of testing the capabilities of
the serializers. This includes all valid data values, plus
forward, backwards and self references.
"""
import datetime
import decimal
import uuid
from django.core import serializers
from django.db import connection, models
from django.test import TestCase
from .models import (
Anchor, AutoNowDateTimeData, BigIntegerData, BinaryData, BooleanData,
BooleanPKData, CharData, CharPKData, DateData, DatePKData, DateTimeData,
DateTimePKData, DecimalData, DecimalPKData, EmailData, EmailPKData,
ExplicitInheritBaseModel, FileData, FilePathData, FilePathPKData, FKData,
FKDataToField, FKDataToO2O, FKSelfData, FKToUUID, FloatData, FloatPKData,
GenericData, GenericIPAddressData, GenericIPAddressPKData,
InheritAbstractModel, InheritBaseModel, IntegerData, IntegerPKData,
Intermediate, LengthModel, M2MData, M2MIntermediateData, M2MSelfData,
ModifyingSaveData, O2OData, PositiveBigIntegerData, PositiveIntegerData,
PositiveIntegerPKData, PositiveSmallIntegerData,
PositiveSmallIntegerPKData, SlugData, SlugPKData, SmallData, SmallPKData,
Tag, TextData, TimeData, UniqueAnchor, UUIDData, UUIDDefaultData,
)
from .tests import register_tests
# A set of functions that can be used to recreate
# test data objects of various kinds.
# The save method is a raw base model save, to make
# sure that the data in the database matches the
# exact test case.
def data_create(pk, klass, data):
    """Raw-save a `klass` row with the given pk and `data` field value."""
    obj = klass(id=pk)
    obj.data = data
    models.Model.save_base(obj, raw=True)
    return [obj]
def generic_create(pk, klass, data):
    """Raw-save an instance whose first data item is the field value; the
    remaining items become related tags."""
    obj = klass(id=pk)
    obj.data = data[0]
    models.Model.save_base(obj, raw=True)
    for tag in data[1:]:
        obj.tags.create(data=tag)
    return [obj]
def fk_create(pk, klass, data):
    """Raw-save an instance whose FK points at the pk given in `data`."""
    obj = klass(id=pk)
    # Direct attribute assignment is equivalent to the setattr form.
    obj.data_id = data
    models.Model.save_base(obj, raw=True)
    return [obj]
def m2m_create(pk, klass, data):
    """Raw-save an instance, then attach its m2m targets (the save must
    happen first so the through-table rows have a valid FK)."""
    obj = klass(id=pk)
    models.Model.save_base(obj, raw=True)
    obj.data.set(data)
    return [obj]
def im2m_create(pk, klass, data):
    """Raw-save a bare instance; the m2m rows are created separately via the
    intermediate model (`data` is intentionally unused)."""
    obj = klass(id=pk)
    models.Model.save_base(obj, raw=True)
    return [obj]
def im_create(pk, klass, data):
    """Raw-save an intermediate row linking data['left'] and data['right'],
    with an optional data['extra'] payload."""
    obj = klass(id=pk)
    obj.right_id = data['right']
    obj.left_id = data['left']
    # 'extra' is optional; leave the model default in place when absent.
    if 'extra' in data:
        obj.extra = data['extra']
    models.Model.save_base(obj, raw=True)
    return [obj]
def o2o_create(pk, klass, data):
    """Raw-save a one-to-one row; the pk is implied by the o2o field, so the
    `pk` argument is intentionally unused."""
    obj = klass()
    obj.data_id = data
    models.Model.save_base(obj, raw=True)
    return [obj]
def pk_create(pk, klass, data):
    """Raw-save an instance whose `data` field *is* the primary key, so the
    `pk` argument is intentionally unused."""
    obj = klass()
    obj.data = data
    models.Model.save_base(obj, raw=True)
    return [obj]
def inherited_create(pk, klass, data):
    """Create a child instance and return it plus one instance per parent
    class, so object counts line up in the round-trip assertions."""
    instance = klass(id=pk, **data)
    # This isn't a raw save because:
    # 1) we're testing inheritance, not field behavior, so none
    # of the field values need to be protected.
    # 2) saving the child class and having the parent created
    # automatically is easier than manually creating both.
    models.Model.save(instance)
    created = [instance]
    # NOTE: the loop variable deliberately rebinds `klass` to each parent
    # model class; the original argument is no longer needed at this point.
    for klass in instance._meta.parents:
        created.append(klass.objects.get(id=pk))
    return created
# A set of functions that can be used to compare
# test data objects of various kinds
def data_compare(testcase, pk, klass, data):
    """Assert the round-tripped instance's data equals the original value.

    BinaryData values are normalized through bytes() so buffer-like objects
    (memoryview vs. bytes) compare by content.
    """
    instance = klass.objects.get(id=pk)
    if klass == BinaryData and data is not None:
        testcase.assertEqual(
            bytes(data), bytes(instance.data),
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
                pk, repr(bytes(data)), type(data), repr(bytes(instance.data)),
                type(instance.data),
            )
        )
    else:
        # NOTE(review): the failure message interpolates `instance` where
        # `instance.data` was presumably intended — confirm before relying
        # on the message text.
        testcase.assertEqual(
            data, instance.data,
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
                pk, data, type(data), instance, type(instance.data),
            )
        )
def generic_compare(testcase, pk, klass, data):
    """Verify the instance's data field and its attached generic tags."""
    obj = klass.objects.get(id=pk)
    testcase.assertEqual(data[0], obj.data)
    tag_values = [tag.data for tag in obj.tags.order_by('id')]
    testcase.assertEqual(data[1:], tag_values)
def fk_compare(testcase, pk, klass, data):
    """Verify the raw FK id stored on the instance."""
    obj = klass.objects.get(id=pk)
    testcase.assertEqual(data, obj.data_id)
def m2m_compare(testcase, pk, klass, data):
    """Verify the ordered list of related-object ids on the M2M field."""
    obj = klass.objects.get(id=pk)
    related_ids = [related.id for related in obj.data.order_by('id')]
    testcase.assertEqual(data, related_ids)
def im2m_compare(testcase, pk, klass, data):
    """Verify the intermediate-M2M instance simply exists.

    get() raises DoesNotExist on failure; there is nothing else to assert.
    """
    klass.objects.get(id=pk)
def im_compare(testcase, pk, klass, data):
    """Verify both FK sides of a 'through' row plus its optional extra
    field, which has a known default when the test data omits it."""
    obj = klass.objects.get(id=pk)
    testcase.assertEqual(data['left'], obj.left_id)
    testcase.assertEqual(data['right'], obj.right_id)
    expected_extra = data.get('extra', "doesn't matter")
    testcase.assertEqual(expected_extra, obj.extra)
def o2o_compare(testcase, pk, klass, data):
    # O2O rows are created without an explicit PK (see o2o_create), so the
    # lookup is by the one-to-one data field rather than by id; `pk` is unused.
    instance = klass.objects.get(data=data)
    testcase.assertEqual(data, instance.data_id)
def pk_compare(testcase, pk, klass, data):
    # The data field IS the primary key for these models, so look up by
    # data; the `pk` argument is unused.
    instance = klass.objects.get(data=data)
    testcase.assertEqual(data, instance.data)
def inherited_compare(testcase, pk, klass, data):
    """Verify every field recorded for an inherited-model test case."""
    obj = klass.objects.get(id=pk)
    for field_name, expected in data.items():
        testcase.assertEqual(expected, getattr(obj, field_name))
# Define some data types. Each data type is
# actually a (create, compare) pair of the functions defined above; the
# test driver calls pair[0] to build objects and pair[1] to verify them.
data_obj = (data_create, data_compare)
generic_obj = (generic_create, generic_compare)
fk_obj = (fk_create, fk_compare)
m2m_obj = (m2m_create, m2m_compare)
im2m_obj = (im2m_create, im2m_compare)
im_obj = (im_create, im_compare)
o2o_obj = (o2o_create, o2o_compare)
pk_obj = (pk_create, pk_compare)
inherited_obj = (inherited_create, inherited_compare)
# One UUID shared by all UUID-related rows so the FK reference matches.
uuid_obj = uuid.uuid4()
test_data = [
    # Format: (data type, PK value, Model Class, data)
    (data_obj, 1, BinaryData, memoryview(b"\x05\xFD\x00")),
    (data_obj, 2, BinaryData, None),
    (data_obj, 5, BooleanData, True),
    (data_obj, 6, BooleanData, False),
    (data_obj, 7, BooleanData, None),
    (data_obj, 10, CharData, "Test Char Data"),
    (data_obj, 11, CharData, ""),
    (data_obj, 12, CharData, "None"),
    (data_obj, 13, CharData, "null"),
    (data_obj, 14, CharData, "NULL"),
    (data_obj, 15, CharData, None),
    # (We use something that will fit into a latin1 database encoding here,
    # because that is still the default used on many system setups.)
    (data_obj, 16, CharData, '\xa5'),
    (data_obj, 20, DateData, datetime.date(2006, 6, 16)),
    (data_obj, 21, DateData, None),
    (data_obj, 30, DateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (data_obj, 31, DateTimeData, None),
    (data_obj, 40, EmailData, "hovercraft@example.com"),
    (data_obj, 41, EmailData, None),
    (data_obj, 42, EmailData, ""),
    (data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'),
    # (data_obj, 51, FileData, None),
    (data_obj, 52, FileData, ""),
    (data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
    (data_obj, 61, FilePathData, None),
    (data_obj, 62, FilePathData, ""),
    (data_obj, 70, DecimalData, decimal.Decimal('12.345')),
    (data_obj, 71, DecimalData, decimal.Decimal('-12.345')),
    (data_obj, 72, DecimalData, decimal.Decimal('0.0')),
    (data_obj, 73, DecimalData, None),
    (data_obj, 74, FloatData, 12.345),
    (data_obj, 75, FloatData, -12.345),
    (data_obj, 76, FloatData, 0.0),
    (data_obj, 77, FloatData, None),
    (data_obj, 80, IntegerData, 123456789),
    (data_obj, 81, IntegerData, -123456789),
    (data_obj, 82, IntegerData, 0),
    (data_obj, 83, IntegerData, None),
    # (XX, ImageData
    (data_obj, 95, GenericIPAddressData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
    (data_obj, 96, GenericIPAddressData, None),
    (data_obj, 110, PositiveBigIntegerData, 9223372036854775807),
    (data_obj, 111, PositiveBigIntegerData, None),
    (data_obj, 120, PositiveIntegerData, 123456789),
    (data_obj, 121, PositiveIntegerData, None),
    (data_obj, 130, PositiveSmallIntegerData, 12),
    (data_obj, 131, PositiveSmallIntegerData, None),
    (data_obj, 140, SlugData, "this-is-a-slug"),
    (data_obj, 141, SlugData, None),
    (data_obj, 142, SlugData, ""),
    (data_obj, 150, SmallData, 12),
    (data_obj, 151, SmallData, -12),
    (data_obj, 152, SmallData, 0),
    (data_obj, 153, SmallData, None),
    (data_obj, 160, TextData, """This is a long piece of text.
It contains line breaks.
Several of them.
The end."""),
    (data_obj, 161, TextData, ""),
    (data_obj, 162, TextData, None),
    (data_obj, 170, TimeData, datetime.time(10, 42, 37)),
    (data_obj, 171, TimeData, None),
    (generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']),
    (generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']),
    # Anchors with PK < 500 exist before the rows that reference them
    # ("post reference"); PKs >= 500 are created afterwards ("pre reference").
    (data_obj, 300, Anchor, "Anchor 1"),
    (data_obj, 301, Anchor, "Anchor 2"),
    (data_obj, 302, UniqueAnchor, "UAnchor 1"),
    (fk_obj, 400, FKData, 300), # Post reference
    (fk_obj, 401, FKData, 500), # Pre reference
    (fk_obj, 402, FKData, None), # Empty reference
    (m2m_obj, 410, M2MData, []), # Empty set
    (m2m_obj, 411, M2MData, [300, 301]), # Post reference
    (m2m_obj, 412, M2MData, [500, 501]), # Pre reference
    (m2m_obj, 413, M2MData, [300, 301, 500, 501]), # Pre and Post reference
    (o2o_obj, None, O2OData, 300), # Post reference
    (o2o_obj, None, O2OData, 500), # Pre reference
    (fk_obj, 430, FKSelfData, 431), # Pre reference
    (fk_obj, 431, FKSelfData, 430), # Post reference
    (fk_obj, 432, FKSelfData, None), # Empty reference
    (m2m_obj, 440, M2MSelfData, []),
    (m2m_obj, 441, M2MSelfData, []),
    (m2m_obj, 442, M2MSelfData, [440, 441]),
    (m2m_obj, 443, M2MSelfData, [445, 446]),
    (m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
    (m2m_obj, 445, M2MSelfData, []),
    (m2m_obj, 446, M2MSelfData, []),
    (fk_obj, 450, FKDataToField, "UAnchor 1"),
    (fk_obj, 451, FKDataToField, "UAnchor 2"),
    (fk_obj, 452, FKDataToField, None),
    (fk_obj, 460, FKDataToO2O, 300),
    (im2m_obj, 470, M2MIntermediateData, None),
    # testing post- and pre-references and extra fields
    (im_obj, 480, Intermediate, {'right': 300, 'left': 470}),
    (im_obj, 481, Intermediate, {'right': 300, 'left': 490}),
    (im_obj, 482, Intermediate, {'right': 500, 'left': 470}),
    (im_obj, 483, Intermediate, {'right': 500, 'left': 490}),
    (im_obj, 484, Intermediate, {'right': 300, 'left': 470, 'extra': "extra"}),
    (im_obj, 485, Intermediate, {'right': 300, 'left': 490, 'extra': "extra"}),
    (im_obj, 486, Intermediate, {'right': 500, 'left': 470, 'extra': "extra"}),
    (im_obj, 487, Intermediate, {'right': 500, 'left': 490, 'extra': "extra"}),
    (im2m_obj, 490, M2MIntermediateData, []),
    (data_obj, 500, Anchor, "Anchor 3"),
    (data_obj, 501, Anchor, "Anchor 4"),
    (data_obj, 502, UniqueAnchor, "UAnchor 2"),
    # Models whose data field doubles as the primary key.
    (pk_obj, 601, BooleanPKData, True),
    (pk_obj, 602, BooleanPKData, False),
    (pk_obj, 610, CharPKData, "Test Char PKData"),
    (pk_obj, 620, DatePKData, datetime.date(2006, 6, 16)),
    (pk_obj, 630, DateTimePKData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (pk_obj, 640, EmailPKData, "hovercraft@example.com"),
    # (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'),
    (pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
    (pk_obj, 670, DecimalPKData, decimal.Decimal('12.345')),
    (pk_obj, 671, DecimalPKData, decimal.Decimal('-12.345')),
    (pk_obj, 672, DecimalPKData, decimal.Decimal('0.0')),
    (pk_obj, 673, FloatPKData, 12.345),
    (pk_obj, 674, FloatPKData, -12.345),
    (pk_obj, 675, FloatPKData, 0.0),
    (pk_obj, 680, IntegerPKData, 123456789),
    (pk_obj, 681, IntegerPKData, -123456789),
    (pk_obj, 682, IntegerPKData, 0),
    # (XX, ImagePKData
    (pk_obj, 695, GenericIPAddressPKData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
    (pk_obj, 720, PositiveIntegerPKData, 123456789),
    (pk_obj, 730, PositiveSmallIntegerPKData, 12),
    (pk_obj, 740, SlugPKData, "this-is-a-slug"),
    (pk_obj, 750, SmallPKData, 12),
    (pk_obj, 751, SmallPKData, -12),
    (pk_obj, 752, SmallPKData, 0),
    # (pk_obj, 760, TextPKData, """This is a long piece of text.
    # It contains line breaks.
    # Several of them.
    # The end."""),
    # (pk_obj, 770, TimePKData, datetime.time(10, 42, 37)),
    # (pk_obj, 790, XMLPKData, "<foo></foo>"),
    (pk_obj, 791, UUIDData, uuid_obj),
    (fk_obj, 792, FKToUUID, uuid_obj),
    (pk_obj, 793, UUIDDefaultData, uuid_obj),
    (data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (data_obj, 810, ModifyingSaveData, 42),
    (inherited_obj, 900, InheritAbstractModel, {'child_data': 37, 'parent_data': 42}),
    (inherited_obj, 910, ExplicitInheritBaseModel, {'child_data': 37, 'parent_data': 42}),
    (inherited_obj, 920, InheritBaseModel, {'child_data': 37, 'parent_data': 42}),
    (data_obj, 1000, BigIntegerData, 9223372036854775807),
    (data_obj, 1001, BigIntegerData, -9223372036854775808),
    (data_obj, 1002, BigIntegerData, 0),
    (data_obj, 1003, BigIntegerData, None),
    (data_obj, 1004, LengthModel, 0),
    (data_obj, 1005, LengthModel, 1),
]
# Because Oracle treats the empty string as NULL, Oracle is expected to fail
# when field.empty_strings_allowed is True and the value is None; skip these
# tests. (data[0] is the (create, compare) pair, data[2] the model class and
# data[3] the value, per the test_data tuple format above.)
if connection.features.interprets_empty_strings_as_nulls:
    test_data = [data for data in test_data
                 if not (data[0] == data_obj and
                         data[2]._meta.get_field('data').empty_strings_allowed and
                         data[3] is None)]
class SerializerDataTests(TestCase):
    """Empty container TestCase; one test method per serializer format is
    attached dynamically via register_tests() at module import time."""
    pass
def serializerTest(self, format):
    """Round-trip the whole test dataset through `format` and verify both
    field values and per-class object counts survive.

    Fix: the original "count objects for each class" loop iterated over the
    freshly-created *empty* `instance_count` dict, so no counts were ever
    recorded and the final count assertions were vacuous.  The counts are
    now collected from the classes listed in test_data.
    """
    # FK to an object with PK of 0. This won't work on MySQL without the
    # NO_AUTO_VALUE_ON_ZERO SQL mode since it won't let you create an object
    # with an autoincrement primary key of 0.
    if connection.features.allows_auto_pk_0:
        test_data.extend([
            (data_obj, 0, Anchor, 'Anchor 0'),
            (fk_obj, 465, FKData, 0),
        ])
    # Create all the objects defined in the test data
    objects = []
    instance_count = {}
    for (func, pk, klass, datum) in test_data:
        with connection.constraint_checks_disabled():
            objects.extend(func[0](pk, klass, datum))
    # Get a count of the number of objects created for each class
    # (after all creation, so pre-references are resolved).
    for (func, pk, klass, datum) in test_data:
        instance_count[klass] = klass.objects.count()
    # Add the generic tagged objects to the object list
    objects.extend(Tag.objects.all())
    # Serialize the test database
    serialized_data = serializers.serialize(format, objects, indent=2)
    for obj in serializers.deserialize(format, serialized_data):
        obj.save()
    # Assert that the deserialized data is the same
    # as the original source
    for (func, pk, klass, datum) in test_data:
        func[1](self, pk, klass, datum)
    # Assert that the number of objects deserialized is the
    # same as the number that was serialized.
    for klass, count in instance_count.items():
        self.assertEqual(count, klass.objects.count())
# Attach one `test_<format>_serializer` method per registered serializer.
register_tests(SerializerDataTests, 'test_%s_serializer', serializerTest)
|
|
import warnings
from lexer import lang
from .types import DataType
from .types import Integer
from .types import Decimal
from .types import String
from .types import Bool
from .declarations import Declaration
from .declarations import VarDeclarator
from .declarations import Identifier
from .declarations import FunctionDef
from .declarations import Parameter
from .statements import IfStat
from .statements import ReturnStat
from .statements import WhileStat
from .statements import BreakStat
from .statements import SwitchStat
from .statements import CaseStat
from .statements import ForStat
from .statements import ContinueStat
from .expressions import UnaryExp
from .expressions import BinaryExp
from .expressions import FunctionCall
from . import builtin
# Map source-language builtin names (Spanish keywords) to their AST node
# classes; used by Syntax.reserved_function_call().
BUILTIN_FUNCS_MAP = {
    'imprime': builtin.PrintFn,
    'imprimenl': builtin.PrintlnFn,
    'lee': builtin.ReadFn,
}
def basic_expression(child_exp):
    """Decorator factory for left-associative binary-expression parsers.

    `child_exp` names the parser method for the next-higher-precedence
    level.  The decorated method only returns the operator set; the wrapper
    parses `child_exp (op child_exp)*` and folds the results into
    left-nested BinaryExp nodes.
    """
    def decorator(func):
        def wrapper(instance):
            try:
                exp_method = getattr(instance, child_exp)
            except AttributeError:
                raise NotImplementedError(f'Class `{instance.__class__.__name__}` does not implement "{child_exp}"')
            exp = exp_method()
            # func(instance) returns the accepted operator value(s).
            while instance.current_token.contains(func(instance)):
                symbol_token = instance.current_token
                instance.match(func(instance))
                exp = BinaryExp(symbol_token.value, exp, exp_method(), symbol_token)
            return exp
        return wrapper
    return decorator
class SyntaxError(Warning):
    """Warning category for parse errors (emitted via warnings.warn so the
    parser can keep going).

    NOTE(review): this shadows the builtin SyntaxError within this module.
    """
    pass
class Syntax(object):
"""docstring for Syntax."""
    def __init__(self, lexer):
        """Store the lexer; token consumption begins in parse()."""
        super().__init__()
        self.lexer = lexer
    def parse(self):
        """Entry point: prime the first token, parse all top-level
        declarations, and require EOF.  Returns the AST root."""
        self.current_token = self.lexer.next_token()
        tree = self.external_declaration()
        self.match_type(lang.EOF)
        return tree
    def _raise_expected(self, expected):
        # Despite the name, this does not raise: it emits a SyntaxError
        # *warning* with position info so parsing can attempt to continue.
        warnings.warn(f'Expected "{expected}". Found "{self.current_token.value}". Line: {self.current_token.line_index} - Col: {self.current_token.col_index}', SyntaxError)
    def _match_assing(self, value, expected):
        """Advance to the next token when `value` is in `expected`; otherwise
        warn and stay on the current token (returns None in that case).

        NOTE(review): the historical misspelling ('assing') is kept because
        match_type/match_value depend on this name.
        """
        if value in expected:
            self.current_token = self.lexer.next_token()
            return True
        self._raise_expected(expected)
    def match_type(self, value):
        # Match against the token's *type* (e.g. lang.IDENTIFIER).
        return self._match_assing(self.current_token.type, value)
    def match_value(self, value):
        # Match against the token's literal *value* (e.g. 'inicio', ';').
        return self._match_assing(self.current_token.value, value)
def match(self, value):
if self.current_token.contains(value):
self.current_token = self.lexer.next_token()
return True
self._raise_expected(expected)
    def external_declaration(self):
        """Parse one top-level declaration (program body, constant,
        procedure, function, or variable) and recursively chain the rest
        via `.next`."""
        node = None
        token = self.current_token;
        # `programa` introduces the main program body, modeled as a void
        # function named 'main'.
        if token.value == 'programa':
            datatype = DataType('void', token)
            identifier = Identifier('main', token)
            self.match_value('programa')
            node = self.compound_stat(ismain=True)
            return FunctionDef(datatype, identifier, None, node, token)
        if token.value == 'constante':
            self.match_type(lang.KEYWORD)
            declaration_type = self.type_def()
            node = self.constant_declaration(declaration_type)
        elif token.value == 'procedimiento':
            # Procedures are functions returning void.
            self.match_type(lang.KEYWORD)
            datatype = DataType('void', token)
            node = self.declaration(datatype, isfunc=True)
        elif token.value in lang.RESERVED_TYPES:
            datatype = self.type_def()
            isfunc = False
            if self.current_token.value == 'funcion':
                isfunc = True
                self.match_value('funcion')
            node = self.declaration(datatype, isfunc=isfunc)
        else:
            # Error recovery: warn, skip the offending token, and retry.
            self._raise_expected('|'.join(lang.RESERVED_TYPES + ['constante']))
            self.current_token = self.lexer.next_token()
            return self.external_declaration()
        if node:
            node.next = self.external_declaration();
        return node
def type_def(self):
node = DataType(self.current_token.value, self.current_token)
self.match_value(lang.RESERVED_TYPES)
return node
    def constant_declaration(self, declaration_type):
        """Parse `<IDENT> := <literal> ;` for a `constante` declaration."""
        # Constants are scalar: no array dimensions allowed.
        cond = {
            'isconstant': True,
            'dimensions': 0,
            'dimensionsSizes': []
        }
        stat_token = self.current_token
        identifier = Identifier(self.current_token.value, stat_token)
        self.match_type(lang.IDENTIFIER)
        assign_token = self.current_token
        self.match_type(lang.ASSING_OP)
        # isconstant=True restricts the initializer to literal values
        # (see primary_exp).
        init = self.unary_exp(isconstant=True)
        statement = VarDeclarator(identifier, init, assign_token, cond)
        self.match_value(';')
        return Declaration(declaration_type, statement, stat_token)
def declaration(self, declaration_type, **cond):
if not cond.get('isfunc', False):
declaration_token = self.current_token
declaration = self.declarator_list()
self.match_value(';')
return Declaration(declaration_type, declaration, declaration_token)
return self.function_def(declaration_type)
def declarator_list(self):
node = self.declarator()
if self.current_token.value == ',':
self.match_value(',')
node.next = self.declarator_list()
return node
    def declarator(self):
        """Parse `<IDENT>[sizes][:= <exp>]`; arrays may not be initialized."""
        id_token = self.current_token
        identifier = Identifier(id_token.value, id_token)
        self.match_type(lang.IDENTIFIER)
        array_def = self.array_def()
        init = None
        if self.current_token.type == lang.ASSING_OP:
            # Initializing an array declarator is an error; report it but
            # still consume the initializer expression for recovery.
            if array_def['dimensions']:
                self._raise_expected(';')
            self.match_type(lang.ASSING_OP)
            init = self.logical_or_exp()
        return VarDeclarator(identifier, init, id_token, array_def)
    def function_def(self, type_def):
        """Parse a function/procedure: name, parameter list, optional local
        variable declarations, then an `inicio ... fin` body."""
        func_token = self.current_token
        identifier = Identifier(func_token.value, func_token)
        self.match_type(lang.IDENTIFIER)
        self.match_value('(')
        args = self.parameter_list()
        self.match_value(')')
        variables = None
        declaration = None
        if self.current_token.value != 'inicio':
            var_type = self.type_def()
            declaration = variables = self.declaration(var_type)
        # Chain any further local declarations until the body starts.
        while self.current_token.value != 'inicio':
            if declaration:
                var_type = self.type_def()
                declaration = declaration.next = self.declaration(var_type)
        if self.current_token.value == 'inicio':
            statement = self.compound_stat()
            # Local declarations are linked in front of the body statements.
            if declaration:
                declaration.next = statement
                statement = variables
            return FunctionDef(type_def, identifier, args, statement, func_token)
        self._raise_expected('function|procedure body')
        return None
def array_def(self):
dimensions_sizes = []
while self.current_token.value == '[':
self.match_value('[')
if self.current_token.type in [lang.INTEGER, lang.IDENTIFIER]:
dimensions_sizes.append(self.primary_exp())
else:
self._raise_expected('enetro|constante')
self.match_value(']')
return dict(
dimensions_sizes=dimensions_sizes,
dimensions=len(dimensions_sizes)
)
def array_exp(self):
expressions = []
while self.current_token.value == '[':
self.match_value('[')
expressions.append(self.logical_or_exp())
self.match_value(']')
return dict(expressions=expressions, dimensions=len(expressions))
def parameter_list(self):
if self.current_token.value in lang.RESERVED_TYPES:
arg_token = self.current_token
type_def = self.type_def()
identifier = Identifier(self.current_token.value, self.current_token)
self.match_type(lang.IDENTIFIER)
aux = node = Parameter(type_def, identifier, arg_token)
while self.current_token.value == ',':
self.match_value(',')
arg_token = self.current_token
type_def = self.type_def()
identifier = Identifier(self.current_token.value, self.current_token)
self.match_type(lang.IDENTIFIER)
aux = node.next = Parameter(type_def, identifier, arg_token)
return node
return None
    def compound_stat(self, **cond):
        """Parse `inicio <stats> fin`; the main program block is closed by
        `fin programa.` while nested blocks are closed by `fin;`."""
        self.match_value('inicio')
        node = self.stat_list()
        self.match_value('fin')
        if cond.get('ismain', False):
            self.match_value('programa')
            self.match_value('.')
        else:
            self.match_value(';')
        return node
    def stat_list(self):
        """Parse consecutive statements into a `.next`-linked list; stops at
        the first token that cannot start a statement (e.g. `fin`)."""
        # Keywords that may begin a statement.
        # NOTE(review): 'inicio' is dispatched in statement() but is absent
        # here, so a nested bare block may not be reachable from a statement
        # list — confirm intended grammar.
        keywords_subset = [
            'si',
            'iterar',
            'para',
            'regresa',
            'haz',
            'continua',
            'interrumpe',
            'caso',
            'otro',
        ]
        node = None
        token = self.current_token
        if (token.type == lang.IDENTIFIER or
                token.value in keywords_subset or
                token.value in lang.RESERVED_FUNCS):
            node = self.statement()
        if node:
            node.next = self.stat_list()
        return node
    def statement(self):
        """Dispatch on the current keyword; any other token is parsed as an
        assignment/expression statement terminated by ';'."""
        STATS_MAP = {
            'si': self.if_statement,
            'iterar': self.while_statement,
            'para': self.for_statement,
            'continua': self.continue_statement,
            'haz': self.swich_statment,
            'caso': self.case_statement,
            'otro': self.defaultcase_statement,
            'interrumpe': self.break_statement,
            'regresa': self.return_statement,
            'inicio': self.start_statement,
        }
        try:
            return STATS_MAP[self.current_token.value]()
        except KeyError:
            # Not a statement keyword: assignment or bare expression.
            node = self.assignment_expression()
            self.match_value(';')
            return node
def if_statement(self):
if_token = self.current_token
self.match_value('si')
self.match_value('(')
exp = self.logical_or_exp()
self.match_value(')')
stat = self.statement()
else_stat = self.else_statement()
return IfStat(exp, stat, else_stat, if_token)
def while_statement(self):
while_token = self.current_token
self.match_value('iterar')
self.match_value('mientras')
self.match_value('(')
exp = self.logical_or_exp()
self.match_value(')')
stat = self.statement()
return WhileStat(exp, stat, while_token)
    def for_statement(self):
        """Parse `para <id> en rango <start> a <stop> [incr|decr <step>]`,
        desugaring it into initializer/condition/step expressions."""
        for_token = self.current_token
        self.match_value('para')
        identifier_token = self.current_token
        self.match_type(lang.IDENTIFIER)
        self.match_value('en')
        self.match_value('rango')
        identifier = Identifier(identifier_token.value, identifier_token)
        initial_value = self.logical_or_exp()
        # Desugar: <id> := <start>
        initializer = BinaryExp(':=', identifier, initial_value, self.current_token)
        self.match_value('a')
        stop_condition = self.logical_or_exp()
        # Loop while <id> <= stop (or >= stop when counting down via decr).
        op = '>=' if self.current_token.value == 'decr' else '<='
        condition = BinaryExp(op, identifier, stop_condition, self.current_token)
        if self.current_token.value in ['incr', 'decr']:
            # Explicit step: <id> := <id> +/- <step-exp>
            step_token = self.current_token
            self.match_value(['incr', 'decr'])
            step_op = '-' if step_token.value == 'decr' else '+'
            step_exp = BinaryExp(step_op, identifier, self.logical_or_exp(), step_token)
            step = BinaryExp(':=', identifier, step_exp, step_token)
        else:
            # Default step: <id> := <id> + 1
            step_def = Integer(1, self.current_token)
            step_inc = BinaryExp('+', identifier, step_def, self.current_token)
            step = BinaryExp(':=', identifier, step_inc, self.current_token)
        stats = self.statement()
        return ForStat(initializer, condition, step, stats, for_token)
def continue_statement(self):
continue_token = self.current_token
self.match_value('continua')
self.match_value(';')
return ContinueStat(continue_token)
    def swich_statment(self):
        """Parse `haz opcion (<exp>) <stat>` into a SwitchStat node.

        NOTE(review): the method name misspells 'switch_statement'; it is
        kept because statement()'s dispatch table refers to it by this name.
        """
        switch_token = self.current_token
        self.match_value('haz')
        self.match_value('opcion')
        self.match_value('(')
        exp = self.logical_or_exp()
        self.match_value(')')
        stats = self.statement()
        return SwitchStat(exp, stats, switch_token)
    def case_statement(self):
        """Parse `caso <literal>: <stat>`; the case label must be a literal
        of one of the language data types."""
        case_token = self.current_token
        self.match_value('caso')
        if self.current_token.type in lang.DATA_TYPES:
            exp = self.primary_exp()
            self.match_value(':')
            stats = self.statement()
            return CaseStat(exp, stats, case_token)
        self._raise_expected('|'.join(lang.DATA_TYPES))
        return None
def defaultcase_statement(self):
default_token = self.current_token
self.match_value('otro')
self.match_value('caso')
self.match_value(':')
stats = self.statement()
return CaseStat(None, stats, default_token)
def break_statement(self):
break_token = self.current_token
self.match_value('interrumpe')
self.match_value(';')
return BreakStat(break_token)
def return_statement(self):
return_token = self.current_token
self.match_value('regresa')
exp = None
if self.current_token.value != ';':
exp = self.logical_or_exp()
self.match_value(';')
return ReturnStat(exp, return_token)
    def start_statement(self):
        # 'inicio' begins a nested compound block; delegate to compound_stat.
        return self.compound_stat()
def else_statement(self):
stat = None
if self.current_token.value == 'sino':
self.match_value('sino')
stat = self.statement()
return stat
    def primary_exp(self, **cond):
        """Parse a literal, identifier / array access / function call, or
        parenthesized expression.

        With isconstant=True only literal values are accepted (used by
        constant declarations and array sizes).
        """
        isconstant = cond.get('isconstant', False)
        if self.current_token.type == lang.IDENTIFIER and not isconstant:
            identifier = Identifier(self.current_token.value, self.current_token)
            self.match_type(lang.IDENTIFIER)
            # May resolve to a plain variable, array access, or call.
            return self.function_call(identifier)
        elif self.current_token.value in lang.RESERVED_FUNCS and not isconstant:
            return self.reserved_function_call()
        elif self.current_token.type == lang.INTEGER:
            exp = Integer(self.current_token.value, self.current_token)
            self.match_type(lang.INTEGER)
        elif self.current_token.type == lang.DECIMAL:
            exp = Decimal(self.current_token.value, self.current_token)
            self.match_type(lang.DECIMAL)
        elif self.current_token.type == lang.STRING:
            exp = String(self.current_token.value, self.current_token)
            self.match_type(lang.STRING)
        elif self.current_token.type == lang.LOGIC_CONST:
            exp = Bool(self.current_token.value, self.current_token)
            self.match_type(lang.LOGIC_CONST)
        elif self.current_token.value == '(' and not isconstant:
            self.match_value('(')
            exp = self.logical_or_exp()
            self.match_value(')')
        else:
            # Unparsable: warn and yield None so callers can recover.
            exp = None
            self._raise_expected('<Expression>')
        return exp
    def assignment_expression(self):
        """Parse `<id>[indices] := <exp>` using one-token lookahead to
        distinguish assignments from plain expressions; falls through to a
        logical-or expression otherwise."""
        identifier = None
        token_ahead = self.lexer.lookahead()
        if self.current_token.type == lang.IDENTIFIER:
            identifier = Identifier(self.current_token.value, self.current_token)
            if token_ahead.type == lang.ASSING_OP:
                # Simple `<id> := ...`: consume the identifier now.
                self.match_type(lang.IDENTIFIER)
            elif token_ahead.value == '[':
                # Array element target: `<id>[...] := ...`.
                self.match_type(lang.IDENTIFIER)
                identifier.array_values = self.array_exp()
        if self.current_token.type == lang.ASSING_OP:
            symbol_token = self.current_token
            self.match_type(lang.ASSING_OP)
            exp = self.logical_or_exp()
            return BinaryExp(symbol_token.value, identifier, exp, symbol_token)
        return self.logical_or_exp()
def logical_or_exp(self):
exp = self.logical_and_exp()
while self.current_token.value == 'o':
symbol_token = self.current_token
self.match_value('o')
exp = BinaryExp(symbol_token.value, exp, self.logical_and_exp(), symbol_token)
return exp
def logical_and_exp(self):
exp = self.equality_exp()
while self.current_token.value == 'y':
symbol_token = self.current_token
self.match_value('y')
exp = BinaryExp(symbol_token.value, exp, self.equality_exp(), symbol_token)
return exp
def equality_exp(self):
exp = self.relational_exp()
while self.current_token.value == '=':
symbol_token = self.current_token
self.match_value('=')
exp = BinaryExp(symbol_token.value, exp, self.relational_exp(), symbol_token)
return exp
@basic_expression('additive_exp')
def relational_exp(self):
return lang.RELATIONAL_OP
# exp = self.additive_exp()
#
# while self.current_token.type == lang.RELATIONAL_OP:
# symbol_token = self.current_token
# self.match_type(lang.RELATIONAL_OP)
#
# exp = BinaryExp(symbol_token.value, exp, self.additive_exp(), symbol_token)
# return exp
@basic_expression('multiplicative_exp')
def additive_exp(self):
return ['+', '-']
# exp = self.multiplicative_exp()
#
# while self.current_token.value in ['+', '-']:
# symbol_token = self.current_token
# self.match_value(self.current_token.value)
#
# exp = BinaryExp(symbol_token.value, exp, self.multiplicative_exp(), symbol_token)
# return exp
@basic_expression('pow_exp')
def multiplicative_exp(self):
return ['*', '/', '%']
# exp = self.pow_exp()
#
# while self.current_token.value in ['*', '/', '%']:
# symbol_token = self.current_token
# self.match_value(self.current_token.value)
#
# exp = BinaryExp(symbol_token.value, exp, self.pow_exp(), symbol_token)
# return exp
@basic_expression('unary_exp')
def pow_exp(self):
return '^'
# exp = self.unary_exp()
#
# while self.current_token.value == '^':
# symbol_token = self.current_token
# self.match_value(self.current_token.value)
#
# exp = BinaryExp(symbol_token.value, exp, self.unary_exp(), symbol_token)
# return exp
def unary_exp(self, **cond):
if self.current_token.value == '+' or self.current_token == '-':
token_symbol = self.current_token
self.match_value(token_symbol.value)
return UnaryExp(token_symbol.value, self.unary_exp(**cond), token_symbol)
return self.primary_exp(**cond)
    def function_call(self, identifier):
        """After an identifier: '(' starts a call; otherwise any '[...]'
        suffix is parsed as array indexing and the identifier returned."""
        if self.current_token.value == '(':
            func_token = self.current_token
            self.match_value('(')
            args = self.arg_list()
            self.match_value(')')
            return FunctionCall(identifier, args, func_token)
        # array_exp() returns zero dimensions for a plain variable.
        identifier.array_values = self.array_exp()
        return identifier
    def reserved_function_call(self):
        """Parse a builtin call (imprime/imprimenl/lee) and build the
        matching AST node; unknown builtin keywords yield None."""
        id_token = self.current_token
        self.match_type(lang.KEYWORD)
        self.match_value('(')
        args = self.arg_list()
        self.match_value(')')
        try:
            return BUILTIN_FUNCS_MAP[id_token.value](args, id_token)
        except KeyError:
            return None
    def arg_list(self):
        """Parse a comma-separated argument list.

        Returns None when the next token cannot start an expression,
        otherwise a list of expression nodes (failed sub-expressions, which
        parse to None, are filtered out).
        """
        # Prefix tokens that can start an expression.
        op_subset = [
            '+',
            '-',
            '(',
        ]
        # Token types that can start an expression.
        types_subset = [
            lang.INTEGER,
            lang.DECIMAL,
            lang.LOGIC_CONST,
            lang.STRING,
            lang.IDENTIFIER,
        ]
        args = None
        token = self.current_token
        if token.value in op_subset or token.type in types_subset:
            args = []
            param = self.logical_or_exp()
            if param:
                args.append(param)
            while self.current_token.value == ',':
                self.match_value(',')
                param = self.logical_or_exp()
                if param:
                    args.append(param)
        return args
|
|
#!/usr/bin/python
# =============================================================================================
# MODULE DOCSTRING
# =============================================================================================
"""
Test restraints module.
"""
# =============================================================================================
# GLOBAL IMPORTS
# =============================================================================================
import tempfile
import os
import shutil
import math
import numpy as np
import netCDF4 as netcdf
from nose.plugins.attrib import attr
import yank.restraints
from yank.repex import ThermodynamicState
from yank.yamlbuild import YamlBuilder
from yank.utils import get_data_filename
from yank import analyze
from simtk import unit
from openmmtools import testsystems
# =============================================================================================
# UNIT TESTS
# =============================================================================================
from openmmtools.testsystems import HostGuestVacuum
class HostGuestNoninteracting(HostGuestVacuum):
    """CB7:B2 host-guest system in vacuum with no nonbonded interactions.

    Parameters
    ----------
    Same as HostGuestVacuum

    Examples
    --------
    Create host:guest system with no nonbonded interactions.

    >>> testsystem = HostGuestNoninteracting()
    >>> (system, positions) = testsystem.system, testsystem.positions

    Properties
    ----------
    receptor_atoms : list of int
        Indices of receptor atoms
    ligand_atoms : list of int
        Indices of ligand atoms
    """
    def __init__(self, **kwargs):
        super(HostGuestNoninteracting, self).__init__(**kwargs)
        # Store receptor and ligand atom indices
        # (fixed atom ranges for the CB7:B2 topology built by the parent).
        self.receptor_atoms = range(0,126)
        self.ligand_atoms = range(126,156)
        # Remove nonbonded interactions by locating the NonbondedForce by
        # class name and deleting it from the System.
        force_indices = { self.system.getForce(index).__class__.__name__ : index for index in range(self.system.getNumForces()) }
        self.system.removeForce(force_indices['NonbondedForce'])
expected_restraints = {
'Harmonic' : yank.restraints.Harmonic,
'FlatBottom' : yank.restraints.FlatBottom,
'Boresch' : yank.restraints.Boresch,
}
restraint_test_yaml = """
---
options:
minimize: no
verbose: yes
output_dir: %(output_directory)s
number_of_iterations: %(number_of_iter)s
nsteps_per_iteration: 100
temperature: 300*kelvin
pressure: null
anisotropic_dispersion_correction: no
platform: OpenCL
solvents:
vacuum:
nonbonded_method: PME
nonbonded_cutoff: 0.59 * nanometer
systems:
ship:
phase1_path: [data/benzene-toluene-standard-state/standard_state_complex.inpcrd, data/benzene-toluene-standard-state/standard_state_complex.prmtop]
phase2_path: [data/benzene-toluene-standard-state/standard_state_complex.inpcrd, data/benzene-toluene-standard-state/standard_state_complex.prmtop]
ligand_dsl: resname ene
solvent: vacuum
protocols:
absolute-binding:
complex:
alchemical_path:
lambda_restraints: [0.0, 0.25, 0.5, 0.75, 1.0]
lambda_electrostatics: [0.0, 0.00, 0.0, 0.00, 0.0]
lambda_sterics: [0.0, 0.00, 0.0, 0.00, 0.0]
solvent:
alchemical_path:
lambda_electrostatics: [1.0, 1.0]
lambda_sterics: [1.0, 1.0]
experiments:
system: ship
protocol: absolute-binding
restraint:
type: %(restraint_type)s
"""
def general_restraint_run(options):
    """Run a restraint simulation and assert free energy ~= standard state
    correction.

    Parameters
    ----------
    options : dict
        Substitutions for restraint_test_yaml (must include
        'number_of_iter' and 'restraint_type'; 'output_directory' is
        filled in here).

    Fix: the scratch directory and the NetCDF handle previously leaked when
    setup, the simulation, or the analysis raised; both are now released in
    try/finally blocks.
    """
    output_directory = tempfile.mkdtemp()
    options['output_directory'] = output_directory
    try:
        # run both setup and experiment
        yaml_builder = YamlBuilder(restraint_test_yaml % options)
        yaml_builder.build_experiments()
        # Estimate free energies from the complex-phase NetCDF output.
        ncfile_path = os.path.join(output_directory, 'experiments', 'complex.nc')
        ncfile = netcdf.Dataset(ncfile_path, 'r')
        try:
            (Deltaf_ij, dDeltaf_ij) = analyze.estimate_free_energies(ncfile)
            # Correct the sign for the fact that we are adding vs removing
            # the restraints.
            DeltaF_simulated = Deltaf_ij[-1, 0]
            dDeltaF_simulated = dDeltaf_ij[-1, 0]
            DeltaF_restraints = ncfile.groups['metadata'].variables['standard_state_correction'][0]
        finally:
            ncfile.close()
    finally:
        # Clean up the scratch directory even on failure.
        shutil.rmtree(output_directory)
    # Check if they are close
    assert np.allclose(DeltaF_restraints, DeltaF_simulated, rtol=dDeltaF_simulated)
@attr('slow')  # Skip on Travis-CI
def test_harmonic_free_energy():
    """Check that the Harmonic restraint's simulated free energy matches
    its standard-state correction."""
    general_restraint_run({
        'number_of_iter': '500',
        'restraint_type': 'Harmonic',
    })
@attr('slow')  # Skip on Travis-CI
def test_flat_bottom_free_energy():
    """
    Test that the FlatBottom restraint simulated free energy equals the standard state correction
    """
    options = {'number_of_iter': '500',
               'restraint_type': 'FlatBottom'}
    general_restraint_run(options)
@attr('slow')  # Skip on Travis-CI
def test_Boresch_free_energy():
    """
    Test that the Boresch restraint simulated free energy equals the standard state correction
    """
    # These need more samples to converge
    options = {'number_of_iter': '1000',
               'restraint_type': 'Boresch'}
    general_restraint_run(options)
def test_harmonic_standard_state():
    """
    Test that the expected harmonic standard state correction is close to our approximation

    Also ensures that PBC bonds are being computed and disabled correctly as expected
    """
    LJ_fluid = testsystems.LennardJonesFluid()
    # Arbitrary atom subsets of the fluid standing in for receptor/ligand.
    receptor_atoms = [0, 1, 2]
    ligand_atoms = [3, 4, 5]
    thermodynamic_state = ThermodynamicState(temperature=300.0 * unit.kelvin)
    restraint = yank.restraints.create_restraints('Harmonic', LJ_fluid.topology, thermodynamic_state, LJ_fluid.system,
                                                  LJ_fluid.positions, receptor_atoms, ligand_atoms)
    # Spring constant chosen automatically by the restraint implementation.
    spring_constant = restraint._determine_bond_parameters()[0]
    # Compute standard-state volume for a single molecule in a box of size (1 L) / (avogadros number)
    liter = 1000.0 * unit.centimeters ** 3  # one liter
    box_volume = liter / (unit.AVOGADRO_CONSTANT_NA * unit.mole)  # standard state volume
    # Analytical volume of the Gaussian shell sampled by a harmonic restraint:
    # (2*pi / (K*beta))^(3/2).
    analytical_shell_volume = (2 * math.pi / (spring_constant * restraint.beta))**(3.0/2)
    analytical_standard_state_G = - math.log(box_volume / analytical_shell_volume)
    restraint_standard_state_G = restraint.get_standard_state_correction()
    np.testing.assert_allclose(analytical_standard_state_G, restraint_standard_state_G)
def test_available_restraint_classes():
    """Test to make sure expected restraint classes are available.
    """
    restraint_classes = yank.restraints.available_restraint_classes()
    restraint_types = yank.restraints.available_restraint_types()
    # We shouldn't have `None` (from the base class) as an available type
    assert None not in restraint_classes
    assert None not in restraint_types
    for expected_type, expected_class in expected_restraints.items():
        msg = "Failed comparing restraint type '%s' with %s" % (expected_type, str(restraint_classes))
        assert expected_type in restraint_classes, msg
        assert restraint_classes[expected_type] is expected_class, msg
        assert expected_type in restraint_types, msg
def test_restraint_dispatch():
    """Test dispatch of various restraint types.
    """
    for restraint_type, restraint_class in expected_restraints.items():
        # Build a host-guest test system and a thermodynamic state.
        testsystem = HostGuestNoninteracting()
        state = ThermodynamicState(temperature=300.0 * unit.kelvin)
        # Dispatch on the restraint type name.
        restraint = yank.restraints.create_restraints(
            restraint_type, testsystem.topology, state, testsystem.system,
            testsystem.positions, testsystem.receptor_atoms, testsystem.ligand_atoms)
        # The factory must return exactly the class registered for this name.
        assert restraint.__class__.__name__ == restraint_type
        assert restraint.__class__ == restraint_class
# =============================================================================================
# MAIN
# =============================================================================================
if __name__ == '__main__':
    # Only the fast dispatch test runs here; the @attr('slow') simulations
    # above are executed separately by the test runner.
    test_restraint_dispatch()
|
|
# -*- test-case-name: miru.test.test_camera -*-
# Copyright (c) 2008 Drew Smathers.
# See LICENSE for details
from pyglet import gl
from miru import imiru
from miru.utils import glvec
from miru import components
from miru import core
from miru import options
from euclid import Vector3
from UserList import UserList
import math
from zope.interface import implementer, implements
from twisted.python.components import registerAdapter
# Map a light index to the matching pyglet constant (gl.GL_LIGHT0,
# gl.GL_LIGHT1, ...); eval is used because pyglet only exposes these as
# individually named attributes.
lightno = lambda i : eval('gl.GL_LIGHT%d' % i)
# Probe for how many GL_LIGHTn constants exist: the first index whose
# lookup raises fixes LIGHT_CT_MAX (the loop variable keeps that value
# after `break`).
for LIGHT_CT_MAX in range(1000):
    try:
        lightno(LIGHT_CT_MAX)
    except Exception, e:
        #print 'Max lights count supported : ', LIGHT_CT_MAX
        break
def vlightno(n):
    # "Virtual" light number for indices at or past LIGHT_CT_MAX: maps n
    # onto offsets beyond the last real GL_LIGHT constant.  Used by
    # LightGroup when enforce_limit is False.
    off = n - LIGHT_CT_MAX
    return lightno(LIGHT_CT_MAX - 1) + off
def _constrain_lights(ct):
    """Assert that ``ct`` lights fit in the GL implementation's limit.

    Raises AssertionError above LIGHT_CT_MAX and warns above the
    widely-portable count of 8.
    """
    assert ct <= LIGHT_CT_MAX,\
        'No more than %d lights can exist in a lightgroup' % LIGHT_CT_MAX
    if ct > 8:
        from warnings import warn
        # Fixed typo in the warning message ("suppoted" -> "supported").
        warn("You're using more than 8 lights - this is not likely supported on other systems")
def _zcmp(o1, o2):
    # Python 2 cmp-style comparator: order positional objects by their z
    # coordinate (used with list.sort for back-to-front depth sorting).
    return cmp(o1.pos.z, o2.pos.z)
class Camera(core.PositionalMixin):
    """Abstraction representing a camera - the viewer's perspective.

    This also acts as a C{imiru.IRenderStage} for world objects which are
    drawn after the initial transformation matrix is applied.

    @param wireframe: draw objects as wireframes (default: False)
    @type wireframe: C{bool}
    @param lights: Lighting for scene rendered by Camera
    @type lights: C{imiru.ILightGroup}
    @param projection: The C{imiru.IProjection} for the camera
        (default: C{PerspectiveProjection}).
    @param effects: List of optional effects
    @type effects: C{list}

    >>> from zope.interface.verify import verifyObject, verifyClass
    >>> verifyClass(imiru.IPositional, Camera)
    True
    >>> verifyObject(imiru.IPositional, Camera(light_group=None))
    True
    >>> verifyClass(imiru.ICamera, Camera)
    True
    >>> verifyObject(imiru.ICamera, Camera(light_group=None))
    True
    >>> verifyClass(imiru.IWorldRenderStage, Camera)
    True
    >>> verifyObject(imiru.IWorldRenderStage, Camera(light_group=None))
    True
    """
    implements(imiru.ICamera, imiru.IWorldRenderStage)

    ORBIT_MODE = 1
    ROTATE_MODE = 2
    # Orbit around the origin by default (rotate before translate).
    rotation_mode = ORBIT_MODE
    # Positional object the camera should look at, or None.
    track_target = None
    # Index of the object currently being drawn (None outside render()).
    _render_idx = None

    def __init__(self, *p, **kw):
        self.wireframe = kw.get('wireframe', False)
        # `objects` and `visible` alias the same list.
        self.objects = self.visible = []
        self.lights = kw.get('lights', LightGroup())
        self.effects = kw.get('effects', [])
        self.window = kw.get('window', None)
        self.projection = kw.get('projection', PerspectiveProjection())
        self.depth_sort = kw.get('depth_sort', False)
        super(Camera, self).__init__(*p, **kw)

    def addobj(self, obj):
        """
        @deprecated - will become add_object
        """
        self.objects.append(obj)
        if self.depth_sort:
            # Keep objects ordered by z and track further moves.
            self.objects.sort(_zcmp)
            if self._depth_sort not in obj.pos_listeners:
                obj.pos_listeners.append(self._depth_sort)
    add_object = addobj

    def delobj(self, obj):
        """
        @deprecated - will become remove_object
        """
        self.objects.remove(obj)
        if self.depth_sort:
            if self._depth_sort in obj.pos_listeners:
                obj.pos_listeners.remove(self._depth_sort)
    remove_object = delobj

    def _getprojection(self):
        return self._projection

    def _setprojection(self, proj):
        # Attach the projection and immediately resize it against whatever
        # window is available (explicit window first, then shared context).
        self._projection = proj
        self._projection.camera = self
        if self.window:
            self._projection.on_resize(
                self.window.width,
                self.window.height)
            return
        try:
            from miru.context import context
            if getattr(context, 'window', False):
                self._projection.on_resize(
                    context.window.width,
                    context.window.height)
        except ImportError:
            pass
    projection = property(_getprojection, _setprojection)

    def render(self, select_pass=0, visible=None, lights=None,
            effect_pass=0, before_render=None):
        """Draw the scene from this camera.

        @param select_pass: nonzero when rendering for GL selection
            (names are loaded instead of applying view transforms)
        @param visible: override the object list to draw
        @param lights: override the camera's light group
        @param effect_pass: nonzero when re-rendering for an effect
        @param before_render: callable invoked just before drawing
            during an effect pass
        """
        if isinstance(self.projection, Viewport) and not select_pass:
            self.projection.enable()
        if not effect_pass or select_pass:
            for effect in self.effects:
                effect.enable()
        angle = self.angle
        if not select_pass:
            gl.glLoadIdentity()
        # Camera moves opposite to its position vector.
        p = self.pos
        p = (-p.x, -p.y, -p.z)
        if self.track_target:
            # Look-at mode: derive an up vector orthogonal to the view dir.
            t = self.track_target.pos
            up = (t - self.pos).normalized()
            eux = up.cross(Vector3(0,1,0))
            up = eux.cross(up)
            gl.gluLookAt(self.pos.x, self.pos.y, self.pos.z,
                    t.x, t.y, t.z, up.x, up.y, up.z)
        elif self.rotation_mode == Camera.ORBIT_MODE:
            # Orbit: translate first, then rotate about the origin.
            if not select_pass:
                gl.glTranslatef(*p)
            gl.glRotatef(angle.z, 0, 0, 1)
            gl.glRotatef(angle.y, 0, 1, 0)
            gl.glRotatef(angle.x, 1, 0, 0)
        else:
            # Rotate mode: rotate in place, then translate.
            if not (select_pass or effect_pass):
                gl.glRotatef(angle.z, 0, 0, 1)
                gl.glRotatef(angle.y, 0, 1, 0)
                gl.glRotatef(angle.x, 1, 0, 0)
                gl.glTranslatef(*p)
        lights = (lights, self.lights)[lights is None]
        if not select_pass:
            if self.lights:
                lights.on()
        visible = visible or self.objects
        if not select_pass:
            if effect_pass:
                beforeRender = before_render or (lambda : None)
                beforeRender()
            for (idx,v) in enumerate(visible):
                self._render_idx = idx
                v.draw()
        else:
            # Selection pass: only name/draw objects that can be translated.
            for (idx,v) in enumerate([ v for v in visible if _translateable(v) ]):
                gl.glLoadName(idx)
                v.draw()
        if not select_pass and self.lights:
            lights.off()
        if not effect_pass or select_pass:
            for effect in self.effects:
                effect.disable()
        self._render_idx = None

    def _depth_sort(self, moved, pos, delta):
        # Position listener: keep self.objects sorted by z after a move.
        if moved not in self.objects:
            # The object is no longer managed by this camera - detach this
            # listener.  Bug fix: this previously removed
            # ``self._check_zorder``, an attribute Camera does not have
            # (copy-paste from BlittableView), raising AttributeError.
            moved.pos_listeners.remove(self._depth_sort)
            return
        # This is uuuugly - but we want to avoid sorting potentially
        # large lists if we don't have to.
        # We do a custom sort in place as an optimization.
        if delta.z:
            idx = self.objects.index(moved)
            if idx:
                left = self.objects[idx-1]
                # Bubble toward the front while nearer than the left neighbour.
                while pos.z < left.pos.z:
                    o = self.objects.pop(idx)
                    idx -= 1
                    self.objects.insert(idx, o)
                    if (idx - 1) < 0:
                        return
                    left = self.objects[idx - 1]
            if idx < len(self.objects) - 1:
                right = self.objects[idx+1]
                # Bubble toward the back while farther than the right neighbour.
                while pos.z > right.pos.z:
                    o = self.objects.pop(idx)
                    idx += 1
                    self.objects.insert(idx, o)
                    if (idx + 1) > len(self.objects) - 1:
                        return
                    right = self.objects[idx + 1]

    @property
    def nextObject(self):
        """The next object to render or None if the current is the
        last object to render or camera is not currently rendering.
        """
        if self._render_idx is None:
            return
        try:
            return self.objects[self._render_idx + 1]
        except IndexError:
            return
def _translateable(o):
    # True when `o` adapts to ITranslationHandler (i.e. it can be moved by
    # the selection machinery); adaptation failure raises TypeError.
    try:
        imiru.ITranslationHandler(o)
    except TypeError:
        return False
    return True
class BaseProjection(object):
    """
    >>> from zope.interface import verify
    >>> verify.verifyClass(imiru.IProjection, BaseProjection)
    True
    >>> verify.verifyObject(imiru.IProjection, BaseProjection())
    True
    """
    implements(imiru.IProjection)
    # Camera owning this projection (assigned by Camera._setprojection).
    camera = None
    # Default RGBA clear color.
    clear_color = (0.2, 0.2, 0.2, 1)
    def __init__(self, **kw):
        self.clear_color = kw.get('clear_color',
                BaseProjection.clear_color)
    def on_resize(self, width, height, x=0, y=0):
        # Subclasses must set up the projection matrix for the new size.
        raise NotImplementedError
    def _setLightsAndEffects(self):
        # Zero out the global ambient light model, apply the clear color,
        # and (re)enable the owning camera's effects.
        fNoLight = (4 * gl.GLfloat)()
        fNoLight[:] = [0,0,0,0]
        gl.glLightModelfv(gl.GL_LIGHT_MODEL_AMBIENT, fNoLight);
        gl.glClearColor(*self.clear_color)
        for effect in self.camera.effects:
            effect.enable()
class PerspectiveProjection(BaseProjection):
    # 45-degree field-of-view perspective projection.
    def on_resize(self, width, height, x=0, y=0):
        gl.glViewport(x, y, width, height)
        gl.glMatrixMode(gl.GL_PROJECTION)
        gl.glLoadIdentity()
        # Guard against division by zero for degenerate window heights.
        height = height or 1
        gl.gluPerspective(45., width / float(height), 0.1, 1000.)
        gl.glMatrixMode(gl.GL_MODELVIEW)
        self._setLightsAndEffects()
class OrthographicProjection(BaseProjection):
    # Orthographic projection whose horizontal extent follows the window's
    # aspect ratio (vertical extent fixed at [-1, 1]).
    def on_resize(self, width, height, x=0, y=0):
        gl.glViewport(x, y, width, height)
        gl.glMatrixMode(gl.GL_PROJECTION)
        gl.glLoadIdentity()
        # Guard against division by zero for degenerate window heights.
        h = height or 1
        w = width / float(h)
        gl.glOrtho(-w, w, -1, 1, -1, 1000.)
        gl.glMatrixMode(gl.GL_MODELVIEW)
        self._setLightsAndEffects()
class DebugView:
    """Render stage that simply draws its registered debug objects.

    >>> from zope.interface.verify import verifyClass, verifyObject
    >>> verifyClass(imiru.IDebuggingRenderStage, DebugView)
    True
    >>> verifyObject(imiru.IDebuggingRenderStage, DebugView())
    True
    """
    implements(imiru.IDebuggingRenderStage)

    def __init__(self, objects=None):
        # Fall back to a fresh list when no collection is supplied.
        if objects is None:
            objects = []
        self.objects = objects

    def render(self):
        for item in self.objects:
            item.draw()

    def addobj(self, obj):
        self.objects.append(obj)

    def delobj(self, obj):
        self.objects.remove(obj)
class BlittableView:
    """
    >>> from zope.interface.verify import verifyClass, verifyObject
    >>> verifyClass(imiru.IBlittableRenderStage, BlittableView)
    True
    >>> verifyObject(imiru.IBlittableRenderStage, BlittableView())
    True
    """
    implements(imiru.IBlittableRenderStage)
    def __init__(self):
        self.objects = []
    def _cmp(self, o1, o2):
        # Python 2 cmp-style comparator: back-to-front order by z.
        return cmp(o1.pos.z, o2.pos.z)
    def _check_zorder(self, moved, pos, delta):
        # Position listener keeping self.objects sorted by z after a move.
        if moved not in self.objects:
            # XXX this would be unusual?? hrmmm ... remove
            # from positional listeners?
            moved.pos_listeners.remove(self._check_zorder)
            return
        # This is uuuugly - but we want to avoid sorting potentially
        # large lists if we don't have to.
        # We do a custom sort in place as an optimization.
        if delta.z:
            idx = self.objects.index(moved)
            if idx:
                left = self.objects[idx-1]
                # Bubble toward the front while nearer than the left neighbour.
                while pos.z < left.pos.z:
                    o = self.objects.pop(idx)
                    idx -= 1
                    self.objects.insert(idx, o)
                    if (idx - 1) < 0:
                        return
                    left = self.objects[idx - 1]
            if idx < len(self.objects) - 1:
                right = self.objects[idx+1]
                # Bubble toward the back while farther than the right neighbour.
                while pos.z > right.pos.z:
                    o = self.objects.pop(idx)
                    idx += 1
                    self.objects.insert(idx, o)
                    if (idx + 1) > len(self.objects) - 1:
                        return
                    right = self.objects[idx + 1]
    def render(self):
        # Draw 2D blitted objects with blending and texturing enabled and
        # depth testing disabled; GL state is saved/restored around the pass.
        gl.glPushAttrib(gl.GL_ENABLE_BIT)
        gl.glEnable(gl.GL_BLEND)
        gl.glDisable(gl.GL_DEPTH_TEST)
        gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
        gl.glEnable(gl.GL_TEXTURE_2D)
        gl.glPushClientAttrib(gl.GL_CLIENT_VERTEX_ARRAY_BIT)
        for o in self.objects:
            o.draw()
        gl.glPopClientAttrib()
        gl.glPopAttrib()
    def addobj(self, o):
        # Register, subscribe to position changes, and restore z order.
        self.objects.append(o)
        if self._check_zorder not in o.pos_listeners:
            o.pos_listeners.append(self._check_zorder)
        self.objects.sort(self._cmp)
    def delobj(self, o):
        if self._check_zorder in o.pos_listeners:
            o.pos_listeners.remove(self._check_zorder)
        self.objects.remove(o)
# Module-level singletons registered as utilities so other miru code can
# look up the shared debug and blittable render stages by interface.
_debugView = DebugView()
components.registerUtility(imiru.IDebuggingRenderStage, _debugView)
_blittableView = BlittableView()
components.registerUtility(imiru.IBlittableRenderStage, _blittableView)
class LightMixin(object):
    # Common color/position plumbing shared by DirectionalLight and
    # PositionalLight.  Each color property keeps both the Python tuple
    # and a pre-built GLfloat vector (lspec/ldifs/lambient) for glLightfv.
    # Default GL light slot; reassigned by LightGroup on append/extend.
    light_no = gl.GL_LIGHT0
    def __init__(self, *p, **kw):
        #self.pos = kw.get('pos', (0.5,0.5,1.0))
        #self.angle = Vector3(0.0, 0.0, 0.0) # XXX setting the angle has no effect
        self.ambient = kw.get('ambient', (0.5,0.5,0.5,1))
        self.specular = kw.get('specular', (0.9,0.9,0.9,1))
        self.diffuse = kw.get('diffuse', (1,1,1,1))
        super(LightMixin, self).__init__(*p, **kw)
    def _getspecular(self):
        # TODO
        # get value from gl.GL vector - maximize space
        return self._specular
    def _setspecular(self, specular):
        self._specular = specular
        self.lspec = glvec(*specular)
    def _getdiffuse(self):
        return self._diffuse
    def _setdiffuse(self, diffuse):
        self._diffuse = diffuse
        self.ldifs = glvec(*diffuse)
    def _getambient(self):
        return self._ambient
    def _setambient(self, ambient):
        self._ambient = ambient
        self.lambient = glvec(*ambient)
    specular = property(_getspecular, _setspecular)
    diffuse = property(_getdiffuse, _setdiffuse)
    ambient = property(_getambient, _setambient)
    def _update_light_pos(self, pos, _delta):
        # Position listener: rebuild the homogeneous GL position vector.
        # W is supplied by the subclass (0.0 directional, 1.0 positional).
        self._pos = pos
        self.lpos = glvec(*(tuple(pos) + (self.W,)))
class DirectionalLight(LightMixin, core.PositionalMixin):
    """Directional (infinitely distant) light; ``pos`` acts as a direction.

    W = 0.0 marks the GL position vector as directional.
    """
    W = 0.0
    def __init__(self, *p, **kw):
        # change default for pos
        if 'pos' not in kw:
            kw['pos'] = (0.5,0.5,1.0)
        super(DirectionalLight, self).__init__(*p, **kw)
        # Bug fix: both calls below previously used the bare name
        # `_update_light_pos`, which is a LightMixin method and not a
        # module-level name, raising NameError on instantiation.
        self.pos_listeners.append(self._update_light_pos)
        self._update_light_pos(self.pos, None)
    def on(self):
        # Enable this light's slot and upload its vectors.
        lno = self.light_no
        gl.glEnable(lno)
        gl.glLightfv(lno, gl.GL_POSITION, self.lpos)
        gl.glLightfv(lno, gl.GL_SPECULAR, self.lspec)
        gl.glLightfv(lno, gl.GL_DIFFUSE, self.ldifs)
        gl.glLightfv(lno, gl.GL_AMBIENT, self.lambient)
    def clone(self):
        """Return an independent copy of this light."""
        return DirectionalLight(ambient=tuple(self.ambient),
                specular=tuple(self.specular),
                diffuse=self.diffuse,
                pos=tuple(self.pos),
                angle=tuple(self.angle))
class PositionalLight(LightMixin, core.PositionalMixin):
    """A positinal light's pos is the actual position in 3D space coords.
    The attenuation can be adjusted via the three properties kc, kl and kq
    which are constant, linear and quadratic parts.

    Additionaly, a positional light can be turned into a spotlight by
    specifying a spot_cutoff with value in the range [0,90]. By default
    the spot_cutoff is the special value 180 and behaves as a normal
    positional light source. Other parameters related to spot lights
    (spot_dir, spot_exponent and track_target) will have no effect if
    spot_cutoff is set to 180.

    Related formula:

        attenuation = 1 / (k_c + k_l*d + k_q*d^2)
        k_c = gl.GL_CONSTANT_ATTENUATION
        k_l = gl.GL_LINEAR_ATTENUATION
        k_q = gl.GL_QUADRATIC_ATTENUATION

    @param kc: constant attenuation
    @param kl: linear attenuation
    @param kq: quadratic attenuation
    @param spot_cutoff: spot cut off in range [0,90] or 180 for all directions
    @param spot_exponent: the spot exponent in range [0,128]
    @param spot_dir: the spot direction (unit vector)
    @param track_target: positional object the spot light should track
    """
    # W = 1.0 marks the GL position vector as positional.
    W = 1.0
    _debug = False
    _debugGlobe = None
    _debugTrack = None
    def __init__(self, *p, **kw):
        self.kc = kw.get('kc', 0.0)
        self.kl = kw.get('kl', 0.0)
        # Default to quadratic falloff only when no other term is given.
        if not self.kc and not self.kl:
            self.kq = kw.get('kq', 0.5)
        else:
            self.kq = kw.get('kq', 0.0)
        self.spot_cutoff = kw.get('spot_cutoff', 180.0)
        self.spot_dir = kw.get('spot_dir', (0.0,0.0,-1.0))
        self.spot_exponent = float(kw.get('spot_exponent', 3))
        self.track_target = kw.get('track_target', None)
        # change default for pos
        if 'pos' not in kw:
            kw['pos'] = (0.5,1.0,1.0)
        super(PositionalLight, self).__init__(*p, **kw)
        # Bug fix: both calls below previously used the bare name
        # `_update_light_pos`, which is a LightMixin method and not a
        # module-level name, raising NameError on instantiation.
        self.pos_listeners.append(self._update_light_pos)
        self._update_light_pos(self.pos, None)
        # NOTE(review): `options` is imported as a module above; indexing
        # it assumes dict-like access -- confirm against miru.options.
        if options['debug_view'] or kw.get('debug'):
            self.debug = True
    def on(self):
        # Enable this light's slot and upload attenuation, spot parameters
        # (when acting as a spotlight) and color/position vectors.
        lno = self.light_no
        gl.glEnable(lno)
        gl.glLightf(lno, gl.GL_CONSTANT_ATTENUATION, self.kc)
        gl.glLightf(lno, gl.GL_LINEAR_ATTENUATION, self.kl)
        gl.glLightf(lno, gl.GL_QUADRATIC_ATTENUATION, self.kq)
        if self.is_spot:
            if self.track_target:
                self._track()
            gl.glLightf(lno, gl.GL_SPOT_CUTOFF, self._spot_cutoff)
            gl.glLightfv(lno, gl.GL_SPOT_DIRECTION, self.lspot_dir)
            gl.glLightfv(lno, gl.GL_SPOT_EXPONENT, self.spot_exponent)
        gl.glLightfv(lno, gl.GL_POSITION, self.lpos)
        gl.glLightfv(lno, gl.GL_SPECULAR, self.lspec)
        gl.glLightfv(lno, gl.GL_DIFFUSE, self.ldifs)
        gl.glLightfv(lno, gl.GL_AMBIENT, self.lambient)
    def _track(self):
        # Point the spot at the tracked target.
        v = (self.track_target.pos - self._pos).normalize()
        self.spot_dir = v
    def _getspotcutoff(self):
        return self._spot_cutoff
    def _setspotcutoff(self, angle):
        # 180 is the GL sentinel for "not a spotlight".
        assert angle == 180.0 or (angle >= 0.0 and angle <= 90.0)
        self.is_spot = (angle != 180.0)
        self._spot_cutoff = angle
    def _getspotdir(self):
        return self._spot_dir
    def _setspotdir(self, direction):
        self._spot_dir = Vector3(*direction)
        self.lspot_dir = glvec(*direction)
    def _getspotexponent(self):
        return self._spot_exponent
    def _setspotexponent(self, exp):
        # Stored as a 1-element GLfloat array for glLightfv.
        self._spot_exponent = (1 * gl.GLfloat)()
        self._spot_exponent[:] = [exp]
    spot_cutoff = property(_getspotcutoff, _setspotcutoff)
    spot_dir = property(_getspotdir, _setspotdir)
    spot_exponent = property(_getspotexponent, _setspotexponent)
    def clone(self):
        """Return an independent copy of this light."""
        return PositionalLight(
            pos=tuple(self.pos),
            angle=tuple(self.angle),
            diffuse=tuple(self.diffuse),
            ambient=tuple(self.ambient),
            kc=self.kc, kl=self.kl, kq=self.kq,
            spot_cutoff=self.spot_cutoff,
            spot_exponent=self.spot_exponent[0],
            spot_dir=tuple(self.spot_dir))
class LightGroup(UserList):
    """Known issue:
    On slicing the light group list a sublist of copies (vs. same instances)
    is returned. Current workaround is to only get instances via indexing
    into the light group.

    @param lights: List of lights to add to the group
    """
    def __init__(self, lights=None, enforce_limit=True):
        UserList.__init__(self)
        # When True, reject groups larger than the GL light limit.
        self.enforce_limit = enforce_limit
        self.extend(lights or [])
    def on(self):
        # Save lighting state and switch every light in the group on.
        # Matching glPopAttrib/glPopMatrix happen in off().
        gl.glPushMatrix()
        gl.glEnable(gl.GL_DEPTH_TEST)
        gl.glPushAttrib(gl.GL_LIGHTING_BIT)
        gl.glEnable(gl.GL_LIGHTING)
        for l in self:
            l.on()
    def off(self):
        gl.glPopAttrib()
        #gl.glDisable(gl.GL_DEPTH_TEST)
        gl.glPopMatrix()
    def append(self, light):
        # Assign the next light slot (virtual slot when the limit is not
        # enforced) before storing the light.
        if self.enforce_limit:
            _constrain_lights(len(self) + 1)
            light.light_no = lightno(len(self))
        else:
            light.light_no = vlightno(len(self))
        UserList.append(self, light)
    def extend(self, lights):
        if self.enforce_limit:
            _constrain_lights(len(self) + len(lights))
        lts = []
        for i, l in enumerate(lights):
            if self.enforce_limit:
                l.light_no = lightno(len(self) + i)
            else:
                l.light_no = vlightno(len(self) + i)
            lts.append(l)
        UserList.extend(self, lts)
def _camera_on_translate(control, target, x, y, dx, dy):
self = control
# XXX for backwards compat
if hasattr(self, 'camera_move_factor'):
f = self.camera_move_factor
else:
f = self.sensitivity
if self.axis == self.AXIS_X:
target.pos += (f * dx, 0, 0)
elif self.axis == self.AXIS_Y:
target.pos += (0, f * dy, 0)
elif self.axis == self.AXIS_Z:
d = math.sqrt(dx**2 + dy**2)
d *= (3,-3)[dy < 0]
target.pos += (0, 0, f * d)
else:
target.pos += (f * dx, f * dy, 0)
@implementer(imiru.ITranslationHandler)
def _camera_on_translate_adpt(_context):
    # Adapter factory: any ICamera adapts to ITranslationHandler via the
    # shared drag-translate handler above.
    return _camera_on_translate
registerAdapter(_camera_on_translate_adpt, imiru.ICamera, imiru.ITranslationHandler)
@implementer(imiru.IRotationHandler)
def _camera_on_rotate_adpt(_context):
    # Adapter factory: cameras rotate with the generic world-object handler.
    return core._worldobj_on_rotate
registerAdapter(_camera_on_rotate_adpt, imiru.ICamera, imiru.IRotationHandler)
###############
# Experimental
def _setFocussed(m):
    # Decorator used by MetaCamera: before delegating to method ``m``,
    # select the child camera whose viewport contains the mouse pointer
    # and store it as self._focussed (falls back to cameras[0]).
    def _m(self, *p, **kw):
        #from miru.environment import env
        from miru.context import context
        from miru import input
        #from miru import controls
        x,y = input.mouseXY()
        for c in self.cameras:
            vp = c.projection
            # Viewport corners are fractions of the window size.
            bl_x = vp.bl[0] * context.window.width
            bl_y = vp.bl[1] * context.window.height
            tr_x = vp.tr[0] * context.window.width
            tr_y = vp.tr[1] * context.window.height
            if (x >= bl_x and x <= tr_x) and (y >= bl_y and y <= tr_y):
                self._focussed = c
                break
        else:
            self._focussed = self.cameras[0]
        return m(self, *p, **kw)
    _m.__name__ = m.__name__
    return _m
class MetaCamera(object):
    """Experimental camera that multiplexes several child cameras (one per
    viewport); attribute access is routed to the child whose viewport the
    mouse is currently over (see _setFocussed)."""
    implements(imiru.ICamera, imiru.IWorldRenderStage)
    # These default to None so that projection and
    # lights can be customized on child cameras.
    _projection = None
    _lights = None
    _debug_lines = None
    effects = []
    def __init__(self, *cameras):
        from warnings import warn
        warn('MetaCamera is experimental - expect things to break')
        self.cameras = cameras
        self._focussed = self.cameras[0]
        # Projection used to restore a full-window view after rendering
        # each child viewport.
        self._resetProjection = PerspectiveProjection()
        self._resetProjection.camera = self
    def render(self, *p, **kw):
        #from miru.environment import env
        from miru.context import context
        for c in self.cameras:
            c.render(*p, **kw)
        self._resetProjection.on_resize(
                context.window.width, context.window.height)
    @property
    def projection(self):
        # Composite projection fanning resizes out to all children.
        return _MetaProjection(self, [c.projection for c in self.cameras])
    def _getlights(self):
        return self.cameras[0].lights
    def _setlights(self, lights):
        # Share one light group across all child cameras.
        for c in self.cameras:
            c.lights = lights
        self._lights = lights
    lights = property(_getlights, _setlights)
    @_setFocussed
    def _getangle(self):
        return self._focussed.angle
    @_setFocussed
    def _setangle(self, angle):
        self._focussed.angle = angle
    angle = property(_getangle, _setangle)
    @_setFocussed
    def _getpos(self):
        return self._focussed.pos
    @_setFocussed
    def _setpos(self, pos):
        self._focussed.pos = pos
    pos = property(_getpos, _setpos)
    @property
    @_setFocussed
    def objects(self):
        return self._focussed.objects
    @property
    @_setFocussed
    def focussed(self):
        return self._focussed
    # FIXME
    def add_object(self, obj):
        # Objects are always added to the first child camera.
        self.cameras[0].addobj(obj)
    def remove_object(self, obj):
        self.cameras[0].delobj(obj)
    def _getdebug(self):
        return self._debug
    def _setdebug(self, debug):
        # Toggle a ViewportOutline overlay in the shared context.
        from miru.context import context
        self._debug = debug
        if debug:
            if not self._debug_lines:
                self._debug_lines = core.Object(ViewportOutline(self))
            context.add_object(self._debug_lines)
        else:
            context.remove_object(self._debug_lines)
    debug = property(_getdebug, _setdebug)
class _MetaProjection(BaseProjection):
    """Composite projection that fans resize events out to child projections."""

    def __init__(self, camera, projections):
        self.camera = camera
        self.projections = projections

    def on_resize(self, width, height, x=0, y=0):
        # x/y offsets are ignored; each child derives its own rect.
        for proj in self.projections:
            proj.on_resize(width, height)
class Viewport(BaseProjection):
    """GL viewport. bl and tr are bottom-left and top-right coordinates
    of the viewports. (0,0) is given for the left and bottom and (1,1)
    for the top and right of the window.

    @param bl: bottom-left of the viewport
    @param tr: top-right of the viewport
    """
    def __init__(self, bl=(0.,0.), tr=(1.,1.), contextProjection=None):
        self.contextProjection = contextProjection or PerspectiveProjection()
        self.bl = bl
        self.tr = tr
        # Pixel geometry, refreshed by on_resize().
        # Bug fix: this previously set ``self.window = 0`` and never
        # initialized ``self.width``, so enable() before the first
        # on_resize() raised AttributeError; no code reads a ``window``
        # attribute on Viewport.
        self.width = 0
        self.height = 0
        self.x = 0
        self.y = 0
        self.viewport_on = True
    def on_resize(self, width, height, x=None, y=None):
        # Scale the fractional bl/tr corners into a pixel rect and forward
        # it to the wrapped projection (x/y args are ignored; the offset
        # derives from bl).
        self.height = int((self.tr[1] - self.bl[1]) * height)
        self.width = int((self.tr[0] - self.bl[0]) * width)
        self.x = int(self.bl[0] * width)
        self.y = int(self.bl[1] * height)
        self.contextProjection.on_resize(self.width, self.height,
                self.x, self.y)
    def enable(self):
        # Re-apply this viewport's rect before the owning camera renders.
        if self.viewport_on:
            self.contextProjection.on_resize(self.width, self.height,
                    self.x, self.y)
    def _getcamera(self):
        return self.contextProjection.camera
    def _setcamera(self, camera):
        self.contextProjection.camera = camera
    camera = property(_getcamera, _setcamera)
class ViewportOutline(object):
    """Debug drawable outlining each child viewport of a MetaCamera with a
    stippled rectangle."""
    implements(imiru.IDrawable)
    renderStages = (imiru.IOSDRenderStage,)
    def __init__(self, metaCamera, color=(0.2,0,0,1)):
        self.metaCamera = metaCamera
        self.color = color
    def draw(self):
        from miru.context import context
        gl.glEnable(gl.GL_LINE_STIPPLE)
        gl.glLineStipple(1, 0x51315)
        gl.glColor4f(*self.color)
        for c in self.metaCamera.cameras:
            vp = c.projection
            # Nudge edges sitting exactly on the window border inward by
            # one pixel so the outline remains visible.
            x = vp.x == 0 and 1 or vp.x
            width = vp.x == 0 and vp.width - 1 or vp.width
            width = (vp.x + vp.width) >= context.window.width and width - 1 or width
            y = vp.y == 0 and 1 or vp.y
            height = vp.y == 0 and vp.height - 1 or vp.height
            height = (vp.y + vp.height) >= context.window.height and height - 1 or height
            gl.glBegin(gl.GL_LINE_LOOP)
            gl.glVertex2f(x, y)
            gl.glVertex2f(x, y + height)
            gl.glVertex2f(x + width, y + height)
            gl.glVertex2f(x + width, y)
            gl.glEnd()
        # Bug fix: previously disabled GL_LINE_LOOP, which is a primitive
        # mode, not a glEnable capability; the capability enabled above is
        # GL_LINE_STIPPLE, so that is what must be disabled here.
        gl.glDisable(gl.GL_LINE_STIPPLE)
        gl.glColor4f(1,1,1,1)
|
|
from pymatch import *
import functions as uf
class Matcher:
'''
Matcher Class -- Match data for an observational study.
Args:
test (pd.DataFrame): Data representing the test group
control (pd.DataFrame): Data representing the control group
formula (str): custom formula to use for logistic regression
i.e. "Y ~ x1 + x2 + ..."
yvar (str): Name of dependent variable (the treatment)
exclude (list): List of variables to ignore in regression/matching.
Useful for unique idenifiers
'''
    def __init__(self, test, control, yvar, formula=None, exclude=[]):
        # configure plots for ipynb
        plt.rcParams["figure.figsize"] = (10, 5)
        # assign unique indices to test and control
        t, c = [i.copy().reset_index(drop=True) for i in (test, control)]
        c.index += len(t)
        self.data = t.append(c).dropna(axis=1, how="all")
        self.control_color = "#1F77B4"
        self.test_color = "#FF7F0E"
        self.yvar = yvar
        # Columns never used as predictors, including the bookkeeping
        # columns added later by predict_scores()/match().
        self.exclude = exclude + [self.yvar] + ['scores', 'match_id']
        self.formula = formula
        self.models = []
        self.swdata = None
        self.model_accurracy = []
        self.data[yvar] = self.data[yvar].astype(int) # should be binary 0, 1
        self.xvars = [i for i in self.data.columns if i not in self.exclude and i != yvar]
        self.matched_data = []
        # Build the full design matrix once for the unbalanced model.
        self.y, self.X = patsy.dmatrices('{} ~ {}'.format(yvar, '+'.join(self.xvars)), data=self.data,
                return_type='dataframe')
        # NOTE(review): this re-assignment filters only on the caller's
        # `exclude` (not self.exclude), so yvar is re-introduced into
        # xvars -- confirm this is intended.
        self.xvars = [i for i in self.data.columns if i not in exclude]
        self.test= self.data[self.data[yvar] == True]
        self.control = self.data[self.data[yvar] == False]
        self.testn = len(self.test)
        self.controln = len(self.control)
        # Class labels (1 = test, 0 = control) ordered by group size.
        self.minority, self.majority = \
                [i[1] for i in sorted(zip([self.testn, self.controln], [1, 0]),
                    key=lambda x: x[0])]
        print 'Formula:\n{} ~ {}'.format(yvar, '+'.join(self.xvars))
        print 'n majority:', len(self.data[self.data[yvar] == self.majority])
        print 'n minority:', len(self.data[self.data[yvar] == self.minority])
    def fit_scores(self, balance=True, nmodels=None):
        """
        Fit propensity-score model(s) via logistic regression (GLM/Binomial).

        Args:
            balance (bool): Should balanced datasets be used?
                (n_control ~ n_test)
            nmodels (int): How many models should be fit?
                Score becomes the average of the <nmodels> models if nmodels > 1
        """
        # reset models if refitting
        if len(self.models) > 0:
            self.models = []
        if len(self.model_accurracy) > 0:
            self.model_accurracy = []
        if not self.formula:
            # use all columns in the model
            self.formula = '{} ~ {}'.format(self.yvar, '+'.join(self.xvars))
        if balance:
            if nmodels is None:
                # fit multiple models based on imbalance severity (rounded up to nearest tenth)
                minor, major = [self.data[self.data[self.yvar] == i] for i in (self.minority, self.majority)]
                nmodels = int(np.ceil((len(major) / len(minor)) / 10) * 10)
            self.nmodels = nmodels
            i = 0
            errors = 0
            # Keep fitting until nmodels succeed; give up after 5 failures
            # so a misspecified matrix cannot loop forever.
            while i < nmodels and errors < 5:
                uf.progress(i+1, nmodels,
                        prestr="Fitting {} Models on Balanced Samples...".format(nmodels))
                # sample from majority to create balance dataset
                df = self.balanced_sample()
                # Drop constant columns within each class so patsy does not
                # produce a rank-deficient design matrix.
                df = pd.concat([uf.drop_static_cols(df[df[self.yvar] == 1], yvar=self.yvar),
                        uf.drop_static_cols(df[df[self.yvar] == 0], yvar=self.yvar)])
                y_samp, X_samp = patsy.dmatrices(self.formula, data=df, return_type='dataframe')
                X_samp.drop(self.yvar, axis=1, errors='ignore', inplace=True)
                glm = GLM(y_samp, X_samp, family=sm.families.Binomial())
                try:
                    res = glm.fit()
                    self.model_accurracy.append(self._scores_to_accuracy(res, X_samp, y_samp))
                    self.models.append(res)
                    i += 1
                except Exception as e:
                    errors += 1 # to avoid infinite loop for misspecified matrix
                    print 'Error: {}'.format(e)
            print "\nAverage Accuracy:", "{}%".\
                    format(round(np.mean(self.model_accurracy) * 100, 2))
        else:
            # ignore any imbalance and fit one model
            self.nmodels = 1
            print '\nFitting 1 (Unbalanced) Model...'
            glm = GLM(self.y, self.X, family=sm.families.Binomial())
            res = glm.fit()
            self.model_accurracy.append(self._scores_to_accuracy(res, self.X, self.y))
            self.models.append(res)
            print "Accuracy", round(np.mean(self.model_accurracy[0]) * 100, 2)
def predict_scores(self):
"""
Predict Propensity scores for each observation
"""
scores = np.zeros(len(self.X))
for i in range(self.nmodels):
uf.progress(i+1, self.nmodels, "Caclculating Propensity Scores...")
m = self.models[i]
scores += m.predict(self.X[m.params.index])
self.data['scores'] = scores/self.nmodels
    def match(self, threshold=0.001, nmatches=1, method='min', max_rand=10):
        """
        Match data

        Args:
            threshold (float): threshold for "exact" matching
                i.e. |score_x - score_y| >= threshold
            nmatches (int): How control profiles should be matched
                (at most) to test
            method (str): Strategy for when multiple control profiles
                are suitable matches for a single test profile
                "random" - choose randomly
                "min" - choose the profile with the closest score
            max_rand (int): upper bound (inclusive) on the random draw
                size used when method == "random"
        """
        if 'scores' not in self.data.columns:
            print "Propensity Scores have not been calculated. Using defaults..."
            self.fit_scores()
            self.predict_scores()
        test_scores = self.data[self.data[self.yvar]==True][['scores']]
        ctrl_scores = self.data[self.data[self.yvar]==False][['scores']]
        result, match_ids = [], []
        for i in range(len(test_scores)):
            # uf.progress(i+1, len(test_scores), 'Matching Control to Test...')
            match_id = i
            score = test_scores.iloc[i]
            if method == 'random':
                # All controls within `threshold` of this test score.
                bool_match = abs(ctrl_scores - score) <= threshold
                matches = ctrl_scores.loc[bool_match[bool_match.scores].index]
            elif method == 'min':
                # Single control with the closest score.
                matches = abs(ctrl_scores - score).sort_values('scores').head(1)
            else:
                raise AssertionError, "Invalid tie_strategy parameter, use ('random', 'min')"
            if len(matches) == 0:
                continue
            # randomly choose nmatches indices, if len(matches) > nmatches
            select = nmatches if method != 'random' else np.random.choice(range(1, max_rand+1), 1)
            chosen = np.random.choice(matches.index, min(select, nmatches), replace=False)
            result.extend([test_scores.index[i]] + list(chosen))
            match_ids.extend([i] * (len(chosen)+1))
        self.matched_data = self.data.loc[result]
        self.matched_data['match_id'] = match_ids
    def select_from_design(self, cols):
        # Return the design-matrix columns whose names start with any entry
        # of `cols` (patsy expands categoricals, e.g. "x" -> "x[T.a]").
        # NOTE(review): DataFrame.select is deprecated in modern pandas;
        # .filter(regex=...) or .loc with a callable is the replacement.
        d = pd.DataFrame()
        for c in cols:
            d = pd.concat([d, self.X.select(lambda x: x.startswith(c), axis=1)], axis=1)
        return d
def balanced_sample(self, data=None):
if not data:
data=self.data
minor, major = data[data[self.yvar] == self.minority], data[data[self.yvar] == self.majority]
return major.sample(len(minor)).append(minor).dropna()
    def plot_scores(self):
        """Plot the propensity-score distributions of test and control
        groups before matching."""
        assert 'scores' in self.data.columns, "Propensity scores haven't been calculated, use Matcher.predict_scores()"
        sns.distplot(self.data[self.data[self.yvar]==False].scores, label='Control')
        sns.distplot(self.data[self.data[self.yvar]==True].scores, label='Test')
        plt.legend(loc='upper right')
        plt.xlim((0, 1))
        plt.title("Propensity Scores Before Matching")
        plt.ylabel("Percentage (%)")
        plt.xlabel("Scores")
def prop_test(self, col):
    """Chi-square test of independence for a categorical column, before and after matching.

    Parameters
    ----------
    col : str
        Name of a categorical (non-continuous) column not in ``self.exclude``.

    Returns
    -------
    dict or None
        ``{'var', 'before', 'after'}`` with p-values rounded to 6 places, or
        None (with a printed message) when ``col`` is continuous or excluded.
    """
    if not uf.is_continuous(col, self.X) and col not in self.exclude:
        pval_before = round(stats.chi2_contingency(self.prep_prop_test(self.data, col))[1], 6)
        pval_after = round(stats.chi2_contingency(self.prep_prop_test(self.matched_data, col))[1], 6)
        return {'var': col, 'before': pval_before, 'after': pval_after}
    else:
        # Bug fix: was a Python-2-only ``print`` statement; the parenthesized
        # call form behaves identically on both Python 2 and 3.
        print("{} is a continuous variable".format(col))
def compare_continuous(self, save=False, return_table=False):
    """Compare continuous covariates between test and control, before and after matching.

    For every continuous column of ``self.matched_data`` (not in
    ``self.exclude``) this plots the before/after ECDFs and collects
    KS-boot p-values, grouped-permutation chi-square p-values, and
    standardized median/mean differences.

    Parameters
    ----------
    save : bool
        If True, save each figure to "<col>-ecdf.pdf".
    return_table : bool
        If True, return the collected statistics as a DataFrame.

    Returns
    -------
    pandas.DataFrame or None
    """
    test_results = []
    for col in self.matched_data.columns:
        if uf.is_continuous(col, self.X) and col not in self.exclude:
            if save: pp = PdfPages("{}-ecdf.pdf".format(col))
            # organize data
            trb, cob = self.test[col], self.control[col]
            tra = self.matched_data[self.matched_data[self.yvar]==True][col]
            coa = self.matched_data[self.matched_data[self.yvar]==False][col]
            xtb, xcb = ECDF(trb), ECDF(cob)
            xta, xca = ECDF(tra), ECDF(coa)
            # before/after stats
            std_diff_med_before, std_diff_mean_before = uf.std_diff(trb, cob)
            std_diff_med_after, std_diff_mean_after = uf.std_diff(tra, coa)
            pb, truthb = uf.grouped_permutation_test(uf.chi2_distance, trb, cob)
            pa, trutha = uf.grouped_permutation_test(uf.chi2_distance, tra, coa)
            ksb = round(uf.ks_boot(trb, cob, nboots=1000), 6)
            ksa = round(uf.ks_boot(tra, coa, nboots=1000), 6)
            # plotting
            f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True, figsize=(12, 5))
            # Bug fix: these two curves were previously plotted twice.
            ax1.plot(xcb.x, xcb.y, label='Control', color=self.control_color)
            ax1.plot(xtb.x, xtb.y, label='Test', color=self.test_color)
            title_str = '''
            ECDF for {} {} Matching
            KS p-value: {}
            Grouped Perm p-value: {}
            Std. Median Difference: {}
            Std. Mean Difference: {}
            '''
            ax1.set_title(title_str\
                .format(col, "before", ksb, pb, std_diff_med_before, std_diff_mean_before))
            ax2.plot(xca.x, xca.y, label='Control')
            ax2.plot(xta.x, xta.y, label='Test')
            ax2.set_title(title_str\
                .format(col, "after", ksa, pa, std_diff_med_after, std_diff_mean_after))
            ax2.legend(loc="lower right")
            plt.xlim((0, np.percentile(xta.x, 99)))
            test_results.append({
                "var": col,
                "ks_before": ksb,
                "ks_after": ksa,
                "perm_chisqr_before": pb,
                "grouped_chisqr_after": pa,
                "std_median_diff_before": std_diff_med_before,
                "std_median_diff_after": std_diff_med_after,
                "std_mean_diff_before": std_diff_mean_before,
                "std_mean_diff_after": std_diff_mean_after
            })
            if save:
                pp.savefig()
                # Bug fix: each column opens its own PdfPages, but only the
                # last one was closed after the loop, leaking the earlier
                # (possibly unflushed) PDF files. Close each one here.
                pp.close()
    var_order=["var",
               "ks_before",
               "ks_after",
               "perm_chisqr_before",
               "grouped_chisqr_after",
               "std_median_diff_before",
               "std_median_diff_after",
               "std_mean_diff_before",
               "std_mean_diff_after"]
    return pd.DataFrame(test_results)[var_order] if return_table else None
def compare_discrete(self, return_table=False):
    """Compare categorical covariates before and after matching.

    For every non-continuous column (not in ``self.exclude``) this plots the
    per-category proportional difference (test minus control) before and
    after matching, and runs ``self.prop_test`` for the chi-square p-values.

    Parameters
    ----------
    return_table : bool
        If True, return the p-value table as a DataFrame.

    Returns
    -------
    pandas.DataFrame or None
    """
    def prep_plot(data, var, colname):
        # Per-category frequency difference (test - control) as a one-column
        # DataFrame named ``colname``; categories missing on either side drop out.
        t, c = data[data[self.yvar]==1], data[data[self.yvar]==0]
        #dummy var for counting
        dummy = [i for i in t.columns if i != var][0]
        countt = t[[var, dummy]].groupby(var).count() / len(t)
        countc = c[[var, dummy]].groupby(var).count() / len(c)
        ret = (countt-countc).dropna()
        ret.columns = [colname]
        return ret
    title_str = '''
    Proportional Difference (test-control) for {} Before and After Matching
    Chi-Square Test for Independence p-value before | after:
    {} | {}
    '''
    test_results = []
    for col in self.matched_data.columns:
        if not uf.is_continuous(col, self.X) and col not in self.exclude:
            dbefore = prep_plot(self.data, col, colname="before")
            dafter = prep_plot(self.matched_data, col, colname="after")
            # Joined on the category index so before/after bars line up.
            df = dbefore.join(dafter)
            test_results_i = self.prop_test(col)
            test_results.append(test_results_i)
            # plotting
            df.plot.bar(alpha=.8)
            plt.title(title_str.format(col, test_results_i["before"], test_results_i["after"]))
            # Symmetric y-limits with a floor of ~0.1 so tiny differences stay visible.
            lim = max(.09, abs(df).max().max()) + .01
            plt.ylim((-lim, lim))
    return pd.DataFrame(test_results)[['var', 'before', 'after']] if return_table else None
def prep_prop_test(self, data, var):
    """Build a 2 x k contingency table of ``var`` against ``self.yvar``.

    Returns a list of two lists (one per yvar group 0 and 1), each holding
    the per-category counts in sorted category order, suitable for
    ``scipy.stats.chi2_contingency``.
    """
    counts = data.groupby([var, self.yvar]).count().reset_index()
    table = []
    for t in (0, 1):
        os_counts = counts[counts[self.yvar] ==t]\
                        .sort_values(var)
        cdict = {}
        for row in os_counts.iterrows():
            row = row[1]
            # NOTE(review): row[2] relies on the positional column order after
            # reset_index() (var, yvar, first count column) — verify this holds
            # for the caller's DataFrame layout.
            cdict[row[var]] = row[2]
        table.append(cdict)
    # fill empty keys as 0
    all_keys = set(chain.from_iterable(table))
    for d in table:
        d.update((k, 0) for k in all_keys if k not in d)
    # Both rows ordered by sorted category key so columns align.
    ctable = [[i[k] for k in sorted(all_keys)] for i in table]
    return ctable
def prop_retained(self):
    """Fraction of minority-class records that survived matching."""
    n_matched = len(self.matched_data[self.matched_data[self.yvar] == self.minority])
    n_total = len(self.data[self.data[self.yvar] == self.minority])
    return n_matched * 1.0 / n_total
def tune_threshold(self, method, nmatches=1, rng=np.arange(0, .001, .0001)):
    """Plot the proportion of minority data retained across a grid of match thresholds."""
    retained = []
    for threshold in rng:
        self.match(method=method, nmatches=nmatches, threshold=threshold)
        retained.append(self.prop_retained())
    plt.plot(rng, retained)
    plt.title("Proportion of Data retained for grid of threshold values")
    plt.ylabel("Proportion Retained")
    plt.xlabel("Threshold")
    plt.xticks(rng)
def _scores_to_accuracy(self, m, X, y):
preds = [1.0 if i >= .5 else 0.0 for i in m.predict(X)]
return (y == preds).sum() * 1.0 / len(y)
|
|
"""
Module for handling Fekete points approximations.
"""
from math import pi, sin
from numpy import empty, arange
from numpy.linalg import solve
from gauss_lobatto_points import points
def get_x_phys(x_ref, a, b):
    """Map a reference coordinate x_ref in [-1, 1] onto the physical element <a, b>."""
    midpoint = (a + b) / 2.
    half_width = (b - a) / 2.
    return midpoint + x_ref * half_width
class Mesh1D(object):
    """A 1D mesh: element boundary points plus a polynomial order per element."""

    def __init__(self, points, orders):
        """
        points -- sequence of n+1 element boundaries
        orders -- sequence of n polynomial orders, one per element

        Raises ValueError when the two lengths are inconsistent.
        """
        if not (len(points) == len(orders) + 1):
            # Bug fix: raise the precise ValueError instead of a bare
            # Exception; ValueError subclasses Exception, so existing
            # ``except Exception`` callers keep working.
            raise ValueError("points vs order mismatch")
        self._points = points
        self._orders = orders

    def iter_elems(self):
        """Yield (a, b, order) for each element, left to right."""
        for i in range(len(self._orders)):
            yield (self._points[i], self._points[i+1], self._orders[i])

    def plot(self, call_show=True):
        """Plot each element as a box whose height is its order, alternating colors."""
        try:
            from jsplot import plot, show
        except ImportError:
            from pylab import plot, show
        odd = False
        for a, b, order in self.iter_elems():
            fekete_points = points[order]
            fekete_points = [get_x_phys(x, a, b) for x in fekete_points]
            if odd:
                format = "y-"
            else:
                format = "k-"
            odd = not odd
            plot([a, a, b, b], [0, order, order, 0], format, lw=2)
        if call_show:
            show()
class Function(object):
    """
    Represents a function on a mesh.

    The values are given in the Fekete points of each element.
    """

    def __init__(self, obj, mesh=None):
        """Sample the callable ``obj`` at the Fekete points of ``mesh``."""
        if not isinstance(mesh, Mesh1D):
            raise Exception("You need to specify a mesh.")
        self._mesh = mesh
        self._values = []
        for a, b, order in mesh.iter_elems():
            fekete_points = points[order]
            elem_values = []
            # Note: this is not a projection, so the result is not the best
            # approximation possible:
            for p in fekete_points:
                p = get_x_phys(p, a, b)
                val = obj(p)
                elem_values.append(val)
            self._values.append(elem_values)

    def get_polynomial(self, values, a, b):
        """
        Returns the interpolating polynomial's coeffs (highest power first).

        The len(values) specifies the order and we work in the element <a, b>
        """
        n = len(values)
        A = empty((n, n), dtype="double")
        y = empty((n,), dtype="double")
        x = points[n-1]
        assert len(x) == n
        # Vandermonde system: row i evaluates the monomials at node x[i].
        for i in range(n):
            for j in range(n):
                A[i, j] = get_x_phys(x[i], a, b)**(n-j-1)
            y[i] = values[i]
        a = solve(A, y)
        return a

    def eval_polynomial(self, coeffs, x):
        """Evaluate a polynomial given by ``coeffs`` (highest power first) at ``x``."""
        r = 0
        n = len(coeffs)
        for i, a in enumerate(coeffs):
            r += a*x**(n-i-1)
        return r

    def __call__(self, x):
        # Find the first element whose right edge covers x, then interpolate.
        for n, (a, b, order) in enumerate(self._mesh.iter_elems()):
            if b < x:
                continue
            # This can be made faster by using Lagrange interpolation
            # polynomials (no need to invert a matrix in order to get the
            # polynomial below). The results are however identical.
            coeffs = self.get_polynomial(self._values[n], a, b)
            return self.eval_polynomial(coeffs, x)

    def project_onto(self, mesh):
        # This is not a true projection, only some approximation:
        return Function(self, mesh)

    def plot(self, call_show=True):
        """Plot the function (sampled every 0.1) plus its Fekete points as dots."""
        try:
            from jsplot import plot, show
        except ImportError:
            from pylab import plot, show
        odd = False
        for n, (a, b, order) in enumerate(self._mesh.iter_elems()):
            fekete_points = points[order]
            vals = self._values[n]
            assert len(vals) == len(fekete_points)
            fekete_points = [get_x_phys(x, a, b) for x in fekete_points]
            x = arange(a, b, 0.1)
            y = [self(_x) for _x in x]
            if odd:
                format = "g-"
            else:
                format = "r-"
            odd = not odd
            plot(x, y, format)
            plot(fekete_points, vals, "ko")
        if call_show:
            show()

    def __eq__(self, o):
        # Two Functions are equal when they agree (within eps) at the
        # Fekete points of both meshes.
        eps = 1e-12
        if isinstance(o, Function):
            for a, b, order in self._mesh.iter_elems():
                fekete_points = points[order]
                fekete_points = [get_x_phys(x, a, b) for x in fekete_points]
                for p in fekete_points:
                    if abs(self(p) - o(p)) > eps:
                        return False
            for a, b, order in o._mesh.iter_elems():
                fekete_points = points[order]
                fekete_points = [get_x_phys(x, a, b) for x in fekete_points]
                for p in fekete_points:
                    if abs(self(p) - o(p)) > eps:
                        return False
            return True
        else:
            return False

    def __ne__(self, o):
        # Bug fix: this hook was previously misspelled ``__neq__``, a name
        # Python never calls, so ``!=`` silently fell back to the default
        # identity-based comparison instead of negating __eq__.
        return not self.__eq__(o)

    # Backward-compatible alias for any caller using the old (wrong) name.
    __neq__ = __ne__

    def get_mesh_adapt(self, max_order=12):
        """Return the mesh (adaptive refinement not implemented yet)."""
        return self._mesh
def test1():
    """Constructing a Mesh1D with consistent points/orders must not raise."""
    Mesh1D((-5, -4, 3, 10), (1, 5, 1))
def test2():
    """Exactness and error bounds for interpolating x**2 on various meshes."""
    eps = 1e-12
    func = lambda x: x**2
    # Orders (2, 5, 2): x**2 is representable exactly on every element.
    f = Function(func, Mesh1D((-5, -4, 3, 10), (2, 5, 2)))
    for x in [-5, -4.5, -4, -3, -2, -1, 0, 0.01, 1e-5, 1, 2, 3, 4, 5, 6, 7, 10]:
        assert abs(f(x) - func(x)) < eps
    # First element only linear: exact at the nodes, inexact inside (-5, -4).
    f = Function(func, Mesh1D((-5, -4, 3, 10), (1, 5, 2)))
    for x in [-5, -4, -3, -2, -1, 0, 0.01, 1e-5, 1, 2, 3, 4, 5, 6, 7, 10]:
        assert abs(f(x) - func(x)) < eps
    x = -4.9
    assert abs(f(x) - func(x)) > 0.08
    x = -4.5
    assert abs(f(x) - func(x)) > 0.24
    # First and last element linear: large interpolation error inside (3, 10) too.
    f = Function(func, Mesh1D((-5, -4, 3, 10), (1, 5, 1)))
    for x in [-5, -4, -3, -2, -1, 0, 0.01, 1e-5, 1, 2, 3, 10]:
        assert abs(f(x) - func(x)) < eps
    x = -4.9
    assert abs(f(x) - func(x)) > 0.08
    x = -4.5
    assert abs(f(x) - func(x)) > 0.24
    # Error lower bounds across the wide last element, peaking mid-element.
    x = 4
    assert abs(f(x) - func(x)) > 5.9
    x = 5
    assert abs(f(x) - func(x)) > 9.9
    x = 6
    assert abs(f(x) - func(x)) > 11.9
    x = 7
    assert abs(f(x) - func(x)) > 11.9
    x = 8
    assert abs(f(x) - func(x)) > 9.9
    x = 9
    assert abs(f(x) - func(x)) > 5.9
def test3():
    """Orders (1, 5, 1) reproduce x**p (p <= 5) exactly on the order-5 element."""
    eps = 1e-12
    sample_xs = [-4, -3, -2, -1, 0, 0.01, 1e-5, 1, 2, 3]
    for power in (2, 3, 4, 5):
        f = Function(lambda x, p=power: x**p, Mesh1D((-5, -4, 3, 10), (1, 5, 1)))
        for x in sample_xs:
            assert abs(f(x) - x**power) < eps
    # Order 5 cannot represent x**6: check interpolation-error lower bounds.
    f = Function(lambda x: x**6, Mesh1D((-5, -4, 3, 10), (1, 5, 1)))
    for x, min_err in ((-1, 61.9), (0, 61.9), (1, 61.6), (2, 28.9)):
        assert abs(f(x) - x**6) > min_err
def test4():
    """Projection between meshes preserves functions both can represent."""
    func = lambda x: x**2
    fine_mesh = Mesh1D((-5, -4, 3, 10), (1, 5, 1))
    coarse_mesh = Mesh1D((-5, -4, 3, 10), (1, 1, 1))
    f = Function(func, fine_mesh)
    g = f.project_onto(coarse_mesh)
    h = Function(func, coarse_mesh)
    assert g == Function(func, coarse_mesh)
    assert h == h.project_onto(fine_mesh)
def test5():
    """Equality of Functions sampled on different meshes that represent x**2."""
    square = lambda x: x**2
    mesh1 = Mesh1D((-5, -4, 3, 10), (2, 5, 2))
    mesh2 = Mesh1D((-5, -4, 3, 10), (2, 2, 2))
    mesh3 = Mesh1D((-5, -4, 3, 10), (2, 2, 1))
    mesh4 = Mesh1D((-5, 10), (2,))
    mesh5 = Mesh1D((-5, 10), (3,))
    mesh6 = Mesh1D((-5, 10), (1,))
    f = Function(square, mesh1)
    g = Function(square, mesh2)
    h = Function(square, mesh3)
    l = Function(square, mesh4)
    # All meshes with order >= 2 everywhere represent x**2 exactly, so these agree:
    for lhs, rhs in ((f, g), (g, f), (f, l), (g, l)):
        assert lhs == rhs
    # mesh3 has an order-1 element, so h differs from the exact representations:
    for lhs, rhs in ((f, h), (h, f), (g, h), (h, g)):
        assert lhs != rhs
    assert f == Function(lambda x: x**2, mesh1)
    assert f != Function(lambda x: x**3, mesh1)
    assert f == Function(lambda x: x**2, mesh2)
    assert f == Function(lambda x: x**2, mesh4)
    assert f == Function(lambda x: x**2, mesh5)
    assert f != Function(lambda x: x**2, mesh6)
def main():
    """Run the whole Fekete test suite."""
    for test in (test1, test2, test3, test4, test5):
        test()
    # Example usage (kept for reference):
    #f = Function(lambda x: sin(x), Mesh1D((-pi,pi), (12,)))
    #mesh = f.get_mesh_adapt(max_order=1)
    #mesh = Mesh1D((-pi, -1, 0, 1, pi), (1, 1, 1, 1))
    #mesh.plot(False)
    #f.plot(False)
    #f.project_onto(mesh).plot()

if __name__ == "__main__":
    main()
|
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# pylint: disable=invalid-name
import pytest
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal, assert_array_less
from astropy.modeling import models, InputParameterError
from astropy.coordinates import Angle
from astropy.modeling import fitting
from astropy.utils.exceptions import AstropyUserWarning
from astropy.utils.compat.optional_deps import HAS_SCIPY # noqa
def test_sigma_constant():
    """
    Check that the GAUSSIAN_SIGMA_TO_FWHM constant defined locally in
    astropy.modeling (to avoid importing from astropy.stats) stays in
    sync with astropy.stats' gaussian_sigma_to_fwhm.
    """
    from astropy.modeling.functional_models import GAUSSIAN_SIGMA_TO_FWHM
    from astropy.stats.funcs import gaussian_sigma_to_fwhm
    assert GAUSSIAN_SIGMA_TO_FWHM == gaussian_sigma_to_fwhm
def test_Trapezoid1D():
    """Regression test for https://github.com/astropy/astropy/issues/1721"""
    model = models.Trapezoid1D(amplitude=4.2, x_0=2.0, width=1.0, slope=3)
    xx = np.linspace(0, 4, 8)
    yy = model(xx)
    # Reference values recorded when the issue was fixed.
    yy_ref = [0., 1.41428571, 3.12857143, 4.2, 4.2, 3.12857143, 1.41428571, 0.]
    assert_allclose(yy, yy_ref, rtol=0, atol=1e-6)
def test_Gaussian2D():
    """
    Test rotated elliptical Gaussian2D model.
    https://github.com/astropy/astropy/pull/2038
    """
    # Elliptical Gaussian rotated by 30 degrees (pi/6).
    model = models.Gaussian2D(4.2, 1.7, 3.1, x_stddev=5.1, y_stddev=3.3,
                              theta=np.pi/6.)
    y, x = np.mgrid[0:5, 0:5]
    g = model(x, y)
    # Regression reference values from PR #2038.
    g_ref = [[3.01907812, 2.99051889, 2.81271552, 2.5119566, 2.13012709],
             [3.55982239, 3.6086023, 3.4734158, 3.17454575, 2.75494838],
             [3.88059142, 4.0257528, 3.96554926, 3.70908389, 3.29410187],
             [3.91095768, 4.15212857, 4.18567526, 4.00652015, 3.64146544],
             [3.6440466, 3.95922417, 4.08454159, 4.00113878, 3.72161094]]
    assert_allclose(g, g_ref, rtol=0, atol=1e-6)
    # FWHM per axis (stddev scaled by 2*sqrt(2*ln 2)).
    assert_allclose([model.x_fwhm, model.y_fwhm],
                    [12.009582229657841, 7.7709061486021325])
def test_Gaussian2DCovariance():
    """
    Test rotated elliptical Gaussian2D model when cov_matrix is input.
    https://github.com/astropy/astropy/pull/2199
    """
    cov_matrix = [[49., -16.], [-16., 9.]]
    model = models.Gaussian2D(17., 2.0, 2.5, cov_matrix=cov_matrix)
    y, x = np.mgrid[0:5, 0:5]
    g = model(x, y)
    # Regression reference values from PR #2199.
    g_ref = [[4.3744505, 5.8413977, 7.42988694, 9.00160175, 10.38794269],
             [8.83290201, 10.81772851, 12.61946384, 14.02225593, 14.84113227],
             [13.68528889, 15.37184621, 16.44637743, 16.76048705, 16.26953638],
             [16.26953638, 16.76048705, 16.44637743, 15.37184621, 13.68528889],
             [14.84113227, 14.02225593, 12.61946384, 10.81772851, 8.83290201]]
    assert_allclose(g, g_ref, rtol=0, atol=1e-6)
def test_Gaussian2DRotation():
    """A rotated Gaussian2D at a rotated point must equal the unrotated model."""
    shared = dict(amplitude=42, x_mean=0, y_mean=0, x_stddev=2, y_stddev=3)
    theta = Angle(10, 'deg')
    unrotated = models.Gaussian2D(theta=0, **shared)
    rotated = models.Gaussian2D(theta=theta.radian, **shared)
    point1 = (shared['x_mean'] + 2 * shared['x_stddev'],
              shared['y_mean'] + 2 * shared['y_stddev'])
    point2 = models.Rotation2D(angle=theta.degree)(*point1)
    assert_allclose(unrotated(*point1), rotated(*point2))
def test_Gaussian2D_invalid_inputs():
    """cov_matrix must be rejected when combined with explicit shape parameters."""
    x_stddev, y_stddev, theta = 5.1, 3.3, 10
    cov_matrix = [[49., -16.], [-16., 9.]]
    # valid parameter combinations must construct without error
    models.Gaussian2D()
    models.Gaussian2D(x_stddev=x_stddev, y_stddev=y_stddev, theta=theta)
    models.Gaussian2D(x_stddev=None, y_stddev=y_stddev, theta=theta)
    models.Gaussian2D(x_stddev=x_stddev, y_stddev=None, theta=theta)
    models.Gaussian2D(x_stddev=x_stddev, y_stddev=y_stddev, theta=None)
    models.Gaussian2D(cov_matrix=cov_matrix)
    # cov_matrix together with any explicit stddev/theta is an error
    for bad_kwargs in ({'x_stddev': 0}, {'y_stddev': 0}, {'theta': 0}):
        with pytest.raises(InputParameterError):
            models.Gaussian2D(cov_matrix=cov_matrix, **bad_kwargs)
@pytest.mark.parametrize('gamma', (10, -10))
def test_moffat_fwhm(gamma):
    """Moffat FWHM is positive and independent of the sign of gamma."""
    expected = 34.641016151377542
    moffat_1d = models.Moffat1D(gamma=gamma, alpha=0.5)
    moffat_2d = models.Moffat2D(gamma=gamma, alpha=0.5)
    fwhms = [moffat_1d.fwhm, moffat_2d.fwhm]
    assert_allclose(fwhms, expected)
    assert_array_less(0, fwhms)
def test_RedshiftScaleFactor():
    """Like ``test_ScaleModel()``."""
    # Scale by a scalar: x -> x * (1 + z) with z = 0.4
    m = models.RedshiftScaleFactor(0.4)
    assert m(0) == 0
    assert_array_equal(m([1, 2]), [1.4, 2.8])
    assert_allclose(m.inverse(m([1, 2])), [1, 2])
    # Scale by a list: one model per redshift in a model set
    m = models.RedshiftScaleFactor([-0.5, 0, 0.5], n_models=3)
    assert_array_equal(m(0), 0)
    assert_array_equal(m([1, 2], model_set_axis=False),
                       [[0.5, 1], [1, 2], [1.5, 3]])
    assert_allclose(m.inverse(m([1, 2], model_set_axis=False)),
                    [[1, 2], [1, 2], [1, 2]])
def test_RedshiftScaleFactor_inverse():
    """Round-trip through the inverse recovers the input."""
    model = models.RedshiftScaleFactor(1.2345)
    value = 6.789
    assert_allclose(model.inverse(model(value)), value)
def test_RedshiftScaleFactor_inverse_bounding_box():
    """The inverse model's bounding box is the forward image of the original."""
    model = models.RedshiftScaleFactor(2)
    model.bounding_box = (1, 5)
    assert model.bounding_box == (1, 5)
    inverse = model.inverse
    assert inverse.bounding_box == (3, 15)
    assert_allclose(inverse(model(4, with_bounding_box=True), with_bounding_box=True), 4)
def test_Ellipse2D():
    """Test Ellipse2D model."""
    amplitude = 7.5
    x0, y0 = 15, 15
    theta = Angle(45, 'deg')
    em = models.Ellipse2D(amplitude, x0, y0, 7, 3, theta.radian)
    y, x = np.mgrid[0:30, 0:30]
    e = em(x, y)
    # Inside the ellipse the model is exactly ``amplitude``; outside it is 0.
    assert np.all(e[e > 0] == amplitude)
    assert e[y0, x0] == amplitude
    # A rotated ellipse evaluated at a correspondingly rotated point must
    # agree with the unrotated ellipse at the original point.
    rotation = models.Rotation2D(angle=theta.degree)
    point1 = [2, 0]  # Rotation2D center is (0, 0)
    point2 = rotation(*point1)
    point1 = np.array(point1) + [x0, y0]
    point2 = np.array(point2) + [x0, y0]
    e1 = models.Ellipse2D(amplitude, x0, y0, 7, 3, theta=0.)
    e2 = models.Ellipse2D(amplitude, x0, y0, 7, 3, theta=theta.radian)
    assert e1(*point1) == e2(*point2)
def test_Ellipse2D_circular():
    """Test that circular Ellipse2D agrees with Disk2D [3736]."""
    amplitude, radius = 7.5, 10
    grid_size = (radius * 2) + 1
    y, x = np.mgrid[0:grid_size, 0:grid_size]
    circle = models.Ellipse2D(amplitude, radius, radius, radius, radius,
                              theta=0)
    disk = models.Disk2D(amplitude, radius, radius, radius)
    assert np.all(circle(x, y) == disk(x, y))
def test_Scale_inverse():
    """Round-trip through Scale.inverse recovers the input."""
    model = models.Scale(1.2345)
    value = 6.789
    assert_allclose(model.inverse(model(value)), value)
def test_Scale_inverse_bounding_box():
    """The inverse Scale maps the bounding box through the forward model."""
    model = models.Scale(2)
    model.bounding_box = (1, 5)
    assert model.bounding_box == (1, 5)
    inverse = model.inverse
    assert inverse.bounding_box == (2, 10)
    assert inverse(model(4, with_bounding_box=True), with_bounding_box=True) == 4.0
def test_Multiply_inverse():
    """Round-trip through Multiply.inverse recovers the input."""
    model = models.Multiply(1.2345)
    value = 6.789
    assert_allclose(model.inverse(model(value)), value)
def test_Multiply_inverse_bounding_box():
    """The inverse Multiply maps the bounding box through the forward model."""
    model = models.Multiply(2)
    model.bounding_box = (1, 5)
    assert model.bounding_box == (1, 5)
    inverse = model.inverse
    assert inverse.bounding_box == (2, 10)
    assert inverse(model(4, with_bounding_box=True), with_bounding_box=True) == 4.0
def test_Shift_inverse():
    """Round-trip through Shift.inverse recovers the input."""
    model = models.Shift(1.2345)
    value = 6.789
    assert_allclose(model.inverse(model(value)), value)
def test_Shift_inverse_bounding_box():
    """The inverse Shift maps the bounding box through the forward model."""
    model = models.Shift(10)
    model.bounding_box = (1, 5)
    assert model.bounding_box == (1, 5)
    inverse = model.inverse
    assert inverse.bounding_box == (11, 15)
    assert inverse(model(4, with_bounding_box=True), with_bounding_box=True) == 4.0
@pytest.mark.skipif('not HAS_SCIPY')
def test_Shift_model_levmar_fit():
    """Test fitting Shift model with LevMarLSQFitter (issue #6103)."""
    x = np.arange(10)
    y = x + 0.1
    fitter = fitting.LevMarLSQFitter()
    # A linear model fitted with a nonlinear fitter triggers a warning.
    with pytest.warns(AstropyUserWarning,
                      match='Model is linear in parameters'):
        fitted = fitter(models.Shift(), x, y)
    assert_allclose(fitted.parameters, [0.1], atol=1e-15)
def test_Shift_model_set_linear_fit():
    """Test linear fitting of Shift model (issue #6103)."""
    x = np.arange(10)
    targets = np.array([x + 0.1, x - 0.2])
    model_set = models.Shift(offset=[0, 0], n_models=2)
    fitted = fitting.LinearLSQFitter()(model_set, x, targets)
    assert_allclose(fitted.parameters, [0.1, -0.2], atol=1e-15)
@pytest.mark.parametrize('Model', (models.Scale, models.Multiply))
def test_Scale_model_set_linear_fit(Model):
    """Test linear fitting of Scale model (#6103)."""
    x = np.arange(-3, 7)
    targets = np.array([1.15 * x, 0.96 * x])
    model_set = Model(factor=[0, 0], n_models=2)
    fitted = fitting.LinearLSQFitter()(model_set, x, targets)
    assert_allclose(fitted.parameters, [1.15, 0.96], atol=1e-15)
# https://github.com/astropy/astropy/issues/6178
def test_Ring2D_rout():
    """width must be derived as r_out - r_in when r_out is supplied."""
    ring = models.Ring2D(amplitude=1, x_0=1, y_0=1, r_in=2, r_out=5)
    assert ring.width.value == 3
@pytest.mark.skipif("not HAS_SCIPY")
def test_Voigt1D():
    """Fitting a perturbed Voigt1D must recover the generating parameters."""
    reference = models.Voigt1D(amplitude_L=-0.5, x_0=1.0, fwhm_L=5.0, fwhm_G=5.0)
    xarr = np.linspace(-5.0, 5.0, num=40)
    initial = models.Voigt1D(amplitude_L=-1.0, x_0=1.0, fwhm_L=5.0, fwhm_G=5.0)
    fitted = fitting.LevMarLSQFitter()(initial, xarr, reference(xarr))
    assert_allclose(fitted.param_sets, reference.param_sets)
@pytest.mark.skipif("not HAS_SCIPY")
@pytest.mark.parametrize('algorithm', ('humlicek2', 'wofz'))
def test_Voigt1D_norm(algorithm):
    """Test integral of normalized Voigt profile."""
    from scipy.integrate import quad
    profile = models.Voigt1D(amplitude_L=1.0/np.pi, x_0=0.0, fwhm_L=2.0,
                             fwhm_G=1.5, method=algorithm)
    # wofz is accurate to near machine precision; humlicek2 is approximate.
    atol = 1e-14 if algorithm == 'wofz' else 1e-8
    assert_allclose(quad(profile, -np.inf, np.inf)[0], 1.0, atol=atol)
@pytest.mark.skipif("not HAS_SCIPY")
@pytest.mark.parametrize('doppler', (1.e-3, 1.e-2, 0.1, 0.5, 1.0, 2.5, 5.0, 10))
def test_Voigt1D_hum2(doppler):
    """Verify accuracy of Voigt profile in Humlicek approximation to Faddeeva.cc (SciPy)."""
    x = np.linspace(-20, 20, 400001)
    # Reference: the 'wofz' (Faddeeva) implementation, values and derivatives.
    voi_w = models.Voigt1D(amplitude_L=2.0/np.pi, fwhm_L=1.0, fwhm_G=doppler, method='wofz')
    vf_w = voi_w(x)
    dvda_w = voi_w.fit_deriv(x, x_0=0, amplitude_L=2.0/np.pi, fwhm_L=1.0, fwhm_G=doppler)
    # Candidate: the Humlicek w4 rational approximation.
    voi_h = models.Voigt1D(amplitude_L=2.0/np.pi, fwhm_L=1.0, fwhm_G=doppler, method='humlicek2')
    vf_h = voi_h(x)
    dvda_h = voi_h.fit_deriv(x, x_0=0, amplitude_L=2.0/np.pi, fwhm_L=1.0, fwhm_G=doppler)
    # Tolerances widen as the Doppler (Gaussian) width shrinks, where the
    # approximation is least accurate.
    assert_allclose(vf_h, vf_w, rtol=1e-7 * (2 + 1 / np.sqrt(doppler)))
    assert_allclose(dvda_h, dvda_w, rtol=1e-9, atol=1e-7 * (1 + 30 / doppler))
@pytest.mark.skipif("not HAS_SCIPY")
def test_KingProjectedAnalytic1D_fit():
    """Fitting a King profile must recover the generating parameters."""
    reference = models.KingProjectedAnalytic1D(amplitude=1, r_core=1, r_tide=2)
    xarr = np.linspace(0.1, 2, 10)
    initial = models.KingProjectedAnalytic1D(amplitude=1, r_core=1, r_tide=1)
    fitted = fitting.LevMarLSQFitter()(initial, xarr, reference(xarr))
    assert_allclose(fitted.param_sets, reference.param_sets)
def test_ExponentialAndLogarithmic1D_fit():
    """Exponential1D and Logarithmic1D must round-trip through their inverses."""
    xarr = np.linspace(0.1, 10., 200)
    for model in (models.Exponential1D(amplitude=1, tau=1),
                  models.Logarithmic1D(amplitude=1, tau=1)):
        assert_allclose(xarr, model.inverse(model(xarr)))
|
|
import logging.handlers
import re
import sys
import types
import warnings
from django.utils import six
from django.utils.deprecation import RemovedInDjango19Warning
warnings.warn("django.utils.dictconfig will be removed in Django 1.9.",
RemovedInDjango19Warning, stacklevel=2)
# This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Matches a Python identifier, case-insensitively.
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)

def valid_ident(s):
    """Return True when ``s`` is a valid Python identifier; raise ValueError otherwise."""
    if IDENTIFIER.match(s) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True
#
# This function is defined in logging only in recent versions of Python
#
try:
    # Recent Python versions provide this validator in the stdlib.
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        """Validate a logging level (int or level name) and return its numeric value.

        Raises ValueError for an unknown level name and TypeError for any
        other type.
        """
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            # Name lookup via the (pre-2.7) logging._levelNames mapping.
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
    """A converting dictionary wrapper.

    Values fetched from the dict are passed through
    ``self.configurator.convert()``; the ``configurator`` attribute is
    assigned externally by the owning BaseConfigurator.
    """

    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            # Converted containers remember where they came from so that
            # cfg:// references can navigate back up the structure.
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def get(self, key, default=None):
        value = dict.get(self, key, default)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, key, default=None):
        value = dict.pop(self, key, default)
        result = self.configurator.convert(value)
        # No save-back here: the key has just been removed from the dict.
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result
class ConvertingList(list):
    """A converting list wrapper.

    Items fetched from the list are passed through
    ``self.configurator.convert()``; ``configurator`` is assigned externally.
    """

    def __getitem__(self, key):
        value = list.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, idx=-1):
        value = list.pop(self, idx)
        result = self.configurator.convert(value)
        # No save-back: the item has just been removed from the list.
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
        return result
class ConvertingTuple(tuple):
    """A converting tuple wrapper.

    Tuples are immutable, so converted items cannot be saved back; they are
    converted on every access instead.
    """

    def __getitem__(self, key):
        value = tuple.__getitem__(self, key)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    # Protocol-style values look like "ext://sys.stderr" or "cfg://handlers.h1".
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps conversion prefix -> name of the converter method on this class.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        # Wrap the raw config so that nested values are converted lazily.
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # The attribute may live in a not-yet-imported submodule;
                    # import it and retry once.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            # Re-raise as ValueError, preserving the original cause/traceback.
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol.

        Walks a dotted/indexed path (e.g. "handlers.h1[0].level") through
        ``self.config`` and returns the value it designates.
        """
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                # All-digit index: try numeric access first
                                # (most likely), fall back to string key.
                                n = int(idx) # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                 isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types): # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        # Resolve dotted-path strings (and Python 2 old-style classes) to a callable.
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = {k: config[k] for k in config if valid_ident(k)}
        result = c(**kwargs)
        # '.' entries are set as attributes on the constructed object.
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.

    This is the Python 2 backport flavour of logging.config's
    dictConfig machinery: note the use of StandardError (Python 2 only)
    and the 2.7-specific incremental-handler path below.
    """
    def configure(self):
        """Do the configuration.

        Validates the schema version, then applies either an incremental
        update (levels/propagation only) or a full reconfiguration of
        formatters, filters, handlers and loggers, all under the logging
        module lock.
        """
        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        # Shared read-only default for the config.get() lookups below.
        EMPTY_DICT = {}
        # Hold the module lock for the whole (re)configuration so other
        # threads never observe a half-configured logging tree.
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r' % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except StandardError as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)
                # Full reconfiguration: drop all previously registered
                # handlers before rebuilding everything from the dict.
                logging._handlers.clear()
                del logging._handlerList[:]
                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))
                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except StandardError as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters
                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        # Collect the contiguous run of dotted children
                        # following `name` in the sorted list; these are
                        # spared from disabling further below.
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1 # look at the entry after name
                        while (i < num_existing) and\
                            (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True
                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()
    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()'] # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                #(e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result
    def configure_filter(self, config):
        """Configure a filter from a dictionary.

        A '()' key selects a custom factory; otherwise a plain
        logging.Filter is built from the optional 'name' key.
        """
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result
    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except StandardError as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))
    def configure_handler(self, config):
        """Configure a handler from a dictionary.

        Resolves formatter/filter references by name, instantiates either
        a custom factory ('()') or the named handler class, and applies
        level, formatter and filters to the result.
        """
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except StandardError as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except StandardError as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = {k: config[k] for k in config if valid_ident(k)}
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result
    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except StandardError as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))
    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.

        Sets the level if given; on a full (non-incremental) pass also
        replaces any existing handlers/filters with the configured ones.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)
    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate
    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)
# Hook point: applications may substitute a DictConfigurator subclass
# before calling dictConfig() to customize the configuration behavior.
dictConfigClass = DictConfigurator
def dictConfig(config):
    """Configure logging using a dictionary.

    Thin convenience wrapper: instantiates dictConfigClass with the
    given dict and runs its configure() method.
    """
    dictConfigClass(config).configure()
|
|
# Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Evaluates Hessian of Lagrangian for AC OPF.
"""
from numpy import array, zeros, ones, exp, arange, r_, flatnonzero as find
from scipy.sparse import vstack, hstack, issparse, csr_matrix as sparse
from pypower.idx_gen import PG, QG
from pypower.idx_brch import F_BUS, T_BUS
from pypower.idx_cost import MODEL, POLYNOMIAL
from pypower.polycost import polycost
from pypower.d2Sbus_dV2 import d2Sbus_dV2
from pypower.dSbr_dV import dSbr_dV
from pypower.dIbr_dV import dIbr_dV
from pypower.d2AIbr_dV2 import d2AIbr_dV2
from pypower.d2ASbr_dV2 import d2ASbr_dV2
from pypower.opf_costfcn import opf_costfcn
from pypower.opf_consfcn import opf_consfcn
def opf_hessfcn(x, lmbda, om, Ybus, Yf, Yt, ppopt, il=None, cost_mult=1.0):
    """Evaluates Hessian of Lagrangian for AC OPF.
    Hessian evaluation function for AC optimal power flow, suitable
    for use with L{pips}.
    Examples::
        Lxx = opf_hessfcn(x, lmbda, om, Ybus, Yf, Yt, ppopt)
        Lxx = opf_hessfcn(x, lmbda, om, Ybus, Yf, Yt, ppopt, il)
        Lxx = opf_hessfcn(x, lmbda, om, Ybus, Yf, Yt, ppopt, il, cost_mult)
    @param x: optimization vector
    @param lmbda: C{eqnonlin} - Lagrange multipliers on power balance
    equations. C{ineqnonlin} - Kuhn-Tucker multipliers on constrained
    branch flows.
    @param om: OPF model object
    @param Ybus: bus admittance matrix
    @param Yf: admittance matrix for "from" end of constrained branches
    @param Yt: admittance matrix for "to" end of constrained branches
    @param ppopt: PYPOWER options vector
    @param il: (optional) vector of branch indices corresponding to
    branches with flow limits (all others are assumed to be unconstrained).
    The default is C{range(nl)} (all branches). C{Yf} and C{Yt} contain
    only the rows corresponding to C{il}.
    @param cost_mult: (optional) Scale factor to be applied to the cost
    (default = 1).
    @return: Hessian of the Lagrangian.
    @see: L{opf_costfcn}, L{opf_consfcn}
    @author: Ray Zimmerman (PSERC Cornell)
    @author: Carlos E. Murillo-Sanchez (PSERC Cornell & Universidad
    Autonoma de Manizales)
    """
    ##----- initialize -----
    ## unpack data
    ppc = om.get_ppc()
    baseMVA, bus, gen, branch, gencost = \
        ppc["baseMVA"], ppc["bus"], ppc["gen"], ppc["branch"], ppc["gencost"]
    cp = om.get_cost_params()
    N, Cw, H, dd, rh, kk, mm = \
        cp["N"], cp["Cw"], cp["H"], cp["dd"], cp["rh"], cp["kk"], cp["mm"]
    vv, _, _, _ = om.get_idx()
    ## unpack needed parameters
    nb = bus.shape[0]          ## number of buses
    nl = branch.shape[0]       ## number of branches
    ng = gen.shape[0]          ## number of dispatchable injections
    nxyz = len(x)              ## total number of control vars of all types
    ## set default constrained lines
    if il is None:
        il = arange(nl)        ## all lines have limits by default
    nl2 = len(il)              ## number of constrained lines
    ## grab Pg & Qg
    Pg = x[vv["i1"]["Pg"]:vv["iN"]["Pg"]]  ## active generation in p.u.
    Qg = x[vv["i1"]["Qg"]:vv["iN"]["Qg"]]  ## reactive generation in p.u.
    ## put Pg & Qg back in gen
    gen[:, PG] = Pg * baseMVA  ## active generation in MW
    gen[:, QG] = Qg * baseMVA  ## reactive generation in MVAr
    ## reconstruct V
    Va = x[vv["i1"]["Va"]:vv["iN"]["Va"]]
    Vm = x[vv["i1"]["Vm"]:vv["iN"]["Vm"]]
    V = Vm * exp(1j * Va)
    nxtra = nxyz - 2 * nb
    ## P cost rows come first in gencost; Q cost rows (if present) follow.
    pcost = gencost[arange(ng), :]
    if gencost.shape[0] > ng:
        qcost = gencost[arange(ng, 2 * ng), :]
    else:
        qcost = array([])
    ## ----- evaluate d2f -----
    d2f_dPg2 = zeros(ng)#sparse((ng, 1))               ## w.r.t. p.u. Pg
    d2f_dQg2 = zeros(ng)#sparse((ng, 1))               ## w.r.t. p.u. Qg
    ipolp = find(pcost[:, MODEL] == POLYNOMIAL)
    d2f_dPg2[ipolp] = \
            baseMVA**2 * polycost(pcost[ipolp, :], Pg[ipolp] * baseMVA, 2)
    ## BUG FIX: the builtin any() raises ValueError on a 2-D numpy array;
    ## use .size to test "Qg costs are present" instead.
    if qcost.size:    ## Qg is not free
        ipolq = find(qcost[:, MODEL] == POLYNOMIAL)
        d2f_dQg2[ipolq] = \
                baseMVA**2 * polycost(qcost[ipolq, :], Qg[ipolq] * baseMVA, 2)
    i = r_[arange(vv["i1"]["Pg"], vv["iN"]["Pg"]),
           arange(vv["i1"]["Qg"], vv["iN"]["Qg"])]
#    d2f = sparse((vstack([d2f_dPg2, d2f_dQg2]).toarray().flatten(),
#                  (i, i)), shape=(nxyz, nxyz))
    d2f = sparse((r_[d2f_dPg2, d2f_dQg2], (i, i)), (nxyz, nxyz))
    ## generalized cost
    if issparse(N) and N.nnz > 0:
        nw = N.shape[0]
        r = N * x - rh                    ## Nx - rhat
        iLT = find(r < -kk)               ## below dead zone
        iEQ = find((r == 0) & (kk == 0))  ## dead zone doesn't exist
        iGT = find(r > kk)                ## above dead zone
        iND = r_[iLT, iEQ, iGT]           ## rows that are Not in the Dead region
        iL = find(dd == 1)                ## rows using linear function
        iQ = find(dd == 2)                ## rows using quadratic function
        LL = sparse((ones(len(iL)), (iL, iL)), (nw, nw))
        QQ = sparse((ones(len(iQ)), (iQ, iQ)), (nw, nw))
        kbar = sparse((r_[ones(len(iLT)), zeros(len(iEQ)), -ones(len(iGT))],
                       (iND, iND)), (nw, nw)) * kk
        rr = r + kbar                     ## apply non-dead zone shift
        M = sparse((mm[iND], (iND, iND)), (nw, nw))  ## dead zone or scale
        diagrr = sparse((rr, (arange(nw), arange(nw))), (nw, nw))
        ## linear rows multiplied by rr(i), quadratic rows by rr(i)^2
        w = M * (LL + QQ * diagrr) * rr
        HwC = H * w + Cw
        AA = N.T * M * (LL + 2 * QQ * diagrr)
        d2f = d2f + AA * H * AA.T + 2 * N.T * M * QQ * \
                sparse((HwC, (arange(nw), arange(nw))), (nw, nw)) * N
    d2f = d2f * cost_mult
    ##----- evaluate Hessian of power balance constraints -----
    ## BUG FIX: use floor division so nlam stays an int under Python 3
    ## true division (a float is not a valid slice index).
    nlam = len(lmbda["eqnonlin"]) // 2
    lamP = lmbda["eqnonlin"][:nlam]
    lamQ = lmbda["eqnonlin"][nlam:nlam + nlam]
    Gpaa, Gpav, Gpva, Gpvv = d2Sbus_dV2(Ybus, V, lamP)
    Gqaa, Gqav, Gqva, Gqvv = d2Sbus_dV2(Ybus, V, lamQ)
    d2G = vstack([
            hstack([
                vstack([hstack([Gpaa, Gpav]),
                        hstack([Gpva, Gpvv])]).real +
                vstack([hstack([Gqaa, Gqav]),
                        hstack([Gqva, Gqvv])]).imag,
                sparse((2 * nb, nxtra))]),
            hstack([
                sparse((nxtra, 2 * nb)),
                sparse((nxtra, nxtra))
            ])
        ], "csr")
    ##----- evaluate Hessian of flow constraints -----
    ## BUG FIX: floor division, as for nlam above.
    nmu = len(lmbda["ineqnonlin"]) // 2
    muF = lmbda["ineqnonlin"][:nmu]
    muT = lmbda["ineqnonlin"][nmu:nmu + nmu]
    if ppopt['OPF_FLOW_LIM'] == 2:  ## current
        dIf_dVa, dIf_dVm, dIt_dVa, dIt_dVm, If, It = dIbr_dV(Yf, Yt, V)
        Hfaa, Hfav, Hfva, Hfvv = d2AIbr_dV2(dIf_dVa, dIf_dVm, If, Yf, V, muF)
        Htaa, Htav, Htva, Htvv = d2AIbr_dV2(dIt_dVa, dIt_dVm, It, Yt, V, muT)
    else:
        f = branch[il, F_BUS].astype(int)  ## list of "from" buses
        t = branch[il, T_BUS].astype(int)  ## list of "to" buses
        ## connection matrix for line & from buses
        Cf = sparse((ones(nl2), (arange(nl2), f)), (nl2, nb))
        ## connection matrix for line & to buses
        Ct = sparse((ones(nl2), (arange(nl2), t)), (nl2, nb))
        dSf_dVa, dSf_dVm, dSt_dVa, dSt_dVm, Sf, St = \
                dSbr_dV(branch[il,:], Yf, Yt, V)
        if ppopt['OPF_FLOW_LIM'] == 1:  ## real power
            Hfaa, Hfav, Hfva, Hfvv = d2ASbr_dV2(dSf_dVa.real, dSf_dVm.real,
                                                Sf.real, Cf, Yf, V, muF)
            Htaa, Htav, Htva, Htvv = d2ASbr_dV2(dSt_dVa.real, dSt_dVm.real,
                                                St.real, Ct, Yt, V, muT)
        else:  ## apparent power
            Hfaa, Hfav, Hfva, Hfvv = \
                    d2ASbr_dV2(dSf_dVa, dSf_dVm, Sf, Cf, Yf, V, muF)
            Htaa, Htav, Htva, Htvv = \
                    d2ASbr_dV2(dSt_dVa, dSt_dVm, St, Ct, Yt, V, muT)
    d2H = vstack([
            hstack([
                vstack([hstack([Hfaa, Hfav]),
                        hstack([Hfva, Hfvv])]) +
                vstack([hstack([Htaa, Htav]),
                        hstack([Htva, Htvv])]),
                sparse((2 * nb, nxtra))
            ]),
            hstack([
                sparse((nxtra, 2 * nb)),
                sparse((nxtra, nxtra))
            ])
        ], "csr")
    ##----- do numerical check using (central) finite differences -----
    if 0:
        nx = len(x)
        step = 1e-5
        num_d2f = sparse((nx, nx))
        num_d2G = sparse((nx, nx))
        num_d2H = sparse((nx, nx))
        for i in range(nx):
            ## BUG FIX: copy x; plain assignment aliases the array and the
            ## perturbations below would corrupt x for later iterations.
            xp = x.copy()
            xm = x.copy()
            xp[i] = x[i] + step / 2
            xm[i] = x[i] - step / 2
            ## evaluate cost & gradients
            _, dfp = opf_costfcn(xp, om)
            _, dfm = opf_costfcn(xm, om)
            ## evaluate constraints & gradients
            _, _, dHp, dGp = opf_consfcn(xp, om, Ybus, Yf, Yt, ppopt, il)
            _, _, dHm, dGm = opf_consfcn(xm, om, Ybus, Yf, Yt, ppopt, il)
            num_d2f[:, i] = cost_mult * (dfp - dfm) / step
            num_d2G[:, i] = (dGp - dGm) * lmbda["eqnonlin"] / step
            num_d2H[:, i] = (dHp - dHm) * lmbda["ineqnonlin"] / step
        d2f_err = max(max(abs(d2f - num_d2f)))
        d2G_err = max(max(abs(d2G - num_d2G)))
        d2H_err = max(max(abs(d2H - num_d2H)))
        if d2f_err > 1e-6:
            print('Max difference in d2f: %g' % d2f_err)
        if d2G_err > 1e-5:
            print('Max difference in d2G: %g' % d2G_err)
        if d2H_err > 1e-6:
            print('Max difference in d2H: %g' % d2H_err)
    return d2f + d2G + d2H
|
|
"""Rewrite assertion AST to produce nice error messages"""
from __future__ import absolute_import, division, print_function
import ast
import errno
import itertools
import imp
import marshal
import os
import re
import six
import string
import struct
import sys
import types
import atomicwrites
import py
from _pytest.assertion import util
from _pytest.compat import PurePath, spec_from_file_location
from _pytest.paths import fnmatch_ex
# pytest caches rewritten pycs in __pycache__.
# PYTEST_TAG is the cache tag embedded in the pyc filename; prefer the
# interpreter's own tag (imp.get_tag, Python 3.2+/2.7+), otherwise
# synthesize one from the implementation name and version.
if hasattr(imp, "get_tag"):
    PYTEST_TAG = imp.get_tag() + "-PYTEST"
else:
    if hasattr(sys, "pypy_version_info"):
        impl = "pypy"
    elif sys.platform == "java":
        impl = "jython"
    else:
        impl = "cpython"
    ver = sys.version_info
    PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
    del ver, impl
# ".pyc" normally; ".pyo" when running with -O (__debug__ is False).
PYC_EXT = ".py" + (__debug__ and "c" or "o")
PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
# Python 2 decodes source as ASCII unless a coding cookie says otherwise.
ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
# ast.Call lost its starargs/kwargs slots in Python 3.5; ast_Call papers
# over the difference so rewriting code can build calls uniformly.
if sys.version_info >= (3, 5):
    ast_Call = ast.Call
else:
    def ast_Call(a, b, c):
        return ast.Call(a, b, c, None, None)
class AssertionRewritingHook(object):
    """PEP302 Import hook which rewrites asserts.

    Installed on sys.meta_path by pytest; find_module() locates candidate
    test modules, rewrites their assert statements, and caches the result
    as a special pyc so subsequent runs can skip the rewrite.
    """
    def __init__(self, config):
        self.config = config
        self.fnpats = config.getini("python_files")
        self.session = None
        # maps module name -> (code object, pyc path) between find_module
        # and load_module
        self.modules = {}
        self._rewritten_names = set()
        self._register_with_pkg_resources()
        # module names explicitly marked for rewriting via mark_rewrite()
        self._must_rewrite = set()
        # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file,
        # which might result in infinite recursion (#3506)
        self._writing_pyc = False
        self._basenames_to_check_rewrite = {"conftest"}
        self._marked_for_rewrite_cache = {}
        self._session_paths_checked = False
    def set_session(self, session):
        self.session = session
        self._session_paths_checked = False
    def _imp_find_module(self, name, path=None):
        """Indirection so we can mock calls to find_module originated from the hook during testing"""
        return imp.find_module(name, path)
    def find_module(self, name, path=None):
        """PEP302 finder: return self as loader for modules to rewrite.

        Returns None (declining the import) for anything that should not
        be rewritten; otherwise rewrites/loads the cached pyc and stashes
        the code object in self.modules for load_module().
        """
        if self._writing_pyc:
            return None
        state = self.config._assertstate
        if self._early_rewrite_bailout(name, state):
            return None
        state.trace("find_module called for: %s" % name)
        names = name.rsplit(".", 1)
        lastname = names[-1]
        pth = None
        if path is not None:
            # Starting with Python 3.3, path is a _NamespacePath(), which
            # causes problems if not converted to list.
            path = list(path)
            if len(path) == 1:
                pth = path[0]
        if pth is None:
            try:
                fd, fn, desc = self._imp_find_module(lastname, path)
            except ImportError:
                return None
            if fd is not None:
                fd.close()
            tp = desc[2]
            if tp == imp.PY_COMPILED:
                if hasattr(imp, "source_from_cache"):
                    try:
                        fn = imp.source_from_cache(fn)
                    except ValueError:
                        # Python 3 doesn't like orphaned but still-importable
                        # .pyc files.
                        fn = fn[:-1]
                else:
                    fn = fn[:-1]
            elif tp != imp.PY_SOURCE:
                # Don't know what this is.
                return None
        else:
            fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
        fn_pypath = py.path.local(fn)
        if not self._should_rewrite(name, fn_pypath, state):
            return None
        self._rewritten_names.add(name)
        # The requested module looks like a test file, so rewrite it. This is
        # the most magical part of the process: load the source, rewrite the
        # asserts, and load the rewritten source. We also cache the rewritten
        # module code in a special pyc. We must be aware of the possibility of
        # concurrent pytest processes rewriting and loading pycs. To avoid
        # tricky race conditions, we maintain the following invariant: The
        # cached pyc is always a complete, valid pyc. Operations on it must be
        # atomic. POSIX's atomic rename comes in handy.
        write = not sys.dont_write_bytecode
        cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
        if write:
            try:
                os.mkdir(cache_dir)
            except OSError:
                e = sys.exc_info()[1].errno
                if e == errno.EEXIST:
                    # Either the __pycache__ directory already exists (the
                    # common case) or it's blocked by a non-dir node. In the
                    # latter case, we'll ignore it in _write_pyc.
                    pass
                elif e in [errno.ENOENT, errno.ENOTDIR]:
                    # One of the path components was not a directory, likely
                    # because we're in a zip file.
                    write = False
                elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
                    state.trace("read only directory: %r" % fn_pypath.dirname)
                    write = False
                else:
                    raise
        cache_name = fn_pypath.basename[:-3] + PYC_TAIL
        pyc = os.path.join(cache_dir, cache_name)
        # Notice that even if we're in a read-only directory, I'm going
        # to check for a cached pyc. This may not be optimal...
        co = _read_pyc(fn_pypath, pyc, state.trace)
        if co is None:
            state.trace("rewriting %r" % (fn,))
            source_stat, co = _rewrite_test(self.config, fn_pypath)
            if co is None:
                # Probably a SyntaxError in the test.
                return None
            if write:
                self._writing_pyc = True
                try:
                    _write_pyc(state, co, source_stat, pyc)
                finally:
                    self._writing_pyc = False
        else:
            state.trace("found cached rewritten pyc for %r" % (fn,))
        self.modules[name] = co, pyc
        return self
    def _early_rewrite_bailout(self, name, state):
        """
        This is a fast way to get out of rewriting modules. Profiling has
        shown that the call to imp.find_module (inside of the find_module
        from this class) is a major slowdown, so, this method tries to
        filter what we're sure won't be rewritten before getting to it.

        Returns True when the module can safely be skipped.
        """
        if self.session is not None and not self._session_paths_checked:
            self._session_paths_checked = True
            for path in self.session._initialpaths:
                # Make something as c:/projects/my_project/path.py ->
                # ['c:', 'projects', 'my_project', 'path.py']
                parts = str(path).split(os.path.sep)
                # add 'path' to basenames to be checked.
                self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0])
        # Note: conftest already by default in _basenames_to_check_rewrite.
        parts = name.split(".")
        if parts[-1] in self._basenames_to_check_rewrite:
            return False
        # For matching the name it must be as if it was a filename.
        path = PurePath(os.path.sep.join(parts) + ".py")
        for pat in self.fnpats:
            # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based
            # on the name alone because we need to match against the full path
            if os.path.dirname(pat):
                return False
            if fnmatch_ex(pat, path):
                return False
        if self._is_marked_for_rewrite(name, state):
            return False
        state.trace("early skip of rewriting module: %s" % (name,))
        return True
    def _should_rewrite(self, name, fn_pypath, state):
        """Decide whether module *name* at *fn_pypath* gets rewritten."""
        # always rewrite conftest files
        fn = str(fn_pypath)
        if fn_pypath.basename == "conftest.py":
            state.trace("rewriting conftest file: %r" % (fn,))
            return True
        if self.session is not None:
            if self.session.isinitpath(fn):
                state.trace("matched test file (was specified on cmdline): %r" % (fn,))
                return True
        # modules not passed explicitly on the command line are only
        # rewritten if they match the naming convention for test files
        for pat in self.fnpats:
            if fn_pypath.fnmatch(pat):
                state.trace("matched test file %r" % (fn,))
                return True
        return self._is_marked_for_rewrite(name, state)
    def _is_marked_for_rewrite(self, name, state):
        """Check (with caching) whether *name* was mark_rewrite()-ed."""
        try:
            return self._marked_for_rewrite_cache[name]
        except KeyError:
            for marked in self._must_rewrite:
                if name == marked or name.startswith(marked + "."):
                    state.trace("matched marked file %r (from %r)" % (name, marked))
                    self._marked_for_rewrite_cache[name] = True
                    return True
            self._marked_for_rewrite_cache[name] = False
            return False
    def mark_rewrite(self, *names):
        """Mark import names as needing to be rewritten.
        The named module or package as well as any nested modules will
        be rewritten on import.
        """
        already_imported = (
            set(names).intersection(sys.modules).difference(self._rewritten_names)
        )
        for name in already_imported:
            if not AssertionRewriter.is_rewrite_disabled(
                sys.modules[name].__doc__ or ""
            ):
                self._warn_already_imported(name)
        self._must_rewrite.update(names)
        self._marked_for_rewrite_cache.clear()
    def _warn_already_imported(self, name):
        from _pytest.warning_types import PytestWarning
        from _pytest.warnings import _issue_config_warning
        _issue_config_warning(
            PytestWarning("Module already imported so cannot be rewritten: %s" % name),
            self.config,
        )
    def load_module(self, name):
        """PEP302 loader: execute the code object cached by find_module."""
        co, pyc = self.modules.pop(name)
        if name in sys.modules:
            # If there is an existing module object named 'fullname' in
            # sys.modules, the loader must use that existing module. (Otherwise,
            # the reload() builtin will not work correctly.)
            mod = sys.modules[name]
        else:
            # I wish I could just call imp.load_compiled here, but __file__ has to
            # be set properly. In Python 3.2+, this all would be handled correctly
            # by load_compiled.
            mod = sys.modules[name] = imp.new_module(name)
        try:
            mod.__file__ = co.co_filename
            # Normally, this attribute is 3.2+.
            mod.__cached__ = pyc
            mod.__loader__ = self
            # Normally, this attribute is 3.4+
            mod.__spec__ = spec_from_file_location(name, co.co_filename, loader=self)
            six.exec_(co, mod.__dict__)
        except:  # noqa
            if name in sys.modules:
                del sys.modules[name]
            raise
        return sys.modules[name]
    def is_package(self, name):
        """PEP302 optional API: True if *name* resolves to a package."""
        try:
            fd, fn, desc = self._imp_find_module(name)
        except ImportError:
            return False
        if fd is not None:
            fd.close()
        tp = desc[2]
        return tp == imp.PKG_DIRECTORY
    @classmethod
    def _register_with_pkg_resources(cls):
        """
        Ensure package resources can be loaded from this loader. May be called
        multiple times, as the operation is idempotent.
        """
        try:
            import pkg_resources
            # access an attribute in case a deferred importer is present
            pkg_resources.__name__
        except ImportError:
            return
        # Since pytest tests are always located in the file system, the
        # DefaultProvider is appropriate.
        pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)
    def get_data(self, pathname):
        """Optional PEP302 get_data API.
        """
        with open(pathname, "rb") as f:
            return f.read()
def _write_pyc(state, co, source_stat, pyc):
    """Atomically write code object *co* to the cache file *pyc*.

    The layout deliberately mirrors (C)Python's pyc format -- magic
    number, then little-endian mtime and masked size, then the
    marshalled code -- even though these pycs are never loaded by the
    builtin import machinery. Returns True on success; any environment
    error (permissions, read-only FS, __pycache__ being a plain file,
    ...) is merely traced and reported as False, since the cache is an
    optimization only.
    """
    header_mtime = int(source_stat.mtime)
    header_size = source_stat.size & 0xFFFFFFFF
    try:
        with atomicwrites.atomic_write(pyc, mode="wb", overwrite=True) as fp:
            fp.write(imp.get_magic())
            fp.write(struct.pack("<ll", header_mtime, header_size))
            fp.write(marshal.dumps(co))
    except EnvironmentError as e:
        state.trace("error writing pyc file at %s: errno=%s" % (pyc, e.errno))
        # we ignore any failure to write the cache file
        # there are many reasons, permission-denied, __pycache__ being a
        # file etc.
        return False
    return True
# Line-ending byte sequences used when scanning source text.
RN = "\r\n".encode("utf-8")
N = "\n".encode("utf-8")
# PEP 263 coding-cookie matcher (only meaningful on the first two lines).
cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
# UTF-8 byte-order mark, spelled as a Python 2 byte string.
BOM_UTF8 = "\xef\xbb\xbf"
def _rewrite_test(config, fn):
    """Try to read and rewrite *fn* and return the code object.

    Returns a ``(source_stat, code)`` tuple on success, or
    ``(None, None)`` if the file cannot be read, decoded, parsed or
    compiled (the real import will surface the error in that case).
    """
    state = config._assertstate
    try:
        stat = fn.stat()
        source = fn.read("rb")
    except EnvironmentError:
        return None, None
    if ASCII_IS_DEFAULT_ENCODING:
        # ASCII is the default encoding in Python 2. Without a coding
        # declaration, Python 2 will complain about any bytes in the file
        # outside the ASCII range. Sadly, this behavior does not extend to
        # compile() or ast.parse(), which prefer to interpret the bytes as
        # latin-1. (At least they properly handle explicit coding cookies.) To
        # preserve this error behavior, we could force ast.parse() to use ASCII
        # as the encoding by inserting a coding cookie. Unfortunately, that
        # messes up line numbers. Thus, we have to check ourselves if anything
        # is outside the ASCII range in the case no encoding is explicitly
        # declared. For more context, see issue #269. Yay for Python 3 which
        # gets this right.
        end1 = source.find("\n")
        end2 = source.find("\n", end1 + 1)
        # PEP 263 allows the coding cookie on the first or second line only.
        if (
            not source.startswith(BOM_UTF8)
            and cookie_re.match(source[0:end1]) is None
            and cookie_re.match(source[end1 + 1 : end2]) is None
        ):
            if hasattr(state, "_indecode"):
                # encodings imported us again, so don't rewrite.
                return None, None
            state._indecode = True
            try:
                try:
                    source.decode("ascii")
                except UnicodeDecodeError:
                    # Let it fail in real import.
                    return None, None
            finally:
                del state._indecode
    try:
        tree = ast.parse(source)
    except SyntaxError:
        # Let this pop up again in the real import.
        state.trace("failed to parse: %r" % (fn,))
        return None, None
    # Mutate the AST in place, then compile the rewritten tree.
    rewrite_asserts(tree, fn, config)
    try:
        co = compile(tree, fn.strpath, "exec", dont_inherit=True)
    except SyntaxError:
        # It's possible that this error is from some bug in the
        # assertion rewriting, but I don't know of a fast way to tell.
        state.trace("failed to compile: %r" % (fn,))
        return None, None
    return stat, co
def _read_pyc(source, pyc, trace=lambda x: None):
    """Possibly read a pytest pyc containing rewritten code.
    Return rewritten code if successful or None if not.
    """
    try:
        fp = open(pyc, "rb")
    except IOError:
        return None
    with fp:
        try:
            mtime = int(source.mtime())
            size = source.size()
            header = fp.read(12)
        except EnvironmentError as e:
            trace("_read_pyc(%s): EnvironmentError %s" % (source, e))
            return None
        # Header layout: 4 magic bytes, then little-endian mtime and size.
        # Any mismatch means the pyc is invalid or out of date.
        stale = (
            len(header) != 12
            or header[:4] != imp.get_magic()
            or struct.unpack("<ll", header[4:]) != (mtime, size)
        )
        if stale:
            trace("_read_pyc(%s): invalid or out of date pyc" % source)
            return None
        try:
            code_obj = marshal.load(fp)
        except Exception as e:
            trace("_read_pyc(%s): marshal.load error %s" % (source, e))
            return None
        if isinstance(code_obj, types.CodeType):
            return code_obj
        trace("_read_pyc(%s): not a code object" % source)
        return None
def rewrite_asserts(mod, module_path=None, config=None):
    """Rewrite the assert statements in mod (an ast.Module), in place."""
    rewriter = AssertionRewriter(module_path, config)
    rewriter.run(mod)
def _saferepr(obj):
    """Get a safe repr of an object for assertion error messages.
    The assertion formatting (util.format_explanation()) requires
    newlines to be escaped since they are a special character for it.
    Normally assertion.util.format_explanation() does this but for a
    custom repr it is possible to contain one of the special escape
    sequences, especially '\n{' and '\n}' are likely to be present in
    JSON reprs.

    Always returns text (unicode on Python 2, str on Python 3).
    """
    r = py.io.saferepr(obj)
    # only occurs in python2.x, repr must return text in python3+
    if isinstance(r, bytes):
        # Represent unprintable bytes as `\x##`
        # (iterating a Python 2 str yields one-character str objects)
        r = u"".join(
            u"\\x{:x}".format(ord(c)) if c not in string.printable else c.decode()
            for c in r
        )
    return r.replace(u"\n", u"\\n")
from _pytest.assertion.util import format_explanation as _format_explanation # noqa
def _format_assertmsg(obj):
    """Format the custom assertion message given.
    For strings this simply replaces newlines with '\n~' so that
    util.format_explanation() will preserve them instead of escaping
    newlines. For other objects py.io.saferepr() is used first.
    """
    # reprlib appears to have a bug which means that if a string
    # contains a newline it gets escaped, however if an object has a
    # .__repr__() which contains newlines it does not get escaped.
    # However in either case we want to preserve the newline.
    substitutions = [(u"\n", u"\n~"), (u"%", u"%%")]
    if not isinstance(obj, six.string_types):
        obj = py.io.saferepr(obj)
        # reprs escape the newline, so undo that escaping too
        substitutions.append((u"\\n", u"\n~"))
    if isinstance(obj, bytes):
        substitutions = [
            (old.encode(), new.encode()) for old, new in substitutions
        ]
    for old, new in substitutions:
        obj = obj.replace(old, new)
    return obj
def _should_repr_global_name(obj):
return not hasattr(obj, "__name__") and not callable(obj)
def _format_boolop(explanations, is_or):
    """Join sub-explanations of a boolean op, escaping '%' for later
    %-formatting. Handles both text and bytes explanations."""
    joiner = " or " if is_or else " and "
    explanation = "(" + joiner.join(explanations) + ")"
    if isinstance(explanation, six.text_type):
        return explanation.replace(u"%", u"%%")
    return explanation.replace(b"%", b"%%")
def _call_reprcompare(ops, results, expls, each_obj):
    """Return the explanation for the first failing step of a chained compare.

    Walks the per-operator results until one is falsy (or raises while
    being truth-tested), then lets util._reprcompare customise the
    explanation for that step if a hook is installed.
    """
    for i, outcome, explanation in zip(range(len(ops)), results, expls):
        try:
            failed = not outcome
        except Exception:
            # Truth-testing the result itself blew up; treat as failure.
            failed = True
        if failed:
            break
    if util._reprcompare is not None:
        custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
        if custom is not None:
            return custom
    return explanation
# Map of unary AST operator classes to %-style explanation templates.
unary_map = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"}
# Map of binary/comparison AST operator classes to their source symbol.
binop_map = {
    ast.BitOr: "|",
    ast.BitXor: "^",
    ast.BitAnd: "&",
    ast.LShift: "<<",
    ast.RShift: ">>",
    ast.Add: "+",
    ast.Sub: "-",
    ast.Mult: "*",
    ast.Div: "/",
    ast.FloorDiv: "//",
    ast.Mod: "%%",  # escaped for string formatting
    ast.Eq: "==",
    ast.NotEq: "!=",
    ast.Lt: "<",
    ast.LtE: "<=",
    ast.Gt: ">",
    ast.GtE: ">=",
    ast.Pow: "**",
    ast.Is: "is",
    ast.IsNot: "is not",
    ast.In: "in",
    ast.NotIn: "not in",
}
# Python 3.5+ compatibility: the matrix-multiplication operator only
# exists in newer grammars.
try:
    binop_map[ast.MatMult] = "@"
except AttributeError:
    pass
# Python 3.4+ compatibility: older grammars have no ast.NameConstant, so
# fall back to a plain ast.Name referencing the constant by name.
if hasattr(ast, "NameConstant"):
    _NameConstant = ast.NameConstant
else:

    def _NameConstant(c):
        return ast.Name(str(c), ast.Load())
def set_location(node, lineno, col_offset):
    """Set node location information recursively."""
    # Iterative depth-first walk instead of a nested helper; every node
    # receives the same lineno/col_offset, so visit order is irrelevant.
    pending = [node]
    while pending:
        current = pending.pop()
        if "lineno" in current._attributes:
            current.lineno = lineno
        if "col_offset" in current._attributes:
            current.col_offset = col_offset
        pending.extend(ast.iter_child_nodes(current))
    return node
class AssertionRewriter(ast.NodeVisitor):
    """Assertion rewriting implementation.

    The main entrypoint is to call .run() with an ast.Module instance,
    this will then find all the assert statements and rewrite them to
    provide intermediate values and a detailed assertion error.  See
    http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
    for an overview of how this works.

    The entry point here is .run() which will iterate over all the
    statements in an ast.Module and for each ast.Assert statement it
    finds call .visit() with it.  Then .visit_Assert() takes over and
    is responsible for creating new ast statements to replace the
    original assert statement: it rewrites the test of an assertion
    to provide intermediate values and replace it with an if statement
    which raises an assertion error with a detailed explanation in
    case the expression is false.

    For this .visit_Assert() uses the visitor pattern to visit all the
    AST nodes of the ast.Assert.test field, each visit call returning
    an AST node and the corresponding explanation string.  During this
    state is kept in several instance attributes:

    :statements: All the AST statements which will replace the assert
       statement.

    :variables: This is populated by .variable() with each variable
       used by the statements so that they can all be set to None at
       the end of the statements.

    :variable_counter: Counter to create new unique variables needed
       by statements.  Variables are created using .variable() and
       have the form of "@py_assert0".

    :on_failure: The AST statements which will be executed if the
       assertion test fails.  This is the code which will construct
       the failure message and raises the AssertionError.

    :explanation_specifiers: A dict filled by .explanation_param()
       with %-formatting placeholders and their corresponding
       expressions to use in the building of an assertion message.
       This is used by .pop_format_context() to build a message.

    :stack: A stack of the explanation_specifiers dicts maintained by
       .push_format_context() and .pop_format_context() which allows
       to build another %-formatted string while already building one.

    This state is reset on every new assert statement visited and used
    by the other visitors.
    """

    def __init__(self, module_path, config):
        # module_path is used for warning locations; config is the
        # pytest config (may be None when rewriting outside a session).
        super(AssertionRewriter, self).__init__()
        self.module_path = module_path
        self.config = config

    def run(self, mod):
        """Find all assert statements in *mod* and rewrite them."""
        if not mod.body:
            # Nothing to do.
            return
        # Insert some special imports at the top of the module but after any
        # docstrings and __future__ imports.
        aliases = [
            ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
            ast.alias("_pytest.assertion.rewrite", "@pytest_ar"),
        ]
        doc = getattr(mod, "docstring", None)
        expect_docstring = doc is None
        if doc is not None and self.is_rewrite_disabled(doc):
            return
        pos = 0
        lineno = 1
        for item in mod.body:
            if (
                expect_docstring
                and isinstance(item, ast.Expr)
                and isinstance(item.value, ast.Str)
            ):
                doc = item.value.s
                if self.is_rewrite_disabled(doc):
                    return
                expect_docstring = False
            elif (
                not isinstance(item, ast.ImportFrom)
                or item.level > 0
                or item.module != "__future__"
            ):
                lineno = item.lineno
                break
            pos += 1
        else:
            # Module consists only of a docstring/__future__ imports.
            lineno = item.lineno
        imports = [
            ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases
        ]
        mod.body[pos:pos] = imports
        # Collect asserts.
        nodes = [mod]
        while nodes:
            node = nodes.pop()
            for name, field in ast.iter_fields(node):
                if isinstance(field, list):
                    new = []
                    for i, child in enumerate(field):
                        if isinstance(child, ast.Assert):
                            # Transform assert.
                            new.extend(self.visit(child))
                        else:
                            new.append(child)
                            if isinstance(child, ast.AST):
                                nodes.append(child)
                    setattr(node, name, new)
                elif (
                    isinstance(field, ast.AST)
                    and
                    # Don't recurse into expressions as they can't contain
                    # asserts.
                    not isinstance(field, ast.expr)
                ):
                    nodes.append(field)

    @staticmethod
    def is_rewrite_disabled(docstring):
        # Opt-out marker checked in module docstrings.
        return "PYTEST_DONT_REWRITE" in docstring

    def variable(self):
        """Get a new variable."""
        # Use a character invalid in python identifiers to avoid clashing.
        name = "@py_assert" + str(next(self.variable_counter))
        self.variables.append(name)
        return name

    def assign(self, expr):
        """Give *expr* a name."""
        name = self.variable()
        self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
        return ast.Name(name, ast.Load())

    def display(self, expr):
        """Call py.io.saferepr on the expression."""
        return self.helper("saferepr", expr)

    def helper(self, name, *args):
        """Call a helper in this module."""
        py_name = ast.Name("@pytest_ar", ast.Load())
        attr = ast.Attribute(py_name, "_" + name, ast.Load())
        return ast_Call(attr, list(args), [])

    def builtin(self, name):
        """Return the builtin called *name*."""
        builtin_name = ast.Name("@py_builtins", ast.Load())
        return ast.Attribute(builtin_name, name, ast.Load())

    def explanation_param(self, expr):
        """Return a new named %-formatting placeholder for expr.

        This creates a %-formatting placeholder for expr in the
        current formatting context, e.g. ``%(py0)s``.  The placeholder
        and expr are placed in the current format context so that it
        can be used on the next call to .pop_format_context().
        """
        specifier = "py" + str(next(self.variable_counter))
        self.explanation_specifiers[specifier] = expr
        return "%(" + specifier + ")s"

    def push_format_context(self):
        """Create a new formatting context.

        The format context is used for when an explanation wants to
        have a variable value formatted in the assertion message.  In
        this case the value required can be added using
        .explanation_param().  Finally .pop_format_context() is used
        to format a string of %-formatted values as added by
        .explanation_param().
        """
        self.explanation_specifiers = {}
        self.stack.append(self.explanation_specifiers)

    def pop_format_context(self, expl_expr):
        """Format the %-formatted string with current format context.

        The expl_expr should be an ast.Str instance constructed from
        the %-placeholders created by .explanation_param().  This will
        add the required code to format said string to .on_failure and
        return the ast.Name instance of the formatted string.
        """
        current = self.stack.pop()
        if self.stack:
            self.explanation_specifiers = self.stack[-1]
        keys = [ast.Str(key) for key in current.keys()]
        format_dict = ast.Dict(keys, list(current.values()))
        form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
        name = "@py_format" + str(next(self.variable_counter))
        self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
        return ast.Name(name, ast.Load())

    def generic_visit(self, node):
        """Handle expressions we don't have custom code for."""
        assert isinstance(node, ast.expr)
        res = self.assign(node)
        return res, self.explanation_param(self.display(res))

    def visit_Assert(self, assert_):
        """Return the AST statements to replace the ast.Assert instance.

        This rewrites the test of an assertion to provide
        intermediate values and replace it with an if statement which
        raises an assertion error with a detailed explanation in case
        the expression is false.
        """
        if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1:
            # assert (x, y) is always true; warn rather than silently pass.
            from _pytest.warning_types import PytestWarning
            import warnings

            warnings.warn_explicit(
                PytestWarning("assertion is always true, perhaps remove parentheses?"),
                category=None,
                filename=str(self.module_path),
                lineno=assert_.lineno,
            )
        # Reset per-assert rewriting state (see class docstring).
        self.statements = []
        self.variables = []
        self.variable_counter = itertools.count()
        self.stack = []
        self.on_failure = []
        self.push_format_context()
        # Rewrite assert into a bunch of statements.
        top_condition, explanation = self.visit(assert_.test)
        # Create failure message.
        body = self.on_failure
        negation = ast.UnaryOp(ast.Not(), top_condition)
        self.statements.append(ast.If(negation, body, []))
        if assert_.msg:
            assertmsg = self.helper("format_assertmsg", assert_.msg)
            explanation = "\n>assert " + explanation
        else:
            assertmsg = ast.Str("")
            explanation = "assert " + explanation
        template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
        msg = self.pop_format_context(template)
        fmt = self.helper("format_explanation", msg)
        err_name = ast.Name("AssertionError", ast.Load())
        exc = ast_Call(err_name, [fmt], [])
        if sys.version_info[0] >= 3:
            raise_ = ast.Raise(exc, None)
        else:
            raise_ = ast.Raise(exc, None, None)
        body.append(raise_)
        # Clear temporary variables by setting them to None.
        if self.variables:
            variables = [ast.Name(name, ast.Store()) for name in self.variables]
            clear = ast.Assign(variables, _NameConstant(None))
            self.statements.append(clear)
        # Fix line numbers.
        for stmt in self.statements:
            set_location(stmt, assert_.lineno, assert_.col_offset)
        return self.statements

    def visit_Name(self, name):
        # Display the repr of the name if it's a local variable or
        # _should_repr_global_name() thinks it's acceptable.
        locs = ast_Call(self.builtin("locals"), [], [])
        inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
        dorepr = self.helper("should_repr_global_name", name)
        test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
        expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
        return name, self.explanation_param(expr)

    def visit_BoolOp(self, boolop):
        # Rewrites `a and b` / `a or b` into nested if statements so each
        # operand's value and explanation can be captured separately.
        res_var = self.variable()
        expl_list = self.assign(ast.List([], ast.Load()))
        app = ast.Attribute(expl_list, "append", ast.Load())
        is_or = int(isinstance(boolop.op, ast.Or))
        body = save = self.statements
        fail_save = self.on_failure
        levels = len(boolop.values) - 1
        self.push_format_context()
        # Process each operand, short-circuting if needed.
        for i, v in enumerate(boolop.values):
            if i:
                fail_inner = []
                # cond is set in a prior loop iteration below
                self.on_failure.append(ast.If(cond, fail_inner, []))  # noqa
                self.on_failure = fail_inner
            self.push_format_context()
            res, expl = self.visit(v)
            body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
            expl_format = self.pop_format_context(ast.Str(expl))
            call = ast_Call(app, [expl_format], [])
            self.on_failure.append(ast.Expr(call))
            if i < levels:
                cond = res
                if is_or:
                    cond = ast.UnaryOp(ast.Not(), cond)
                inner = []
                self.statements.append(ast.If(cond, inner, []))
                self.statements = body = inner
        self.statements = save
        self.on_failure = fail_save
        expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
        expl = self.pop_format_context(expl_template)
        return ast.Name(res_var, ast.Load()), self.explanation_param(expl)

    def visit_UnaryOp(self, unary):
        pattern = unary_map[unary.op.__class__]
        operand_res, operand_expl = self.visit(unary.operand)
        res = self.assign(ast.UnaryOp(unary.op, operand_res))
        return res, pattern % (operand_expl,)

    def visit_BinOp(self, binop):
        symbol = binop_map[binop.op.__class__]
        left_expr, left_expl = self.visit(binop.left)
        right_expr, right_expl = self.visit(binop.right)
        explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
        res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
        return res, explanation

    def visit_Call_35(self, call):
        """
        visit `ast.Call` nodes on Python3.5 and after
        """
        new_func, func_expl = self.visit(call.func)
        arg_expls = []
        new_args = []
        new_kwargs = []
        for arg in call.args:
            res, expl = self.visit(arg)
            arg_expls.append(expl)
            new_args.append(res)
        for keyword in call.keywords:
            res, expl = self.visit(keyword.value)
            new_kwargs.append(ast.keyword(keyword.arg, res))
            if keyword.arg:
                arg_expls.append(keyword.arg + "=" + expl)
            else:  # **args have `arg` keywords with an .arg of None
                arg_expls.append("**" + expl)
        expl = "%s(%s)" % (func_expl, ", ".join(arg_expls))
        new_call = ast.Call(new_func, new_args, new_kwargs)
        res = self.assign(new_call)
        res_expl = self.explanation_param(self.display(res))
        outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
        return res, outer_expl

    def visit_Starred(self, starred):
        # From Python 3.5, a Starred node can appear in a function call
        res, expl = self.visit(starred.value)
        return starred, "*" + expl

    def visit_Call_legacy(self, call):
        """
        visit `ast.Call nodes on 3.4 and below`
        """
        new_func, func_expl = self.visit(call.func)
        arg_expls = []
        new_args = []
        new_kwargs = []
        new_star = new_kwarg = None
        for arg in call.args:
            res, expl = self.visit(arg)
            new_args.append(res)
            arg_expls.append(expl)
        for keyword in call.keywords:
            res, expl = self.visit(keyword.value)
            new_kwargs.append(ast.keyword(keyword.arg, res))
            arg_expls.append(keyword.arg + "=" + expl)
        if call.starargs:
            new_star, expl = self.visit(call.starargs)
            arg_expls.append("*" + expl)
        if call.kwargs:
            new_kwarg, expl = self.visit(call.kwargs)
            arg_expls.append("**" + expl)
        expl = "%s(%s)" % (func_expl, ", ".join(arg_expls))
        new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg)
        res = self.assign(new_call)
        res_expl = self.explanation_param(self.display(res))
        outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
        return res, outer_expl

    # ast.Call signature changed on 3.5,
    # conditionally change which methods is named
    # visit_Call depending on Python version
    if sys.version_info >= (3, 5):
        visit_Call = visit_Call_35
    else:
        visit_Call = visit_Call_legacy

    def visit_Attribute(self, attr):
        if not isinstance(attr.ctx, ast.Load):
            # Only Load-context attributes can appear in an assert test.
            return self.generic_visit(attr)
        value, value_expl = self.visit(attr.value)
        res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
        res_expl = self.explanation_param(self.display(res))
        pat = "%s\n{%s = %s.%s\n}"
        expl = pat % (res_expl, res_expl, value_expl, attr.attr)
        return res, expl

    def visit_Compare(self, comp):
        # Chained comparisons (a < b < c) are unrolled pairwise so each
        # intermediate result can be reported individually.
        self.push_format_context()
        left_res, left_expl = self.visit(comp.left)
        if isinstance(comp.left, (ast.Compare, ast.BoolOp)):
            left_expl = "({})".format(left_expl)
        res_variables = [self.variable() for i in range(len(comp.ops))]
        load_names = [ast.Name(v, ast.Load()) for v in res_variables]
        store_names = [ast.Name(v, ast.Store()) for v in res_variables]
        it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
        expls = []
        syms = []
        results = [left_res]
        for i, op, next_operand in it:
            next_res, next_expl = self.visit(next_operand)
            if isinstance(next_operand, (ast.Compare, ast.BoolOp)):
                next_expl = "({})".format(next_expl)
            results.append(next_res)
            sym = binop_map[op.__class__]
            syms.append(ast.Str(sym))
            expl = "%s %s %s" % (left_expl, sym, next_expl)
            expls.append(ast.Str(expl))
            res_expr = ast.Compare(left_res, [op], [next_res])
            self.statements.append(ast.Assign([store_names[i]], res_expr))
            left_res, left_expl = next_res, next_expl
        # Use pytest.assertion.util._reprcompare if that's available.
        expl_call = self.helper(
            "call_reprcompare",
            ast.Tuple(syms, ast.Load()),
            ast.Tuple(load_names, ast.Load()),
            ast.Tuple(expls, ast.Load()),
            ast.Tuple(results, ast.Load()),
        )
        if len(comp.ops) > 1:
            res = ast.BoolOp(ast.And(), load_names)
        else:
            res = load_names[0]
        return res, self.explanation_param(self.pop_format_context(expl_call))
|
|
# (c) Copyright 2012-2015 Hewlett Packard Enterprise Development LP
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Volume driver for HPE 3PAR Storage array.
This driver requires 3.1.3 or later firmware on the 3PAR array, using
the 4.x version of the hpe3parclient.
You will need to install the python hpe3parclient.
sudo pip install --upgrade "hpe3parclient>=4.0"
Set the following in the cinder.conf file to enable the
3PAR iSCSI Driver along with the required flags:
volume_driver=cinder.volume.drivers.hpe.hpe_3par_iscsi.HPE3PARISCSIDriver
"""
import re
import sys
try:
from hpe3parclient import exceptions as hpeexceptions
except ImportError:
hpeexceptions = None
from oslo_log import log as logging
from oslo_utils.excutils import save_and_reraise_exception
from cinder import coordination
from cinder import exception
from cinder.i18n import _
from cinder import interface
from cinder.volume.drivers.hpe import hpe_3par_base as hpebasedriver
from cinder.volume import volume_utils
# Module-level logger for this driver.
LOG = logging.getLogger(__name__)

# EXISTENT_PATH error code returned from hpe3parclient
EXISTENT_PATH = 73

# Default iSCSI target port used when hpe3par_iscsi_ips entries omit one.
DEFAULT_ISCSI_PORT = 3260

# Volume-metadata keys under which the per-volume CHAP credentials are
# stored on the 3PAR array.
CHAP_USER_KEY = "HPQ-cinder-CHAP-name"
CHAP_PASS_KEY = "HPQ-cinder-CHAP-secret"
@interface.volumedriver
class HPE3PARISCSIDriver(hpebasedriver.HPE3PARDriverBase):
"""OpenStack iSCSI driver to enable 3PAR storage array.
Version history:
.. code-block:: none
1.0 - Initial driver
1.1 - QoS, extend volume, multiple iscsi ports, remove domain,
session changes, faster clone, requires 3.1.2 MU2 firmware.
1.2.0 - Updated the use of the hp3parclient to 2.0.0 and refactored
the drivers to use the new APIs.
1.2.1 - Synchronized extend_volume method.
1.2.2 - Added try/finally around client login/logout.
1.2.3 - log exceptions before raising
1.2.4 - Fixed iSCSI active path bug #1224594
1.2.5 - Added metadata during attach/detach bug #1258033
1.2.6 - Use least-used iscsi n:s:p for iscsi volume attach bug #1269515
This update now requires 3.1.2 MU3 firmware
1.3.0 - Removed all SSH code. We rely on the hp3parclient now.
2.0.0 - Update hp3parclient API uses 3.0.x
2.0.2 - Add back-end assisted volume migrate
2.0.3 - Added support for managing/unmanaging of volumes
2.0.4 - Added support for volume retype
2.0.5 - Added CHAP support, requires 3.1.3 MU1 firmware
and hp3parclient 3.1.0.
2.0.6 - Fixing missing login/logout around attach/detach bug #1367429
2.0.7 - Add support for pools with model update
2.0.8 - Migrate without losing type settings bug #1356608
2.0.9 - Removing locks bug #1381190
2.0.10 - Add call to queryHost instead SSH based findHost #1398206
2.0.11 - Added missing host name during attach fix #1398206
2.0.12 - Removed usage of host name cache #1398914
2.0.13 - Update LOG usage to fix translations. bug #1384312
2.0.14 - Do not allow a different iSCSI IP (hp3par_iscsi_ips) to be
used during live-migration. bug #1423958
2.0.15 - Added support for updated detach_volume attachment.
2.0.16 - Added encrypted property to initialize_connection #1439917
2.0.17 - Python 3 fixes
2.0.18 - Improved VLUN creation and deletion logic. #1469816
2.0.19 - Changed initialize_connection to use getHostVLUNs. #1475064
2.0.20 - Adding changes to support 3PAR iSCSI multipath.
2.0.21 - Adds consistency group support
2.0.22 - Update driver to use ABC metaclasses
2.0.23 - Added update_migrated_volume. bug # 1492023
3.0.0 - Rebranded HP to HPE.
3.0.1 - Python 3 support
3.0.2 - Remove db access for consistency groups
3.0.3 - Fix multipath dictionary key error. bug #1522062
3.0.4 - Adds v2 managed replication support
3.0.5 - Adds v2 unmanaged replication support
3.0.6 - Adding manage/unmanage snapshot support
3.0.7 - Optimize array ID retrieval
3.0.8 - Update replication to version 2.1
3.0.9 - Use same LUN ID for each VLUN path #1551994
3.0.10 - Remove metadata that tracks the instance ID. bug #1572665
3.0.11 - _create_3par_iscsi_host() now accepts iscsi_iqn as list only.
Bug #1590180
3.0.12 - Added entry point tracing
3.0.13 - Handling HTTP conflict 409, host WWN/iSCSI name already used
by another host, while creating 3PAR iSCSI Host. bug #1642945
3.0.14 - Handle manage and unmanage hosts present. bug #1648067
3.0.15 - Adds consistency group capability in generic volume groups.
3.0.16 - Get host from os-brick connector. bug #1690244
4.0.0 - Adds base class.
4.0.1 - Update CHAP on host record when volume is migrated
to new compute host. bug # 1737181
4.0.2 - Handle force detach case. bug #1686745
4.0.3 - Set proper backend on subsequent operation, after group
failover. bug #1773069
4.0.4 - Added Peer Persistence feature
4.0.5 - Added Primera array check. bug #1849525
4.0.6 - Allow iSCSI support for Primera 4.2 onwards
"""
VERSION = "4.0.6"
# The name of the CI wiki page.
CI_WIKI_NAME = "HPE_Storage_CI"
def __init__(self, *args, **kwargs):
    """Initialize the base 3PAR driver and tag the transport as iSCSI."""
    super(HPE3PARISCSIDriver, self).__init__(*args, **kwargs)
    # Used by the base class / scheduler to report the storage protocol.
    self.protocol = 'iSCSI'
def _do_setup(self, common):
    """Protocol-specific setup: validate the array and discover iSCSI ports.

    :param common: the logged-out common driver object; provides the
        3PAR REST client.
    :raises NotImplementedError: on Primera arrays older than 4.2,
        which do not support iSCSI.
    """
    client_obj = common.client
    is_primera = client_obj.is_primera_array()
    if is_primera:
        api_version = client_obj.getWsApiVersion()
        # 'build' is a numeric version, e.g. 40200000 for 4.2.0.
        array_version = api_version['build']
        LOG.debug("array version: %(version)s",
                  {'version': array_version})
        if array_version < 40200000:
            err_msg = (_('The iSCSI driver is not supported for '
                         'Primera %(version)s. It is supported '
                         'for Primera 4.2 or higher versions.')
                       % {'version': array_version})
            LOG.error(err_msg)
            raise NotImplementedError()
    # Discover and cache valid iSCSI target ports, keyed by API URL.
    self.iscsi_ips = {}
    common.client_login()
    try:
        self.initialize_iscsi_ports(common)
    finally:
        # Always release the session, even if port discovery failed.
        self._logout(common)
def initialize_iscsi_ports(self, common,
                           remote_target=None, remote_client=None):
    """Build the map of valid iSCSI target IPs for one backend.

    Cross-checks the configured IPs (hpe3par_iscsi_ips and the single
    iscsi_ip_address option) against the array's active iSCSI target
    ports and stores the result in self.iscsi_ips keyed by the
    backend's API URL.

    :param common: common driver object used for array queries
    :param remote_target: replication target config dict; when given,
        its ports are initialized instead of the primary backend's
    :param remote_client: client for the replication target array
    :raises exception.InvalidInput: if no configured IP matches an
        active iSCSI port on the array
    """
    # map iscsi_ip-> ip_port
    #             -> iqn
    #             -> nsp
    iscsi_ip_list = {}
    temp_iscsi_ip = {}
    if remote_target:
        backend_conf = remote_target
    else:
        backend_conf = common._client_conf
    # use the 3PAR ip_addr list for iSCSI configuration
    if len(backend_conf['hpe3par_iscsi_ips']) > 0:
        # add port values to ip_addr, if necessary
        for ip_addr in backend_conf['hpe3par_iscsi_ips']:
            # NOTE(review): splitting on ':' assumes IPv4 literals; an
            # IPv6 address would be rejected here -- TODO confirm.
            ip = ip_addr.split(':')
            if len(ip) == 1:
                temp_iscsi_ip[ip_addr] = {'ip_port': DEFAULT_ISCSI_PORT}
            elif len(ip) == 2:
                temp_iscsi_ip[ip[0]] = {'ip_port': ip[1]}
            else:
                LOG.warning("Invalid IP address format '%s'", ip_addr)
    # add the single value iscsi_ip_address option to the IP dictionary.
    # This way we can see if it's a valid iSCSI IP. If it's not valid,
    # we won't use it and won't bother to report it, see below
    if 'iscsi_ip_address' in backend_conf:
        if (backend_conf['iscsi_ip_address'] not in temp_iscsi_ip):
            ip = backend_conf['iscsi_ip_address']
            ip_port = backend_conf['iscsi_port']
            temp_iscsi_ip[ip] = {'ip_port': ip_port}
    # get all the valid iSCSI ports from 3PAR
    # when found, add the valid iSCSI ip, ip port, iqn and nsp
    # to the iSCSI IP dictionary
    iscsi_ports = common.get_active_iscsi_target_ports(remote_client)
    for port in iscsi_ports:
        ip = port['IPAddr']
        if ip in temp_iscsi_ip:
            ip_port = temp_iscsi_ip[ip]['ip_port']
            iscsi_ip_list[ip] = {'ip_port': ip_port,
                                 'nsp': port['nsp'],
                                 'iqn': port['iSCSIName']}
            del temp_iscsi_ip[ip]
    # if the single value iscsi_ip_address option is still in the
    # temp dictionary it's because it defaults to $my_ip which doesn't
    # make sense in this context. So, if present, remove it and move on.
    if 'iscsi_ip_address' in backend_conf:
        if backend_conf['iscsi_ip_address'] in temp_iscsi_ip:
            del temp_iscsi_ip[backend_conf['iscsi_ip_address']]
    # lets see if there are invalid iSCSI IPs left in the temp dict
    if len(temp_iscsi_ip) > 0:
        LOG.warning("Found invalid iSCSI IP address(s) in "
                    "configuration option(s) hpe3par_iscsi_ips or "
                    "target_ip_address '%s.'",
                    (", ".join(temp_iscsi_ip)))
    if not len(iscsi_ip_list):
        msg = _('At least one valid iSCSI IP address must be set.')
        LOG.error(msg)
        raise exception.InvalidInput(reason=msg)
    # Cache the result per backend so failover can switch maps.
    if remote_target:
        self.iscsi_ips[remote_target['hpe3par_api_url']] = iscsi_ip_list
    else:
        self.iscsi_ips[common._client_conf['hpe3par_api_url']] = (
            iscsi_ip_list)
def _initialize_connection_common(self, volume, connector, common,
                                  host, iscsi_ips, ready_ports,
                                  target_portals, target_iqns, target_luns,
                                  remote_client=None):
    """Create/reuse VLUNs on every ready port for a multipath attach.

    Appends one entry per usable port to the caller-provided
    target_portals/target_iqns/target_luns lists (mutated in place).
    The same LUN ID is reused across all ports of the volume.

    :param volume: volume being attached
    :param connector: connector info from os-brick (unused here but
        kept for a uniform signature)
    :param common: common driver object
    :param host: 3PAR host record the VLUNs are exported to
    :param iscsi_ips: valid IP map built by initialize_iscsi_ports()
    :param ready_ports: iSCSI ports currently in READY state
    :param remote_client: client of the replication target, if any
    """
    # Target portal ips are defined in cinder.conf.
    target_portal_ips = iscsi_ips.keys()
    # Collect all existing VLUNs for this volume/host combination.
    existing_vluns = common.find_existing_vluns(volume, host,
                                                remote_client)
    # Cycle through each ready iSCSI port and determine if a new
    # VLUN should be created or an existing one used.
    lun_id = None
    for port in ready_ports:
        iscsi_ip = port['IPAddr']
        if iscsi_ip in target_portal_ips:
            vlun = None
            # check for an already existing VLUN matching the
            # nsp for this iSCSI IP. If one is found, use it
            # instead of creating a new VLUN.
            for v in existing_vluns:
                portPos = common.build_portPos(
                    iscsi_ips[iscsi_ip]['nsp'])
                if v['portPos'] == portPos:
                    vlun = v
                    break
            else:
                # for/else: no existing VLUN matched, so create one.
                vlun = common.create_vlun(
                    volume, host, iscsi_ips[iscsi_ip]['nsp'],
                    lun_id=lun_id, remote_client=remote_client)
            # We want to use the same LUN ID for every port
            if lun_id is None:
                lun_id = vlun['lun']
            iscsi_ip_port = "%s:%s" % (
                iscsi_ip, iscsi_ips[iscsi_ip]['ip_port'])
            target_portals.append(iscsi_ip_port)
            target_iqns.append(port['iSCSIName'])
            target_luns.append(vlun['lun'])
        else:
            LOG.warning("iSCSI IP: '%s' was not found in "
                        "hpe3par_iscsi_ips list defined in "
                        "cinder.conf.", iscsi_ip)
@volume_utils.trace
@coordination.synchronized('3par-{volume.id}')
def initialize_connection(self, volume, connector):
    """Assigns the volume to a server.

    Assign any created volume to a compute node/host so that it can be
    used from that host.

    This driver returns a driver_volume_type of 'iscsi'.
    The format of the driver data is defined in _get_iscsi_properties.
    Example return value:

    .. code-block:: default

        {
            'driver_volume_type': 'iscsi',
            'data': {
                'encrypted': False,
                'target_discovered': True,
                'target_iqn': 'iqn.2010-10.org.openstack:volume-00000001',
                'target_portal': '127.0.0.1:3260',
                'volume_id': 1,
            }
        }

    Steps to export a volume on 3PAR
      * Get the 3PAR iSCSI iqn
      * Create a host on the 3par
      * create vlun on the 3par
    """
    LOG.debug("volume id: %(volume_id)s",
              {'volume_id': volume['id']})
    array_id = self.get_volume_replication_driver_data(volume)
    common = self._login(array_id=array_id)
    try:
        # If the volume has been failed over, we need to reinitialize
        # iSCSI ports so they represent the new array.
        if volume.get('replication_status') == 'failed-over' and (
                common._client_conf['hpe3par_api_url'] not in self.iscsi_ips):
            self.initialize_iscsi_ports(common)
        # Grab the correct iSCSI ports
        iscsi_ips = self.iscsi_ips[common._client_conf['hpe3par_api_url']]
        # we have to make sure we have a host
        host, username, password, cpg = self._create_host(
            common,
            volume,
            connector)
        multipath = connector.get('multipath')
        LOG.debug("multipath: %(multipath)s",
                  {'multipath': multipath})
        if multipath:
            # Multipath: export through every ready port.
            ready_ports = common.client.getiSCSIPorts(
                state=common.client.PORT_STATE_READY)
            target_portals = []
            target_iqns = []
            target_luns = []
            self._initialize_connection_common(
                volume, connector, common,
                host, iscsi_ips, ready_ports,
                target_portals, target_iqns, target_luns)
            if volume.get('replication_status') == 'enabled':
                LOG.debug('This is a replication setup')
                remote_target = common._replication_targets[0]
                replication_mode = remote_target['replication_mode']
                quorum_witness_ip = (
                    remote_target.get('quorum_witness_ip'))
                # replication_mode 1 == synchronous replication.
                if replication_mode == 1:
                    LOG.debug('replication_mode is sync')
                    if quorum_witness_ip:
                        LOG.debug('quorum_witness_ip is present')
                        LOG.debug('Peer Persistence has been configured')
                    else:
                        LOG.debug('Since quorum_witness_ip is absent, '
                                  'considering this as Active/Passive '
                                  'replication')
                else:
                    LOG.debug('Active/Passive replication has been '
                              'configured')
                if replication_mode == 1 and quorum_witness_ip:
                    # Peer Persistence: also export through the remote
                    # array's ports so both paths are usable.
                    remote_client = (
                        common._create_replication_client(remote_target))
                    self.initialize_iscsi_ports(
                        common, remote_target, remote_client)
                    remote_iscsi_ips = (
                        self.iscsi_ips[remote_target['hpe3par_api_url']])
                    # we have to make sure we have a host
                    host, username, password, cpg = (
                        self._create_host(
                            common, volume, connector,
                            remote_target, cpg, remote_client))
                    ready_ports = remote_client.getiSCSIPorts(
                        state=remote_client.PORT_STATE_READY)
                    self._initialize_connection_common(
                        volume, connector, common,
                        host, remote_iscsi_ips, ready_ports,
                        target_portals, target_iqns, target_luns,
                        remote_client)
                    common._destroy_replication_client(remote_client)
            info = {'driver_volume_type': 'iscsi',
                    'data': {'target_portals': target_portals,
                             'target_iqns': target_iqns,
                             'target_luns': target_luns,
                             'target_discovered': True
                             }
                    }
        else:
            # Single-path: export through one (least used) port.
            least_used_nsp = None
            # check if a VLUN already exists for this host
            existing_vlun = common.find_existing_vlun(volume, host)
            if existing_vlun:
                # We override the nsp here on purpose to force the
                # volume to be exported out the same IP as it already is.
                # This happens during nova live-migration, we want to
                # disable the picking of a different IP that we export
                # the volume to, or nova complains.
                least_used_nsp = common.build_nsp(existing_vlun['portPos'])
            if not least_used_nsp:
                least_used_nsp = self._get_least_used_nsp_for_host(
                    common,
                    host['name'])
            vlun = None
            if existing_vlun is None:
                # now that we have a host, create the VLUN
                vlun = common.create_vlun(volume, host, least_used_nsp)
            else:
                vlun = existing_vlun
            if least_used_nsp is None:
                LOG.warning("Least busy iSCSI port not found, "
                            "using first iSCSI port in list.")
                iscsi_ip = list(iscsi_ips)[0]
            else:
                iscsi_ip = self._get_ip_using_nsp(least_used_nsp, common)
            iscsi_ip_port = iscsi_ips[iscsi_ip]['ip_port']
            iscsi_target_iqn = iscsi_ips[iscsi_ip]['iqn']
            info = {'driver_volume_type': 'iscsi',
                    'data': {'target_portal': "%s:%s" %
                             (iscsi_ip, iscsi_ip_port),
                             'target_iqn': iscsi_target_iqn,
                             'target_lun': vlun['lun'],
                             'target_discovered': True
                             }
                    }
        if common._client_conf['hpe3par_iscsi_chap_enabled']:
            info['data']['auth_method'] = 'CHAP'
            info['data']['auth_username'] = username
            info['data']['auth_password'] = password
        encryption_key_id = volume.get('encryption_key_id', None)
        info['data']['encrypted'] = encryption_key_id is not None
        return info
    finally:
        self._logout(common)
@volume_utils.trace
@coordination.synchronized('3par-{volume.id}')
def terminate_connection(self, volume, connector, **kwargs):
    """Driver entry point to detach a volume from an instance.

    A ``None`` connector means a force detach: all VLUNs for the
    volume are removed regardless of host. For Peer Persistence
    setups the remote array's VLUNs are removed as well.
    """
    array_id = self.get_volume_replication_driver_data(volume)
    common = self._login(array_id=array_id)
    try:
        is_force_detach = connector is None
        remote_client = None
        multipath = False
        if connector:
            multipath = connector.get('multipath')
        LOG.debug("multipath: %(multipath)s",
                  {'multipath': multipath})
        if multipath:
            if volume.get('replication_status') == 'enabled':
                LOG.debug('This is a replication setup')
                remote_target = common._replication_targets[0]
                replication_mode = remote_target['replication_mode']
                quorum_witness_ip = (
                    remote_target.get('quorum_witness_ip'))
                # replication_mode 1 == synchronous replication.
                if replication_mode == 1:
                    LOG.debug('replication_mode is sync')
                    if quorum_witness_ip:
                        LOG.debug('quorum_witness_ip is present')
                        LOG.debug('Peer Persistence has been configured')
                    else:
                        LOG.debug('Since quorum_witness_ip is absent, '
                                  'considering this as Active/Passive '
                                  'replication')
                else:
                    LOG.debug('Active/Passive replication has been '
                              'configured')
                if replication_mode == 1 and quorum_witness_ip:
                    # Peer Persistence: also detach on the remote array.
                    remote_client = (
                        common._create_replication_client(remote_target))
        if is_force_detach:
            common.terminate_connection(volume, None, None)
        else:
            hostname = common._safe_hostname(connector, self.configuration)
            common.terminate_connection(
                volume,
                hostname,
                iqn=connector['initiator'],
                remote_client=remote_client)
        self._clear_chap_3par(common, volume)
    finally:
        self._logout(common)
def _clear_chap_3par(self, common, volume):
    """Clears CHAP credentials on a 3par volume.

    Ignore exceptions caused by the keys not being present on a volume.

    :param common: logged-in 3PAR common object
    :param volume: volume whose CHAP metadata keys should be removed
    """
    vol_name = common._get_3par_vol_name(volume)
    # Remove both CHAP metadata keys.  A missing key (HTTPNotFound) simply
    # means CHAP was never set up for this volume and is ignored; any
    # other exception propagates unchanged.  The previous code duplicated
    # the try block per key and carried a redundant
    # "except Exception: raise" clause, which is a no-op.
    for metadata_key in (CHAP_USER_KEY, CHAP_PASS_KEY):
        try:
            common.client.removeVolumeMetaData(vol_name, metadata_key)
        except hpeexceptions.HTTPNotFound:
            pass
def _create_3par_iscsi_host(self, common, hostname, iscsi_iqn, domain,
persona_id, remote_client=None):
"""Create a 3PAR host.
Create a 3PAR host, if there is already a host on the 3par using
the same iqn but with a different hostname, return the hostname
used by 3PAR.
"""
# first search for an existing host
host_found = None
if remote_client:
client_obj = remote_client
else:
client_obj = common.client
hosts = client_obj.queryHost(iqns=iscsi_iqn)
if hosts and hosts['members'] and 'name' in hosts['members'][0]:
host_found = hosts['members'][0]['name']
if host_found is not None:
return host_found
else:
persona_id = int(persona_id)
try:
client_obj.createHost(hostname, iscsiNames=iscsi_iqn,
optional={'domain': domain,
'persona': persona_id})
except hpeexceptions.HTTPConflict as path_conflict:
msg = "Create iSCSI host caught HTTP conflict code: %s"
with save_and_reraise_exception(reraise=False) as ctxt:
if path_conflict.get_code() is EXISTENT_PATH:
# Handle exception : EXISTENT_PATH - host WWN/iSCSI
# name already used by another host
hosts = client_obj.queryHost(iqns=iscsi_iqn)
if hosts and hosts['members'] and (
'name' in hosts['members'][0]):
hostname = hosts['members'][0]['name']
else:
# re-raise last caught exception
ctxt.reraise = True
LOG.exception(msg, path_conflict.get_code())
else:
# re-raise last caught exception
# for other HTTP conflict
ctxt.reraise = True
LOG.exception(msg, path_conflict.get_code())
return hostname
def _modify_3par_iscsi_host(self, common, hostname, iscsi_iqn):
mod_request = {'pathOperation': common.client.HOST_EDIT_ADD,
'iSCSINames': [iscsi_iqn]}
common.client.modifyHost(hostname, mod_request)
def _set_3par_chaps(self, common, hostname, volume, username, password):
"""Sets a 3PAR host's CHAP credentials."""
if not common._client_conf['hpe3par_iscsi_chap_enabled']:
return
mod_request = {'chapOperation': common.client.HOST_EDIT_ADD,
'chapOperationMode': common.client.CHAP_INITIATOR,
'chapName': username,
'chapSecret': password}
common.client.modifyHost(hostname, mod_request)
def _create_host(self, common, volume, connector,
                 remote_target=None, src_cpg=None, remote_client=None):
    """Creates or modifies existing 3PAR host.

    Looks up (or creates) the array-side host entry for the connector's
    initiator IQN and returns it along with the CHAP credentials (only
    populated when CHAP is enabled and this is not the remote side) and
    the CPG to export from.

    :param common: logged-in 3PAR common object for the local array
    :param volume: volume being attached
    :param connector: connector dict; 'initiator' holds the IQN
    :param remote_target: replication target config when operating on the
        remote array -- assumed Peer Persistence setup, TODO confirm
    :param src_cpg: source CPG name, mapped via the target's cpg_map
    :param remote_client: client for the remote array (used only when
        remote_target is set)
    :returns: tuple (host, username, password, cpg)
    """
    # make sure we don't have the host already
    host = None
    domain = None
    username = None
    password = None
    hostname = common._safe_hostname(connector, self.configuration)
    if remote_target:
        # Remote side: translate the source CPG through the replication
        # cpg_map and take the domain from the remote CPG, if any.
        cpg = common._get_cpg_from_cpg_map(
            remote_target['cpg_map'], src_cpg)
        cpg_obj = remote_client.getCPG(cpg)
        if 'domain' in cpg_obj:
            domain = cpg_obj['domain']
    else:
        cpg = common.get_cpg(volume, allowSnap=True)
        domain = common.get_domain(cpg)
    if not remote_target:
        # Get the CHAP secret if CHAP is enabled
        if common._client_conf['hpe3par_iscsi_chap_enabled']:
            vol_name = common._get_3par_vol_name(volume)
            username = common.client.getVolumeMetaData(
                vol_name, CHAP_USER_KEY)['value']
            password = common.client.getVolumeMetaData(
                vol_name, CHAP_PASS_KEY)['value']
    try:
        if remote_target:
            host = remote_client.getHost(hostname)
        else:
            host = common._get_3par_host(hostname)
            # Check whether host with iqn of initiator present on 3par
            hosts = common.client.queryHost(iqns=[connector['initiator']])
            # May switch to a different, already-registered hostname.
            host, hostname = (
                common._get_prioritized_host_on_3par(
                    host, hosts, hostname))
    except hpeexceptions.HTTPNotFound:
        # get persona from the volume type extra specs
        persona_id = common.get_persona_type(volume)
        # host doesn't exist, we have to create it
        hostname = self._create_3par_iscsi_host(common,
                                                hostname,
                                                [connector['initiator']],
                                                domain,
                                                persona_id,
                                                remote_client)
    else:
        # Host exists: make sure it carries this initiator's iSCSI path.
        if not remote_target:
            if 'iSCSIPaths' not in host or len(host['iSCSIPaths']) < 1:
                self._modify_3par_iscsi_host(
                    common, hostname,
                    connector['initiator'])
            elif (not host['initiatorChapEnabled'] and
                  common._client_conf['hpe3par_iscsi_chap_enabled']):
                LOG.warning("Host exists without CHAP credentials set and "
                            "has iSCSI attachments but CHAP is enabled. "
                            "Updating host with new CHAP credentials.")
        if remote_target:
            host = remote_client.getHost(hostname)
        else:
            # set/update the chap details for the host
            self._set_3par_chaps(common, hostname, volume, username, password)
            # Re-read so the returned host reflects the modifications.
            host = common._get_3par_host(hostname)
    return host, username, password, cpg
def _do_export(self, common, volume, connector):
    """Gets the associated account, generates CHAP info and updates.

    Returns a model update whose 'provider_auth' is None when CHAP is
    disabled, or "CHAP <username> <password>" otherwise.  An existing
    CHAP key found on one of the host's active iSCSI VLUNs is reused;
    failing that, a fresh 16-character password is generated.  The
    credentials are always written back to the volume metadata.
    """
    model_update = {}
    if not common._client_conf['hpe3par_iscsi_chap_enabled']:
        model_update['provider_auth'] = None
        return model_update
    # CHAP username will be the hostname
    chap_username = connector['host']
    chap_password = None
    try:
        # Get all active VLUNs for the host
        vluns = common.client.getHostVLUNs(chap_username)
        # Host has active VLUNs... is CHAP enabled on host?
        host_info = common.client.getHost(chap_username)
        if not host_info['initiatorChapEnabled']:
            LOG.warning("Host has no CHAP key, but CHAP is enabled.")
    except hpeexceptions.HTTPNotFound:
        # Neither host nor VLUNs exist yet: nothing to reuse.
        chap_password = volume_utils.generate_password(16)
        LOG.warning("No host or VLUNs exist. Generating new "
                    "CHAP key.")
    else:
        # Get a list of all iSCSI VLUNs and see if there is already a CHAP
        # key assigned to one of them.  Use that CHAP key if present,
        # otherwise create a new one.  Skip any VLUNs that are missing
        # CHAP credentials in metadata.
        chap_exists = False
        active_vluns = 0
        for vlun in vluns:
            if not vlun['active']:
                continue
            active_vluns += 1
            # iSCSI connections start with 'iqn'.
            if ('remoteName' in vlun and
                re.match('iqn.*', vlun['remoteName'])):
                try:
                    chap_password = common.client.getVolumeMetaData(
                        vlun['volumeName'], CHAP_PASS_KEY)['value']
                    chap_exists = True
                    break
                except hpeexceptions.HTTPNotFound:
                    LOG.debug("The VLUN %s is missing CHAP credentials "
                              "but CHAP is enabled. Skipping.",
                              vlun['remoteName'])
            else:
                LOG.warning("Non-iSCSI VLUN detected.")
        if not chap_exists:
            chap_password = volume_utils.generate_password(16)
            LOG.warning("No VLUN contained CHAP credentials. "
                        "Generating new CHAP key.")
    # Add CHAP credentials to the volume metadata
    vol_name = common._get_3par_vol_name(volume)
    common.client.setVolumeMetaData(
        vol_name, CHAP_USER_KEY, chap_username)
    common.client.setVolumeMetaData(
        vol_name, CHAP_PASS_KEY, chap_password)
    model_update['provider_auth'] = ('CHAP %s %s' %
                                     (chap_username, chap_password))
    return model_update
@volume_utils.trace
def create_export(self, context, volume, connector):
    """Driver entry point: set up CHAP export info for the volume.

    Delegates to _do_export under a login/logout pair; returns its
    model update (provider_auth).
    """
    common = self._login()
    try:
        return self._do_export(common, volume, connector)
    finally:
        self._logout(common)
@volume_utils.trace
def ensure_export(self, context, volume):
    """Ensure the volume still exists on the 3PAR.

    Also retrieves CHAP credentials, if present on the volume

    Returns a model update with 'provider_auth' set to
    "CHAP <user> <pass>" when both metadata keys are present, else None.
    NOTE: when the volume is missing from the array, an error is logged
    and the function falls through, implicitly returning None.
    """
    common = self._login()
    try:
        vol_name = common._get_3par_vol_name(volume)
        common.client.getVolume(vol_name)
    except hpeexceptions.HTTPNotFound:
        LOG.error("Volume %s doesn't exist on array.", vol_name)
    else:
        metadata = common.client.getAllVolumeMetaData(vol_name)
        username = None
        password = None
        model_update = {}
        model_update['provider_auth'] = None
        # Pull the CHAP username/password out of the volume metadata.
        for member in metadata['members']:
            if member['key'] == CHAP_USER_KEY:
                username = member['value']
            elif member['key'] == CHAP_PASS_KEY:
                password = member['value']
        if username and password:
            model_update['provider_auth'] = ('CHAP %s %s' %
                                             (username, password))
        return model_update
    finally:
        self._logout(common)
def _get_least_used_nsp_for_host(self, common, hostname):
"""Get the least used NSP for the current host.
Steps to determine which NSP to use.
* If only one iSCSI NSP, return it
* If there is already an active vlun to this host, return its NSP
* Return NSP with fewest active vluns
"""
iscsi_nsps = self._get_iscsi_nsps(common)
# If there's only one path, use it
if len(iscsi_nsps) == 1:
return iscsi_nsps[0]
# Try to reuse an existing iscsi path to the host
vluns = common.client.getVLUNs()
for vlun in vluns['members']:
if vlun['active']:
if vlun['hostname'] == hostname:
temp_nsp = common.build_nsp(vlun['portPos'])
if temp_nsp in iscsi_nsps:
# this host already has an iscsi path, so use it
return temp_nsp
# Calculate the least used iscsi nsp
least_used_nsp = self._get_least_used_nsp(common,
vluns['members'],
self._get_iscsi_nsps(common))
return least_used_nsp
def _get_iscsi_nsps(self, common):
"""Return the list of candidate nsps."""
nsps = []
iscsi_ips = self.iscsi_ips[common._client_conf['hpe3par_api_url']]
for value in iscsi_ips.values():
nsps.append(value['nsp'])
return nsps
def _get_ip_using_nsp(self, nsp, common):
"""Return IP associated with given nsp."""
iscsi_ips = self.iscsi_ips[common._client_conf['hpe3par_api_url']]
for (key, value) in iscsi_ips.items():
if value['nsp'] == nsp:
return key
def _get_least_used_nsp(self, common, vluns, nspss):
"""Return the nsp that has the fewest active vluns."""
# return only the nsp (node:server:port)
# count the number of nsps
nsp_counts = {}
for nsp in nspss:
# initialize counts to zero
nsp_counts[nsp] = 0
current_least_used_nsp = None
for vlun in vluns:
if vlun['active']:
nsp = common.build_nsp(vlun['portPos'])
if nsp in nsp_counts:
nsp_counts[nsp] = nsp_counts[nsp] + 1
# identify key (nsp) of least used nsp
current_smallest_count = sys.maxsize
for (nsp, count) in nsp_counts.items():
if count < current_smallest_count:
current_least_used_nsp = nsp
current_smallest_count = count
return current_least_used_nsp
|
|
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os
import os.path
import sys
import unittest
import SCons.Scanner.Fortran
import SCons.Node.FS
import SCons.Warnings
import TestCmd
import TestUnit
original = os.getcwd()
test = TestCmd.TestCmd(workdir = '')
os.chdir(test.workpath(''))
# create some source files and headers:
test.write('fff1.f',"""
PROGRAM FOO
INCLUDE 'f1.f'
include 'f2.f'
STOP
END
""")
test.write('fff2.f',"""
PROGRAM FOO
INCLUDE 'f2.f'
include 'd1/f2.f'
INCLUDE 'd2/f2.f'
STOP
END
""")
test.write('fff3.f',"""
PROGRAM FOO
INCLUDE 'f3.f' ; INCLUDE\t'd1/f3.f'
STOP
END
""")
# for Emacs -> "
test.subdir('d1', ['d1', 'd2'])
headers = ['fi.f', 'never.f',
'd1/f1.f', 'd1/f2.f', 'd1/f3.f', 'd1/fi.f',
'd1/d2/f1.f', 'd1/d2/f2.f', 'd1/d2/f3.f',
'd1/d2/f4.f', 'd1/d2/fi.f']
for h in headers:
test.write(h, "\n")
test.subdir('include', 'subdir', ['subdir', 'include'])
test.write('fff4.f',"""
PROGRAM FOO
INCLUDE 'f4.f'
STOP
END
""")
test.write('include/f4.f', "\n")
test.write('subdir/include/f4.f', "\n")
test.write('fff5.f',"""
PROGRAM FOO
INCLUDE 'f5.f'
INCLUDE 'not_there.f'
STOP
END
""")
test.write('f5.f', "\n")
test.subdir('repository', ['repository', 'include'],
[ 'repository', 'src' ])
test.subdir('work', ['work', 'src'])
test.write(['repository', 'include', 'iii.f'], "\n")
test.write(['work', 'src', 'fff.f'], """
PROGRAM FOO
INCLUDE 'iii.f'
INCLUDE 'jjj.f'
STOP
END
""")
test.write([ 'work', 'src', 'aaa.f'], """
PROGRAM FOO
INCLUDE 'bbb.f'
STOP
END
""")
test.write([ 'work', 'src', 'bbb.f'], "\n")
test.write([ 'repository', 'src', 'ccc.f'], """
PROGRAM FOO
INCLUDE 'ddd.f'
STOP
END
""")
test.write([ 'repository', 'src', 'ddd.f'], "\n")
test.write('fff90a.f90',"""
PROGRAM FOO
! Test comments - these includes should NOT be picked up
C INCLUDE 'fi.f'
# INCLUDE 'fi.f'
! INCLUDE 'fi.f'
INCLUDE 'f1.f' ! in-line comments are valid syntax
INCLUDE"fi.f" ! space is significant - this should be ignored
INCLUDE <f2.f> ! Absoft compiler allows greater than/less than delimiters
!
! Allow kind type parameters
INCLUDE kindType_"f3.f"
INCLUDE kind_Type_"f4.f"
!
! Test multiple statements per line - use various spacings between semicolons
incLUDE 'f5.f';include "f6.f" ; include <f7.f>; include 'f8.f' ;include kindType_'f9.f'
!
! Test various USE statement syntaxes
!
USE Mod01
use mod02
use use
USE mOD03, ONLY : someVar
USE MOD04 ,only:someVar
USE Mod05 , ONLY: someVar ! in-line comment
USE Mod06,ONLY :someVar,someOtherVar
USE mod07;USE mod08; USE mod09 ;USE mod10 ; USE mod11 ! Test various semicolon placements
use mod12 ;use mod13! Test comment at end of line
! USE modi
! USE modia ; use modib ! Scanner regexp will only ignore the first - this is a deficiency in the regexp
! USE modic ; ! use modid ! Scanner regexp should ignore both modules
USE mod14 !; USE modi ! Only ignore the second
USE mod15!;USE modi
USE mod16 ! ; USE modi
! Test semicolon syntax - use various spacings
USE :: mod17
USE::mod18
USE ::mod19 ; USE:: mod20
use, non_intrinsic :: mod21, ONLY : someVar ; use,intrinsic:: mod22
USE, NON_INTRINSIC::mod23 ; USE ,INTRINSIC ::mod24
USE mod25 ! Test USE statement at the beginning of line
; USE modi ! Scanner should ignore this since it isn't valid syntax
USEmodi ! No space in between USE and module name - ignore it
USE mod01 ! This one is a duplicate - there should only be one dependency to it.
STOP
END
""")
modules = ['mod01.mod', 'mod02.mod', 'mod03.mod', 'mod04.mod', 'mod05.mod',
'mod06.mod', 'mod07.mod', 'mod08.mod', 'mod09.mod', 'mod10.mod',
'mod11.mod', 'mod12.mod', 'mod13.mod', 'mod14.mod', 'mod15.mod',
'mod16.mod', 'mod17.mod', 'mod18.mod', 'mod19.mod', 'mod20.mod',
'mod21.mod', 'mod22.mod', 'mod23.mod', 'mod24.mod', 'mod25.mod']
for m in modules:
test.write(m, "\n")
test.subdir('modules')
test.write(['modules', 'use.mod'], "\n")
# define some helpers:
class DummyEnvironment(object):
    """Minimal construction-environment stand-in for FortranScan tests.

    Implements just the surface the Fortran scanner touches: the
    FORTRANPATH/FORTRANMODSUFFIX variables, '$'-variable substitution,
    and node factories backed by a private FS rooted at the test dir.
    """

    def __init__(self, listCppPath):
        self.path = listCppPath
        self.fs = SCons.Node.FS.FS(test.workpath(''))

    def Dictionary(self, *args):
        if not args:
            return {'FORTRANPATH': self.path, 'FORTRANMODSUFFIX': ".mod"}
        if len(args) == 1 and args[0] == 'FORTRANPATH':
            return self.path
        raise KeyError("Dummy environment only has FORTRANPATH attribute.")

    def has_key(self, key):
        return key in self.Dictionary()

    def __getitem__(self, key):
        return self.Dictionary()[key]

    def __setitem__(self, key, value):
        self.Dictionary()[key] = value

    def __delitem__(self, key):
        del self.Dictionary()[key]

    def subst(self, arg, target=None, source=None, conv=None):
        # '$NAME' expands to the variable's value; anything else is literal.
        if arg[0] == '$':
            return self[arg[1:]]
        return arg

    def subst_path(self, path, target=None, source=None, conv=None):
        if not isinstance(path, list):
            path = [path]
        return [self.subst(entry) for entry in path]

    def get_calculator(self):
        return None

    def get_factory(self, factory):
        return factory or self.fs.File

    def Dir(self, filename):
        return self.fs.Dir(filename)

    def File(self, filename):
        return self.fs.File(filename)
def deps_match(self, deps, headers):
    """Assert that scanned dependency nodes equal the expected paths.

    Both sides are normalized with os.path.normpath so the comparison is
    separator-insensitive across platforms.

    :param self: the unittest.TestCase instance doing the assertion
    :param deps: dependency nodes returned by the scanner (stringified)
    :param headers: expected path strings, in scanner order
    """
    scanned = [os.path.normpath(str(dep)) for dep in deps]
    expect = [os.path.normpath(header) for header in headers]
    # failUnless was deprecated in Python 2.7 and removed in Python 3.12;
    # assertTrue is the supported spelling.
    self.assertTrue(scanned == expect,
                    "expect %s != scanned %s" % (expect, scanned))
# define some tests:
class FortranScannerTestCase1(unittest.TestCase):
    def runTest(self):
        """fff1.f's INCLUDEs resolve beside the source when no path is set."""
        test.write('f1.f', "\n")
        test.write('f2.f', " INCLUDE 'fi.f'\n")
        env = DummyEnvironment([])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff1.f'), env, scanner.path(env))
        deps_match(self, found, ['f1.f', 'f2.f'])
        for name in ('f1.f', 'f2.f'):
            test.unlink(name)
class FortranScannerTestCase2(unittest.TestCase):
    def runTest(self):
        """Local f1.f/f2.f win over same-named headers on the d1 path."""
        test.write('f1.f', "\n")
        test.write('f2.f', " INCLUDE 'fi.f'\n")
        env = DummyEnvironment([test.workpath("d1")])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff1.f'), env, scanner.path(env))
        deps_match(self, found, ['f1.f', 'f2.f'])
        for name in ('f1.f', 'f2.f'):
            test.unlink(name)
class FortranScannerTestCase3(unittest.TestCase):
    def runTest(self):
        """Without local copies, includes are found through the d1 path."""
        env = DummyEnvironment([test.workpath("d1")])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff1.f'), env, scanner.path(env))
        deps_match(self, found, ['d1/f1.f', 'd1/f2.f'])
class FortranScannerTestCase4(unittest.TestCase):
    def runTest(self):
        """A nested INCLUDE in d1/f2.f leaves fff1.f's direct deps unchanged."""
        test.write(['d1', 'f2.f'], " INCLUDE 'fi.f'\n")
        env = DummyEnvironment([test.workpath("d1")])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff1.f'), env, scanner.path(env))
        deps_match(self, found, ['d1/f1.f', 'd1/f2.f'])
        # Restore the fixture for later test cases.
        test.write(['d1', 'f2.f'], "\n")
class FortranScannerTestCase5(unittest.TestCase):
    def runTest(self):
        """Relative includes 'd1/f2.f' and 'd2/f2.f' resolve under d1."""
        env = DummyEnvironment([test.workpath("d1")])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff2.f'), env, scanner.path(env))
        deps_match(self, found, ['d1/f2.f', 'd1/d2/f2.f', 'd1/f2.f'])
class FortranScannerTestCase6(unittest.TestCase):
    def runTest(self):
        """A local f2.f shadows the d1 copy for the plain 'f2.f' include."""
        test.write('f2.f', "\n")
        env = DummyEnvironment([test.workpath("d1")])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff2.f'), env, scanner.path(env))
        deps_match(self, found, ['d1/f2.f', 'd1/d2/f2.f', 'f2.f'])
        test.unlink('f2.f')
class FortranScannerTestCase7(unittest.TestCase):
    def runTest(self):
        """With d1/d2 listed before d1, earlier path entries win for 'f2.f'."""
        env = DummyEnvironment([test.workpath("d1/d2"), test.workpath("d1")])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff2.f'), env, scanner.path(env))
        deps_match(self, found, ['d1/f2.f', 'd1/d2/f2.f', 'd1/d2/f2.f'])
class FortranScannerTestCase8(unittest.TestCase):
    def runTest(self):
        """The source directory still beats both search-path entries."""
        test.write('f2.f', "\n")
        env = DummyEnvironment([test.workpath("d1/d2"), test.workpath("d1")])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff2.f'), env, scanner.path(env))
        deps_match(self, found, ['d1/f2.f', 'd1/d2/f2.f', 'f2.f'])
        test.unlink('f2.f')
class FortranScannerTestCase9(unittest.TestCase):
    def runTest(self):
        # Verifies the scanner calls rexists() on the scanned node by
        # installing an instrumented rexists implementation for it.
        test.write('f3.f', "\n")
        env = DummyEnvironment([])
        s = SCons.Scanner.Fortran.FortranScan()
        path = s.path(env)
        n = env.File('fff3.f')
        def my_rexists(s):
            # Record that we were called, then delegate to the node's
            # original rexists implementation (saved in the tag below).
            s.Tag('rexists_called', 1)
            return SCons.Node._rexists_map[s.GetTag('old_rexists')](s)
        n.Tag('old_rexists', n._func_rexists)
        SCons.Node._rexists_map[3] = my_rexists
        n._func_rexists = 3
        deps = s(n, env, path)
        # Make sure rexists() got called on the file node being
        # scanned, essential for cooperation with VariantDir functionality.
        assert n.GetTag('rexists_called')
        headers = ['d1/f3.f', 'f3.f']
        deps_match(self, deps, headers)
        test.unlink('f3.f')
class FortranScannerTestCase10(unittest.TestCase):
    def runTest(self):
        # Scan fff4.f twice: once with the path built from the top
        # directory, once with the path built for the 'subdir' directory.
        env = DummyEnvironment(["include"])
        s = SCons.Scanner.Fortran.FortranScan()
        path = s.path(env)
        deps1 = s(env.File('fff4.f'), env, path)
        env.fs.chdir(env.Dir('subdir'))
        dir = env.fs.getcwd()
        env.fs.chdir(env.Dir(''))
        path = s.path(env, dir)
        deps2 = s(env.File('#fff4.f'), env, path)
        # First scan yields an absolute workpath; second a relative one.
        headers1 = list(map(test.workpath, ['include/f4.f']))
        headers2 = ['include/f4.f']
        deps_match(self, deps1, headers1)
        deps_match(self, deps2, headers2)
class FortranScannerTestCase11(unittest.TestCase):
    def runTest(self):
        """A missing include ('not_there.f') raises a DependencyWarning."""
        SCons.Warnings.enableWarningClass(SCons.Warnings.DependencyWarning)

        class TestOut(object):
            def __call__(self, x):
                self.out = x

        to = TestOut()
        to.out = None
        SCons.Warnings._warningOut = to
        env = DummyEnvironment([])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff5.f'), env, scanner.path(env))
        # Did we catch the warning from not finding not_there.f?
        assert to.out
        # Only the resolvable include is reported as a dependency.
        deps_match(self, found, ['f5.f'])
class FortranScannerTestCase12(unittest.TestCase):
    def runTest(self):
        """Scanning a file that lives inside the 'include' directory itself."""
        env = DummyEnvironment([])
        env.fs.chdir(env.Dir('include'))
        scanner = SCons.Scanner.Fortran.FortranScan()
        search_path = scanner.path(env)
        test.write('include/fff4.f', test.read('fff4.f'))
        found = scanner(env.File('#include/fff4.f'), env, search_path)
        env.fs.chdir(env.Dir(''))
        deps_match(self, found, ['f4.f'])
        test.unlink('include/fff4.f')
class FortranScannerTestCase13(unittest.TestCase):
    def runTest(self):
        """Repository lookup plus a derived node in a not-yet-built dir."""
        os.chdir(test.workpath('work'))
        fs = SCons.Node.FS.FS(test.workpath('work'))
        fs.Repository(test.workpath('repository'))
        # Create a derived file in a directory that does not exist yet.
        # This was a bug at one time.
        derived = fs.File('include2/jjj.f')
        derived.builder = 1
        env = DummyEnvironment(['include', 'include2'])
        env.fs = fs
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(fs.File('src/fff.f'), env, scanner.path(env))
        deps_match(self, found,
                   [test.workpath('repository/include/iii.f'),
                    'include2/jjj.f'])
        os.chdir(test.workpath(''))
class FortranScannerTestCase14(unittest.TestCase):
    def runTest(self):
        """VariantDir duplicate/non-duplicate and Repository interactions."""
        os.chdir(test.workpath('work'))
        fs = SCons.Node.FS.FS(test.workpath('work'))
        fs.VariantDir('build1', 'src', 1)
        fs.VariantDir('build2', 'src', 0)
        fs.Repository(test.workpath('repository'))
        env = DummyEnvironment([])
        env.fs = fs
        scanner = SCons.Scanner.Fortran.FortranScan()
        search_path = scanner.path(env)
        # Duplicated variant dir: dependency appears inside build1.
        deps_match(self, scanner(fs.File('build1/aaa.f'), env, search_path),
                   ['build1/bbb.f'])
        # Non-duplicated variant dir: dependency points back at src.
        deps_match(self, scanner(fs.File('build2/aaa.f'), env, search_path),
                   ['src/bbb.f'])
        deps_match(self, scanner(fs.File('build1/ccc.f'), env, search_path),
                   ['build1/ddd.f'])
        # Repository-only source resolves into the repository tree.
        deps_match(self, scanner(fs.File('build2/ccc.f'), env, search_path),
                   [test.workpath('repository/src/ddd.f')])
        os.chdir(test.workpath(''))
class FortranScannerTestCase15(unittest.TestCase):
    def runTest(self):
        """Path entries are run through env.subst before being searched."""
        class SubstEnvironment(DummyEnvironment):
            def subst(self, arg, target=None, source=None, conv=None, test=test):
                if arg == "$junk":
                    return test.workpath("d1")
                else:
                    return arg

        test.write(['d1', 'f2.f'], " INCLUDE 'fi.f'\n")
        env = SubstEnvironment(["$junk"])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff1.f'), env, scanner.path(env))
        deps_match(self, found, ['d1/f1.f', 'd1/f2.f'])
        # Restore the fixture for later test cases.
        test.write(['d1', 'f2.f'], "\n")
class FortranScannerTestCase16(unittest.TestCase):
    def runTest(self):
        """Free-form F90: INCLUDE variants, comment handling, USE modules."""
        names = ['f%d.f' % i for i in range(1, 11)]
        for name in names:
            test.write(name, "\n")
        env = DummyEnvironment([test.workpath('modules')])
        scanner = SCons.Scanner.Fortran.FortranScan()
        found = scanner(env.File('fff90a.f90'), env, scanner.path(env))
        headers = ['f1.f', 'f2.f', 'f3.f', 'f4.f', 'f5.f',
                   'f6.f', 'f7.f', 'f8.f', 'f9.f']
        modules = (['mod%02d.mod' % i for i in range(1, 26)] +
                   ['modules/use.mod'])
        deps_match(self, found, headers + modules)
        for name in names:
            test.unlink(name)
def suite():
    """Aggregate every FortranScanner test case, in order, into one suite."""
    cases = (FortranScannerTestCase1, FortranScannerTestCase2,
             FortranScannerTestCase3, FortranScannerTestCase4,
             FortranScannerTestCase5, FortranScannerTestCase6,
             FortranScannerTestCase7, FortranScannerTestCase8,
             FortranScannerTestCase9, FortranScannerTestCase10,
             FortranScannerTestCase11, FortranScannerTestCase12,
             FortranScannerTestCase13, FortranScannerTestCase14,
             FortranScannerTestCase15, FortranScannerTestCase16)
    result = unittest.TestSuite()
    for case in cases:
        result.addTest(case())
    return result
if __name__ == "__main__":
    # Allow running this scanner test module directly via the SCons
    # TestUnit driver.
    TestUnit.run(suite())
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
|
# Copyright 2014 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from tempest.tests.lib import fake_http
# URL and tokens handed out by the fake identity service below.
FAKE_AUTH_URL = 'http://fake_uri.com/auth'
TOKEN = "fake_token"
ALT_TOKEN = "alt_fake_token"
# Fake Identity v2 constants
# One compute catalog entry with two regions; "FakeRegion" is the one
# tests are expected to match, "NoMatchRegion" the decoy.
COMPUTE_ENDPOINTS_V2 = {
    "endpoints": [
        {
            "adminURL": "http://fake_url/v2/first_endpoint/admin",
            "region": "NoMatchRegion",
            "internalURL": "http://fake_url/v2/first_endpoint/internal",
            "publicURL": "http://fake_url/v2/first_endpoint/public"
        },
        {
            "adminURL": "http://fake_url/v2/second_endpoint/admin",
            "region": "FakeRegion",
            "internalURL": "http://fake_url/v2/second_endpoint/internal",
            "publicURL": "http://fake_url/v2/second_endpoint/public"
        },
    ],
    "type": "compute",
    "name": "nova"
}
CATALOG_V2 = [COMPUTE_ENDPOINTS_V2, ]
# v2 token response for the alternate credentials (ALT_TOKEN/alt tenant).
ALT_IDENTITY_V2_RESPONSE = {
    "access": {
        "token": {
            "expires": "2020-01-01T00:00:10Z",
            "id": ALT_TOKEN,
            "tenant": {
                "id": "fake_alt_tenant_id"
            },
        },
        "user": {
            "id": "fake_alt_user_id",
            "password_expires_at": None,
        },
        "serviceCatalog": CATALOG_V2,
    },
}
# v2 token response for the primary credentials (TOKEN/fake tenant).
IDENTITY_V2_RESPONSE = {
    "access": {
        "token": {
            "expires": "2020-01-01T00:00:10Z",
            "id": TOKEN,
            "tenant": {
                "id": "fake_tenant_id"
            },
        },
        "user": {
            "id": "fake_user_id",
            "password_expires_at": None,
        },
        "serviceCatalog": CATALOG_V2,
    },
}
# Fake Identity V3 constants
# Compute catalog entry: two public endpoints (matching/decoy region)
# plus one admin-interface endpoint.
COMPUTE_ENDPOINTS_V3 = {
    "endpoints": [
        {
            "id": "first_compute_fake_service",
            "interface": "public",
            "region": "NoMatchRegion",
            "region_id": "NoMatchRegion",
            "url": "http://fake_url/v3/first_endpoint/api"
        },
        {
            "id": "second_fake_service",
            "interface": "public",
            "region": "FakeRegion",
            "region_id": "FakeRegion",
            "url": "http://fake_url/v3/second_endpoint/api"
        },
        {
            "id": "third_fake_service",
            "interface": "admin",
            "region": "MiddleEarthRegion",
            "region_id": "MiddleEarthRegion",
            "url": "http://fake_url/v3/third_endpoint/api"
        }
    ],
    "type": "compute",
    "id": "fake_compute_endpoint",
    "name": "nova"
}
CATALOG_V3 = [COMPUTE_ENDPOINTS_V3, ]
# Project-scoped v3 token response (carries a "project" section).
IDENTITY_V3_RESPONSE = {
    "token": {
        "audit_ids": ["ny5LA5YXToa_mAVO8Hnupw", "9NPTvsRDSkmsW61abP978Q"],
        "methods": [
            "token",
            "password"
        ],
        "expires_at": "2020-01-01T00:00:10.000123Z",
        "project": {
            "domain": {
                "id": "fake_domain_id",
                "name": "fake"
            },
            "id": "project_id",
            "name": "project_name"
        },
        "user": {
            "domain": {
                "id": "fake_domain_id",
                "name": "domain_name"
            },
            "id": "fake_user_id",
            "name": "username",
            "password_expires_at": None,
        },
        "issued_at": "2013-05-29T16:55:21.468960Z",
        "catalog": CATALOG_V3
    }
}
# Domain-scoped v3 token response ("domain" instead of "project").
IDENTITY_V3_RESPONSE_DOMAIN_SCOPE = {
    "token": {
        "audit_ids": ["ny5LA5YXToa_mAVO8Hnupw", "9NPTvsRDSkmsW61abP978Q"],
        "methods": [
            "token",
            "password"
        ],
        "expires_at": "2020-01-01T00:00:10.000123Z",
        "domain": {
            "id": "fake_domain_id",
            "name": "domain_name"
        },
        "user": {
            "domain": {
                "id": "fake_domain_id",
                "name": "domain_name"
            },
            "id": "fake_user_id",
            "name": "username",
            "password_expires_at": None,
        },
        "issued_at": "2013-05-29T16:55:21.468960Z",
        "catalog": CATALOG_V3
    }
}
# Unscoped v3 token response (no project/domain and no catalog).
IDENTITY_V3_RESPONSE_NO_SCOPE = {
    "token": {
        "audit_ids": ["ny5LA5YXToa_mAVO8Hnupw", "9NPTvsRDSkmsW61abP978Q"],
        "methods": [
            "token",
            "password"
        ],
        "expires_at": "2020-01-01T00:00:10.000123Z",
        "user": {
            "domain": {
                "id": "fake_domain_id",
                "name": "domain_name"
            },
            "id": "fake_user_id",
            "name": "username",
            "password_expires_at": None,
        },
        "issued_at": "2013-05-29T16:55:21.468960Z",
    }
}
# The alternate v3 credentials reuse the project-scoped response.
ALT_IDENTITY_V3 = IDENTITY_V3_RESPONSE
def _fake_v3_response(self, uri, method="GET", body=None, headers=None,
                      redirections=5, connection_type=None, log_req_body=None):
    """Fake httplib2-style request returning a project-scoped v3 token."""
    response = fake_http.fake_http_response({"x-subject-token": TOKEN},
                                            status=201)
    return response, json.dumps(IDENTITY_V3_RESPONSE)
def _fake_v3_response_domain_scope(self, uri, method="GET", body=None,
                                   headers=None, redirections=5,
                                   connection_type=None, log_req_body=None):
    """Fake httplib2-style request returning a domain-scoped v3 token."""
    response_headers = {
        "status": "201",
        "x-subject-token": TOKEN,
    }
    response = fake_http.fake_http_response(response_headers, status=201)
    return response, json.dumps(IDENTITY_V3_RESPONSE_DOMAIN_SCOPE)
def _fake_v3_response_no_scope(self, uri, method="GET", body=None,
                               headers=None, redirections=5,
                               connection_type=None, log_req_body=None):
    """Fake httplib2-style request returning an unscoped v3 token."""
    response_headers = {
        "status": "201",
        "x-subject-token": TOKEN,
    }
    response = fake_http.fake_http_response(response_headers, status=201)
    return response, json.dumps(IDENTITY_V3_RESPONSE_NO_SCOPE)
def _fake_v2_response(self, uri, method="GET", body=None, headers=None,
                      redirections=5, connection_type=None, log_req_body=None):
    """Fake httplib2-style request returning a v2 token (no header token)."""
    response_body = json.dumps(IDENTITY_V2_RESPONSE)
    return fake_http.fake_http_response({}, status=200), response_body
def _fake_auth_failure_response():
    """Fake 401 reply for authentication failures."""
    # The response body isn't really used in this case, but send it anyway
    # so any future rest-client change has something sane to parse.
    error_body = json.dumps({
        "unauthorized": {
            "message": "Unauthorized",
            "code": "401"
        }
    })
    return fake_http.fake_http_response({}, status=401), error_body
|
|
"""
Handle a region file, containing 32x32 chunks.
For more info of the region file format look:
http://www.minecraftwiki.net/wiki/Region_file_format
"""
from .nbt import NBTFile, MalformedFileError
from struct import pack, unpack
from gzip import GzipFile
try:
    # "Mapping" lives in collections.abc since Python 3.3; the
    # backwards-compatibility alias in "collections" was removed in 3.10.
    from collections.abc import Mapping
except ImportError:  # Python 2 fallback
    from collections import Mapping
import zlib
import gzip
from io import BytesIO
import math, time
from os.path import getsize
from os import SEEK_END
# constants
SECTOR_LENGTH = 4096
"""Constant indicating the length of a sector. A Region file is divided in sectors of 4096 bytes each."""
# Status is a number representing:
# -5 = Error, the chunk is overlapping with another chunk
# -4 = Error, the chunk length is too large to fit in the sector length in the region header
# -3 = Error, chunk header has a 0 length
# -2 = Error, chunk inside the header of the region file
# -1 = Error, chunk partially/completely outside of file
# 0 = Ok
# 1 = Chunk non-existent yet
STATUS_CHUNK_OVERLAPPING = -5
"""Constant indicating an error status: the chunk is allocated a sector already occupied by another chunk"""
STATUS_CHUNK_MISMATCHED_LENGTHS = -4
"""Constant indicating an error status: the region header length and the chunk length are incompatible"""
STATUS_CHUNK_ZERO_LENGTH = -3
"""Constant indicating an error status: chunk header has a 0 length"""
STATUS_CHUNK_IN_HEADER = -2
"""Constant indicating an error status: chunk inside the header of the region file"""
STATUS_CHUNK_OUT_OF_FILE = -1
"""Constant indicating an error status: chunk partially/completely outside of file"""
STATUS_CHUNK_OK = 0
"""Constant indicating a normal status: the chunk exists and the metadata is valid"""
STATUS_CHUNK_NOT_CREATED = 1
"""Constant indicating a normal status: the chunk does not exist"""
COMPRESSION_NONE = 0
"""Constant indicating that the chunk is not compressed."""
COMPRESSION_GZIP = 1
"""Constant indicating that the chunk is GZip compressed."""
COMPRESSION_ZLIB = 2
"""Constant indicating that the chunk is zlib compressed."""
# TODO: reconsider these errors. where are they catched? Where would an implementation make a difference in handling the different exceptions.
class RegionFileFormatError(Exception):
"""Base class for all file format errors.
Note: InconceivedChunk is not a child class, because it is not considered a format error."""
def __init__(self, msg=""):
self.msg = msg
def __str__(self):
return self.msg
class NoRegionHeader(RegionFileFormatError):
"""The size of the region file is too small to contain a header."""
class RegionHeaderError(RegionFileFormatError):
"""Error in the header of the region file for a given chunk."""
class ChunkHeaderError(RegionFileFormatError):
"""Error in the header of a chunk, included the bytes of length and byte version."""
class ChunkDataError(RegionFileFormatError):
"""Error in the data of a chunk."""
class InconceivedChunk(LookupError):
"""Specified chunk has not yet been generated."""
def __init__(self, msg=""):
self.msg = msg
class ChunkMetadata(object):
"""
Metadata for a particular chunk found in the 8 kiByte header and 5-byte chunk header.
"""
def __init__(self, x, z):
self.x = x
"""x-coordinate of the chunk in the file"""
self.z = z
"""z-coordinate of the chunk in the file"""
self.blockstart = 0
"""start of the chunk block, counted in 4 kiByte sectors from the
start of the file. (24 bit int)"""
self.blocklength = 0
"""amount of 4 kiBytes sectors in the block (8 bit int)"""
self.timestamp = 0
"""a Unix timestamps (seconds since epoch) (32 bits), found in the
second sector in the file."""
self.length = 0
"""length of the block in bytes. This excludes the 4-byte length header,
and includes the 1-byte compression byte. (32 bit int)"""
self.compression = None
"""type of compression used for the chunk block. (8 bit int).
- 0: uncompressed
- 1: gzip compression
- 2: zlib compression"""
self.status = STATUS_CHUNK_NOT_CREATED
"""status as determined from blockstart, blocklength, length, file size
and location of other chunks in the file.
- STATUS_CHUNK_OVERLAPPING
- STATUS_CHUNK_MISMATCHED_LENGTHS
- STATUS_CHUNK_ZERO_LENGTH
- STATUS_CHUNK_IN_HEADER
- STATUS_CHUNK_OUT_OF_FILE
- STATUS_CHUNK_OK
- STATUS_CHUNK_NOT_CREATED"""
def __str__(self):
return "%s(%d, %d, sector=%s, blocklength=%s, timestamp=%s, bytelength=%s, compression=%s, status=%s)" % \
(self.__class__.__name__, self.x, self.z, self.blockstart, self.blocklength, self.timestamp, \
self.length, self.compression, self.status)
def __repr__(self):
return "%s(%d,%d)" % (self.__class__.__name__, self.x, self.z)
def requiredblocks(self):
# slightly faster variant of: floor(self.length + 4) / 4096))
return (self.length + 3 + SECTOR_LENGTH) // SECTOR_LENGTH
def is_created(self):
"""return True if this chunk is created according to the header.
This includes chunks which are not readable for other reasons."""
return self.blockstart != 0
class _HeaderWrapper(Mapping):
"""Wrapper around self.metadata to emulate the old self.header variable"""
def __init__(self, metadata):
self.metadata = metadata
def __getitem__(self, xz):
m = self.metadata[xz]
return (m.blockstart, m.blocklength, m.timestamp, m.status)
def __iter__(self):
return iter(self.metadata) # iterates of the keys
def __len__(self):
return len(self.metadata)
class _ChunkHeaderWrapper(Mapping):
"""Wrapper around self.metadata to emulate the old self.chunk_headers variable"""
def __init__(self, metadata):
self.metadata = metadata
def __getitem__(self, xz):
m = self.metadata[xz]
return (m.length if m.length > 0 else None, m.compression, m.status)
def __iter__(self):
return iter(self.metadata) # iterates of the keys
def __len__(self):
return len(self.metadata)
class RegionFile(object):
"""A convenience class for extracting NBT files from the Minecraft Beta Region Format."""
# Redefine constants for backward compatibility.
STATUS_CHUNK_OVERLAPPING = STATUS_CHUNK_OVERLAPPING
"""Constant indicating an error status: the chunk is allocated a sector
already occupied by another chunk.
Deprecated. Use :const:`nbt.region.STATUS_CHUNK_OVERLAPPING` instead."""
STATUS_CHUNK_MISMATCHED_LENGTHS = STATUS_CHUNK_MISMATCHED_LENGTHS
"""Constant indicating an error status: the region header length and the chunk
length are incompatible. Deprecated. Use :const:`nbt.region.STATUS_CHUNK_MISMATCHED_LENGTHS` instead."""
STATUS_CHUNK_ZERO_LENGTH = STATUS_CHUNK_ZERO_LENGTH
"""Constant indicating an error status: chunk header has a 0 length.
Deprecated. Use :const:`nbt.region.STATUS_CHUNK_ZERO_LENGTH` instead."""
STATUS_CHUNK_IN_HEADER = STATUS_CHUNK_IN_HEADER
"""Constant indicating an error status: chunk inside the header of the region file.
Deprecated. Use :const:`nbt.region.STATUS_CHUNK_IN_HEADER` instead."""
STATUS_CHUNK_OUT_OF_FILE = STATUS_CHUNK_OUT_OF_FILE
"""Constant indicating an error status: chunk partially/completely outside of file.
Deprecated. Use :const:`nbt.region.STATUS_CHUNK_OUT_OF_FILE` instead."""
STATUS_CHUNK_OK = STATUS_CHUNK_OK
"""Constant indicating an normal status: the chunk exists and the metadata is valid.
Deprecated. Use :const:`nbt.region.STATUS_CHUNK_OK` instead."""
STATUS_CHUNK_NOT_CREATED = STATUS_CHUNK_NOT_CREATED
"""Constant indicating an normal status: the chunk does not exist.
Deprecated. Use :const:`nbt.region.STATUS_CHUNK_NOT_CREATED` instead."""
def __init__(self, filename=None, fileobj=None):
"""
Read a region file by filename or file object.
If a fileobj is specified, it is not closed after use; it is the callers responibility to close it.
"""
self.file = None
self.filename = None
self._closefile = False
if filename:
self.filename = filename
self.file = open(filename, 'r+b') # open for read and write in binary mode
self._closefile = True
elif fileobj:
if hasattr(fileobj, 'name'):
self.filename = fileobj.name
self.file = fileobj
elif not self.file:
raise ValueError("RegionFile(): Need to specify either a filename or a file object")
# Some variables
self.metadata = {}
"""
dict containing ChunkMetadata objects, gathered from metadata found in the
8 kiByte header and 5-byte chunk header.
``metadata[x, z]: ChunkMetadata()``
"""
self.header = _HeaderWrapper(self.metadata)
"""
dict containing the metadata found in the 8 kiByte header:
``header[x, z]: (offset, sectionlength, timestamp, status)``
:offset: counts in 4 kiByte sectors, starting from the start of the file. (24 bit int)
:blocklength: is in 4 kiByte sectors (8 bit int)
:timestamp: is a Unix timestamps (seconds since epoch) (32 bits)
:status: can be any of:
- STATUS_CHUNK_OVERLAPPING
- STATUS_CHUNK_MISMATCHED_LENGTHS
- STATUS_CHUNK_ZERO_LENGTH
- STATUS_CHUNK_IN_HEADER
- STATUS_CHUNK_OUT_OF_FILE
- STATUS_CHUNK_OK
- STATUS_CHUNK_NOT_CREATED
Deprecated. Use :attr:`metadata` instead.
"""
self.chunk_headers = _ChunkHeaderWrapper(self.metadata)
"""
dict containing the metadata found in each chunk block:
``chunk_headers[x, z]: (length, compression, chunk_status)``
:chunk length: in bytes, starting from the compression byte (32 bit int)
:compression: is 1 (Gzip) or 2 (bzip) (8 bit int)
:chunk_status: is equal to status in :attr:`header`.
If the chunk is not defined, the tuple is (None, None, STATUS_CHUNK_NOT_CREATED)
Deprecated. Use :attr:`metadata` instead.
"""
self._init_header()
self._parse_header()
self._parse_chunk_headers()
def get_size(self):
""" Returns the file size in bytes. """
# seek(0,2) jumps to 0-bytes from the end of the file.
# Python 2.6 support: seek does not yet return the position.
self.file.seek(0, SEEK_END)
return self.file.tell()
@staticmethod
def _bytes_to_sector(bsize, sectorlength=SECTOR_LENGTH):
"""Given a size in bytes, return how many sections of length sectorlen are required to contain it.
This is equivalent to ceil(bsize/sectorlen), if Python would use floating
points for division, and integers for ceil(), rather than the other way around."""
sectors, remainder = divmod(bsize, sectorlength)
return sectors if remainder == 0 else sectors + 1
def close(self):
if self._closefile:
try:
self.file.close()
except IOError:
pass
def __del__(self):
self.close()
# Parent object() has no __del__ method, otherwise it should be called here.
def _init_file(self):
"""Initialise the file header. This will erase any data previously in the file."""
header_length = 2*SECTOR_LENGTH
if self.size > header_length:
self.file.truncate(header_length)
self.file.seek(0)
self.file.write(header_length*b'\x00')
self.size = header_length
def _init_header(self):
for x in range(32):
for z in range(32):
self.metadata[x,z] = ChunkMetadata(x, z)
def _parse_header(self):
"""Read the region header and stores: offset, length and status."""
# update the file size, needed when parse_header is called after
# we have unlinked a chunk or writed a new one
self.size = self.get_size()
if self.size == 0:
# Some region files seems to have 0 bytes of size, and
# Minecraft handle them without problems. Take them
# as empty region files.
return
elif self.size < 2*SECTOR_LENGTH:
raise NoRegionHeader('The region file is %d bytes, too small in size to have a header.' % self.size)
for index in range(0, SECTOR_LENGTH, 4):
x = int(index//4) % 32
z = int(index//4)//32
m = self.metadata[x, z]
self.file.seek(index)
offset, length = unpack(">IB", b"\0" + self.file.read(4))
m.blockstart, m.blocklength = offset, length
self.file.seek(index + SECTOR_LENGTH)
m.timestamp = unpack(">I", self.file.read(4))[0]
if offset == 0 and length == 0:
m.status = STATUS_CHUNK_NOT_CREATED
elif length == 0:
m.status = STATUS_CHUNK_ZERO_LENGTH
elif offset < 2 and offset != 0:
m.status = STATUS_CHUNK_IN_HEADER
elif SECTOR_LENGTH * offset + 5 > self.size:
# Chunk header can't be read.
m.status = STATUS_CHUNK_OUT_OF_FILE
else:
m.status = STATUS_CHUNK_OK
# Check for chunks overlapping in the file
for chunks in self._sectors()[2:]:
if len(chunks) > 1:
# overlapping chunks
for m in chunks:
# Update status, unless these more severe errors take precedence
if m.status not in (STATUS_CHUNK_ZERO_LENGTH, STATUS_CHUNK_IN_HEADER,
STATUS_CHUNK_OUT_OF_FILE):
m.status = STATUS_CHUNK_OVERLAPPING
def _parse_chunk_headers(self):
for x in range(32):
for z in range(32):
m = self.metadata[x, z]
if m.status not in (STATUS_CHUNK_OK, STATUS_CHUNK_OVERLAPPING, \
STATUS_CHUNK_MISMATCHED_LENGTHS):
# skip to next if status is NOT_CREATED, OUT_OF_FILE, IN_HEADER,
# ZERO_LENGTH or anything else.
continue
try:
self.file.seek(m.blockstart*SECTOR_LENGTH) # offset comes in sectors of 4096 bytes
length = unpack(">I", self.file.read(4))
m.length = length[0] # unpack always returns a tuple, even unpacking one element
compression = unpack(">B",self.file.read(1))
m.compression = compression[0]
except IOError:
m.status = STATUS_CHUNK_OUT_OF_FILE
continue
if m.blockstart*SECTOR_LENGTH + m.length + 4 > self.size:
m.status = STATUS_CHUNK_OUT_OF_FILE
elif m.length <= 1: # chunk can't be zero length
m.status = STATUS_CHUNK_ZERO_LENGTH
elif m.length + 4 > m.blocklength * SECTOR_LENGTH:
# There are not enough sectors allocated for the whole block
m.status = STATUS_CHUNK_MISMATCHED_LENGTHS
def _sectors(self, ignore_chunk=None):
"""
Return a list of all sectors, each sector is a list of chunks occupying the block.
"""
sectorsize = self._bytes_to_sector(self.size)
sectors = [[] for s in range(sectorsize)]
sectors[0] = True # locations
sectors[1] = True # timestamps
for m in self.metadata.values():
if not m.is_created():
continue
if ignore_chunk == m:
continue
if m.blocklength and m.blockstart:
blockend = m.blockstart + max(m.blocklength, m.requiredblocks())
# Ensure 2 <= b < sectorsize, as well as m.blockstart <= b < blockend
for b in range(max(m.blockstart, 2), min(blockend, sectorsize)):
sectors[b].append(m)
return sectors
def _locate_free_sectors(self, ignore_chunk=None):
"""Return a list of booleans, indicating the free sectors."""
sectors = self._sectors(ignore_chunk=ignore_chunk)
# Sectors are considered free, if the value is an empty list.
return [not i for i in sectors]
def _find_free_location(self, free_locations, required_sectors=1, preferred=None):
"""
Given a list of booleans, find a list of <required_sectors> consecutive True values.
If no such list is found, return length(free_locations).
Assumes first two values are always False.
"""
# check preferred (current) location
if preferred and all(free_locations[preferred:preferred+required_sectors]):
return preferred
# check other locations
# Note: the slicing may exceed the free_location boundary.
# This implementation relies on the fact that slicing will work anyway,
# and the any() function returns True for an empty list. This ensures
# that blocks outside the file are considered Free as well.
i = 2 # First two sectors are in use by the header
while i < len(free_locations):
if all(free_locations[i:i+required_sectors]):
break
i += 1
return i
def get_metadata(self):
"""
Return a list of the metadata of each chunk that is defined in te regionfile.
This includes chunks which may not be readable for whatever reason,
but excludes chunks that are not yet defined.
"""
return [m for m in self.metadata.values() if m.is_created()]
def get_chunks(self):
"""
Return the x,z coordinates and length of the chunks that are defined in te regionfile.
This includes chunks which may not be readable for whatever reason.
Warning: despite the name, this function does not actually return the chunk,
but merely it's metadata. Use get_chunk(x,z) to get the NBTFile, and then Chunk()
to get the actual chunk.
This method is deprecated. Use :meth:`get_metadata` instead.
"""
return self.get_chunk_coords()
def get_chunk_coords(self):
"""
Return the x,z coordinates and length of the chunks that are defined in te regionfile.
This includes chunks which may not be readable for whatever reason.
This method is deprecated. Use :meth:`get_metadata` instead.
"""
chunks = []
for x in range(32):
for z in range(32):
m = self.metadata[x,z]
if m.is_created():
chunks.append({'x': x, 'z': z, 'length': m.blocklength})
return chunks
def iter_chunks(self):
"""
Yield each readable chunk present in the region.
Chunks that can not be read for whatever reason are silently skipped.
Warning: this function returns a :class:`nbt.nbt.NBTFile` object, use ``Chunk(nbtfile)`` to get a
:class:`nbt.chunk.Chunk` instance.
"""
for m in self.get_metadata():
try:
yield self.get_chunk(m.x, m.z)
except RegionFileFormatError:
pass
def __iter__(self):
return self.iter_chunks()
def get_timestamp(self, x, z):
"""Return the timestamp of when this region file was last modified."""
# TODO: raise an exception if chunk does not exist?
# TODO: return a datetime.datetime object using datetime.fromtimestamp()
return self.metadata[x,z].timestamp
def chunk_count(self):
"""Return the number of defined chunks. This includes potentially corrupt chunks."""
return len(self.get_metadata())
def get_blockdata(self, x, z):
"""
Return the decompressed binary data representing a chunk.
May raise a RegionFileFormatError().
If decompression of the data succeeds, all available data is returned,
even if it is shorter than what is specified in the header (e.g. in case
of a truncated while and non-compressed data).
"""
# read metadata block
m = self.metadata[x, z]
if m.status == STATUS_CHUNK_NOT_CREATED:
raise InconceivedChunk("Chunk is not created")
elif m.status == STATUS_CHUNK_IN_HEADER:
raise RegionHeaderError('Chunk %d,%d is in the region header' % (x,z))
elif m.status == STATUS_CHUNK_OUT_OF_FILE and (m.length <= 1 or m.compression == None):
# Chunk header is outside of the file.
raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x,z))
elif m.status == STATUS_CHUNK_ZERO_LENGTH:
if m.blocklength == 0:
raise RegionHeaderError('Chunk %d,%d has zero length' % (x,z))
else:
raise ChunkHeaderError('Chunk %d,%d has zero length' % (x,z))
elif m.blockstart * SECTOR_LENGTH + 5 >= self.size:
raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x,z))
# status is STATUS_CHUNK_OK, STATUS_CHUNK_MISMATCHED_LENGTHS, STATUS_CHUNK_OVERLAPPING
# or STATUS_CHUNK_OUT_OF_FILE.
# The chunk is always read, but in case of an error, the exception may be different
# based on the status.
err = None
try:
# offset comes in sectors of 4096 bytes + length bytes + compression byte
self.file.seek(m.blockstart * SECTOR_LENGTH + 5)
# Do not read past the length of the file.
# The length in the file includes the compression byte, hence the -1.
length = min(m.length - 1, self.size - (m.blockstart * SECTOR_LENGTH + 5))
chunk = self.file.read(length)
if (m.compression == COMPRESSION_GZIP):
# Python 3.1 and earlier do not yet support gzip.decompress(chunk)
f = gzip.GzipFile(fileobj=BytesIO(chunk))
chunk = bytes(f.read())
f.close()
elif (m.compression == COMPRESSION_ZLIB):
chunk = zlib.decompress(chunk)
elif m.compression != COMPRESSION_NONE:
raise ChunkDataError('Unknown chunk compression/format (%s)' % m.compression)
return chunk
except RegionFileFormatError:
raise
except Exception as e:
# Deliberately catch the Exception and re-raise.
# The details in gzip/zlib/nbt are irrelevant, just that the data is garbled.
err = '%s' % e # avoid str(e) due to Unicode issues in Python 2.
if err:
# don't raise during exception handling to avoid the warning
# "During handling of the above exception, another exception occurred".
# Python 3.3 solution (see PEP 409 & 415): "raise ChunkDataError(str(e)) from None"
if m.status == STATUS_CHUNK_MISMATCHED_LENGTHS:
raise ChunkHeaderError('The length in region header and the length in the header of chunk %d,%d are incompatible' % (x,z))
elif m.status == STATUS_CHUNK_OVERLAPPING:
raise ChunkHeaderError('Chunk %d,%d is overlapping with another chunk' % (x,z))
else:
raise ChunkDataError(err)
def get_nbt(self, x, z):
"""
Return a NBTFile of the specified chunk.
Raise InconceivedChunk if the chunk is not included in the file.
"""
data = self.get_blockdata(x, z) # This may raise a RegionFileFormatError.
data = BytesIO(data)
err = None
try:
return NBTFile(buffer=data)
# this may raise a MalformedFileError. Convert to ChunkDataError.
except MalformedFileError as e:
err = '%s' % e # avoid str(e) due to Unicode issues in Python 2.
if err:
raise ChunkDataError(err)
def get_chunk(self, x, z):
"""
Return a NBTFile of the specified chunk.
Raise InconceivedChunk if the chunk is not included in the file.
Note: this function may be changed later to return a Chunk() rather
than a NBTFile() object. To keep the old functionality, use get_nbt().
"""
return self.get_nbt(x, z)
def write_blockdata(self, x, z, data):
"""
Compress the data, write it to file, and add pointers in the header so it
can be found as chunk(x,z).
"""
data = zlib.compress(data) # use zlib compression, rather than Gzip
length = len(data)
# 5 extra bytes are required for the chunk block header
nsectors = self._bytes_to_sector(length + 5)
if nsectors >= 256:
raise ChunkDataError("Chunk is too large (%d sectors exceeds 255 maximum)" % (nsectors))
# Ensure file has a header
if self.size < 2*SECTOR_LENGTH:
self._init_file()
# search for a place where to write the chunk:
current = self.metadata[x, z]
free_sectors = self._locate_free_sectors(ignore_chunk=current)
sector = self._find_free_location(free_sectors, nsectors, preferred=current.blockstart)
# If file is smaller than sector*SECTOR_LENGTH (it was truncated), pad it with zeroes.
if self.size < sector*SECTOR_LENGTH:
# jump to end of file
self.file.seek(0, SEEK_END)
self.file.write((sector*SECTOR_LENGTH - self.size) * b"\x00")
assert self.file.tell() == sector*SECTOR_LENGTH
# write out chunk to region
self.file.seek(sector*SECTOR_LENGTH)
self.file.write(pack(">I", length + 1)) #length field
self.file.write(pack(">B", COMPRESSION_ZLIB)) #compression field
self.file.write(data) #compressed data
# Write zeros up to the end of the chunk
remaining_length = SECTOR_LENGTH * nsectors - length - 5
self.file.write(remaining_length * b"\x00")
#seek to header record and write offset and length records
self.file.seek(4 * (x + 32*z))
self.file.write(pack(">IB", sector, nsectors)[1:])
#write timestamp
self.file.seek(SECTOR_LENGTH + 4 * (x + 32*z))
timestamp = int(time.time())
self.file.write(pack(">I", timestamp))
# Update free_sectors with newly written block
# This is required for calculating file truncation and zeroing freed blocks.
free_sectors.extend((sector + nsectors - len(free_sectors)) * [True])
for s in range(sector, sector + nsectors):
free_sectors[s] = False
# Check if file should be truncated:
truncate_count = list(reversed(free_sectors)).index(False)
if truncate_count > 0:
self.size = SECTOR_LENGTH * (len(free_sectors) - truncate_count)
self.file.truncate(self.size)
free_sectors = free_sectors[:-truncate_count]
# Calculate freed sectors
for s in range(current.blockstart, min(current.blockstart + current.blocklength, len(free_sectors))):
if free_sectors[s]:
# zero sector s
self.file.seek(SECTOR_LENGTH*s)
self.file.write(SECTOR_LENGTH*b'\x00')
# update file size and header information
self.size = max((sector + nsectors)*SECTOR_LENGTH, self.size)
assert self.get_size() == self.size
current.blockstart = sector
current.blocklength = nsectors
current.status = STATUS_CHUNK_OK
current.timestamp = timestamp
current.length = length + 1
current.compression = COMPRESSION_ZLIB
# self.parse_header()
# self.parse_chunk_headers()
def write_chunk(self, x, z, nbt_file):
"""
Pack the NBT file as binary data, and write to file in a compressed format.
"""
data = BytesIO()
nbt_file.write_file(buffer=data) # render to buffer; uncompressed
self.write_blockdata(x, z, data.getvalue())
def unlink_chunk(self, x, z):
"""
Remove a chunk from the header of the region file.
Fragmentation is not a problem, chunks are written to free sectors when possible.
"""
# This function fails for an empty file. If that is the case, just return.
if self.size < 2*SECTOR_LENGTH:
return
# zero the region header for the chunk (offset length and time)
self.file.seek(4 * (x + 32*z))
self.file.write(pack(">IB", 0, 0)[1:])
self.file.seek(SECTOR_LENGTH + 4 * (x + 32*z))
self.file.write(pack(">I", 0))
# Check if file should be truncated:
current = self.metadata[x, z]
free_sectors = self._locate_free_sectors(ignore_chunk=current)
truncate_count = list(reversed(free_sectors)).index(False)
if truncate_count > 0:
self.size = SECTOR_LENGTH * (len(free_sectors) - truncate_count)
self.file.truncate(self.size)
free_sectors = free_sectors[:-truncate_count]
# Calculate freed sectors
for s in range(current.blockstart, min(current.blockstart + current.blocklength, len(free_sectors))):
if free_sectors[s]:
# zero sector s
self.file.seek(SECTOR_LENGTH*s)
self.file.write(SECTOR_LENGTH*b'\x00')
# update the header
self.metadata[x, z] = ChunkMetadata(x, z)
def _classname(self):
"""Return the fully qualified class name."""
if self.__class__.__module__ in (None,):
return self.__class__.__name__
else:
return "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
def __str__(self):
if self.filename:
return "<%s(%r)>" % (self._classname(), self.filename)
else:
return '<%s object at %d>' % (self._classname(), id(self))
def __repr__(self):
if self.filename:
return "%s(%r)" % (self._classname(), self.filename)
else:
return '<%s object at %d>' % (self._classname(), id(self))
|
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
import abc
import argparse
from cliff import lister
from cliff import show
from oslo_serialization import jsonutils
import six
from neutronclient._i18n import _
from neutronclient.common import exceptions
from neutronclient.common import utils
from neutronclient.neutron import v2_0 as neutronV20
def get_tenant_id(args, client):
return (args.pos_tenant_id or args.tenant_id or
client.get_quotas_tenant()['tenant']['tenant_id'])
class DeleteQuota(neutronV20.NeutronCommand):
"""Delete defined quotas of a given tenant."""
resource = 'quota'
def get_parser(self, prog_name):
parser = super(DeleteQuota, self).get_parser(prog_name)
parser.add_argument(
'--tenant-id', metavar='tenant-id',
help=_('The owner tenant ID.'))
parser.add_argument(
'--tenant_id',
help=argparse.SUPPRESS)
parser.add_argument(
'pos_tenant_id',
help=argparse.SUPPRESS, nargs='?')
return parser
def take_action(self, parsed_args):
neutron_client = self.get_client()
tenant_id = get_tenant_id(parsed_args, neutron_client)
obj_deleter = getattr(neutron_client,
"delete_%s" % self.resource)
obj_deleter(tenant_id)
print((_('Deleted %(resource)s: %(tenant_id)s')
% {'tenant_id': tenant_id,
'resource': self.resource}),
file=self.app.stdout)
return
class ListQuota(neutronV20.NeutronCommand, lister.Lister):
"""List quotas of all tenants who have non-default quota values."""
resource = 'quota'
def get_parser(self, prog_name):
parser = super(ListQuota, self).get_parser(prog_name)
return parser
def take_action(self, parsed_args):
neutron_client = self.get_client()
search_opts = {}
self.log.debug('search options: %s', search_opts)
obj_lister = getattr(neutron_client,
"list_%ss" % self.resource)
data = obj_lister(**search_opts)
info = []
collection = self.resource + "s"
if collection in data:
info = data[collection]
_columns = len(info) > 0 and sorted(info[0].keys()) or []
return (_columns, (utils.get_item_properties(s, _columns)
for s in info))
class ShowQuotaBase(neutronV20.NeutronCommand, show.ShowOne):
"""Base class to show quotas of a given tenant."""
resource = "quota"
@abc.abstractmethod
def retrieve_data(self, tenant_id, neutron_client):
"""Retrieve data using neutron client for the given tenant."""
def get_parser(self, prog_name):
parser = super(ShowQuotaBase, self).get_parser(prog_name)
parser.add_argument(
'--tenant-id', metavar='tenant-id',
help=_('The owner tenant ID.'))
parser.add_argument(
'--tenant_id',
help=argparse.SUPPRESS)
# allow people to do neutron quota-show <tenant-id>.
# we use a different name for this because the default will
# override whatever is in the named arg otherwise.
parser.add_argument(
'pos_tenant_id',
help=argparse.SUPPRESS, nargs='?')
return parser
def take_action(self, parsed_args):
neutron_client = self.get_client()
tenant_id = get_tenant_id(parsed_args, neutron_client)
data = self.retrieve_data(tenant_id, neutron_client)
if self.resource in data:
return zip(*sorted(six.iteritems(data[self.resource])))
return
class ShowQuota(ShowQuotaBase):
"""Show quotas for a given tenant."""
def retrieve_data(self, tenant_id, neutron_client):
return neutron_client.show_quota(tenant_id)
class ShowQuotaDefault(ShowQuotaBase):
"""Show default quotas for a given tenant."""
def retrieve_data(self, tenant_id, neutron_client):
return neutron_client.show_quota_default(tenant_id)
class UpdateQuota(neutronV20.NeutronCommand, show.ShowOne):
"""Update a given tenant's quotas."""
resource = 'quota'
def get_parser(self, prog_name):
parser = super(UpdateQuota, self).get_parser(prog_name)
parser.add_argument(
'--tenant-id', metavar='tenant-id',
help=_('The owner tenant ID.'))
parser.add_argument(
'--tenant_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--network', metavar='networks',
help=_('The limit of networks.'))
parser.add_argument(
'--subnet', metavar='subnets',
help=_('The limit of subnets.'))
parser.add_argument(
'--port', metavar='ports',
help=_('The limit of ports.'))
parser.add_argument(
'--router', metavar='routers',
help=_('The limit of routers.'))
parser.add_argument(
'--floatingip', metavar='floatingips',
help=_('The limit of floating IPs.'))
parser.add_argument(
'--security-group', metavar='security_groups',
help=_('The limit of security groups.'))
parser.add_argument(
'--security-group-rule', metavar='security_group_rules',
help=_('The limit of security groups rules.'))
parser.add_argument(
'--vip', metavar='vips',
help=_('The limit of vips.'))
parser.add_argument(
'--pool', metavar='pools',
help=_('The limit of pools.'))
parser.add_argument(
'--member', metavar='members',
help=_('The limit of pool members.'))
parser.add_argument(
'--health-monitor', metavar='health_monitors',
dest='healthmonitor',
help=_('The limit of health monitors.'))
parser.add_argument(
'--loadbalancer', metavar='loadbalancers',
help=_('The limit of load balancers.'))
parser.add_argument(
'--listener', metavar='listeners',
help=_('The limit of listeners.'))
parser.add_argument(
'--rbac-policy', metavar='rbac_policies',
help=_('The limit of RBAC policies.'))
parser.add_argument(
'pos_tenant_id',
help=argparse.SUPPRESS, nargs='?')
return parser
def _validate_int(self, name, value):
try:
return_value = int(value)
except Exception:
message = (_('Quota limit for %(name)s must be an integer') %
{'name': name})
raise exceptions.CommandError(message=message)
return return_value
def args2body(self, parsed_args):
quota = {}
for resource in ('network', 'subnet', 'port', 'router', 'floatingip',
'security_group', 'security_group_rule',
'vip', 'pool', 'member', 'healthmonitor',
'loadbalancer', 'listener', 'rbac_policy'):
if getattr(parsed_args, resource):
quota[resource] = self._validate_int(
resource,
getattr(parsed_args, resource))
if not quota:
raise exceptions.CommandError(
message=_('Must specify a valid resource with new quota '
'value'))
return {self.resource: quota}
def take_action(self, parsed_args):
neutron_client = self.get_client()
_extra_values = neutronV20.parse_args_to_dict(self.values_specs)
neutronV20._merge_args(self, parsed_args, _extra_values,
self.values_specs)
body = self.args2body(parsed_args)
if self.resource in body:
body[self.resource].update(_extra_values)
else:
body[self.resource] = _extra_values
obj_updator = getattr(neutron_client,
"update_%s" % self.resource)
tenant_id = get_tenant_id(parsed_args, neutron_client)
data = obj_updator(tenant_id, body)
if self.resource in data:
for k, v in six.iteritems(data[self.resource]):
if isinstance(v, list):
value = ""
for _item in v:
if value:
value += "\n"
if isinstance(_item, dict):
value += jsonutils.dumps(_item)
else:
value += str(_item)
data[self.resource][k] = value
elif v is None:
data[self.resource][k] = ''
return zip(*sorted(six.iteritems(data[self.resource])))
else:
return
|
|
from clang.cindex import *
import vim
import time
import re
import threading
def initClangComplete(clang_complete_flags):
global index
index = Index.create()
global translationUnits
translationUnits = dict()
global complete_flags
complete_flags = int(clang_complete_flags)
# Get a tuple (fileName, fileContent) for the file opened in the current
# vim buffer. The fileContent contains the unsaved buffer content.
def getCurrentFile():
file = "\n".join(vim.eval("getline(1, '$')"))
return (vim.current.buffer.name, file)
def getCurrentTranslationUnit(args, currentFile, fileName, update = False):
if fileName in translationUnits:
tu = translationUnits[fileName]
if update:
if debug:
start = time.time()
tu.reparse([currentFile])
if debug:
elapsed = (time.time() - start)
print "LibClang - Reparsing: %.3f" % elapsed
return tu
if debug:
start = time.time()
flags = TranslationUnit.PrecompiledPreamble | TranslationUnit.CXXPrecompiledPreamble # | TranslationUnit.CacheCompletionResults
tu = index.parse(fileName, args, [currentFile], flags)
if debug:
elapsed = (time.time() - start)
print "LibClang - First parse: %.3f" % elapsed
if tu == None:
print "Cannot parse this source file. The following arguments " \
+ "are used for clang: " + " ".join(args)
return None
translationUnits[fileName] = tu
# Reparse to initialize the PCH cache even for auto completion
# This should be done by index.parse(), however it is not.
# So we need to reparse ourselves.
if debug:
start = time.time()
tu.reparse([currentFile])
if debug:
elapsed = (time.time() - start)
print "LibClang - First reparse (generate PCH cache): %.3f" % elapsed
return tu
def splitOptions(options):
  # Split a command-line string on unquoted spaces.  Double quotes are
  # copied through to the output but protect any spaces they enclose, so
  # `-I"/my path"` stays a single option.
  result = []
  current = ""
  in_quotes = False
  for ch in options:
    if ch == '"':
      in_quotes = not in_quotes
    elif ch == ' ' and not in_quotes:
      if current:
        result.append(current)
      current = ""
      continue
    current += ch
  if current:
    result.append(current)
  return result
def getQuickFix(diagnostic):
  # Translate one libclang diagnostic into a vim quickfix entry, or None
  # when the severity is unknown.
  # Some diagnostics have no file, e.g. "too many errors emitted, stopping now"
  location = diagnostic.location
  if location.file:
    filename = location.file.name
  else:
    filename = ""

  severity = diagnostic.severity
  if severity in (diagnostic.Ignored, diagnostic.Note):
    kind = 'I'
  elif severity == diagnostic.Warning:
    kind = 'W'
  elif severity in (diagnostic.Error, diagnostic.Fatal):
    kind = 'E'
  else:
    return None

  return { 'bufnr' : int(vim.eval("bufnr('" + filename + "', 1)")),
           'lnum' : location.line,
           'col' : location.column,
           'text' : diagnostic.spelling,
           'type' : kind }
def getQuickFixList(tu):
  # Quickfix entries for every diagnostic; entries getQuickFix could not
  # classify (None) are dropped.
  return [entry for entry in map(getQuickFix, tu.diagnostics) if entry]
def highlightRange(range, hlGroup):
  # Emit a :syntax match that covers the given source range (one line,
  # start column to end column) with the requested highlight group.
  pattern = '/\%{0}l\%{1}c.*\%{2}c/'.format(range.start.line,
                                            range.start.column,
                                            range.end.column)
  vim.command("exe 'syntax match' . ' " + hlGroup + ' ' + pattern + "'")
def highlightDiagnostic(diagnostic):
  # Highlight a diagnostic's location and ranges using spell-check groups
  # (they are defined by every color scheme).
  if diagnostic.severity == diagnostic.Warning:
    hlGroup = 'SpellLocal'
  elif diagnostic.severity == diagnostic.Error:
    hlGroup = 'SpellBad'
  else:
    return

  pattern = '/\%{0}l\%{1}c./'.format(diagnostic.location.line,
                                     diagnostic.location.column)
  vim.command("exe 'syntax match' . ' " + hlGroup + ' ' + pattern + "'")

  # Use this wired kind of iterator as the python clang libraries
  # have a bug in the range iterator that stops us to use:
  #
  #   | for range in diagnostic.ranges
  #
  for idx in range(len(diagnostic.ranges)):
    highlightRange(diagnostic.ranges[idx], hlGroup)
def highlightDiagnostics(tu):
  # Highlight every diagnostic in the translation unit.
  for diagnostic in tu.diagnostics:
    highlightDiagnostic(diagnostic)
def highlightCurrentDiagnostics():
  # Highlight diagnostics for the current buffer if it has been parsed.
  name = vim.current.buffer.name
  if name in translationUnits:
    highlightDiagnostics(translationUnits[name])
def getCurrentQuickFixList():
  # Quickfix entries for the current buffer; empty if it was never parsed.
  tu = translationUnits.get(vim.current.buffer.name)
  if tu is not None:
    return getQuickFixList(tu)
  return []
def updateCurrentDiagnostics():
  # Force a reparse of the current buffer so its diagnostics are fresh.
  global debug
  debug = int(vim.eval("g:clang_debug")) == 1
  # Global options first, buffer-local options second (same precedence as
  # everywhere else in this plugin).
  args = splitOptions(vim.eval("g:clang_user_options")) \
       + splitOptions(vim.eval("b:clang_user_options"))
  getCurrentTranslationUnit(args, getCurrentFile(),
                            vim.current.buffer.name, update = True)
def getCurrentCompletionResults(line, column, args, currentFile, fileName):
tu = getCurrentTranslationUnit(args, currentFile, fileName)
if debug:
start = time.time()
cr = tu.codeComplete(fileName, line, column, [currentFile],
complete_flags)
if debug:
elapsed = (time.time() - start)
print "LibClang - Code completion time (library): %.3f" % elapsed
return cr
def formatResult(result):
  # Convert one libclang completion result into the dict vim's
  # completion popup expects.
  returnValue = None
  abbr = ""
  word = ""
  args_pos = []
  cur_pos = 0

  for chunk in result.string:
    # Informative chunks carry no insertable text.
    if chunk.isKindInformative():
      continue
    # The result type is shown in the menu, not inserted.
    if chunk.isKindResultType():
      returnValue = chunk
      continue
    spelling = chunk.spelling
    if chunk.isKindTypedText():
      abbr = spelling
    if chunk.isKindPlaceHolder():
      # Remember [begin, end) of each placeholder inside `word`.
      args_pos.append([cur_pos, cur_pos + len(spelling)])
    cur_pos += len(spelling)
    word += spelling

  menu = (returnValue.spelling + " " + word) if returnValue else word
  return {
    'word' : word,
    'abbr' : abbr,
    'menu' : menu,
    'info' : word,
    'args_pos' : args_pos,
    'dup' : 0,
    # Replace the number that represents a specific kind with a better
    # textual representation.
    'kind' : kinds[result.cursorKind],
  }
class CompleteThread(threading.Thread):
  # Background worker: with line == -1 it only warms up the translation
  # unit cache, otherwise it computes completion results at (line, column).
  # All libclang access is serialized through one class-wide lock.
  lock = threading.Lock()

  def __init__(self, line, column, currentFile, fileName):
    threading.Thread.__init__(self)
    self.line = line
    self.column = column
    self.currentFile = currentFile
    self.fileName = fileName
    self.result = None
    userOptionsGlobal = splitOptions(vim.eval("g:clang_user_options"))
    userOptionsLocal = splitOptions(vim.eval("b:clang_user_options"))
    self.args = userOptionsGlobal + userOptionsLocal

  def run(self):
    # Fixed: hold the lock via `with` so it is released even when
    # something the `except Exception` below does not catch escapes
    # (e.g. KeyboardInterrupt/SystemExit); the old explicit
    # acquire()/release() pair would then leave the lock held forever,
    # deadlocking every later completion.
    with CompleteThread.lock:
      try:
        if self.line == -1:
          # Warm up the caches. For this it is sufficient to get the current
          # translation unit. No need to retrieve completion results.
          # This short pause is necessary to allow vim to initialize itself.
          # Otherwise we would get: E293: block was not locked
          # The user does not see any delay, as we just pause a background thread.
          time.sleep(0.1)
          getCurrentTranslationUnit(self.args, self.currentFile, self.fileName)
        else:
          self.result = getCurrentCompletionResults(self.line, self.column,
                                                    self.args, self.currentFile,
                                                    self.fileName)
      except Exception:
        # Best effort: completion failures must never kill the thread.
        pass
def WarmupCache():
  # Kick off a background parse of the current buffer so the first real
  # completion request is fast.
  global debug
  debug = int(vim.eval("g:clang_debug")) == 1
  thread = CompleteThread(-1, -1, getCurrentFile(), vim.current.buffer.name)
  thread.start()
def getCurrentCompletions(base):
  # Compute completion candidates whose typed text starts with `base` at
  # the cursor position.  The libclang work runs on a background thread so
  # vim can poll complete_check() and cancel a slow completion.
  global debug
  debug = int(vim.eval("g:clang_debug")) == 1
  sorting = vim.eval("g:clang_sort_algo")
  line = int(vim.eval("line('.')"))
  column = int(vim.eval("b:col"))
  if debug:
    start = time.time()

  t = CompleteThread(line, column, getCurrentFile(), vim.current.buffer.name)
  t.start()
  while t.isAlive():
    # Poll in 10ms steps; abort as soon as the user cancels completion.
    t.join(0.01)
    cancel = int(vim.eval('complete_check()'))
    if cancel != 0:
      return []

  cr = t.result
  if cr is None:
    # Thread failed (or was warming up) - nothing to offer.
    return []

  results = cr.results
  if base != "":
    # Keep only candidates whose typed text starts with what was typed.
    regexp = re.compile("^" + base)
    results = filter(lambda x: regexp.match(getAbbr(x.string)), results)
  if sorting == 'priority':
    # Lower libclang priority value = more relevant candidate.
    getPriority = lambda x: x.string.priority
    results = sorted(results, None, getPriority)
  if sorting == 'alpha':
    getAbbrevation = lambda x: getAbbr(x.string).lower()
    results = sorted(results, None, getAbbrevation)
  result = map(formatResult, results)

  if debug:
    elapsed = (time.time() - start)
    print "LibClang - Code completion time (library + formatting): %.3f" \
      % elapsed
    # NOTE(review): this sleep appears to exist so the timing output stays
    # readable; it looks debug-only — confirm it is inside the `if debug`
    # branch in the canonical source.
    time.sleep(1)
  return result
def getAbbr(strings):
tmplst = filter(lambda x: x.isKindTypedText(), strings)
if len(tmplst) == 0:
return ""
else:
return tmplst[0].spelling
# Map libclang CXCursorKind values to the short "kind" strings shown in
# vim's completion menu.  Kinds without a dedicated symbol simply display
# their numeric value.
# Cleanup: dropped the redundant dict({...}) wrapper (a plain literal
# builds the same dict) and the stray trailing backslashes, which sat
# inside comments and therefore had no effect.
kinds = {
  # Declarations
  1 : 't',   # CXCursor_UnexposedDecl (A declaration whose specific kind is not
             # exposed via this interface)
  2 : 't',   # CXCursor_StructDecl (A C or C++ struct)
  3 : 't',   # CXCursor_UnionDecl (A C or C++ union)
  4 : 't',   # CXCursor_ClassDecl (A C++ class)
  5 : 't',   # CXCursor_EnumDecl (An enumeration)
  6 : 'm',   # CXCursor_FieldDecl (A field (in C) or non-static data member
             # (in C++) in a struct, union, or C++ class)
  7 : 'e',   # CXCursor_EnumConstantDecl (An enumerator constant)
  8 : 'f',   # CXCursor_FunctionDecl (A function)
  9 : 'v',   # CXCursor_VarDecl (A variable)
  10 : 'a',  # CXCursor_ParmDecl (A function or method parameter)
  11 : '11', # CXCursor_ObjCInterfaceDecl (An Objective-C @interface)
  12 : '12', # CXCursor_ObjCCategoryDecl (An Objective-C @interface for a
             # category)
  13 : '13', # CXCursor_ObjCProtocolDecl (An Objective-C @protocol declaration)
  14 : '14', # CXCursor_ObjCPropertyDecl (An Objective-C @property declaration)
  15 : '15', # CXCursor_ObjCIvarDecl (An Objective-C instance variable)
  16 : '16', # CXCursor_ObjCInstanceMethodDecl (An Objective-C instance method)
  17 : '17', # CXCursor_ObjCClassMethodDecl (An Objective-C class method)
  18 : '18', # CXCursor_ObjCImplementationDec (An Objective-C @implementation)
  19 : '19', # CXCursor_ObjCCategoryImplDecll (An Objective-C @implementation
             # for a category)
  20 : 't',  # CXCursor_TypedefDecl (A typedef)
  21 : 'f',  # CXCursor_CXXMethod (A C++ class method)
  22 : 'n',  # CXCursor_Namespace (A C++ namespace)
  23 : '23', # CXCursor_LinkageSpec (A linkage specification, e.g. 'extern "C"')
  24 : '+',  # CXCursor_Constructor (A C++ constructor)
  25 : '~',  # CXCursor_Destructor (A C++ destructor)
  26 : '26', # CXCursor_ConversionFunction (A C++ conversion function)
  27 : 'a',  # CXCursor_TemplateTypeParameter (A C++ template type parameter)
  28 : 'a',  # CXCursor_NonTypeTemplateParameter (A C++ non-type template
             # parameter)
  29 : 'a',  # CXCursor_TemplateTemplateParameter (A C++ template template
             # parameter)
  30 : 'f',  # CXCursor_FunctionTemplate (A C++ function template)
  31 : 'p',  # CXCursor_ClassTemplate (A C++ class template)
  32 : '32', # CXCursor_ClassTemplatePartialSpecialization (A C++ class template
             # partial specialization)
  33 : 'n',  # CXCursor_NamespaceAlias (A C++ namespace alias declaration)
  34 : '34', # CXCursor_UsingDirective (A C++ using directive)
  35 : '35', # CXCursor_UsingDeclaration (A using declaration)

  # References
  40 : '40', # CXCursor_ObjCSuperClassRef
  41 : '41', # CXCursor_ObjCProtocolRef
  42 : '42', # CXCursor_ObjCClassRef
  43 : '43', # CXCursor_TypeRef
  44 : '44', # CXCursor_CXXBaseSpecifier
  45 : '45', # CXCursor_TemplateRef (A reference to a class template, function
             # template, template template parameter, or class template partial
             # specialization)
  46 : '46', # CXCursor_NamespaceRef (A reference to a namespace or namespace
             # alias)
  47 : '47', # CXCursor_MemberRef (A reference to a member of a struct, union,
             # or class that occurs in some non-expression context, e.g., a
             # designated initializer)
  48 : '48', # CXCursor_LabelRef (A reference to a labeled statement)
  49 : '49', # CXCursor_OverloadedDeclRef (A reference to a set of overloaded
             # functions or function templates that has not yet been resolved to
             # a specific function or function template)

  # Error conditions
  #70 : '70', # CXCursor_FirstInvalid
  70 : '70',   # CXCursor_InvalidFile
  71 : '71',   # CXCursor_NoDeclFound
  72 : 'u',    # CXCursor_NotImplemented
  73 : '73',   # CXCursor_InvalidCode

  # Expressions
  100 : '100', # CXCursor_UnexposedExpr (An expression whose specific kind is
               # not exposed via this interface)
  101 : '101', # CXCursor_DeclRefExpr (An expression that refers to some value
               # declaration, such as a function, varible, or enumerator)
  102 : '102', # CXCursor_MemberRefExpr (An expression that refers to a member
               # of a struct, union, class, Objective-C class, etc)
  103 : '103', # CXCursor_CallExpr (An expression that calls a function)
  104 : '104', # CXCursor_ObjCMessageExpr (An expression that sends a message
               # to an Objective-C object or class)
  105 : '105', # CXCursor_BlockExpr (An expression that represents a block
               # literal)

  # Statements
  200 : '200', # CXCursor_UnexposedStmt (A statement whose specific kind is not
               # exposed via this interface)
  201 : '201', # CXCursor_LabelStmt (A labelled statement in a function)

  # Translation unit
  300 : '300', # CXCursor_TranslationUnit (Cursor that represents the
               # translation unit itself)

  # Attributes
  400 : '400', # CXCursor_UnexposedAttr (An attribute whose specific kind is
               # not exposed via this interface)
  401 : '401', # CXCursor_IBActionAttr
  402 : '402', # CXCursor_IBOutletAttr
  403 : '403', # CXCursor_IBOutletCollectionAttr

  # Preprocessing
  500 : '500', # CXCursor_PreprocessingDirective
  501 : 'd',   # CXCursor_MacroDefinition
  502 : '502', # CXCursor_MacroInstantiation
  503 : '503'  # CXCursor_InclusionDirective
}
# vim: set ts=2 sts=2 sw=2 expandtab :
|
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from cpp_type_generator import CppTypeGenerator
from json_schema import CachedLoad
import model
import unittest
class CppTypeGeneratorTest(unittest.TestCase):
  """Unit tests for CppTypeGenerator.

  Each test loads one or more JSON API schemas into a model.Model and
  checks the C++ includes, forward declarations, type names and namespace
  scaffolding that CppTypeGenerator emits for them.
  """

  def setUp(self):
    # Populate a shared model with every schema the tests below use; keep
    # both the raw JSON and the resulting model.Namespace for each.
    self.model = model.Model()
    self.forbidden_json = CachedLoad('test/forbidden.json')
    self.model.AddNamespace(self.forbidden_json[0],
                            'path/to/forbidden.json')
    self.forbidden = self.model.namespaces.get('forbidden')
    self.permissions_json = CachedLoad('test/permissions.json')
    self.model.AddNamespace(self.permissions_json[0],
                            'path/to/permissions.json')
    self.permissions = self.model.namespaces.get('permissions')
    self.windows_json = CachedLoad('test/windows.json')
    self.model.AddNamespace(self.windows_json[0],
                            'path/to/window.json')
    self.windows = self.model.namespaces.get('windows')
    self.tabs_json = CachedLoad('test/tabs.json')
    self.model.AddNamespace(self.tabs_json[0],
                            'path/to/tabs.json')
    self.tabs = self.model.namespaces.get('tabs')
    self.browser_action_json = CachedLoad('test/browser_action.json')
    self.model.AddNamespace(self.browser_action_json[0],
                            'path/to/browser_action.json')
    self.browser_action = self.model.namespaces.get('browserAction')
    self.font_settings_json = CachedLoad('test/font_settings.json')
    self.model.AddNamespace(self.font_settings_json[0],
                            'path/to/font_settings.json')
    self.font_settings = self.model.namespaces.get('fontSettings')
    self.dependency_tester_json = CachedLoad('test/dependency_tester.json')
    self.model.AddNamespace(self.dependency_tester_json[0],
                            'path/to/dependency_tester.json')
    self.dependency_tester = self.model.namespaces.get('dependencyTester')

  def testGenerateIncludesAndForwardDeclarations(self):
    """Includes and forward declarations for a simple dependent namespace."""
    manager = CppTypeGenerator('', self.windows, self.windows.unix_name)
    manager.AddNamespace(self.tabs, self.tabs.unix_name)
    self.assertEquals('#include "path/to/tabs.h"',
                      manager.GenerateIncludes().Render())
    self.assertEquals(
        'namespace tabs {\n'
        'struct Tab;\n'
        '}\n'
        'namespace windows {\n'
        'struct Window;\n'
        '}  // windows' if False else
        'namespace tabs {\n'
        'struct Tab;\n'
        '}\n'
        'namespace windows {\n'
        'struct Window;\n'
        '} // windows',
        manager.GenerateForwardDeclarations().Render())
    # A namespace with no dependencies needs no includes at all.
    manager = CppTypeGenerator('', self.permissions, self.permissions.unix_name)
    self.assertEquals('', manager.GenerateIncludes().Render())
    self.assertEquals('namespace permissions {\n'
                      'struct Permissions;\n'
                      '} // permissions',
                      manager.GenerateForwardDeclarations().Render())

  def testGenerateIncludesAndForwardDeclarationsMultipleTypes(self):
    """Every type of a namespace appears in its forward declarations."""
    m = model.Model()
    self.tabs_json[0]['types'].append(self.permissions_json[0]['types'][0])
    self.windows_json[0]['functions'].append(
        self.permissions_json[0]['functions'][1])
    # Insert 'windows' before 'tabs' in order to test that they are sorted
    # properly.
    windows = m.AddNamespace(self.windows_json[0],
                             'path/to/windows.json')
    tabs_namespace = m.AddNamespace(self.tabs_json[0],
                                    'path/to/tabs.json')
    manager = CppTypeGenerator('', windows, self.windows.unix_name)
    manager.AddNamespace(tabs_namespace, self.tabs.unix_name)
    self.assertEquals('#include "path/to/tabs.h"',
                      manager.GenerateIncludes().Render())
    self.assertEquals(
        'namespace tabs {\n'
        'struct Permissions;\n'
        'struct Tab;\n'
        '}\n'
        'namespace windows {\n'
        'struct Window;\n'
        '} // windows',
        manager.GenerateForwardDeclarations().Render())

  def testGenerateIncludesAndForwardDeclarationsDependencies(self):
    """Dependencies are emitted as sorted includes and typedef forwards."""
    m = model.Model()
    # Insert 'font_settings' before 'browser_action' in order to test that
    # CppTypeGenerator sorts them properly.
    font_settings_namespace = m.AddNamespace(self.font_settings_json[0],
                                             'path/to/font_settings.json')
    browser_action_namespace = m.AddNamespace(self.browser_action_json[0],
                                              'path/to/browser_action.json')
    manager = CppTypeGenerator('', self.dependency_tester,
                               self.dependency_tester.unix_name)
    manager.AddNamespace(font_settings_namespace,
                         self.font_settings.unix_name)
    manager.AddNamespace(browser_action_namespace,
                         self.browser_action.unix_name)
    self.assertEquals('#include "path/to/browser_action.h"\n'
                      '#include "path/to/font_settings.h"',
                      manager.GenerateIncludes().Render())
    self.assertEquals(
        'namespace browserAction {\n'
        'typedef std::vector<int> ColorArray;\n'
        '}\n'
        'namespace fontSettings {\n'
        'typedef std::string ScriptCode;\n'
        '}\n'
        'namespace dependency_tester {\n'
        '} // dependency_tester',
        manager.GenerateForwardDeclarations().Render())

  def testChoicesEnum(self):
    """Choice parameters produce enum values and an enum type name."""
    manager = CppTypeGenerator('', self.tabs, self.tabs.unix_name)
    prop = self.tabs.functions['move'].params[0]
    self.assertEquals('TAB_IDS_ARRAY',
                      manager.GetEnumValue(prop, model.PropertyType.ARRAY.name))
    self.assertEquals('TAB_IDS_INTEGER',
                      manager.GetEnumValue(prop,
                                           model.PropertyType.INTEGER.name))
    self.assertEquals('TabIdsType',
                      manager.GetChoicesEnumType(prop))

  def testGetTypeSimple(self):
    """Primitive schema types map onto primitive C++ types."""
    manager = CppTypeGenerator('', self.tabs, self.tabs.unix_name)
    self.assertEquals('int',
                      manager.GetType(
                          self.tabs.types['tabs.Tab'].properties['id']))
    self.assertEquals('std::string',
                      manager.GetType(
                          self.tabs.types['tabs.Tab'].properties['status']))
    self.assertEquals('bool',
                      manager.GetType(
                          self.tabs.types['tabs.Tab'].properties['selected']))

  def testStringAsType(self):
    """A named type that is just a string maps to std::string."""
    manager = CppTypeGenerator('', self.font_settings,
                               self.font_settings.unix_name)
    self.assertEquals('std::string',
                      manager.GetType(
                          self.font_settings.types['fontSettings.ScriptCode']))

  def testArrayAsType(self):
    """A named array type maps to a std::vector."""
    manager = CppTypeGenerator('', self.browser_action,
                               self.browser_action.unix_name)
    self.assertEquals('std::vector<int>',
                      manager.GetType(
                          self.browser_action.types['browserAction.ColorArray']))

  def testGetTypeArray(self):
    """Arrays of objects use linked_ptr elements; arrays of strings do not."""
    manager = CppTypeGenerator('', self.windows, self.windows.unix_name)
    self.assertEquals('std::vector<linked_ptr<Window> >',
                      manager.GetType(
                          self.windows.functions['getAll'].callback.params[0]))
    manager = CppTypeGenerator('', self.permissions, self.permissions.unix_name)
    self.assertEquals('std::vector<std::string>', manager.GetType(
        self.permissions.types['permissions.Permissions'].properties['origins']))

  def testGetTypeLocalRef(self):
    """A $ref within the same namespace is unqualified."""
    manager = CppTypeGenerator('', self.tabs, self.tabs.unix_name)
    self.assertEquals('Tab',
                      manager.GetType(
                          self.tabs.functions['get'].callback.params[0]))

  def testGetTypeIncludedRef(self):
    """A $ref into another namespace is qualified with that namespace."""
    manager = CppTypeGenerator('', self.windows, self.windows.unix_name)
    manager.AddNamespace(self.tabs, self.tabs.unix_name)
    self.assertEquals('std::vector<linked_ptr<tabs::Tab> >',
                      manager.GetType(
                          self.windows.types['windows.Window'].properties['tabs']))

  def testGetTypeNotfound(self):
    """An unresolvable $ref raises KeyError."""
    prop = self.windows.types['windows.Window'].properties['tabs'].item_type
    prop.ref_type = 'Something'
    manager = CppTypeGenerator('', self.windows, self.windows.unix_name)
    self.assertRaises(KeyError, manager.GetType, prop)

  def testGetTypeNotimplemented(self):
    """An unknown property type raises NotImplementedError."""
    prop = self.windows.types['windows.Window'].properties['tabs'].item_type
    prop.type_ = 10
    manager = CppTypeGenerator('', self.windows, self.windows.unix_name)
    self.assertRaises(NotImplementedError, manager.GetType, prop)

  def testGetTypeWithPadForGeneric(self):
    """pad_for_generics appends a space only after template types."""
    manager = CppTypeGenerator('', self.permissions, self.permissions.unix_name)
    self.assertEquals('std::vector<std::string> ',
                      manager.GetType(
                          self.permissions.types['permissions.Permissions'].properties['origins'],
                          pad_for_generics=True))
    self.assertEquals('bool',
                      manager.GetType(
                          self.permissions.functions['contains'].callback.params[0],
                          pad_for_generics=True))

  def testNamespaceDeclaration(self):
    """Root and API namespace open/close blocks are rendered correctly."""
    manager = CppTypeGenerator('extensions', self.permissions,
                               self.permissions.unix_name)
    self.assertEquals(
        'namespace extensions {',
        manager.GetRootNamespaceStart().Render())
    manager = CppTypeGenerator('extensions::gen::api', self.permissions,
                               self.permissions.unix_name)
    self.assertEquals('namespace permissions {',
                      manager.GetNamespaceStart().Render())
    self.assertEquals('} // permissions',
                      manager.GetNamespaceEnd().Render())
    self.assertEquals(
        'namespace extensions {\n'
        'namespace gen {\n'
        'namespace api {',
        manager.GetRootNamespaceStart().Render())
    self.assertEquals(
        '} // api\n'
        '} // gen\n'
        '} // extensions',
        manager.GetRootNamespaceEnd().Render())

  def testExpandParams(self):
    """Choice parameters expand into one parameter per choice."""
    manager = CppTypeGenerator('extensions', self.tabs,
                               self.tabs.unix_name)
    props = self.tabs.functions['move'].params
    self.assertEquals(2, len(props))
    self.assertEquals(['move_properties', 'tab_ids_array', 'tab_ids_integer'],
                      sorted([x.unix_name for x in manager.ExpandParams(props)])
                      )

  def testGetAllPossibleParameterLists(self):
    """All combinations of choice parameters are enumerated."""
    manager = CppTypeGenerator('extensions', self.tabs,
                               self.tabs.unix_name)
    props = self.forbidden.functions['forbiddenParameters'].params
    self.assertEquals(4, len(props))
    param_lists = manager.GetAllPossibleParameterLists(props)
    expected_lists = [
        ['first_choice_array', 'first_string',
         'second_choice_array', 'second_string'],
        ['first_choice_array', 'first_string',
         'second_choice_integer', 'second_string'],
        ['first_choice_integer', 'first_string',
         'second_choice_array', 'second_string'],
        ['first_choice_integer', 'first_string',
         'second_choice_integer', 'second_string']]
    result_lists = sorted([[param.unix_name for param in param_list]
                           for param_list in param_lists])
    self.assertEquals(expected_lists, result_lists)
# Run the tests when this module is executed directly.
if __name__ == '__main__':
  unittest.main()
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Python package for random data generation.
"""
from functools import wraps
from pyspark.mllib.common import callMLlibFunc
__all__ = ['RandomRDDs', ]
def toArray(f):
@wraps(f)
def func(sc, *a, **kw):
rdd = f(sc, *a, **kw)
return rdd.map(lambda vec: vec.toArray())
return func
class RandomRDDs(object):
"""
Generator methods for creating RDDs comprised of i.i.d samples from
some distribution.
"""
    @staticmethod
    def uniformRDD(sc, size, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of i.i.d. samples from the
        uniform distribution U(0.0, 1.0).

        To transform the distribution in the generated RDD from U(0.0, 1.0)
        to U(a, b), use
        C{RandomRDDs.uniformRDD(sc, n, p, seed)\
          .map(lambda v: a + (b - a) * v)}

        :param sc: SparkContext used to create the RDD.
        :param size: Size of the RDD.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of float comprised of i.i.d. samples ~ `U(0.0, 1.0)`.

        >>> x = RandomRDDs.uniformRDD(sc, 100).collect()
        >>> len(x)
        100
        >>> max(x) <= 1.0 and min(x) >= 0.0
        True
        >>> RandomRDDs.uniformRDD(sc, 100, 4).getNumPartitions()
        4
        >>> parts = RandomRDDs.uniformRDD(sc, 100, seed=4).getNumPartitions()
        >>> parts == sc.defaultParallelism
        True
        """
        # Thin wrapper: the sampling happens on the JVM side; None
        # numPartitions/seed let the JVM pick the documented defaults.
        return callMLlibFunc("uniformRDD", sc._jsc, size, numPartitions, seed)
    @staticmethod
    def normalRDD(sc, size, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of i.i.d. samples from the standard normal
        distribution.

        To transform the distribution in the generated RDD from standard normal
        to some other normal N(mean, sigma^2), use
        C{RandomRDDs.normalRDD(sc, n, p, seed)\
          .map(lambda v: mean + sigma * v)}

        :param sc: SparkContext used to create the RDD.
        :param size: Size of the RDD.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of float comprised of i.i.d. samples ~ N(0.0, 1.0).

        >>> x = RandomRDDs.normalRDD(sc, 1000, seed=1)
        >>> stats = x.stats()
        >>> stats.count()
        1000
        >>> abs(stats.mean() - 0.0) < 0.1
        True
        >>> abs(stats.stdev() - 1.0) < 0.1
        True
        """
        # Thin wrapper over the JVM implementation.
        return callMLlibFunc("normalRDD", sc._jsc, size, numPartitions, seed)
    @staticmethod
    def logNormalRDD(sc, mean, std, size, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of i.i.d. samples from the log normal
        distribution with the input mean and standard distribution.

        :param sc: SparkContext used to create the RDD.
        :param mean: mean for the log Normal distribution
        :param std: std for the log Normal distribution
        :param size: Size of the RDD.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of float comprised of i.i.d. samples ~ log N(mean, std).

        >>> from math import sqrt, exp
        >>> mean = 0.0
        >>> std = 1.0
        >>> expMean = exp(mean + 0.5 * std * std)
        >>> expStd = sqrt((exp(std * std) - 1.0) * exp(2.0 * mean + std * std))
        >>> x = RandomRDDs.logNormalRDD(sc, mean, std, 1000, seed=2)
        >>> stats = x.stats()
        >>> stats.count()
        1000
        >>> abs(stats.mean() - expMean) < 0.5
        True
        >>> from math import sqrt
        >>> abs(stats.stdev() - expStd) < 0.5
        True
        """
        # float() coercion guards against int arguments reaching the JVM
        # with the wrong boxed type.
        return callMLlibFunc("logNormalRDD", sc._jsc, float(mean), float(std),
                             size, numPartitions, seed)
    @staticmethod
    def poissonRDD(sc, mean, size, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of i.i.d. samples from the Poisson
        distribution with the input mean.

        :param sc: SparkContext used to create the RDD.
        :param mean: Mean, or lambda, for the Poisson distribution.
        :param size: Size of the RDD.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of float comprised of i.i.d. samples ~ Pois(mean).

        >>> mean = 100.0
        >>> x = RandomRDDs.poissonRDD(sc, mean, 1000, seed=2)
        >>> stats = x.stats()
        >>> stats.count()
        1000
        >>> abs(stats.mean() - mean) < 0.5
        True
        >>> from math import sqrt
        >>> abs(stats.stdev() - sqrt(mean)) < 0.5
        True
        """
        # Thin wrapper; mean is coerced to float for the JVM call.
        return callMLlibFunc("poissonRDD", sc._jsc, float(mean), size, numPartitions, seed)
    @staticmethod
    def exponentialRDD(sc, mean, size, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of i.i.d. samples from the Exponential
        distribution with the input mean.

        :param sc: SparkContext used to create the RDD.
        :param mean: Mean, or 1 / lambda, for the Exponential distribution.
        :param size: Size of the RDD.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of float comprised of i.i.d. samples ~ Exp(mean).

        >>> mean = 2.0
        >>> x = RandomRDDs.exponentialRDD(sc, mean, 1000, seed=2)
        >>> stats = x.stats()
        >>> stats.count()
        1000
        >>> abs(stats.mean() - mean) < 0.5
        True
        >>> from math import sqrt
        >>> abs(stats.stdev() - sqrt(mean)) < 0.5
        True
        """
        # Thin wrapper; mean is coerced to float for the JVM call.
        return callMLlibFunc("exponentialRDD", sc._jsc, float(mean), size, numPartitions, seed)
    @staticmethod
    def gammaRDD(sc, shape, scale, size, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of i.i.d. samples from the Gamma
        distribution with the input shape and scale.

        :param sc: SparkContext used to create the RDD.
        :param shape: shape (> 0) parameter for the Gamma distribution
        :param scale: scale (> 0) parameter for the Gamma distribution
        :param size: Size of the RDD.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of float comprised of i.i.d. samples ~ Gamma(shape, scale).

        >>> from math import sqrt
        >>> shape = 1.0
        >>> scale = 2.0
        >>> expMean = shape * scale
        >>> expStd = sqrt(shape * scale * scale)
        >>> x = RandomRDDs.gammaRDD(sc, shape, scale, 1000, seed=2)
        >>> stats = x.stats()
        >>> stats.count()
        1000
        >>> abs(stats.mean() - expMean) < 0.5
        True
        >>> abs(stats.stdev() - expStd) < 0.5
        True
        """
        # Thin wrapper; shape/scale coerced to float for the JVM call.
        return callMLlibFunc("gammaRDD", sc._jsc, float(shape),
                             float(scale), size, numPartitions, seed)
    @staticmethod
    @toArray
    def uniformVectorRDD(sc, numRows, numCols, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of vectors containing i.i.d. samples drawn
        from the uniform distribution U(0.0, 1.0).

        :param sc: SparkContext used to create the RDD.
        :param numRows: Number of Vectors in the RDD.
        :param numCols: Number of elements in each Vector.
        :param numPartitions: Number of partitions in the RDD.
        :param seed: Seed for the RNG that generates the seed for the generator in each partition.
        :return: RDD of Vector with vectors containing i.i.d samples ~ `U(0.0, 1.0)`.

        >>> import numpy as np
        >>> mat = np.matrix(RandomRDDs.uniformVectorRDD(sc, 10, 10).collect())
        >>> mat.shape
        (10, 10)
        >>> mat.max() <= 1.0 and mat.min() >= 0.0
        True
        >>> RandomRDDs.uniformVectorRDD(sc, 10, 10, 4).getNumPartitions()
        4
        """
        # @toArray converts each returned Vector to a numpy array.
        return callMLlibFunc("uniformVectorRDD", sc._jsc, numRows, numCols, numPartitions, seed)
    @staticmethod
    @toArray
    def normalVectorRDD(sc, numRows, numCols, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of vectors containing i.i.d. samples drawn
        from the standard normal distribution.

        :param sc: SparkContext used to create the RDD.
        :param numRows: Number of Vectors in the RDD.
        :param numCols: Number of elements in each Vector.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of Vector with vectors containing i.i.d. samples ~ `N(0.0, 1.0)`.

        >>> import numpy as np
        >>> mat = np.matrix(RandomRDDs.normalVectorRDD(sc, 100, 100, seed=1).collect())
        >>> mat.shape
        (100, 100)
        >>> abs(mat.mean() - 0.0) < 0.1
        True
        >>> abs(mat.std() - 1.0) < 0.1
        True
        """
        # @toArray converts each returned Vector to a numpy array.
        return callMLlibFunc("normalVectorRDD", sc._jsc, numRows, numCols, numPartitions, seed)
    @staticmethod
    @toArray
    def logNormalVectorRDD(sc, mean, std, numRows, numCols, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of vectors containing i.i.d. samples drawn
        from the log normal distribution.

        :param sc: SparkContext used to create the RDD.
        :param mean: Mean of the log normal distribution
        :param std: Standard Deviation of the log normal distribution
        :param numRows: Number of Vectors in the RDD.
        :param numCols: Number of elements in each Vector.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
        :param seed: Random seed (default: a random long integer).
        :return: RDD of Vector with vectors containing i.i.d. samples ~ log `N(mean, std)`.

        >>> import numpy as np
        >>> from math import sqrt, exp
        >>> mean = 0.0
        >>> std = 1.0
        >>> expMean = exp(mean + 0.5 * std * std)
        >>> expStd = sqrt((exp(std * std) - 1.0) * exp(2.0 * mean + std * std))
        >>> m = RandomRDDs.logNormalVectorRDD(sc, mean, std, 100, 100, seed=1).collect()
        >>> mat = np.matrix(m)
        >>> mat.shape
        (100, 100)
        >>> abs(mat.mean() - expMean) < 0.1
        True
        >>> abs(mat.std() - expStd) < 0.1
        True
        """
        # @toArray converts each Vector; mean/std coerced to float.
        return callMLlibFunc("logNormalVectorRDD", sc._jsc, float(mean), float(std),
                             numRows, numCols, numPartitions, seed)
    @staticmethod
    @toArray
    def poissonVectorRDD(sc, mean, numRows, numCols, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of vectors containing i.i.d. samples drawn
        from the Poisson distribution with the input mean.

        :param sc: SparkContext used to create the RDD.
        :param mean: Mean, or lambda, for the Poisson distribution.
        :param numRows: Number of Vectors in the RDD.
        :param numCols: Number of elements in each Vector.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`)
        :param seed: Random seed (default: a random long integer).
        :return: RDD of Vector with vectors containing i.i.d. samples ~ Pois(mean).

        >>> import numpy as np
        >>> mean = 100.0
        >>> rdd = RandomRDDs.poissonVectorRDD(sc, mean, 100, 100, seed=1)
        >>> mat = np.mat(rdd.collect())
        >>> mat.shape
        (100, 100)
        >>> abs(mat.mean() - mean) < 0.5
        True
        >>> from math import sqrt
        >>> abs(mat.std() - sqrt(mean)) < 0.5
        True
        """
        # @toArray converts each Vector; mean coerced to float.
        return callMLlibFunc("poissonVectorRDD", sc._jsc, float(mean), numRows, numCols,
                             numPartitions, seed)
    @staticmethod
    @toArray
    def exponentialVectorRDD(sc, mean, numRows, numCols, numPartitions=None, seed=None):
        """
        Generates an RDD comprised of vectors containing i.i.d. samples drawn
        from the Exponential distribution with the input mean.

        :param sc: SparkContext used to create the RDD.
        :param mean: Mean, or 1 / lambda, for the Exponential distribution.
        :param numRows: Number of Vectors in the RDD.
        :param numCols: Number of elements in each Vector.
        :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`)
        :param seed: Random seed (default: a random long integer).
        :return: RDD of Vector with vectors containing i.i.d. samples ~ Exp(mean).

        >>> import numpy as np
        >>> mean = 0.5
        >>> rdd = RandomRDDs.exponentialVectorRDD(sc, mean, 100, 100, seed=1)
        >>> mat = np.mat(rdd.collect())
        >>> mat.shape
        (100, 100)
        >>> abs(mat.mean() - mean) < 0.5
        True
        >>> from math import sqrt
        >>> abs(mat.std() - sqrt(mean)) < 0.5
        True
        """
        # @toArray converts each Vector; mean coerced to float.
        return callMLlibFunc("exponentialVectorRDD", sc._jsc, float(mean), numRows, numCols,
                             numPartitions, seed)
@staticmethod
@toArray
def gammaVectorRDD(sc, shape, scale, numRows, numCols, numPartitions=None, seed=None):
    """
    Generates an RDD comprised of vectors containing i.i.d. samples drawn
    from the Gamma distribution.

    :param sc: SparkContext used to create the RDD.
    :param shape: Shape (> 0) of the Gamma distribution
    :param scale: Scale (> 0) of the Gamma distribution
    :param numRows: Number of Vectors in the RDD.
    :param numCols: Number of elements in each Vector.
    :param numPartitions: Number of partitions in the RDD (default: `sc.defaultParallelism`).
    :param seed: Random seed (default: a random long integer).
    :return: RDD of Vector with vectors containing i.i.d. samples ~ Gamma(shape, scale).

    >>> import numpy as np
    >>> from math import sqrt
    >>> shape = 1.0
    >>> scale = 2.0
    >>> expMean = shape * scale
    >>> expStd = sqrt(shape * scale * scale)
    >>> mat = np.matrix(RandomRDDs.gammaVectorRDD(sc, shape, scale, 100, 100, seed=1).collect())
    >>> mat.shape
    (100, 100)
    >>> abs(mat.mean() - expMean) < 0.1
    True
    >>> abs(mat.std() - expStd) < 0.1
    True
    """
    # Sampling happens on the JVM side; parameters are coerced to float
    # for the Py4J call.
    return callMLlibFunc("gammaVectorRDD", sc._jsc, float(shape), float(scale),
                         numRows, numCols, numPartitions, seed)
def _test():
    """Run this module's doctests against a throwaway local SparkContext."""
    import doctest
    from pyspark.context import SparkContext
    globs = globals().copy()
    # The small batch size here ensures that we see multiple batches,
    # even in these small test examples:
    globs['sc'] = SparkContext('local[2]', 'PythonTest', batchSize=2)
    failures, _ = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failures:
        exit(-1)
if __name__ == "__main__":
    # Run the doctest suite when this module is executed directly.
    _test()
|
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Utility for creating well-formed pull request merges and pushing them to Apache
# Spark.
# usage: ./merge_spark_pr.py (see config env vars below)
#
# This utility assumes you already have a local Spark git folder and that you
# have added remotes corresponding to both (i) the github apache Spark
# mirror and (ii) the apache git repo.
import json
import os
import re
import subprocess
import sys
import urllib2
# jira-python is optional; when it is missing the script still merges PRs
# but skips the JIRA-resolution step at the end.
try:
    import jira.client
    JIRA_IMPORTED = True
except ImportError:
    JIRA_IMPORTED = False

# Location of your Spark git development area
SPARK_HOME = os.environ.get("SPARK_HOME", os.getcwd())
# Remote name which points to the GitHub site
PR_REMOTE_NAME = os.environ.get("PR_REMOTE_NAME", "apache-github")
# Remote name which points to Apache git
PUSH_REMOTE_NAME = os.environ.get("PUSH_REMOTE_NAME", "apache")
# ASF JIRA username
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", "")
# ASF JIRA password
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", "")
# OAuth key used for issuing requests against the GitHub API. If this is not defined, then requests
# will be unauthenticated. You should only need to configure this if you find yourself regularly
# exceeding your IP's unauthenticated request rate limit. You can create an OAuth key at
# https://github.com/settings/tokens. This script only requires the "public_repo" scope.
GITHUB_OAUTH_KEY = os.environ.get("GITHUB_OAUTH_KEY")

GITHUB_BASE = "https://github.com/apache/spark/pull"
GITHUB_API_BASE = "https://api.github.com/repos/apache/spark"
JIRA_BASE = "https://issues.apache.org/jira/browse"
JIRA_API_BASE = "https://issues.apache.org/jira"
# Prefix added to temporary branches
BRANCH_PREFIX = "PR_TOOL"
def get_json(url):
    """GET *url* from the GitHub API and return the decoded JSON payload.

    Sends an OAuth token header when GITHUB_OAUTH_KEY is configured.
    On any HTTP error a diagnostic is printed and the process exits.
    """
    try:
        request = urllib2.Request(url)
        if GITHUB_OAUTH_KEY:
            request.add_header('Authorization', 'token %s' % GITHUB_OAUTH_KEY)
        return json.load(urllib2.urlopen(request))
    except urllib2.HTTPError as e:
        rate_limited = ("X-RateLimit-Remaining" in e.headers
                        and e.headers["X-RateLimit-Remaining"] == '0')
        if rate_limited:
            print("Exceeded the GitHub API rate limit; see the instructions in " +
                  "dev/merge_spark_pr.py to configure an OAuth token for making authenticated " +
                  "GitHub requests.")
        else:
            print("Unable to fetch URL, exiting: %s" % url)
        sys.exit(-1)
def fail(msg):
    """Print *msg*, restore the original git state, and exit with an error."""
    print(msg)
    clean_up()
    sys.exit(-1)
def run_cmd(cmd):
    """Echo and run *cmd*, returning its captured stdout.

    *cmd* is either an argv list or a string; strings are split on single
    spaces, so arguments containing spaces must be passed as a list.
    """
    print(cmd)
    argv = cmd if isinstance(cmd, list) else cmd.split(" ")
    return subprocess.check_output(argv)
def continue_maybe(prompt):
    """Ask the user *prompt*; abort the whole run unless they answer 'y'."""
    answer = raw_input("\n%s (y/n): " % prompt)
    if answer.lower() != "y":
        fail("Okay, exiting")
def clean_up():
    """Check out the original ref and delete all temporary PR_TOOL branches."""
    print("Restoring head pointer to %s" % original_head)
    run_cmd("git checkout %s" % original_head)

    all_branches = run_cmd("git branch").replace(" ", "").split("\n")
    temp_branches = [b for b in all_branches if b.startswith(BRANCH_PREFIX)]
    for temp_branch in temp_branches:
        print("Deleting local branch %s" % temp_branch)
        run_cmd("git branch -D %s" % temp_branch)
# merge the requested PR and return the merge hash
def merge_pr(pr_num, target_ref, title, body, pr_repo_desc):
    """Squash-merge pull request *pr_num* into *target_ref* and push it.

    Prompts the user before every destructive step, lets them resolve
    conflicts manually, and returns the short hash of the merge commit.
    """
    pr_branch_name = "%s_MERGE_PR_%s" % (BRANCH_PREFIX, pr_num)
    target_branch_name = "%s_MERGE_PR_%s_%s" % (BRANCH_PREFIX, pr_num, target_ref.upper())
    run_cmd("git fetch %s pull/%s/head:%s" % (PR_REMOTE_NAME, pr_num, pr_branch_name))
    run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, target_ref, target_branch_name))
    run_cmd("git checkout %s" % target_branch_name)

    had_conflicts = False
    try:
        # --squash lands the whole PR as a single commit on the target.
        run_cmd(['git', 'merge', pr_branch_name, '--squash'])
    except Exception as e:
        msg = "Error merging: %s\nWould you like to manually fix-up this merge?" % e
        continue_maybe(msg)
        msg = "Okay, please fix any conflicts and 'git add' conflicting files... Finished?"
        continue_maybe(msg)
        had_conflicts = True

    # Distinct authors, most-frequent committer first.
    commit_authors = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
                              '--pretty=format:%an <%ae>']).split("\n")
    distinct_authors = sorted(set(commit_authors),
                              key=lambda x: commit_authors.count(x), reverse=True)
    primary_author = raw_input(
        "Enter primary author in the format of \"name <email>\" [%s]: " %
        distinct_authors[0])
    if primary_author == "":
        primary_author = distinct_authors[0]

    # NOTE(review): 'commits' is collected but never used below -- a
    # candidate for removal.
    commits = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
                       '--pretty=format:%h [%an] %s']).split("\n\n")

    merge_message_flags = []
    merge_message_flags += ["-m", title]
    if body is not None:
        # We remove @ symbols from the body to avoid triggering e-mails
        # to people every time someone creates a public fork of Spark.
        merge_message_flags += ["-m", body.replace("@", "")]

    authors = "\n".join(["Author: %s" % a for a in distinct_authors])
    merge_message_flags += ["-m", authors]

    if had_conflicts:
        # Record who resolved the conflicts in the commit message.
        committer_name = run_cmd("git config --get user.name").strip()
        committer_email = run_cmd("git config --get user.email").strip()
        message = "This patch had conflicts when merged, resolved by\nCommitter: %s <%s>" % (
            committer_name, committer_email)
        merge_message_flags += ["-m", message]

    # The "Closes #%s" string is required for GitHub to correctly close the PR
    merge_message_flags += ["-m", "Closes #%s from %s." % (pr_num, pr_repo_desc)]

    run_cmd(['git', 'commit', '--author="%s"' % primary_author] + merge_message_flags)

    continue_maybe("Merge complete (local ref %s). Push to %s?" % (
        target_branch_name, PUSH_REMOTE_NAME))

    try:
        run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, target_branch_name, target_ref))
    except Exception as e:
        clean_up()
        fail("Exception while pushing: %s" % e)

    # Short (8-character) hash of the commit that was just pushed.
    merge_hash = run_cmd("git rev-parse %s" % target_branch_name)[:8]
    clean_up()

    print("Pull request #%s merged!" % pr_num)
    print("Merge hash: %s" % merge_hash)
    return merge_hash
def cherry_pick(pr_num, merge_hash, default_branch):
    """Cherry-pick *merge_hash* onto a user-chosen branch and push it.

    Returns the name of the branch the commit was picked onto.
    """
    pick_ref = raw_input("Enter a branch name [%s]: " % default_branch)
    if pick_ref == "":
        pick_ref = default_branch

    pick_branch_name = "%s_PICK_PR_%s_%s" % (BRANCH_PREFIX, pr_num, pick_ref.upper())

    run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, pick_ref, pick_branch_name))
    run_cmd("git checkout %s" % pick_branch_name)

    try:
        # -s adds a Signed-off-by trailer; -x records the picked hash.
        run_cmd("git cherry-pick -sx %s" % merge_hash)
    except Exception as e:
        msg = "Error cherry-picking: %s\nWould you like to manually fix-up this merge?" % e
        continue_maybe(msg)
        msg = "Okay, please fix any conflicts and finish the cherry-pick. Finished?"
        continue_maybe(msg)

    continue_maybe("Pick complete (local ref %s). Push to %s?" % (
        pick_branch_name, PUSH_REMOTE_NAME))

    try:
        run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, pick_branch_name, pick_ref))
    except Exception as e:
        clean_up()
        fail("Exception while pushing: %s" % e)

    pick_hash = run_cmd("git rev-parse %s" % pick_branch_name)[:8]
    clean_up()

    print("Pull request #%s picked into %s!" % (pr_num, pick_ref))
    print("Pick hash: %s" % pick_hash)
    return pick_ref
def fix_version_from_branch(branch, versions):
    """Return the unreleased version that a commit to *branch* will ship in.

    :param branch: branch name, e.g. "master" or "branch-1.5"
    :param versions: version objects (with a ``name`` attribute) sorted
        newest -> oldest; assumed to contain only un-released versions
    :return: the matching version object
    """
    if branch == "master":
        return versions[0]
    branch_ver = branch.replace("branch-", "")
    # Oldest (last) version whose name matches the branch prefix. A list
    # comprehension is used instead of filter(...)[-1] because filter()
    # returns a non-subscriptable iterator on Python 3.
    return [v for v in versions if v.name.startswith(branch_ver)][-1]
def resolve_jira_issue(merge_branches, comment, default_jira_id=""):
    """Resolve one ASF JIRA issue, prompting for the id and fix versions.

    :param merge_branches: branch names the PR was merged into
    :param comment: comment posted on the issue when resolving it
    :param default_jira_id: id offered as the default at the prompt

    NOTE(review): this function relies on Python 2 semantics of filter()
    and map() returning lists (indexing, membership tests, re-filtering).
    """
    asf_jira = jira.client.JIRA({'server': JIRA_API_BASE},
                                basic_auth=(JIRA_USERNAME, JIRA_PASSWORD))

    jira_id = raw_input("Enter a JIRA id [%s]: " % default_jira_id)
    if jira_id == "":
        jira_id = default_jira_id

    try:
        issue = asf_jira.issue(jira_id)
    except Exception as e:
        fail("ASF JIRA could not find %s\n%s" % (jira_id, e))

    cur_status = issue.fields.status.name
    cur_summary = issue.fields.summary
    cur_assignee = issue.fields.assignee
    if cur_assignee is None:
        cur_assignee = "NOT ASSIGNED!!!"
    else:
        cur_assignee = cur_assignee.displayName

    # Never double-resolve an issue.
    if cur_status == "Resolved" or cur_status == "Closed":
        fail("JIRA issue %s already has status '%s'" % (jira_id, cur_status))
    print("=== JIRA %s ===" % jira_id)
    print("summary\t\t%s\nassignee\t%s\nstatus\t\t%s\nurl\t\t%s/%s\n" %
          (cur_summary, cur_assignee, cur_status, JIRA_BASE, jira_id))

    # Candidate fix versions: unreleased x.y.z versions, newest first.
    versions = asf_jira.project_versions("SPARK")
    versions = sorted(versions, key=lambda x: x.name, reverse=True)
    versions = filter(lambda x: x.raw['released'] is False, versions)
    # Consider only x.y.z versions
    versions = filter(lambda x: re.match('\d+\.\d+\.\d+', x.name), versions)

    default_fix_versions = map(lambda x: fix_version_from_branch(x, versions).name, merge_branches)
    for v in default_fix_versions:
        # Handles the case where we have forked a release branch but not yet made the release.
        # In this case, if the PR is committed to the master branch and the release branch, we
        # only consider the release branch to be the fix version. E.g. it is not valid to have
        # both 1.1.0 and 1.0.0 as fix versions.
        (major, minor, patch) = v.split(".")
        if patch == "0":
            previous = "%s.%s.%s" % (major, int(minor) - 1, 0)
            if previous in default_fix_versions:
                default_fix_versions = filter(lambda x: x != v, default_fix_versions)
    default_fix_versions = ",".join(default_fix_versions)

    fix_versions = raw_input("Enter comma-separated fix version(s) [%s]: " % default_fix_versions)
    if fix_versions == "":
        fix_versions = default_fix_versions
    fix_versions = fix_versions.replace(" ", "").split(",")

    def get_version_json(version_str):
        # Map a version name back to the raw JIRA version object.
        return filter(lambda v: v.name == version_str, versions)[0].raw

    jira_fix_versions = map(lambda v: get_version_json(v), fix_versions)

    resolve = filter(lambda a: a['name'] == "Resolve Issue", asf_jira.transitions(jira_id))[0]
    resolution = filter(lambda r: r.raw['name'] == "Fixed", asf_jira.resolutions())[0]
    asf_jira.transition_issue(
        jira_id, resolve["id"], fixVersions=jira_fix_versions,
        comment=comment, resolution={'id': resolution.raw['id']})

    print("Successfully resolved %s with fixVersions=%s!" % (jira_id, fix_versions))
def resolve_jira_issues(title, merge_branches, comment):
    """Resolve every SPARK-XXXX issue referenced in *title*.

    When the title references no issue, prompt the user for one instead.
    """
    jira_ids = re.findall("SPARK-[0-9]{4,5}", title)

    if not jira_ids:
        resolve_jira_issue(merge_branches, comment)
    for jira_id in jira_ids:
        resolve_jira_issue(merge_branches, comment, jira_id)
def standardize_jira_ref(text):
    """
    Standardize the [SPARK-XXXXX] [MODULE] prefix
    Converts "[SPARK-XXX][mllib] Issue", "[MLLib] SPARK-XXX. Issue" or "SPARK XXX [MLLIB]: Issue" to
    "[SPARK-XXX][MLLIB] Issue"

    >>> standardize_jira_ref(
    ...     "[SPARK-5821] [SQL] ParquetRelation2 CTAS should check if delete is successful")
    '[SPARK-5821][SQL] ParquetRelation2 CTAS should check if delete is successful'
    >>> standardize_jira_ref(
    ...     "[SPARK-4123][Project Infra][WIP]: Show new dependencies added in pull requests")
    '[SPARK-4123][PROJECT INFRA][WIP] Show new dependencies added in pull requests'
    >>> standardize_jira_ref("[MLlib] Spark  5954: Top by key")
    '[SPARK-5954][MLLIB] Top by key'
    >>> standardize_jira_ref("[SPARK-979] a LRU scheduler for load balancing in TaskSchedulerImpl")
    '[SPARK-979] a LRU scheduler for load balancing in TaskSchedulerImpl'
    >>> standardize_jira_ref(
    ...     "SPARK-1094 Support MiMa for reporting binary compatibility accross versions.")
    '[SPARK-1094] Support MiMa for reporting binary compatibility accross versions.'
    >>> standardize_jira_ref("[WIP]  [SPARK-1146] Vagrant support for Spark")
    '[SPARK-1146][WIP] Vagrant support for Spark'
    >>> standardize_jira_ref(
    ...     "SPARK-1032. If Yarn app fails before registering, app master stays aroun...")
    '[SPARK-1032] If Yarn app fails before registering, app master stays aroun...'
    >>> standardize_jira_ref(
    ...     "[SPARK-6250][SPARK-6146][SPARK-5911][SQL] Types are now reserved words in DDL parser.")
    '[SPARK-6250][SPARK-6146][SPARK-5911][SQL] Types are now reserved words in DDL parser.'
    >>> standardize_jira_ref("Additional information for users building from source code")
    'Additional information for users building from source code'
    """
    # Titles that are already in the standard form pass through untouched.
    if re.search(r'^\[SPARK-[0-9]{3,6}\](\[[A-Z0-9_\s,]+\] )+\S+', text):
        return text

    # Pull out every JIRA reference: bracket it, dash-join, uppercase.
    jira_refs = []
    jira_pattern = re.compile(r'(SPARK[-\s]*[0-9]{3,6})+', re.IGNORECASE)
    for ref in jira_pattern.findall(text):
        jira_refs.append('[' + re.sub(r'\s+', '-', ref.upper()) + ']')
        text = text.replace(ref, '')

    # Pull out every bracketed component tag and uppercase it.
    components = []
    component_pattern = re.compile(r'(\[[\w\s,-\.]+\])', re.IGNORECASE)
    for component in component_pattern.findall(text):
        components.append(component.upper())
        text = text.replace(component, '')

    # Strip leftover punctuation left at the front of the remaining text.
    leftover = re.search(r'^\W+(.*)', text, re.IGNORECASE)
    if leftover is not None:
        text = leftover.groups()[0]

    # Reassemble: JIRA ref(s), then module(s), then the remaining text,
    # collapsing any repeated whitespace into single spaces.
    clean_text = ''.join(jira_refs).strip() + ''.join(components).strip() + " " + text.strip()
    return re.sub(r'\s+', ' ', clean_text.strip())
def get_current_ref():
    """Return the checked-out branch name, or the SHA for a detached HEAD."""
    ref = run_cmd("git rev-parse --abbrev-ref HEAD").strip()
    if ref != 'HEAD':
        return ref
    # The current ref is a detached HEAD, so grab its SHA.
    return run_cmd("git rev-parse HEAD").strip()
def main():
    """Drive the interactive merge flow: pick a PR, merge, backport, JIRA."""
    global original_head

    os.chdir(SPARK_HOME)
    # Remembered so clean_up() can restore the user's checkout on exit.
    original_head = get_current_ref()

    branches = get_json("%s/branches" % GITHUB_API_BASE)
    branch_names = filter(lambda x: x.startswith("branch-"), [x['name'] for x in branches])
    # Assumes branch names can be sorted lexicographically
    latest_branch = sorted(branch_names, reverse=True)[0]

    pr_num = raw_input("Which pull request would you like to merge? (e.g. 34): ")
    pr = get_json("%s/pulls/%s" % (GITHUB_API_BASE, pr_num))
    pr_events = get_json("%s/issues/%s/events" % (GITHUB_API_BASE, pr_num))

    url = pr["url"]

    # Decide whether to use the modified title or not
    modified_title = standardize_jira_ref(pr["title"])
    if modified_title != pr["title"]:
        print("I've re-written the title as follows to match the standard format:")
        print("Original: %s" % pr["title"])
        print("Modified: %s" % modified_title)
        result = raw_input("Would you like to use the modified title? (y/n): ")
        if result.lower() == "y":
            title = modified_title
            print("Using modified title:")
        else:
            title = pr["title"]
            print("Using original title:")
        print(title)
    else:
        title = pr["title"]

    body = pr["body"]
    target_ref = pr["base"]["ref"]
    user_login = pr["user"]["login"]
    base_ref = pr["head"]["ref"]
    pr_repo_desc = "%s/%s" % (user_login, base_ref)

    # Merged pull requests don't appear as merged in the GitHub API;
    # Instead, they're closed by asfgit.
    merge_commits = \
        [e for e in pr_events if e["actor"]["login"] == "asfgit" and e["event"] == "closed"]

    if merge_commits:
        # Already merged upstream: offer a backport (cherry-pick) instead.
        merge_hash = merge_commits[0]["commit_id"]
        message = get_json("%s/commits/%s" % (GITHUB_API_BASE, merge_hash))["commit"]["message"]

        print("Pull request %s has already been merged, assuming you want to backport" % pr_num)
        commit_is_downloaded = run_cmd(['git', 'rev-parse', '--quiet', '--verify',
                                        "%s^{commit}" % merge_hash]).strip() != ""
        if not commit_is_downloaded:
            fail("Couldn't find any merge commit for #%s, you may need to update HEAD." % pr_num)

        print("Found commit %s:\n%s" % (merge_hash, message))
        cherry_pick(pr_num, merge_hash, latest_branch)
        sys.exit(0)

    if not bool(pr["mergeable"]):
        msg = "Pull request %s is not mergeable in its current form.\n" % pr_num + \
            "Continue? (experts only!)"
        continue_maybe(msg)

    print("\n=== Pull Request #%s ===" % pr_num)
    print("title\t%s\nsource\t%s\ntarget\t%s\nurl\t%s" %
          (title, pr_repo_desc, target_ref, url))
    continue_maybe("Proceed with merging pull request #%s?" % pr_num)

    merged_refs = [target_ref]

    merge_hash = merge_pr(pr_num, target_ref, title, body, pr_repo_desc)

    # Optionally cherry-pick the merge into additional release branches.
    pick_prompt = "Would you like to pick %s into another branch?" % merge_hash
    while raw_input("\n%s (y/n): " % pick_prompt).lower() == "y":
        merged_refs = merged_refs + [cherry_pick(pr_num, merge_hash, latest_branch)]

    if JIRA_IMPORTED:
        if JIRA_USERNAME and JIRA_PASSWORD:
            continue_maybe("Would you like to update an associated JIRA?")
            jira_comment = "Issue resolved by pull request %s\n[%s/%s]" % \
                (pr_num, GITHUB_BASE, pr_num)
            resolve_jira_issues(title, merged_refs, jira_comment)
        else:
            print("JIRA_USERNAME and JIRA_PASSWORD not set")
            print("Exiting without trying to close the associated JIRA.")
    else:
        print("Could not find jira-python library. Run 'sudo pip install jira' to install.")
        print("Exiting without trying to close the associated JIRA.")
if __name__ == "__main__":
    # Self-check this script's doctests (standardize_jira_ref) before
    # touching any repository state.
    import doctest
    (failure_count, test_count) = doctest.testmod()
    if failure_count:
        exit(-1)

    try:
        main()
    except:
        # Always restore the user's original git state, then re-raise.
        clean_up()
        raise
|
|
"""Tests for acme.messages."""
import unittest
import mock
from acme import challenges
from acme import jose
from acme import test_util
# Shared test fixtures, loaded once for the whole module.
CERT = test_util.load_comparable_cert('cert.der')
CSR = test_util.load_comparable_csr('csr.der')
KEY = test_util.load_rsa_private_key('rsa512_key.pem')
class ErrorTest(unittest.TestCase):
    """Tests for acme.messages.Error."""

    def setUp(self):
        from acme.messages import Error
        self.error = Error(
            detail='foo', typ='urn:acme:error:malformed', title='title')
        self.jobj = {
            'detail': 'foo',
            'title': 'some title',
            'type': 'urn:acme:error:malformed',
        }
        self.error_custom = Error(typ='custom', detail='bar')
        # NOTE(review): attribute name has a typo ("cusom"); neither this
        # nor self.jobj is referenced by the tests visible here.
        self.jobj_cusom = {'type': 'custom', 'detail': 'bar'}

    def test_from_json_hashable(self):
        from acme.messages import Error
        # Round-trip through JSON must produce a hashable object.
        hash(Error.from_json(self.error.to_json()))

    def test_description(self):
        # Known error types have descriptions; custom types have none.
        self.assertEqual(
            'The request message was malformed', self.error.description)
        self.assertTrue(self.error_custom.description is None)

    def test_str(self):
        self.assertEqual(
            'urn:acme:error:malformed :: The request message was '
            'malformed :: foo :: title', str(self.error))
        self.assertEqual('custom :: bar', str(self.error_custom))
class ConstantTest(unittest.TestCase):
    """Tests for acme.messages._Constant."""

    def setUp(self):
        from acme.messages import _Constant

        class MockConstant(_Constant):  # pylint: disable=missing-docstring
            # Presumably populated by the _Constant machinery as instances
            # are created (from_json('a') succeeds below) -- TODO confirm.
            POSSIBLE_NAMES = {}

        self.MockConstant = MockConstant  # pylint: disable=invalid-name
        self.const_a = MockConstant('a')
        self.const_b = MockConstant('b')

    def test_to_partial_json(self):
        self.assertEqual('a', self.const_a.to_partial_json())
        self.assertEqual('b', self.const_b.to_partial_json())

    def test_from_json(self):
        self.assertEqual(self.const_a, self.MockConstant.from_json('a'))
        # Unknown names must raise a deserialization error.
        self.assertRaises(
            jose.DeserializationError, self.MockConstant.from_json, 'c')

    def test_from_json_hashable(self):
        hash(self.MockConstant.from_json('a'))

    def test_repr(self):
        self.assertEqual('MockConstant(a)', repr(self.const_a))
        self.assertEqual('MockConstant(b)', repr(self.const_b))

    def test_equality(self):
        # Equal names compare equal; != must be consistent with ==.
        const_a_prime = self.MockConstant('a')
        self.assertFalse(self.const_a == self.const_b)
        self.assertTrue(self.const_a == const_a_prime)
        self.assertTrue(self.const_a != self.const_b)
        self.assertFalse(self.const_a != const_a_prime)
class DirectoryTest(unittest.TestCase):
    """Tests for acme.messages.Directory."""

    def setUp(self):
        from acme.messages import Directory
        self.dir = Directory({
            'new-reg': 'reg',
            # Keys may be raw strings or objects exposing .resource_type.
            mock.MagicMock(resource_type='new-cert'): 'cert',
        })

    def test_init_wrong_key_value_error(self):
        from acme.messages import Directory
        self.assertRaises(ValueError, Directory, {'foo': 'bar'})

    def test_getitem(self):
        self.assertEqual('reg', self.dir['new-reg'])
        from acme.messages import NewRegistration
        # Lookup also works with the resource class and with an instance.
        self.assertEqual('reg', self.dir[NewRegistration])
        self.assertEqual('reg', self.dir[NewRegistration()])

    def test_getitem_fails_with_key_error(self):
        self.assertRaises(KeyError, self.dir.__getitem__, 'foo')

    def test_getattr(self):
        # 'new-reg' key is reachable as the new_reg attribute.
        self.assertEqual('reg', self.dir.new_reg)

    def test_getattr_fails_with_attribute_error(self):
        self.assertRaises(AttributeError, self.dir.__getattr__, 'foo')

    def test_to_partial_json(self):
        self.assertEqual(
            self.dir.to_partial_json(), {'new-reg': 'reg', 'new-cert': 'cert'})

    def test_from_json_deserialization_error_on_wrong_key(self):
        from acme.messages import Directory
        self.assertRaises(
            jose.DeserializationError, Directory.from_json, {'foo': 'bar'})
class RegistrationTest(unittest.TestCase):
    """Tests for acme.messages.Registration."""

    def setUp(self):
        key = jose.jwk.JWKRSA(key=KEY.public_key())
        contact = (
            'mailto:admin@foo.com',
            'tel:1234',
        )
        agreement = 'https://letsencrypt.org/terms'

        from acme.messages import Registration
        self.reg = Registration(key=key, contact=contact, agreement=agreement)
        # NOTE(review): reg_none is built but not referenced by the tests
        # visible here.
        self.reg_none = Registration(authorizations='uri/authorizations',
                                     certificates='uri/certificates')

        self.jobj_to = {
            'contact': contact,
            'agreement': agreement,
            'key': key,
        }
        # The wire form carries the JSON-serialized key instead.
        self.jobj_from = self.jobj_to.copy()
        self.jobj_from['key'] = key.to_json()

    def test_from_data(self):
        from acme.messages import Registration
        reg = Registration.from_data(phone='1234', email='admin@foo.com')
        self.assertEqual(reg.contact, (
            'tel:1234',
            'mailto:admin@foo.com',
        ))

    def test_phones(self):
        # Phones are extracted from 'tel:' contact entries.
        self.assertEqual(('1234',), self.reg.phones)

    def test_emails(self):
        # Emails are extracted from 'mailto:' contact entries.
        self.assertEqual(('admin@foo.com',), self.reg.emails)

    def test_to_partial_json(self):
        self.assertEqual(self.jobj_to, self.reg.to_partial_json())

    def test_from_json(self):
        from acme.messages import Registration
        self.assertEqual(self.reg, Registration.from_json(self.jobj_from))

    def test_from_json_hashable(self):
        from acme.messages import Registration
        hash(Registration.from_json(self.jobj_from))
class UpdateRegistrationTest(unittest.TestCase):
    """Tests for acme.messages.UpdateRegistration."""

    def test_empty(self):
        from acme.messages import UpdateRegistration
        serialized = '{"resource": "reg"}'
        # An empty update still serializes the mandatory resource field,
        # and deserializing it yields an equal empty update.
        self.assertEqual(UpdateRegistration().json_dumps(), serialized)
        self.assertEqual(
            UpdateRegistration.json_loads(serialized), UpdateRegistration())
class RegistrationResourceTest(unittest.TestCase):
    """Tests for acme.messages.RegistrationResource."""

    def setUp(self):
        from acme.messages import RegistrationResource
        self.regr = RegistrationResource(
            body=mock.sentinel.body, uri=mock.sentinel.uri,
            new_authzr_uri=mock.sentinel.new_authzr_uri,
            terms_of_service=mock.sentinel.terms_of_service)

    def test_to_partial_json(self):
        # Every constructor field must round into the JSON form unchanged.
        expected = {
            'body': mock.sentinel.body,
            'uri': mock.sentinel.uri,
            'new_authzr_uri': mock.sentinel.new_authzr_uri,
            'terms_of_service': mock.sentinel.terms_of_service,
        }
        self.assertEqual(self.regr.to_json(), expected)
class ChallengeResourceTest(unittest.TestCase):
    """Tests for acme.messages.ChallengeResource."""

    def test_uri(self):
        from acme.messages import ChallengeResource
        body = mock.MagicMock(uri='http://challb')
        resource = ChallengeResource(body=body, authzr_uri='http://authz')
        # The resource's uri proxies the body's uri.
        self.assertEqual('http://challb', resource.uri)
class ChallengeBodyTest(unittest.TestCase):
    """Tests for acme.messages.ChallengeBody."""

    def setUp(self):
        self.chall = challenges.DNS(token=jose.b64decode(
            'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA'))

        from acme.messages import ChallengeBody
        from acme.messages import Error
        from acme.messages import STATUS_INVALID
        self.status = STATUS_INVALID
        error = Error(typ='urn:acme:error:serverInternal',
                      detail='Unable to communicate with DNS server')
        self.challb = ChallengeBody(
            uri='http://challb', chall=self.chall, status=self.status,
            error=error)

        self.jobj_to = {
            'uri': 'http://challb',
            'status': self.status,
            'type': 'dns',
            'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
            'error': error,
        }
        # Wire form: status and error appear fully serialized.
        self.jobj_from = self.jobj_to.copy()
        self.jobj_from['status'] = 'invalid'
        self.jobj_from['error'] = {
            'type': 'urn:acme:error:serverInternal',
            'detail': 'Unable to communicate with DNS server',
        }

    def test_to_partial_json(self):
        self.assertEqual(self.jobj_to, self.challb.to_partial_json())

    def test_from_json(self):
        from acme.messages import ChallengeBody
        self.assertEqual(self.challb, ChallengeBody.from_json(self.jobj_from))

    def test_from_json_hashable(self):
        from acme.messages import ChallengeBody
        hash(ChallengeBody.from_json(self.jobj_from))

    def test_proxy(self):
        # Attribute access proxies through to the wrapped challenge
        # (challb.token == chall.token).
        self.assertEqual(jose.b64decode(
            'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA'), self.challb.token)
class AuthorizationTest(unittest.TestCase):
    """Tests for acme.messages.Authorization."""

    def setUp(self):
        from acme.messages import ChallengeBody
        from acme.messages import STATUS_VALID
        # Three challenge bodies with distinct challenge types.
        self.challbs = (
            ChallengeBody(
                uri='http://challb1', status=STATUS_VALID,
                chall=challenges.HTTP01(token=b'IlirfxKKXAsHtmzK29Pj8A')),
            ChallengeBody(uri='http://challb2', status=STATUS_VALID,
                          chall=challenges.DNS(
                              token=b'DGyRejmCefe7v4NfDGDKfA')),
            ChallengeBody(uri='http://challb3', status=STATUS_VALID,
                          chall=challenges.RecoveryContact()),
        )
        # Index tuples into self.challbs that satisfy the authorization.
        combinations = ((0, 2), (1, 2))

        from acme.messages import Authorization
        from acme.messages import Identifier
        from acme.messages import IDENTIFIER_FQDN
        identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
        self.authz = Authorization(
            identifier=identifier, combinations=combinations,
            challenges=self.challbs)
        self.jobj_from = {
            'identifier': identifier.to_json(),
            'challenges': [challb.to_json() for challb in self.challbs],
            'combinations': combinations,
        }

    def test_from_json(self):
        from acme.messages import Authorization
        Authorization.from_json(self.jobj_from)

    def test_from_json_hashable(self):
        from acme.messages import Authorization
        hash(Authorization.from_json(self.jobj_from))

    def test_resolved_combinations(self):
        # Index combinations resolve to the actual challenge bodies.
        self.assertEqual(self.authz.resolved_combinations, (
            (self.challbs[0], self.challbs[2]),
            (self.challbs[1], self.challbs[2]),
        ))
class AuthorizationResourceTest(unittest.TestCase):
    """Tests for acme.messages.AuthorizationResource."""

    def test_json_de_serializable(self):
        from acme.messages import AuthorizationResource
        resource = AuthorizationResource(
            body=mock.sentinel.body,
            uri=mock.sentinel.uri,
            new_cert_uri=mock.sentinel.new_cert_uri,
        )
        # The resource must participate in jose's (de)serialization protocol.
        self.assertTrue(isinstance(resource, jose.JSONDeSerializable))
class CertificateRequestTest(unittest.TestCase):
    """Tests for acme.messages.CertificateRequest."""

    def setUp(self):
        from acme.messages import CertificateRequest
        self.req = CertificateRequest(csr=CSR)

    def test_json_de_serializable(self):
        from acme.messages import CertificateRequest
        self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
        # JSON round-trip must reproduce an equal request.
        roundtripped = CertificateRequest.from_json(self.req.to_json())
        self.assertEqual(roundtripped, self.req)
class CertificateResourceTest(unittest.TestCase):
    """Tests for acme.messages.CertificateResourceTest."""

    def setUp(self):
        from acme.messages import CertificateResource
        self.certr = CertificateResource(
            body=CERT, uri=mock.sentinel.uri, authzrs=(),
            cert_chain_uri=mock.sentinel.cert_chain_uri)

    def test_json_de_serializable(self):
        from acme.messages import CertificateResource
        self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
        # JSON round-trip must reproduce an equal resource.
        roundtripped = CertificateResource.from_json(self.certr.to_json())
        self.assertEqual(roundtripped, self.certr)
class RevocationTest(unittest.TestCase):
    """Tests for acme.messages.RevocationTest."""

    def setUp(self):
        from acme.messages import Revocation
        self.rev = Revocation(certificate=CERT)

    def test_from_json_hashable(self):
        from acme.messages import Revocation
        # Round-trip through JSON and make sure the result is hashable.
        roundtripped = Revocation.from_json(self.rev.to_json())
        hash(roundtripped)
if __name__ == '__main__':
    # Run the whole test module when executed directly.
    unittest.main()  # pragma: no cover
|
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from c7n.utils import local_session, type_schema
from c7n_gcp.actions import MethodAction
class SetIamPolicy(MethodAction):
""" Sets IAM policy. It works with bindings only.
The action supports two lists for modifying the existing IAM policy: `add-bindings` and
`remove-bindings`. The `add-bindings` records are merged with the existing bindings, hereby
no changes are made if all the required bindings are already present in the applicable
resource. The `remove-bindings` records are used to filter out the existing bindings,
so the action will take no effect if there are no matches. For more information,
please refer to the `_add_bindings` and `_remove_bindings` methods respectively.
Considering a record added both to the `add-bindings` and `remove-bindings` lists, which
though is not a recommended thing to do in general, the latter is designed to be a more
restrictive one, so the record will be removed from the existing IAM bindings in the end.
The following member types are available to work with:
- allUsers,
- allAuthenticatedUsers,
- user,
- group,
- domain,
- serviceAccount.
Note the `resource` field in the example that could be changed to another resource that has
both `setIamPolicy` and `getIamPolicy` methods (such as gcp.spanner-database-instance).
Example:
.. code-block:: yaml
policies:
- name: gcp-spanner-instance-set-iam-policy
resource: gcp.spanner-instance
actions:
- type: set-iam-policy
add-bindings:
- members:
- user:user1@test.com
- user:user2@test.com
role: roles/owner
- members:
- user:user3@gmail.com
role: roles/viewer
remove-bindings:
- members:
- user:user4@test.com
role: roles/owner
- members:
- user:user5@gmail.com
- user:user6@gmail.com
role: roles/viewer
"""
schema = type_schema('set-iam-policy',
**{
'minProperties': 1,
'additionalProperties': False,
'add-bindings': {
'type': 'array',
'minItems': 1,
'items': {'role': {'type': 'string'},
'members': {'type': 'array',
'items': {
'type': 'string'},
'minItems': 1}}
},
'remove-bindings': {
'type': 'array',
'minItems': 1,
'items': {'role': {'type': 'string'},
'members': {'oneOf': [
{'type': 'array',
'items': {'type': 'string'},
'minItems': 1},
{'enum': ['*']}]}}},
})
method_spec = {'op': 'setIamPolicy'}
schema_alias = True
def get_resource_params(self, model, resource):
    """
    Build the `setIamPolicy` request parameters for a resource.

    Collects `existing_bindings` with the `_get_existing_bindings` method, `add_bindings`
    and `remove_bindings` from the policy, then applies `_add_bindings` followed by
    `_remove_bindings`, and finally sets the resulting list at the 'bindings' key if
    there is at least a single record there, or assigns an empty object to the
    'policy' key in order to avoid errors produced by the API.

    :param model: the parameters that are defined in a resource manager
    :param resource: the resource the action is applied to
    """
    params = self._verb_arguments(resource)
    existing_bindings = self._get_existing_bindings(model, resource)
    # dict.get with a default replaces the "'key' in d" / else-[] dance.
    add_bindings = self.data.get('add-bindings', [])
    remove_bindings = self.data.get('remove-bindings', [])
    bindings_to_set = self._add_bindings(existing_bindings, add_bindings)
    bindings_to_set = self._remove_bindings(bindings_to_set, remove_bindings)
    # An empty `policy` object (rather than an empty bindings list) avoids
    # API errors when everything has been filtered out.
    params['body'] = {
        'policy': {'bindings': bindings_to_set} if bindings_to_set else {}}
    return params
def _get_existing_bindings(self, model, resource):
    """
    Fetch the resource's current IAM policy bindings.

    Calls the `getIamPolicy` method on the resource the action is applied to and
    returns either a list of existing bindings or an empty one if there is no
    'bindings' key.

    :param model: the same as in `get_resource_params` (needed to take `component` from)
    :param resource: the same as in `get_resource_params` (passed into `_verb_arguments`)
    """
    client = local_session(self.manager.session_factory).client(
        self.manager.resource_type.service,
        self.manager.resource_type.version,
        model.component)
    policy = client.execute_query(
        'getIamPolicy', verb_arguments=self._verb_arguments(resource))
    # dict.get with a default replaces the "'key' in d" / else-[] dance.
    return policy.get('bindings', [])
def _verb_arguments(self, resource):
"""
Returns a dictionary passed when making the `getIamPolicy` and 'setIamPolicy' API calls.
:param resource: the same as in `get_resource_params`
"""
return {'resource': resource[self.manager.resource_type.id]}
def _add_bindings(self, existing_bindings, bindings_to_add):
"""
Converts the provided lists using `_get_roles_to_bindings_dict`, then iterates through
them so that the returned list combines:
- among the roles mentioned in a policy, the existing members merged with the ones to add
so that there are no duplicates,
- as for the other roles, all their members.
The roles or members that are mentioned in the policy and already present
in the existing bindings are simply ignored with no errors produced.
An empty list could be returned only if both `existing_bindings` and `bindings_to_remove`
are empty, the possibility of which is defined by the caller of the method.
For additional information on how the method works, please refer to the tests
(e.g. test_spanner).
:param existing_bindings: a list of dictionaries containing the 'role' and 'members' keys
taken from the resource the action is applied to
:param bindings_to_add: a list of dictionaries containing the 'role' and 'members' keys
taken from the policy
"""
bindings = []
roles_to_existing_bindings = self._get_roles_to_bindings_dict(existing_bindings)
roles_to_bindings_to_add = self._get_roles_to_bindings_dict(bindings_to_add)
for role in roles_to_bindings_to_add:
updated_members = dict(roles_to_bindings_to_add[role])
if role in roles_to_existing_bindings:
existing_members = roles_to_existing_bindings[role]['members']
members_to_add = list(filter(lambda member: member not in existing_members,
updated_members['members']))
updated_members['members'] = existing_members + members_to_add
bindings.append(updated_members)
for role in roles_to_existing_bindings:
if role not in roles_to_bindings_to_add:
bindings.append(roles_to_existing_bindings[role])
return bindings
def _remove_bindings(self, existing_bindings, bindings_to_remove):
"""
Converts the provided lists using `_get_roles_to_bindings_dict`, then iterates through
them so that the returned list combines:
- among the roles mentioned in a policy, only the members that are not marked for removal,
- as for the other roles, all their members.
The roles or members that are mentioned in the policy but are absent
in the existing bindings are simply ignored with no errors produced.
As can be observed, it is possible to have an empty list returned either if
`existing_bindings` is already empty or `bindings_to_remove` filters everything out.
In addition, a star wildcard could be used as the `members` key value (members: '*')
in order to remove all members from a role.
For additional information on how the method works, please refer to the tests
(e.g. test_spanner).
:param existing_bindings: a list of dictionaries containing the 'role' and 'members' keys
taken from the resource the action is applied to
:param bindings_to_remove: a list of dictionaries containing the 'role' and 'members' keys
taken from the policy
"""
bindings = []
roles_to_existing_bindings = self._get_roles_to_bindings_dict(existing_bindings)
roles_to_bindings_to_remove = self._get_roles_to_bindings_dict(bindings_to_remove)
for role in roles_to_bindings_to_remove:
if (role in roles_to_existing_bindings and
roles_to_bindings_to_remove[role]['members'] != '*'):
updated_members = dict(roles_to_existing_bindings[role])
members_to_remove = roles_to_bindings_to_remove[role]
updated_members['members'] = list(filter(
lambda member: member not in members_to_remove['members'],
updated_members['members']))
if len(updated_members['members']) > 0:
bindings.append(updated_members)
for role in roles_to_existing_bindings:
if role not in roles_to_bindings_to_remove:
bindings.append(roles_to_existing_bindings[role])
return bindings
def _get_roles_to_bindings_dict(self, bindings_list):
"""
Converts a given list to a dictionary, values under the 'role' key in elements of whose
become keys in the resulting dictionary while the elements themselves become values
associated with these keys.
:param bindings_list: a list whose elements are expected to have the 'role' key
"""
return {binding['role']: binding for binding in bindings_list}
|
|
"""Testing the metric for classification with imbalanced dataset"""
# Authors: Guillaume Lemaitre <g.lemaitre58@gmail.com>
# Christos Aridas
# License: MIT
from __future__ import division, print_function
from functools import partial
import numpy as np
from pytest import approx, raises
from sklearn import datasets
from sklearn import svm
from sklearn.preprocessing import label_binarize
from sklearn.utils.fixes import np_version
from sklearn.utils.validation import check_random_state
from sklearn.utils.testing import assert_allclose, assert_array_equal
from sklearn.utils.testing import assert_no_warnings
from sklearn.utils.testing import ignore_warnings
from sklearn.metrics import accuracy_score, average_precision_score
from sklearn.metrics import brier_score_loss, cohen_kappa_score
from sklearn.metrics import jaccard_similarity_score, precision_score
from sklearn.metrics import recall_score, roc_auc_score
from imblearn.metrics import sensitivity_specificity_support
from imblearn.metrics import sensitivity_score
from imblearn.metrics import specificity_score
from imblearn.metrics import geometric_mean_score
from imblearn.metrics import make_index_balanced_accuracy
from imblearn.metrics import classification_report_imbalanced
from imblearn.utils.testing import warns
RND_SEED = 42
R_TOL = 1e-2
###############################################################################
# Utilities for testing
def make_prediction(dataset=None, binary=False):
    """Make some classification predictions on a toy dataset using a SVC

    If binary is True restrict to a binary classification problem instead of a
    multiclass classification problem

    Returns ``(y_true, y_pred, probas_pred)`` for the held-out second half
    of the shuffled samples.
    """
    if dataset is None:
        # import some data to play with
        dataset = datasets.load_iris()
    X = dataset.data
    y = dataset.target
    if binary:
        # restrict to a binary classification task
        X, y = X[y < 2], y[y < 2]
    n_samples, n_features = X.shape
    p = np.arange(n_samples)
    # NOTE: the fixed seeds and the exact order of the RNG calls below pin
    # the expected values asserted throughout this test module.
    rng = check_random_state(37)
    rng.shuffle(p)
    X, y = X[p], y[p]
    half = int(n_samples / 2)
    # add noisy features to make the problem harder and avoid perfect results
    rng = np.random.RandomState(0)
    X = np.c_[X, rng.randn(n_samples, 200 * n_features)]
    # run classifier, get class probabilities and label predictions
    clf = svm.SVC(kernel='linear', probability=True, random_state=0)
    probas_pred = clf.fit(X[:half], y[:half]).predict_proba(X[half:])
    if binary:
        # only interested in probabilities of the positive case
        # XXX: do we really want a special API for the binary case?
        probas_pred = probas_pred[:, 1]
    y_pred = clf.predict(X[half:])
    y_true = y[half:]
    return y_true, y_pred, probas_pred
###############################################################################
# Tests
def test_sensitivity_specificity_score_binary():
    """Check sensitivity/specificity on the binary toy problem."""
    y_true, y_pred, _ = make_prediction(binary=True)
    # detailed measures for each class
    sen, spe, sup = sensitivity_specificity_support(
        y_true, y_pred, average=None)
    assert_allclose(sen, [0.88, 0.68], rtol=R_TOL)
    assert_allclose(spe, [0.68, 0.88], rtol=R_TOL)
    assert_array_equal(sup, [25, 25])
    # individual scoring function that can be used for grid search: in the
    # binary class case the score is the value of the measure for the positive
    # class (e.g. label == 1). This is deprecated for average != 'binary'.
    for kwargs, my_assert in [({}, assert_no_warnings), ({
            'average': 'binary'
    }, assert_no_warnings)]:
        sen = my_assert(sensitivity_score, y_true, y_pred, **kwargs)
        assert_allclose(sen, 0.68, rtol=R_TOL)
        spe = my_assert(specificity_score, y_true, y_pred, **kwargs)
        assert_allclose(spe, 0.88, rtol=R_TOL)
def test_sensitivity_specificity_f_binary_single_class():
    """Single-class input must yield degenerate scores, not crash."""
    # Such a case may occur with non-stratified cross-validation.
    y_pos = [1, 1]
    assert sensitivity_score(y_pos, y_pos) == 1.
    assert specificity_score(y_pos, y_pos) == 0.
    y_neg = [-1, -1]
    assert sensitivity_score(y_neg, y_neg) == 0.
    assert specificity_score(y_neg, y_neg) == 0.
@ignore_warnings
def test_sensitivity_specificity_extra_labels():
    """`labels` may list classes absent from y_true/y_pred."""
    y_true = [1, 3, 3, 2]
    y_pred = [1, 1, 3, 2]
    # No average: zeros in array
    actual = specificity_score(
        y_true, y_pred, labels=[0, 1, 2, 3, 4], average=None)
    assert_allclose([1., 0.67, 1., 1., 1.], actual, rtol=R_TOL)
    # Macro average is changed
    actual = specificity_score(
        y_true, y_pred, labels=[0, 1, 2, 3, 4], average='macro')
    assert_allclose(np.mean([1., 0.67, 1., 1., 1.]), actual, rtol=R_TOL)
    # Check for micro
    actual = specificity_score(
        y_true, y_pred, labels=[0, 1, 2, 3, 4], average='micro')
    assert_allclose(15. / 16., actual, rtol=R_TOL)
    # Check for weighted
    # BUG FIX: this branch previously repeated the 'macro' check verbatim;
    # it now exercises average='weighted' (per-class specificities weighted
    # by the y_true support of each label: [0, 1, 1, 2, 0]).
    actual = specificity_score(
        y_true, y_pred, labels=[0, 1, 2, 3, 4], average='weighted')
    assert_allclose(np.average([1., 0.67, 1., 1., 1.],
                               weights=[0., 1., 1., 2., 0.]),
                    actual, rtol=R_TOL)
@ignore_warnings
def test_sensitivity_specificity_ignored_labels():
    """`labels` restricts the computation to a subset of the classes."""
    y_true = [1, 1, 2, 3]
    y_pred = [1, 3, 3, 3]
    # Pre-bind the data so each call below only varies `average`.
    specificity_13 = partial(specificity_score, y_true, y_pred, labels=[1, 3])
    specificity_all = partial(specificity_score, y_true, y_pred, labels=None)
    assert_allclose([1., 0.33], specificity_13(average=None), rtol=R_TOL)
    assert_allclose(
        np.mean([1., 0.33]), specificity_13(average='macro'), rtol=R_TOL)
    assert_allclose(
        np.average(
            [1., .33], weights=[2., 1.]),
        specificity_13(average='weighted'),
        rtol=R_TOL)
    assert_allclose(3. / (3. + 2.), specificity_13(average='micro'),
                    rtol=R_TOL)
    # ensure the above were meaningful tests:
    for each in ['macro', 'weighted', 'micro']:
        assert specificity_13(average=each) != specificity_all(average=each)
def test_sensitivity_specificity_error_multilabels():
    """Multilabel (binarized) input is rejected with a ValueError."""
    classes = np.arange(5)
    y_true_bin = label_binarize([1, 3, 3, 2], classes=classes)
    y_pred_bin = label_binarize([1, 1, 3, 2], classes=classes)
    with raises(ValueError):
        sensitivity_score(y_true_bin, y_pred_bin)
@ignore_warnings
def test_sensitivity_specificity_support_errors():
    """Invalid `pos_label` / `average` arguments raise ValueError."""
    y_true, y_pred, _ = make_prediction(binary=True)
    # Bad pos_label
    with raises(ValueError):
        sensitivity_specificity_support(y_true, y_pred, pos_label=2,
                                        average='binary')
    # Bad average option
    with raises(ValueError):
        sensitivity_specificity_support([0, 1, 2], [1, 2, 0], average='mega')
def test_sensitivity_specificity_unused_pos_label():
    """A warning is emitted when `pos_label` is set but ignored."""
    # but average != 'binary'; even if data is binary
    # FIX: use a raw string -- '\[' in a plain literal is an invalid escape
    # sequence (DeprecationWarning on modern Python, regex intent unclear).
    with warns(UserWarning, r"use labels=\[pos_label\] to specify a single"):
        sensitivity_specificity_support([1, 2, 1], [1, 2, 2],
                                        pos_label=2,
                                        average='macro')
def test_geometric_mean_support_binary():
    """Geometric mean on the binary toy problem matches the reference value."""
    y_true, y_pred, _ = make_prediction(binary=True)
    # compute the geometric mean for the binary problem
    assert_allclose(geometric_mean_score(y_true, y_pred), 0.77, rtol=R_TOL)
def test_geometric_mean_multiclass():
    """Check geometric_mean_score on hand-crafted and toy multiclass data."""
    # Perfect and worst-case predictions without correction.
    y_true = [0, 0, 1, 1]
    y_pred = [0, 0, 1, 1]
    assert_allclose(geometric_mean_score(y_true, y_pred), 1.0, rtol=R_TOL)
    y_true = [0, 0, 0, 0]
    y_pred = [1, 1, 1, 1]
    print(geometric_mean_score(y_true, y_pred))
    assert_allclose(geometric_mean_score(y_true, y_pred), 0.0, rtol=R_TOL)
    # The `correction` value substitutes zero per-class recalls.
    cor = 0.001
    y_true = [0, 0, 0, 0]
    y_pred = [0, 0, 0, 0]
    assert_allclose(geometric_mean_score(y_true, y_pred, correction=cor),
                    1.0, rtol=R_TOL)
    y_true = [0, 0, 0, 0]
    y_pred = [1, 1, 1, 1]
    assert_allclose(geometric_mean_score(y_true, y_pred, correction=cor),
                    cor, rtol=R_TOL)
    y_true = [0, 0, 1, 1]
    y_pred = [0, 1, 1, 0]
    assert_allclose(geometric_mean_score(y_true, y_pred, correction=cor),
                    0.5, rtol=R_TOL)
    y_true = [0, 1, 2, 0, 1, 2]
    y_pred = [0, 2, 1, 0, 0, 1]
    assert_allclose(geometric_mean_score(y_true, y_pred, correction=cor),
                    (1*cor*cor)**(1.0/3.0), rtol=R_TOL)
    y_true = [0, 1, 2, 3, 4, 5]
    y_pred = [0, 1, 2, 3, 4, 5]
    assert_allclose(geometric_mean_score(y_true, y_pred, correction=cor),
                    1, rtol=R_TOL)
    y_true = [0, 1, 1, 1, 1, 0]
    y_pred = [0, 0, 1, 1, 1, 1]
    assert_allclose(geometric_mean_score(y_true, y_pred, correction=cor),
                    (0.5*0.75)**0.5, rtol=R_TOL)
    # Averaging options on a fixed multiclass example.
    y_true = [0, 1, 2, 0, 1, 2]
    y_pred = [0, 2, 1, 0, 0, 1]
    assert_allclose(geometric_mean_score(y_true, y_pred, average='macro'),
                    0.47140452079103168, rtol=R_TOL)
    assert_allclose(geometric_mean_score(y_true, y_pred, average='micro'),
                    0.47140452079103168, rtol=R_TOL)
    assert_allclose(geometric_mean_score(y_true, y_pred,
                                         average='weighted'),
                    0.47140452079103168, rtol=R_TOL)
    assert_allclose(geometric_mean_score(y_true, y_pred, average=None),
                    [0.8660254, 0.0, 0.0], rtol=R_TOL)
    # `labels` restriction and sample weighting.
    y_true = [0, 1, 2, 0, 1, 2]
    y_pred = [0, 1, 1, 0, 0, 1]
    assert_allclose(geometric_mean_score(y_true, y_pred, labels=[0, 1]),
                    0.70710678118654752, rtol=R_TOL)
    assert_allclose(geometric_mean_score(y_true, y_pred, labels=[0, 1],
                                         sample_weight=[1, 2, 1, 1, 2, 1]),
                    0.70710678118654752, rtol=R_TOL)
    assert_allclose(geometric_mean_score(y_true, y_pred, labels=[0, 1],
                                         sample_weight=[1, 2, 1, 1, 2, 1],
                                         average='weighted'),
                    0.3333333333, rtol=R_TOL)
    # Reference values on the shared multiclass toy problem.
    y_true, y_pred, _ = make_prediction(binary=False)
    geo_mean = geometric_mean_score(y_true, y_pred)
    assert_allclose(geo_mean, 0.41, rtol=R_TOL)
    # Compute the geometric mean for each of the classes
    geo_mean = geometric_mean_score(y_true, y_pred, average=None)
    assert_allclose(geo_mean, [0.85, 0.29, 0.7], rtol=R_TOL)
    # average tests
    geo_mean = geometric_mean_score(y_true, y_pred, average='macro')
    assert_allclose(geo_mean, 0.68, rtol=R_TOL)
    geo_mean = geometric_mean_score(y_true, y_pred, average='weighted')
    assert_allclose(geo_mean, 0.65, rtol=R_TOL)
def test_iba_geo_mean_binary():
    """IBA-weighted geometric mean on the binary toy problem."""
    y_true, y_pred, _ = make_prediction(binary=True)
    iba_factory = make_index_balanced_accuracy(alpha=0.5, squared=True)
    iba = iba_factory(geometric_mean_score)(y_true, y_pred)
    assert_allclose(iba, 0.5948, rtol=R_TOL)
def _format_report(report):
return ' '.join(report.split())
def test_classification_report_imbalanced_multiclass():
    """Report rendering with explicit target names and with label detection."""
    iris = datasets.load_iris()
    y_true, y_pred, _ = make_prediction(dataset=iris, binary=False)
    # print classification report with class names
    expected_report = ('pre rec spe f1 geo iba sup setosa 0.83 0.79 0.92 '
                       '0.81 0.86 0.74 24 versicolor 0.33 0.10 0.86 0.15 '
                       '0.44 0.19 31 virginica 0.42 0.90 0.55 0.57 0.63 '
                       '0.37 20 avg / total 0.51 0.53 0.80 0.47 0.62 0.41 75')
    report = classification_report_imbalanced(
        y_true,
        y_pred,
        labels=np.arange(len(iris.target_names)),
        target_names=iris.target_names)
    assert _format_report(report) == expected_report
    # print classification report with label detection
    expected_report = ('pre rec spe f1 geo iba sup 0 0.83 0.79 0.92 0.81 '
                       '0.86 0.74 24 1 0.33 0.10 0.86 0.15 0.44 0.19 31 2 '
                       '0.42 0.90 0.55 0.57 0.63 0.37 20 avg / total 0.51 '
                       '0.53 0.80 0.47 0.62 0.41 75')
    report = classification_report_imbalanced(y_true, y_pred)
    assert _format_report(report) == expected_report
def test_classification_report_imbalanced_multiclass_with_digits():
    """`digits` controls the precision of the rendered scores."""
    iris = datasets.load_iris()
    y_true, y_pred, _ = make_prediction(dataset=iris, binary=False)
    # print classification report with class names
    expected_report = ('pre rec spe f1 geo iba sup setosa 0.82609 0.79167 '
                       '0.92157 0.80851 0.86409 0.74085 24 versicolor '
                       '0.33333 0.09677 0.86364 0.15000 0.43809 0.18727 31 '
                       'virginica 0.41860 0.90000 0.54545 0.57143 0.62645 '
                       '0.37208 20 avg / total 0.51375 0.53333 0.79733 '
                       '0.47310 0.62464 0.41370 75')
    report = classification_report_imbalanced(
        y_true,
        y_pred,
        labels=np.arange(len(iris.target_names)),
        target_names=iris.target_names,
        digits=5)
    assert _format_report(report) == expected_report
    # print classification report with label detection (default digits)
    expected_report = ('pre rec spe f1 geo iba sup 0 0.83 0.79 0.92 0.81 '
                       '0.86 0.74 24 1 0.33 0.10 0.86 0.15 0.44 0.19 31 2 '
                       '0.42 0.90 0.55 0.57 0.63 0.37 20 avg / total 0.51 '
                       '0.53 0.80 0.47 0.62 0.41 75')
    report = classification_report_imbalanced(y_true, y_pred)
    assert _format_report(report) == expected_report
def test_classification_report_imbalanced_multiclass_with_string_label():
    """String class labels are rendered, and `target_names` overrides them."""
    y_true, y_pred, _ = make_prediction(binary=False)
    y_true = np.array(["blue", "green", "red"])[y_true]
    y_pred = np.array(["blue", "green", "red"])[y_pred]
    expected_report = ('pre rec spe f1 geo iba sup blue 0.83 0.79 0.92 '
                       '0.81 0.86 0.74 24 green 0.33 0.10 0.86 0.15 0.44 '
                       '0.19 31 red 0.42 0.90 0.55 0.57 0.63 0.37 20 '
                       'avg / total 0.51 0.53 0.80 0.47 0.62 0.41 75')
    report = classification_report_imbalanced(y_true, y_pred)
    assert _format_report(report) == expected_report
    # target_names replaces the detected labels in the rendered rows.
    expected_report = ('pre rec spe f1 geo iba sup a 0.83 0.79 0.92 0.81 '
                       '0.86 0.74 24 b 0.33 0.10 0.86 0.15 0.44 0.19 31 '
                       'c 0.42 0.90 0.55 0.57 0.63 0.37 20 avg / total '
                       '0.51 0.53 0.80 0.47 0.62 0.41 75')
    report = classification_report_imbalanced(
        y_true, y_pred, target_names=["a", "b", "c"])
    assert _format_report(report) == expected_report
def test_classification_report_imbalanced_multiclass_with_unicode_label():
    """Unicode class labels are rendered (unsupported on NumPy < 1.7.0)."""
    y_true, y_pred, _ = make_prediction(binary=False)
    labels = np.array([u"blue\xa2", u"green\xa2", u"red\xa2"])
    y_true = labels[y_true]
    y_pred = labels[y_pred]
    expected_report = (u'pre rec spe f1 geo iba sup blue\xa2 0.83 0.79 '
                       u'0.92 0.81 0.86 0.74 24 green\xa2 0.33 0.10 0.86 '
                       u'0.15 0.44 0.19 31 red\xa2 0.42 0.90 0.55 0.57 0.63 '
                       u'0.37 20 avg / total 0.51 0.53 0.80 0.47 0.62 0.41 75')
    # Old NumPy cannot handle unicode label arrays and must raise.
    if np_version[:3] < (1, 7, 0):
        with raises(RuntimeError, match="NumPy < 1.7.0"):
            classification_report_imbalanced(y_true, y_pred)
    else:
        report = classification_report_imbalanced(y_true, y_pred)
        assert _format_report(report) == expected_report
def test_classification_report_imbalanced_multiclass_with_long_string_label():
    """Long class labels do not break the report's column layout."""
    y_true, y_pred, _ = make_prediction(binary=False)
    labels = np.array(["blue", "green" * 5, "red"])
    y_true = labels[y_true]
    y_pred = labels[y_pred]
    expected_report = ('pre rec spe f1 geo iba sup blue 0.83 0.79 0.92 0.81 '
                       '0.86 0.74 24 greengreengreengreengreen 0.33 0.10 '
                       '0.86 0.15 0.44 0.19 31 red 0.42 0.90 0.55 0.57 0.63 '
                       '0.37 20 avg / total 0.51 0.53 0.80 0.47 0.62 0.41 75')
    report = classification_report_imbalanced(y_true, y_pred)
    assert _format_report(report) == expected_report
def test_iba_sklearn_metrics():
    """IBA-wrapping of label-based sklearn metrics yields reference scores."""
    y_true, y_pred, _ = make_prediction(binary=True)
    acc = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        accuracy_score)
    score = acc(y_true, y_pred)
    assert score == approx(0.54756)
    jss = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        jaccard_similarity_score)
    score = jss(y_true, y_pred)
    assert score == approx(0.54756)
    pre = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        precision_score)
    score = pre(y_true, y_pred)
    assert score == approx(0.65025)
    rec = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        recall_score)
    score = rec(y_true, y_pred)
    assert score == approx(0.41616000000000009)
def test_iba_error_y_score_prob():
    """Metrics that need scores/probabilities cannot be IBA-wrapped."""
    y_true, y_pred, _ = make_prediction(binary=True)
    aps = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        average_precision_score)
    with raises(AttributeError):
        aps(y_true, y_pred)
    brier = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        brier_score_loss)
    with raises(AttributeError):
        brier(y_true, y_pred)
    kappa = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        cohen_kappa_score)
    with raises(AttributeError):
        kappa(y_true, y_pred)
    ras = make_index_balanced_accuracy(alpha=0.5, squared=True)(
        roc_auc_score)
    with raises(AttributeError):
        ras(y_true, y_pred)
|
|
"""The tests for the time_pattern automation."""
import pytest
import homeassistant.components.automation as automation
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import async_fire_time_changed, async_mock_service, mock_component
from tests.components.automation import common
@pytest.fixture
def calls(hass):
    """Track calls to a mock service."""
    # Returns the list that collects every fired `test.automation` call.
    return async_mock_service(hass, "test", "automation")
@pytest.fixture(autouse=True)
def setup_comp(hass):
    """Initialize components."""
    # The automation setup expects the `group` integration to be loaded.
    mock_component(hass, "group")
async def test_if_fires_when_hour_matches(hass, calls):
    """Test for firing if hour is matching."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    "platform": "time_pattern",
                    "hours": 0,
                    "minutes": "*",
                    "seconds": "*",
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    # A matching hour fires the automation once.
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
    # Once the automation is turned off, the same trigger no longer fires.
    await common.async_turn_off(hass)
    await hass.async_block_till_done()
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
async def test_if_fires_when_minute_matches(hass, calls):
    """Test for firing if minutes are matching."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    "platform": "time_pattern",
                    "hours": "*",
                    "minutes": 0,
                    "seconds": "*",
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    # A matching minute fires the automation once.
    async_fire_time_changed(hass, dt_util.utcnow().replace(minute=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
async def test_if_fires_when_second_matches(hass, calls):
    """Test for firing if seconds are matching."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    "platform": "time_pattern",
                    "hours": "*",
                    "minutes": "*",
                    "seconds": 0,
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    # A matching second fires the automation once.
    async_fire_time_changed(hass, dt_util.utcnow().replace(second=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
async def test_if_fires_when_all_matches(hass, calls):
    """Test for firing if everything matches."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    "platform": "time_pattern",
                    "hours": 1,
                    "minutes": 2,
                    "seconds": 3,
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    # All three fields must match simultaneously for the trigger to fire.
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=1, minute=2, second=3))
    await hass.async_block_till_done()
    assert 1 == len(calls)
async def test_if_fires_periodic_seconds(hass, calls):
    """Test for firing periodically every second."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    "platform": "time_pattern",
                    "hours": "*",
                    "minutes": "*",
                    # '/2' means "every second divisible by 2".
                    "seconds": "/2",
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=0, minute=0, second=2))
    await hass.async_block_till_done()
    assert 1 == len(calls)
async def test_if_fires_periodic_minutes(hass, calls):
    """Test for firing periodically every minute."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    "platform": "time_pattern",
                    "hours": "*",
                    # '/2' means "every minute divisible by 2".
                    "minutes": "/2",
                    "seconds": "*",
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=0, minute=2, second=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
async def test_if_fires_periodic_hours(hass, calls):
    """Test for firing periodically every hour."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    # '/2' means "every hour divisible by 2".
                    "hours": "/2",
                    "platform": "time_pattern",
                    "minutes": "*",
                    "seconds": "*",
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=2, minute=0, second=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
async def test_default_values(hass, calls):
    """Test for firing at 2 minutes every hour."""
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {"platform": "time_pattern", "minutes": "2"},
                "action": {"service": "test.automation"},
            }
        },
    )
    # With hours/seconds omitted, minute 2 / second 0 fires...
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=1, minute=2, second=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
    # ...a non-zero second does not...
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=1, minute=2, second=1))
    await hass.async_block_till_done()
    assert 1 == len(calls)
    # ...and the next hour's minute 2 fires again (hours default to '*').
    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=2, minute=2, second=0))
    await hass.async_block_till_done()
    assert 2 == len(calls)
|
|
from django.forms import ValidationError
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.tests.utils import HAS_SPATIALREFSYS
from django.test import SimpleTestCase
from django.utils import six
from django.utils.html import escape
from django.utils.unittest import skipUnless
if HAS_SPATIALREFSYS:
from django.contrib.gis import forms
from django.contrib.gis.geos import GEOSGeometry
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS, "GeometryFieldTest needs gdal support and a spatial database")
class GeometryFieldTest(SimpleTestCase):
    """Tests for the generic ``forms.GeometryField``."""

    def test_init(self):
        "Testing GeometryField initialization with defaults."
        fld = forms.GeometryField()
        # None/0 fail because the field is required by default; the other
        # values are rejected as unparsable geometry input.
        for bad_default in ('blah', 3, 'FoO', None, 0):
            self.assertRaises(ValidationError, fld.clean, bad_default)

    def test_srid(self):
        "Testing GeometryField with a SRID set."
        # Input that doesn't specify the SRID is assumed to be in the SRID
        # of the input field.
        fld = forms.GeometryField(srid=4326)
        geom = fld.clean('POINT(5 23)')
        self.assertEqual(4326, geom.srid)
        # Making the field in a different SRID from that of the geometry, and
        # asserting it transforms.
        fld = forms.GeometryField(srid=32140)
        tol = 0.0000001
        xform_geom = GEOSGeometry('POINT (951640.547328465 4219369.26171664)', srid=32140)
        # The cleaned geometry should be transformed to 32140.
        cleaned_geom = fld.clean('SRID=4326;POINT (-95.363151 29.763374)')
        self.assertTrue(xform_geom.equals_exact(cleaned_geom, tol))

    def test_null(self):
        "Testing GeometryField's handling of null (None) geometries."
        # Form fields, by default, are required (`required=True`)
        fld = forms.GeometryField()
        with six.assertRaisesRegex(self, forms.ValidationError,
                                   "No geometry value provided."):
            fld.clean(None)
        # This will clean None as a geometry (See #10660).
        fld = forms.GeometryField(required=False)
        self.assertIsNone(fld.clean(None))

    def test_geom_type(self):
        "Testing GeometryField's handling of different geometry types."
        # By default, all geometry types are allowed.
        fld = forms.GeometryField()
        for wkt in ('POINT(5 23)', 'MULTIPOLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))', 'LINESTRING(0 0, 1 1)'):
            self.assertEqual(GEOSGeometry(wkt), fld.clean(wkt))
        pnt_fld = forms.GeometryField(geom_type='POINT')
        self.assertEqual(GEOSGeometry('POINT(5 23)'), pnt_fld.clean('POINT(5 23)'))
        # a WKT for any other geom_type will be properly transformed by `to_python`
        self.assertEqual(GEOSGeometry('LINESTRING(0 0, 1 1)'), pnt_fld.to_python('LINESTRING(0 0, 1 1)'))
        # but rejected by `clean`
        self.assertRaises(forms.ValidationError, pnt_fld.clean, 'LINESTRING(0 0, 1 1)')

    def test_to_python(self):
        """
        Testing to_python returns a correct GEOSGeometry object or
        a ValidationError
        """
        fld = forms.GeometryField()
        # to_python returns the same GEOSGeometry for a WKT
        for wkt in ('POINT(5 23)', 'MULTIPOLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))', 'LINESTRING(0 0, 1 1)'):
            self.assertEqual(GEOSGeometry(wkt), fld.to_python(wkt))
        # but raises a ValidationError for any other string
        for wkt in ('POINT(5)', 'MULTI POLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))', 'BLAH(0 0, 1 1)'):
            self.assertRaises(forms.ValidationError, fld.to_python, wkt)
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS,
"SpecializedFieldTest needs gdal support and a spatial database")
class SpecializedFieldTest(SimpleTestCase):
def setUp(self):
    """Build one reference SRID=4326 geometry per specialized field type."""
    self.geometries = {
        'point': GEOSGeometry("SRID=4326;POINT(9.052734375 42.451171875)"),
        'multipoint': GEOSGeometry("SRID=4326;MULTIPOINT("
                                   "(13.18634033203125 14.504356384277344),"
                                   "(13.207969665527 14.490966796875),"
                                   "(13.177070617675 14.454917907714))"),
        'linestring': GEOSGeometry("SRID=4326;LINESTRING("
                                   "-8.26171875 -0.52734375,"
                                   "-7.734375 4.21875,"
                                   "6.85546875 3.779296875,"
                                   "5.44921875 -3.515625)"),
        'multilinestring': GEOSGeometry("SRID=4326;MULTILINESTRING("
                                        "(-16.435546875 -2.98828125,"
                                        "-17.2265625 2.98828125,"
                                        "-0.703125 3.515625,"
                                        "-1.494140625 -3.33984375),"
                                        "(-8.0859375 -5.9765625,"
                                        "8.525390625 -8.7890625,"
                                        "12.392578125 -0.87890625,"
                                        "10.01953125 7.646484375))"),
        'polygon': GEOSGeometry("SRID=4326;POLYGON("
                                "(-1.669921875 6.240234375,"
                                "-3.8671875 -0.615234375,"
                                "5.9765625 -3.955078125,"
                                "18.193359375 3.955078125,"
                                "9.84375 9.4921875,"
                                "-1.669921875 6.240234375))"),
        'multipolygon': GEOSGeometry("SRID=4326;MULTIPOLYGON("
                                     "((-17.578125 13.095703125,"
                                     "-17.2265625 10.8984375,"
                                     "-13.974609375 10.1953125,"
                                     "-13.359375 12.744140625,"
                                     "-15.732421875 13.7109375,"
                                     "-17.578125 13.095703125)),"
                                     "((-8.525390625 5.537109375,"
                                     "-8.876953125 2.548828125,"
                                     "-5.888671875 1.93359375,"
                                     "-5.09765625 4.21875,"
                                     "-6.064453125 6.240234375,"
                                     "-8.525390625 5.537109375)))"),
        'geometrycollection': GEOSGeometry("SRID=4326;GEOMETRYCOLLECTION("
                                           "POINT(5.625 -0.263671875),"
                                           "POINT(6.767578125 -3.603515625),"
                                           "POINT(8.525390625 0.087890625),"
                                           "POINT(8.0859375 -2.13134765625),"
                                           "LINESTRING("
                                           "6.273193359375 -1.175537109375,"
                                           "5.77880859375 -1.812744140625,"
                                           "7.27294921875 -2.230224609375,"
                                           "7.657470703125 -1.25244140625))"),
    }
def assertMapWidget(self, form_instance):
    """
    Make sure the MapWidget js is passed in the form media and a MapWidget
    is actually created
    """
    self.assertTrue(form_instance.is_valid())
    rendered = form_instance.as_p()
    # Both the inline widget-instantiation script and the OpenLayers
    # widget asset must be present in the rendered output/media.
    self.assertIn('new MapWidget(options);', rendered)
    self.assertIn('gis/js/OLMapWidget.js', str(form_instance.media))
def assertTextarea(self, geom, rendered):
    """Assert that *rendered* contains a required textarea with the WKT."""
    for fragment in ('<textarea ', 'required', geom.wkt):
        self.assertIn(fragment, rendered)
def test_pointfield(self):
    """PointField round-trips a point and rejects every other geometry type."""

    class PointForm(forms.Form):
        p = forms.PointField()

    geom = self.geometries['point']
    form = PointForm(data={'p': geom})
    self.assertTextarea(geom, form.as_p())
    self.assertMapWidget(form)
    # An unbound form is invalid because the field is required.
    self.assertFalse(PointForm().is_valid())
    invalid = PointForm(data={'p': 'some invalid geom'})
    self.assertFalse(invalid.is_valid())
    self.assertTrue('Invalid geometry value' in str(invalid.errors))
    # Any non-point geometry must fail the field's geom_type check.
    for invalid in [geom for key, geom in self.geometries.items() if key!='point']:
        self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())
def test_multipointfield(self):
    """MultiPointField accepts multipoints and rejects other geometry types."""

    class PointForm(forms.Form):
        p = forms.MultiPointField()

    geom = self.geometries['multipoint']
    form = PointForm(data={'p': geom})
    self.assertTextarea(geom, form.as_p())
    self.assertMapWidget(form)
    # An unbound form is invalid because the field is required.
    self.assertFalse(PointForm().is_valid())
    # Any non-multipoint geometry must fail the field's geom_type check.
    for invalid in [geom for key, geom in self.geometries.items() if key!='multipoint']:
        self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())
def test_linestringfield(self):
    """LineStringField accepts a linestring and rejects everything else."""
    class LineStringForm(forms.Form):
        l = forms.LineStringField()

    geom = self.geometries['linestring']
    form = LineStringForm(data={'l': geom})
    self.assertTextarea(geom, form.as_p())
    self.assertMapWidget(form)
    self.assertFalse(LineStringForm().is_valid())
    for invalid in [geom for key, geom in self.geometries.items() if key != 'linestring']:
        # BUG FIX: the field is named 'l', not 'p'. With the wrong key the
        # form was always invalid (required field missing), so this loop
        # passed vacuously instead of exercising geometry-type validation.
        self.assertFalse(LineStringForm(data={'l': invalid.wkt}).is_valid())
def test_multilinestringfield(self):
    """MultiLineStringField accepts a multilinestring, rejects everything else."""
    class LineStringForm(forms.Form):
        l = forms.MultiLineStringField()

    geom = self.geometries['multilinestring']
    form = LineStringForm(data={'l': geom})
    self.assertTextarea(geom, form.as_p())
    self.assertMapWidget(form)
    self.assertFalse(LineStringForm().is_valid())
    for invalid in [geom for key, geom in self.geometries.items() if key != 'multilinestring']:
        # BUG FIX: the field is named 'l', not 'p'. With the wrong key the
        # form was always invalid (required field missing), so this loop
        # passed vacuously instead of exercising geometry-type validation.
        self.assertFalse(LineStringForm(data={'l': invalid.wkt}).is_valid())
def test_polygonfield(self):
    """PolygonField accepts a polygon and rejects everything else."""
    class PolygonForm(forms.Form):
        p = forms.PolygonField()

    geometry = self.geometries['polygon']
    bound = PolygonForm(data={'p': geometry})
    self.assertTextarea(geometry, bound.as_p())
    self.assertMapWidget(bound)
    self.assertFalse(PolygonForm().is_valid())
    for key, other in self.geometries.items():
        if key == 'polygon':
            continue
        self.assertFalse(PolygonForm(data={'p': other.wkt}).is_valid())
def test_multipolygonfield(self):
    """MultiPolygonField accepts a multipolygon and rejects everything else."""
    class PolygonForm(forms.Form):
        p = forms.MultiPolygonField()

    geometry = self.geometries['multipolygon']
    bound = PolygonForm(data={'p': geometry})
    self.assertTextarea(geometry, bound.as_p())
    self.assertMapWidget(bound)
    self.assertFalse(PolygonForm().is_valid())
    for key, other in self.geometries.items():
        if key == 'multipolygon':
            continue
        self.assertFalse(PolygonForm(data={'p': other.wkt}).is_valid())
def test_geometrycollectionfield(self):
    """GeometryCollectionField accepts a collection, rejects everything else."""
    class GeometryForm(forms.Form):
        g = forms.GeometryCollectionField()

    geometry = self.geometries['geometrycollection']
    bound = GeometryForm(data={'g': geometry})
    self.assertTextarea(geometry, bound.as_p())
    self.assertMapWidget(bound)
    self.assertFalse(GeometryForm().is_valid())
    for key, other in self.geometries.items():
        if key == 'geometrycollection':
            continue
        self.assertFalse(GeometryForm(data={'g': other.wkt}).is_valid())
def test_osm_widget(self):
    """The OSMWidget renders an OpenStreetMap base layer for the field."""
    class PointForm(forms.Form):
        p = forms.PointField(widget=forms.OSMWidget)

    bound = PointForm(data={'p': self.geometries['point']})
    rendered = bound.as_p()
    self.assertIn("OpenStreetMap (Mapnik)", rendered)
    self.assertIn("id: 'id_p',", rendered)
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS,
    "CustomGeometryWidgetTest needs gdal support and a spatial database")
class CustomGeometryWidgetTest(SimpleTestCase):
    """Check that a BaseGeometryWidget subclass can override how values are
    serialized/deserialized between GEOS geometries and the rendered field."""

    def test_custom_serialization_widget(self):
        class CustomGeometryWidget(forms.BaseGeometryWidget):
            template_name = 'gis/openlayers.html'
            # Class-level counter bumped every time deserialize() runs.
            deserialize_called = 0

            def serialize(self, value):
                # Serialize to GeoJSON instead of the default WKT.
                return value.json if value else ''

            def deserialize(self, value):
                self.deserialize_called += 1
                return GEOSGeometry(value)

        class PointForm(forms.Form):
            p = forms.PointField(widget=CustomGeometryWidget)

        point = GEOSGeometry("SRID=4326;POINT(9.052734375 42.451171875)")
        form = PointForm(data={'p': point})
        # The bound geometry is rendered through the custom serialize().
        self.assertIn(escape(point.json), form.as_p())
        # NOTE(review): this resets `called`, but the counter attribute is
        # `deserialize_called` -- looks like a stale name; confirm intent.
        CustomGeometryWidget.called = 0
        widget = form.fields['p'].widget
        # Force deserialize use due to a string value
        self.assertIn(escape(point.json), widget.render('p', point.json))
        self.assertEqual(widget.deserialize_called, 1)
        form = PointForm(data={'p': point.json})
        self.assertTrue(form.is_valid())
        # Ensure that resulting geometry has srid set
        self.assertEqual(form.cleaned_data['p'].srid, 4326)
|
|
#!/usr/bin/env python
# coding=utf-8
# Copyright 2013 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
import StringIO
import base64
import json
import logging
import os
import signal
import shutil
import subprocess
import sys
import tempfile
import time
import unittest
import zipfile
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
# Small hack to make it work on Windows even without symlink support.
if os.path.isfile(os.path.join(THIS_DIR, 'utils')):
sys.path.insert(0, os.path.join(THIS_DIR, '..', '..', '..', 'client'))
# Import everything that does not require sys.path hack first.
import logging_utils
import task_runner
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.insert(0, ROOT_DIR)
import test_env
test_env.setup_test_env()
import xsrf_client
from utils import subprocess42
CLIENT_TESTS = os.path.join(ROOT_DIR, '..', '..', 'client', 'tests')
sys.path.insert(0, CLIENT_TESTS)
# Creates a server mock for functions in net.py.
import net_utils
def compress_to_zip(files):
  """Zips the {filename: content} mapping in memory; returns the archive bytes."""
  buf = StringIO.StringIO()
  with zipfile.ZipFile(buf, 'w') as archive:
    for name, content in files.iteritems():
      archive.writestr(name, content)
  return buf.getvalue()
class TestTaskRunnerBase(net_utils.TestCase):
  """Base fixture: isolates each test in a fresh temporary directory and
  provides helpers to build a TaskDetails and the expected HTTP traffic."""

  def setUp(self):
    super(TestTaskRunnerBase, self).setUp()
    self.root_dir = tempfile.mkdtemp(prefix='task_runner')
    self.work_dir = os.path.join(self.root_dir, 'work')
    os.chdir(self.root_dir)
    os.mkdir(self.work_dir)

  def tearDown(self):
    # Leave the directory about to be deleted (required on Windows).
    os.chdir(BASE_DIR)
    shutil.rmtree(self.root_dir)
    super(TestTaskRunnerBase, self).tearDown()

  @staticmethod
  def get_task_details(
      script, hard_timeout=10., io_timeout=10., grace_period=30.):
    # Builds a TaskDetails that runs `script` with the current interpreter.
    return task_runner.TaskDetails(
        {
          'bot_id': 'localhost',
          'command': [sys.executable, '-u', '-c', script],
          'data': [],
          'env': {},
          'grace_period': grace_period,
          'hard_timeout': hard_timeout,
          'io_timeout': io_timeout,
          'task_id': 23,
        })

  def requests(self, cost_usd=0., **kwargs):
    """Registers the 3 expected HTTP calls: the XSRF token handshake, the
    initial task_update ping and the final task_update with the results."""
    requests = [
      (
        'https://localhost:1/auth/api/v1/accounts/self/xsrf_token',
        {'data': {}, 'headers': {'X-XSRF-Token-Request': '1'}},
        {'xsrf_token': 'token'},
      ),
      (
        'https://localhost:1/swarming/api/v1/bot/task_update/23',
        self.get_check_first(cost_usd),
        {},
      ),
      (
        'https://localhost:1/swarming/api/v1/bot/task_update/23',
        self.get_check_final(**kwargs),
        {},
      ),
    ]
    self.expected_requests(requests)

  def get_check_first(self, cost_usd):
    # Returns a validator for the first task_update request; cost_usd is a
    # lower bound since the real cost keeps growing with elapsed time.
    def check_first(kwargs):
      self.assertLessEqual(cost_usd, kwargs['data'].pop('cost_usd'))
      self.assertEqual(
          {
            'data': {
              'id': 'localhost',
              'task_id': 23,
            },
            'headers': {'X-XSRF-Token': 'token'},
          },
          kwargs)
    return check_first
class TestTaskRunner(TestTaskRunnerBase):
  """task_runner tests with time.time() mocked to deterministic values."""

  def setUp(self):
    super(TestTaskRunner, self).setUp()
    # Freeze the clock so durations and costs are predictable.
    self.mock(time, 'time', lambda: 1000000000.)

  def get_check_final(self, exit_code=0, output='hi\n'):
    # Returns a validator for the final task_update request.
    def check_final(kwargs):
      # It makes the diffing easier.
      kwargs['data']['output'] = base64.b64decode(kwargs['data']['output'])
      self.assertEqual(
          {
            'data': {
              'cost_usd': 10.,
              'duration': 0.,
              'exit_code': exit_code,
              'hard_timeout': False,
              'id': 'localhost',
              'io_timeout': False,
              'output': output,
              'output_chunk_start': 0,
              'task_id': 23,
            },
            'headers': {'X-XSRF-Token': 'token'},
          },
          kwargs)
    return check_final

  def _run(self, task_details):
    # Runs the task with the clock advanced by exactly 10 seconds.
    start = time.time()
    self.mock(time, 'time', lambda: start + 10)
    server = xsrf_client.XsrfRemote('https://localhost:1/')
    return task_runner.run_command(
        server, task_details, '.', 3600., start)

  def test_download_data(self):
    # Two zip downloads must be extracted side by side into root_dir.
    requests = [
      (
        'https://localhost:1/a',
        {},
        compress_to_zip({'file1': 'content1', 'file2': 'content2'}),
        None,
      ),
      (
        'https://localhost:1/b',
        {},
        compress_to_zip({'file3': 'content3'}),
        None,
      ),
    ]
    self.expected_requests(requests)
    items = [(i[0], 'foo.zip') for i in requests]
    task_runner.download_data(self.root_dir, items)
    self.assertEqual(
        ['file1', 'file2', 'file3', 'work'], sorted(os.listdir(self.root_dir)))

  def test_load_and_run(self):
    # load_and_run() downloads the data then hands off to run_command().
    requests = [
      (
        'https://localhost:1/f',
        {},
        compress_to_zip({'file3': 'content3'}),
        None,
      ),
    ]
    self.expected_requests(requests)
    server = xsrf_client.XsrfRemote('https://localhost:1/')
    runs = []
    def run_command(
        swarming_server, task_details, work_dir, cost_usd_hour, start):
      self.assertEqual(server, swarming_server)
      # Necessary for OSX.
      self.assertEqual(os.path.realpath(self.work_dir), work_dir)
      self.assertTrue(isinstance(task_details, task_runner.TaskDetails))
      self.assertEqual(3600., cost_usd_hour)
      self.assertEqual(time.time(), start)
      runs.append(0)
      return 0
    self.mock(task_runner, 'run_command', run_command)
    manifest = os.path.join(self.root_dir, 'manifest')
    with open(manifest, 'wb') as f:
      data = {
        'bot_id': 'localhost',
        'command': ['a'],
        'data': [('https://localhost:1/f', 'foo.zip')],
        'env': {'d': 'e'},
        'grace_period': 30.,
        'hard_timeout': 10,
        'io_timeout': 11,
        'task_id': 23,
      }
      json.dump(data, f)
    self.assertEqual(
        True, task_runner.load_and_run(manifest, server, 3600., time.time()))
    self.assertEqual([0], runs)

  def test_load_and_run_fail(self):
    # A non-zero exit code from run_command() makes load_and_run() return
    # False.
    requests = [
      (
        'https://localhost:1/f',
        {},
        compress_to_zip({'file3': 'content3'}),
        None,
      ),
    ]
    self.expected_requests(requests)
    server = xsrf_client.XsrfRemote('https://localhost:1/')
    runs = []
    def run_command(
        swarming_server, task_details, work_dir, cost_usd_hour, start):
      self.assertEqual(server, swarming_server)
      # Necessary for OSX.
      self.assertEqual(os.path.realpath(self.work_dir), work_dir)
      self.assertTrue(isinstance(task_details, task_runner.TaskDetails))
      self.assertEqual(3600., cost_usd_hour)
      self.assertEqual(time.time(), start)
      runs.append(0)
      # Fails the first, pass the second.
      return 1 if len(runs) == 1 else 0
    self.mock(task_runner, 'run_command', run_command)
    manifest = os.path.join(self.root_dir, 'manifest')
    with open(manifest, 'wb') as f:
      data = {
        'bot_id': 'localhost',
        'command': ['a'],
        'data': [('https://localhost:1/f', 'foo.zip')],
        'env': {'d': 'e'},
        'grace_period': 30.,
        'hard_timeout': 10,
        'io_timeout': 11,
        'task_id': 23,
      }
      json.dump(data, f)
    self.assertEqual(
        False, task_runner.load_and_run(manifest, server, 3600., time.time()))
    self.assertEqual([0], runs)

  def test_run_command(self):
    # This runs the command for real.
    self.requests(cost_usd=1, exit_code=0)
    task_details = self.get_task_details('print(\'hi\')')
    self.assertEqual(0, self._run(task_details))

  def test_run_command_fail(self):
    # This runs the command for real.
    self.requests(cost_usd=10., exit_code=1)
    task_details = self.get_task_details(
        'import sys; print(\'hi\'); sys.exit(1)')
    self.assertEqual(1, self._run(task_details))

  def test_run_command_os_error(self):
    # This runs the command for real.
    # OS specific error, fix expectation for other OSes.
    output = (
        'Command "executable_that_shouldnt_be_on_your_system '
        'thus_raising_OSError" failed to start.\n'
        'Error: [Error 2] The system cannot find the file specified'
      ) if sys.platform == 'win32' else (
        'Command "executable_that_shouldnt_be_on_your_system '
        'thus_raising_OSError" failed to start.\n'
        'Error: [Errno 2] No such file or directory')
    self.requests(cost_usd=10., exit_code=1, output=output)
    task_details = task_runner.TaskDetails(
        {
          'bot_id': 'localhost',
          'command': [
            'executable_that_shouldnt_be_on_your_system',
            'thus_raising_OSError',
          ],
          'data': [],
          'env': {},
          'grace_period': 30.,
          'hard_timeout': 6,
          'io_timeout': 6,
          'task_id': 23,
        })
    self.assertEqual(1, self._run(task_details))

  def test_run_command_large(self):
    # Exercises output chunking: a large burst of output is uploaded in an
    # intermediate task_update, the trailing chunk goes in the final one.
    # Method should have "self" as first argument - pylint: disable=E0213
    class Popen(object):
      """Mocks the process so we can control how data is returned."""
      def __init__(self2, cmd, cwd, env, stdout, stderr, stdin, detached):
        self.assertEqual(task_details.command, cmd)
        self.assertEqual('./', cwd)
        self.assertEqual(os.environ, env)
        self.assertEqual(subprocess.PIPE, stdout)
        self.assertEqual(subprocess.STDOUT, stderr)
        self.assertEqual(subprocess.PIPE, stdin)
        self.assertEqual(True, detached)
        self2._out = [
          'hi!\n',
          'hi!\n',
          'hi!\n' * 100000,
          'hi!\n',
        ]
      def yield_any(self2, maxsize, soft_timeout):
        self.assertLess(0, maxsize)
        self.assertLess(0, soft_timeout)
        for i in self2._out:
          yield 'stdout', i
      @staticmethod
      def wait():
        return 0
      @staticmethod
      def kill():
        self.fail()
    self.mock(subprocess42, 'Popen', Popen)
    def check_final(kwargs):
      self.assertEqual(
          {
            'data': {
              # That's because the cost includes the duration starting at start,
              # not when the process was started.
              'cost_usd': 10.,
              'duration': 0.,
              'exit_code': 0,
              'hard_timeout': False,
              'id': 'localhost',
              'io_timeout': False,
              'output': base64.b64encode('hi!\n'),
              'output_chunk_start': 100002*4,
              'task_id': 23,
            },
            'headers': {'X-XSRF-Token': 'token'},
          },
          kwargs)
    requests = [
      (
        'https://localhost:1/auth/api/v1/accounts/self/xsrf_token',
        {'data': {}, 'headers': {'X-XSRF-Token-Request': '1'}},
        {'xsrf_token': 'token'},
      ),
      (
        'https://localhost:1/swarming/api/v1/bot/task_update/23',
        {
          'data': {
            'cost_usd': 10.,
            'id': 'localhost',
            'task_id': 23,
          },
          'headers': {'X-XSRF-Token': 'token'},
        },
        {},
      ),
      (
        'https://localhost:1/swarming/api/v1/bot/task_update/23',
        {
          'data': {
            'cost_usd': 10.,
            'id': 'localhost',
            'output': base64.b64encode('hi!\n' * 100002),
            'output_chunk_start': 0,
            'task_id': 23,
          },
          'headers': {'X-XSRF-Token': 'token'},
        },
        {},
      ),
      (
        'https://localhost:1/swarming/api/v1/bot/task_update/23',
        check_final,
        {},
      ),
    ]
    self.expected_requests(requests)
    server = xsrf_client.XsrfRemote('https://localhost:1/')
    task_details = task_runner.TaskDetails(
        {
          'bot_id': 'localhost',
          'command': ['large', 'executable'],
          'data': [],
          'env': {},
          'grace_period': 30.,
          'hard_timeout': 60,
          'io_timeout': 60,
          'task_id': 23,
        })
    start = time.time()
    self.mock(time, 'time', lambda: start + 10)
    r = task_runner.run_command(
        server, task_details, './', 3600., start)
    self.assertEqual(0, r)

  def test_main(self):
    # main() returns 0 when load_and_run() reports success.
    def load_and_run(manifest, swarming_server, cost_usd_hour, start):
      self.assertEqual('foo', manifest)
      self.assertEqual('http://localhost', swarming_server.url)
      self.assertEqual(3600., cost_usd_hour)
      self.assertEqual(time.time(), start)
      return True
    self.mock(task_runner, 'load_and_run', load_and_run)
    cmd = [
      '--swarming-server', 'http://localhost', '--file', 'foo',
      '--cost-usd-hour', '3600', '--start', str(time.time()),
    ]
    self.assertEqual(0, task_runner.main(cmd))

  def test_main_reboot(self):
    # main() returns TASK_FAILED when load_and_run() reports failure.
    def load_and_run(manifest, swarming_server, cost_usd_hour, start):
      self.assertEqual('foo', manifest)
      self.assertEqual('http://localhost', swarming_server.url)
      self.assertEqual(3600., cost_usd_hour)
      self.assertEqual(time.time(), start)
      return False
    self.mock(task_runner, 'load_and_run', load_and_run)
    cmd = [
      '--swarming-server', 'http://localhost', '--file', 'foo',
      '--cost-usd-hour', '3600', '--start', str(time.time()),
    ]
    self.assertEqual(task_runner.TASK_FAILED, task_runner.main(cmd))
class TestTaskRunnerNoTimeMock(TestTaskRunnerBase):
  # Do not mock time.time() for these tests otherwise it becomes a tricky
  # implementation detail check.
  # These test cases run the command for real.

  # TODO(maruel): Calculate this value automatically through iteration?
  SHORT_TIME_OUT = 0.3

  # Here's a simple script that handles signals properly. Sadly SIGBREAK is not
  # defined on posix.
  SCRIPT_SIGNAL = (
      'import signal, sys, time;\n'
      'l = [];\n'
      'def handler(signum, _):\n'
      '  l.append(signum);\n'
      '  print(\'got signal %%d\' %% signum);\n'
      '  sys.stdout.flush();\n'
      'signal.signal(%s, handler);\n'
      'print(\'hi\');\n'
      'sys.stdout.flush();\n'
      'while not l:\n'
      '  try:\n'
      '    time.sleep(0.01);\n'
      '  except IOError:\n'
      '    pass;\n'
      'print(\'bye\')') % (
      'signal.SIGBREAK' if sys.platform == 'win32' else 'signal.SIGTERM')

  # Same as SCRIPT_SIGNAL but hangs after handling the signal, forcing the
  # grace period to expire and the process to be killed.
  SCRIPT_SIGNAL_HANG = (
      'import signal, sys, time;\n'
      'l = [];\n'
      'def handler(signum, _):\n'
      '  l.append(signum);\n'
      '  print(\'got signal %%d\' %% signum);\n'
      '  sys.stdout.flush();\n'
      'signal.signal(%s, handler);\n'
      'print(\'hi\');\n'
      'sys.stdout.flush();\n'
      'while not l:\n'
      '  try:\n'
      '    time.sleep(0.01);\n'
      '  except IOError:\n'
      '    pass;\n'
      'print(\'bye\');\n'
      'time.sleep(100)') % (
      'signal.SIGBREAK' if sys.platform == 'win32' else 'signal.SIGTERM')

  # Prints then hangs without any signal handler.
  SCRIPT_HANG = 'import time; print(\'hi\'); time.sleep(100)'

  def get_check_final(
      self, hard_timeout=False, io_timeout=False, exit_code=None,
      output='hi\n'):
    # Validator for the final task_update; cost and duration are only
    # lower-bounded since real wall clock time is used.
    def check_final(kwargs):
      self.assertLess(self.SHORT_TIME_OUT, kwargs['data'].pop('cost_usd'))
      self.assertLess(self.SHORT_TIME_OUT, kwargs['data'].pop('duration'))
      # It makes the diffing easier.
      kwargs['data']['output'] = base64.b64decode(kwargs['data']['output'])
      self.assertEqual(
          {
            'data': {
              'exit_code': exit_code,
              'hard_timeout': hard_timeout,
              'id': 'localhost',
              'io_timeout': io_timeout,
              'output': output,
              'output_chunk_start': 0,
              'task_id': 23,
            },
            'headers': {'X-XSRF-Token': 'token'},
          },
          kwargs)
    return check_final

  def _run(self, task_details):
    server = xsrf_client.XsrfRemote('https://localhost:1/')
    return task_runner.run_command(
        server, task_details, '.', 3600., time.time())

  def test_hard(self):
    # Actually 0xc000013a
    sig = -1073741510 if sys.platform == 'win32' else -signal.SIGTERM
    self.requests(hard_timeout=True, exit_code=sig)
    details = self.get_task_details(
        self.SCRIPT_HANG, hard_timeout=self.SHORT_TIME_OUT)
    self.assertEqual(sig, self._run(details))

  def test_io(self):
    # Actually 0xc000013a
    sig = -1073741510 if sys.platform == 'win32' else -signal.SIGTERM
    self.requests(io_timeout=True, exit_code=sig)
    details = self.get_task_details(
        self.SCRIPT_HANG, io_timeout=self.SHORT_TIME_OUT)
    self.assertEqual(sig, self._run(details))

  def test_hard_signal(self):
    sig = signal.SIGBREAK if sys.platform == 'win32' else signal.SIGTERM
    self.requests(
        hard_timeout=True, exit_code=0, output='hi\ngot signal %d\nbye\n' % sig)
    details = self.get_task_details(
        self.SCRIPT_SIGNAL, hard_timeout=self.SHORT_TIME_OUT)
    # Returns 0 because the process cleaned up itself.
    self.assertEqual(0, self._run(details))

  def test_io_signal(self):
    sig = signal.SIGBREAK if sys.platform == 'win32' else signal.SIGTERM
    self.requests(
        io_timeout=True, exit_code=0, output='hi\ngot signal %d\nbye\n' % sig)
    details = self.get_task_details(
        self.SCRIPT_SIGNAL, io_timeout=self.SHORT_TIME_OUT)
    # Returns 0 because the process cleaned up itself.
    self.assertEqual(0, self._run(details))

  def test_hard_no_grace(self):
    # Actually 0xc000013a
    sig = -1073741510 if sys.platform == 'win32' else -signal.SIGTERM
    self.requests(hard_timeout=True, exit_code=sig)
    details = self.get_task_details(
        self.SCRIPT_HANG, hard_timeout=self.SHORT_TIME_OUT,
        grace_period=self.SHORT_TIME_OUT)
    self.assertEqual(sig, self._run(details))

  def test_io_no_grace(self):
    # Actually 0xc000013a
    sig = -1073741510 if sys.platform == 'win32' else -signal.SIGTERM
    self.requests(io_timeout=True, exit_code=sig)
    details = self.get_task_details(
        self.SCRIPT_HANG, io_timeout=self.SHORT_TIME_OUT,
        grace_period=self.SHORT_TIME_OUT)
    self.assertEqual(sig, self._run(details))

  def test_hard_signal_no_grace(self):
    exit_code = 1 if sys.platform == 'win32' else -signal.SIGKILL
    sig = signal.SIGBREAK if sys.platform == 'win32' else signal.SIGTERM
    self.requests(
        hard_timeout=True, exit_code=exit_code,
        output='hi\ngot signal %d\nbye\n' % sig)
    details = self.get_task_details(
        self.SCRIPT_SIGNAL_HANG, hard_timeout=self.SHORT_TIME_OUT,
        grace_period=self.SHORT_TIME_OUT)
    # Returns 0 because the process cleaned up itself.
    self.assertEqual(exit_code, self._run(details))

  def test_io_signal_no_grace(self):
    exit_code = 1 if sys.platform == 'win32' else -signal.SIGKILL
    sig = signal.SIGBREAK if sys.platform == 'win32' else signal.SIGTERM
    self.requests(
        io_timeout=True, exit_code=exit_code,
        output='hi\ngot signal %d\nbye\n' % sig)
    details = self.get_task_details(
        self.SCRIPT_SIGNAL_HANG, io_timeout=self.SHORT_TIME_OUT,
        grace_period=self.SHORT_TIME_OUT)
    # Returns 0 because the process cleaned up itself.
    self.assertEqual(exit_code, self._run(details))
if __name__ == '__main__':
  # With -v: show full diffs and debug-level logging; otherwise stay quiet.
  verbose = '-v' in sys.argv
  if verbose:
    unittest.TestCase.maxDiff = None
  logging_utils.prepare_logging(None)
  logging_utils.set_console_level(
      logging.DEBUG if verbose else logging.CRITICAL+1)
  unittest.main()
|
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Test bin/update-release-info.py. Also verify that the original files
have the appropriate triggers to cause the modifications.
"""
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os, sys, time
import TestRuntest
# Needed to ensure we're using the correct year
this_year=time.localtime()[0]

# Component lists for every repository file update-release-info.py may touch;
# pre-split so they can be joined portably with os.path.join().
TestSCons = 'QMTest/TestSCons.py' .split('/')
README = 'README.rst' .split('/')
ReleaseConfig = 'ReleaseConfig' .split('/')
SConstruct = 'SConstruct' .split('/')
Announce = 'src/Announce.txt' .split('/')
CHANGES = 'src/CHANGES.txt' .split('/')
RELEASE = 'src/RELEASE.txt' .split('/')
Main = 'src/engine/SCons/Script/Main.py' .split('/')
main_in = 'doc/user/main.in' .split('/')
main_xml = 'doc/user/main.xml' .split('/')

# Run the tool from a scratch directory with only bin/ copied in.
test = TestRuntest.TestRuntest(
    program = os.path.join('bin', 'update-release-info.py'),
    things_to_copy = ['bin']
)

if not os.path.exists(test.program):
    test.skip_test("update-release-info.py is not distributed in this package\n")

# An unknown mode argument must be rejected.
test.run(arguments = 'bad', status = 1)

# Strings to go in ReleaseConfig
combo_strings = [
    # Index 0: version tuple with bad release level
    """version_tuple = (2, 0, 0, 'bad', 0)
""",
    # Index 1: Python version tuple
    """unsupported_python_version = (2, 3)
""",
    # Index 2: Python version tuple
    """deprecated_python_version = (2, 4)
""",
    # Index 3: alpha version tuple
    """version_tuple = (2, 0, 0, 'alpha', 0)
""",
    # Index 4: final version tuple
    """version_tuple = (2, 0, 0, 'final', 0)
""",
    # Index 5: bad release date
    """release_date = (%d, 12)
"""%this_year,
    # Index 6: release date (hhhh, mm, dd)
    """release_date = (%d, 12, 21)
"""%this_year,
    # Index 7: release date (hhhh, mm, dd, hh, mm, ss)
    """release_date = (%d, 12, 21, 12, 21, 12)
"""%this_year,
]

# Error printed when the config lacks one of the three required settings.
combo_error = \
"""ERROR: Config file must contain at least version_tuple,
\tunsupported_python_version, and deprecated_python_version.
"""
def combo_fail(*args, **kw):
    '''Run the given ReleaseConfig combination expecting a failure (status 1).'''
    if 'status' not in kw:
        kw['status'] = 1
    combo_run(*args, **kw)
def combo_run(*args, **kw):
    '''Write a ReleaseConfig from the indexed combo_strings and run the tool.'''
    config = '\n' + ''.join(combo_strings[index] for index in args)
    test.write(ReleaseConfig, config)
    kw.setdefault('stdout', combo_error)
    test.run(**kw)
# Every proper subset of the three required settings must fail.
combo_fail()
combo_fail(0)
combo_fail(1)
combo_fail(2)
combo_fail(0, 1)
combo_fail(0, 2)
combo_fail(1, 2)

# All three present, but the release level itself is invalid.
combo_fail(0, 1, 2, stdout =
"""ERROR: `bad' is not a valid release type in version tuple;
\tit must be one of alpha, beta, candidate, or final
""")

# We won't need this entry again, so put in a default
combo_strings[0] = combo_strings[1] + combo_strings[2] + combo_strings[3]

# A release date must have exactly 3 or 6 fields.
combo_fail(0, 5, stdout =
"""ERROR: Invalid release date (%d, 12)
"""%this_year )
def pave(path):
    '''Recursively create the missing parent directories of *path*
    (a list of path components).'''
    parent = path[:-1]
    if parent and not os.path.isdir(os.path.join(*parent)):
        pave(parent)
        test.subdir(parent)
def pave_write(file, contents):
    '''Ensure the parent directories of *file* exist, then write its contents.'''
    pave(file)
    test.write(file, contents)
pave_write(CHANGES, """
RELEASE It doesn't matter what goes here...
""")
pave_write(RELEASE, """
This file has a 3.2.1.beta.20121221 version string in it
""")
pave_write(Announce, """
RELEASE It doesn't matter what goes here...
""")
pave_write(SConstruct, """
month_year = 'March 1945'
copyright_years = '2001, 2002, 2003, 2004, 2005, 2006, 2007'
default_version = '0.98.97'
""")
pave_write(README, """
These files are a part of 33.22.11:
scons-33.22.11.tar.gz
scons-33.22.11.win32.exe
scons-33.22.11.zip
scons-33.22.11.rpm
scons-33.22.11.deb
scons-33.22.11.beta.20012122112.suffix
""")
pave_write(TestSCons, """
copyright_years = Some junk to be overwritten
default_version = More junk
python_version_unsupported = Yep, more junk
python_version_deprecated = And still more
""")
pave_write(Main, """
unsupported_python_version = Not done with junk
deprecated_python_version = It goes on forever
""")
pave_write(main_in, """
TODO
""")
pave_write(main_xml, """
TODO
""")
def updating_run(*args):
    '''Run the 0+7 combo, expecting one "Updating <file>..." line per file.'''
    expected = ''.join(
        'Updating %s...\n' % os.path.join(*components) for components in args)
    combo_run(0, 7, stdout = expected)
updating_run(CHANGES, RELEASE, Announce, SConstruct, README, TestSCons, Main)
test.must_match(CHANGES, """
RELEASE 2.0.0.alpha.yyyymmdd - NEW DATE WILL BE INSERTED HERE
""", mode = 'r')
test.must_match(RELEASE, """
This file has a 2.0.0.alpha.yyyymmdd version string in it
""", mode = 'r')
test.must_match(Announce, """
RELEASE 2.0.0.alpha.yyyymmdd - NEW DATE WILL BE INSERTED HERE
""", mode = 'r')
years = '2001 - %d'%(this_year + 1)
test.must_match(SConstruct, """
month_year = 'MONTH YEAR'
copyright_years = %s
default_version = '2.0.0.alpha.yyyymmdd'
""" % repr(years), mode = 'r')
test.must_match(README, """
These files are a part of 33.22.11:
scons-2.0.0.alpha.yyyymmdd.tar.gz
scons-2.0.0.alpha.yyyymmdd.win32.exe
scons-2.0.0.alpha.yyyymmdd.zip
scons-2.0.0.alpha.yyyymmdd.rpm
scons-2.0.0.alpha.yyyymmdd.deb
scons-2.0.0.alpha.yyyymmdd.suffix
""", mode = 'r')
# should get Python floors from TestSCons module.
test.must_match(TestSCons, """
copyright_years = '%s'
default_version = '2.0.0.alpha.yyyymmdd'
python_version_unsupported = (2, 3)
python_version_deprecated = (2, 4)
"""%years, mode = 'r')
# should get Python floors from TestSCons module.
test.must_match(Main, """
unsupported_python_version = (2, 3)
deprecated_python_version = (2, 4)
""", mode = 'r')
#TODO: Release option
#TODO: ==============
#TODO:
#TODO: Dates in beta/candidate flow
#TODO:
#TODO: Dates in final flow
#TODO:
#TODO: Post option
#TODO: ===========
#TODO:
#TODO: Dates in post flow
#TODO:
#TODO: Update minor or micro version
#TODO:
#TODO: ReleaseConfig - new version tuple
#TODO:
#TODO: CHANGES - new section
#TODO:
#TODO: RELEASE - new template
#TODO:
#TODO: Announce - new section
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
|
import sys, os, math
from priority_queue import *
class Node:
    '''A named graph vertex carrying a group id and an optional value.'''

    def __init__(self, name, group, value=0):
        self.name = name
        self.group = group
        self.value = value

    def same_name_with(self, other):
        '''True when *other* carries the same name, regardless of value/group.'''
        return self.name == other.name
class Graph:
def __init__(self):
    # Adjacency-list representation; see the format example below.
    self.graphdict = dict();
    '''
    A graph is of this format:
    {
    'A' : [('B', 1), ('C', 2)],
    'B' : [('A', 1), ('C', 3)],
    'C' : [('A', 2), ('B', 3)]
    }
          1
     A ------- B
      \       /
     2 \     / 3
        \   /
          C
    '''
############################################################
######## Utility private methods #########
def __get_adj_nodes__(self, node):
edges = self.graphdict[node];
nodes = [];
for edge in edges:
nodes.append( edge[0] );
return nodes;
def __edge_eq__( self, edge1, edge2 ):
'''determine if two undirected edges are equal'''
node_eq = False;
weight_eq = False;
if edge1[0] == edge2[0] and edge1[0] == edge2[0]:
node_eq = True;
elif edge1[0] == edge2[1] and edge1[1] == edge2[0]:
node_eq = True;
if edge1[2] == edge2[2]:
weight_eq = True;
return node_eq and weight_eq;
############################################################
######## Utility prublic methods #########
def copy(self):
    '''Return a shallow copy of the graph: nodes and edges are the same
    objects, only the Graph instance itself is new.'''
    duplicate = Graph()
    duplicate.add_edges(self.get_all_edges())
    return duplicate
def size(self):
    '''Return the number of edges in the graph, |E|.'''
    all_edges = self.get_all_edges()
    return len(all_edges)
def contains(self, node_or_edge):
'''determine if an edge or node exist in the graph'''
if isinstance( node_or_edge, tuple ): # This is an edge
# Determine is the two nodes exist
if not self.graphdict.has_key( node_or_edge[0] ) or not self.graphdict.has_key( node_or_edge[1] ):
return False;
edges = self.graphdict[ node_or_edge[0] ]; # get edges associated to the first node of the edge
for edge in edges:
if edge[0] == node_or_edge[1] and edge[1] == node_or_edge[2]:
return True;
return False;
else: # This is a node
return self.graphdict.has_key( node_or_edge );
def add_nodes(self, nodelist):
    '''Add a node or a list of nodes; nodes already present are untouched.'''
    if not isinstance(nodelist, list):
        nodelist = [nodelist]
    for candidate in nodelist:
        if not self.contains(candidate):
            self.graphdict[candidate] = []
def add_edges(self, edgelist):
    '''add a list of edges, An edge is of the format (node1, node2, weight)'''
    # Accept a single edge as well as a list of edges.
    if not isinstance(edgelist, list):
        edgelist = [edgelist];
    for edge in edgelist:
        if self.contains( edge ): # if the edge is already in the graph
            continue;
        node1 = edge[0]; node2 = edge[1]; weight = edge[2];
        if not self.contains( node1 ): self.add_nodes( node1 ); # add the node if it doesn't exist in the graph
        if not self.contains( node2 ): self.add_nodes( node2 );
        # The graph is undirected: record the edge in both adjacency lists,
        # guarding each append against duplicates.
        if (node2, weight) not in self.graphdict[node1]:
            self.graphdict[node1].append( (node2, weight) );
        if (node1, weight) not in self.graphdict[node2]:
            self.graphdict[node2].append( (node1, weight) );
def get_all_nodes(self):
'''get all nodes in the graph'''
return self.graphdict.keys();
def get_edges_of(self, node):
'''get edges associated to the node'''
return self.graphdict[node];
def get_all_edges(self):
'''get all edges.'''
edges = [];
added_edges = {}; # record edges that is already in the list.
for node in self.graphdict.keys():
node_edges = self.graphdict[node]; # get the edges of each node.
for edge in node_edges:
existing_edge = False; # determine if the edge is already in the set
if added_edges.has_key( ( node, edge[0] ) ) or added_edges.has_key( (edge[0], node) ):
existing_edge = True;
if not existing_edge:
edges.append( (node, edge[0], edge[1]) );
added_edges[ ( node, edge[0] ) ] = 1;
added_edges[ ( edge[0], node ) ] = 1;
return edges;
def share_nodes_with(self, other):
'''determine if two graphs share same ndoes'''
othernodes = other.get_all_nodes();
for node in othernodes:
if self.graphdict.has_key( node ):
return True;
return False;
def loop_free(self):
    '''True when the graph contains no loops (delegates to num_of_loops).'''
    loop_count = self.num_of_loops()
    return loop_count == 0
def mergeable_with(self, other):
    '''Two graphs are mergeable when they satisfy both:
    1. they share some nodes
    2. the merged graph is loop free
    '''
    if self == other:
        return False
    if not self.share_nodes_with(other):
        return False
    # Nodes are shared; the result hinges on the merged graph being acyclic.
    merged = self.merge_with(other)
    return merged.loop_free()
def strong_mergeable_with(self, other):
    '''define two graph to be strong mergable, if they satisfy all
    1. they share some nodes
    2. the new graph is loop free
    3. no nodes have neighbors more than 2.
    '''
    if self == other:
        return False;
    share_nodes = self.share_nodes_with( other );
    if not share_nodes:
        return False;
    merged_graph = self.merge_with( other );
    new_graph_loop_free = merged_graph.loop_free();
    nodes = merged_graph.get_all_nodes();
    # NOTE(review): condition 3 (degree <= 2) is commented out below, making
    # this currently identical to mergeable_with(); `nodes` is unused.
    # Confirm whether the degree check should be re-enabled.
    #for node in nodes:
    #    if len(merged_graph.get_edges_of( node )) > 2:
    #        return False;
    return share_nodes and new_graph_loop_free;
'''
def self_merge_with(self, other):
''merge the other graph with self. do not return a new graph. self is changed''
otheredges = other.get_all_edges();
self.add_edges( otheredges );
return;'''
def merge_with(self, other):
    '''Merge with another graph; returns a new graph, self stays unchanged.'''
    merged = self.copy()
    merged.add_edges(other.get_all_edges())
    return merged
############################################################
######## Path methods #########
def find_one_path(self, start, end, path=[]):
'''find one path from the start node to the end node. This method is recursive and very inefficient.'''
path = path +[start];
if start == end:
return path;
elif not self.graphdict.has_key(start) or not self.graphdict.has_key(end):
return None;
for edge in self.graphdict[start]:
node = edge[0];
if node not in path:
newpath = self.find_one_path( node, end, path );
if newpath:
return newpath;
return None;
def find_all_paths(self, start, end, path=[]):
path = path + [start];
if start == end:
return [path];
if not self.graphdict.has_key(start) or not self.graphdict.has_key(end):
return [];
paths = [];
for edge in self.graphdict[start]:
node = edge[0];
if node not in path:
newpaths = self.find_all_paths( node, end, path);
for newpath in newpaths:
paths.append(newpath);
return paths;
def adjMat(self):
'''Get the adjacency matrix'''
nodes = self.graphdict.keys();
mat = [];
for node in nodes:
row = [];
adjnodes = self.__get_adj_nodes__(node);
for other in nodes:
if other == node: row.append(0);
elif other in adjnodes: row.append(1);
else: row.append(0);
mat.append(row);
return mat;
############################################################
######## Graph Construction and Save #########
def loadMat(self, mat):
'''Load graph from an adjacency matrix (2d)'''
n = len(mat);
nodes = [];
for i in range(0,n):
nodes.append( Node( str(i), i ) );
self.add_nodes( nodes );
edges = [];
i = 0;
for row in mat:
j = 0;
for elem in row:
if i != j and elem != 0:
edges.append( (nodes[i], nodes[j], elem ));
j+=1;
pass;
i+=1;
self.add_edges( edges );
def saveJson(self, filename):
'''Save the graph in a Json file.'''
jsonfile = open( filename, 'w' );
nodeStr = "";
linkStr = "";
nodes = self.graphdict.keys();
for node in nodes:
nodeStr += "\t\t{\"name\":\"" + str(node.name) + "\", \"group\":"+ str(node.group) + ", \"value\":\"" + str(node.value) +"\"},\n";
for edge in self.graphdict[node]:
linkStr += "\t\t{\"source\":"+ str(nodes.index(node)) +",\"target\":" + str(nodes.index(edge[0])) + ",\"value\":"+ str(edge[1]) +"},\n";
nodeStr = nodeStr[0:-2] + "\n";
linkStr = linkStr[0:-2] + "\n";
nodeSection = "\n\t\"nodes\":[\n{0}\t]".format(nodeStr);
linkSection = "\n\t\"links\":[\n{0}\t]".format(linkStr);
jsonStr = "{ "+ nodeSection +","+ linkSection +"\n}";
#print jsonStr;
jsonfile.write( jsonStr );
jsonfile.close();
def num_of_loops(self):
'''the number of 1d holes'''
mat = self.adjMat();
edge_num = 0;
for row in mat:
for elem in row:
if elem - 1.0 == 0.0:
edge_num += 1;
edge_num /= 2;
vert_num = len(self.graphdict.keys());
return edge_num - vert_num + 1;
############################################################
######## Partitioning Graph #########
############################################################
class GraphBreaker(object):
    """Break a graph into several loop-free parts.

    The original graph is split into one single-edge graph per edge;
    these are then greedily merged back together as long as the result
    stays loop free, and finally composed into one graph for display."""
    def __init__(self, graph):
        # graph: the Graph instance to be partitioned (left unmodified).
        self.origin_graph = graph;
    def break_it(self):
        '''break the graph into several parts, and save them to another non-connected graph'''
        graphs = [];
        self_edges = self.origin_graph.get_all_edges();
        for edge in self_edges:
            # One single-edge graph per original edge.
            # NOTE(review): add_edges() is handed a bare (node, node,
            # weight) tuple here, while every other caller passes a list
            # of such tuples -- confirm add_edges accepts both forms.
            graph = Graph();
            graph.add_edges( edge );
            graphs.append( graph );
        return self.merge_graphs( graphs );
    def merge_graphs(self, graphs):
        # Greedy merge driven by a priority queue keyed on graph size:
        # repeatedly pop the smallest unvisited graph and merge it with
        # the first strong-mergeable candidate, until no merge is left.
        pq = PriorityQueue();
        for graph in graphs:
            pq.push( graph, graph.size() );
        visited = [];
        while pq.size() > 1:
            this_time_popped = [];
            smallest_graph = pq.pop();
            this_time_popped.append(smallest_graph);
            # Skip over graphs already handled in earlier iterations.
            while (smallest_graph in visited) and (not pq.isEmpty()):
                smallest_graph = pq.pop();
                this_time_popped.append( smallest_graph );
            visited.append( smallest_graph );
            # Push back everything popped along the way but not consumed.
            for popped in this_time_popped:
                if popped not in visited:
                    pq.push( popped, popped.size() );
            choices = [];
            small_size_graphs = pq.get_all();
            #small_size_graphs = pq.get_smalls( smallest_graph.size() ); # Choose all graphs such that
            # 1. they have priority no less than current one, and
            # 2. when merge with the smallest one they get a same size new graph
            for graph in small_size_graphs:
                if smallest_graph.strong_mergeable_with( graph ): # test if they are mergeable
                    choices.append( graph );
            if len(choices) == 0: # if no mergeable graph with the smallest one
                pq.push( smallest_graph, smallest_graph.size() );
                continue;
            # Now we have all graphs mergeable with the smallest graph that will produce the same sized new graph
            ''' naive strategy'''
            print '--------------'
            print len(graphs), pq.size();
            graphs.remove(smallest_graph);
            graphs.remove( choices[0] );
            pq.remove_task( choices[0] );
            merged_graph = smallest_graph.merge_with( choices[0] );
            graphs.append( merged_graph );
            pq.push( merged_graph, merged_graph.size() );
            print len(graphs), pq.size();
            '''better strategy:
            --------------------------------------------------------------------------
            define 'credit' for a list of graphs as:
            The sum of the number of all mergeable graphs for every graph in the list
            --------------------------------------------------------------------------
            Merging two graphs means decreasing the credit by at least two.
            With more credits, we can merge more graphs in the list.
            So we prefer to merge graphs that results in larger credit.
            (Or, we prefer to merge graphs that decrease credit least.)
            '''
        #################
        ### 'graphs' is a list of loop-free graphs. Each cannot merge with another.
        print len(graphs)
        composed_graph = self.compose( graphs );
        return composed_graph;
    def compose(self, graphs):
        '''graphs are a list of graphs that each share some nodes.
        We need to compose them into one graph, to be shown in a webpage.
        All nodes remain the same name, but are different now.'''
        def find_same_name( nodes, node ):
            '''Find a node in a list that has the same name with the 'node' '''
            for item in nodes:
                if item.same_name_with( node ):
                    return item;
        new_graph = Graph();
        for graph in graphs:
            nodes = graph.get_all_nodes();
            new_nodes = [];
            for node in nodes:
                # Clone every node so the composed graph owns its nodes.
                new_nodes.append( Node(node.name, node.group, node.value));
            edges = graph.get_all_edges();
            new_edges = [];
            for edge in edges:
                # Rewire each edge onto the cloned nodes, matched by name.
                corr_node1 = find_same_name( new_nodes, edge[0] );
                corr_node2 = find_same_name( new_nodes, edge[1] );
                new_edges.append( ( corr_node1, corr_node2, edge[2] ) );
            new_graph.add_edges( new_edges );
        return new_graph;
'''
graph = Graph();
matrix = [ [0,1,0,1,1,0],
[1,0,1,0,0,1],
[0,1,0,1,0,1],
[1,0,1,0,1,0],
[1,0,0,1,0,1],
[0,1,1,0,1,0] ];
matrix2= [ [0,1,0,0,0,1],
[1,0,1,0,1,0],
[0,1,0,1,0,0],
[0,0,1,0,1,0],
[0,1,0,1,0,1],
[1,0,0,0,1,0] ]
matrix3 = [ [0,1,0,0,0,0,0,1,1,0],
[1,0,1,0,0,0,0,0,1,0],
[0,1,0,1,0,0,0,0,1,0],
[0,0,1,0,1,0,0,0,1,0],
[0,0,0,1,0,1,0,0,0,1],
[0,0,0,0,1,0,1,0,0,1],
[0,0,0,0,0,1,0,1,0,1],
[1,0,0,0,0,0,1,0,0,1],
[1,1,1,1,0,0,0,0,0,1],
[0,0,0,0,1,1,1,1,1,0] ]
graph.loadMat( matrix3);
graph.saveJson('./graph_drawing/data/graph2.json');
graphBreaker = GraphBreaker(graph);
breaked_graph = graphBreaker.break_it();
breaked_graph.saveJson('./graph_drawing/data/broken_graph.json');
'''
|
|
# coding=utf-8
from lib.Logging import get_log
__author__ = 'Anatoli Kalysch'
import ui.SettingsWindow as SettingsWindow
from dynamic.dynamic_deobfuscate import *
from lib.VMRepresentation import *
from static.static_deobfuscate import *
from ui.AboutWindow import AboutWindow
from ui.UIManager import UIManager
class VMAttack_Manager(object):
def __init__(self):
self.choice = None
self._vmr = get_vmr()
# UI Management
self.ui_mgr = UIManager()
self.ui_mgr.get_init_menu()
self.menu_name = "VMAttack"
self.menu_extensions = []
### EVIRONMENT AND INIT ###
@property
def trace(self):
return self.vmr._trace
@trace.setter
def trace(self, value):
self.vmr._trace = value
@property
def vmr(self):
self.update_vmr()
return self._vmr
@property
def dbg_handl(self):
return get_dh(self.choice)
@dbg_handl.setter
def dbg_handl(self, value):
self.vmr._dbg_handl = value
@property
def vm_operands(self):
return self.vmr._vm_operands
@vm_operands.setter
def vm_operands(self, value):
self.vmr._vm_operands = value
@property
def vm_returns(self):
return self.vmr._vm_returns
@vm_returns.setter
def vm_returns(self, value):
self.vmr._vm_returns = value
@property
def vm_ctx(self):
return self.vmr._vm_ctx
@vm_ctx.setter
def vm_ctx(self, value):
self.vmr._vm_ctx = value
def select_debugger(self):
c = Choose([], "Choose your preferred debugger:", 1)
c.list = ["Currently selected IDA Debugger", "Bochs Dbg", "Win32 Dbg"] # TODO , "OllyDbg", "Immunity Dbg"]
c.width = 33
# choose() starts counting at 1, not 0
self.choice = c.choose() - 1
if self.choice == 1:
LoadDebugger('Bochs', 0)
elif self.choice == 2:
LoadDebugger('Win32', 0)
def update_vmr(self):
self._vmr = get_vmr()
### UI MANAGEMENT ###
@staticmethod
def show_about():
AboutWindow().exec_()
@staticmethod
def show_settings():
SettingsWindow.Show()
def show_trace(self):
self.update_vmr()
if self._vmr.trace is not None:
for line in self._vmr.trace:
print line.to_str_line()
def remove_colors(self):
# reset color
heads = Heads(SegStart(ScreenEA()), SegEnd(ScreenEA()))
for i in heads:
SetColor(i, CIC_ITEM, 0xFFFFFF)
def extend_menu(self):
"""
Extends the menu.
"""
try:
self.revert_menu()
menu_path = self.menu_name + "/"
self.ui_mgr.get_init_menu()
self.ui_mgr.add_menu('&'+self.menu_name)
# debugger selection - will be added after plugin interaction with ollydbg and immunitydbg will be enabled - as of now no additional value is generated compared to the debugger selection in IDA itself so it is commented out
# An alternative to the chooser would be to hook IDAs Debugger selection?
#select_debugger_menu_item = add_menu_item(menu_path, "Select VMAttack Debugger", "", 0, self.select_debugger, None)
# credits & settings
settings_menu_item = add_menu_item(menu_path, "Settings", "", 0, self.show_settings, None)
about_menu_item = add_menu_item(menu_path, "About ...", "", 0, self.show_about, None)
# instruction trace generation and handling
remove_colors_menu_item = add_menu_item(menu_path + "Instruction Trace/", "Remove Colors from Graph", "", 0, self.remove_colors, None)
load_trace_menu_item = add_menu_item(menu_path + "Instruction Trace/", "Load Trace", "", 0, load_trace, None)
save_trace_menu_item = add_menu_item(menu_path + "Instruction Trace/", "Save Trace", "", 0, save_trace, None)
gen_trace_menu_item = add_menu_item(menu_path + "Instruction Trace/", "Generate Trace", "", 0, gen_instruction_trace, (self.choice,))
show_trace_menu_item = add_menu_item(menu_path + "Instruction Trace/", "Show Trace", "", 0, self.show_trace, None)
### automation ###
grading_menu_item = add_menu_item(menu_path + 'Automated Analysis/', "Grading System Analysis", "", 0, grading_automaton, None)
automaton_menu_item = add_menu_item(menu_path + 'Automated Analysis/', "Run all analysis capabilities", "", 0, self.automaton, None)
show_opti_menu_item = add_menu_item(menu_path + "Automated Analysis/Semi Automated (dynamic)/", "Dynamic Trace Optimization", "", 0, optimization_analysis, None)
analyze_addr_trace_menu_item = add_menu_item(menu_path + 'Automated Analysis/Semi Automated (dynamic)/', "Clustering Analysis", "", 0, clustering_analysis, None)
show_input_output = add_menu_item(menu_path + "Automated Analysis/Semi Automated (dynamic)/", "VM Input<=>Ouput Analysis", "", 0, input_output_analysis, None)
deobfuscate_from_menu_item = add_menu_item(menu_path + "Automated Analysis/Semi Automated (static)/", "Static deobfuscate", "", 0, static_deobfuscate, None)
show_abstract_graph_menu_item = add_menu_item(menu_path + "Automated Analysis/Semi Automated (static)/", "Create Abstract VM-Graph", "", 0, static_deobfuscate, (2,))
### manual analysis ###
# vm context related
static_start_search_menu_item = add_menu_item(menu_path + "Manual Analysis/VM Context/", "Find VM Context (static)", "", 0, static_vmctx, (True,))
find_vm_values_menu_item = add_menu_item(menu_path + "Manual Analysis/VM Context/", "Find VM Context (dynamic)", "", 0, dynamic_vmctx, (True,))
# static analysis menu items
manual_static_menu_item = add_menu_item(menu_path + "Manual Analysis/Static/", "Deobfuscate from ...", "", 0, static_deobfuscate, (0,True))
# dynamic analysis menu items
follow_virt_register = add_menu_item(menu_path + "Manual Analysis/Dynamic/", "Follow Virtual Register", "", 0, manual_analysis, (3,))
find_reg_mapping = add_menu_item(menu_path + "Manual Analysis/Dynamic/", "Find Virtual Reg to Reg mapping", "", 0, manual_analysis, (2,))
find_vmfunc_input = add_menu_item(menu_path + "Manual Analysis/Dynamic/", "Find VM Function Input Parameter", "", 0, manual_analysis, (1,))
find_vmfunc_output = add_menu_item(menu_path + "Manual Analysis/Dynamic/", "Find VM Function Output Parameter", "", 0, manual_analysis, (0,))
analyze_count_menu_item = add_menu_item(menu_path + "Manual Analysis/Dynamic/", "Address Count", "", 0, address_heuristic, None)
#manual_input_output = add_menu_item(menu_path + "Manual Analysis/Dynamic/", " Run Input<=>Ouput Analysis on Function", "", 0, input_output_analysis, (True,))
self.menu_extensions.append(deobfuscate_from_menu_item)
self.menu_extensions.append(settings_menu_item)
#self.menu_extensions.append(select_debugger_menu_item)
self.menu_extensions.append(load_trace_menu_item)
self.menu_extensions.append(save_trace_menu_item)
self.menu_extensions.append(gen_trace_menu_item)
self.menu_extensions.append(analyze_count_menu_item)
self.menu_extensions.append(analyze_addr_trace_menu_item)
self.menu_extensions.append(static_start_search_menu_item)
self.menu_extensions.append(find_vm_values_menu_item)
self.menu_extensions.append(automaton_menu_item)
self.menu_extensions.append(show_abstract_graph_menu_item)
self.menu_extensions.append(find_vmfunc_input)
self.menu_extensions.append(find_vmfunc_output)
self.menu_extensions.append(manual_static_menu_item)
self.menu_extensions.append(find_reg_mapping)
self.menu_extensions.append(follow_virt_register)
self.menu_extensions.append(show_input_output)
self.menu_extensions.append(show_trace_menu_item)
self.menu_extensions.append(about_menu_item)
self.menu_extensions.append(show_opti_menu_item)
self.menu_extensions.append(grading_menu_item)
#self.menu_extensions.append(manual_input_output)
self.menu_extensions.append(remove_colors_menu_item)
except Exception, e:
print "[*] Menu could not be added! Following Error occurred:\n %s" % e.message
def revert_menu(self):
for i in self.menu_extensions:
del_menu_item(i)
self.ui_mgr.clear()
def welcome(self):
msg("\n\
..........llllllllllllllllll..llllllll......llllll.......llllll..........\n\
..........llllllllllllllllll..lllllllll.....llllll.......llllll..........\n\
..........llllll.............lllll.lllll....llllll.......llllll..........\n\
..........llllll............,lllll.lllll,...llllll.......llllll..........\n\
..........llllll............llllll.llllll...llllll.......llllll..........\n\
..........lllllllllllllllllllllll...lllll...llllll.......llllll..........\n\
..........lllllllllllllllllllllll...llllll..llllll.......llllll..........\n\
..........llllllllllllllllllllll.....lllll..llllll.......llllll..........\n\
..........llllll..........lllllllllllllllll.llllll.......llllll..........\n\
..........llllll.........llllllllllllllllll.llllll.......llllll..........\n\
..........llllll........lllllllllllllllllllllllllll......llllll..........\n\
..........llllll........llllll.........lllllllllllllllllllllll...........\n\
..........llllll.......lllllll.........lllllll.lllllllllllll.............\n\
..........llllll......lllllll...........lllllll.lllllllllll..............\n\
............Friedrich-Alexander University Erlangen-Nuremberg............\n\
")
def reset_grade(self, trace):
for line in trace:
line.grade = 0
return trace
def grade(self, trace, excerpt):
for line in excerpt:
trace[trace.index(line)].raise_grade()
return trace
# automaton
def automaton(self):
trace = prepare_trace()
self.reset_grade(trace)
# load current IDA-Debugger
if self.dbg_handl.dbg.module_name is "NoDbg":
self.dbg_handl = self.select_debugger()
# instruction trace
if trace is None:
try:
trace = self.dbg_handl.gen_instruction_trace()
except:
self._vmr.trace = prepare_trace()
# run all trace analysis functions and present the results
dynamic_vmctx()
# afterwards if the VM context was found run the static analysis automatically since it depends on the VM context
try:
self.update_vmr()
deobfuscate(self._vmr.code_start, self._vmr.base_addr, self._vmr.code_end, self._vmr.vm_addr)
except Exception, e:
try:
static_vmctx()
self.update_vmr()
deobfuscate(self._vmr.code_start, self._vmr.base_addr, self._vmr.code_end, self._vmr.vm_addr)
except Exception, ex:
msg("[*] Could not provide static deobfuscation analysis! The following errors occured:\n %s \n %s" % (
e.message, ex.message))
# run the dynamic analysis capabilities of the plugin -> each analysis increases a special trace lines grade which will be evaluated at the end of the analysis
# input / output
try:
input_output_analysis()
except Exception, e:
print '[*] Exception occured while running Input/Output analysis!\n %s' % e.message
# clustering
try:
clustering_analysis()
except Exception, e:
print '[*] Exception occured while running Clustering analysis!\n %s' % e.message
# optimizations
try:
optimization_analysis()
except Exception, e:
print '[*] Exception occured while running optimization analysis!\n %s' % e.message
# grade the trace line
try:
grading_automaton()
except Exception, e:
print '[*] Exception occured while running grading analysis!\n %s' % e.message
# Virtualization obfuscated interpretation
class VMAttack(plugin_t):
    """IDA plugin wrapper: wires the VMAttack_Manager into IDA's
    plugin lifecycle (init/run/term)."""
    # IDA plugin registration attributes
    flags = PLUGIN_PROC
    comment = "This Framework is supposed to help with the analysis of virtualization obfuscated binaries."
    help = "HELP!"
    wanted_name = "VMAttack"
    wanted_hotkey = ""
    def init(self):
        """Called by IDA on load: build the manager, install the menu
        entries and keep the plugin resident (PLUGIN_KEEP)."""
        self.vma_mgr = None
        try:
            self.vma_mgr = get_mgr()
            self.vma_mgr.extend_menu()
            #self.vma_mgr.welcome()
            msg('[*] Starting VMAttack plugin...\n')
            get_log().log('[VMA] Starting VMAttack and initiating variables ...\n')
            return PLUGIN_KEEP
        except Exception, e:
            # Initialization failed: tear the menu down again and skip.
            msg("[*] Failed to initialize VMAttack.\n %s\n" % e.message)
            if self.vma_mgr is not None:
                self.vma_mgr.revert_menu()
            del self.vma_mgr
            return PLUGIN_SKIP
    def run(self, arg):
        """Called when the user invokes the plugin: reinstall the menu."""
        try:
            self.vma_mgr = get_mgr()
            self.vma_mgr.extend_menu()
            #self.vma_mgr.welcome()
            msg('[*] Reloading VMAttack plugin...\n')
            add_menu_item('Edit/Plugins/', 'Load VMAttack', None, 0, self.vma_mgr.extend_menu, ())
        except Exception,e:
            msg("[*] Failed to initialize VMAttack.\n %s\n" % e.message)
            msg(e.args)
    def term(self):
        """Called on IDA shutdown: finalize logging, remove the menu and
        drop the shared VM representation."""
        if self.vma_mgr is not None:
            get_log().finalize()
            self.vma_mgr.revert_menu()
        del_vmr()
        # NOTE(review): `del self` only unbinds the local name; it has no
        # effect on the instance lifetime.
        del self
def PLUGIN_ENTRY():
    """IDA plugin entry point -- hand IDA a fresh VMAttack instance."""
    plugin = VMAttack()
    return plugin
# Singleton cache for the VMAttack manager
vma_mgr = None
def get_mgr():
    """Return the process-wide VMAttack_Manager, creating it lazily on
    first use (module-level singleton)."""
    global vma_mgr
    mgr = vma_mgr
    if mgr is None:
        mgr = VMAttack_Manager()
        vma_mgr = mgr
    return mgr
|
|
# MyInventory.setInventory(self.tab_inventory)
from tkinter import *
from tkinter.ttk import *
from hack_classes import Item
import os.path
from tooltip import ToolTip
from tkinter import messagebox
from random import *
class SetInventoryItem():
    """
    Store inventory item info, grid position, and character item reference.
    """
    def __init__(self, name, file, description, effect, uses,
                 value, item_type, active, row, column, item_ref, component):
        # `file` mirrors the original parameter name; it shadows the
        # builtin, but the signature is part of the public interface.
        self.name = name
        self.file = file
        self.description = description
        self.effect = effect
        self.uses = uses
        self.value = value
        self.item_type = item_type
        self.row = row
        self.column = column
        self.item_ref = item_ref
        self.active = active
        self.component = component
    # --- getter methods ---------------------------------------------------
    # (The original marked this section with a malformed '""""' string
    # literal -- an accidental class-level constant -- replaced here by a
    # real comment.)
    def getName(self):
        return self.name
    def getFile(self):
        return self.file
    def getDescription(self):
        return self.description
    def getEffect(self):
        return self.effect
    def getUses(self):
        return self.uses
    def getValue(self):
        return self.value
    def getItemType(self):
        return self.item_type
    def getRow(self):
        return self.row
    def getColumn(self):
        return self.column
    def getItemRef(self):
        return self.item_ref
    def getActive(self):
        return self.active
    def getComponent(self):
        return self.component
    # --- setter methods ---------------------------------------------------
    def setUses(self, uses):
        self.uses = uses
    def setActive(self, active):
        self.active = active
    def setRow(self, row):
        self.row = row
    def setColumn(self, col):
        self.column = col
class MyInventory():
"""
this class is used for linking the Inventory class with the GUI
class before creating the Inventory class object
"""
api = None # reference to GUI class object
items_frame = None # reference to GUI class items in use frame
my_inventory = None # Inventory class object stored here
# default active item limits
max_software = 0
max_hardware = 1
max_components = 1
# current number of items active
software_count = 0
hardware_count = 0
components_count = 0
# store each active item info here
software_frame = []
software_items = []
software_tips = []
software_effects = {}
hardware_frame = []
hardware_items = []
hardware_tips = []
components_frame = []
component_items = []
components_tips = []
component_effects = {}
# called in work_space.py
def setInventory(inventory_frame):
MyInventory.my_inventory = Inventory(inventory_frame)
# called at the end of engine/GUI's constructor function
def setApi(api_ref, items_frame_ref):
MyInventory.api = api_ref
MyInventory.items_frame = items_frame_ref
MyInventory.makeItemSlots()
MyInventory.my_inventory.initInventory(MyInventory.api.get_character_items())
def makeItemSlots():
style = Style()
for i in range(MyInventory.max_software):
software_frame = Frame(MyInventory.items_frame, width=25, height=25, style='item.TFrame')
software_frame.grid(row=1, column=i+1, sticky=W, pady=4, padx=4)
style.configure('item.TFrame', background='#AEAEAE')
MyInventory.software_frame.append(software_frame)
MyInventory.software_items.append([None, None])
MyInventory.software_tips.append(None)
for i in range(MyInventory.max_hardware):
hardware_frame = Frame(MyInventory.items_frame, width=25, height=25, style='item.TFrame')
hardware_frame.grid(row=2, column=i+1, sticky=W, pady=4, padx=4)
style.configure('item.TFrame', background='#AEAEAE')
MyInventory.hardware_frame.append(hardware_frame)
MyInventory.hardware_items.append([None, None])
MyInventory.hardware_tips.append(None)
for i in range(MyInventory.max_components):
component_frame = Frame(MyInventory.items_frame, width=25, height=25, style='item.TFrame')
component_frame.grid(row=3, column=i+1, sticky=W, pady=4, padx=4)
style.configure('item.TFrame', background='#AEAEAE')
MyInventory.components_frame.append(component_frame)
MyInventory.component_items.append([None, None])
MyInventory.components_tips.append(None)
def getEmblemImage(name):
emblems = {'Motherboard':os.path.join("assets", "art", "emblem_mobo.gif"),
'RAM 16gb':os.path.join("assets", "art", "emblem_ramred.gif"),
'RAM 8gb':os.path.join("assets", "art", "emblem_ramgreen.gif"),
'CPU fan':os.path.join("assets", "art", "emblem_fan.gif"),
'CPU 3.5 Ghz':os.path.join("assets", "art", "emblem_cpu.gif"),
'GPU 2gb':os.path.join("assets", "art", "emblem_gpu2.gif"),
'GPU 1gb':os.path.join("assets", "art", "emblem_gpu1.gif"),
'Fortify':os.path.join("assets", "art", "emblem_fortify.gif"),
'Fork':os.path.join("assets", "art", "emblem_fork.gif"),
'Smokescreen':os.path.join("assets", "art", "emblem_smoke.gif"),
'Penetrate':os.path.join("assets", "art", "emblem_penetrate.gif"),
'Fortify Burst':os.path.join("assets", "art", "emblem_fortify.gif"),
'Fork Burst':os.path.join("assets", "art", "emblem_fork.gif"),
'Smokescreen Burst':os.path.join("assets", "art", "emblem_smoke.gif"),
'Penetrate Burst':os.path.join("assets", "art", "emblem_penetrate.gif"),
'Laptop':os.path.join("assets", "art", "emblem_laptop.gif"),
'Desktop':os.path.join("assets", "art", "emblem_desktop.gif"),
'Terminal':os.path.join("assets", "art", "emblem_terminal.gif"),
'Server':os.path.join("assets", "art", "emblem_server.gif"),
'Desk':os.path.join("assets", "art", "emblem_desk.gif")}
return emblems[name]
def addSoftware(item):
if item.item_type in MyInventory.software_effects:
return False
if MyInventory.software_count < MyInventory.max_software:
i = MyInventory.software_count
img = PhotoImage(file=MyInventory.getEmblemImage(item.name))
software_label = Label(MyInventory.software_frame[MyInventory.software_count], image=img)
software_label.grid(sticky=W)
software_label.configure(font='arial 14', cursor='hand2', borderwidth=0, background='red', foreground='black')
software_label.bind('<1>', lambda e, _id=i: MyInventory.removeSoftware(_id))
software_label.image = img
MyInventory.software_tips[i] = ToolTip(software_label, item.name + ': ' + item.description)
MyInventory.software_items[i] = [software_label, item]
MyInventory.software_count += 1
MyInventory.software_effects[item.item_type] = item
return True
else:
return False # software slots full
def resetSoftware(item, i):
img = PhotoImage(file=MyInventory.getEmblemImage(item.name))
software_label = Label(MyInventory.software_frame[i-1], image=img)
software_label.grid(sticky=W)
software_label.configure(font='arial 14', cursor='hand2', borderwidth=0, background='red', foreground='black')
software_label.bind('<1>', lambda e, _id=i-1: MyInventory.removeSoftware(_id))
software_label.image = img
MyInventory.software_tips[i-1] = ToolTip(software_label, item.name + ': ' + item.description)
MyInventory.software_items[i-1] = [software_label, item]
def removeSoftware(_id):
if MyInventory.software_items[_id][0] != None:
uses_left = int(MyInventory.software_items[_id][1].uses)
item_name = MyInventory.software_items[_id][1].name
msg = 'Move "' + item_name + '" to your inventory?'
if uses_left == 0:
msg = 'WARNING: 0 uses are left for "' + item_name + \
'", this item will be deleted.'
answer = messagebox.askokcancel("Remove Active Software Item", msg)
if answer:
MyInventory.software_effects.pop(MyInventory.software_items[_id][1].item_type, None)
MyInventory.api.remove_effect(MyInventory.software_items[_id][1])
MyInventory.software_items[_id][0].destroy()
MyInventory.software_items[_id][0] = None
MyInventory.my_inventory.addItem(MyInventory.software_items[_id][1])
MyInventory.api.set_active(MyInventory.software_items[_id][1], 'False')
# shift all items to the left on the gui if any visual node gaps
for i in range(_id+1, MyInventory.max_software, 1):
if MyInventory.software_items[i][0] != None:
MyInventory.resetSoftware(MyInventory.software_items[i][1], i)
MyInventory.software_items[i][0].destroy()
MyInventory.software_items[i][0] = None
if MyInventory.software_count > 0:
MyInventory.software_count -= 1
def addHardware(item):
if MyInventory.hardware_count < MyInventory.max_hardware:
i = MyInventory.hardware_count
img = PhotoImage(file=MyInventory.getEmblemImage(item.name))
hardware_label = Label(MyInventory.hardware_frame[MyInventory.hardware_count], image=img)
hardware_label.grid(sticky=W)
hardware_label.configure(font='arial 14', cursor='hand2', borderwidth=0, background='yellow', foreground='black')
hardware_label.bind('<1>', lambda e, _id=i: MyInventory.removeHardware(_id))
hardware_label.image = img
MyInventory.hardware_tips[i] = ToolTip(hardware_label, item.name + ': ' + item.description)
MyInventory.hardware_items[i] = [hardware_label, item]
MyInventory.hardware_count += 1
return True
else:
return False # software slots full
def resetHardware(item, i):
img = PhotoImage(file=MyInventory.getEmblemImage(item.name))
hardware_label = Label(MyInventory.hardware_frame[i-1], image=img)
hardware_label.grid(sticky=W)
hardware_label.configure(font='arial 14', cursor='hand2', borderwidth=0, background='yellow', foreground='black')
hardware_label.bind('<1>', lambda e, _id=i-1: MyInventory.removeHardware(_id))
hardware_label.image = img
MyInventory.hardware_tips[i-1] = ToolTip(hardware_label, item.name + ': ' + item.description)
MyInventory.hardware_items[i-1] = [hardware_label, item]
def removeHardware(_id):
if MyInventory.hardware_items[_id][0] != None:
uses_left = int(MyInventory.hardware_items[_id][1].uses)
item_name = MyInventory.hardware_items[_id][1].name
msg = 'Move "' + item_name + '" to your inventory?'
if uses_left == 0:
msg = 'WARNING: 0 uses are left for "' + item_name + \
'", this item will be deleted.'
answer = messagebox.askokcancel("Remove Active Hardware Item", msg)
if answer:
MyInventory.api.unequip_item(MyInventory.hardware_items[_id][1])
MyInventory.hardware_items[_id][0].destroy()
MyInventory.hardware_items[_id][0] = None
MyInventory.my_inventory.addItem(MyInventory.hardware_items[_id][1])
MyInventory.api.set_active(MyInventory.hardware_items[_id][1], 'False')
# shift all items to the left on the gui if any visual node gaps
for i in range(_id+1, MyInventory.max_hardware, 1):
if MyInventory.hardware_items[i][0] != None:
MyInventory.resetHardware(MyInventory.hardware_items[i][1], i)
MyInventory.hardware_items[i][0].destroy()
MyInventory.hardware_items[i][0] = None
if MyInventory.hardware_count > 0:
MyInventory.hardware_count -= 1
MyInventory.max_software = 0
MyInventory.max_components = 1
MyInventory.resetInventory()
def addComponent(item):
#if item.item_type in MyInventory.component_effects:
#return False
if MyInventory.components_count < MyInventory.max_components:
i = MyInventory.components_count
img = PhotoImage(file=MyInventory.getEmblemImage(item.name))
component_label = Label(MyInventory.components_frame[MyInventory.components_count], image=img)
component_label.grid(sticky=W)
component_label.configure(font='arial 14', cursor='hand2', borderwidth=0, background='blue', foreground='black')
component_label.bind('<1>', lambda e, _id=i: MyInventory.removeComponent(_id))
component_label.image = img
MyInventory.components_tips[i] = ToolTip(component_label, item.name + ': ' + item.description)
MyInventory.component_items[i] = [component_label, item]
MyInventory.components_count += 1
MyInventory.component_effects[item.item_type] = item
return True
else:
return False # software slots full
def resetComponent(item, i):
img = PhotoImage(file=MyInventory.getEmblemImage(item.name))
component_label = Label(MyInventory.components_frame[i-1], image=img)
component_label.grid(sticky=W)
component_label.configure(font='arial 14', cursor='hand2', borderwidth=0, background='blue', foreground='black')
component_label.bind('<1>', lambda e, _id=i-1: MyInventory.removeComponent(_id))
component_label.image = img
MyInventory.components_tips[i-1] = ToolTip(component_label, item.name + ': ' + item.description)
MyInventory.component_items[i-1] = [component_label, item]
def removeComponent(_id):
if MyInventory.component_items[_id][0] != None:
uses_left = int(MyInventory.component_items[_id][1].uses)
item_name = MyInventory.component_items[_id][1].name
msg = 'Move "' + item_name + '" to your inventory?'
if uses_left == 0:
msg = 'WARNING: 0 uses are left for "' + item_name + \
'", this item will be deleted.'
answer = messagebox.askokcancel("Remove Active Component Item", msg)
if answer:
MyInventory.component_effects.pop(MyInventory.component_items[_id][1].item_type, None)
MyInventory.api.unequip_item(MyInventory.component_items[_id][1])
MyInventory.component_items[_id][0].destroy()
MyInventory.component_items[_id][0] = None
MyInventory.my_inventory.addItem(MyInventory.component_items[_id][1])
MyInventory.api.set_active(MyInventory.component_items[_id][1], 'False')
# shift all items to the left on the gui if any visual node gaps
for i in range(_id+1, MyInventory.max_components, 1):
if MyInventory.component_items[i][0] != None:
MyInventory.resetComponent(MyInventory.component_items[i][1], i)
MyInventory.component_items[i][0].destroy()
MyInventory.component_items[i][0] = None
if MyInventory.components_count > 0:
MyInventory.components_count -= 1
    def resetInventory():
        """Rebuild the active software/component slot grids.

        Called after ``max_software`` / ``max_components`` change (e.g. new
        hardware equipped).  All existing slot frames, tooltips and item
        labels are destroyed, fresh empty slots are created, and the items
        that were previously active are re-added; items that no longer fit
        are deactivated and returned to the inventory.
        """
        # remember the currently active items before tearing the grids down
        tmpComponent = MyInventory.component_items
        tmpSoftware = MyInventory.software_items
        for i in range(len(MyInventory.software_frame)):
            MyInventory.software_frame[i].destroy()
            if MyInventory.software_tips[i] != None:
                MyInventory.software_tips[i].destroy()
            if MyInventory.software_items[i][0] != None:
                MyInventory.software_items[i][0].destroy()
        for i in range(len(MyInventory.components_frame)):
            MyInventory.components_frame[i].destroy()
            if MyInventory.components_tips[i] != None:
                MyInventory.components_tips[i].destroy()
            if MyInventory.component_items[i][0] != None:
                MyInventory.component_items[i][0].destroy()
        MyInventory.software_frame = []
        MyInventory.software_tips = []
        MyInventory.software_items = []
        MyInventory.software_count = 0
        MyInventory.components_frame = []
        MyInventory.components_tips = []
        MyInventory.component_items = []
        MyInventory.components_count = 0
        style = Style()
        # recreate the (possibly resized) empty slot grids
        for i in range(MyInventory.max_software):
            software_frame = Frame(MyInventory.items_frame, width=25, height=25, style='item.TFrame')
            software_frame.grid(row=1, column=i+1, sticky=W, pady=4, padx=4)
            style.configure('item.TFrame', background='#AEAEAE')
            MyInventory.software_frame.append(software_frame)
            MyInventory.software_items.append([None, None])
            MyInventory.software_tips.append(None)
        for i in range(MyInventory.max_components):
            component_frame = Frame(MyInventory.items_frame, width=25, height=25, style='item.TFrame')
            component_frame.grid(row=3, column=i+1, sticky=W, pady=4, padx=4)
            style.configure('item.TFrame', background='#AEAEAE')
            MyInventory.components_frame.append(component_frame)
            MyInventory.component_items.append([None, None])
            MyInventory.components_tips.append(None)
        addedEffects = []
        equippedItems = []
        # re-add previously active software; overflow goes back to the inventory
        for i in range(len(tmpSoftware)):
            not_full = True
            if tmpSoftware[i][0] != None:
                effect = MyInventory.api.remove_effect(tmpSoftware[i][1])
                MyInventory.software_effects.pop(tmpSoftware[i][1].item_type, None)
                not_full = MyInventory.addSoftware(tmpSoftware[i][1])
                if not not_full:
                    MyInventory.api.set_active(tmpSoftware[i][1], 'False')
                    MyInventory.my_inventory.addItem(tmpSoftware[i][1])
                else:
                    MyInventory.api.set_active(tmpSoftware[i][1], 'True')
                    addedEffects.append(effect)
        # re-add previously active components; overflow goes back to the inventory
        for i in range(len(tmpComponent)):
            not_full = True
            if tmpComponent[i][0] != None:
                equipped = MyInventory.api.unequip_item(tmpComponent[i][1])
                MyInventory.component_effects.pop(tmpComponent[i][1].item_type, None)
                not_full = MyInventory.addComponent(tmpComponent[i][1])
                if not not_full:
                    MyInventory.api.set_active(tmpComponent[i][1], 'False')
                    MyInventory.my_inventory.addItem(tmpComponent[i][1])
                else:
                    MyInventory.api.set_active(tmpComponent[i][1], 'True')
                    equippedItems.append(equipped)
        # re-apply the effects of everything that stayed active
        for i in addedEffects:
            MyInventory.api.use_item(i)
        for i in equippedItems:
            MyInventory.api.use_item(i)
def time_up(item_type):
for i in range(len(MyInventory.software_items)):
if item_type == MyInvenetory.software_items[i][1].item_type:
MyInventory.removeSoftware(i)
class Inventory():
    """Scrollable "My Inventory" GUI panel.

    Lays the player's inactive items out in a grid (max 5 per row) inside
    a scrollable canvas, plus an "Item Manager" side panel where the
    currently selected item can be sold or used.  Activating an item
    delegates to the MyInventory active-slot helpers.
    """
    def __init__(self, inventory_plugin_frame):
        # parent frame supplied by the hosting plugin
        self.frame = inventory_plugin_frame
        self.canvas_color = '#EBEDF1'
        self.items = [] # store all of the player's Item objects here
        self.item_tips = []
        # NOTE(review): 'selceted' is a long-standing internal typo; kept
        # as-is because it is used consistently throughout this class.
        self.selceted_item_id = None
        self.selected_item_ref = None
        # True while an item is shown in the Item Manager panel
        self.item_set = False
    def initInventory(self, char_items):
        """Populate the panel from the character's saved items.

        Items flagged active ('True') are routed straight to the matching
        MyInventory active area; everything else is placed in the grid.
        """
        row, col = 0, 0
        for item in char_items:
            # item does not need to be added to the inventory since it's active
            if item.active == 'True': # item.active is a string
                item_type = item.component
                if item_type == 'hardware':
                    MyInventory.addHardware(item)
                    # the hardware type determines how many software and
                    # component slots exist; resetInventory rebuilds them
                    if item.item_type == 'laptop':
                        MyInventory.max_software = 1
                        MyInventory.max_components = 3
                        MyInventory.resetInventory()
                    if item.item_type == 'desktop':
                        MyInventory.max_software = 2
                        MyInventory.max_components = 3
                        MyInventory.resetInventory()
                    if item.item_type == 'terminal':
                        MyInventory.max_software = 3
                        MyInventory.max_components = 5
                        MyInventory.resetInventory()
                    if item.item_type == 'server':
                        MyInventory.max_software = 3
                        MyInventory.max_components = 4
                        MyInventory.resetInventory()
                    if item.item_type == 'desk':
                        MyInventory.max_software = 1
                        MyInventory.max_components = 2
                        MyInventory.resetInventory()
                elif item_type == 'software':
                    MyInventory.addSoftware(item)
                    MyInventory.api.use_item(item)
                elif item_type == 'component':
                    MyInventory.addComponent(item)
                    MyInventory.api.use_item(item)
            else:
                self.items.append([SetInventoryItem(item.name, item.image, item.description, item.effect,
                                                    item.uses, item.value, item.item_type, item.active,
                                                    row, col, item, item.component), None])
                col += 1
                if col == 5: # max of 5 items in a row
                    col = 0
                    row += 1
        self.makeLayout()
        self.createItems()
    def createItems(self):
        """Create a grid frame for every item currently in self.items."""
        # loops through the list of items that need to be created and shown in the shop
        for i in range(len(self.items)):
            self.makeItemFrame(i)
    def addItem(self, item):
        """Append a newly acquired item and draw it in the next free cell."""
        if len(self.items) == 0:
            self.items.append([SetInventoryItem(item.name, item.image, item.description, item.effect,
                                                item.uses, item.value, item.item_type, item.active,
                                                0, 0, item, item.component), None])
            self.makeItemFrame(0)
        else:
            # this function is called when a new item is purchased
            col = self.items[len(self.items)-1][0].getColumn() + 1
            row = self.items[len(self.items)-1][0].getRow()
            if col == 5: # max of 5 items in a row
                col = 0
                row += 1
            self.items.append([SetInventoryItem(item.name, item.image, item.description, item.effect,
                                                item.uses, item.value, item.item_type, item.active,
                                                row, col, item, item.component), None])
            self.makeItemFrame(len(self.items)-1)
        self.reposition()
    def makeItemFrame(self, i):
        """Build the clickable bordered frame + image label for item i."""
        # prep appropriate data for when the user clicks on a item
        t_name = self.items[i][0].getName()
        t_descript = self.items[i][0].getDescription()
        t_cost = self.items[i][0].getValue()
        t_type = self.items[i][0].getComponent()
        # create frame border for item
        border_frame = Frame(self.inventoryFrame, padding=2, cursor='hand2', style='c.TFrame')
        border_frame.grid(row=self.items[i][0].getRow(), column=self.items[i][0].getColumn(),
                          padx=7, pady=7, sticky='wn')
        self.style.configure('c.TFrame', background=self.canvas_color)
        # Important step. This reference is used to visually delete the item from the inventory
        self.items[i][1] = border_frame
        # user clicks on the frame border
        border_frame.bind('<1>', lambda e, item_id=i, item_ref=self.items[i][0].getItemRef(),
                          name=t_name, descript=t_descript, cost=t_cost, i_type=t_type :
                          self.selectItem(item_id, item_ref, name, descript, self.items[i][0].getUses(), cost, i_type))
        self.item_tips.append(ToolTip(border_frame, t_name + ': ' + t_descript))
        # create frame for item data
        item_frame = Frame(border_frame, style='itemFrame.TFrame', padding=9, cursor='hand2')
        item_frame.grid(sticky='wn')
        self.style.configure('itemFrame.TFrame', background=self.canvas_color)
        # user clicks on the frame containing all of the item's info
        item_frame.bind('<1>', lambda e, item_id=i, item_ref=self.items[i][0].getItemRef(),
                        name=t_name, descript=t_descript, cost=t_cost, i_type=t_type :
                        self.selectItem(item_id, item_ref, name, descript, self.items[i][0].getUses(), cost, i_type))
        item_frame.bind('<Enter>', lambda e, _i=i: self.highlight(_i, True))
        item_frame.bind('<Leave>', lambda e, _i=i: self.highlight(_i, False))
        # create label to hold the item's image
        img = PhotoImage(file=self.items[i][0].getFile())
        new_item = Label(item_frame, padding=10, image=img, style='item.TLabel', cursor='hand2')
        new_item.grid(row=1, column=0)
        # keep a reference so Tk doesn't garbage-collect the image
        new_item.image = img
        self.style.configure('item.TLabel', background=self.canvas_color)
        # user clicks on the image of the item
        new_item.bind('<1>', lambda e, item_id=i, item_ref=self.items[i][0].getItemRef(),
                      name=t_name, descript=t_descript, cost=t_cost, i_type=t_type :
                      self.selectItem(item_id, item_ref, name, descript, self.items[i][0].getUses(), cost, i_type))
    def remove_selected_item(self):
        """
        remove the item from the panel on the
        right side of the inventory
        """
        self.selectedItem.image = None
        self.selectedItem.destroy()
        self.selectedItemFrame.destroy()
    def remove_item_from_inventory(self, item_id):
        """
        remove the selected item from the inventory
        on both sell and use commands
        """
        self.remove_selected_item()
        self.items[item_id][1].destroy()
        self.items[item_id][1] = None
        self.reposition()
        self.item_tips[item_id] = None
        self.item_set = False
    def selectItem(self, item_id, item, name, descript, uses, cost, item_type):
        """
        called when the user clicks on an item in the
        inventory panel. The item is prepped for the
        item manager panel on the right side of the GUI
        """
        # set item info in the item manager
        self.setItemInfo(name, descript, uses, cost, item_type)
        # currently selected item is reset when the user selects a new item
        if self.item_set:
            self.remove_selected_item()
        # make sure the item has not been deleted or used
        if self.items[item_id][1] != None:
            # selected item info goes in this frame
            self.selectedItemFrame = Frame(self.item_info_frame, borderwidth=0, style='select.TFrame')
            self.selectedItemFrame.grid(row=2, columnspan=2, sticky='news')
            self.selectedItemFrame.grid_columnconfigure(1, weight=1)
            self.style.configure('select.TFrame', background='#B4B4B4')
            # selected item stored here
            self.selectedItem = Label(self.selectedItemFrame, padding='10 5 5 5')
            self.selectedItem.grid(row=0, columnspan=2, sticky='news')
            self.selectedItem.grid_columnconfigure(1, weight=1)
            img = PhotoImage(file=self.items[item_id][0].getFile())
            # items resell for 75% of their purchase value
            cost = int(float(self.items[item_id][0].getValue())*0.75)
            self.selectedItem.configure(text=' Sell for: $' + str(cost), image=img,
                                        compound = 'left', font='arial 12 bold italic',
                                        foreground='black', background='#FCE081', padding='10 5 5 5')
            self.selectedItem.image = img
            self.selceted_item_id = item_id
            self.selected_item_ref = item
            self.item_set = True
    def sellItem(self, item_id, item):
        """Sell the currently selected item and clear the selection."""
        if item_id != None:
            if self.items[item_id][1] != None:
                MyInventory.api.sell_item(item)
                self.remove_item_from_inventory(item_id)
                self.selceted_item_id = None
                self.selected_item_ref = None
                self.resetItemInfo()
    def useItem(self, item_id, item):
        """Use/equip the selected item.

        Consumables ('food'/'misc') are consumed immediately; other types
        are moved to the matching active slot when space permits,
        otherwise the user is told the active area is full.
        """
        if item_id != None:
            if self.items[item_id][1] != None:
                not_full = True
                item_type = item.component
                if item_type == 'food' or item_type == 'misc':
                    new_uses = int(self.items[item_id][0].getUses()) - 1
                    self.items[item_id][0].setUses(str(new_uses))
                    item.active = 'False'
                    MyInventory.api.use_item(item)
                    self.remove_item_from_inventory(item_id)
                    self.selceted_item_id = None
                    self.selected_item_ref = None
                    self.resetItemInfo()
                else:
                    if item_type == 'software':
                        item.active = 'True'
                        # smokescreen is used without occupying a slot
                        if item.item_type != 'smokescreen':
                            not_full = MyInventory.addSoftware(item)
                    elif item_type == 'hardware':
                        item.active = 'True'
                        not_full = MyInventory.addHardware(item)
                    elif item_type == 'component':
                        item.active = 'True'
                        not_full = MyInventory.addComponent(item)
                    # item slot not full, so add the item to the active items area of the GUI
                    if not_full:
                        # new hardware resizes the active slot grids
                        if item.component == 'hardware':
                            if item.item_type == 'laptop':
                                MyInventory.max_software = 1
                                MyInventory.max_components = 3
                                MyInventory.resetInventory()
                            if item.item_type == 'desktop':
                                MyInventory.max_software = 2
                                MyInventory.max_components = 3
                                MyInventory.resetInventory()
                            if item.item_type == 'terminal':
                                MyInventory.max_software = 3
                                MyInventory.max_components = 5
                                MyInventory.resetInventory()
                            if item.item_type == 'server':
                                MyInventory.max_software = 3
                                MyInventory.max_components = 4
                                MyInventory.resetInventory()
                            if item.item_type == 'desk':
                                MyInventory.max_software = 1
                                MyInventory.max_components = 2
                                MyInventory.resetInventory()
                        MyInventory.api.use_item(item)
                        self.remove_item_from_inventory(item_id)
                        self.selceted_item_id = None
                        self.selected_item_ref = None
                        self.resetItemInfo()
                    else:
                        messagebox.showinfo('Active Space Full', 'Active ' + item_type + ' area full.')
                        item.active = 'False'
    def resetItemInfo(self):
        """Blank out the Item Manager info labels."""
        self.nameSelect.configure(text='')
        self.descriptSelect.configure(text='')
        self.costSelect.configure(text='')
        self.typeSelect.configure(text='')
    def setItemInfo(self, name, descript, uses, cost, item_type):
        """
        setItemInfo sets the info to be shown in the item
        information frame, right above the buy button
        """
        name = "Item name: " + name
        self.nameSelect.configure(text=name)
        cost = "Purchase price: $" + str(cost)
        self.costSelect.configure(text=cost)
        item_type = "Item type: " + item_type
        self.typeSelect.configure(text=item_type)
        descript = "Item effect: " + descript
        self.descriptSelect.configure(text=descript)
    def reposition(self):
        """
        repositions all items in the inventory once a item is removed or added
        """
        _row, _col = 0, 0
        for i in range(len(self.items)):
            if self.items[i][1] != None:
                self.items[i][0].setRow(_row)
                self.items[i][0].setColumn(_col)
                self.items[i][1].grid(row=_row, column=_col)
                _col += 1
                if _col == 5:
                    _col = 0
                    _row += 1
    def highlight(self, i, enter):
        """Highlight item i's border on mouse enter; restore on leave."""
        # this loop is needed to prevent all item border frames from becoming highlighted
        for j in range(len(self.items)):
            if self.items[j][1] != None:
                self.items[j][1].config(style='a.TFrame')
                self.style.configure('a.TFrame', background=self.canvas_color)
        if self.items[i][1] != None:
            self.items[i][1].config(style='b.TFrame')
            if enter: # mouse enter
                self.style.configure('b.TFrame', background='#26507D')
            else: # mouse leave
                self.style.configure('b.TFrame', background=self.canvas_color)
    def makeCanvas(self):
        """Build the scrollable canvas that hosts the item grid."""
        # create a canvas to allow for scrolling of the shop Frame
        self.inv_canvas = Canvas(self.frame, highlightthickness=0, borderwidth=0, background=self.canvas_color)
        self.inv_canvas.grid(row=1, column=0, sticky='news')
        # create the inventory frame and place it inside the canvas
        self.inventoryFrame = Frame(self.inv_canvas, borderwidth=0,
                                    style='inventoryFrame.TFrame', padding=10)
        self.inventoryFrame.grid(sticky='news')
        self.scrollBarY = Scrollbar(self.frame, orient="vertical", command=self.inv_canvas.yview)
        self.scrollBarX = Scrollbar(self.frame, orient="horizontal", command=self.inv_canvas.xview)
        self.inv_canvas.configure(yscrollcommand=self.scrollBarY.set,
                                  xscrollcommand=self.scrollBarX.set)
        self.frame.grid_columnconfigure(0, weight=1)
        self.frame.grid_rowconfigure(1, weight=1)
        self.inv_canvas.create_window((0,0), window=self.inventoryFrame,
                                      anchor='nw', tags='self.inventoryFrame')
        self.scrollBarY.grid(row=1, column=1, rowspan=2, sticky='ns')
        self.scrollBarX.grid(row=3, column=0, columnspan=1, sticky='we')
        def setupCanvasFrame(event):
            # resets the scroll region for the frame inserted into the canvas
            self.inv_canvas.configure(scrollregion=self.inv_canvas.bbox("all"))
        self.inventoryFrame.bind("<Configure>", setupCanvasFrame)
        self.inv_canvas.bind("<Enter>", lambda e: self.setScrolling())
        self.style = Style()
        self.style.configure('inventoryFrame.TFrame', background=self.canvas_color)
    def scrollMouse(self, event):
        """ allows for mouse wheel scrolling """
        try:
            # event.delta is a multiple of 120 on Windows; negate so wheel-up scrolls up
            self.inv_canvas.yview_scroll(-1 * int(event.delta/120), "units")
        except:
            pass
    def setScrolling(self):
        """Route global mouse-wheel events to this canvas while hovered."""
        self.inv_canvas.bind_all("<MouseWheel>", lambda e: self.scrollMouse(e))
    def makeLayout(self):
        """Build the static layout: title, canvas, and Item Manager panel."""
        # Inventory title
        self.invTitle = Label(self.frame, text='My Inventory', padding='10 5 5 5')
        self.invTitle.grid(sticky='news', row=0, columnspan=3)
        self.invTitle.configure(font='arial 14 bold', foreground='#FFD237', background='#1E1E1E')
        self.makeCanvas()
        self.item_info_frame = Frame(self.frame, borderwidth=0)
        self.item_info_frame.grid(row=1, column=2, rowspan=3, sticky='news')
        self.item_info_frame.grid_columnconfigure(2, weight=1)
        self.item_info_frame.grid_rowconfigure(3, weight=1)
        # inventory panel title
        self.panelTitle = Label(self.item_info_frame, text='Item Manager',
                                anchor=CENTER, width=32, padding='10 5 5 5')
        self.panelTitle.grid(sticky='news', row=0, columnspan=2)
        self.panelTitle.configure(font='arial 16 bold italic', foreground='#F5F5F5', background='#282828')
        # sell item button
        self.sellButton = Label(self.item_info_frame, text='sell', padding='10 5 5 5', cursor='hand2')
        self.sellButton.grid(sticky='news', row=1, column=0)
        self.sellButton.configure(font='arial 14 bold', anchor=CENTER,
                                  foreground='white', background='#3E710F')
        self.sellButton.bind('<1>', lambda e: self.sellItem(self.selceted_item_id, self.selected_item_ref))
        self.sellButton.bind('<Enter>', lambda e: self.sellButton.configure(background='#5CA916'))
        self.sellButton.bind('<Leave>', lambda e: self.sellButton.configure(background='#3E710F'))
        # use item button
        self.useButton = Label(self.item_info_frame, text='use', padding='10 5 5 5', cursor='hand2')
        self.useButton.grid(sticky='news', row=1, column=1)
        self.useButton.configure(font='arial 14 bold', anchor=CENTER,
                                 foreground='white', background='#0F5A71')
        self.useButton.bind('<1>', lambda e: self.useItem(self.selceted_item_id, self.selected_item_ref))
        self.useButton.bind('<Enter>', lambda e: self.useButton.configure(background='#1795B9'))
        self.useButton.bind('<Leave>', lambda e: self.useButton.configure(background='#0F5A71'))
        panel_color_bg = '#0F0F0F'
        item_info_div = Frame(self.item_info_frame, borderwidth=0, style='Info.TFrame')
        item_info_div.grid(row=3, columnspan=2, sticky='news')
        self.style.configure('Info.TFrame', background=panel_color_bg)
        # item's name
        self.nameSelect = Label(item_info_div, text='', width=30, padding='10 10 5 5')
        self.nameSelect.grid(sticky='news', row=0, columnspan=2)
        self.nameSelect.configure(font='arial 12 bold', foreground='#48D220', background=panel_color_bg)
        # selected item's type
        self.typeSelect = Label(item_info_div, text='', padding='10 5 5 5')
        self.typeSelect.grid(sticky='news', row=1, columnspan=2)
        self.typeSelect.configure(font='arial 12 bold', foreground='#48D220', background=panel_color_bg)
        # selected item's cost
        self.costSelect = Label(item_info_div, text='', padding='10 5 5 5')
        self.costSelect.grid(sticky='news', row=2, columnspan=2)
        self.costSelect.configure(font='arial 12 bold', foreground='#48D220', background=panel_color_bg)
        # selected item's description
        self.descriptSelect = Label(item_info_div, text='', wraplength=370, padding='10 5 5 5')
        self.descriptSelect.grid(sticky='news', row=3, columnspan=2)
        self.descriptSelect.configure(font='arial 12 bold', foreground='#48D220', background=panel_color_bg)
        self.descriptSelect.grid_columnconfigure(1, weight=1)
|
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
import testtools
import uuid
import webob
from neutron.common import constants
from neutron.common import exceptions as exc
from neutron.common import utils
from neutron import context
from neutron.extensions import multiprovidernet as mpnet
from neutron.extensions import portbindings
from neutron.extensions import providernet as pnet
from neutron import manager
from neutron.plugins.common import constants as service_constants
from neutron.plugins.ml2.common import exceptions as ml2_exc
from neutron.plugins.ml2 import config
from neutron.plugins.ml2 import db as ml2_db
from neutron.plugins.ml2 import driver_api
from neutron.plugins.ml2 import driver_context
from neutron.plugins.ml2.drivers import type_vlan
from neutron.plugins.ml2 import plugin as ml2_plugin
from neutron.tests.unit import _test_extension_portbindings as test_bindings
from neutron.tests.unit.ml2.drivers import mechanism_logger as mech_logger
from neutron.tests.unit.ml2.drivers import mechanism_test as mech_test
from neutron.tests.unit import test_db_plugin as test_plugin
from neutron.tests.unit import test_extension_allowedaddresspairs as test_pair
from neutron.tests.unit import test_extension_extradhcpopts as test_dhcpopts
from neutron.tests.unit import test_security_groups_rpc as test_sg_rpc
# Make the VLAN type driver's option visible so tests can override it.
config.cfg.CONF.import_opt('network_vlan_ranges',
                           'neutron.plugins.ml2.drivers.type_vlan',
                           group='ml2_type_vlan')
# Fully qualified class path of the plugin under test.
PLUGIN_NAME = 'neutron.plugins.ml2.plugin.Ml2Plugin'
class Ml2PluginV2TestCase(test_plugin.NeutronDbPluginV2TestCase):
    """Base test case that boots the ML2 plugin with test drivers.

    Configures the 'logger' and 'test' mechanism drivers plus two VLAN
    physical networks before calling the generic DB-plugin setUp.
    """
    _plugin_name = PLUGIN_NAME
    _mechanism_drivers = ['logger', 'test']
    def setUp(self):
        # We need a L3 service plugin
        l3_plugin = ('neutron.tests.unit.test_l3_plugin.'
                     'TestL3NatServicePlugin')
        service_plugins = {'l3_plugin_name': l3_plugin}
        # Enable the test mechanism driver to ensure that
        # we can successfully call through to all mechanism
        # driver apis.
        config.cfg.CONF.set_override('mechanism_drivers',
                                     self._mechanism_drivers,
                                     group='ml2')
        self.physnet = 'physnet1'
        self.vlan_range = '1:100'
        self.vlan_range2 = '200:300'
        self.physnet2 = 'physnet2'
        self.phys_vrange = ':'.join([self.physnet, self.vlan_range])
        self.phys2_vrange = ':'.join([self.physnet2, self.vlan_range2])
        # config overrides must be in place before the plugin is loaded
        config.cfg.CONF.set_override('network_vlan_ranges',
                                     [self.phys_vrange, self.phys2_vrange],
                                     group='ml2_type_vlan')
        super(Ml2PluginV2TestCase, self).setUp(PLUGIN_NAME,
                                               service_plugins=service_plugins)
        self.port_create_status = 'DOWN'
        self.driver = ml2_plugin.Ml2Plugin()
        self.context = context.get_admin_context()
class TestMl2BulkToggleWithBulkless(Ml2PluginV2TestCase):
    """Native bulk support must be disabled if any driver is bulkless."""
    _mechanism_drivers = ['logger', 'test', 'bulkless']
    def test_bulk_disable_with_bulkless_driver(self):
        self.assertTrue(self._skip_native_bulk)
class TestMl2BulkToggleWithoutBulkless(Ml2PluginV2TestCase):
    """Native bulk support stays enabled when all drivers support bulk."""
    _mechanism_drivers = ['logger', 'test']
    def test_bulk_enabled_with_bulk_drivers(self):
        self.assertFalse(self._skip_native_bulk)
class TestMl2BasicGet(test_plugin.TestBasicGet,
                      Ml2PluginV2TestCase):
    """Run the generic basic-GET plugin tests against ML2."""
    pass
class TestMl2V2HTTPResponse(test_plugin.TestV2HTTPResponse,
                            Ml2PluginV2TestCase):
    """Run the generic v2 HTTP response tests against ML2."""
    pass
class TestMl2NetworksV2(test_plugin.TestNetworksV2,
                        Ml2PluginV2TestCase):
    """Run the generic network API tests against ML2."""
    pass
class TestMl2SubnetsV2(test_plugin.TestSubnetsV2,
                       Ml2PluginV2TestCase):
    """Run the generic subnet API tests against ML2."""
    pass
class TestMl2PortsV2(test_plugin.TestPortsV2, Ml2PluginV2TestCase):
    """Port API tests specific to the ML2 plugin."""
    def test_update_port_status_build(self):
        # A freshly created, unbound port stays DOWN.
        with self.port() as port:
            self.assertEqual('DOWN', port['port']['status'])
            self.assertEqual('DOWN', self.port_create_status)
    def test_update_non_existent_port(self):
        # Updating an unknown port id must raise PortNotFound.
        ctx = context.get_admin_context()
        plugin = manager.NeutronManager.get_plugin()
        data = {'port': {'admin_state_up': False}}
        self.assertRaises(exc.PortNotFound, plugin.update_port, ctx,
                          'invalid-uuid', data)
    def test_delete_non_existent_port(self):
        # Deleting an unknown port is a no-op that only logs at debug.
        ctx = context.get_admin_context()
        plugin = manager.NeutronManager.get_plugin()
        with mock.patch.object(ml2_plugin.LOG, 'debug') as log_debug:
            plugin.delete_port(ctx, 'invalid-uuid', l3_port_check=False)
            log_debug.assert_has_calls([
                mock.call(_("Deleting port %s"), 'invalid-uuid'),
                mock.call(_("The port '%s' was deleted"), 'invalid-uuid')
            ])
    def test_delete_port_no_notify_in_disassociate_floatingips(self):
        # Floating IP disassociation during port delete must defer its
        # router notification until after the DB transaction commits.
        ctx = context.get_admin_context()
        plugin = manager.NeutronManager.get_plugin()
        l3plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        with contextlib.nested(
            self.port(do_delete=False),
            mock.patch.object(l3plugin, 'disassociate_floatingips'),
            mock.patch.object(l3plugin, 'notify_routers_updated')
        ) as (port, disassociate_floatingips, notify):
            port_id = port['port']['id']
            plugin.delete_port(ctx, port_id)
            # check that no notification was requested while under
            # transaction
            disassociate_floatingips.assert_has_calls([
                mock.call(ctx, port_id, do_notify=False)
            ])
            # check that notifier was still triggered
            notify.assert_has_calls([
                mock.call(ctx, disassociate_floatingips.return_value)
            ])
    def test_check_if_compute_port_serviced_by_dvr(self):
        self.assertTrue(utils.is_dvr_serviced('compute:None'))
    def test_check_if_lbaas_vip_port_serviced_by_dvr(self):
        self.assertTrue(utils.is_dvr_serviced(
            constants.DEVICE_OWNER_LOADBALANCER))
    def test_check_if_port_not_serviced_by_dvr(self):
        self.assertFalse(utils.is_dvr_serviced(
            constants.DEVICE_OWNER_ROUTER_INTF))
    def test_disassociate_floatingips_do_notify_returns_nothing(self):
        ctx = context.get_admin_context()
        l3plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        with self.port() as port:
            port_id = port['port']['id']
            # check that nothing is returned when notifications are handled
            # by the called method
            self.assertIsNone(l3plugin.disassociate_floatingips(ctx, port_id))
class TestMl2DvrPortsV2(TestMl2PortsV2):
    """DVR-specific port deletion tests.

    Uses a mocked L3 service plugin advertising the router / L3 agent
    scheduler / DVR extensions so that deleting the last VM port on a
    host triggers router-namespace cleanup.

    Refactored: the two delete-last-VM-port tests shared an identical
    ~20-line body differing only in the floating-IP set returned by
    disassociate_floatingips; that body now lives in a private helper.
    """
    def setUp(self):
        super(TestMl2DvrPortsV2, self).setUp()
        extensions = ['router',
                      constants.L3_AGENT_SCHEDULER_EXT_ALIAS,
                      constants.L3_DISTRIBUTED_EXT_ALIAS]
        self.plugin = manager.NeutronManager.get_plugin()
        self.l3plugin = mock.Mock()
        type(self.l3plugin).supported_extension_aliases = (
            mock.PropertyMock(return_value=extensions))
        self.service_plugins = {'L3_ROUTER_NAT': self.l3plugin}

    def _test_delete_last_vm_port(self, fip_set):
        # Delete the only VM port on a host and verify the router
        # namespace scheduled there is removed from its L3 agent and
        # that routers are notified with the given floating-IP set.
        ns_to_delete = {'host': 'vmhost', 'agent_id': 'vm_l3_agent',
                        'router_id': 'my_router'}
        with contextlib.nested(
            mock.patch.object(manager.NeutronManager,
                              'get_service_plugins',
                              return_value=self.service_plugins),
            self.port(do_delete=False, device_owner='compute:None'),
            mock.patch.object(self.l3plugin, 'notify_routers_updated'),
            mock.patch.object(self.l3plugin, 'disassociate_floatingips',
                              return_value=fip_set),
            mock.patch.object(self.l3plugin, 'dvr_deletens_if_no_vm',
                              return_value=[ns_to_delete]),
            mock.patch.object(self.l3plugin, 'remove_router_from_l3_agent')
        ) as (get_service_plugins, port, notify, disassociate_floatingips,
              ddinv, remove_router_from_l3_agent):
            port_id = port['port']['id']
            self.plugin.delete_port(self.context, port_id)
            notify.assert_has_calls([mock.call(self.context, fip_set)])
            remove_router_from_l3_agent.assert_has_calls([
                mock.call(self.context, ns_to_delete['agent_id'],
                          ns_to_delete['router_id'])
            ])

    def test_delete_last_vm_port(self):
        # No floating IPs associated with the deleted port.
        self._test_delete_last_vm_port(set())

    def test_delete_last_vm_port_with_floatingip(self):
        # A floating IP on the port yields the affected router id.
        self._test_delete_last_vm_port(set(['my_router']))
class TestMl2PortBinding(Ml2PluginV2TestCase,
                         test_bindings.PortBindingsTestCase):
    """Port-binding behaviour of ML2, including binding:profile handling."""
    # Test case does not set binding:host_id, so ml2 does not attempt
    # to bind port
    VIF_TYPE = portbindings.VIF_TYPE_UNBOUND
    HAS_PORT_FILTER = False
    ENABLE_SG = True
    FIREWALL_DRIVER = test_sg_rpc.FIREWALL_HYBRID_DRIVER
    def setUp(self, firewall_driver=None):
        # firewall_driver kept for signature compatibility; the class
        # constant FIREWALL_DRIVER is what actually gets configured
        test_sg_rpc.set_firewall_driver(self.FIREWALL_DRIVER)
        config.cfg.CONF.set_override(
            'enable_security_group', self.ENABLE_SG,
            group='SECURITYGROUP')
        super(TestMl2PortBinding, self).setUp()
    def _check_port_binding_profile(self, port, profile=None):
        # Assert the port carries exactly the expected binding:profile
        # (empty dict when no profile was set).
        self.assertIn('id', port)
        self.assertIn(portbindings.PROFILE, port)
        value = port[portbindings.PROFILE]
        self.assertEqual(profile or {}, value)
    def test_create_port_binding_profile(self):
        self._test_create_port_binding_profile({'a': 1, 'b': 2})
    def test_update_port_binding_profile(self):
        self._test_update_port_binding_profile({'c': 3})
    def test_create_port_binding_profile_too_big(self):
        # An oversized profile must be rejected with a 400.
        s = 'x' * 5000
        profile_arg = {portbindings.PROFILE: {'d': s}}
        try:
            with self.port(expected_res_status=400,
                           arg_list=(portbindings.PROFILE,),
                           **profile_arg):
                pass
        except webob.exc.HTTPClientError:
            pass
    def test_remove_port_binding_profile(self):
        # Updating the profile to None clears it to an empty dict.
        profile = {'e': 5}
        profile_arg = {portbindings.PROFILE: profile}
        with self.port(arg_list=(portbindings.PROFILE,),
                       **profile_arg) as port:
            self._check_port_binding_profile(port['port'], profile)
            port_id = port['port']['id']
            profile_arg = {portbindings.PROFILE: None}
            port = self._update('ports', port_id,
                                {'port': profile_arg})['port']
            self._check_port_binding_profile(port)
            port = self._show('ports', port_id)['port']
            self._check_port_binding_profile(port)
    def test_return_on_concurrent_delete_and_binding(self):
        # create a port and delete it so we have an expired mechanism context
        with self.port() as port:
            plugin = manager.NeutronManager.get_plugin()
            binding = ml2_db.get_locked_port_and_binding(self.context.session,
                                                         port['port']['id'])[1]
            binding['host'] = 'test'
            mech_context = driver_context.PortContext(
                plugin, self.context, port['port'],
                plugin.get_network(self.context, port['port']['network_id']),
                binding)
        with contextlib.nested(
            mock.patch('neutron.plugins.ml2.plugin.'
                       'db.get_locked_port_and_binding',
                       return_value=(None, None)),
            mock.patch('neutron.plugins.ml2.plugin.Ml2Plugin._make_port_dict')
        ) as (glpab_mock, mpd_mock):
            plugin._bind_port_if_needed(mech_context)
            # called during deletion to get port
            self.assertTrue(glpab_mock.mock_calls)
            # should have returned before calling _make_port_dict
            self.assertFalse(mpd_mock.mock_calls)
    def test_port_binding_profile_not_changed(self):
        # An unrelated port update must leave binding:profile intact.
        profile = {'e': 5}
        profile_arg = {portbindings.PROFILE: profile}
        with self.port(arg_list=(portbindings.PROFILE,),
                       **profile_arg) as port:
            self._check_port_binding_profile(port['port'], profile)
            port_id = port['port']['id']
            state_arg = {'admin_state_up': True}
            port = self._update('ports', port_id,
                                {'port': state_arg})['port']
            self._check_port_binding_profile(port, profile)
            port = self._show('ports', port_id)['port']
            self._check_port_binding_profile(port, profile)
class TestMl2PortBindingNoSG(TestMl2PortBinding):
    """Same binding tests with security groups disabled (noop firewall)."""
    HAS_PORT_FILTER = False
    ENABLE_SG = False
    FIREWALL_DRIVER = test_sg_rpc.FIREWALL_NOOP_DRIVER
class TestMl2PortBindingHost(Ml2PluginV2TestCase,
                             test_bindings.PortBindingsHostTestCaseMixin):
    """Run the generic binding:host_id tests against ML2."""
    pass
class TestMl2PortBindingVnicType(Ml2PluginV2TestCase,
                                 test_bindings.PortBindingsVnicTestCaseMixin):
    """Run the generic binding:vnic_type tests against ML2."""
    pass
class TestMultiSegmentNetworks(Ml2PluginV2TestCase):
    def setUp(self, plugin=None):
        # 'plugin' kept for signature compatibility with sibling test
        # cases; this class always uses the base (ML2) setup.
        super(TestMultiSegmentNetworks, self).setUp()
    def test_allocate_dynamic_segment(self):
        """Dynamic segments can be allocated with and without an explicit
        segmentation id, and are retrievable by physnet or by id."""
        data = {'network': {'name': 'net1',
                            'tenant_id': 'tenant_one'}}
        network_req = self.new_create_request('networks', data)
        network = self.deserialize(self.fmt,
                                   network_req.get_response(self.api))
        segment = {driver_api.NETWORK_TYPE: 'vlan',
                   driver_api.PHYSICAL_NETWORK: 'physnet1'}
        network_id = network['network']['id']
        self.driver.type_manager.allocate_dynamic_segment(
            self.context.session, network_id, segment)
        dynamic_segment = ml2_db.get_dynamic_segment(self.context.session,
                                                     network_id,
                                                     'physnet1')
        self.assertEqual('vlan', dynamic_segment[driver_api.NETWORK_TYPE])
        self.assertEqual('physnet1',
                         dynamic_segment[driver_api.PHYSICAL_NETWORK])
        # no id requested: the type driver picks one from its pool
        self.assertTrue(dynamic_segment[driver_api.SEGMENTATION_ID] > 0)
        segment2 = {driver_api.NETWORK_TYPE: 'vlan',
                    driver_api.SEGMENTATION_ID: 1234,
                    driver_api.PHYSICAL_NETWORK: 'physnet3'}
        self.driver.type_manager.allocate_dynamic_segment(
            self.context.session, network_id, segment2)
        # an explicitly requested segmentation id is honored
        dynamic_segment = ml2_db.get_dynamic_segment(self.context.session,
                                                     network_id,
                                                     segmentation_id='1234')
        self.assertEqual('vlan', dynamic_segment[driver_api.NETWORK_TYPE])
        self.assertEqual('physnet3',
                         dynamic_segment[driver_api.PHYSICAL_NETWORK])
        self.assertEqual(dynamic_segment[driver_api.SEGMENTATION_ID], 1234)
def test_allocate_dynamic_segment_multiple_physnets(self):
data = {'network': {'name': 'net1',
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
network = self.deserialize(self.fmt,
network_req.get_response(self.api))
segment = {driver_api.NETWORK_TYPE: 'vlan',
driver_api.PHYSICAL_NETWORK: 'physnet1'}
network_id = network['network']['id']
self.driver.type_manager.allocate_dynamic_segment(
self.context.session, network_id, segment)
dynamic_segment = ml2_db.get_dynamic_segment(self.context.session,
network_id,
'physnet1')
self.assertEqual('vlan', dynamic_segment[driver_api.NETWORK_TYPE])
self.assertEqual('physnet1',
dynamic_segment[driver_api.PHYSICAL_NETWORK])
dynamic_segmentation_id = dynamic_segment[driver_api.SEGMENTATION_ID]
self.assertTrue(dynamic_segmentation_id > 0)
dynamic_segment1 = ml2_db.get_dynamic_segment(self.context.session,
network_id,
'physnet1')
dynamic_segment1_id = dynamic_segment1[driver_api.SEGMENTATION_ID]
self.assertEqual(dynamic_segmentation_id, dynamic_segment1_id)
segment2 = {driver_api.NETWORK_TYPE: 'vlan',
driver_api.PHYSICAL_NETWORK: 'physnet2'}
self.driver.type_manager.allocate_dynamic_segment(
self.context.session, network_id, segment2)
dynamic_segment2 = ml2_db.get_dynamic_segment(self.context.session,
network_id,
'physnet2')
dynamic_segmentation2_id = dynamic_segment2[driver_api.SEGMENTATION_ID]
self.assertNotEqual(dynamic_segmentation_id, dynamic_segmentation2_id)
def test_allocate_release_dynamic_segment(self):
data = {'network': {'name': 'net1',
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
network = self.deserialize(self.fmt,
network_req.get_response(self.api))
segment = {driver_api.NETWORK_TYPE: 'vlan',
driver_api.PHYSICAL_NETWORK: 'physnet1'}
network_id = network['network']['id']
self.driver.type_manager.allocate_dynamic_segment(
self.context.session, network_id, segment)
dynamic_segment = ml2_db.get_dynamic_segment(self.context.session,
network_id,
'physnet1')
self.assertEqual('vlan', dynamic_segment[driver_api.NETWORK_TYPE])
self.assertEqual('physnet1',
dynamic_segment[driver_api.PHYSICAL_NETWORK])
dynamic_segmentation_id = dynamic_segment[driver_api.SEGMENTATION_ID]
self.assertTrue(dynamic_segmentation_id > 0)
self.driver.type_manager.release_dynamic_segment(
self.context.session, dynamic_segment[driver_api.ID])
self.assertIsNone(ml2_db.get_dynamic_segment(
self.context.session, network_id, 'physnet1'))
def test_create_network_provider(self):
data = {'network': {'name': 'net1',
pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1,
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
network = self.deserialize(self.fmt,
network_req.get_response(self.api))
self.assertEqual('vlan', network['network'][pnet.NETWORK_TYPE])
self.assertEqual('physnet1', network['network'][pnet.PHYSICAL_NETWORK])
self.assertEqual(1, network['network'][pnet.SEGMENTATION_ID])
self.assertNotIn(mpnet.SEGMENTS, network['network'])
def test_create_network_single_multiprovider(self):
data = {'network': {'name': 'net1',
mpnet.SEGMENTS:
[{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1}],
'tenant_id': 'tenant_one'}}
net_req = self.new_create_request('networks', data)
network = self.deserialize(self.fmt, net_req.get_response(self.api))
self.assertEqual('vlan', network['network'][pnet.NETWORK_TYPE])
self.assertEqual('physnet1', network['network'][pnet.PHYSICAL_NETWORK])
self.assertEqual(1, network['network'][pnet.SEGMENTATION_ID])
self.assertNotIn(mpnet.SEGMENTS, network['network'])
# Tests get_network()
net_req = self.new_show_request('networks', network['network']['id'])
network = self.deserialize(self.fmt, net_req.get_response(self.api))
self.assertEqual('vlan', network['network'][pnet.NETWORK_TYPE])
self.assertEqual('physnet1', network['network'][pnet.PHYSICAL_NETWORK])
self.assertEqual(1, network['network'][pnet.SEGMENTATION_ID])
self.assertNotIn(mpnet.SEGMENTS, network['network'])
def test_create_network_multiprovider(self):
data = {'network': {'name': 'net1',
mpnet.SEGMENTS:
[{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1},
{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 2}],
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
network = self.deserialize(self.fmt,
network_req.get_response(self.api))
tz = network['network'][mpnet.SEGMENTS]
for tz in data['network'][mpnet.SEGMENTS]:
for field in [pnet.NETWORK_TYPE, pnet.PHYSICAL_NETWORK,
pnet.SEGMENTATION_ID]:
self.assertEqual(tz.get(field), tz.get(field))
# Tests get_network()
net_req = self.new_show_request('networks', network['network']['id'])
network = self.deserialize(self.fmt, net_req.get_response(self.api))
tz = network['network'][mpnet.SEGMENTS]
for tz in data['network'][mpnet.SEGMENTS]:
for field in [pnet.NETWORK_TYPE, pnet.PHYSICAL_NETWORK,
pnet.SEGMENTATION_ID]:
self.assertEqual(tz.get(field), tz.get(field))
def test_create_network_with_provider_and_multiprovider_fail(self):
data = {'network': {'name': 'net1',
mpnet.SEGMENTS:
[{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1}],
pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1,
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
res = network_req.get_response(self.api)
self.assertEqual(400, res.status_int)
def test_create_network_duplicate_full_segments(self):
data = {'network': {'name': 'net1',
mpnet.SEGMENTS:
[{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1},
{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1}],
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
res = network_req.get_response(self.api)
self.assertEqual(400, res.status_int)
def test_create_network_duplicate_partial_segments(self):
data = {'network': {'name': 'net1',
mpnet.SEGMENTS:
[{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1'},
{pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1'}],
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
res = network_req.get_response(self.api)
self.assertEqual(201, res.status_int)
def test_release_network_segments(self):
data = {'network': {'name': 'net1',
'admin_state_up': True,
'shared': False,
pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1,
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
res = network_req.get_response(self.api)
network = self.deserialize(self.fmt, res)
network_id = network['network']['id']
segment = {driver_api.NETWORK_TYPE: 'vlan',
driver_api.PHYSICAL_NETWORK: 'physnet2'}
self.driver.type_manager.allocate_dynamic_segment(
self.context.session, network_id, segment)
dynamic_segment = ml2_db.get_dynamic_segment(self.context.session,
network_id,
'physnet2')
self.assertEqual('vlan', dynamic_segment[driver_api.NETWORK_TYPE])
self.assertEqual('physnet2',
dynamic_segment[driver_api.PHYSICAL_NETWORK])
self.assertTrue(dynamic_segment[driver_api.SEGMENTATION_ID] > 0)
with mock.patch.object(type_vlan.VlanTypeDriver,
'release_segment') as rs:
req = self.new_delete_request('networks', network_id)
res = req.get_response(self.api)
self.assertEqual(2, rs.call_count)
self.assertEqual(ml2_db.get_network_segments(
self.context.session, network_id), [])
self.assertIsNone(ml2_db.get_dynamic_segment(
self.context.session, network_id, 'physnet2'))
def test_release_segment_no_type_driver(self):
data = {'network': {'name': 'net1',
'admin_state_up': True,
'shared': False,
pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1,
'tenant_id': 'tenant_one'}}
network_req = self.new_create_request('networks', data)
res = network_req.get_response(self.api)
network = self.deserialize(self.fmt, res)
network_id = network['network']['id']
segment = {driver_api.NETWORK_TYPE: 'faketype',
driver_api.PHYSICAL_NETWORK: 'physnet1',
driver_api.ID: 1}
with mock.patch('neutron.plugins.ml2.managers.LOG') as log:
with mock.patch('neutron.plugins.ml2.managers.db') as db:
db.get_network_segments.return_value = (segment,)
self.driver.type_manager.release_network_segments(
self.context.session, network_id)
log.error.assert_called_once_with(
"Failed to release segment '%s' because "
"network type is not supported.", segment)
def test_create_provider_fail(self):
segment = {pnet.NETWORK_TYPE: None,
pnet.PHYSICAL_NETWORK: 'phys_net',
pnet.SEGMENTATION_ID: None}
with testtools.ExpectedException(exc.InvalidInput):
self.driver.type_manager._process_provider_create(segment)
def test_create_network_plugin(self):
data = {'network': {'name': 'net1',
'admin_state_up': True,
'shared': False,
pnet.NETWORK_TYPE: 'vlan',
pnet.PHYSICAL_NETWORK: 'physnet1',
pnet.SEGMENTATION_ID: 1,
'tenant_id': 'tenant_one'}}
def raise_mechanism_exc(*args, **kwargs):
raise ml2_exc.MechanismDriverError(
method='create_network_postcommit')
with mock.patch('neutron.plugins.ml2.managers.MechanismManager.'
'create_network_precommit', new=raise_mechanism_exc):
with testtools.ExpectedException(ml2_exc.MechanismDriverError):
self.driver.create_network(self.context, data)
def test_extend_dictionary_no_segments(self):
network = dict(name='net_no_segment', id='5', tenant_id='tenant_one')
self.driver.type_manager._extend_network_dict_provider(self.context,
network)
self.assertIsNone(network[pnet.NETWORK_TYPE])
self.assertIsNone(network[pnet.PHYSICAL_NETWORK])
self.assertIsNone(network[pnet.SEGMENTATION_ID])
class TestMl2AllowedAddressPairs(Ml2PluginV2TestCase,
                                 test_pair.TestAllowedAddressPairs):
    """Run the allowed-address-pairs test suite against the ML2 plugin."""

    def setUp(self, plugin=None):
        # Deliberately start the MRO lookup *after* TestAllowedAddressPairs
        # so its own setUp (which loads a different plugin) is bypassed.
        # The ``plugin`` argument is ignored; PLUGIN_NAME is forced instead.
        super(test_pair.TestAllowedAddressPairs, self).setUp(
            plugin=PLUGIN_NAME)
class DHCPOptsTestCase(test_dhcpopts.TestExtraDhcpOpt):
    """Run the extra-DHCP-options tests against the ML2 plugin."""

    def setUp(self, plugin=None):
        # Starts the MRO lookup after ExtraDhcpOptDBTestCase so that class's
        # setUp is bypassed; ``plugin`` is ignored and PLUGIN_NAME is used.
        # NOTE(review): the super() target is ExtraDhcpOptDBTestCase even
        # though this class derives from TestExtraDhcpOpt -- presumably
        # intentional to skip both setUps; verify against the base classes.
        super(test_dhcpopts.ExtraDhcpOptDBTestCase, self).setUp(
            plugin=PLUGIN_NAME)
class Ml2PluginV2FaultyDriverTestCase(test_plugin.NeutronDbPluginV2TestCase):
    """Fixture that enables the 'test' mechanism driver (plus 'logger') so
    faulty post-commit behavior can be injected with mock in subclasses."""

    def setUp(self):
        # Enable the test mechanism driver to ensure that
        # we can successfully call through to all mechanism
        # driver apis.
        config.cfg.CONF.set_override('mechanism_drivers',
                                     ['test', 'logger'],
                                     group='ml2')
        super(Ml2PluginV2FaultyDriverTestCase, self).setUp(PLUGIN_NAME)
        # Ports are expected to be created DOWN in these scenarios.
        self.port_create_status = 'DOWN'
class TestFaultyMechansimDriver(Ml2PluginV2FaultyDriverTestCase):
    """Verify how the ML2 plugin reacts when a mechanism driver's
    post-commit hook raises MechanismDriverError:

    * create_* failures return HTTP 500 and leave no orphaned object;
    * delete_* failures do not block the delete (204) and the remaining
      (logger) driver is still invoked;
    * update_* failures return HTTP 500 but the DB update has already been
      committed, and the remaining driver is still invoked.

    NOTE(review): 'Mechansim' in the class name is a typo for 'Mechanism';
    kept as-is so external references to this test class keep working.
    """

    def test_create_network_faulty(self):
        """Failed create_network_postcommit -> 500, no network left behind."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'create_network_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            tenant_id = str(uuid.uuid4())
            data = {'network': {'name': 'net1',
                                'tenant_id': tenant_id}}
            req = self.new_create_request('networks', data)
            res = req.get_response(self.api)
            self.assertEqual(500, res.status_int)
            error = self.deserialize(self.fmt, res)
            self.assertEqual('MechanismDriverError',
                             error['NeutronError']['type'])
            # The failed create must not leave a partially-created network.
            query_params = "tenant_id=%s" % tenant_id
            nets = self._list('networks', query_params=query_params)
            self.assertFalse(nets['networks'])

    def test_delete_network_faulty(self):
        """Failed delete_network_postcommit still deletes the network (204)
        and the logger driver is still called."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'delete_network_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            with mock.patch.object(mech_logger.LoggerMechanismDriver,
                                   'delete_network_postcommit') as dnp:
                data = {'network': {'name': 'net1',
                                    'tenant_id': 'tenant_one'}}
                network_req = self.new_create_request('networks', data)
                network_res = network_req.get_response(self.api)
                self.assertEqual(201, network_res.status_int)
                network = self.deserialize(self.fmt, network_res)
                net_id = network['network']['id']
                req = self.new_delete_request('networks', net_id)
                res = req.get_response(self.api)
                self.assertEqual(204, res.status_int)
                # Test if other mechanism driver was called
                self.assertTrue(dnp.called)
                self._show('networks', net_id,
                           expected_code=webob.exc.HTTPNotFound.code)

    def test_update_network_faulty(self):
        """Failed update_network_postcommit -> 500, but the update is
        committed and the logger driver is still called."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'update_network_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            with mock.patch.object(mech_logger.LoggerMechanismDriver,
                                   'update_network_postcommit') as unp:
                data = {'network': {'name': 'net1',
                                    'tenant_id': 'tenant_one'}}
                network_req = self.new_create_request('networks', data)
                network_res = network_req.get_response(self.api)
                self.assertEqual(201, network_res.status_int)
                network = self.deserialize(self.fmt, network_res)
                net_id = network['network']['id']
                new_name = 'a_brand_new_name'
                data = {'network': {'name': new_name}}
                req = self.new_update_request('networks', data, net_id)
                res = req.get_response(self.api)
                self.assertEqual(500, res.status_int)
                error = self.deserialize(self.fmt, res)
                self.assertEqual('MechanismDriverError',
                                 error['NeutronError']['type'])
                # Test if other mechanism driver was called
                self.assertTrue(unp.called)
                # The rename happened despite the post-commit failure.
                net = self._show('networks', net_id)
                self.assertEqual(new_name, net['network']['name'])
                self._delete('networks', net_id)

    def test_create_subnet_faulty(self):
        """Failed create_subnet_postcommit -> 500, no subnet left behind."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'create_subnet_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            with self.network() as network:
                net_id = network['network']['id']
                data = {'subnet': {'network_id': net_id,
                                   'cidr': '10.0.20.0/24',
                                   'ip_version': '4',
                                   'name': 'subnet1',
                                   'tenant_id':
                                   network['network']['tenant_id'],
                                   'gateway_ip': '10.0.20.1'}}
                req = self.new_create_request('subnets', data)
                res = req.get_response(self.api)
                self.assertEqual(500, res.status_int)
                error = self.deserialize(self.fmt, res)
                self.assertEqual('MechanismDriverError',
                                 error['NeutronError']['type'])
                # The failed create must not leave a partial subnet.
                query_params = "network_id=%s" % net_id
                subnets = self._list('subnets', query_params=query_params)
                self.assertFalse(subnets['subnets'])

    def test_delete_subnet_faulty(self):
        """Failed delete_subnet_postcommit still deletes the subnet (204)
        and the logger driver is still called."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'delete_subnet_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            with mock.patch.object(mech_logger.LoggerMechanismDriver,
                                   'delete_subnet_postcommit') as dsp:
                with self.network() as network:
                    data = {'subnet': {'network_id':
                                       network['network']['id'],
                                       'cidr': '10.0.20.0/24',
                                       'ip_version': '4',
                                       'name': 'subnet1',
                                       'tenant_id':
                                       network['network']['tenant_id'],
                                       'gateway_ip': '10.0.20.1'}}
                    subnet_req = self.new_create_request('subnets', data)
                    subnet_res = subnet_req.get_response(self.api)
                    self.assertEqual(201, subnet_res.status_int)
                    subnet = self.deserialize(self.fmt, subnet_res)
                    subnet_id = subnet['subnet']['id']
                    req = self.new_delete_request('subnets', subnet_id)
                    res = req.get_response(self.api)
                    self.assertEqual(204, res.status_int)
                    # Test if other mechanism driver was called
                    self.assertTrue(dsp.called)
                    self._show('subnets', subnet_id,
                               expected_code=webob.exc.HTTPNotFound.code)

    def test_update_subnet_faulty(self):
        """Failed update_subnet_postcommit -> 500, but the update is
        committed and the logger driver is still called."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'update_subnet_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            with mock.patch.object(mech_logger.LoggerMechanismDriver,
                                   'update_subnet_postcommit') as usp:
                with self.network() as network:
                    data = {'subnet': {'network_id':
                                       network['network']['id'],
                                       'cidr': '10.0.20.0/24',
                                       'ip_version': '4',
                                       'name': 'subnet1',
                                       'tenant_id':
                                       network['network']['tenant_id'],
                                       'gateway_ip': '10.0.20.1'}}
                    subnet_req = self.new_create_request('subnets', data)
                    subnet_res = subnet_req.get_response(self.api)
                    self.assertEqual(201, subnet_res.status_int)
                    subnet = self.deserialize(self.fmt, subnet_res)
                    subnet_id = subnet['subnet']['id']
                    new_name = 'a_brand_new_name'
                    data = {'subnet': {'name': new_name}}
                    req = self.new_update_request('subnets', data, subnet_id)
                    res = req.get_response(self.api)
                    self.assertEqual(500, res.status_int)
                    error = self.deserialize(self.fmt, res)
                    self.assertEqual('MechanismDriverError',
                                     error['NeutronError']['type'])
                    # Test if other mechanism driver was called
                    self.assertTrue(usp.called)
                    # The rename happened despite the post-commit failure.
                    subnet = self._show('subnets', subnet_id)
                    self.assertEqual(new_name, subnet['subnet']['name'])
                    self._delete('subnets', subnet['subnet']['id'])

    def test_create_port_faulty(self):
        """Failed create_port_postcommit -> 500, no port left behind."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'create_port_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            with self.network() as network:
                net_id = network['network']['id']
                data = {'port': {'network_id': net_id,
                                 'tenant_id':
                                 network['network']['tenant_id'],
                                 'name': 'port1',
                                 'admin_state_up': 1,
                                 'fixed_ips': []}}
                req = self.new_create_request('ports', data)
                res = req.get_response(self.api)
                self.assertEqual(500, res.status_int)
                error = self.deserialize(self.fmt, res)
                self.assertEqual('MechanismDriverError',
                                 error['NeutronError']['type'])
                # The failed create must not leave a partial port.
                query_params = "network_id=%s" % net_id
                ports = self._list('ports', query_params=query_params)
                self.assertFalse(ports['ports'])

    def test_update_port_faulty(self):
        """Failed update_port_postcommit -> 500, but the update is
        committed and the logger driver is still called."""
        with mock.patch.object(mech_test.TestMechanismDriver,
                               'update_port_postcommit',
                               side_effect=ml2_exc.MechanismDriverError):
            with mock.patch.object(mech_logger.LoggerMechanismDriver,
                                   'update_port_postcommit') as upp:
                with self.network() as network:
                    data = {'port': {'network_id': network['network']['id'],
                                     'tenant_id':
                                     network['network']['tenant_id'],
                                     'name': 'port1',
                                     'admin_state_up': 1,
                                     'fixed_ips': []}}
                    port_req = self.new_create_request('ports', data)
                    port_res = port_req.get_response(self.api)
                    self.assertEqual(201, port_res.status_int)
                    port = self.deserialize(self.fmt, port_res)
                    port_id = port['port']['id']
                    new_name = 'a_brand_new_name'
                    data = {'port': {'name': new_name}}
                    req = self.new_update_request('ports', data, port_id)
                    res = req.get_response(self.api)
                    self.assertEqual(500, res.status_int)
                    error = self.deserialize(self.fmt, res)
                    self.assertEqual('MechanismDriverError',
                                     error['NeutronError']['type'])
                    # Test if other mechanism driver was called
                    self.assertTrue(upp.called)
                    # The rename happened despite the post-commit failure.
                    port = self._show('ports', port_id)
                    self.assertEqual(new_name, port['port']['name'])
                    self._delete('ports', port['port']['id'])
|
|
"""
Test lldb-mi startup options.
"""
from __future__ import print_function
import lldbmi_testcase
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
import os
class MiStartupOptionsTestCase(lldbmi_testcase.MiTestCaseBase):
    """Exercise lldb-mi command-line startup options: positional executable
    arguments, --source command scripts, and --log / --log-dir."""

    mydir = TestBase.compute_mydir(__file__)

    @skipIfRemote  # We do not currently support remote debugging via the MI.
    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_executable_option_file(self):
        """Test that 'lldb-mi --interpreter %s' loads executable file."""
        self.spawnLldbMi(args="%s" % self.myexe)

        # Test that the executable is loaded when file was specified
        self.expect("-file-exec-and-symbols \"%s\"" % self.myexe)
        self.expect("\^done")

        # Test that lldb-mi is ready when executable was loaded
        self.expect(self.child_prompt, exactly=True)

        # Run to main
        self.runCmd("-break-insert -f main")
        self.expect("\^done,bkpt={number=\"1\"")
        self.runCmd("-exec-run")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"breakpoint-hit\"")

        # Continue to program exit
        self.runCmd("-exec-continue")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"exited-normally\"")

    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_executable_option_unknown_file(self):
        """Test that 'lldb-mi --interpreter %s' fails on unknown executable file."""
        # Prepare path to executable
        path = "unknown_file"
        self.spawnLldbMi(args="%s" % path)

        # Test that the executable isn't loaded when unknown file was specified
        self.expect("-file-exec-and-symbols \"%s\"" % path)
        self.expect(
            "\^error,msg=\"Command 'file-exec-and-symbols'. Target binary '%s' is invalid. error: unable to find executable for '%s'\"" %
            (path, path))

        # Test that lldb-mi is ready when executable was loaded
        self.expect(self.child_prompt, exactly=True)

    @skipIfRemote  # We do not currently support remote debugging via the MI.
    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_executable_option_absolute_path(self):
        """Test that 'lldb-mi --interpreter %s' loads executable which is specified via absolute path."""
        # self.myexe is already an absolute path here.
        self.spawnLldbMi(args="%s" % self.myexe)

        # Test that the executable is loaded when file was specified using
        # absolute path
        self.expect("-file-exec-and-symbols \"%s\"" % self.myexe)
        self.expect("\^done")

        # Test that lldb-mi is ready when executable was loaded
        self.expect(self.child_prompt, exactly=True)

        # Run
        self.runCmd("-exec-run")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"exited-normally\"")

    @skipIfRemote  # We do not currently support remote debugging via the MI.
    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_executable_option_relative_path(self):
        """Test that 'lldb-mi --interpreter %s' loads executable which is specified via relative path."""
        # Prepare path to executable, relative to the build directory.
        path = os.path.relpath(self.myexe, self.getBuildDir())
        self.spawnLldbMi(args="%s" % path)

        # Test that the executable is loaded when file was specified using
        # relative path
        self.expect("-file-exec-and-symbols \"%s\"" % path)
        self.expect("\^done")

        # Test that lldb-mi is ready when executable was loaded
        self.expect(self.child_prompt, exactly=True)

        # Run
        self.runCmd("-exec-run")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"exited-normally\"")

    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_executable_option_unknown_path(self):
        """Test that 'lldb-mi --interpreter %s' fails on executable file which is specified via unknown path."""
        # Prepare path to executable.  NOTE(review): the concatenation has no
        # path separator on purpose -- it just needs to be a nonexistent path.
        path = "unknown_dir" + self.myexe
        self.spawnLldbMi(args="%s" % path)

        # Test that the executable isn't loaded when file was specified using
        # unknown path
        self.expect("-file-exec-and-symbols \"%s\"" % path)
        self.expect(
            "\^error,msg=\"Command 'file-exec-and-symbols'. Target binary '%s' is invalid. error: unable to find executable for '%s'\"" %
            (path, path))

        # Test that lldb-mi is ready when executable was loaded
        self.expect(self.child_prompt, exactly=True)

    def copyScript(self, sourceFile):
        """Copy *sourceFile* next to the test executable (as
        '<sourceFile>.script'), replacing 'a.out' with the full executable
        path, and return the destination path."""
        destFile = os.path.join(os.path.dirname(self.myexe),
                                sourceFile + '.script')
        with open(sourceFile, 'r') as src:
            with open(destFile, 'w+') as dest:
                dest.write(src.read().replace("a.out", self.myexe))
        return destFile

    @skipIfRemote  # We do not currently support remote debugging via the MI.
    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfLinux  # llvm.org/pr22841: lldb-mi tests fail on all Linux buildbots
    @skipIfDarwin
    def test_lldbmi_source_option_start_script(self):
        """Test that 'lldb-mi --interpreter' can execute user's commands after initial commands were executed."""
        # Prepared source file
        sourceFile = self.copyScript("start_script")
        self.spawnLldbMi(args="--source %s" % sourceFile)

        # After '-file-exec-and-symbols a.out'
        self.expect("-file-exec-and-symbols %s" % self.myexe)
        self.expect("\^done")

        # After '-break-insert -f main'
        self.expect("-break-insert -f main")
        self.expect("\^done,bkpt={number=\"1\"")

        # After '-exec-run'
        self.expect("-exec-run")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"breakpoint-hit\"")

        # After '-break-insert main.cpp:BP_return'
        line = line_number('main.cpp', '//BP_return')
        self.expect("-break-insert main.cpp:%d" % line)
        self.expect("\^done,bkpt={number=\"2\"")

        # After '-exec-continue'
        self.expect("-exec-continue")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"breakpoint-hit\"")

        # Test that lldb-mi is ready after execution of --source start_script
        self.expect(self.child_prompt, exactly=True)

        # Try to evaluate 'a' expression
        self.runCmd("-data-evaluate-expression a")
        self.expect("\^done,value=\"10\"")
        self.expect(self.child_prompt, exactly=True)
        os.unlink(sourceFile)

    @skipIfRemote  # We do not currently support remote debugging via the MI.
    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfLinux  # llvm.org/pr22841: lldb-mi tests fail on all Linux buildbots
    @skipIfDarwin
    def test_lldbmi_source_option_start_script_exit(self):
        """Test that 'lldb-mi --interpreter' can execute a prepared file which passed via --source option."""
        # Prepared source file
        sourceFile = self.copyScript("start_script_exit")
        self.spawnLldbMi(args="--source %s" % sourceFile)

        # After '-file-exec-and-symbols a.out'
        self.expect("-file-exec-and-symbols %s" % self.myexe)
        self.expect("\^done")

        # After '-break-insert -f main'
        self.expect("-break-insert -f main")
        self.expect("\^done,bkpt={number=\"1\"")

        # After '-exec-run'
        self.expect("-exec-run")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"breakpoint-hit\"")

        # After '-break-insert main.cpp:BP_return'
        line = line_number('main.cpp', '//BP_return')
        self.expect("-break-insert main.cpp:%d" % line)
        self.expect("\^done,bkpt={number=\"2\"")

        # After '-exec-continue'
        self.expect("-exec-continue")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"breakpoint-hit\"")

        # After '-data-evaluate-expression a'
        self.expect("-data-evaluate-expression a")
        self.expect("\^done,value=\"10\"")

        # After '-gdb-exit'
        self.expect("-gdb-exit")
        self.expect("\^exit")
        self.expect("\*stopped,reason=\"exited-normally\"")
        os.unlink(sourceFile)

    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_source_option_start_script_error(self):
        """Test that 'lldb-mi --interpreter' stops execution of initial commands in case of error."""
        # Prepared source file
        sourceFile = self.copyScript("start_script_error")
        self.spawnLldbMi(args="--source %s" % sourceFile)

        # After '-file-exec-and-symbols a.out'
        self.expect("-file-exec-and-symbols %s" % self.myexe)
        self.expect("\^done")

        # After '-break-ins -f main'
        self.expect("-break-ins -f main")
        self.expect("\^error")

        # Test that lldb-mi is ready after execution of --source start_script
        self.expect(self.child_prompt, exactly=True)
        os.unlink(sourceFile)

    @skipIfRemote  # We do not currently support remote debugging via the MI.
    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_log_option(self):
        """Test that 'lldb-mi --log' creates a log file in the build directory."""
        # FIX: the docstring previously said "current directory" although the
        # test looks for the log in the build directory (getBuildDir()).
        logDirectory = self.getBuildDir()
        self.spawnLldbMi(args="%s --log" % self.myexe)

        # Test that the executable is loaded when file was specified
        self.expect("-file-exec-and-symbols \"%s\"" % self.myexe)
        self.expect("\^done")

        # Test that lldb-mi is ready when executable was loaded
        self.expect(self.child_prompt, exactly=True)

        # Run
        self.runCmd("-exec-run")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"exited-normally\"")

        # Check log file is created.  ``os`` is already imported at module
        # scope; the redundant function-local ``import os`` was removed.
        import glob
        logFile = glob.glob(logDirectory + "/lldb-mi-*.log")
        if not logFile:
            self.fail("log file not found")

        # Delete log
        for f in logFile:
            os.remove(f)

    @skipIfRemote  # We do not currently support remote debugging via the MI.
    @skipIfWindows  # llvm.org/pr24452: Get lldb-mi tests working on Windows
    @skipIfFreeBSD  # llvm.org/pr22411: Failure presumably due to known thread races
    @skipIfDarwin
    def test_lldbmi_log_directory_option(self):
        """Test that 'lldb-mi --log --log-dir' creates a log file in the directory specified by --log-dir."""
        # Create log in temp directory
        import tempfile
        logDirectory = tempfile.gettempdir()
        self.spawnLldbMi(
            args="%s --log --log-dir=%s" %
            (self.myexe, logDirectory))

        # Test that the executable is loaded when file was specified
        self.expect("-file-exec-and-symbols \"%s\"" % self.myexe)
        self.expect("\^done")

        # Test that lldb-mi is ready when executable was loaded
        self.expect(self.child_prompt, exactly=True)

        # Run
        self.runCmd("-exec-run")
        self.expect("\^running")
        self.expect("\*stopped,reason=\"exited-normally\"")

        # Check log file is created.  ``os`` is already imported at module
        # scope; the redundant function-local ``import os`` was removed.
        import glob
        logFile = glob.glob(logDirectory + "/lldb-mi-*.log")
        if not logFile:
            self.fail("log file not found")

        # Delete log
        for f in logFile:
            os.remove(f)
|
|
"""
NetworkX to d3.js Force Layout
==============================
MPLD3 Plugin to convert a NetworkX graph to a force layout.
This is an example demoed `here
<http://blog.kdheepak.com/mpld3-networkx-d3js-force-layout.html>`_
You can download the plugin from the Github repo `here
<https://github.com/kdheepak/mpld3_plugins/blob/master/mpld3_plugins/plugins/networkxd3forcelayout.py>`_
BSD 3-Clause License
Copyright (C) 2016 Dheepak Krishnamurthy
"""
import mpld3
# Zachary's Karate Club social network in node-link (d3 force-layout)
# format: 34 club members as 'nodes' (sized by degree, colored by the
# faction -- Mr. Hi's students in purple, the Officer's side in orange)
# and the friendship ties between them as undirected 'links'.
graph = {'directed': False,
         'graph': {'name': "Zachary's Karate Club"},
         'links': [{'source': 0, 'target': 1},
                   {'source': 0, 'target': 2},
                   {'source': 0, 'target': 3},
                   {'source': 0, 'target': 4},
                   {'source': 0, 'target': 5},
                   {'source': 0, 'target': 6},
                   {'source': 0, 'target': 7},
                   {'source': 0, 'target': 8},
                   {'source': 0, 'target': 10},
                   {'source': 0, 'target': 11},
                   {'source': 0, 'target': 12},
                   {'source': 0, 'target': 13},
                   {'source': 0, 'target': 17},
                   {'source': 0, 'target': 19},
                   {'source': 0, 'target': 21},
                   {'source': 0, 'target': 31},
                   {'source': 1, 'target': 2},
                   {'source': 1, 'target': 3},
                   {'source': 1, 'target': 7},
                   {'source': 1, 'target': 13},
                   {'source': 1, 'target': 17},
                   {'source': 1, 'target': 19},
                   {'source': 1, 'target': 21},
                   {'source': 1, 'target': 30},
                   {'source': 2, 'target': 3},
                   {'source': 2, 'target': 32},
                   {'source': 2, 'target': 7},
                   {'source': 2, 'target': 8},
                   {'source': 2, 'target': 9},
                   {'source': 2, 'target': 13},
                   {'source': 2, 'target': 27},
                   {'source': 2, 'target': 28},
                   {'source': 3, 'target': 7},
                   {'source': 3, 'target': 12},
                   {'source': 3, 'target': 13},
                   {'source': 4, 'target': 10},
                   {'source': 4, 'target': 6},
                   {'source': 5, 'target': 16},
                   {'source': 5, 'target': 10},
                   {'source': 5, 'target': 6},
                   {'source': 6, 'target': 16},
                   {'source': 8, 'target': 32},
                   {'source': 8, 'target': 30},
                   {'source': 8, 'target': 33},
                   {'source': 9, 'target': 33},
                   {'source': 13, 'target': 33},
                   {'source': 14, 'target': 32},
                   {'source': 14, 'target': 33},
                   {'source': 15, 'target': 32},
                   {'source': 15, 'target': 33},
                   {'source': 18, 'target': 32},
                   {'source': 18, 'target': 33},
                   {'source': 19, 'target': 33},
                   {'source': 20, 'target': 32},
                   {'source': 20, 'target': 33},
                   {'source': 22, 'target': 32},
                   {'source': 22, 'target': 33},
                   {'source': 23, 'target': 32},
                   {'source': 23, 'target': 25},
                   {'source': 23, 'target': 27},
                   {'source': 23, 'target': 29},
                   {'source': 23, 'target': 33},
                   {'source': 24, 'target': 25},
                   {'source': 24, 'target': 27},
                   {'source': 24, 'target': 31},
                   {'source': 25, 'target': 31},
                   {'source': 26, 'target': 33},
                   {'source': 26, 'target': 29},
                   {'source': 27, 'target': 33},
                   {'source': 28, 'target': 33},
                   {'source': 28, 'target': 31},
                   {'source': 29, 'target': 32},
                   {'source': 29, 'target': 33},
                   {'source': 30, 'target': 33},
                   {'source': 30, 'target': 32},
                   {'source': 31, 'target': 33},
                   {'source': 31, 'target': 32},
                   {'source': 32, 'target': 33}],
         'multigraph': False,
         'nodes': [{'club': 'Mr. Hi', 'color': 'purple', 'id': 0, 'size': 16},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 1, 'size': 9},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 2, 'size': 10},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 3, 'size': 6},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 4, 'size': 3},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 5, 'size': 4},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 6, 'size': 4},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 7, 'size': 4},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 8, 'size': 5},
                   {'club': 'Officer', 'color': 'orange', 'id': 9, 'size': 2},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 10, 'size': 3},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 11, 'size': 1},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 12, 'size': 2},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 13, 'size': 5},
                   {'club': 'Officer', 'color': 'orange', 'id': 14, 'size': 2},
                   {'club': 'Officer', 'color': 'orange', 'id': 15, 'size': 2},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 16, 'size': 2},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 17, 'size': 2},
                   {'club': 'Officer', 'color': 'orange', 'id': 18, 'size': 2},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 19, 'size': 3},
                   {'club': 'Officer', 'color': 'orange', 'id': 20, 'size': 2},
                   {'club': 'Mr. Hi', 'color': 'purple', 'id': 21, 'size': 2},
                   {'club': 'Officer', 'color': 'orange', 'id': 22, 'size': 2},
                   {'club': 'Officer', 'color': 'orange', 'id': 23, 'size': 5},
                   {'club': 'Officer', 'color': 'orange', 'id': 24, 'size': 3},
                   {'club': 'Officer', 'color': 'orange', 'id': 25, 'size': 3},
                   {'club': 'Officer', 'color': 'orange', 'id': 26, 'size': 2},
                   {'club': 'Officer', 'color': 'orange', 'id': 27, 'size': 4},
                   {'club': 'Officer', 'color': 'orange', 'id': 28, 'size': 3},
                   {'club': 'Officer', 'color': 'orange', 'id': 29, 'size': 4},
                   {'club': 'Officer', 'color': 'orange', 'id': 30, 'size': 4},
                   {'club': 'Officer', 'color': 'orange', 'id': 31, 'size': 6},
                   {'club': 'Officer', 'color': 'orange', 'id': 32, 'size': 12},
                   {'club': 'Officer', 'color': 'orange', 'id': 33, 'size': 17}]}
class NetworkXD3ForceLayout(mpld3.plugins.PluginBase):
    """A NetworkX to D3 Force Layout Plugin.

    Renders a node-link graph dict inside an mpld3 axes and animates it
    with the d3 force simulation (d3 v4 API: forceSimulation, forceLink,
    forceManyBody, ...).  The Python side only packages the constructor
    arguments into ``self.dict_``; all drawing happens in JAVASCRIPT.
    """
    # JavaScript half of the plugin.  mpld3 injects this string verbatim
    # into the generated page; `this.props` mirrors the keys of
    # self.dict_ built in __init__ below.  Do not edit casually: the
    # prop names must stay in sync with the Python dict.
    JAVASCRIPT = """
    mpld3.register_plugin("networkxd3forcelayout", NetworkXD3ForceLayoutPlugin);
    NetworkXD3ForceLayoutPlugin.prototype = Object.create(mpld3.Plugin.prototype);
    NetworkXD3ForceLayoutPlugin.prototype.constructor = NetworkXD3ForceLayoutPlugin;
    NetworkXD3ForceLayoutPlugin.prototype.requiredProps = ["graph",
                                                           "ax_id",];
    NetworkXD3ForceLayoutPlugin.prototype.defaultProps = { coordinates: "data",
                                                           gravity: 1,
                                                           charge: -30,
                                                           link_strength: 1,
                                                           friction: 0.9,
                                                           link_distance: 20,
                                                           maximum_stroke_width: 2,
                                                           minimum_stroke_width: 1,
                                                           nominal_stroke_width: 1,
                                                           maximum_radius: 10,
                                                           minimum_radius: 1,
                                                           nominal_radius: 5,
                                                         };
    function NetworkXD3ForceLayoutPlugin(fig, props){
        mpld3.Plugin.call(this, fig, props);
    };
    var color = d3.scaleOrdinal(d3.schemeCategory10);
    NetworkXD3ForceLayoutPlugin.prototype.zoomScaleProp = function (nominal_prop, minimum_prop, maximum_prop) {
        var zoom = this.ax.zoom;
        scalerFunction = function() {
            var prop = nominal_prop;
            if (nominal_prop*zoom.scale()>maximum_prop) prop = maximum_prop/zoom.scale();
            if (nominal_prop*zoom.scale()<minimum_prop) prop = minimum_prop/zoom.scale();
            return prop
        }
        return scalerFunction;
    }
    NetworkXD3ForceLayoutPlugin.prototype.setupDefaults = function () {
        this.zoomScaleStroke = this.zoomScaleProp(this.props.nominal_stroke_width,
                                                  this.props.minimum_stroke_width,
                                                  this.props.maximum_stroke_width)
        this.zoomScaleRadius = this.zoomScaleProp(this.props.nominal_radius,
                                                  this.props.minimum_radius,
                                                  this.props.maximum_radius)
    }
    NetworkXD3ForceLayoutPlugin.prototype.zoomed = function() {
        this.tick()
    }
    NetworkXD3ForceLayoutPlugin.prototype.draw = function(){
        plugin = this
        DEFAULT_NODE_SIZE = this.props.nominal_radius;
        var height = this.fig.height
        var width = this.fig.width
        var graph = this.props.graph
        var gravity = this.props.gravity.toFixed()
        var charge = this.props.charge.toFixed()
        var link_distance = this.props.link_distance.toFixed()
        var link_strength = this.props.link_strength.toFixed()
        var friction = this.props.friction.toFixed()
        this.ax = mpld3.get_element(this.props.ax_id, this.fig)
        var ax = this.ax;
        this.ax.elements.push(this)
        ax_obj = this.ax;
        var width = d3.max(ax.x.range()) - d3.min(ax.x.range()),
            height = d3.max(ax.y.range()) - d3.min(ax.y.range());
        var color = d3.scaleOrdinal(d3.schemeCategory10);
        this.xScale = d3.scaleLinear().domain([0, 1]).range([0, width]) // ax.x;
        this.yScale = d3.scaleLinear().domain([0, 1]).range([height, 0]) // ax.y;
        this.force = d3.forceSimulation();
        this.svg = this.ax.axes.append("g");
        for(var i = 0; i < graph.nodes.length; i++){
            var node = graph.nodes[i];
            if (node.hasOwnProperty('x')) {
                node.x = this.ax.x(node.x);
            }
            if (node.hasOwnProperty('y')) {
                node.y = this.ax.y(node.y);
            }
        }
        this.force
            .force("link",
                   d3.forceLink()
                       .id(function(d) { return d.index })
                       .strength(link_strength)
                       .distance(link_distance)
                   )
            .force("collide", d3.forceCollide(function(d){return d.r + 8 }).iterations(16))
            .force("charge", d3.forceManyBody().strength(charge))
            .force("center", d3.forceCenter(width / 2, height / 2))
            .force("y", d3.forceY(0))
            .force("x", d3.forceX(0));
        this.force.nodes(graph.nodes);
        this.force.force("link").links(graph.links);
        this.link = this.svg.selectAll(".link")
            .data(graph.links)
            .enter().append("line")
            .attr("class", "link")
            .attr("stroke", "black")
            .style("stroke-width", function (d) { return Math.sqrt(d.value); });
        this.node = this.svg.selectAll(".node")
            .data(graph.nodes)
            .enter().append("circle")
            .attr("class", "node")
            .attr("r", function(d) {return d.size === undefined ? DEFAULT_NODE_SIZE : d.size ;})
            .style("fill", function (d) { return color(d); });
        this.node.append("title")
            .text(function (d) { return d.name; });
        this.force.on("tick", this.tick.bind(this));
        this.setupDefaults()
    };
    NetworkXD3ForceLayoutPlugin.prototype.tick = function() {
        this.link.attr("x1", function (d) { return this.ax.x(this.xScale.invert(d.source.x)); }.bind(this))
            .attr("y1", function (d) { return this.ax.y(this.yScale.invert(d.source.y)); }.bind(this))
            .attr("x2", function (d) { return this.ax.x(this.xScale.invert(d.target.x)); }.bind(this))
            .attr("y2", function (d) { return this.ax.y(this.yScale.invert(d.target.y)); }.bind(this));
        this.node.attr("transform", function (d) {
            return "translate(" + this.ax.x(this.xScale.invert(d.x)) + "," + this.ax.y(this.yScale.invert(d.y)) + ")";
        }.bind(this)
                       );
    }
    """
    def __init__(self, graph, ax,
                 gravity=1,
                 link_distance=20,
                 charge=-30,
                 node_size=5,
                 friction=0.9):
        """Attach the force-layout plugin for *graph* to axes *ax*.

        `graph` is presumably a node-link dict with "nodes"/"links" keys
        (as used by the JS side above) — confirm against the caller.
        The remaining keyword arguments tune the d3 force simulation and
        are forwarded to JavaScript via ``self.dict_``.
        """
        # Keys here must match the prop names read by JAVASCRIPT above.
        self.dict_ = {"type": "networkxd3forcelayout",
                      "graph": graph,
                      "ax_id": mpld3.utils.get_id(ax),
                      "gravity": gravity,
                      "charge": charge,
                      "friction": friction,
                      "link_distance": link_distance,
                      "link_strength": link_strength,
                      "nominal_radius": node_size}
# Demo: show the graph defined above with the force-layout plugin.
import matplotlib.pyplot as plt
fig, axs = plt.subplots(1, 1, figsize=(10, 10))
ax = axs  # subplots(1, 1) returns a single Axes, not an array
mpld3.plugins.connect(fig, NetworkXD3ForceLayout(graph,
                                                 ax,
                                                 gravity=.5,
                                                 link_distance=20,
                                                 charge=-600,
                                                 friction=1
                                                 )
                      )
mpld3.show()
|
|
from __future__ import absolute_import
from __future__ import with_statement
import socket
from mock import patch
from kombu import common
from kombu.common import (Broadcast, maybe_declare, declared_entities,
send_reply, isend_reply, collect_replies)
from .utils import TestCase
from .utils import ContextMock, Mock, MockPool
class test_Broadcast(TestCase):
    """Tests for the Broadcast fanout-queue helper."""

    def test_arguments(self):
        """An implicit queue name is generated; an explicit one is kept."""
        queue = Broadcast(name="test_Broadcast")
        self.assertEqual(queue.alias, "test_Broadcast")
        self.assertTrue(queue.name.startswith("bcast."))
        self.assertTrue(queue.auto_delete)
        exchange = queue.exchange
        self.assertEqual(exchange.name, "test_Broadcast")
        self.assertEqual(exchange.type, "fanout")

        queue = Broadcast("test_Broadcast", "explicit_queue_name")
        self.assertEqual(queue.name, "explicit_queue_name")
        self.assertEqual(queue.exchange.name, "test_Broadcast")
class test_maybe_declare(TestCase):
    """Tests for maybe_declare's declaration caching."""

    def _entity(self, cacheable, bound=True):
        # Build a mock entity with the two attributes maybe_declare reads.
        entity = Mock()
        entity.can_cache_declaration = cacheable
        entity.is_bound = bound
        return entity

    def test_cacheable(self):
        chan = Mock()
        entity = self._entity(cacheable=True)
        # A cacheable entity is declared once; the second call is a no-op.
        for expected in (1, 1):
            maybe_declare(entity, chan)
            self.assertEqual(entity.declare.call_count, expected)
        self.assertIn(entity, declared_entities[chan.connection.client])

    def test_uncacheable(self):
        chan = Mock()
        entity = self._entity(cacheable=False)
        # An uncacheable entity is re-declared on every call.
        for expected in (1, 2):
            maybe_declare(entity, chan)
            self.assertEqual(entity.declare.call_count, expected)

    def test_binds_entities(self):
        chan = Mock()
        entity = self._entity(cacheable=True, bound=False)
        maybe_declare(entity, chan)
        entity.bind.assert_called_with(chan)

    def test_with_retry(self):
        chan = Mock()
        entity = self._entity(cacheable=True)
        maybe_declare(entity, chan, retry=True)
        self.assertTrue(chan.connection.client.ensure.call_count)
class test_replies(TestCase):
    """Tests for the send_reply/isend_reply/collect_replies helpers."""

    def test_send_reply(self):
        # The reply must be published using the request's reply_to as the
        # routing key, echoing its correlation_id, with a serializer
        # derived from the request content type.
        req = Mock()
        req.content_type = "application/json"
        req.properties = {"reply_to": "hello",
                          "correlation_id": "world"}
        exchange = Mock()
        exchange.is_bound = True
        producer = Mock()
        send_reply(exchange, req, {"hello": "world"}, producer)
        self.assertTrue(producer.publish.call_count)
        args = producer.publish.call_args
        self.assertDictEqual(args[0][0], {"hello": "world"})
        self.assertDictEqual(args[1], {"exchange": exchange,
                                       "routing_key": "hello",
                                       "correlation_id": "world",
                                       "serializer": "json"})
        exchange.declare.assert_called_with()

    @patch("kombu.common.ipublish")
    def test_isend_reply(self, ipublish):
        # isend_reply delegates to ipublish with send_reply as the callable.
        pool, exchange, req, msg, props = (Mock(), Mock(), Mock(),
                                           Mock(), Mock())
        isend_reply(pool, exchange, req, msg, props)
        ipublish.assert_called_with(pool, send_reply,
                                    (exchange, req, msg), props)

    @patch("kombu.common.itermessages")
    def test_collect_replies_with_ack(self, itermessages):
        # With no_ack=False each message is acked and, on exhaustion, the
        # channel is told a reply was received for the queue.
        conn, channel, queue = Mock(), Mock(), Mock()
        body, message = Mock(), Mock()
        itermessages.return_value = [(body, message)]
        it = collect_replies(conn, channel, queue, no_ack=False)
        m = it.next()
        self.assertIs(m, body)
        itermessages.assert_called_with(conn, channel, queue, no_ack=False)
        message.ack.assert_called_with()
        with self.assertRaises(StopIteration):
            it.next()
        channel.after_reply_message_received.assert_called_with(queue.name)

    @patch("kombu.common.itermessages")
    def test_collect_replies_no_ack(self, itermessages):
        # Default is no_ack=True: bodies are yielded without acking.
        conn, channel, queue = Mock(), Mock(), Mock()
        body, message = Mock(), Mock()
        itermessages.return_value = [(body, message)]
        it = collect_replies(conn, channel, queue)
        m = it.next()
        self.assertIs(m, body)
        itermessages.assert_called_with(conn, channel, queue, no_ack=True)
        self.assertFalse(message.ack.called)

    @patch("kombu.common.itermessages")
    def test_collect_replies_no_replies(self, itermessages):
        # An empty reply stream must not trigger the after-reply callback.
        conn, channel, queue = Mock(), Mock(), Mock()
        itermessages.return_value = []
        it = collect_replies(conn, channel, queue)
        with self.assertRaises(StopIteration):
            it.next()
        self.assertFalse(channel.after_reply_message_received.called)
class test_insured(TestCase):
    """Tests for the insured/ipublish connection-recovery helpers."""

    @patch("kombu.common.insured_logger")
    def test_ensure_errback(self, insured_logger):
        # Connection errors are reported through the insured logger.
        common._ensure_errback("foo", 30)
        self.assertTrue(insured_logger.error.called)

    def test_revive_connection(self):
        # on_revive receives the revived channel; a None callback is
        # tolerated (second call must not raise).
        on_revive = Mock()
        channel = Mock()
        common.revive_connection(Mock(), channel, on_revive)
        on_revive.assert_called_with(channel)
        common.revive_connection(Mock(), channel, None)

    def test_revive_producer(self):
        # Same contract as revive_connection, for producers.
        on_revive = Mock()
        channel = Mock()
        common.revive_producer(Mock(), channel, on_revive)
        on_revive.assert_called_with(channel)
        common.revive_producer(Mock(), channel, None)

    def get_insured_mocks(self, insured_returns=("works", "ignored")):
        # Helper: pool whose connection.autoretry returns `insured`,
        # which itself returns `insured_returns`.
        conn = ContextMock()
        pool = MockPool(conn)
        fun = Mock()
        insured = conn.autoretry.return_value = Mock()
        insured.return_value = insured_returns
        return conn, pool, fun, insured

    def test_insured(self):
        # insured() must ensure the connection, wrap fun via autoretry on
        # the default channel, and forward args plus the connection.
        conn, pool, fun, insured = self.get_insured_mocks()
        ret = common.insured(pool, fun, (2, 2), {"foo": "bar"})
        self.assertEqual(ret, "works")
        conn.ensure_connection.assert_called_with(
            errback=common._ensure_errback)
        self.assertTrue(insured.called)
        i_args, i_kwargs = insured.call_args
        self.assertTupleEqual(i_args, (2, 2))
        self.assertDictEqual(i_kwargs, {"foo": "bar",
                                        "connection": conn})
        self.assertTrue(conn.autoretry.called)
        ar_args, ar_kwargs = conn.autoretry.call_args
        self.assertTupleEqual(ar_args, (fun, conn.default_channel))
        self.assertTrue(ar_kwargs.get("on_revive"))
        self.assertTrue(ar_kwargs.get("errback"))

    def test_insured_custom_errback(self):
        # A caller-supplied errback overrides the default one.
        conn, pool, fun, insured = self.get_insured_mocks()
        custom_errback = Mock()
        common.insured(pool, fun, (2, 2), {"foo": "bar"},
                       errback=custom_errback)
        conn.ensure_connection.assert_called_with(errback=custom_errback)

    def get_ipublish_args(self, ensure_returns=None):
        # Helper: producer pool whose connection.ensure returns
        # `ensure_returns`.
        producer = ContextMock()
        pool = MockPool(producer)
        fun = Mock()
        ensure_returns = ensure_returns or Mock()
        producer.connection.ensure.return_value = ensure_returns
        return producer, pool, fun, ensure_returns

    def test_ipublish(self):
        # ipublish() wraps fun with connection.ensure and forwards args
        # plus the producer to the ensured callable.
        producer, pool, fun, ensure_returns = self.get_ipublish_args()
        ensure_returns.return_value = "works"
        ret = common.ipublish(pool, fun, (2, 2), {"foo": "bar"})
        self.assertEqual(ret, "works")
        self.assertTrue(producer.connection.ensure.called)
        e_args, e_kwargs = producer.connection.ensure.call_args
        self.assertTupleEqual(e_args, (producer, fun))
        self.assertTrue(e_kwargs.get("on_revive"))
        self.assertEqual(e_kwargs.get("errback"), common._ensure_errback)
        ensure_returns.assert_called_with(2, 2, foo="bar", producer=producer)

    def test_ipublish_with_custom_errback(self):
        # A caller-supplied errback overrides the default one.
        producer, pool, fun, _ = self.get_ipublish_args()
        errback = Mock()
        common.ipublish(pool, fun, (2, 2), {"foo": "bar"}, errback=errback)
        _, e_kwargs = producer.connection.ensure.call_args
        self.assertEqual(e_kwargs.get("errback"), errback)
class MockConsumer(object):
    """Minimal Consumer double.

    Records its constructor arguments and tracks "active" instances in
    the class-level ``consumers`` set for the duration of a ``with``
    block.
    """

    # Consumers currently inside a ``with`` block.
    consumers = set()

    def __init__(self, channel, queues, callbacks, **kwargs):
        # Extra keyword arguments are accepted (to match the real
        # Consumer signature) but ignored.
        self.channel = channel
        self.queues = queues
        self.callbacks = callbacks

    def __enter__(self):
        # Register as active so drain_events-style code can find us.
        MockConsumer.consumers.add(self)
        return self

    def __exit__(self, *exc_info):
        # Deregister; discard() tolerates a double exit.
        MockConsumer.consumers.discard(self)
class test_itermessages(TestCase):
    """Tests for common.itermessages."""

    class MockConnection(object):
        # When set, drain_events simulates a connection with no pending
        # messages by timing out.
        should_raise_timeout = False

        def drain_events(self, **kwargs):
            if self.should_raise_timeout:
                raise socket.timeout()
            # Deliver one ("body", "message") pair to every callback of
            # every currently-registered MockConsumer.
            for consumer in MockConsumer.consumers:
                for callback in consumer.callbacks:
                    callback("body", "message")

    def test_default(self):
        # With limit=1 exactly one (body, message) pair is yielded.
        conn = self.MockConnection()
        channel = Mock()
        it = common.itermessages(conn, channel, "q", limit=1,
                                 Consumer=MockConsumer)
        ret = it.next()
        self.assertTupleEqual(ret, ("body", "message"))
        with self.assertRaises(StopIteration):
            it.next()

    def test_when_raises_socket_timeout(self):
        # A drained (timed-out) connection yields nothing.
        conn = self.MockConnection()
        conn.should_raise_timeout = True
        channel = Mock()
        it = common.itermessages(conn, channel, "q", limit=1,
                                 Consumer=MockConsumer)
        with self.assertRaises(StopIteration):
            it.next()

    @patch("kombu.common.deque")
    def test_when_raises_IndexError(self, deque):
        # An empty internal deque must terminate iteration cleanly.
        deque_instance = deque.return_value = Mock()
        deque_instance.popleft.side_effect = IndexError()
        conn = self.MockConnection()
        channel = Mock()
        it = common.itermessages(conn, channel, "q", limit=1,
                                 Consumer=MockConsumer)
        with self.assertRaises(StopIteration):
            it.next()
|
|
"""Utilities related archives.
"""
import logging
import os
import shutil
import stat
import tarfile
import zipfile
from pip._internal.exceptions import InstallationError
from pip._internal.utils.filetypes import (
BZ2_EXTENSIONS,
TAR_EXTENSIONS,
XZ_EXTENSIONS,
ZIP_EXTENSIONS,
)
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Iterable, List, Optional
from zipfile import ZipInfo
logger = logging.getLogger(__name__)

# Archive extensions pip can always unpack; bz2/xz support is appended
# below only when the corresponding stdlib module is importable.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS

try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')
def current_umask():
    # type: () -> int
    """Return the process umask.

    There is no read-only query for the umask: set it to 0, capture the
    previous value that os.umask() returns, and immediately restore it.
    """
    previous = os.umask(0)
    os.umask(previous)
    return previous
def split_leading_dir(path):
    # type: (str) -> List[str]
    """Split off the first component of *path*.

    Returns a two-item list ``[head, tail]``; ``tail`` is the empty
    string when the path contains no separator.  Both '/' and '\\' are
    recognised, splitting on whichever occurs first.
    """
    path = path.lstrip('/').lstrip('\\')
    slash = path.find('/')
    backslash = path.find('\\')
    if slash != -1 and (backslash == -1 or slash < backslash):
        return path.split('/', 1)
    if backslash != -1:
        return path.split('\\', 1)
    return [path, '']
def has_leading_dir(paths):
    # type: (Iterable[str]) -> bool
    """Return True when all paths share one leading path name.

    That is the usual layout of an archive whose contents live in a
    single top-level subdirectory.  A path with an empty first
    component, or two paths with differing first components, makes the
    answer False.  (An empty iterable yields True.)
    """
    first = None
    for path in paths:
        leading = split_leading_dir(path)[0]
        if not leading:
            return False
        if first is None:
            first = leading
        elif leading != first:
            return False
    return True
def is_within_directory(directory, target):
    # type: (str, str) -> bool
    """
    Return true if the absolute path of target is within the directory.

    Comparison is done on whole path components: a sibling whose name
    merely starts with the directory's name (e.g. '/a/bc' next to
    '/a/b') is NOT considered inside it.
    """
    abs_directory = os.path.abspath(directory)
    abs_target = os.path.abspath(target)
    # Bug fix: os.path.commonprefix() is character-based, so it wrongly
    # treated '/a/bc' as being inside '/a/b', weakening the
    # path-traversal guard in unzip_file/untar_file.  commonpath()
    # compares whole path components instead.
    try:
        return os.path.commonpath([abs_directory, abs_target]) == abs_directory
    except ValueError:
        # Raised on Windows when the two paths are on different drives;
        # in that case target cannot be inside directory.
        return False
def set_extracted_file_to_default_mode_plus_executable(path):
    # type: (str) -> None
    """Chmod *path* to the default file mode plus the execute bits.

    The resulting mode is 0o777 masked by the current umask, with
    user/group/world execute added.  (chmod +x) is no-op on windows per
    python docs.
    """
    mode = (0o777 & ~current_umask()) | 0o111
    os.chmod(path, mode)
def zip_item_is_executable(info):
    # type: (ZipInfo) -> bool
    """Return True if *info* is a regular file with any execute bit set.

    The Unix permission bits live in the high 16 bits of
    ZipInfo.external_attr (zero when the archive carries no Unix
    attributes, in which case the answer is False).
    """
    unix_mode = info.external_attr >> 16
    if not unix_mode:
        return False
    return bool(stat.S_ISREG(unix_mode) and unix_mode & 0o111)
def unzip_file(filename, location, flatten=True):
    # type: (str, str, bool) -> None
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp, allowZip64=True)
        # When every entry shares one top-level directory (and flatten
        # was requested), strip that directory from extracted paths.
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            # Guard against path-traversal ("zip slip") entries.
            if not is_within_directory(location, fn):
                message = (
                    'The zip file ({}) has a file ({}) trying to install '
                    'outside target directory ({})'
                )
                raise InstallationError(message.format(filename, fn, location))
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                ensure_dir(fn)
            else:
                ensure_dir(dir)
                # Don't use read() to avoid allocating an arbitrarily large
                # chunk of memory for the file's content
                fp = zip.open(name)
                try:
                    with open(fn, 'wb') as destfp:
                        shutil.copyfileobj(fp, destfp)
                finally:
                    fp.close()
                if zip_item_is_executable(info):
                    set_extracted_file_to_default_mode_plus_executable(fn)
    finally:
        zipfp.close()
def untar_file(filename, location):
    # type: (str, str) -> None
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Pick the decompression mode from the extension; fall back to
    # transparent detection ('r:*') for unrecognised extensions.
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # When everything sits in one top-level directory, strip it.
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
        ])
        for member in tar.getmembers():
            fn = member.name
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            # Guard against path-traversal ("tar slip") entries.
            if not is_within_directory(location, path):
                message = (
                    'The tar file ({}) has a file ({}) trying to install '
                    'outside target directory ({})'
                )
                raise InstallationError(
                    message.format(filename, path, location)
                )
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # https://github.com/python/typeshed/issues/2673
                    tar._extract_member(member, path)  # type: ignore
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                # extractfile() only returns None for directories/links,
                # which the branches above already handled.
                assert fp is not None
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    set_extracted_file_to_default_mode_plus_executable(path)
    finally:
        tar.close()
def unpack_file(
    filename,  # type: str
    location,  # type: str
    content_type=None,  # type: Optional[str]
):
    # type: (...) -> None
    """Unpack the archive at ``filename`` into directory ``location``.

    The format is chosen from the content type, the file extension, or
    by sniffing the file itself.  Zip archives (including wheels, which
    are never flattened) and all supported tar variants are handled.

    Raises:
        InstallationError: if the archive format cannot be determined.
    """
    filename = os.path.realpath(filename)
    if (
        content_type == 'application/zip' or
        filename.lower().endswith(ZIP_EXTENSIONS) or
        zipfile.is_zipfile(filename)
    ):
        unzip_file(
            filename,
            location,
            # Wheels must keep their internal layout intact.
            flatten=not filename.endswith('.whl')
        )
    elif (
        content_type == 'application/x-gzip' or
        tarfile.is_tarfile(filename) or
        filename.lower().endswith(
            TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS
        )
    ):
        untar_file(filename, location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        # Bug fix: the message previously interpolated `location` (the
        # destination directory) instead of the archive path, which made
        # the error useless for diagnosing the offending file.
        raise InstallationError(
            f'Cannot determine archive format of {filename}'
        )
|
|
from direct.directnotify import DirectNotifyGlobal
from toontown.battle import BattlePlace
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from direct.showbase import BulletinBoardWatcher
from pandac.PandaModules import *
from otp.distributed.TelemetryLimiter import RotationLimitToH, TLGatherAllAvs
from toontown.nametag import NametagGlobals
from toontown.toon import Toon
from toontown.toonbase import ToontownGlobals
from toontown.hood import ZoneUtil
from toontown.toonbase import TTLocalizer
from toontown.toontowngui import TTDialog
from toontown.toonbase import ToontownBattleGlobals
from toontown.coghq import DistributedCountryClub
from toontown.building import Elevator
from toontown.nametag import NametagGlobals
import random
class CountryClubInterior(BattlePlace.BattlePlace):
    """Client-side Place for the interior floors of a Cog country club.

    Drives the local avatar's state machine (walking, battles,
    elevators, teleports, forced-leave dialogs) while inside a
    DistributedCountryClub, and reacts to the club's Ready/Win bulletin
    board posts.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('CountryClubInterior')

    def __init__(self, loader, parentFSM, doneEvent):
        BattlePlace.BattlePlace.__init__(self, loader, doneEvent)
        self.parentFSM = parentFSM
        self.zoneId = loader.countryClubId
        self.elevatorDoneEvent = 'elevatorDone'
        self.fsm = ClassicFSM.ClassicFSM('CountryClubInterior', [State.State('start', self.enterStart, self.exitStart, ['walk', 'teleportIn', 'fallDown']),
         State.State('walk', self.enterWalk, self.exitWalk, ['push',
          'sit',
          'stickerBook',
          'WaitForBattle',
          'battle',
          'died',
          'teleportOut',
          'squished',
          'DFA',
          'fallDown',
          'stopped',
          'elevator']),
         State.State('stopped', self.enterStopped, self.exitStopped, ['walk', 'teleportOut', 'stickerBook']),
         State.State('sit', self.enterSit, self.exitSit, ['walk', 'died', 'teleportOut']),
         State.State('push', self.enterPush, self.exitPush, ['walk', 'died', 'teleportOut']),
         State.State('stickerBook', self.enterStickerBook, self.exitStickerBook, ['walk',
          'battle',
          'DFA',
          'WaitForBattle',
          'died',
          'teleportOut']),
         State.State('WaitForBattle', self.enterWaitForBattle, self.exitWaitForBattle, ['battle',
          'walk',
          'died',
          'teleportOut']),
         State.State('battle', self.enterBattle, self.exitBattle, ['walk', 'teleportOut', 'died']),
         State.State('fallDown', self.enterFallDown, self.exitFallDown, ['walk', 'died', 'teleportOut']),
         State.State('squished', self.enterSquished, self.exitSquished, ['walk', 'died', 'teleportOut']),
         State.State('teleportIn', self.enterTeleportIn, self.exitTeleportIn, ['walk',
          'teleportOut',
          'quietZone',
          'died']),
         State.State('teleportOut', self.enterTeleportOut, self.exitTeleportOut, ['teleportIn',
          'FLA',
          'quietZone',
          'WaitForBattle']),
         State.State('DFA', self.enterDFA, self.exitDFA, ['DFAReject', 'teleportOut']),
         # Bug fix: the transition list was the single merged string
         # 'walkteleportOut'; no state of that name exists, so DFAReject
         # could never transition out.  Split into the two real states,
         # matching every other transition list in this FSM.
         State.State('DFAReject', self.enterDFAReject, self.exitDFAReject, ['walk', 'teleportOut']),
         State.State('died', self.enterDied, self.exitDied, ['teleportOut']),
         State.State('FLA', self.enterFLA, self.exitFLA, ['quietZone']),
         State.State('quietZone', self.enterQuietZone, self.exitQuietZone, ['teleportIn']),
         State.State('elevator', self.enterElevator, self.exitElevator, ['walk']),
         State.State('final', self.enterFinal, self.exitFinal, ['start'])], 'start', 'final')

    def load(self):
        """Attach our FSM to the parent FSM and pick a random BGM track."""
        self.parentFSM.getStateNamed('countryClubInterior').addChild(self.fsm)
        BattlePlace.BattlePlace.load(self)
        musicName = random.choice(['phase_12/audio/bgm/Bossbot_Factory_v1.ogg', 'phase_12/audio/bgm/Bossbot_Factory_v2.ogg', 'phase_12/audio/bgm/Bossbot_Factory_v3.ogg'])
        self.music = base.loadMusic(musicName)

    def unload(self):
        self.parentFSM.getStateNamed('countryClubInterior').removeChild(self.fsm)
        del self.music
        del self.fsm
        del self.parentFSM
        BattlePlace.BattlePlace.unload(self)

    def enter(self, requestStatus):
        """Enter the club: fade in once the distributed club posts Ready."""
        self.fsm.enterInitialState()
        base.transitions.fadeOut(t=0)
        base.localAvatar.inventory.setRespectInvasions(0)
        base.cr.forbidCheesyEffects(1)
        self._telemLimiter = TLGatherAllAvs('CountryClubInterior', RotationLimitToH)

        def commence(self = self):
            # Runs when the DistributedCountryClub announces it is ready.
            NametagGlobals.setWant2dNametags(True)
            self.fsm.request(requestStatus['how'], [requestStatus])
            base.playMusic(self.music, looping=1, volume=0.8)
            base.transitions.irisIn()
            CountryClub = bboard.get(DistributedCountryClub.DistributedCountryClub.ReadyPost)
            self.loader.hood.spawnTitleText(CountryClub.countryClubId, CountryClub.floorNum)

        self.CountryClubReadyWatcher = BulletinBoardWatcher.BulletinBoardWatcher('CountryClubReady', DistributedCountryClub.DistributedCountryClub.ReadyPost, commence)
        self.CountryClubDefeated = 0
        self.acceptOnce(DistributedCountryClub.DistributedCountryClub.WinEvent, self.handleCountryClubWinEvent)
        if __debug__ and 0:
            # Debug hook: fake a win with F10 (disabled).
            self.accept('f10', lambda : messenger.send(DistributedCountryClub.DistributedCountryClub.WinEvent))
        self.confrontedBoss = 0

        def handleConfrontedBoss(self = self):
            self.confrontedBoss = 1

        self.acceptOnce('localToonConfrontedCountryClubBoss', handleConfrontedBoss)

    def exit(self):
        """Leave the club: undo everything enter() set up."""
        NametagGlobals.setWant2dNametags(False)
        bboard.remove(DistributedCountryClub.DistributedCountryClub.ReadyPost)
        self._telemLimiter.destroy()
        del self._telemLimiter
        base.cr.forbidCheesyEffects(0)
        base.localAvatar.inventory.setRespectInvasions(1)
        self.fsm.requestFinalState()
        self.loader.music.stop()
        self.music.stop()
        self.ignoreAll()
        del self.CountryClubReadyWatcher

    def enterStopped(self):
        BattlePlace.BattlePlace.enterStopped(self)
        # Teleporting out of a club floor is never allowed.
        self.ignore('teleportQuery')
        base.localAvatar.setTeleportAvailable(0)

    def enterWalk(self, teleportIn = 0):
        BattlePlace.BattlePlace.enterWalk(self, teleportIn)
        self.ignore('teleportQuery')
        base.localAvatar.setTeleportAvailable(0)

    def enterPush(self):
        BattlePlace.BattlePlace.enterPush(self)
        self.ignore('teleportQuery')
        base.localAvatar.setTeleportAvailable(0)

    def enterWaitForBattle(self):
        CountryClubInterior.notify.debug('enterWaitForBattle')
        BattlePlace.BattlePlace.enterWaitForBattle(self)
        if base.localAvatar.getParent() != render:
            base.localAvatar.wrtReparentTo(render)
            base.localAvatar.b_setParent(ToontownGlobals.SPRender)

    def exitWaitForBattle(self):
        CountryClubInterior.notify.debug('exitWaitForBattle')
        BattlePlace.BattlePlace.exitWaitForBattle(self)

    def enterBattle(self, event):
        CountryClubInterior.notify.debug('enterBattle')
        # Silence the place music; the battle plays its own.
        self.music.stop()
        BattlePlace.BattlePlace.enterBattle(self, event)
        self.ignore('teleportQuery')
        base.localAvatar.setTeleportAvailable(0)

    def enterTownBattle(self, event):
        # Club battles award extra credit based on the floor's zone.
        mult = ToontownBattleGlobals.getCountryClubCreditMultiplier(self.zoneId)
        base.localAvatar.inventory.setBattleCreditMultiplier(mult)
        self.loader.townBattle.enter(event, self.fsm.getStateNamed('battle'), bldg=1, creditMultiplier=mult)

    def exitBattle(self):
        CountryClubInterior.notify.debug('exitBattle')
        BattlePlace.BattlePlace.exitBattle(self)
        self.loader.music.stop()
        base.playMusic(self.music, looping=1, volume=0.8)

    def enterStickerBook(self, page = None):
        BattlePlace.BattlePlace.enterStickerBook(self, page)
        self.ignore('teleportQuery')
        base.localAvatar.setTeleportAvailable(0)

    def enterSit(self):
        BattlePlace.BattlePlace.enterSit(self)
        self.ignore('teleportQuery')
        base.localAvatar.setTeleportAvailable(0)

    def enterZone(self, zoneId):
        # Zone changes are handled by the distributed club, not the place.
        pass

    def enterTeleportOut(self, requestStatus):
        CountryClubInterior.notify.debug('enterTeleportOut()')
        BattlePlace.BattlePlace.enterTeleportOut(self, requestStatus, self.__teleportOutDone)

    def __processLeaveRequest(self, requestStatus):
        # Route the exit either home (estate) or to the requested zone.
        hoodId = requestStatus['hoodId']
        if hoodId == ToontownGlobals.MyEstate:
            self.getEstateZoneAndGoHome(requestStatus)
        else:
            self.doneStatus = requestStatus
            messenger.send(self.doneEvent)

    def __teleportOutDone(self, requestStatus):
        CountryClubInterior.notify.debug('__teleportOutDone()')
        messenger.send('leavingCountryClub')
        messenger.send('localToonLeft')
        if self.CountryClubDefeated and not self.confrontedBoss:
            # Club beaten without meeting the boss: show the forced-leave
            # acknowledgment dialog first.
            self.fsm.request('FLA', [requestStatus])
        else:
            self.__processLeaveRequest(requestStatus)

    def exitTeleportOut(self):
        CountryClubInterior.notify.debug('exitTeleportOut()')
        BattlePlace.BattlePlace.exitTeleportOut(self)

    def handleCountryClubWinEvent(self):
        """The club was defeated: teleport everyone back to the playground."""
        CountryClubInterior.notify.debug('handleCountryClubWinEvent')
        if base.cr.playGame.getPlace().fsm.getCurrentState().getName() == 'died':
            return
        self.CountryClubDefeated = 1
        if 1:
            zoneId = ZoneUtil.getHoodId(self.zoneId)
        else:
            zoneId = ZoneUtil.getSafeZoneId(base.localAvatar.defaultZone)
        self.fsm.request('teleportOut', [{
            'loader': ZoneUtil.getLoaderName(zoneId),
            'where': ZoneUtil.getToonWhereName(zoneId),
            'how': 'teleportIn',
            'hoodId': zoneId,
            'zoneId': zoneId,
            'shardId': None,
            'avId': -1,
        }])

    def enterDied(self, requestStatus, callback = None):
        CountryClubInterior.notify.debug('enterDied')

        def diedDone(requestStatus, self = self, callback = callback):
            if callback is not None:
                callback()
            messenger.send('leavingCountryClub')
            self.doneStatus = requestStatus
            messenger.send(self.doneEvent)
            return

        BattlePlace.BattlePlace.enterDied(self, requestStatus, diedDone)

    def enterFLA(self, requestStatus):
        """Show the 'forced leave' acknowledgment, then continue leaving."""
        CountryClubInterior.notify.debug('enterFLA')
        self.flaDialog = TTDialog.TTGlobalDialog(message=TTLocalizer.ForcedLeaveCountryClubAckMsg, doneEvent='FLADone', style=TTDialog.Acknowledge, fadeScreen=1)

        def continueExit(self = self, requestStatus = requestStatus):
            self.__processLeaveRequest(requestStatus)

        self.accept('FLADone', continueExit)
        self.flaDialog.show()

    def exitFLA(self):
        CountryClubInterior.notify.debug('exitFLA')
        if hasattr(self, 'flaDialog'):
            self.flaDialog.cleanup()
            del self.flaDialog

    def detectedElevatorCollision(self, distElevator):
        self.fsm.request('elevator', [distElevator])

    def enterElevator(self, distElevator, skipDFABoard = 0):
        self.accept(self.elevatorDoneEvent, self.handleElevatorDone)
        self.elevator = Elevator.Elevator(self.fsm.getStateNamed('elevator'), self.elevatorDoneEvent, distElevator)
        if skipDFABoard:
            self.elevator.skipDFABoard = 1
        self.elevator.setReverseBoardingCamera(True)
        distElevator.elevatorFSM = self.elevator
        self.elevator.load()
        self.elevator.enter()

    def exitElevator(self):
        self.ignore(self.elevatorDoneEvent)
        self.elevator.unload()
        self.elevator.exit()

    def handleElevatorDone(self, doneStatus):
        """Dispatch on the elevator's result: reject, exit, or next floor."""
        self.notify.debug('handling elevator done event')
        where = doneStatus['where']
        if where == 'reject':
            if hasattr(base.localAvatar, 'elevatorNotifier') and base.localAvatar.elevatorNotifier.isNotifierOpen():
                # The notifier dialog is still up; stay put until closed.
                pass
            else:
                self.fsm.request('walk')
        elif where == 'exit':
            self.fsm.request('walk')
        elif where == 'factoryInterior' or where == 'suitInterior':
            self.doneStatus = doneStatus
            self.doneEvent = 'lawOfficeFloorDone'
            messenger.send(self.doneEvent)
        else:
            self.notify.error('Unknown mode: ' + where + ' in handleElevatorDone')
|
|
# Lint as: python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementations of finite difference methods."""
from autograd import elementwise_grad as egrad
import autograd.numpy as np
import pandas as pd
import sobol_seq
def rosenbrock(x):
  """Rosenbrock function: test function for evaluating algorithms.

  f(x) = sum_i 100*(x[i+1] - x[i]**2)**2 + (1 - x[i])**2 along the last
  axis; the global minimum f = 0 is attained at x = (1, ..., 1).
  """
  x = np.array(x)
  left = x[..., :-1]
  right = x[..., 1:]
  curvature = 100 * np.square(right - np.square(left))
  offset = np.square(1 - left)
  return np.sum(curvature + offset, axis=-1)
def grid(num, ndim, large=False):
  """Build a uniform grid with num points along each of ndim axes.

  Returns a (num, ..., num, ndim)-shaped array of points covering
  [0, 1]^ndim and the grid spacing w = 1 / (num - 1).  Unless
  large=True, guards against accidentally huge allocations.
  """
  if not large:
    _check_not_too_large(np.power(num, ndim) * ndim)
  axis_coords = np.linspace(0, 1, num, dtype='float64')
  spacing = 1 / (num - 1)
  mesh = np.meshgrid(*(axis_coords,) * ndim, indexing='ij')
  return np.stack(mesh, axis=-1), spacing
def non_uniform_grid(num, ndim, skip=42, large=False):
    """Build a non-uniform grid with num points of ndim dimensions.

    Uses a Sobol sequence for low-discrepancy (quasi-random) coverage of
    the unit hypercube; `skip` drops the first points of the sequence.
    Returns an array of shape (num, ndim).
    """
    if not large:
        _check_not_too_large(num * ndim)
    return sobol_seq.i4_sobol_generate(ndim, num, skip=skip)
def autograd(f, ds, points):
    """Evaluate derivatives of f on the given points.

    ds is a sequence of axis indices; each entry applies one more
    elementwise differentiation along that coordinate.
    """
    # Present f as a function of one scalar array per coordinate so that
    # elementwise_grad can differentiate along individual axes.
    g = lambda *coords: f(np.stack(coords, axis=-1))
    for axis in ds:
        g = egrad(g, axis)
    ndim = points.shape[-1]
    return g(*(points[..., k] for k in range(ndim)))
def central(f0, ds, w):
    """Apply central difference method to estimate derivatives.

    f0 are function values on a uniform grid with spacing w; ds selects
    which (up to second order) derivative to estimate.
    """
    if not ds:
        return f0
    step = lambda o: shift(f0, o)
    basis = np.eye(f0.ndim, dtype=int)
    offs = [-basis[d] for d in ds]
    if len(ds) == 1:  # First order derivatives.
        i = offs[0]
        return (step(i) - step(-i)) / (2 * w)
    if len(ds) == 2:  # Second order derivatives.
        i, j = offs
        w2 = np.square(w)
        if ds[0] == ds[1]:  # d^2/dxdx
            return (step(i) - 2 * f0 + step(-i)) / w2
        # d^2/dxdy (mixed partial)
        return (step(i + j) - step(i - j) - step(j - i) + step(-i - j)) / (4 * w2)
    raise NotImplementedError(ds)
def triangular(n):
    """Compute the n-th triangular number, n * (n + 1) / 2."""
    product = n * (n + 1)
    return np.floor_divide(product, 2)
def derivative_names(ndim):
    """Iterate over derivative specifications and their names.

    Yields (ds, name) pairs: the empty spec for the function value, then
    first derivatives, pure second derivatives, and mixed second
    derivatives. Total count is triangular(ndim + 1).
    """
    yield (), 'f'  # Function value.
    for axis in range(ndim):
        yield (axis,), 'df/d%i' % axis  # First derivative along an axis.
    for axis in range(ndim):
        yield (axis, axis), 'd^2f/d%i^2' % axis  # Second derivative along an axis.
    for a, b in zip(*np.triu_indices(ndim, k=1)):
        # Second derivative along mixed axes.
        yield (int(a), int(b)), 'd^2f/(d%i d%i)' % (a, b)
def taylor_approx(target, stencil, values):
    """Use taylor series to approximate up to second order derivatives.

    Args:
      target: An array of shape (..., n), a batch of n-dimensional points
        where one wants to approximate function value and derivatives.
      stencil: An array of shape broadcastable to (..., k, n), for each
        target point a set of k = triangle(n + 1) points to use on its
        approximation.
      values: An array of shape broadcastable to (..., k), the function
        value at each of the stencil points.

    Returns:
      An array of shape (..., k): for each target point the approximated
      function value, gradient and hessian evaluated at that point
      (flattened, in the order produced by derivative_names).
    """
    batch_shape, ndim = target.shape[:-1], target.shape[-1]
    k = (ndim + 1) * (ndim + 2) // 2  # == triangular(ndim + 1)
    stencil = np.broadcast_to(stencil, batch_shape + (k, ndim))
    values = np.broadcast_to(values, stencil.shape[:-1])
    # Displacement of every stencil point from its target point.
    delta_x = stencil - np.expand_dims(target, axis=-2)
    delta_xy = np.matmul(np.expand_dims(delta_x, axis=-1),
                         np.expand_dims(delta_x, axis=-2))
    diag = np.arange(ndim)
    rows, cols = np.triu_indices(ndim, k=1)
    # Coefficients of the Taylor series equations, namely:
    #   f(stencil) = coeffs @ [f(target), df/d0(target), ...]
    coeffs = np.concatenate([
        np.ones(delta_x.shape[:-1] + (1,)),  # f(target)
        delta_x,                             # df/di(target)
        delta_xy[..., diag, diag] / 2,       # d^2f/di^2(target)
        delta_xy[..., rows, cols],           # d^2f/{dj dk}(target)
    ], axis=-1)
    # Invert: [f(target), df/d0(target), ...] = coeffs^{-1} @ f(stencil)
    return np.squeeze(
        np.matmul(np.linalg.inv(coeffs), values[..., np.newaxis]), axis=-1)
def non_uniform_approx_nearest(points, values):
    """Approximate derivatives using nearest points in non-uniform grid."""
    ndim = points.shape[-1]
    k = (ndim + 1) * (ndim + 2) // 2  # stencil size: triangular(ndim + 1)
    # All pairwise displacements and their Euclidean distances.
    pairwise = np.expand_dims(points, axis=0) - np.expand_dims(points, axis=1)
    dists = np.linalg.norm(pairwise, axis=-1)
    # For each point, the indices of its k nearest points (itself included,
    # since its own distance is zero).
    nearest = np.argpartition(dists, k)[..., :k]
    return taylor_approx(points, points[nearest], values[nearest])
def central_errors(f, num, ndim, label=None):
    """Build DataFrame of approximation errors with central differences method."""
    points, w = grid(num, ndim)
    sampled = f(points)

    def name_errors():
        # Compare autograd ground truth against the finite-difference
        # estimate for every derivative specification.
        for ds, name in derivative_names(ndim):
            exact = autograd(f, ds, points)
            estimate = central(sampled, ds, w)
            yield name, np.abs(exact - estimate)

    return _build_errors_df(name_errors(), label)
def non_uniform_errors(f, num, ndim, label=None):
    """Build DataFrame of approximation errors with non uniform grid."""
    points = non_uniform_grid(np.power(num, ndim), ndim)
    sampled = f(points)
    estimates = non_uniform_approx_nearest(points, sampled)

    def name_errors():
        # estimates.T iterates derivative-by-derivative, matching the
        # order produced by derivative_names.
        for (ds, name), estimate in zip(derivative_names(ndim), estimates.T):
            exact = autograd(f, ds, points)
            yield name, np.abs(exact - estimate)

    return _build_errors_df(name_errors(), label)
def _build_errors_df(name_errors, label):
"""Helper to build errors DataFrame."""
series = []
percentiles = np.linspace(0, 100, 21)
index = percentiles / 100
for name, errors in name_errors:
series.append(pd.Series(
np.nanpercentile(errors, q=percentiles), index=index, name=name))
df = pd.concat(series, axis=1)
df.columns.name = 'derivative'
df.index.name = 'quantile'
df = df.stack().rename('error').reset_index()
with np.errstate(divide='ignore'):
df['log(error)'] = np.log(df['error'])
if label is not None:
df['label'] = label
return df
def shift(x, offsets):
    """Similar to np.roll, but fills with nan instead of rolling values over.

    Shifts along all axes at once: offsets[i] is the shift applied to the
    i-th axis.

    Args:
      x: The input array to shift.
      offsets: Per-axis integer shifts.

    Returns:
      An array with the same shape as the input, shifted, with vacated
      positions filled with nan.
    """
    def bound(offset):
        # Positive offset keeps the tail; negative keeps the head.
        return slice(offset, None) if offset >= 0 else slice(None, offset)

    shifted = np.full_like(x, np.nan)
    src = tuple(bound(-o) for o in offsets)
    dst = tuple(bound(o) for o in offsets)
    shifted[dst] = x[src]
    return shifted
def _check_not_too_large(num_values):
if num_values > 10e6:
raise ValueError('Attempting to create an array with more than 10M values')
|
|
from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter, CentralOutputPlotter
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import mse, binary_crossentropy
from lasagne.init import Uniform, Normal
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
# Experiment name is derived from the running script's filename.
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
#PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 5000
GRADIENT_STEPS = 100

# Shared RealApplianceSource configuration used (via deepcopy) by every
# exp_* experiment below; individual experiments override single keys.
source_dict = dict(
    filename='/data/dk3810/ukdale.h5',
    appliances=[
        ['fridge freezer', 'fridge', 'freezer'],
        'hair straighteners',
        'television',
        'dish washer',
        ['washer dryer', 'washing machine']
    ],
    on_power_thresholds=[5] * 5,
    min_on_durations=[60, 60, 60, 1800, 1800],
    min_off_durations=[12, 12, 12, 1800, 600],
    window=("2013-06-01", "2014-07-01"),
    seq_length=512,
#    random_window=64,
    output_one_appliance=False,
    boolean_targets=False,
    train_buildings=[1],
    validation_buildings=[1],
    skip_probability=0.8,
    one_target_per_seq=False,
    n_seq_per_batch=16,
#    subsample_target=2,
    include_diff=True,
    include_power=False,
    clip_appliance_power=True,
    target_is_prediction=False,
#    independently_center_inputs = True,
    standardise_input=True,
#    unit_variance_targets=True,
#    input_padding=8,
    lag=0,
    classification=True
#    reshape_target_to_2D=True
    # input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
    #              'std': np.array([ 0.12636775], dtype=np.float32)},
    # target_stats={
    #     'mean': np.array([ 0.04066789,  0.01881946,
    #                        0.24639061,  0.17608672,  0.10273963],
    #                      dtype=np.float32),
    #     'std': np.array([ 0.11449792,  0.07338708,
    #                       0.26608968,  0.33463112,  0.21250485],
    #                      dtype=np.float32)}
)

N = 50
# Shared Net configuration used (via deepcopy) by every exp_* experiment.
# Binary cross-entropy loss matches classification=True in source_dict.
net_dict = dict(
    save_plot_interval=SAVE_PLOT_INTERVAL,
#    loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
#    loss_function=lambda x, t: mdn_nll(x, t).mean(),
#    loss_function=lambda x, t: mse(x, t).mean(),
    loss_function=lambda x, t: binary_crossentropy(x, t).mean(),
#    loss_function=partial(scaled_cost, loss_func=mse),
#    loss_function=ignore_inactive,
#    loss_function=partial(scaled_cost3, ignore_inactive=False),
    updates_func=momentum,
    learning_rate=1e-4,
    learning_rate_changes_by_iteration={
        #10000: 1e-5,
        # 400: 1e-3,
        # 800: 1e-4
        # 500: 1e-3
        # 4000: 1e-03,
        # 6000: 5e-06,
        # 7000: 1e-06
        # 2000: 5e-06
        # 3000: 1e-05
        # 7000: 5e-06,
        # 10000: 1e-06,
        # 15000: 5e-07,
        # 50000: 1e-07
    },
    do_save_activations=True,
    auto_reshape=False,
    plotter=CentralOutputPlotter
#    plotter=MDNPlotter
)

"""
||||||||||
||||||||||
||||||||||
||||||||||
||||||||||
||||||||||
12345678901234567890
"""
def exp_a(name):
    """Fully-connected classifier on the default source; resumes training
    from the saved parameters at iteration 120000."""
    global source
    source = RealApplianceSource(**deepcopy(source_dict))
    net_config = deepcopy(net_dict)
    net_config['experiment_name'] = name
    net_config['source'] = source
    n_units = 512 * 8
    out_shape = source.output_shape_after_processing()
    # Four shrinking rectified hidden layers, then a sigmoid output sized
    # to the flattened target.
    hidden_sizes = [n_units * 2, n_units, n_units // 2, n_units // 4]
    net_config['layers_config'] = [
        {'type': DenseLayer, 'num_units': units, 'nonlinearity': rectify}
        for units in hidden_sizes
    ] + [{
        'type': DenseLayer,
        'num_units': out_shape[1] * out_shape[2],
        'nonlinearity': sigmoid
    }]
    net = Net(**net_config)
    net.load_params(120000)
    return net
def exp_b(name):
    """Same architecture as exp_a but with random_window=128 and no
    parameter snapshot loaded."""
    global source
    config_src = deepcopy(source_dict)
    config_src['random_window'] = 128
    source = RealApplianceSource(**config_src)
    net_config = deepcopy(net_dict)
    net_config['experiment_name'] = name
    net_config['source'] = source
    n_units = 512 * 8
    out_shape = source.output_shape_after_processing()
    hidden_sizes = [n_units * 2, n_units, n_units // 2, n_units // 4]
    net_config['layers_config'] = [
        {'type': DenseLayer, 'num_units': units, 'nonlinearity': rectify}
        for units in hidden_sizes
    ] + [{
        'type': DenseLayer,
        'num_units': out_shape[1] * out_shape[2],
        'nonlinearity': sigmoid
    }]
    return Net(**net_config)
def exp_c(name):
    """Same architecture with random_window=256, a lower learning rate
    (1e-5), resuming from the snapshot at iteration 30000."""
    global source
    config_src = deepcopy(source_dict)
    config_src['random_window'] = 256
    source = RealApplianceSource(**config_src)
    net_config = deepcopy(net_dict)
    net_config['experiment_name'] = name
    net_config['source'] = source
    net_config['learning_rate'] = 1e-5
    n_units = 512 * 8
    out_shape = source.output_shape_after_processing()
    hidden_sizes = [n_units * 2, n_units, n_units // 2, n_units // 4]
    net_config['layers_config'] = [
        {'type': DenseLayer, 'num_units': units, 'nonlinearity': rectify}
        for units in hidden_sizes
    ] + [{
        'type': DenseLayer,
        'num_units': out_shape[1] * out_shape[2],
        'nonlinearity': sigmoid
    }]
    net = Net(**net_config)
    net.load_params(30000)
    return net
def exp_d(name):
    """Same architecture as exp_a but with random_window=512 and no
    parameter snapshot loaded."""
    global source
    config_src = deepcopy(source_dict)
    config_src['random_window'] = 512
    source = RealApplianceSource(**config_src)
    net_config = deepcopy(net_dict)
    net_config['experiment_name'] = name
    net_config['source'] = source
    n_units = 512 * 8
    out_shape = source.output_shape_after_processing()
    hidden_sizes = [n_units * 2, n_units, n_units // 2, n_units // 4]
    net_config['layers_config'] = [
        {'type': DenseLayer, 'num_units': units, 'nonlinearity': rectify}
        for units in hidden_sizes
    ] + [{
        'type': DenseLayer,
        'num_units': out_shape[1] * out_shape[2],
        'nonlinearity': sigmoid
    }]
    return Net(**net_config)
def exp_e(name):
    """Same architecture as exp_a but with random_window explicitly
    disabled (0) and no parameter snapshot loaded."""
    global source
    config_src = deepcopy(source_dict)
    config_src['random_window'] = 0
    source = RealApplianceSource(**config_src)
    net_config = deepcopy(net_dict)
    net_config['experiment_name'] = name
    net_config['source'] = source
    n_units = 512 * 8
    out_shape = source.output_shape_after_processing()
    hidden_sizes = [n_units * 2, n_units, n_units // 2, n_units // 4]
    net_config['layers_config'] = [
        {'type': DenseLayer, 'num_units': units, 'nonlinearity': rectify}
        for units in hidden_sizes
    ] + [{
        'type': DenseLayer,
        'num_units': out_shape[1] * out_shape[2],
        'nonlinearity': sigmoid
    }]
    return Net(**net_config)
def main():
    """Run each configured experiment (exp_<letter>) in sequence."""
    # EXPERIMENTS = list('abcdefghijklmnopqrstuvwxyz')
    EXPERIMENTS = list('a')
    for experiment in EXPERIMENTS:
        full_exp_name = NAME + experiment
        func_call = init_experiment(PATH, experiment, full_exp_name)
        logger = logging.getLogger(full_exp_name)
        try:
            # NOTE(review): init_experiment presumably returns a call string
            # like "exp_a('<name>')" that is eval'd here; safe only because
            # the string is constructed locally -- confirm before reuse.
            net = eval(func_call)
            run_experiment(net, epochs=100000)
        except KeyboardInterrupt:
            logger.info("KeyboardInterrupt")
            break
        except Exception as exception:
            logger.exception("Exception")
            # raise
        finally:
            # NOTE(review): logging.shutdown() runs after EVERY iteration,
            # closing log handlers while later experiments might still log.
            # Harmless with a single experiment; verify before extending
            # EXPERIMENTS.
            logging.shutdown()


if __name__ == "__main__":
    main()
|
|
#
# Contents.py -- Table of Contents plugin for fits viewer
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from ginga.util.six import itervalues
from ginga.util.six.moves import map
from ginga import GingaPlugin
from ginga.misc import Bunch
from ginga.gw import Widgets
import time
class Contents(GingaPlugin.GlobalPlugin):
    """Global plugin showing a table of contents of loaded images,
    organized by channel, with the active image(s) highlighted.

    Registers callbacks on the viewer (`fv`) for image/channel add,
    remove and focus changes and mirrors that state into a TreeView.
    """

    def __init__(self, fv):
        # superclass defines some variables for us, like logger
        super(Contents, self).__init__(fv)

        columns = [ ('Name', 'NAME'), ('Object', 'OBJECT'),
                    ('Date', 'DATE-OBS'), ('Time UT', 'UT'),
                    ('Modified', 'MODIFIED')
                    ]

        prefs = self.fv.get_preferences()
        self.settings = prefs.createCategory('plugin_Contents')
        self.settings.addDefaults(columns=columns,
                                  always_expand=True,
                                  highlight_tracks_keyboard_focus=True,
                                  color_alternate_rows=True,
                                  row_font_color='green',
                                  max_rows_for_col_resize=100)
        self.settings.load(onError='silent')

        # For table-of-contents pane; caseless so channel names are
        # matched case-insensitively
        self.name_dict = Bunch.caselessDict()
        # TODO: this ought to be customizable by channel
        self.columns = self.settings.get('columns', columns)
        self.treeview = None
        # paths of highlighted entries, by channel
        self.highlight_tracks_keyboard_focus = self.settings.get(
            'highlight_tracks_keyboard_focus', True)
        self._hl_path = set([])

        fv.add_callback('add-image', self.add_image_cb)
        fv.add_callback('add-image-info', self.add_image_info_cb)
        fv.add_callback('remove-image', self.remove_image_cb)
        fv.add_callback('add-channel', self.add_channel_cb)
        fv.add_callback('delete-channel', self.delete_channel_cb)
        fv.add_callback('channel-change', self.focus_cb)

        self.gui_up = False

    def build_gui(self, container):
        """Create the TreeView widget and add it to `container`."""
        always_expand = self.settings.get('always_expand', False)
        color_alternate = self.settings.get('color_alternate_rows', True)
        treeview = Widgets.TreeView(auto_expand=always_expand,
                                    sortable=True,
                                    use_alt_row_color=color_alternate)
        self.treeview = treeview
        treeview.setup_table(self.columns, 2, 'NAME')
        treeview.add_callback('selected', self.switch_image)
        container.add_widget(treeview, stretch=1)
        self.gui_up = True

    def stop(self):
        self.gui_up = False

    def switch_image(self, widget, res_dict):
        """TreeView selection callback: switch the viewer to the first
        selected (channel, image) entry."""
        if len(res_dict) == 0:
            return
        chname = list(res_dict.keys())[0]
        img_dict = res_dict[chname]
        if len(img_dict) == 0:
            return
        imname = list(img_dict.keys())[0]
        bnch = img_dict[imname]
        path = bnch.path
        self.logger.debug("chname=%s name=%s path=%s" % (
            chname, imname, path))

        self.fv.switch_name(chname, imname, path=path,
                            image_future=bnch.image_future)

    def get_info(self, chname, name, image, info):
        """Build the Bunch of display fields for one image row.

        Header keywords named in self.columns are read from the image
        header (or 'N/A' if absent / image not loaded).
        """
        path = info.get('path', None)
        future = info.get('image_future', None)
        bnch = Bunch.Bunch(CHNAME=chname, imname=name, path=path,
                           image_future=future)

        # Get header keywords of interest
        if image is not None:
            header = image.get_header()
        else:
            header = {}
        for hdr, key in self.columns:
            bnch[key] = str(header.get(key, 'N/A'))
        # name should always be available
        bnch.NAME = name
        # Modified timestamp will be set if image data is modified
        timestamp = info.time_modified
        if timestamp is not None:
            # Z: Zulu time, GMT, UTC
            timestamp = timestamp.strftime('%Y-%m-%d %H:%M:%SZ')
        bnch.MODIFIED = timestamp
        return bnch

    def recreate_toc(self):
        """Rebuild the entire tree from self.name_dict and restore
        highlighting."""
        self.logger.debug("Recreating table of contents...")
        self.treeview.set_tree(self.name_dict)

        # re-highlight as necessary
        if self.highlight_tracks_keyboard_focus:
            new_highlight = self._hl_path
        else:
            new_highlight = set([])
            for chname in self.name_dict:
                channel = self.fv.get_channel_info(chname)
                new_highlight |= channel.extdata.contents_old_highlight
        self.update_highlights(set([]), new_highlight)

        # Resize column widths
        n_rows = sum(map(len, self.name_dict.values()))
        if n_rows < self.settings.get('max_rows_for_col_resize', 100):
            self.treeview.set_optimal_column_widths()
            self.logger.debug("Resized columns for {0} row(s)".format(n_rows))

    def is_in_contents(self, chname, imname):
        """Return True if image `imname` is listed under channel `chname`."""
        if chname not in self.name_dict:
            return False
        return imname in self.name_dict[chname]

    def add_image_cb(self, viewer, chname, image, image_info):
        """Viewer 'add-image' callback: insert/update the row for an image."""
        if not self.gui_up:
            return False

        name = image_info.name
        self.logger.debug("name=%s" % (name))

        if image is not None:
            nothumb = image.get('nothumb', False)
            if nothumb:
                return

        bnch = self.get_info(chname, name, image, image_info)

        if chname not in self.name_dict:
            # channel does not exist yet in contents
            # Note: this typically shouldn't happen, because add_channel_cb()
            # will have added an empty dict
            file_dict = {}
            self.name_dict[chname] = file_dict
        else:
            file_dict = self.name_dict[chname]

        if name not in file_dict:
            # new image
            file_dict[name] = bnch
        else:
            # old image
            file_dict[name].update(bnch)

        # TODO: either make add_tree() merge updates or make an
        # update_tree() method--shouldn't need to recreate entire
        # tree, just add new entry and possibly rehighlight
        ## tree_dict = { chname: { name: bnch } }
        ## self.treeview.add_tree(tree_dict)
        self.recreate_toc()

        self.logger.debug("%s added to Contents" % (name))

    def add_image_info_cb(self, viewer, channel, image_info):
        """Almost the same as add_image_info(), except that the image
        may not be loaded in memory.
        """
        chname = channel.name
        name = image_info.name
        self.logger.debug("name=%s" % (name))

        # Updates of any extant information
        try:
            image = channel.get_loaded_image(name)
        except KeyError:
            # images that are not yet loaded will show "N/A" for keywords
            image = None

        self.add_image_cb(viewer, chname, image, image_info)

    def remove_image_cb(self, viewer, chname, name, path):
        """Viewer 'remove-image' callback: drop the row and any highlight."""
        if not self.gui_up:
            return False

        if chname not in self.name_dict:
            return

        file_dict = self.name_dict[chname]

        if name not in file_dict:
            return

        del file_dict[name]

        # Unhighlight
        channel = self.fv.get_channel_info(chname)
        key = (chname, name)
        self._hl_path.discard(key)
        channel.extdata.contents_old_highlight.discard(key)

        self.recreate_toc()
        self.logger.debug("%s removed from Contents" % (name))

    def clear(self):
        """Remove all entries and highlights, then rebuild (empty) tree."""
        self.name_dict = Bunch.caselessDict()
        self._hl_path = set([])
        self.recreate_toc()

    def add_channel_cb(self, viewer, channel):
        """Called when a channel is added from the main interface.
        Parameter is a channel (a Channel object)."""
        chname = channel.name

        # add old highlight set to channel external data
        channel.extdata.setdefault('contents_old_highlight', set([]))

        # Add the channel to the treeview
        file_dict = {}
        self.name_dict.setdefault(chname, file_dict)

        if not self.gui_up:
            return False

        tree_dict = { chname: { } }
        self.treeview.add_tree(tree_dict)

    def delete_channel_cb(self, viewer, channel):
        """Called when a channel is deleted from the main interface.
        Parameter is a channel (a Channel object)."""
        chname = channel.name
        del self.name_dict[chname]

        # Unhighlight: drop every highlight key belonging to this channel
        un_hilite_set = set([])
        for path in self._hl_path:
            if path[0] == chname:
                un_hilite_set.add(path)
        self._hl_path -= un_hilite_set

        if not self.gui_up:
            return False
        self.recreate_toc()

    def _get_hl_key(self, chname, image):
        """Return the (channel, image name) key used for highlighting."""
        return (chname, image.get('name', 'none'))

    def _highlight_path(self, hl_path, tf):
        """Highlight or unhighlight a single entry.

        Examples
        --------
        >>> hl_path = self._get_hl_key(chname, image)
        >>> self._highlight_path(hl_path, True)
        """
        fc = self.settings.get('row_font_color', 'green')

        try:
            self.treeview.highlight_path(hl_path, tf, font_color=fc)
        except Exception as e:
            self.logger.error('Error changing highlight on treeview path '
                              '({0}): {1}'.format(hl_path, str(e)))

    def update_highlights(self, old_highlight_set, new_highlight_set):
        """Unhighlight the entries represented by ``old_highlight_set``
        and highlight the ones represented by ``new_highlight_set``.

        Both are sets of keys.
        """
        un_hilite_set = old_highlight_set - new_highlight_set
        re_hilite_set = new_highlight_set - old_highlight_set

        # unhighlight entries that should NOT be highlighted any more
        for key in un_hilite_set:
            self._highlight_path(key, False)

        # highlight new entries that should be
        for key in re_hilite_set:
            self._highlight_path(key, True)

    def redo(self, channel, image):
        """This method is called when an image is set in a channel."""
        imname = image.get('name', 'none')
        chname = channel.name
        # is image in contents tree yet?
        in_contents = self.is_in_contents(chname, imname)

        # get old highlighted entries for this channel -- will be
        # an empty set or one key
        old_highlight = channel.extdata.contents_old_highlight

        # calculate new highlight keys -- again, an empty set or one key
        if image is not None:
            key = self._get_hl_key(chname, image)
            new_highlight = set([key])
        else:
            # no image has the focus
            new_highlight = set([])

        # Only highlights active image in the current channel
        if self.highlight_tracks_keyboard_focus:
            if in_contents:
                self.update_highlights(self._hl_path, new_highlight)
            self._hl_path = new_highlight

        # Highlight all active images in all channels
        else:
            if in_contents:
                self.update_highlights(old_highlight, new_highlight)
            channel.extdata.contents_old_highlight = new_highlight

        return True

    def focus_cb(self, viewer, channel):
        """Viewer 'channel-change' callback: move the keyboard-focus
        highlight to the newly focused channel's current image."""
        chname = channel.name
        image = channel.get_current_image()
        if image is not None:
            key = self._get_hl_key(chname, image)
            new_highlight = set([key])
        else:
            # no image has the focus
            new_highlight = set([])

        if self.highlight_tracks_keyboard_focus:
            self.update_highlights(self._hl_path, new_highlight)
            self._hl_path = new_highlight

    def __str__(self):
        return 'contents'
#END
|
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
from oslo.config import cfg
import requests
import webob
from heat.api.aws import exception
from heat.api.aws.exception import HeatAPIException
from heat.common import wsgi
from heat.openstack.common import gettextutils
from heat.openstack.common.gettextutils import _
from heat.openstack.common import importutils
from heat.openstack.common import jsonutils as json
from heat.openstack.common import log as logging
# Install the _() translation function into builtins for this process.
gettextutils.install('heat')

logger = logging.getLogger(__name__)

# Options read from the [ec2authtoken] config group; consulted by
# EC2Token._conf_get() when paste-deploy local config does not supply
# the value.
opts = [
    cfg.StrOpt('auth_uri',
               default=None,
               help=_("Authentication Endpoint URI.")),
    cfg.BoolOpt('multi_cloud',
                default=False,
                help=_('Allow orchestration of multiple clouds.')),
    cfg.ListOpt('allowed_auth_uris',
                default=[],
                help=_('Allowed keystone endpoints for auth_uri when '
                       'multi_cloud is enabled. At least one endpoint needs '
                       'to be specified.'))
]
cfg.CONF.register_opts(opts, group='ec2authtoken')
class EC2Token(wsgi.Middleware):
    """Authenticate an EC2 request with keystone and convert to token.

    WSGI middleware: extracts AWS-style credentials (signature versions
    0-4) from the request, validates them against keystone's ec2tokens
    endpoint and, on success, injects the resulting keystone token and
    tenant/role headers for downstream use.
    """

    def __init__(self, app, conf):
        self.conf = conf
        self.application = app

    def _conf_get(self, name):
        """Look up `name` in paste-deploy local conf, falling back to the
        [ec2authtoken] oslo.config group."""
        # try config from paste-deploy first
        if name in self.conf:
            return self.conf[name]
        else:
            return cfg.CONF.ec2authtoken[name]

    def _conf_get_auth_uri(self):
        """Return the keystone auth URI, reusing [keystone_authtoken]'s
        auth_uri when none is configured for ec2authtoken."""
        auth_uri = self._conf_get('auth_uri')
        if auth_uri:
            return auth_uri
        else:
            # Import auth_token to have keystone_authtoken settings setup.
            # We can use the auth_uri from the keystone_authtoken section
            importutils.import_module('keystoneclient.middleware.auth_token')
            return cfg.CONF.keystone_authtoken['auth_uri']

    @staticmethod
    def _conf_get_keystone_ec2_uri(auth_uri):
        # Join without producing a double slash.
        if auth_uri.endswith('/'):
            return '%sec2tokens' % auth_uri
        return '%s/ec2tokens' % auth_uri

    def _get_signature(self, req):
        """
        Extract the signature from the request, this can be a get/post
        variable or for v4 also in a header called 'Authorization'
        - params['Signature'] == version 0,1,2,3
        - params['X-Amz-Signature'] == version 4
        - header 'Authorization' == version 4
        """
        sig = req.params.get('Signature') or req.params.get('X-Amz-Signature')
        if sig is None and 'Authorization' in req.headers:
            # v4 header form: "... Signature=<sig>, ..."
            auth_str = req.headers['Authorization']
            sig = auth_str.partition("Signature=")[2].split(',')[0]

        return sig

    def _get_access(self, req):
        """
        Extract the access key identifier, for v 0/1/2/3 this is passed
        as the AccessKeyId parameter, for version4 it is either and
        X-Amz-Credential parameter or a Credential= field in the
        'Authorization' header string
        """
        access = req.params.get('AWSAccessKeyId')
        if access is None:
            # v4 credential scope: "<access>/<date>/<region>/..."
            cred_param = req.params.get('X-Amz-Credential')
            if cred_param:
                access = cred_param.split("/")[0]

        if access is None and 'Authorization' in req.headers:
            auth_str = req.headers['Authorization']
            cred_str = auth_str.partition("Credential=")[2].split(',')[0]
            access = cred_str.split("/")[0]

        return access

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        """Middleware entry point: authorize against one or many clouds."""
        if not self._conf_get('multi_cloud'):
            return self._authorize(req, self._conf_get_auth_uri())
        else:
            # attempt to authorize for each configured allowed_auth_uris
            # until one is successful.
            # This is safe for the following reasons:
            # 1. AWSAccessKeyId is a randomly generated sequence
            # 2. No secret is transferred to validate a request
            last_failure = None
            for auth_uri in self._conf_get('allowed_auth_uris'):
                try:
                    logger.debug(_("Attempt authorize on %s") % auth_uri)
                    return self._authorize(req, auth_uri)
                except HeatAPIException as e:
                    logger.debug(_("Authorize failed: %s") % e.__class__)
                    last_failure = e
            # NOTE: when allowed_auth_uris is empty, last_failure is None
            # and a generic access-denied error is raised.
            raise last_failure or exception.HeatAccessDeniedError()

    def _authorize(self, req, auth_uri):
        """Validate the request's AWS credentials against keystone at
        `auth_uri`; on success inject token/tenant/role headers."""
        # Read request signature and access id.
        # If we find X-Auth-User in the headers we ignore a key error
        # here so that we can use both authentication methods.
        # Returning here just means the user didn't supply AWS
        # authentication and we'll let the app try native keystone next.
        logger.info(_("Checking AWS credentials.."))

        signature = self._get_signature(req)
        if not signature:
            if 'X-Auth-User' in req.headers:
                return self.application
            else:
                logger.info(_("No AWS Signature found."))
                raise exception.HeatIncompleteSignatureError()

        access = self._get_access(req)
        if not access:
            if 'X-Auth-User' in req.headers:
                return self.application
            else:
                logger.info(_("No AWSAccessKeyId/Authorization Credential"))
                raise exception.HeatMissingAuthenticationTokenError()

        logger.info(_("AWS credentials found, checking against keystone."))

        if not auth_uri:
            logger.error(_("Ec2Token authorization failed, no auth_uri "
                           "specified in config file"))
            raise exception.HeatInternalFailureError(_('Service '
                                                      'misconfigured'))
        # Make a copy of args for authentication and signature verification.
        auth_params = dict(req.params)
        # 'Signature' param Not part of authentication args
        auth_params.pop('Signature', None)

        # Authenticate the request.
        # AWS v4 authentication requires a hash of the body
        body_hash = hashlib.sha256(req.body).hexdigest()
        creds = {'ec2Credentials': {'access': access,
                                    'signature': signature,
                                    'host': req.host,
                                    'verb': req.method,
                                    'path': req.path,
                                    'params': auth_params,
                                    'headers': req.headers,
                                    'body_hash': body_hash
                                    }}
        creds_json = json.dumps(creds)
        headers = {'Content-Type': 'application/json'}

        keystone_ec2_uri = self._conf_get_keystone_ec2_uri(auth_uri)
        logger.info(_('Authenticating with %s') % keystone_ec2_uri)
        response = requests.post(keystone_ec2_uri, data=creds_json,
                                 headers=headers)
        result = response.json()
        try:
            token_id = result['access']['token']['id']
            tenant = result['access']['token']['tenant']['name']
            tenant_id = result['access']['token']['tenant']['id']
            logger.info(_("AWS authentication successful."))
        except (AttributeError, KeyError):
            logger.info(_("AWS authentication failure."))
            # Try to extract the reason for failure so we can return the
            # appropriate AWS error via raising an exception
            try:
                reason = result['error']['message']
            except KeyError:
                reason = None

            if reason == "EC2 access key not found.":
                raise exception.HeatInvalidClientTokenIdError()
            elif reason == "EC2 signature not supplied.":
                raise exception.HeatSignatureError()
            else:
                raise exception.HeatAccessDeniedError()

        # Authenticated!
        # Pass the validated credentials and keystone identity downstream
        # via request headers.
        ec2_creds = {'ec2Credentials': {'access': access,
                                        'signature': signature}}
        req.headers['X-Auth-EC2-Creds'] = json.dumps(ec2_creds)
        req.headers['X-Auth-Token'] = token_id
        req.headers['X-Tenant-Name'] = tenant
        req.headers['X-Tenant-Id'] = tenant_id
        req.headers['X-Auth-URL'] = auth_uri

        metadata = result['access'].get('metadata', {})
        roles = metadata.get('roles', [])
        req.headers['X-Roles'] = ','.join(roles)

        return self.application
def EC2Token_filter_factory(global_conf, **local_conf):
    """Factory method for paste.deploy.

    Merges local options over the global ones and returns a filter that
    wraps an app in EC2Token middleware.
    """
    conf = dict(global_conf)
    conf.update(local_conf)

    def filter(app):
        return EC2Token(app, conf)

    return filter
|
|
from __future__ import print_function
from collections import namedtuple, defaultdict
import copy
import os
import sys
from itertools import permutations, takewhile
import numpy as np
from llvmlite import ir as llvmir
import llvmlite.llvmpy.core as lc
from llvmlite.llvmpy.core import Type, Constant, LLVMException
import llvmlite.binding as ll
from numba import types, utils, cgutils, typing
from numba import _dynfunc, _helperlib
from numba.pythonapi import PythonAPI
from . import arrayobj, builtins, imputils
from .imputils import (user_function, user_generator,
builtin_registry, impl_ret_borrowed,
RegistryLoader)
from numba import datamodel
# Generic LLVM byte pointer type (i8*); CPython object pointers and
# untyped buffers are both represented with it.
GENERIC_POINTER = Type.pointer(Type.int(8))
PYOBJECT = GENERIC_POINTER
void_ptr = GENERIC_POINTER
class OverloadSelector(object):
    """
    An object matching an actual signature against a registry of formal
    signatures and choosing the best candidate, if any.

    In the current implementation:
    - a "signature" is a tuple of type classes or type instances
    - the "best candidate" is the most specific match
    """

    def __init__(self):
        # A list of (formal args tuple, value)
        self.versions = []
        # Memoizes find() results keyed by actual signature.
        self._cache = {}

    def find(self, sig):
        """Return the best-matching implementation for `sig` (cached)."""
        # NOTE: uses None as the cache-miss sentinel, so a registered
        # implementation that is literally None would be recomputed.
        out = self._cache.get(sig)
        if out is None:
            out = self._find(sig)
            self._cache[sig] = out
        return out

    def _find(self, sig):
        """Uncached lookup: raises NotImplementedError when nothing matches."""
        candidates = self._select_compatible(sig)
        if candidates:
            return candidates[self._best_signature(candidates)]
        else:
            raise NotImplementedError(self, sig)

    def _select_compatible(self, sig):
        """
        Select all compatible signatures and their implementation.
        """
        out = {}
        for ver_sig, impl in self.versions:
            if self._match_arglist(ver_sig, sig):
                out[ver_sig] = impl
        return out

    def _best_signature(self, candidates):
        """
        Returns the best signature out of the candidates
        """
        ordered, genericity = self._sort_signatures(candidates)
        # check for ambiguous signatures
        if len(ordered) > 1:
            firstscore = genericity[ordered[0]]
            same = list(takewhile(lambda x: genericity[x] == firstscore,
                                  ordered))
            if len(same) > 1:
                msg = ["{n} ambiguous signatures".format(n=len(same))]
                for sig in same:
                    msg += ["{0} => {1}".format(sig, candidates[sig])]
                raise TypeError('\n'.join(msg))
        return ordered[0]

    def _sort_signatures(self, candidates):
        """
        Sort signatures in ascending level of genericity.

        Returns a 2-tuple:

            * ordered list of signatures
            * dictionary containing genericity scores
        """
        # score by genericity
        genericity = defaultdict(int)
        for this, other in permutations(candidates.keys(), r=2):
            matched = self._match_arglist(formal_args=this, actual_args=other)
            if matched:
                # genericity score +1 for every another compatible signature
                genericity[this] += 1
        # order candidates in ascending level of genericity
        ordered = sorted(candidates.keys(), key=lambda x: genericity[x])
        return ordered, genericity

    def _match_arglist(self, formal_args, actual_args):
        """
        Returns True if the the signature is "matching".
        A formal signature is "matching" if the actual signature matches exactly
        or if the formal signature is a compatible generic signature.
        """
        # normalize VarArg: expand the trailing VarArg into as many
        # copies of its dtype as the actual call supplies
        if formal_args and isinstance(formal_args[-1], types.VarArg):
            ndiff = len(actual_args) - len(formal_args) + 1
            formal_args = formal_args[:-1] + (formal_args[-1].dtype,) * ndiff

        if len(formal_args) != len(actual_args):
            return False

        for formal, actual in zip(formal_args, actual_args):
            if not self._match(formal, actual):
                return False

        return True
def _match(self, formal, actual):
if formal == actual:
# formal argument matches actual arguments
return True
elif types.Any == formal:
# formal argument is any
return True
elif isinstance(formal, type) and issubclass(formal, types.Type):
if isinstance(actual, type) and issubclass(actual, formal):
# formal arg is a type class and actual arg is a subclass
return True
elif isinstance(actual, formal):
# formal arg is a type class of which actual arg is an instance
return True
def append(self, value, sig):
"""
Add a formal signature and its associated value.
"""
assert isinstance(sig, tuple), (value, sig)
self.versions.append((sig, value))
self._cache.clear()
@utils.runonce
def _load_global_helpers():
    """
    Execute once to install special symbols into the LLVM symbol table.
    """
    # Py_None is a singleton at a fixed address; expose it under its C name.
    ll.add_symbol("_Py_NoneStruct", id(None))
    # Register the Numba C helper functions under a "numba_" prefix.
    for helper_table in (_helperlib.c_helpers, _dynfunc.c_helpers):
        for helper_name, helper_addr in helper_table.items():
            ll.add_symbol("numba_" + helper_name, helper_addr)
    # Register the Numpy math helpers (npy_XXX) under their own names.
    for sym_name, sym_addr in _helperlib.npymath_exports.items():
        ll.add_symbol(sym_name, sym_addr)
    # Expose every built-in exception class as PyExc_<Name>.
    for value in utils.builtins.__dict__.values():
        if isinstance(value, type) and issubclass(value, BaseException):
            ll.add_symbol("PyExc_%s" % (value.__name__), id(value))
class BaseContext(object):
    """
    Base class for all target (lowering) contexts.  Holds the registries
    of lowering implementations (functions, getattrs/setattrs, casts,
    constants) and provides the common code-generation utilities built
    on top of llvmlite.

    Notes on Structure
    ------------------
    Most objects are lowered as plain-old-data structure in the generated
    llvm. They are passed around by reference (a pointer to the structure).
    Only POD structure can live across function boundaries by copying the
    data.
    """
    # True if the target requires strict alignment
    # Causes exception to be raised if the record members are not aligned.
    strict_alignment = False
    # Use default mangler (no specific requirement)
    mangler = None
    # Force powi implementation as math.pow call
    implement_powi_as_math_call = False
    implement_pow_as_math_call = False
    # Bound checking
    enable_boundcheck = False
    # NRT (Numba runtime reference counting) — see the nrt_* methods below
    enable_nrt = False
    # PYCC (ahead-of-time compilation mode)
    aot_mode = False
    # Error model for various operations (only FP exceptions currently)
    error_model = None
    def __init__(self, typing_context):
        """
        *typing_context* is the typing context this lowering context is
        paired with (used by refresh(), cast resolution, etc.).
        """
        _load_global_helpers()
        self.address_size = utils.MACHINE_BITS
        self.typing_context = typing_context
        # A mapping of installed registries to their loaders
        self._registries = {}
        # Declarations loaded from registries and other sources
        self._defns = defaultdict(OverloadSelector)
        self._getattrs = defaultdict(OverloadSelector)
        self._setattrs = defaultdict(OverloadSelector)
        self._casts = OverloadSelector()
        self._get_constants = OverloadSelector()
        # Other declarations
        self._generators = {}
        self.special_ops = {}
        self.cached_internal_func = {}
        self._pid = None
        self.data_model_manager = datamodel.default_manager
        # Initialize
        self.init()
    def init(self):
        """
        For subclasses to add initializer
        """
    def refresh(self):
        """
        Refresh context with new declarations from known registries.
        Useful for third-party extensions.
        """
        # Populate built-in registry
        from . import (arraymath, enumimpl, iterators, linalg, numbers,
                       optional, rangeobj, slicing, smartarray, tupleobj)
        try:
            from . import npdatetime
        except NotImplementedError:
            pass
        self.install_registry(builtin_registry)
        self.load_additional_registries()
        # Also refresh typing context, since @overload declarations can
        # affect it.
        self.typing_context.refresh()
    def load_additional_registries(self):
        """
        Load target-specific registries. Can be overriden by subclasses.
        """
    def get_arg_packer(self, fe_args):
        """Return an ArgPacker for the given frontend argument types."""
        return datamodel.ArgPacker(self.data_model_manager, fe_args)
    def get_data_packer(self, fe_types):
        """Return a DataPacker for the given frontend types."""
        return datamodel.DataPacker(self.data_model_manager, fe_types)
    @property
    def target_data(self):
        # The LLVM target data layout; must be provided by subclasses.
        raise NotImplementedError
    def subtarget(self, **kws):
        """
        Return a shallow copy of this context with the given existing
        attributes overridden.  Raises NameError on an unknown attribute.
        """
        obj = copy.copy(self)  # shallow copy
        for k, v in kws.items():
            if not hasattr(obj, k):
                raise NameError("unknown option {0!r}".format(k))
            setattr(obj, k, v)
        if obj.codegen() is not self.codegen():
            # We can't share functions across different codegens
            obj.cached_internal_func = {}
        return obj
    def install_registry(self, registry):
        """
        Install a *registry* (a imputils.Registry instance) of function
        and attribute implementations.
        """
        try:
            loader = self._registries[registry]
        except KeyError:
            # First time we see this registry: remember its loader so that
            # later refresh() calls only pull in new registrations.
            loader = RegistryLoader(registry)
            self._registries[registry] = loader
        self.insert_func_defn(loader.new_registrations('functions'))
        self._insert_getattr_defn(loader.new_registrations('getattrs'))
        self._insert_setattr_defn(loader.new_registrations('setattrs'))
        self._insert_cast_defn(loader.new_registrations('casts'))
        self._insert_get_constant_defn(loader.new_registrations('constants'))
    def insert_func_defn(self, defns):
        # Register function implementations keyed by callee.
        for impl, func, sig in defns:
            self._defns[func].append(impl, sig)
    def _insert_getattr_defn(self, defns):
        # Register attribute-get implementations keyed by attribute name.
        for impl, attr, sig in defns:
            self._getattrs[attr].append(impl, sig)
    def _insert_setattr_defn(self, defns):
        # Register attribute-set implementations keyed by attribute name.
        for impl, attr, sig in defns:
            self._setattrs[attr].append(impl, sig)
    def _insert_cast_defn(self, defns):
        # Register cast implementations, keyed by (fromty, toty) signature.
        for impl, sig in defns:
            self._casts.append(impl, sig)
    def _insert_get_constant_defn(self, defns):
        # Register constant-lowering implementations.
        for impl, sig in defns:
            self._get_constants.append(impl, sig)
    def insert_user_function(self, func, fndesc, libs=()):
        """Register a compiled user function under key *func*."""
        impl = user_function(fndesc, libs)
        self._defns[func].append(impl, impl.signature)
    def add_user_function(self, func, fndesc, libs=()):
        """
        Add another signature for an already-registered user function.
        Raises KeyError if *func* was never registered.
        """
        if func not in self._defns:
            msg = "{func} is not a registered user function"
            raise KeyError(msg.format(func=func))
        impl = user_function(fndesc, libs)
        self._defns[func].append(impl, impl.signature)
    def insert_generator(self, genty, gendesc, libs=()):
        """Register the descriptor and implementation for a generator type."""
        assert isinstance(genty, types.Generator)
        impl = user_generator(gendesc, libs)
        self._generators[genty] = gendesc, impl
    def remove_user_function(self, func):
        """
        Remove user function *func*.
        KeyError is raised if the function isn't known to us.
        """
        del self._defns[func]
    def get_external_function_type(self, fndesc):
        """Build the LLVM function type for an external function."""
        argtypes = [self.get_argument_type(aty)
                    for aty in fndesc.argtypes]
        # don't wrap in pointer
        restype = self.get_argument_type(fndesc.restype)
        fnty = Type.function(restype, argtypes)
        return fnty
    def declare_function(self, module, fndesc):
        """Declare (or fetch) the function for *fndesc* in *module*."""
        fnty = self.call_conv.get_function_type(fndesc.restype, fndesc.argtypes)
        fn = module.get_or_insert_function(fnty, name=fndesc.mangled_name)
        self.call_conv.decorate_function(fn, fndesc.args, fndesc.argtypes)
        if fndesc.inline:
            fn.attributes.add('alwaysinline')
        return fn
    def declare_external_function(self, module, fndesc):
        """Declare an external function in *module* and name its arguments."""
        fnty = self.get_external_function_type(fndesc)
        fn = module.get_or_insert_function(fnty, name=fndesc.mangled_name)
        assert fn.is_declaration
        for ak, av in zip(fndesc.args, fn.args):
            av.name = "arg.%s" % ak
        return fn
    def insert_const_string(self, mod, string):
        """
        Insert constant *string* (a str object) into module *mod*.
        """
        stringtype = GENERIC_POINTER
        name = ".const.%s" % string
        # NUL-terminate for C consumers.
        text = cgutils.make_bytearray(string.encode("utf-8") + b"\x00")
        gv = self.insert_unique_const(mod, name, text)
        return Constant.bitcast(gv, stringtype)
    def insert_unique_const(self, mod, name, val):
        """
        Insert a unique internal constant named *name*, with LLVM value
        *val*, into module *mod*.
        If a global of that name already exists, it is returned instead.
        """
        gv = mod.get_global(name)
        if gv is not None:
            return gv
        else:
            return cgutils.global_constant(mod, name, val)
    def get_argument_type(self, ty):
        """LLVM type used when *ty* is passed as an argument."""
        return self.data_model_manager[ty].get_argument_type()
    def get_return_type(self, ty):
        """LLVM type used when *ty* is returned."""
        return self.data_model_manager[ty].get_return_type()
    def get_data_type(self, ty):
        """
        Get a LLVM data representation of the Numba type *ty* that is safe
        for storage. Record data are stored as byte array.
        The return value is a llvmlite.ir.Type object, or None if the type
        is an opaque pointer (???).
        """
        return self.data_model_manager[ty].get_data_type()
    def get_value_type(self, ty):
        """LLVM type of a live (in-register) value of Numba type *ty*."""
        return self.data_model_manager[ty].get_value_type()
    def pack_value(self, builder, ty, value, ptr, align=None):
        """
        Pack value into the array storage at *ptr*.
        If *align* is given, it is the guaranteed alignment for *ptr*
        (by default, the standard ABI alignment).
        """
        dataval = self.data_model_manager[ty].as_data(builder, value)
        builder.store(dataval, ptr, align=align)
    def unpack_value(self, builder, ty, ptr, align=None):
        """
        Unpack value from the array storage at *ptr*.
        If *align* is given, it is the guaranteed alignment for *ptr*
        (by default, the standard ABI alignment).
        """
        dm = self.data_model_manager[ty]
        return dm.load_from_data_pointer(builder, ptr, align)
    def get_constant_generic(self, builder, ty, val):
        """
        Return a LLVM constant representing value *val* of Numba type *ty*.
        """
        try:
            impl = self._get_constants.find((ty,))
            return impl(self, builder, ty, val)
        except NotImplementedError:
            raise NotImplementedError("cannot lower constant of type '%s'" % (ty,))
    def get_constant(self, ty, val):
        """
        Same as get_constant_generic(), but without specifying *builder*.
        Works only for simple types.
        """
        # HACK: pass builder=None to preserve get_constant() API
        return self.get_constant_generic(None, ty, val)
    def get_constant_undef(self, ty):
        """Return an LLVM 'undef' value of the value type for *ty*."""
        lty = self.get_value_type(ty)
        return Constant.undef(lty)
    def get_constant_null(self, ty):
        """Return an LLVM zero-initializer of the value type for *ty*."""
        lty = self.get_value_type(ty)
        return Constant.null(lty)
    def get_function(self, fn, sig):
        """
        Return the implementation of function *fn* for signature *sig*.
        The return value is a callable with the signature (builder, args).
        """
        sig = sig.as_function()
        if isinstance(fn, (types.Function, types.BoundFunction,
                           types.Dispatcher)):
            key = fn.get_impl_key(sig)
            overloads = self._defns[key]
        else:
            key = fn
            overloads = self._defns[key]
        try:
            return _wrap_impl(overloads.find(sig.args), self, sig)
        except NotImplementedError:
            pass
        if isinstance(fn, types.Type):
            # It's a type instance => try to find a definition for the type class
            try:
                return self.get_function(type(fn), sig)
            except NotImplementedError:
                # Raise exception for the type instance, for a better error message
                pass
        raise NotImplementedError("No definition for lowering %s%s" % (key, sig))
    def get_generator_desc(self, genty):
        """
        Return the function descriptor registered for generator type *genty*.
        """
        return self._generators[genty][0]
    def get_generator_impl(self, genty):
        """
        Return the implementation registered for generator type *genty*.
        """
        return self._generators[genty][1]
    def get_bound_function(self, builder, obj, ty):
        """Return *obj* itself as the bound-function value of type *ty*."""
        assert self.get_value_type(ty) == obj.type
        return obj
    def get_getattr(self, typ, attr):
        """
        Get the getattr() implementation for the given type and attribute name.
        The return value is a callable with the signature
        (context, builder, typ, val, attr).
        """
        if isinstance(typ, types.Module):
            # Implement getattr for module-level globals.
            # We are treating them as constants.
            # XXX We shouldn't have to retype this
            attrty = self.typing_context.resolve_module_constants(typ, attr)
            if attrty is None or isinstance(attrty, types.Dummy):
                # No implementation required for dummies (functions, modules...),
                # which are dealt with later
                return None
            else:
                pyval = getattr(typ.pymod, attr)
                llval = self.get_constant(attrty, pyval)
                def imp(context, builder, typ, val, attr):
                    return impl_ret_borrowed(context, builder, attrty, llval)
                return imp
        # Lookup specific getattr implementation for this type and attribute
        overloads = self._getattrs[attr]
        try:
            return overloads.find((typ,))
        except NotImplementedError:
            pass
        # Lookup generic getattr implementation for this type
        overloads = self._getattrs[None]
        try:
            return overloads.find((typ,))
        except NotImplementedError:
            pass
        raise NotImplementedError("No definition for lowering %s.%s" % (typ, attr))
    def get_setattr(self, attr, sig):
        """
        Get the setattr() implementation for the given attribute name
        and signature.
        The return value is a callable with the signature (builder, args).
        """
        assert len(sig.args) == 2
        typ = sig.args[0]
        valty = sig.args[1]
        def wrap_setattr(impl):
            # Capture *sig* and *attr* so callers only pass (builder, args).
            def wrapped(builder, args):
                return impl(self, builder, sig, args, attr)
            return wrapped
        # Lookup specific setattr implementation for this type and attribute
        overloads = self._setattrs[attr]
        try:
            return wrap_setattr(overloads.find((typ, valty)))
        except NotImplementedError:
            pass
        # Lookup generic setattr implementation for this type
        overloads = self._setattrs[None]
        try:
            return wrap_setattr(overloads.find((typ, valty)))
        except NotImplementedError:
            pass
        raise NotImplementedError("No definition for lowering %s.%s = %s"
                                  % (typ, attr, valty))
    def get_argument_value(self, builder, ty, val):
        """
        Argument representation to local value representation
        """
        return self.data_model_manager[ty].from_argument(builder, val)
    def get_returned_value(self, builder, ty, val):
        """
        Return value representation to local value representation
        """
        return self.data_model_manager[ty].from_return(builder, val)
    def get_return_value(self, builder, ty, val):
        """
        Local value representation to return type representation
        """
        return self.data_model_manager[ty].as_return(builder, val)
    def get_value_as_argument(self, builder, ty, val):
        """Prepare local value representation as argument type representation
        """
        return self.data_model_manager[ty].as_argument(builder, val)
    def get_value_as_data(self, builder, ty, val):
        """Local value representation to storage representation."""
        return self.data_model_manager[ty].as_data(builder, val)
    def get_data_as_value(self, builder, ty, val):
        """Storage representation to local value representation."""
        return self.data_model_manager[ty].from_data(builder, val)
    def pair_first(self, builder, val, ty):
        """
        Extract the first element of a heterogenous pair.
        """
        pair = self.make_helper(builder, ty, val)
        return pair.first
    def pair_second(self, builder, val, ty):
        """
        Extract the second element of a heterogenous pair.
        """
        pair = self.make_helper(builder, ty, val)
        return pair.second
    def cast(self, builder, val, fromty, toty):
        """
        Cast a value of type *fromty* to type *toty*.
        This implements implicit conversions as can happen due to the
        granularity of the Numba type system, or lax Python semantics.
        """
        if fromty == toty or toty == types.Any:
            return val
        try:
            impl = self._casts.find((fromty, toty))
            return impl(self, builder, fromty, toty, val)
        except NotImplementedError:
            raise NotImplementedError(
                "Cannot cast %s to %s: %s" % (fromty, toty, val))
    def generic_compare(self, builder, key, argtypes, args):
        """
        Compare the given LLVM values of the given Numba types using
        the comparison *key* (e.g. '=='). The values are first cast to
        a common safe conversion type.
        """
        at, bt = argtypes
        av, bv = args
        ty = self.typing_context.unify_types(at, bt)
        assert ty is not None
        cav = self.cast(builder, av, at, ty)
        cbv = self.cast(builder, bv, bt, ty)
        cmpsig = typing.signature(types.boolean, ty, ty)
        cmpfunc = self.get_function(key, cmpsig)
        return cmpfunc(builder, (cav, cbv))
    def make_optional_none(self, builder, valtype):
        """Build an Optional(valtype) value holding None (valid flag unset)."""
        optval = self.make_helper(builder, types.Optional(valtype))
        optval.valid = cgutils.false_bit
        return optval._getvalue()
    def make_optional_value(self, builder, valtype, value):
        """Build an Optional(valtype) value wrapping *value* (valid flag set)."""
        optval = self.make_helper(builder, types.Optional(valtype))
        optval.valid = cgutils.true_bit
        optval.data = value
        return optval._getvalue()
    def is_true(self, builder, typ, val):
        """
        Return the truth value of a value of the given Numba type.
        """
        impl = self.get_function(bool, typing.signature(types.boolean, typ))
        return impl(builder, (val,))
    def get_c_value(self, builder, typ, name, dllimport=False):
        """
        Get a global value through its C-accessible *name*, with the given
        LLVM type.
        If *dllimport* is true, the symbol will be marked as imported
        from a DLL (necessary for AOT compilation under Windows).
        """
        module = builder.function.module
        try:
            gv = module.get_global_variable_named(name)
        except LLVMException:
            gv = module.add_global_variable(typ, name)
            if dllimport and self.aot_mode and sys.platform == 'win32':
                gv.storage_class = "dllimport"
        return gv
    def call_external_function(self, builder, callee, argtys, args):
        """Call *callee*, first converting *args* to argument representation."""
        args = [self.get_value_as_argument(builder, ty, arg)
                for ty, arg in zip(argtys, args)]
        retval = builder.call(callee, args)
        return retval
    def get_function_pointer_type(self, typ):
        """LLVM type of a function pointer of Numba type *typ*."""
        return self.data_model_manager[typ].get_data_type()
    def call_function_pointer(self, builder, funcptr, args, cconv=None):
        """Emit an indirect call through *funcptr*."""
        return builder.call(funcptr, args, cconv=cconv)
    def print_string(self, builder, text):
        """Emit a call to the C puts() with the given i8* *text*."""
        mod = builder.module
        cstring = GENERIC_POINTER
        fnty = Type.function(Type.int(), [cstring])
        puts = mod.get_or_insert_function(fnty, "puts")
        return builder.call(puts, [text])
    def debug_print(self, builder, text):
        """Emit code printing str(*text*) at runtime (via puts)."""
        mod = builder.module
        cstr = self.insert_const_string(mod, str(text))
        self.print_string(builder, cstr)
    def printf(self, builder, format_string, *args):
        """Emit a call to C printf(); *format_string* may be a Python str
        or an already-lowered LLVM value."""
        mod = builder.module
        if isinstance(format_string, str):
            cstr = self.insert_const_string(mod, format_string)
        else:
            cstr = format_string
        fnty = Type.function(Type.int(), (GENERIC_POINTER,), var_arg=True)
        fn = mod.get_or_insert_function(fnty, "printf")
        return builder.call(fn, (cstr,) + tuple(args))
    def get_struct_type(self, struct):
        """
        Get the LLVM struct type for the given Structure class *struct*.
        """
        fields = [self.get_value_type(v) for _, v in struct._fields]
        return Type.struct(fields)
    def get_dummy_value(self):
        """Null placeholder value for types with no runtime representation."""
        return Constant.null(self.get_dummy_type())
    def get_dummy_type(self):
        """Placeholder LLVM type (generic i8*)."""
        return GENERIC_POINTER
    def compile_subroutine_no_cache(self, builder, impl, sig, locals={}, flags=None):
        """
        Invoke the compiler to compile a function to be used inside a
        nopython function, but without generating code to call that
        function.
        Note this context's flags are not inherited.
        """
        # NOTE(review): *locals* has a mutable default; it appears to be
        # only forwarded, never mutated here — confirm in compile_internal.
        # Compile
        from numba import compiler
        codegen = self.codegen()
        library = codegen.create_library(impl.__name__)
        if flags is None:
            flags = compiler.Flags()
        flags.set('no_compile')
        flags.set('no_cpython_wrapper')
        cres = compiler.compile_internal(self.typing_context, self,
                                         library,
                                         impl, sig.args,
                                         sig.return_type, flags,
                                         locals=locals)
        # Allow inlining the function inside callers.
        codegen.add_linking_library(cres.library)
        return cres
    def compile_subroutine(self, builder, impl, sig, locals={}):
        """
        Compile the function *impl* for the given *sig* (in nopython mode).
        Return a placeholder object that's callable from another Numba
        function.
        """
        cache_key = (impl.__code__, sig, type(self.error_model))
        if impl.__closure__:
            # XXX This obviously won't work if a cell's value is
            # unhashable.
            cache_key += tuple(c.cell_contents for c in impl.__closure__)
        ty = self.cached_internal_func.get(cache_key)
        if ty is None:
            cres = self.compile_subroutine_no_cache(builder, impl, sig,
                                                    locals=locals)
            ty = types.NumbaFunction(cres.fndesc, sig)
            self.cached_internal_func[cache_key] = ty
        return ty
    def compile_internal(self, builder, impl, sig, args, locals={}):
        """
        Like compile_subroutine(), but also call the function with the given
        *args*.
        """
        ty = self.compile_subroutine(builder, impl, sig, locals)
        return self.call_internal(builder, ty.fndesc, sig, args)
    def call_internal(self, builder, fndesc, sig, args):
        """
        Given the function descriptor of an internally compiled function,
        emit a call to that function with the given arguments.
        """
        # Add call to the generated function
        llvm_mod = builder.module
        fn = self.declare_function(llvm_mod, fndesc)
        status, res = self.call_conv.call_function(builder, fn, sig.return_type,
                                                   sig.args, args)
        with cgutils.if_unlikely(builder, status.is_error):
            self.call_conv.return_status_propagate(builder, status)
        return res
    def get_executable(self, func, fndesc):
        # Must be provided by subclasses.
        raise NotImplementedError
    def get_python_api(self, builder):
        """Return a PythonAPI helper bound to this context and *builder*."""
        return PythonAPI(self, builder)
    def sentry_record_alignment(self, rectyp, attr):
        """
        Assumes offset starts from a properly aligned location
        """
        if self.strict_alignment:
            offset = rectyp.offset(attr)
            elemty = rectyp.typeof(attr)
            align = self.get_abi_alignment(self.get_data_type(elemty))
            if offset % align:
                msg = "{rec}.{attr} of type {type} is not aligned".format(
                    rec=rectyp, attr=attr, type=elemty)
                raise TypeError(msg)
    def get_helper_class(self, typ, kind='value'):
        """
        Get a helper class for the given *typ*.
        """
        # XXX handle all types: complex, array, etc.
        # XXX should it be a method on the model instead? this would allow a default kind...
        return cgutils.create_struct_proxy(typ, kind)
    def _make_helper(self, builder, typ, value=None, ref=None, kind='value'):
        # Common construction path for make_helper()/make_data_helper().
        cls = self.get_helper_class(typ, kind)
        return cls(self, builder, value=value, ref=ref)
    def make_helper(self, builder, typ, value=None, ref=None):
        """
        Get a helper object to access the *typ*'s members,
        for the given value or reference.
        """
        return self._make_helper(builder, typ, value, ref, kind='value')
    def make_data_helper(self, builder, typ, ref=None):
        """
        As make_helper(), but considers the value as stored in memory,
        rather than a live value.
        """
        return self._make_helper(builder, typ, ref=ref, kind='data')
    def make_array(self, typ):
        """Return the struct-proxy class for array type *typ*."""
        return arrayobj.make_array(typ)
    def populate_array(self, arr, **kwargs):
        """
        Populate array structure.
        """
        return arrayobj.populate_array(arr, **kwargs)
    def make_complex(self, builder, typ, value=None):
        """
        Get a helper object to access the given complex numbers' members.
        """
        assert isinstance(typ, types.Complex), typ
        return self.make_helper(builder, typ, value)
    def make_tuple(self, builder, typ, values):
        """
        Create a tuple of the given *typ* containing the *values*.
        """
        tup = self.get_constant_undef(typ)
        for i, val in enumerate(values):
            tup = builder.insert_value(tup, val, i)
        return tup
    def make_constant_array(self, builder, typ, ary):
        """
        Create an array structure reifying the given constant array.
        A low-level contiguous array constant is created in the LLVM IR.
        """
        assert typ.layout == 'C'                # assumed in typeinfer.py
        # Handle data: reify the flattened array in "C" order as a
        # global array of bytes.
        flat = ary.flatten()
        # Note: we use `bytearray(flat.data)` instead of `bytearray(flat)` to
        # workaround issue #1850 which is due to numpy issue #3147
        consts = Constant.array(Type.int(8), bytearray(flat.data))
        data = cgutils.global_constant(builder, ".const.array.data", consts)
        # Handle shape
        llintp = self.get_value_type(types.intp)
        shapevals = [self.get_constant(types.intp, s) for s in ary.shape]
        cshape = Constant.array(llintp, shapevals)
        # Handle strides
        if ary.ndim > 0:
            # Use strides of the equivalent C-contiguous array.
            contig = np.ascontiguousarray(ary)
            stridevals = [self.get_constant(types.intp, s) for s in contig.strides]
        else:
            stridevals = []
        cstrides = Constant.array(llintp, stridevals)
        # Create array structure
        cary = self.make_array(typ)(self, builder)
        # Keep the original ndarray reachable via its address as parent.
        rt_addr = self.get_constant(types.uintp, id(ary)).inttoptr(
            self.get_value_type(types.pyobject))
        intp_itemsize = self.get_constant(types.intp, ary.dtype.itemsize)
        self.populate_array(cary,
                            data=builder.bitcast(data, cary.data.type),
                            shape=cshape,
                            strides=cstrides,
                            itemsize=intp_itemsize,
                            parent=rt_addr,
                            meminfo=None)
        return cary._getvalue()
    def get_abi_sizeof(self, ty):
        """
        Get the ABI size of LLVM type *ty*.
        """
        if isinstance(ty, llvmir.Type):
            return ty.get_abi_size(self.target_data)
        # XXX this one unused?
        return self.target_data.get_abi_size(ty)
    def get_abi_alignment(self, ty):
        """
        Get the ABI alignment of LLVM type *ty*.
        """
        assert isinstance(ty, llvmir.Type), "Expected LLVM type"
        return ty.get_abi_alignment(self.target_data)
    def get_preferred_array_alignment(context, ty):
        """
        Get preferred array alignment for Numba type *ty*.
        """
        # NOTE: the first parameter is named *context* but plays the role
        # of *self* for this instance method.
        # AVX prefers 32-byte alignment
        return 32
    def post_lowering(self, mod, library):
        """Run target specific post-lowering transformation here.
        """
    def create_module(self, name):
        """Create a LLVM module
        """
        return lc.Module(name)
    def _require_nrt(self):
        # Guard: the nrt_* methods below are only valid with NRT enabled.
        if not self.enable_nrt:
            raise RuntimeError("Require NRT")
    def nrt_allocate(self, builder, size):
        """
        Low-level allocate a new memory area of `size` bytes.
        """
        self._require_nrt()
        mod = builder.module
        fnty = llvmir.FunctionType(void_ptr,
                                   [self.get_value_type(types.intp)])
        fn = mod.get_or_insert_function(fnty, name="NRT_Allocate")
        fn.return_value.add_attribute("noalias")
        return builder.call(fn, [size])
    def nrt_free(self, builder, ptr):
        """
        Low-level free a memory area allocated with nrt_allocate().
        """
        self._require_nrt()
        mod = builder.module
        fnty = llvmir.FunctionType(llvmir.VoidType(), [void_ptr])
        fn = mod.get_or_insert_function(fnty, name="NRT_Free")
        return builder.call(fn, [ptr])
    def nrt_meminfo_alloc(self, builder, size):
        """
        Allocate a new MemInfo with a data payload of `size` bytes.
        A pointer to the MemInfo is returned.
        """
        self._require_nrt()
        mod = builder.module
        fnty = llvmir.FunctionType(void_ptr,
                                   [self.get_value_type(types.intp)])
        fn = mod.get_or_insert_function(fnty, name="NRT_MemInfo_alloc_safe")
        fn.return_value.add_attribute("noalias")
        return builder.call(fn, [size])
    def nrt_meminfo_alloc_dtor(self, builder, size, dtor):
        """
        As nrt_meminfo_alloc(), but also registering destructor *dtor*
        (passed as a void pointer) for the payload.
        """
        self._require_nrt()
        mod = builder.module
        ll_void_ptr = self.get_value_type(types.voidptr)
        fnty = llvmir.FunctionType(llvmir.IntType(8).as_pointer(),
                                   [self.get_value_type(types.intp),
                                    ll_void_ptr])
        fn = mod.get_or_insert_function(fnty,
                                        name="NRT_MemInfo_alloc_dtor_safe")
        fn.return_value.add_attribute("noalias")
        return builder.call(fn, [size, builder.bitcast(dtor, ll_void_ptr)])
    def nrt_meminfo_alloc_aligned(self, builder, size, align):
        """
        Allocate a new MemInfo with an aligned data payload of `size` bytes.
        The data pointer is aligned to `align` bytes.  `align` can be either
        a Python int or a LLVM uint32 value.
        A pointer to the MemInfo is returned.
        """
        self._require_nrt()
        mod = builder.module
        intp = self.get_value_type(types.intp)
        u32 = self.get_value_type(types.uint32)
        fnty = llvmir.FunctionType(void_ptr, [intp, u32])
        fn = mod.get_or_insert_function(fnty,
                                        name="NRT_MemInfo_alloc_safe_aligned")
        fn.return_value.add_attribute("noalias")
        if isinstance(align, int):
            align = self.get_constant(types.uint32, align)
        else:
            assert align.type == u32, "align must be a uint32"
        return builder.call(fn, [size, align])
    def nrt_meminfo_new_varsize(self, builder, size):
        """
        Allocate a MemInfo pointing to a variable-sized data area.  The area
        is separately allocated (i.e. two allocations are made) so that
        re-allocating it doesn't change the MemInfo's address.
        A pointer to the MemInfo is returned.
        """
        self._require_nrt()
        mod = builder.module
        fnty = llvmir.FunctionType(void_ptr,
                                   [self.get_value_type(types.intp)])
        fn = mod.get_or_insert_function(fnty, name="NRT_MemInfo_new_varsize")
        fn.return_value.add_attribute("noalias")
        return builder.call(fn, [size])
    def nrt_meminfo_varsize_alloc(self, builder, meminfo, size):
        """
        Allocate a new data area for a MemInfo created by nrt_meminfo_new_varsize().
        The new data pointer is returned, for convenience.
        Contrary to realloc(), this always allocates a new area and doesn't
        copy the old data.  This is useful if resizing a container needs
        more than simply copying the data area (e.g. for hash tables).
        The old pointer will have to be freed with nrt_meminfo_varsize_free().
        """
        return self._call_nrt_varsize_alloc(builder, meminfo, size,
                                            "NRT_MemInfo_varsize_alloc")
    def nrt_meminfo_varsize_realloc(self, builder, meminfo, size):
        """
        Reallocate a data area allocated by nrt_meminfo_new_varsize().
        The new data pointer is returned, for convenience.
        """
        return self._call_nrt_varsize_alloc(builder, meminfo, size,
                                            "NRT_MemInfo_varsize_realloc")
    def nrt_meminfo_varsize_free(self, builder, meminfo, ptr):
        """
        Free a memory area allocated for a NRT varsize object.
        Note this does *not* free the NRT object itself!
        """
        self._require_nrt()
        mod = builder.module
        fnty = llvmir.FunctionType(llvmir.VoidType(),
                                   [void_ptr, void_ptr])
        fn = mod.get_or_insert_function(fnty, name="NRT_MemInfo_varsize_free")
        return builder.call(fn, (meminfo, ptr))
    def _call_nrt_varsize_alloc(self, builder, meminfo, size, funcname):
        # Shared body for nrt_meminfo_varsize_alloc/realloc.
        self._require_nrt()
        mod = builder.module
        fnty = llvmir.FunctionType(void_ptr,
                                   [void_ptr, self.get_value_type(types.intp)])
        fn = mod.get_or_insert_function(fnty, name=funcname)
        fn.return_value.add_attribute("noalias")
        return builder.call(fn, [meminfo, size])
    def nrt_meminfo_data(self, builder, meminfo):
        """
        Given a MemInfo pointer, return a pointer to the allocated data
        managed by it.  This works for MemInfos allocated with all the
        above methods.
        """
        self._require_nrt()
        from numba.runtime.atomicops import meminfo_data_ty
        mod = builder.module
        fn = mod.get_or_insert_function(meminfo_data_ty,
                                        name="NRT_MemInfo_data_fast")
        return builder.call(fn, [meminfo])
    def _call_nrt_incref_decref(self, builder, root_type, typ, value,
                                funcname, getters=()):
        """
        Recursively apply *funcname* (NRT_incref / NRT_decref) to *value*
        and every NRT-managed member reachable through its data model.
        *getters* is the chain of accessors leading to the current member.
        """
        self._require_nrt()
        from numba.runtime.atomicops import incref_decref_ty
        data_model = self.data_model_manager[typ]
        members = data_model.traverse(builder)
        for mtyp, getter in members:
            self._call_nrt_incref_decref(builder, root_type, mtyp, value,
                                         funcname, getters + (getter,))
        if data_model.has_nrt_meminfo():
            # Call the chain of getters to compute the member value
            for getter in getters:
                value = getter(value)
            try:
                meminfo = data_model.get_nrt_meminfo(builder, value)
            except NotImplementedError as e:
                raise NotImplementedError("%s: %s" % (root_type, str(e)))
            assert meminfo is not None  # since has_nrt_meminfo()
            mod = builder.module
            fn = mod.get_or_insert_function(incref_decref_ty, name=funcname)
            # XXX "nonnull" causes a crash in test_dyn_array: can this
            # function be called with a NULL pointer?
            fn.args[0].add_attribute("noalias")
            fn.args[0].add_attribute("nocapture")
            builder.call(fn, [meminfo])
    def nrt_incref(self, builder, typ, value):
        """
        Recursively incref the given *value* and its members.
        """
        self._call_nrt_incref_decref(builder, typ, typ, value, "NRT_incref")
    def nrt_decref(self, builder, typ, value):
        """
        Recursively decref the given *value* and its members.
        """
        self._call_nrt_incref_decref(builder, typ, typ, value, "NRT_decref")
class _wrap_impl(object):
"""
A wrapper object to call an implementation function with some predefined
(context, signature) arguments.
The wrapper also forwards attribute queries, which is important.
"""
def __init__(self, imp, context, sig):
self._imp = imp
self._context = context
self._sig = sig
def __call__(self, builder, args):
return self._imp(self._context, builder, self._sig, args)
def __getattr__(self, item):
return getattr(self._imp, item)
def __repr__(self):
return "<wrapped %s>" % self._imp
|
|
from utils import ViewBox, attributesToSVG
from ..utils.struct import Vector as V, identity
from ..utils.dictionary import Dictionary
class Element (object):
    """
    Base class for SVG document nodes.

    Recognized keyword arguments (all optional, default None):
      name      -- the SVG tag name (e.g. 'rect')
      id        -- the XML id attribute
      className -- the value emitted as the 'class' attribute
    """
    def __init__ (self, **attr):
        # dict.get() replaces dict.has_key(), which was removed in
        # Python 3; absent keys default to None exactly as before.
        self.name = attr.get ('name')
        self.id = attr.get ('id')
        self.className = attr.get ('className')
        self.parent = None
        self.style = Style ()
        self.xml = {}
    def root (self):
        """Return the topmost ancestor of this element (self if detached)."""
        node = self
        while node.parent is not None:
            node = node.parent
        return node
    def createDef (self, element):
        """Register *element* in this node's Defs container (created lazily).

        Returns True if the element was added, False if an element with
        the same id was already registered."""
        if not hasattr (self, 'defs'):
            self.defs = Defs ()
        if self.defs.getElementById (element.id) is None:
            self.defs.draw (element)
            return True
        return False
    def setSVG (self):
        """Collect this element's SVG attributes into a dictionary.

        Style attributes and raw xml attributes override 'class'/'id'
        on key collision (same precedence as the original dict.update order)."""
        attr = {'class': self.className,
                'id': self.id}
        attr.update (self.style.setSVG ())
        attr.update (self.xml)
        return attr
    def SVG (self, indent):
        """Serialize this element as a self-closing SVG tag string."""
        attr = self.setSVG ()
        return indent + '<' + self.name + ' ' + attributesToSVG (attr) + ' />\n'
class Style:
    """Bag of style attributes stored as plain instance attributes.

    Attribute names are the pythonic keys of StyleDict (e.g. strokeColor);
    setSVG() translates them to their SVG presentation-attribute names.
    """
    def setSVG (self):
        """Return a dict mapping SVG attribute names to their values.

        Unknown attribute names are skipped with a warning rather than
        raising, so a typo degrades gracefully.
        """
        attr = {}
        # items() instead of the Python-2-only iteritems(); build the dict
        # directly instead of collecting pairs and calling update().
        for key, value in self.__dict__.items ():
            try:
                attr[StyleDict[key]] = value
            except KeyError:
                print ('Warning: ' + str(key) + ' is not a known style attribute. Skipping.')
        return attr
# Translation table from the pythonic attribute names used on Style
# instances to the corresponding SVG presentation attribute names.
StyleDict = {'strokeColor': 'stroke',
             'strokeWidth': 'stroke-width',
             # NOTE(review): 'stokeWidth' -> 'stoke-width' looks like a
             # typo'd duplicate of strokeWidth ('stoke-width' is not a
             # real SVG attribute) -- confirm whether any caller relies
             # on it before removing.
             'stokeWidth': 'stoke-width',
             'fill': 'fill',
             'opacity': 'opacity',
             'shapeRendering': 'shape-rendering',
             }
class Script (Element):
    """Inline <script> element embedding JavaScript source as CDATA."""
    def __init__ (self, cdata, **attr):
        """*cdata* is the raw JavaScript text to embed verbatim."""
        Element.__init__ (self, name = 'script', **attr)
        self.xml ['type'] = 'text/javascript'
        self.text = cdata

    def SVG (self, indent):
        # NOTE(review): *indent* is accepted for interface parity with
        # Element.SVG but deliberately not applied, so the CDATA payload
        # is emitted flush-left -- confirm this is intended.
        tag = '<script type="text/javascript"><![CDATA[\n'
        tag += self.text + '\n'
        tag += ']]></script>\n'
        return tag
class PositionableElement (Element):
    """An Element with an (x, y) anchor position.

    Keyword arguments ``x`` and ``y`` default to 0.0 when omitted.
    """
    def __init__ (self, **attr):
        Element.__init__ (self, **attr)
        # dict.get() with a default replaces the deprecated
        # has_key()/if-else blocks; float() is applied either way,
        # matching the original behaviour.
        self.x = float (attr.get ('x', 0.0))
        self.y = float (attr.get ('y', 0.0))

    def position (self, x = None, y = None):
        """With no arguments, return (x, y); otherwise update whichever
        coordinates were supplied."""
        if x is None and y is None:
            return (self.x, self.y)
        if x is not None:
            self.x = float (x)
        if y is not None:
            self.y = float (y)

    def applyTransform (self, *points):
        """Apply the accumulated ancestor transforms to *points* in place,
        stopping at the nearest enclosing 'svg' element."""
        matrix = identity (3)
        parent = self.parent
        while parent is not None:
            matrix = parent.transform * matrix
            if parent.name == 'svg':
                break
            parent = parent.parent
        for point in points:
            transformed = matrix * point.matrix ()
            point.x = transformed.get (0, 0)
            point.y = transformed.get (1, 0)

    def getWorldPoint (self, x, y):
        """Return (x, y) mapped through every ancestor transform
        (not stopping at 'svg', unlike applyTransform)."""
        matrix = identity (3)
        parent = self.parent
        while parent is not None:
            matrix = parent.transform * matrix
            parent = parent.parent
        return (matrix * V (x, y))
class BoxElement (PositionableElement):
    """A positionable element with optional width/height extents.

    ``width`` and ``height`` default to None (unsized) when omitted.
    """
    def __init__ (self, **attr):
        PositionableElement.__init__ (self, **attr)
        # conditional expressions replace the deprecated has_key() blocks
        self.width = float (attr['width']) if 'width' in attr else None
        self.height = float (attr['height']) if 'height' in attr else None

    def calulateBox (self, x, y, width, height):
        """Return the transformed bounding box as an attribute dict.

        When width/height is None the arithmetic raises TypeError and we
        fall back to transforming the anchor only, reporting the
        *untransformed* x/y with None extents (original behaviour).

        NOTE(review): the method name is a long-standing typo of
        "calculateBox"; it is kept (with an alias below) so external
        callers do not break.
        """
        try:
            minPoint = V (x, y)
            maxPoint = V (x + width, y + height)
            self.applyTransform (minPoint, maxPoint)
            self.reconfigurePoints (minPoint, maxPoint)
            return {'x': minPoint.x,
                    'y': minPoint.y,
                    'width': (maxPoint.x - minPoint.x),
                    'height': (maxPoint.y - minPoint.y)
                    }
        except TypeError:
            minPoint = V (x, y)
            x = minPoint.x
            y = minPoint.y
            self.applyTransform (minPoint)
            return {'x': x,
                    'y': y,
                    'width': None,
                    'height': None
                    }

    def reconfigurePoints (self, minPoint, maxPoint):
        """Swap coordinates so minPoint holds the minima on both axes."""
        if minPoint.x > maxPoint.x:
            minPoint.x, maxPoint.x = maxPoint.x, minPoint.x
        if minPoint.y > maxPoint.y:
            minPoint.y, maxPoint.y = maxPoint.y, minPoint.y

    def setSVG (self):
        """Extend the base attribute dict with the computed box."""
        attr = PositionableElement.setSVG (self)
        attr.update (self.calulateBox (self.x, self.y, self.width, self.height))
        return attr

    # correctly-spelled, backward-compatible alias
    calculateBox = calulateBox
from defs import Defs
### OLD CODE ###
"""
class Node:
def __init__ (self, **attr):
self.parent = None
self.attributes = Dictionary ()
if attr.has_key ('id'):
self.id = attr['id']
else:
self.id = None
if attr.has_key ('className'):
self.className = attr['className']
else:
self.className = None
def getAttribute (self, key):
if self.attributes.has_key (key):
return self.attributes[key]
else:
return None
def setAttribute (self, key, value):
self.attributes.update ([(key, value)])
def SVG (self, indent=''):
self.setSVG ()
output = indent + '<' + self.name
attr = attributesToSVG (self.attributes)
if attr:
output += ' ' + attr
output += ' />\n'
return output
def setSVG (self):
self.setAttribute ('id', self.id)
self.setAttribute ('class', self.className)
class ScriptNode (Node):
def __init__ (self, type, filename, **attr):
Node.__init__ (self, **attr)
self.name = 'script'
self.type = type
self.filename = filename
def setSVG (self):
self.setAttribute ('type', self.type)
self.setAttribute ('xlink:href', self.filename)
class PositionableNode (Node):
def __init__ (self, **attr):
Node.__init__ (self, **attr)
def setPosition (self):
raise NotImplementedError ()
def setSVG (self):
Node.setSVG (self)
matrix = identity (3)
parent = self.parent
while not parent is None:
matrix = parent.transform * matrix
parent = parent.parent
points = []
for p in self.points ():
q = matrix * p
points.append (q)
self.returnPoints (points)
self.setPosition ()
def translate (self, vect):
for p in self.points ():
p += vect
def points (self):
raise NotImplementedError ()
def returnPoints (self, points):
raise NotImplementedError ()
class BoxNode (PositionableNode):
def __init__ (self, **attr):
PositionableNode.__init__ (self, **attr)
if attr.has_key ('position'):
minPoint = attr['position']
else:
minPoint = V (0, 0)
if attr.has_key ('width'):
width = attr['width']
else:
width = 0
if attr.has_key ('height'):
height = attr['height']
else:
height = 0
self.minPoint = minPoint
self.maxPoint = minPoint + V (width, height)
self.minPoint.set (1, 2, 0)
self.maxPoint.set (1, 2, 0)
self.reconfigurePoints ()
def reconfigurePoints (self):
if self.minPoint.x > self.minPoint.x:
tmp = self.minPoint.x
self.minPoint.x = self.maxPoint.x
self.maxPoint.x = tmp
if self.minPoint.y > self.maxPoint.y:
tmp = self.minPoint.y
self.minPoint.y = self.maxPoint.y
self.maxPoint.y = tmp
def changePosition (self, dx, dy):
d = V (dx, dy)
self.minPoint += d
self.maxPoint += d
def changeSize (self, dx, dy):
d = V (dx, dy)
self.maxPoint += d
self.reconfigurePoints ()
def setPosition (self):
if self.minPoint.x:
self.setAttribute ('x', self.minPoint.x)
if self.minPoint.y:
self.setAttribute ('y', self.minPoint.y)
def setSize (self):
width = self.maxPoint.x - self.minPoint.x
height = self.maxPoint.y - self.minPoint.y
if width:
self.setAttribute ('width', width)
if height:
self.setAttribute ('height', height)
def width (self):
return self.maxPoint.x - self.minPoint.x
def height (self):
return self.maxPoint.y - self.minPoint.y
def points (self):
return [self.minPoint, self.maxPoint]
def returnPoints (self, points):
self.minPoint = points[0]
self.maxPoint = points[1]
self.reconfigurePoints ()
def setSVG (self):
PositionableNode.setSVG (self)
self.setSize ()
class GroupableNode (Node):
def __init__ (self, **attr):
Node.__init__ (self, **attr)
self.children = []
self.transform = identity (3)
def clear (self):
for child in self.children:
child.parent = None
self.children = []
def draw (self, nodeToDraw):
nodeToDraw.parent = self
self.children.append (nodeToDraw)
def drawBefore (self, nodeToDraw, existingNode):
index = 0
for node in self.children:
if existingNode is node:
nodeToDraw.parent = self
break
index += 1
self.children.insert (index, nodeToDraw)
def drawAt (self, nodeToDraw, index):
nodeToDraw.parent = self
self.children.insert (index, nodeToDraw)
def getGroupById (self, id):
for node in self.children:
if node.id == id:
return node
return None
def removeNodeById (self, id):
index = 0
for node in self.children:
if node.id == id:
self.children[index].parent = None
del self.children[index]
return
else:
index += 1
#def transform (self, matrix):
# for child in self.children:
# child.transform (matrix)
def __len__ (self):
return len (self.children)
def __iter__ (self):
return iter (self.children)
def SVG (self, indent=''):
self.setSVG ()
output = indent + '<' + self.name
attr = attributesToSVG (self.attributes)
if attr:
output += ' ' + attr
if len (self.children) > 0:
output += '>\n'
nextIndent = ' ' + indent
for node in self.children:
output += node.SVG (nextIndent)
output += indent + '</' + self.name + '>\n'
else:
output += ' />\n'
return output
class Group (GroupableNode):
def __init__ (self, **attr):
GroupableNode.__init__ (self, **attr)
self.name = 'g'
self.transforms = []
def appendTransform (self, transform):
self.transforms.append (transform)
def setSVG (self):
GroupableNode.setSVG (self)
transforms = []
for trans in self.transforms:
transforms.append (str (trans))
finalTransform = ' '.join (transforms)
if finalTransform != '':
self.setAttribute ('transform', finalTransform)
class Canvas (GroupableNode, BoxNode):
def __init__ (self, **attr):
GroupableNode.__init__ (self, **attr)
BoxNode.__init__ (self, **attr)
self.name = 'svg'
if attr.has_key ('viewBox'):
self.viewBox = attr['viewBox']
else:
self.viewBox = None
#def transform (self, matrix):
# BoxNode.transform (self, matrix)
# GroupableNode.transform (self, matrix)
def coordinateFrame (self, viewBox = ViewBox (), aspect = None):
self.viewBox = viewBox
if aspect is not None:
self.setAttribute ('preserveAspectRatio', aspect)
def setSVG (self):
GroupableNode.setSVG (self)
BoxNode.setSVG (self)
self.setAttribute ('viewBox', self.viewBox)
class PrintableCanvas (Canvas):
def __init__ (self):
Canvas.__init__ (self)
self.stylesheets = []
self.scripts = []
def addJS (self, filename):
self.scripts.append (filename)
def addCSS (self, filename):
self.stylesheets.append (filename)
def setSVG (self):
Canvas.setSVG (self)
self.setAttribute ('xmlns', 'http://www.w3.org/2000/svg')
self.setAttribute ('xmlns:xlink', 'http://www.w3.org/1999/xlink')
self.setAttribute ('xmlns:ev', 'http://www.w3.org/2001/xml-events')
def SVG (self):
self.scripts.reverse ()
for script in self.scripts:
s = ScriptNode ('text/javascript', script)
self.drawAt (s, 0)
output = Canvas.SVG (self)
prepend = '<?xml version="1.0" standalone="no"?>\n'
prepend += '<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" '
prepend += '"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">\n'
for entry in self.stylesheets:
prepend += '<?xml-stylesheet type="text/css" href="'
prepend += entry + '" ?>\n'
return prepend + output
def save (self, fileOrString):
needToClose = False
if isinstance (fileOrString, str):
needToClose = True
fileOrString = open (fileOrString, 'w')
fileOrString.write (self.SVG ())
if needToClose:
fileOrString.close ()
class Shape (PositionableNode):
def __init__ (self, **attr):
PositionableNode.__init__ (self, **attr)
def setSVG (self):
PositionableNode.setSVG (self)
"""
|
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
from cryptography import exceptions as crypto_exception
from cursive import exception as cursive_exception
from cursive import signature_utils
import glance_store as store
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from glance.common import exception
from glance.common import utils
import glance.domain.proxy
from glance.i18n import _, _LE, _LI, _LW
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class ImageRepoProxy(glance.domain.proxy.Repo):
    """Image repo proxy that re-applies store ACLs after add/save."""

    def __init__(self, image_repo, context, store_api, store_utils):
        self.context = context
        self.store_api = store_api
        proxy_kwargs = {'context': context, 'store_api': store_api,
                        'store_utils': store_utils}
        super(ImageRepoProxy, self).__init__(image_repo,
                                             item_proxy_class=ImageProxy,
                                             item_proxy_kwargs=proxy_kwargs)
        self.db_api = glance.db.get_api()

    def _set_acls(self, image):
        is_public = image.visibility == 'public'
        read_tenants = []
        if image.locations and not is_public:
            # Non-public images grant read access to their members only.
            member_repo = _get_member_repo_for_store(image,
                                                     self.context,
                                                     self.db_api,
                                                     self.store_api)
            read_tenants = [member.member_id
                            for member in member_repo.list()]
        for location in image.locations:
            self.store_api.set_acls(location['url'], public=is_public,
                                    read_tenants=read_tenants,
                                    context=self.context)

    def add(self, image):
        added = super(ImageRepoProxy, self).add(image)
        self._set_acls(image)
        return added

    def save(self, image, from_state=None):
        saved = super(ImageRepoProxy, self).save(image, from_state=from_state)
        self._set_acls(image)
        return saved
def _get_member_repo_for_store(image, context, db_api, store_api):
    """Build a store-aware member-repo proxy for *image*."""
    member_repo = glance.db.ImageMemberRepo(context, db_api, image)
    return glance.location.ImageMemberRepoProxy(member_repo, image,
                                                context, store_api)
def _check_location_uri(context, store_api, store_utils, uri):
    """Check if an image location is valid.

    :param context: Glance request context
    :param store_api: store API module
    :param store_utils: store utils module
    :param uri: location's uri string
    :raises exception.BadStoreUri: when the location is unusable
    """
    try:
        # NOTE(zhiyan): Some stores return zero when it catch exception
        valid = (store_utils.validate_external_location(uri) and
                 store_api.get_size_from_backend(uri, context=context) > 0)
    except (store.UnknownScheme, store.NotFound, store.BadStoreUri):
        valid = False
    if valid:
        return
    raise exception.BadStoreUri(message=_('Invalid location'))
def _check_image_location(context, store_api, store_utils, location):
    # Validate the location's URI first (raises BadStoreUri on failure),
    # then let the store backend validate the location's metadata dict.
    _check_location_uri(context, store_api, store_utils, location['url'])
    store_api.check_location_metadata(location['metadata'])
def _set_image_size(context, image, locations):
    """Populate image.size from the first location reporting a size.

    No-op when the image already has a (truthy) size.
    """
    if image.size:
        return
    for location in locations:
        backend_size = store.get_size_from_backend(location['url'],
                                                   context=context)
        if backend_size:
            # NOTE(flwang): This assumes all locations have the same size
            image.size = backend_size
            break
def _count_duplicated_locations(locations, new):
    """
    To calculate the count of duplicated locations for new one.
    :param locations: The existing image location set
    :param new: The new image location
    :returns: The count of duplicated locations
    """
    # A location counts as a duplicate only when both url and metadata match.
    return sum(1 for loc in locations
               if loc['url'] == new['url'] and
               loc['metadata'] == new['metadata'])
class ImageFactoryProxy(glance.domain.proxy.ImageFactory):
    """Image factory proxy that validates locations passed to new_image()."""

    def __init__(self, factory, context, store_api, store_utils):
        self.context = context
        self.store_api = store_api
        self.store_utils = store_utils
        proxy_kwargs = {'context': context, 'store_api': store_api,
                        'store_utils': store_utils}
        super(ImageFactoryProxy, self).__init__(factory,
                                                proxy_class=ImageProxy,
                                                proxy_kwargs=proxy_kwargs)

    def new_image(self, **kwargs):
        locations = kwargs.get('locations', [])
        for location in locations:
            # Each location must be reachable and unique before the image
            # is created; valid ones are marked active in place.
            _check_image_location(self.context,
                                  self.store_api,
                                  self.store_utils,
                                  location)
            location['status'] = 'active'
            if _count_duplicated_locations(locations, location) > 1:
                raise exception.DuplicateLocation(location=location['url'])
        return super(ImageFactoryProxy, self).new_image(**kwargs)
# NOTE(review): collections.MutableSequence moved to collections.abc in
# Python 3.3 and the old alias was removed in 3.10; __cmp__/cmp() below
# are Python-2-only -- confirm the target interpreter before porting.
class StoreLocations(collections.MutableSequence):
    """
    The proxy for store location property. It takes responsibility for::
        1. Location uri correctness checking when adding a new location.
        2. Remove the image data from the store when a location is removed
           from an image.
    """
    def __init__(self, image_proxy, value):
        # image_proxy supplies context/store_api/store_utils for checks;
        # a list is adopted as-is (shared!), other iterables are copied.
        self.image_proxy = image_proxy
        if isinstance(value, list):
            self.value = value
        else:
            self.value = list(value)

    def append(self, location):
        # NOTE(flaper87): Insert this
        # location at the very end of
        # the value list.
        self.insert(len(self.value), location)

    def extend(self, other):
        if isinstance(other, StoreLocations):
            locations = other.value
        else:
            locations = list(other)
        for location in locations:
            self.append(location)

    def insert(self, i, location):
        # Validate the location, reject duplicates, then record it and
        # backfill image.size from the backend if it is still unset.
        _check_image_location(self.image_proxy.context,
                              self.image_proxy.store_api,
                              self.image_proxy.store_utils,
                              location)
        location['status'] = 'active'
        if _count_duplicated_locations(self.value, location) > 0:
            raise exception.DuplicateLocation(location=location['url'])
        self.value.insert(i, location)
        _set_image_size(self.image_proxy.context,
                        self.image_proxy,
                        [location])

    def pop(self, i=-1):
        # Remove the entry first; if deleting the backend data fails,
        # restore the entry and re-raise the original exception.
        location = self.value.pop(i)
        try:
            self.image_proxy.store_utils.delete_image_location_from_backend(
                self.image_proxy.context,
                self.image_proxy.image.image_id,
                location)
        except Exception:
            with excutils.save_and_reraise_exception():
                self.value.insert(i, location)
        return location

    def count(self, location):
        return self.value.count(location)

    def index(self, location, *args):
        return self.value.index(location, *args)

    def remove(self, location):
        # Prefer pop() so backend data is deleted; fall back to the
        # list's remove() (which raises ValueError) when absent.
        if self.count(location):
            self.pop(self.index(location))
        else:
            self.value.remove(location)

    def reverse(self):
        self.value.reverse()

    # Mutable sequence, so not hashable
    __hash__ = None

    def __getitem__(self, i):
        return self.value.__getitem__(i)

    def __setitem__(self, i, location):
        # Same validation as insert(), but without the duplicate check.
        _check_image_location(self.image_proxy.context,
                              self.image_proxy.store_api,
                              self.image_proxy.store_utils,
                              location)
        location['status'] = 'active'
        self.value.__setitem__(i, location)
        _set_image_size(self.image_proxy.context,
                        self.image_proxy,
                        [location])

    def __delitem__(self, i):
        # Slices are routed through __delslice__ (step 1 only); single
        # deletions remove the backend data before dropping the entry.
        if isinstance(i, slice):
            if i.step not in (None, 1):
                raise NotImplementedError("slice with step")
            self.__delslice__(i.start, i.stop)
            return
        location = None
        try:
            location = self.value[i]
        except Exception:
            del self.value[i]
            return
        self.image_proxy.store_utils.delete_image_location_from_backend(
            self.image_proxy.context,
            self.image_proxy.image.image_id,
            location)
        del self.value[i]

    def __delslice__(self, i, j):
        i = 0 if i is None else max(i, 0)
        j = len(self) if j is None else max(j, 0)
        locations = []
        try:
            locations = self.value[i:j]
        except Exception:
            del self.value[i:j]
            return
        for location in locations:
            self.image_proxy.store_utils.delete_image_location_from_backend(
                self.image_proxy.context,
                self.image_proxy.image.image_id,
                location)
            # deleting index i each pass shifts the remainder left,
            # removing the whole [i:j) range one entry at a time
            del self.value[i]

    def __iadd__(self, other):
        self.extend(other)
        return self

    def __contains__(self, location):
        return location in self.value

    def __len__(self):
        return len(self.value)

    def __cast(self, other):
        # Compare against the underlying list of another StoreLocations.
        if isinstance(other, StoreLocations):
            return other.value
        else:
            return other

    def __cmp__(self, other):
        return cmp(self.value, self.__cast(other))

    def __eq__(self, other):
        return self.value == self.__cast(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __iter__(self):
        return iter(self.value)

    def __copy__(self):
        return type(self)(self.image_proxy, self.value)

    def __deepcopy__(self, memo):
        # NOTE(zhiyan): Only copy location entries, others can be reused.
        value = copy.deepcopy(self.value, memo)
        self.image_proxy.image.locations = value
        return type(self)(self.image_proxy, value)
def _locations_proxy(target, attr):
    """
    Make a location property proxy on the image object.
    :param target: the image object on which to add the proxy
    :param attr: the property proxy we want to hook
    """
    def get_attr(self):
        # Reads hand back a StoreLocations wrapper so mutations are
        # validated and backend data is cleaned up.
        value = getattr(getattr(self, target), attr)
        return StoreLocations(self, value)

    def set_attr(self, value):
        # Only a list or StoreLocations may be assigned wholesale.
        if not isinstance(value, (list, StoreLocations)):
            reason = _('Invalid locations')
            raise exception.BadStoreUri(message=reason)
        ori_value = getattr(getattr(self, target), attr)
        if ori_value != value:
            # NOTE(flwang): If all the URL of passed-in locations are same as
            # current image locations, that means user would like to only
            # update the metadata, not the URL.
            ordered_value = sorted([loc['url'] for loc in value])
            ordered_ori = sorted([loc['url'] for loc in ori_value])
            if len(ori_value) > 0 and ordered_value != ordered_ori:
                raise exception.Invalid(_('Original locations is not empty: '
                                          '%s') % ori_value)
            # NOTE(zhiyan): Check locations are all valid
            # NOTE(flwang): If all the URL of passed-in locations are same as
            # current image locations, then it's not necessary to verify those
            # locations again. Otherwise, if there is any restricted scheme in
            # existing locations. _check_image_location will fail.
            if ordered_value != ordered_ori:
                for loc in value:
                    _check_image_location(self.context,
                                          self.store_api,
                                          self.store_utils,
                                          loc)
                    loc['status'] = 'active'
                    if _count_duplicated_locations(value, loc) > 1:
                        raise exception.DuplicateLocation(location=loc['url'])
                _set_image_size(self.context, getattr(self, target), value)
            else:
                # URLs unchanged: metadata-only update; just mark active.
                for loc in value:
                    loc['status'] = 'active'
        return setattr(getattr(self, target), attr, list(value))

    def del_attr(self):
        # Deleting the property removes backend data for every location
        # (front to back) before dropping the attribute itself.
        value = getattr(getattr(self, target), attr)
        while len(value):
            self.store_utils.delete_image_location_from_backend(
                self.context,
                self.image.image_id,
                value[0])
            del value[0]
            setattr(getattr(self, target), attr, value)
        return delattr(getattr(self, target), attr)

    return property(get_attr, set_attr, del_attr)
class ImageProxy(glance.domain.proxy.Image):
    # Reads/writes of 'locations' go through the validating property
    # built by _locations_proxy (backed by StoreLocations).
    locations = _locations_proxy('image', 'locations')

    def __init__(self, image, context, store_api, store_utils):
        self.image = image
        self.context = context
        self.store_api = store_api
        self.store_utils = store_utils
        proxy_kwargs = {
            'context': context,
            'image': self,
            'store_api': store_api,
        }
        super(ImageProxy, self).__init__(
            image, member_repo_proxy_class=ImageMemberRepoProxy,
            member_repo_proxy_kwargs=proxy_kwargs)

    def delete(self):
        # Delete the image record first, then remove the stored bits for
        # every location.
        self.image.delete()
        if self.image.locations:
            for location in self.image.locations:
                self.store_utils.delete_image_location_from_backend(
                    self.context,
                    self.image.image_id,
                    location)

    def set_data(self, data, size=None):
        """Upload *data* to the backend store and activate the image.

        :param data: readable image payload
        :param size: payload size in bytes; None is treated as unknown
        :raises cursive_exception.SignatureVerificationError: when the
            image carries signature properties and verification fails
        """
        if size is None:
            size = 0  # NOTE(markwash): zero -> unknown size
        # Create the verifier for signature verification (if correct properties
        # are present)
        extra_props = self.image.extra_properties
        if (signature_utils.should_create_verifier(extra_props)):
            # NOTE(bpoulos): if creating verifier fails, exception will be
            # raised
            img_signature = extra_props[signature_utils.SIGNATURE]
            hash_method = extra_props[signature_utils.HASH_METHOD]
            key_type = extra_props[signature_utils.KEY_TYPE]
            cert_uuid = extra_props[signature_utils.CERT_UUID]
            verifier = signature_utils.get_verifier(
                context=self.context,
                img_signature_certificate_uuid=cert_uuid,
                img_signature_hash_method=hash_method,
                img_signature=img_signature,
                img_signature_key_type=key_type
            )
        else:
            verifier = None
        # The reader enforces CONF.image_size_cap while streaming the
        # payload into the backend.
        location, size, checksum, loc_meta = self.store_api.add_to_backend(
            CONF,
            self.image.image_id,
            utils.LimitingReader(utils.CooperativeReader(data),
                                 CONF.image_size_cap),
            size,
            context=self.context,
            verifier=verifier)
        # NOTE(bpoulos): if verification fails, exception will be raised
        if verifier:
            try:
                verifier.verify()
                LOG.info(_LI("Successfully verified signature for image %s"),
                         self.image.image_id)
            except crypto_exception.InvalidSignature:
                raise cursive_exception.SignatureVerificationError(
                    _('Signature verification failed')
                )
        # Record the new location and mark the image active.
        self.image.locations = [{'url': location, 'metadata': loc_meta,
                                 'status': 'active'}]
        self.image.size = size
        self.image.checksum = checksum
        self.image.status = 'active'

    def get_data(self, offset=0, chunk_size=None):
        """Return image data from the first reachable location.

        Tries each location in order; re-raises the last failure when
        every location fails.
        """
        if not self.image.locations:
            # NOTE(mclaren): This is the only set of arguments
            # which work with this exception currently, see:
            # https://bugs.launchpad.net/glance-store/+bug/1501443
            # When the above glance_store bug is fixed we can
            # add a msg as usual.
            raise store.NotFound(image=None)
        err = None
        for loc in self.image.locations:
            try:
                data, size = self.store_api.get_from_backend(
                    loc['url'],
                    offset=offset,
                    chunk_size=chunk_size,
                    context=self.context)
                return data
            except Exception as e:
                LOG.warn(_LW('Get image %(id)s data failed: '
                             '%(err)s.')
                         % {'id': self.image.image_id,
                            'err': encodeutils.exception_to_unicode(e)})
                err = e
        # tried all locations
        LOG.error(_LE('Glance tried all active locations to get data for '
                      'image %s but all have failed.') % self.image.image_id)
        raise err
class ImageMemberRepoProxy(glance.domain.proxy.Repo):
    """Member repo proxy that refreshes store ACLs on membership changes."""

    def __init__(self, repo, image, context, store_api):
        self.repo = repo
        self.image = image
        self.context = context
        self.store_api = store_api
        super(ImageMemberRepoProxy, self).__init__(repo)

    def _set_acls(self):
        is_public = self.image.visibility == 'public'
        # Public images and images without locations need no ACL update.
        if not self.image.locations or is_public:
            return
        read_tenants = [member.member_id for member in self.repo.list()]
        for location in self.image.locations:
            self.store_api.set_acls(location['url'], public=is_public,
                                    read_tenants=read_tenants,
                                    context=self.context)

    def add(self, member):
        super(ImageMemberRepoProxy, self).add(member)
        self._set_acls()

    def remove(self, member):
        super(ImageMemberRepoProxy, self).remove(member)
        self._set_acls()
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from openstack_dashboard.test.test_data import utils
from saharaclient.api import cluster_templates
from saharaclient.api import clusters
from saharaclient.api import data_sources
from saharaclient.api import job_binaries
from saharaclient.api import job_executions
from saharaclient.api import job_types
from saharaclient.api import jobs
from saharaclient.api import node_group_templates
from saharaclient.api import plugins
def data(TEST):
TEST.plugins = utils.TestDataContainer()
TEST.plugins_configs = utils.TestDataContainer()
TEST.nodegroup_templates = utils.TestDataContainer()
TEST.cluster_templates = utils.TestDataContainer()
TEST.clusters = utils.TestDataContainer()
TEST.data_sources = utils.TestDataContainer()
TEST.job_binaries = utils.TestDataContainer()
TEST.jobs = utils.TestDataContainer()
TEST.job_executions = utils.TestDataContainer()
TEST.registered_images = copy.copy(TEST.images)
TEST.job_types = utils.TestDataContainer()
plugin1_dict = {
"description": "vanilla plugin",
"name": "vanilla",
"title": "Vanilla Apache Hadoop",
"versions": ["2.3.0", "1.2.1"]
}
plugin1 = plugins.Plugin(plugins.PluginManager(None), plugin1_dict)
TEST.plugins.add(plugin1)
plugin_config1_dict = {
"node_processes": {
"HDFS": [
"namenode",
"datanode",
"secondarynamenode"
],
"MapReduce": [
"tasktracker",
"jobtracker"
]
},
"description": "This plugin provides an ability to launch vanilla "
"Apache Hadoop cluster without any management "
"consoles.",
"versions": [
"1.2.1"
],
"required_image_tags": [
"vanilla",
"1.2.1"
],
"configs": [
{
"default_value": "/tmp/hadoop-${user.name}",
"name": "hadoop.tmp.dir",
"priority": 2,
"config_type": "string",
"applicable_target": "HDFS",
"is_optional": True,
"scope": "node",
"description": "A base for other temporary directories."
},
{
"default_value": True,
"name": "hadoop.native.lib",
"priority": 2,
"config_type": "bool",
"applicable_target": "HDFS",
"is_optional": True,
"scope": "node",
"description": "Should native hadoop libraries, if present, "
"be used."
},
],
"title": "Vanilla Apache Hadoop",
"name": "vanilla"
}
TEST.plugins_configs.add(plugins.Plugin(plugins.PluginManager(None),
plugin_config1_dict))
# Nodegroup_Templates.
ngt1_dict = {
"created_at": "2014-06-04 14:01:03.701243",
"description": None,
"flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
"availability_zone": None,
"floating_ip_pool": None,
"auto_security_group": True,
"security_groups": [],
"hadoop_version": "1.2.1",
"id": "c166dfcc-9cc7-4b48-adc9-f0946169bb36",
"image_id": None,
"name": "sample-template",
"node_configs": {},
"node_processes": [
"namenode",
"jobtracker",
"secondarynamenode",
"hiveserver",
"oozie"
],
"plugin_name": "vanilla",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"security_groups": [],
"volumes_availability_zone": None,
}
ngt1 = node_group_templates.NodeGroupTemplate(
node_group_templates.NodeGroupTemplateManager(None), ngt1_dict)
TEST.nodegroup_templates.add(ngt1)
# Cluster_templates.
ct1_dict = {
"anti_affinity": [],
"cluster_configs": {},
"created_at": "2014-06-04 14:01:06.460711",
"default_image_id": None,
"description": None,
"hadoop_version": "1.2.1",
"id": "a2c3743f-31a2-4919-8d02-792138a87a98",
"name": "sample-cluster-template",
"neutron_management_network": None,
"node_groups": [
{
"count": 1,
"created_at": "2014-06-04 14:01:06.462512",
"flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
"floating_ip_pool": None,
"image_id": None,
"name": "master",
"node_configs": {},
"node_group_template_id": "c166dfcc-9cc7-4b48-adc9",
"node_processes": [
"namenode",
"jobtracker",
"secondarynamenode",
"hiveserver",
"oozie"
],
"updated_at": None,
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"volumes_availability_zone": None,
},
{
"count": 2,
"created_at": "2014-06-04 14:01:06.463214",
"flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
"floating_ip_pool": None,
"image_id": None,
"name": "workers",
"node_configs": {},
"node_group_template_id": "4eb5504c-94c9-4049-a440",
"node_processes": [
"datanode",
"tasktracker"
],
"updated_at": None,
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"volumes_availability_zone": None,
}
],
"plugin_name": "vanilla",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None
}
ct1 = cluster_templates.ClusterTemplate(
cluster_templates.ClusterTemplateManager(None), ct1_dict)
TEST.cluster_templates.add(ct1)
# Clusters.
cluster1_dict = {
"anti_affinity": [],
"cluster_configs": {},
"cluster_template_id": "a2c3743f-31a2-4919-8d02-792138a87a98",
"created_at": "2014-06-04 20:02:14.051328",
"default_image_id": "9eb4643c-dca8-4ea7-92d2-b773f88a8dc6",
"description": "",
"hadoop_version": "1.2.1",
"id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533",
"info": {},
"is_transient": False,
"management_public_key": "fakekey",
"name": "cercluster",
"neutron_management_network": None,
"node_groups": [
{
"count": 1,
"created_at": "2014-06-04 20:02:14.053153",
"flavor_id": "0",
"floating_ip_pool": None,
"image_id": None,
"instances": [
{
"created_at": "2014-06-04 20:02:14.834529",
"id": "c3b8004b-7063-4b99-a082-820cdc6e961c",
"instance_id": "a45f5495-4a10-4f17-8fae",
"instance_name": "cercluster-master-001",
"internal_ip": None,
"management_ip": None,
"updated_at": None,
"volumes": []
}
],
"name": "master",
"node_configs": {},
"node_group_template_id": "c166dfcc-9cc7-4b48-adc9",
"node_processes": [
"namenode",
"jobtracker",
"secondarynamenode",
"hiveserver",
"oozie"
],
"updated_at": "2014-06-04 20:02:14.841760",
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"security_groups": [],
"volumes_availability_zone": None,
},
{
"count": 2,
"created_at": "2014-06-04 20:02:14.053849",
"flavor_id": "0",
"floating_ip_pool": None,
"image_id": None,
"instances": [
{
"created_at": "2014-06-04 20:02:15.097655",
"id": "6a8ae0b1-bb28-4de2-bfbb-bdd3fd2d72b2",
"instance_id": "38bf8168-fb30-483f-8d52",
"instance_name": "cercluster-workers-001",
"internal_ip": None,
"management_ip": None,
"updated_at": None,
"volumes": []
},
{
"created_at": "2014-06-04 20:02:15.344515",
"id": "17b98ed3-a776-467a-90cf-9f46a841790b",
"instance_id": "85606938-8e53-46a5-a50b",
"instance_name": "cercluster-workers-002",
"internal_ip": None,
"management_ip": None,
"updated_at": None,
"volumes": []
}
],
"name": "workers",
"node_configs": {},
"node_group_template_id": "4eb5504c-94c9-4049-a440",
"node_processes": [
"datanode",
"tasktracker"
],
"updated_at": "2014-06-04 20:02:15.355745",
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"security_groups": ["b7857890-09bf-4ee0-a0d5-322d7a6978bf"],
"volumes_availability_zone": None,
}
],
"plugin_name": "vanilla",
"status": "Active",
"status_description": "",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"trust_id": None,
"updated_at": "2014-06-04 20:02:15.446087",
"user_keypair_id": "stackboxkp"
}
# Wrap the raw cluster dict in the saharaclient resource class and register
# it with the shared test-data container (the manager gets None — no live
# API client is needed for fixture objects).
cluster1 = clusters.Cluster(
clusters.ClusterManager(None), cluster1_dict)
TEST.clusters.add(cluster1)
# Data Sources.
# Two data-source fixtures covering both storage "type" values used in the
# tests: one Swift-backed and one HDFS-backed.
data_source1_dict = {
"created_at": "2014-06-04 14:01:10.371562",
"description": "sample output",
"id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede",
"name": "sampleOutput",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"type": "swift",
"updated_at": None,
"url": "swift://example.sahara/output"
}
data_source2_dict = {
"created_at": "2014-06-05 15:01:12.331361",
"description": "second sample output",
"id": "ab3413-adfb-bba2-123456785675",
"name": "sampleOutput2",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"type": "hdfs",
"updated_at": None,
"url": "hdfs://example.sahara/output"
}
# Wrap the dicts in API resource objects (manager is None — offline) and
# register them with the shared test-data container.
data_source1 = data_sources.DataSources(
data_sources.DataSourceManager(None), data_source1_dict)
data_source2 = data_sources.DataSources(
data_sources.DataSourceManager(None), data_source2_dict)
TEST.data_sources.add(data_source1)
TEST.data_sources.add(data_source2)
# Job Binaries.
# Two job-binary fixtures; the second deliberately has spaces in its name
# (see its description) to exercise name handling in the tests. Both URLs
# use the internal-db:// scheme.
job_binary1_dict = {
"created_at": "2014-06-05 18:15:15.581285",
"description": "",
"id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7",
"name": "example.pig",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d"
}
job_binary2_dict = {
"created_at": "2014-10-10 13:12:15.583631",
"description": "Test for spaces in name",
"id": "abcdef56-1234-abcd-abcd-defabcdaedcb",
"name": "example with spaces.pig",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://abcdef56-1234-abcd-abcd-defabcdaedcb"
}
# Wrap and register the fixtures (manager is None — offline).
job_binary1 = job_binaries.JobBinaries(
job_binaries.JobBinariesManager(None), job_binary1_dict)
job_binary2 = job_binaries.JobBinaries(
job_binaries.JobBinariesManager(None), job_binary2_dict)
TEST.job_binaries.add(job_binary1)
TEST.job_binaries.add(job_binary2)
# Jobs.
# A single Pig job fixture. "mains" repeats the example.pig job-binary dict
# defined above (same id and url); "libs" carries one supporting jar.
job1_dict = {
"created_at": "2014-06-05 19:23:59.637165",
"description": "",
"id": "a077b851-46be-4ad7-93c3-2d83894546ef",
"libs": [
{
"created_at": "2014-06-05 19:23:42.742057",
"description": "",
"id": "ab140807-59f8-4235-b4f2-e03daf946256",
"name": "udf.jar",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://d186e2bb-df93-47eb-8c0e-ce21e7ecb78b"
}
],
"mains": [
{
"created_at": "2014-06-05 18:15:15.581285",
"description": "",
"id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7",
"name": "example.pig",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d"
}
],
"name": "pigjob",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"type": "Pig",
"updated_at": None
}
# Wrap and register (manager is None — offline).
job1 = jobs.Job(jobs.JobsManager(None), job1_dict)
TEST.jobs.add(job1)
# Job Executions.
# One running job-execution fixture. "job_id" matches job1 above and
# "output_id" matches data_source1; "info" carries an Oozie-style workflow
# status payload (cf. "oozie_job_id") with a completed :start: action and a
# pig action still RUNNING.
jobex1_dict = {
"cluster_id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533",
"created_at": "2014-06-05 20:03:06.195937",
"end_time": None,
"id": "4b6c1cbf-c713-49d3-8025-808a87c514a6",
"info": {
"acl": None,
"actions": [
{
"consoleUrl": "-",
"cred": "None",
"data": None,
"endTime": "Thu,05 Jun 2014 20:03:32 GMT",
"errorCode": None,
"errorMessage": None,
"externalChildIDs": None,
"externalId": "-",
"externalStatus": "OK",
"id": "0000000-140604200538581-oozie-hado-W@:start:",
"name": ":start:",
"retries": 0,
"startTime": "Thu,05 Jun 2014 20:03:32 GMT",
"stats": None,
"status": "OK",
"toString": "Action name[:start:] status[OK]",
"trackerUri": "-",
"transition": "job-node",
"type": ":START:"
},
{
"consoleUrl": "fake://console.url",
"cred": "None",
"data": None,
"endTime": None,
"errorCode": None,
"errorMessage": None,
"externalChildIDs": None,
"externalId": "job_201406042004_0001",
"externalStatus": "RUNNING",
"id": "0000000-140604200538581-oozie-hado-W@job-node",
"name": "job-node",
"retries": 0,
"startTime": "Thu,05 Jun 2014 20:03:33 GMT",
"stats": None,
"status": "RUNNING",
"toString": "Action name[job-node] status[RUNNING]",
"trackerUri": "cercluster-master-001:8021",
"transition": None,
"type": "pig"
}
],
"appName": "job-wf",
"appPath": "hdfs://fakepath/workflow.xml",
"conf": "<configuration>fakeconfig</configuration>",
"consoleUrl": "fake://consoleURL",
"createdTime": "Thu,05 Jun 2014 20:03:32 GMT",
"endTime": None,
"externalId": None,
"group": None,
"id": "0000000-140604200538581-oozie-hado-W",
"lastModTime": "Thu,05 Jun 2014 20:03:35 GMT",
"parentId": None,
"run": 0,
"startTime": "Thu,05 Jun 2014 20:03:32 GMT",
"status": "RUNNING",
"toString": "Workflow ...status[RUNNING]",
"user": "hadoop"
},
"input_id": "85884883-3083-49eb-b442-71dd3734d02c",
# Empty EDP configuration: no args, configs, or params for this run.
"job_configs": {
"args": [],
"configs": {},
"params": {}
},
"job_id": "a077b851-46be-4ad7-93c3-2d83894546ef",
"oozie_job_id": "0000000-140604200538581-oozie-hado-W",
"output_id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede",
"progress": None,
"return_code": None,
"start_time": "2014-06-05T16:03:32",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": "2014-06-05 20:03:46.438248",
# NOTE(review): the *_set/*_name fields below look like dashboard-side
# augmentation rather than raw API output — confirm against the consumer.
"cluster_name_set": True,
"job_name_set": True,
"cluster_name": "cluster-1",
"job_name": "job-1"
}
# Wrap and register (manager is None — offline).
jobex1 = job_executions.JobExecution(
job_executions.JobExecutionsManager(None), jobex1_dict)
TEST.job_executions.add(jobex1)
# Augment the first registered image in-place with the extra attributes the
# tests rely on (tags, username, description).
augmented_image = TEST.registered_images.first()
augmented_image.tags = {}
augmented_image.username = 'myusername'
augmented_image.description = 'mydescription'
# Job Types.
# A "Pig" job type reported as supported by two versions (2.6.0 and 1.2.1)
# of the vanilla plugin; the per-version config dicts are left empty.
job_type1_dict = {
"name": "Pig",
"plugins": [
{
"description": "Fake description",
"versions": {
"2.6.0": {
},
"1.2.1": {
}
},
"name": "vanilla",
"title": "Vanilla Apache Hadoop"
},
]
}
# Wrap and register (manager is None — offline).
job_types1 = job_types.JobType(
job_types.JobTypesManager(None), job_type1_dict)
TEST.job_types.add(job_types1)
|
|
x = ('R0lGODlhigJnAef/AAABAAEEAAkCAAMGAg0GBAYJBQoMCBMODQ4QDRITEBkS'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7'
+'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp'
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA'
+'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7')
|
|
"""Qubits for quantum computing.
Todo:
* Finish implementing measurement logic. This should include POVM.
* Update docstrings.
* Update tests.
"""
import math
from sympy import Integer, log, Mul, Add, Pow, conjugate
from sympy.core.basic import sympify
from sympy.matrices.matrices import Matrix, zeros
from sympy.printing.pretty.stringpict import prettyForm
from sympy.physics.quantum.hilbert import ComplexSpace
from sympy.physics.quantum.state import Ket, Bra, State
from sympy.physics.quantum.qexpr import QuantumError
from sympy.physics.quantum.represent import represent
from sympy.physics.quantum.matrixutils import (
numpy_ndarray, scipy_sparse_matrix
)
# Public API of this module.
__all__ = [
    'Qubit',
    'QubitBra',
    'IntQubit',
    'IntQubitBra',
    'qubit_to_matrix',
    'matrix_to_qubit',
    'measure_all',
    'measure_partial',
    'measure_partial_oneshot',
    'measure_all_oneshot'
]
#-----------------------------------------------------------------------------
# Qubit Classes
#-----------------------------------------------------------------------------
class QubitState(State):
    """Common machinery shared by ``Qubit`` and ``QubitBra``.

    Stores a tuple of 0/1 qubit values and provides sizing, right-to-left
    indexing (bit 0 is the least significant qubit) and bit flipping.
    """

    #-------------------------------------------------------------------------
    # Initialization/creation
    #-------------------------------------------------------------------------

    @classmethod
    def _eval_args(cls, args):
        """Validate and canonicalize the qubit values."""
        # An existing QubitState (or subclass) donates its values directly.
        if len(args) == 1 and isinstance(args[0], QubitState):
            return args[0].qubit_values
        # A single string such as '0101' is split into one-character pieces.
        if len(args) == 1 and isinstance(args[0], basestring):
            args = tuple(args[0])
        args = sympify(args)
        # Every qubit value must be exactly 0 or 1.
        for element in args:
            if not (element == 1 or element == 0):
                raise ValueError("Qubit values must be 0 or 1, got: %r" % element)
        return args

    @classmethod
    def _eval_hilbert_space(cls, args):
        # One two-dimensional complex space per qubit.
        return ComplexSpace(2)**len(args)

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    @property
    def dimension(self):
        """The number of Qubits in the state."""
        return len(self.qubit_values)

    @property
    def nqubits(self):
        """Alias for ``dimension``."""
        return self.dimension

    @property
    def qubit_values(self):
        """The values of the qubits as a tuple."""
        return self.label

    #-------------------------------------------------------------------------
    # Special methods
    #-------------------------------------------------------------------------

    def __len__(self):
        return self.dimension

    def __getitem__(self, bit):
        # Index from the right: bit 0 is the least significant qubit.
        return self.qubit_values[int(self.dimension - bit - 1)]

    #-------------------------------------------------------------------------
    # Utility methods
    #-------------------------------------------------------------------------

    def flip(self, *bits):
        """Return a new state with the given bit(s) flipped."""
        values = list(self.qubit_values)
        for i in bits:
            position = int(self.dimension - i - 1)
            # Toggle between 0 and 1 (values are validated to be 0/1).
            values[position] = 1 - values[position]
        return self.__class__(*tuple(values))
class Qubit(QubitState, Ket):
    """A multi-qubit ket in the computational (z) basis.
    We use the normal convention that the least significant qubit is on the
    right, so |00001> has a 1 in the least significant qubit.
    Parameters
    ==========
    values : list, str
        The qubit values as a list of ints ([0,0,0,1,1,]) or a string ('011').
    Examples
    ========
    Create a qubit in a couple of different ways and look at their attributes:
    >>> from sympy.physics.quantum.qubit import Qubit
    >>> Qubit(0,0,0)
    |000>
    >>> q = Qubit('0101')
    >>> q
    |0101>
    >>> q.nqubits
    4
    >>> len(q)
    4
    >>> q.dimension
    4
    >>> q.qubit_values
    (0, 1, 0, 1)
    We can flip the value of an individual qubit:
    >>> q.flip(1)
    |0111>
    We can take the dagger of a Qubit to get a bra:
    >>> from sympy.physics.quantum.dagger import Dagger
    >>> Dagger(q)
    <0101|
    >>> type(Dagger(q))
    <class 'sympy.physics.quantum.qubit.QubitBra'>
    Inner products work as expected:
    >>> ip = Dagger(q)*q
    >>> ip
    <0101|0101>
    >>> ip.doit()
    1
    """
    @property
    def dual_class(self):
        # Dagger of this ket produces a QubitBra.
        return QubitBra
    def _eval_innerproduct_QubitBra(self, bra, **hints):
        # Computational basis states are orthonormal: <a|b> = 1 iff a == b.
        if self.label == bra.label:
            return Integer(1)
        else:
            return Integer(0)
    def _represent_default_basis(self, **options):
        # The computational (Z) basis is the default representation.
        return self._represent_ZGate(None, **options)
    def _represent_ZGate(self, basis, **options):
        """Represent this qubits in the computational basis (ZGate).
        """
        format = options.get('format', 'sympy')
        # Convert the bit string into its integer index within the 2**n vector
        # (least significant qubit on the right).
        n = 1
        definite_state = 0
        for it in reversed(self.qubit_values):
            definite_state += n*it
            n = n*2
        # One-hot column vector of length 2**nqubits.
        result = [0]*(2**self.dimension)
        result[int(definite_state)] = 1
        if format == 'sympy':
            return Matrix(result)
        elif format == 'numpy':
            import numpy as np
            return np.matrix(result, dtype='complex').transpose()
        elif format == 'scipy.sparse':
            from scipy import sparse
            return sparse.csr_matrix(result, dtype='complex').transpose()
        # NOTE(review): an unrecognized format falls through and returns None
        # -- presumably callers only ever pass the three formats above; verify.
class QubitBra(QubitState, Bra):
    """A multi-qubit bra in the computational (z) basis.

    The least significant qubit is on the right, matching ``Qubit``:
    |00001> has a 1 in the least significant qubit.

    Parameters
    ==========

    values : list, str
        The qubit values as a list of ints ([0,0,0,1,1,]) or a string ('011').

    Examples
    ========

    See ``Qubit`` for examples.
    """
    @property
    def dual_class(self):
        # Dagger of this bra produces a Qubit ket.
        return Qubit
class IntQubitState(QubitState):
    """A base class for qubits that work with binary representations."""

    @classmethod
    def _eval_args(cls, args):
        """Turn integer argument(s) into the underlying 0/1 qubit values."""
        # An existing QubitState passes through unchanged.
        if len(args) == 1 and isinstance(args[0], QubitState):
            return QubitState._eval_args(args)
        if len(args) == 1 and args[0] > 1:
            # Single integer: use the minimal number of bits.  The small
            # additive fudge factors guard against log() rounding error.
            nbits = int(math.ceil(math.log(args[0], 2)+.01)+.001)
            qubit_values = [(args[0]>>i)&1 for i in reversed(range(nbits))]
            return QubitState._eval_args(qubit_values)
        elif len(args) == 2 and args[1] > 1:
            # Pair (number, width): IntQubit(0,5) == |00000>.
            #TODO Raise error if there are not enough bits
            qubit_values = [(args[0]>>i)&1 for i in reversed(range(args[1]))]
            return QubitState._eval_args(qubit_values)
        else:
            return QubitState._eval_args(args)

    def as_int(self):
        """Return the numerical value of the qubit."""
        number = 0
        weight = 1
        # Accumulate bit * 2**position, least significant qubit first.
        for bit in reversed(self.qubit_values):
            number += weight*bit
            weight = weight << 1
        return number

    def _print_label(self, printer, *args):
        return str(self.as_int())

    def _print_label_pretty(self, printer, *args):
        return prettyForm(self._print_label(printer, *args))

    _print_label_repr = _print_label
    _print_label_latex = _print_label
class IntQubit(IntQubitState, Qubit):
    """A qubit ket that stores integers as binary numbers in qubit values.
    The differences between this class and ``Qubit`` are:
    * The form of the constructor.
    * The qubit values are printed as their corresponding integer, rather
      than the raw qubit values. The internal storage format of the qubit
      values is the same as ``Qubit``.
    Parameters
    ==========
    values : int, tuple
        If a single argument, the integer we want to represent in the qubit
        values. This integer will be represented using the fewest possible
        number of qubits. If a pair of integers, the first integer gives the
        integer to represent in binary form and the second integer gives
        the number of qubits to use.
    Examples
    ========
    Create a qubit for the integer 5:
    >>> from sympy.physics.quantum.qubit import IntQubit
    >>> from sympy.physics.quantum.qubit import Qubit
    >>> q = IntQubit(5)
    >>> q
    |5>
    We can also create an ``IntQubit`` by passing a ``Qubit`` instance.
    >>> q = IntQubit(Qubit('101'))
    >>> q
    |5>
    >>> q.as_int()
    5
    >>> q.nqubits
    3
    >>> q.qubit_values
    (1, 0, 1)
    We can go back to the regular qubit form.
    >>> Qubit(q)
    |101>
    """
    @property
    def dual_class(self):
        # Dagger of this ket produces an IntQubitBra.
        return IntQubitBra
class IntQubitBra(IntQubitState, QubitBra):
    """A qubit bra that stores integers as binary numbers in qubit values."""
    @property
    def dual_class(self):
        # Dagger of this bra produces an IntQubit ket.
        return IntQubit
#-----------------------------------------------------------------------------
# Qubit <---> Matrix conversion functions
#-----------------------------------------------------------------------------
def matrix_to_qubit(matrix):
    """Convert from the matrix repr. to a sum of Qubit objects.

    Parameters
    ----------
    matrix : Matrix, numpy.matrix, scipy.sparse
        The matrix to build the Qubit representation of. This works with
        sympy matrices, numpy matrices and scipy.sparse sparse matrices.

    Examples
    --------
    Represent a state and then go back to its qubit form:

        >>> from sympy.physics.quantum.qubit import matrix_to_qubit, Qubit
        >>> from sympy.physics.quantum.represent import represent
        >>> q = Qubit('01')
        >>> matrix_to_qubit(represent(q))
        |01>
    """
    # Infer the intermediate format from the concrete matrix type.
    if isinstance(matrix, numpy_ndarray):
        format = 'numpy'
    elif isinstance(matrix, scipy_sparse_matrix):
        format = 'scipy.sparse'
    else:
        format = 'sympy'
    # A valid Qubit representation is a row or column vector of length
    # 2**nqubits; rows are bras, columns are kets.
    if matrix.shape[0] == 1:
        mlistlen = matrix.shape[1]
        nqubits = log(mlistlen, 2)
        ket = False
        cls = QubitBra
    elif matrix.shape[1] == 1:
        mlistlen = matrix.shape[0]
        nqubits = log(mlistlen, 2)
        ket = True
        cls = Qubit
    else:
        raise QuantumError(
            'Matrix must be a row/column vector, got %r' % matrix
        )
    if not isinstance(nqubits, Integer):
        raise QuantumError('Matrix must be a row/column vector of size '
                           '2**nqubits, got: %r' % matrix)
    # Accumulate element*|bits> for every non-zero entry.
    result = 0
    for i in range(mlistlen):
        element = matrix[i, 0] if ket else matrix[0, i]
        if format in ('numpy', 'scipy.sparse'):
            element = complex(element)
        if element != 0.0:
            # Bit x of the Qubit is 1 exactly when bit x of i is set.
            qubit_array = [1 if i & (1 << x) else 0 for x in range(nqubits)]
            qubit_array.reverse()
            result = result + element*cls(*qubit_array)
    # If sympy simplified by pulling out a constant coefficient, undo that.
    if isinstance(result, (Mul, Add, Pow)):
        result = result.expand()
    return result
def qubit_to_matrix(qubit, format='sympy'):
    """Convert an Add/Mul of Qubit objects into its matrix representation.

    This function is the inverse of ``matrix_to_qubit`` and is a shorthand
    for ``represent(qubit)``.
    """
    # The previous version imported sympy.physics.quantum.gate.Z here but
    # never used it; the unused import has been removed.
    return represent(qubit, format=format)
#-----------------------------------------------------------------------------
# Measurement
#-----------------------------------------------------------------------------
def measure_all(qubit, format='sympy'):
    """Perform an ensemble measurement of all qubits.

    Parameters
    ==========

    qubit : Qubit, Add
        The qubit to measure.  This can be any Qubit or a linear combination
        of them.
    format : str
        The format of the intermediate matrices to use.  Possible values are
        ('sympy','numpy','scipy.sparse').  Currently only 'sympy' is
        implemented.

    Returns
    =======

    result : list
        A list of (state, probability) pairs, one for each primitive state
        with non-zero amplitude.
    """
    mat = qubit_to_matrix(qubit, format)
    if format == 'sympy':
        mat = mat.normalized()
        # max() over the shape handles both row (bra) and column (ket) vectors.
        size = max(mat.shape)
        nqubits = int(math.log(size)/math.log(2))
        outcomes = []
        for i in range(size):
            amplitude = mat[i]
            if amplitude != 0.0:
                # Probability is |amplitude|**2.
                outcomes.append(
                    (Qubit(IntQubit(i, nqubits)), amplitude*conjugate(amplitude))
                )
        return outcomes
    else:
        raise NotImplementedError(
            "This function can't handle non-sympy matrix formats yet"
        )
def measure_partial(qubit, bits, format='sympy'):
    """Perform a partial ensemble measure on the specified qubits.

    Parameters
    ==========

    qubits : Qubit
        The qubit to measure.  This can be any Qubit or a linear combination
        of them.
    bits : tuple
        The qubits to measure.
    format : str
        The format of the intermediate matrices to use.  Possible values are
        ('sympy','numpy','scipy.sparse').  Currently only 'sympy' is
        implemented.

    Returns
    =======

    result : list
        A list of (collapsed state, probability) pairs for every outcome
        with non-zero probability.
    """
    m = qubit_to_matrix(qubit, format)
    # Accept a single bit index as well as a tuple of indices.
    if isinstance(bits, (int, Integer)):
        bits = (int(bits),)
    if format == 'sympy':
        m = m.normalized()
        possible_outcomes = _get_possible_outcomes(m, bits)
        output = []
        for outcome in possible_outcomes:
            # Probability of observing the measured bits with these values.
            prob = (outcome.H*outcome)[0]
            # Keep only outcomes that can actually occur.
            if prob != 0:
                output.append((matrix_to_qubit(outcome.normalized()), prob))
        return output
    else:
        raise NotImplementedError(
            "This function can't handle non-sympy matrix formats yet"
        )
def measure_partial_oneshot(qubit, bits, format='sympy'):
    """Perform a partial oneshot measurement on the specified qubits.
    A oneshot measurement is equivalent to performing a measurement on a
    quantum system. This type of measurement does not return the probabilities
    like an ensemble measurement does, but rather returns *one* of the
    possible resulting states. The exact state that is returned is determined
    by picking a state randomly according to the ensemble probabilities.
    Parameters
    ----------
    qubits : Qubit
        The qubit to measure. This can be any Qubit or a linear combination
        of them.
    bits : tuple
        The qubits to measure.
    format : str
        The format of the intermediate matrices to use. Possible values are
        ('sympy','numpy','scipy.sparse'). Currently only 'sympy' is
        implemented.
    Returns
    -------
    result : Qubit
        The qubit that the system collapsed to upon measurement.
    """
    import random
    m = qubit_to_matrix(qubit, format)
    if format == 'sympy':
        m = m.normalized()
        possible_outcomes = _get_possible_outcomes(m, bits)
        # Inverse transform sampling: walk the outcomes, accumulating
        # probability mass until it reaches a uniform random draw.
        random_number = random.random()
        total_prob = 0
        for outcome in possible_outcomes:
            # Calculate probability of finding the specified bits
            # with given values
            total_prob += (outcome.H*outcome)[0]
            if total_prob >= random_number:
                return matrix_to_qubit(outcome.normalized())
        # NOTE(review): if floating point rounding keeps total_prob below
        # random_number for every outcome, this falls through and returns
        # None -- presumably the probabilities sum to 1 so this cannot
        # happen; verify.
    else:
        raise NotImplementedError(
            "This function can't handle non-sympy matrix formats yet"
        )
def _get_possible_outcomes(m, bits):
    """Get the possible states that can be produced in a measurement.

    Parameters
    ----------
    m : Matrix
        The matrix representing the state of the system.
    bits : tuple, list
        Which bits will be measured.

    Returns
    -------
    result : list
        The list of possible states which can occur given this measurement.
        These are un-normalized, so we can derive the probability of finding
        this state by taking the inner product with itself.
    """
    # This is filled with loads of dirty binary tricks...You have been warned
    size = max(m.shape)  # Max of shape to account for bra or ket
    nqubits = int(math.log(size, 2)+.1)  # Number of qubits possible
    # Make the output states and put in output_matrices, nothing in them now.
    # Each state will represent a possible outcome of the measurement:
    # output_matrices[k] collects the amplitudes whose measured bits spell
    # out the binary number k (bit j of k corresponds to bits[j]).
    output_matrices = []
    for i in range(1 << len(bits)):
        output_matrices.append(zeros((2**nqubits, 1)))
    # Bitmasks will help sort how to determine possible outcomes.
    # When the bit mask is and-ed with a matrix-index,
    # it will determine which state that index belongs to.
    bit_masks = []
    for bit in bits:
        bit_masks.append(1 << bit)
    # Make possible outcome states
    for i in range(2**nqubits):
        trueness = 0  # This tells us to which output_matrix this value belongs
        # Set bit j of ``trueness`` when measured bit ``bits[j]`` is set in i.
        # BUG FIX: the previous code added ``j + 1``, which collides for three
        # or more measured bits (e.g. measured bits {0,1} and {2} both summed
        # to 3, merging distinct outcomes); ``1 << j`` makes the outcome index
        # a proper binary encoding and is identical for len(bits) <= 2.
        for j in range(len(bit_masks)):
            if i & bit_masks[j]:
                trueness += 1 << j
        # Put the value in the correct output matrix
        output_matrices[trueness][i] = m[i]
    return output_matrices
def measure_all_oneshot(qubit, format='sympy'):
    """Perform a oneshot ensemble measurement on all qubits.
    A oneshot measurement is equivalent to performing a measurement on a
    quantum system. This type of measurement does not return the probabilities
    like an ensemble measurement does, but rather returns *one* of the
    possible resulting states. The exact state that is returned is determined
    by picking a state randomly according to the ensemble probabilities.
    Parameters
    ----------
    qubits : Qubit
        The qubit to measure. This can be any Qubit or a linear combination
        of them.
    format : str
        The format of the intermediate matrices to use. Possible values are
        ('sympy','numpy','scipy.sparse'). Currently only 'sympy' is
        implemented.
    Returns
    -------
    result : Qubit
        The qubit that the system collapsed to upon measurement.
    """
    import random
    # NOTE(review): ``format`` is not forwarded to qubit_to_matrix here --
    # harmless in practice since only 'sympy' is supported below, but verify.
    m = qubit_to_matrix(qubit)
    if format == 'sympy':
        m = m.normalized()
        # Inverse transform sampling over the amplitudes: ``result`` ends up
        # as the index of the basis state whose cumulative probability first
        # exceeds the random draw.
        random_number = random.random()
        total = 0
        result = 0
        for i in m:
            total += i*i.conjugate()
            if total > random_number:
                break
            result += 1
        # Number of qubits is log2 of the vector length (+.1 guards rounding).
        return Qubit(IntQubit(result, int(math.log(max(m.shape),2)+.1)))
    else:
        raise NotImplementedError(
            "This function can't handle non-sympy matrix formats yet"
        )
|
|
#!/usr/bin/python
import getopt
import sys
from Bio import SeqIO
import time
import os
import shutil
import pandas
__author__ = "Andriy Sheremet"
#Helper functions definitions
def parse_contigs_ind(f_name):
    """Return a SeqIO index (lazy, dict-like) of the FASTA records in f_name.

    Remember to close the returned index object after use.
    """
    # SeqIO.index opens the file itself; the previous version opened and
    # immediately closed a second, completely unused handle.
    return SeqIO.index(f_name, "fasta")
#returning specific sequences and overal list
def retrive_sequence(contig_lst, rec_dic):
    """Return the sequence strings for each contig name in contig_lst.

    Parameters:
        contig_lst -- iterable of record ids to extract
        rec_dic -- dict/index mapping record id -> SeqIO record

    ``Seq.tostring()`` was deprecated and later removed from Biopython;
    ``str(seq)`` returns the same string and works on all versions.
    """
    return [str(rec_dic[contig].seq) for contig in contig_lst]
def filter_seq_dict(key_lst, rec_dic):
    """Return the sub-dictionary of rec_dic restricted to the names in key_lst."""
    filtered = {}
    for name in key_lst:
        # KeyError propagates for unknown names, matching dict indexing.
        filtered[name] = rec_dic[name]
    return filtered
def unique_scaffold_topEval(dataframe):
    """Collapse hits to one row per scaffold, keeping the best-scoring row.

    For each distinct value in the first column (scaffold/query id), keep the
    row with the smallest value in column 11 -- presumably the e-value, per
    the commented-out column list below (TODO confirm).  Returns a new
    DataFrame with the same columns as the input.
    """
    #returns pandas series object
    variables = list(dataframe.columns.values)
    scaffolds=dict()
    rows=list()
    for row in dataframe.itertuples():
        #if row[1]=='Ga0073928_10002560':
        # row[0] is the index, so row[1] is the first data column and
        # row[11] is the eleventh data column.
        if row[1] not in scaffolds:
            scaffolds[row[1]]=row
        else:
            # Keep the row with the smaller column-11 value.
            if row[11]<scaffolds[row[1]][11]:
                scaffolds[row[1]]=row
    rows=scaffolds.values()
    #variables=['quid', 'suid', 'iden', 'alen', 'mism', 'gapo', 'qsta', 'qend', 'ssta', 'send', 'eval', 'bits']
    df = pandas.DataFrame([[getattr(i,j) for j in variables] for i in rows], columns = variables)
    return df
def usage():
    """Print a placeholder usage message (real text is still commented out)."""
    print "\nThis is the usage function\n"
# print 'Usage: '+sys.argv[0]+' -i <input_file> [-o <output>] [-l <minimum length>]'
# print 'Example: '+sys.argv[0]+' -i input.fasta -o output.fasta -l 100'
def main(argv):
    """Parse command-line options, validate inputs, and set up the project dir.

    Options cover reference/metagenome FASTA files, output name and format(s),
    BLAST-style thresholds (e-value, alignment length, identity) and optional
    per-iteration increments.  The filtering pipeline itself is still
    commented out at the bottom of this function.
    """
    #default parameters
    mg_lst = []
    ref_lst = []
    e_val = 1e-5
    alen = 50.0
    iden = 95.0
    name= "output"
    fmt_lst = ["fasta"]
    supported_formats =["fasta", "csv"]
    iterations = 1
    alen_increment = 5.0
    iden_increment = 0.0
    try:
        opts, args = getopt.getopt(argv, "r:m:n:e:a:i:f:h", ["reference=", "metagenome=", "name=", "e_value=", "alignment_length=", "identity=","format=", "iterations=", "alen_increment=", "iden_increment=", "help"])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    # Option handling: each branch validates its argument and echoes the
    # effective value; numeric parse failures either exit (-e/-a/-i) or fall
    # back to the default with a warning (the iteration options).
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt in ("-r", "--reference"):
            if arg:
                ref_lst=arg.split(',')
                #infiles = arg
            print "Reference file(s)", ref_lst
        elif opt in ("-m", "--metagenome"):
            if arg:
                mg_lst=arg.split(',')
                #infiles = arg
            print "Metagenome file(s)", mg_lst
        elif opt in ("-f", "--format"):
            if arg:
                fmt_lst=arg.split(',')
                #infiles = arg
            print "Output format(s)", fmt_lst
        elif opt in ("-n", "--name"):
            if arg.strip():
                name = arg
            print "Project name", name
        elif opt in ("-e", "--e_value"):
            try:
                e_val = float(arg)
            except:
                print "\nERROR: Please enter numerical value as -e parameter (using default: 1e-5)"
                usage()
                sys.exit(1)
            print "E value", e_val
        elif opt in ("-a", "--alignment_length"):
            try:
                alen = float(arg)
            except:
                print "\nERROR: Please enter an numerical value as -alen parameter (using default: 50.0)"
                usage()
                sys.exit(1)
            print "Alignment length", alen
        elif opt in ("-i", "--identity"):
            try:
                iden = float(arg)
            except:
                print "\nERROR: Please enter an numerical value as -iden parameter (using default: 95.0)"
                usage()
                sys.exit(1)
            print "Alignment length", iden
        elif opt in ("--iterations"):
            try:
                iterations = int(arg)
            except:
                print "\nWARNING: Please enter integer value as --iterations parameter (using default: 1)"
            print "Iterations: ", iterations
        elif opt in ("--alen_increment"):
            try:
                alen_increment = float(arg)
            except:
                print "\nWARNING: Please enter numerical value as --alen_increment parameter (using default: )", alen_increment
            print "Alignment length increment: ", alen_increment
        elif opt in ("--iden_increment"):
            try:
                iden_increment = float(arg)
            except:
                print "\nWARNING: Please enter numerical value as --iden_increment parameter (using default: )", iden_increment
            print "Alignment length increment: ", iden_increment
    # Existence checks: every supplied reference/metagenome file must open.
    for ref_file in [x for x in ref_lst if x]:
        try:
            #
            with open(ref_file, "rU") as hand_ref:
                pass
        except:
            print "\nERROR: Reference File(s) ["+ref_file+"] doesn't exist"
            usage()
            sys.exit(1)
    for mg_file in [x for x in mg_lst if x]:
        try:
            #
            with open(mg_file, "rU") as hand_mg:
                pass
        except:
            print "\nERROR: Metagenome File(s) ["+mg_file+"] doesn't exist"
            usage()
            sys.exit(1)
    # Any unsupported output format resets the whole list to the default.
    for fmt in [x for x in fmt_lst if x]:
        if fmt not in supported_formats:
            print "\nWARNING: Output format [",fmt,"] is not supported"
            print "\tUse -h(--help) option for the list of supported formats"
            fmt_lst=["fasta"]
            print "\tUsing default output format: ", fmt_lst[0]
    # Recreate the project directory from scratch -- this DELETES any
    # previous run with the same name.
    project_dir = name
    if os.path.exists(project_dir):
        shutil.rmtree(project_dir)
    try:
        os.mkdir(project_dir)
    except OSError:
        print "ERROR: Cannot create project directory: " + name
        raise
    # Echo the effective run parameters.
    print "\n\t Initial Parameters:"
    print "\nProject Name: ", name,'\n'
    print "Project Directory: ", os.path.abspath(name),'\n'
    print "Reference File(s): ", ref_lst,'\n'
    print "Metagenome File(s): ", mg_lst,'\n'
    print "E Value: ", e_val, "\n"
    print "Alignment Length: ", alen,'\n'
    print "Sequence Identity: ", iden,'\n'
    print "Output Format(s):", fmt_lst,'\n'
    if iterations > 1:
        print "Iterations: ", iterations, '\n'
        print "Alignment Length Increment: ", alen_increment, '\n'
        print "Sequence identity Increment: ", iden_increment, '\n'
#    parsed = SeqIO.parse(handle, "fasta")
#
#    records = list()
#
#
#    total = 0
#    processed = 0
#    for record in parsed:
#        total += 1
#        #print(record.id), len(record.seq)
#        if len(record.seq) >= length:
#            processed += 1
#            records.append(record)
#    handle.close()
#
#    print "%d sequences found"%(total)
#
#    try:
#        output_handle = open(outfile, "w")
#        SeqIO.write(records, output_handle, "fasta")
#        output_handle.close()
#        print "%d sequences written"%(processed)
#    except:
#        print "ERROR: Illegal output filename"
#        sys.exit(1)
# Script entry point: forward the command-line arguments (minus argv[0]).
if __name__ == "__main__":
    main(sys.argv[1:])
|
|
"""Support for providing temporary directories to test functions."""
import os
import re
import sys
import tempfile
from pathlib import Path
from typing import Optional
import attr
from .pathlib import LOCK_TIMEOUT
from .pathlib import make_numbered_dir
from .pathlib import make_numbered_dir_with_cleanup
from .pathlib import rm_rf
from _pytest.compat import final
from _pytest.config import Config
from _pytest.deprecated import check_ispytest
from _pytest.fixtures import fixture
from _pytest.fixtures import FixtureRequest
from _pytest.monkeypatch import MonkeyPatch
@final
@attr.s(init=False)
class TempPathFactory:
    """Factory for temporary directories under the common base temp directory.

    The base directory can be configured using the ``--basetemp`` option.
    """

    # The --basetemp value supplied by the user (made absolute), or None.
    _given_basetemp = attr.ib(type=Optional[Path])
    # Trace function used for debug logging (config.trace.get("tmpdir")).
    _trace = attr.ib()
    # Cached base temp directory; created lazily by getbasetemp().
    _basetemp = attr.ib(type=Optional[Path])
    def __init__(
        self,
        given_basetemp: Optional[Path],
        trace,
        basetemp: Optional[Path] = None,
        *,
        _ispytest: bool = False,
    ) -> None:
        check_ispytest(_ispytest)
        if given_basetemp is None:
            self._given_basetemp = None
        else:
            # Use os.path.abspath() to get absolute path instead of resolve() as it
            # does not work the same in all platforms (see #4427).
            # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012).
            self._given_basetemp = Path(os.path.abspath(str(given_basetemp)))
        self._trace = trace
        self._basetemp = basetemp
    @classmethod
    def from_config(
        cls,
        config: Config,
        *,
        _ispytest: bool = False,
    ) -> "TempPathFactory":
        """Create a factory according to pytest configuration.

        :meta private:
        """
        check_ispytest(_ispytest)
        return cls(
            given_basetemp=config.option.basetemp,
            trace=config.trace.get("tmpdir"),
            _ispytest=True,
        )
    def _ensure_relative_to_basetemp(self, basename: str) -> str:
        # Reject traversal/absolute names such as "../x": after normalization
        # the candidate must resolve to a direct child of the base temp dir.
        basename = os.path.normpath(basename)
        if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp():
            raise ValueError(f"{basename} is not a normalized and relative path")
        return basename
    def mktemp(self, basename: str, numbered: bool = True) -> Path:
        """Create a new temporary directory managed by the factory.

        :param basename:
            Directory base name, must be a relative path.
        :param numbered:
            If ``True``, ensure the directory is unique by adding a numbered
            suffix greater than any existing one: ``basename="foo-"`` and ``numbered=True``
            means that this function will create directories named ``"foo-0"``,
            ``"foo-1"``, ``"foo-2"`` and so on.
        :returns:
            The path to the new directory.
        """
        basename = self._ensure_relative_to_basetemp(basename)
        if not numbered:
            # mode 0o700: directory is private to the current user
            p = self.getbasetemp().joinpath(basename)
            p.mkdir(mode=0o700)
        else:
            p = make_numbered_dir(root=self.getbasetemp(), prefix=basename, mode=0o700)
        self._trace("mktemp", p)
        return p
    def getbasetemp(self) -> Path:
        """Return the base temporary directory, creating it if needed."""
        if self._basetemp is not None:
            return self._basetemp
        if self._given_basetemp is not None:
            # --basetemp given: wipe any existing directory and recreate it
            # privately, then cache the resolved path.
            basetemp = self._given_basetemp
            if basetemp.exists():
                rm_rf(basetemp)
            basetemp.mkdir(mode=0o700)
            basetemp = basetemp.resolve()
        else:
            from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT")
            temproot = Path(from_env or tempfile.gettempdir()).resolve()
            user = get_user() or "unknown"
            # use a sub-directory in the temproot to speed-up
            # make_numbered_dir() call
            rootdir = temproot.joinpath(f"pytest-of-{user}")
            try:
                rootdir.mkdir(mode=0o700, exist_ok=True)
            except OSError:
                # getuser() likely returned illegal characters for the platform, use unknown back off mechanism
                rootdir = temproot.joinpath("pytest-of-unknown")
                rootdir.mkdir(mode=0o700, exist_ok=True)
            # Because we use exist_ok=True with a predictable name, make sure
            # we are the owners, to prevent any funny business (on unix, where
            # temproot is usually shared).
            # Also, to keep things private, fixup any world-readable temp
            # rootdir's permissions. Historically 0o755 was used, so we can't
            # just error out on this, at least for a while.
            if sys.platform != "win32":
                uid = os.getuid()
                rootdir_stat = rootdir.stat()
                # getuid shouldn't fail, but cpython defines such a case.
                # Let's hope for the best.
                if uid != -1:
                    if rootdir_stat.st_uid != uid:
                        raise OSError(
                            f"The temporary directory {rootdir} is not owned by the current user. "
                            "Fix this and try again."
                        )
                    if (rootdir_stat.st_mode & 0o077) != 0:
                        os.chmod(rootdir, rootdir_stat.st_mode & ~0o077)
            # keep the 3 most recent numbered base dirs; older ones get cleaned up
            basetemp = make_numbered_dir_with_cleanup(
                prefix="pytest-",
                root=rootdir,
                keep=3,
                lock_timeout=LOCK_TIMEOUT,
                mode=0o700,
            )
        assert basetemp is not None, basetemp
        self._basetemp = basetemp
        self._trace("new basetemp", basetemp)
        return basetemp
def get_user() -> Optional[str]:
    """Best-effort lookup of the current user name.

    Returns None when ``getpass.getuser()`` cannot determine a user in the
    current environment (see #1010).
    """
    import getpass

    try:
        username = getpass.getuser()
    except (ImportError, KeyError):
        return None
    return username
def pytest_configure(config: Config) -> None:
    """Create a TempPathFactory and attach it to the config object.

    This is to comply with existing plugins which expect the handler to be
    available at pytest_configure time, but ideally should be moved entirely
    to the tmp_path_factory session fixture.
    """
    monkeypatch = MonkeyPatch()
    config.add_cleanup(monkeypatch.undo)
    factory = TempPathFactory.from_config(config, _ispytest=True)
    monkeypatch.setattr(config, "_tmp_path_factory", factory, raising=False)
@fixture(scope="session")
def tmp_path_factory(request: FixtureRequest) -> TempPathFactory:
    """Return a :class:`pytest.TempPathFactory` instance for the test session."""
    # The factory is attached to the config by pytest_configure() above.
    factory: TempPathFactory = request.config._tmp_path_factory  # type: ignore
    return factory
def _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path:
    # Derive a filesystem-safe directory name from the test node name:
    # replace every non-word character with "_" and cap the length at 30.
    max_length = 30
    sanitized = re.sub(r"[\W]", "_", request.node.name)[:max_length]
    return factory.mktemp(sanitized, numbered=True)
@fixture
def tmp_path(request: FixtureRequest, tmp_path_factory: TempPathFactory) -> Path:
    """Return a temporary directory path object which is unique to each test
    function invocation, created as a sub directory of the base temporary
    directory.

    By default, a new base temporary directory is created each test session,
    and old bases are removed after 3 sessions, to aid in debugging. If
    ``--basetemp`` is used then it is cleared each session. See :ref:`base
    temporary directory`.

    The returned object is a :class:`pathlib.Path` object.
    """
    # Naming/sanitization is delegated to _mk_tmp().
    return _mk_tmp(request, tmp_path_factory)
|
|
""" Properties are objects that can be assigned as class level
attributes on Bokeh models, to provide automatic serialization
and validation.
For example, the following defines a model that has integer,
string, and list[float] properties::
class Model(HasProps):
foo = Int
bar = String
baz = List(Float)
The properties of this class can be initialized by specifying
keyword arguments to the initializer::
m = Model(foo=10, bar="a str", baz=[1,2,3,4])
But also by setting the attributes on an instance::
m.foo = 20
Attempts to set a property to a value of the wrong type will
result in a ``ValueError`` exception::
>>> m.foo = 2.3
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/bryan/work/bokeh/bokeh/properties.py", line 585, in __setattr__
super(HasProps, self).__setattr__(name, value)
File "/Users/bryan/work/bokeh/bokeh/properties.py", line 159, in __set__
raise e
File "/Users/bryan/work/bokeh/bokeh/properties.py", line 152, in __set__
self.validate(value)
File "/Users/bryan/work/bokeh/bokeh/properties.py", line 707, in validate
(nice_join([ cls.__name__ for cls in self._underlying_type ]), value, type(value).__name__))
ValueError: expected a value of type int8, int16, int32, int64 or int, got 2.3 of type float
Additionally, properties know how to serialize themselves,
to be understood by BokehJS.
"""
from __future__ import absolute_import, print_function
import re
import types
import difflib
import datetime
import dateutil.parser
import collections
from importlib import import_module
from copy import copy
from warnings import warn
import inspect
import logging
import numbers
logger = logging.getLogger(__name__)
from six import string_types, add_metaclass, iteritems
from . import enums
from .util.string import nice_join
from .property_containers import PropertyValueList, PropertyValueDict, PropertyValueContainer
def field(name):
    ''' Convenience function to explicitly mark a field specification for
    a Bokeh model property.

    Args:
        name (str) : name of a data source field to reference for a property.

    Returns:
        dict : ``{"field": name}``

    Note:
        This function is included for completeness. String values for
        property specifications are by default interpreted as field names.

    '''
    return {"field": name}
def value(val):
    ''' Convenience function to explicitly mark a value specification for
    a Bokeh model property.

    Args:
        val (any) : a fixed value to specify for a property.

    Returns:
        dict : ``{"value": val}``

    Note:
        String values for property specifications are by default interpreted
        as field names. This function is especially useful when you want to
        specify a fixed value with text properties.

    Example:
        .. code-block:: python

            # The following will take text values to render from a data source
            # column "text_column", but use a fixed value "12pt" for font size
            p.text("x", "y", text="text_column",
                   text_font_size=value("12pt"), source=source)

    '''
    return {"value": val}
# Python types accepted by Bool properties. NumPy's boolean scalar is added
# when NumPy is importable: ``np.bool8`` was an alias of ``np.bool_`` that was
# removed in NumPy 1.24, so use the stable ``np.bool_`` spelling and also
# tolerate AttributeError from exotic/stripped NumPy builds.
bokeh_bool_types = (bool,)
try:
    import numpy as np
    bokeh_bool_types += (np.bool_,)
except (ImportError, AttributeError):
    pass
# Python types accepted by Int properties (covers int/long/NumPy integers).
bokeh_integer_types = (numbers.Integral,)
# used to indicate properties that are not set (vs null, None, etc)
class _NotSet(object):
    """Sentinel type marking a property value that was never set
    (as distinct from being explicitly set to None)."""
    pass
class DeserializationError(Exception):
    """Raised by ``from_json`` implementations when a JSON fragment cannot
    be converted into the Python value expected by a property type."""
    pass
class Property(object):
    """ Base class for all type properties.

    A ``Property`` is a Python descriptor: assigned as a class-level
    attribute on ``HasProps`` subclasses, it mediates attribute get/set
    with validation, per-instance defaults, container wrapping, and
    change notification.
    """
    def __init__(self, default=None, help=None):
        """ This is how the descriptor is created in the class declaration """
        if isinstance(default, types.FunctionType): # aka. lazy value
            self.validate(default())
        else:
            self.validate(default)
        self._default = default
        self.__doc__ = help
        # (Property, converter) pairs consulted when validation fails; see accepts()
        self.alternatives = []
        # This gets set by the class decorator at class creation time
        self.name = "unnamed"
    def __str__(self):
        return self.__class__.__name__
    @property
    def _name(self):
        # name of the hidden per-instance attribute that stores the value
        return "_" + self.name
    @property
    def default(self):
        if not isinstance(self._default, types.FunctionType):
            # shallow-copy so container defaults are not shared between instances
            return copy(self._default)
        else:
            # lazy default: call the function and re-validate its result
            value = self._default()
            self.validate(value)
            return value
    @classmethod
    def autocreate(cls, name=None):
        """ Called by the metaclass to create a
        new instance of this descriptor
        if the user just assigned it to a property without trailing
        parentheses.
        """
        return cls()
    def matches(self, new, old):
        # XXX: originally this code warned about not being able to compare values, but that
        # doesn't make sense, because most comparisons involving numpy arrays will fail with
        # ValueError exception, thus warning about inevitable.
        try:
            if new is None or old is None:
                return new is old # XXX: silence FutureWarning from NumPy
            else:
                return new == old
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            # treat incomparable values as "changed"
            logger.debug("could not compare %s and %s for property %s (Reason: %s)", new, old, self.name, e)
            return False
    def from_json(self, json, models=None):
        # identity by default; subclasses implement real deserialization
        return json
    def transform(self, value):
        # hook for subclasses to coerce a validated value; identity by default
        return value
    def validate(self, value):
        # subclasses raise ValueError for unacceptable values
        pass
    def is_valid(self, value):
        try:
            self.validate(value)
        except ValueError:
            return False
        else:
            return True
    def _get(self, obj):
        # lazily install the (copied) default on first access
        if not hasattr(obj, self._name):
            self._set_default(obj, self.default)
        return getattr(obj, self._name)
    def __get__(self, obj, owner=None):
        if obj is not None:
            return self._get(obj)
        elif owner is not None:
            # accessed on the class itself: return the descriptor
            return self
        else:
            raise ValueError("both 'obj' and 'owner' are None, don't know what to do")
    @classmethod
    def _wrap_container(cls, value):
        # Wrap plain lists/dicts in tracking containers so that mutation
        # "behind our back" can be detected (see _notify_mutated).
        if isinstance(value, list):
            if isinstance(value, PropertyValueList):
                return value
            else:
                return PropertyValueList(value)
        elif isinstance(value, dict):
            if isinstance(value, PropertyValueDict):
                return value
            else:
                return PropertyValueDict(value)
        else:
            return value
    def _prepare_value(self, value):
        try:
            self.validate(value)
        except ValueError as e:
            # try registered alternative types, converting on first match
            for tp, converter in self.alternatives:
                if tp.is_valid(value):
                    value = converter(value)
                    break
            else:
                raise e
        else:
            value = self.transform(value)
        return self._wrap_container(value)
    def _mark_dirty_and_trigger(self, obj, old, value):
        obj._dirty = True
        if hasattr(obj, 'trigger'):
            obj.trigger(self.name, old, value)
    # set a default, so no 'old' or notification
    def _set_default(self, obj, value):
        value = self._wrap_container(value)
        if isinstance(value, PropertyValueContainer):
            value._register_owner(obj, self)
        setattr(obj, self._name, value)
    def _real_set(self, obj, old, value):
        obj._changed_vars.add(self.name)
        # no-op when already set and equal to the old value
        if self._name in obj.__dict__ and self.matches(value, old):
            return
        # "old" is the logical old value, but it may not be
        # the actual current attribute value if our value
        # was mutated behind our back and we got _notify_mutated
        old_attr_value = self.__get__(obj)
        if old_attr_value is not value:
            if isinstance(old_attr_value, PropertyValueContainer):
                old_attr_value._unregister_owner(obj, self)
            if isinstance(value, PropertyValueContainer):
                value._register_owner(obj, self)
            setattr(obj, self._name, value)
        # for notification purposes, "old" should be the logical old
        self._mark_dirty_and_trigger(obj, old, value)
    def __set__(self, obj, value):
        value = self._prepare_value(value)
        old = self.__get__(obj)
        self._real_set(obj, old, value)
    # called when a container is mutated "behind our back" and
    # we detect it with our collection wrappers. In this case,
    # somewhat weirdly, "old" is a copy and the new "value"
    # should already be set unless we change it due to
    # validation.
    def _notify_mutated(self, obj, old):
        value = self.__get__(obj)
        # re-validate because the contents of 'old' have changed,
        # in some cases this could give us a new object for the value
        value = self._prepare_value(value)
        self._real_set(obj, old, value)
    def __delete__(self, obj):
        if hasattr(obj, self._name):
            delattr(obj, self._name)
    @property
    def has_ref(self):
        # True when values may reference other models; see Instance
        return False
    def accepts(self, tp, converter):
        # Register an alternative accepted type plus a converter into the
        # native type; returns self so calls can be chained.
        tp = ParameterizedProperty._validate_type_param(tp)
        self.alternatives.append((tp, converter))
        return self
    def __or__(self, other):
        # Prop1 | Prop2 builds an Either of the two
        return Either(self, other)
class Include(object):
    """ Include other properties from mixin Models, with a given prefix. """
    def __init__(self, delegate, help="", use_prefix=True):
        # Only HasProps subclasses can donate properties.
        is_hasprops_subclass = isinstance(delegate, type) and issubclass(delegate, HasProps)
        if not is_hasprops_subclass:
            raise ValueError("expected a subclass of HasProps, got %r" % delegate)
        self.delegate = delegate
        self.help = help
        self.use_prefix = use_prefix
_EXAMPLE_TEMPLATE = """
Example
-------
.. bokeh-plot:: ../%(path)s
:source-position: none
*source:* `%(path)s <https://github.com/bokeh/bokeh/tree/master/%(path)s>`_
"""
class MetaHasProps(type):
    """ Metaclass for HasProps: expands Include mixins, instantiates
    Property subclasses assigned without parens, and records the
    property-name sets on the class for fast introspection. """
    def __new__(cls, class_name, bases, class_dict):
        names = set()             # all property names declared on this class
        names_with_refs = set()   # properties whose values reference other models
        container_names = set()   # properties holding container values
        # First pre-process to handle all the Includes
        includes = {}
        removes = set()
        for name, prop in class_dict.items():
            if not isinstance(prop, Include):
                continue
            delegate = prop.delegate
            if prop.use_prefix:
                # "glyph_props" -> "glyph_" prefix on each delegated property
                prefix = re.sub("_props$", "", name) + "_"
            else:
                prefix = ""
            for subpropname in delegate.class_properties(withbases=False):
                fullpropname = prefix + subpropname
                subprop = delegate.lookup(subpropname)
                if isinstance(subprop, Property):
                    # If it's an actual instance, then we need to make a copy
                    # so two properties don't write to the same hidden variable
                    # inside the instance.
                    subprop = copy(subprop)
                if "%s" in prop.help:
                    # substitute the humanized property name into the help template
                    doc = prop.help % subpropname.replace('_', ' ')
                else:
                    doc = prop.help
                try:
                    # subprop may be a Property subclass: instantiate it with the doc
                    includes[fullpropname] = subprop(help=doc)
                except TypeError:
                    # already an instance: just attach the doc
                    includes[fullpropname] = subprop
                    subprop.__doc__ = doc
            # Remove the name of the Include attribute itself
            removes.add(name)
        # Update the class dictionary, taking care not to overwrite values
        # from the delegates that the subclass may have explicitly defined
        for key, val in includes.items():
            if key not in class_dict:
                class_dict[key] = val
        for tmp in removes:
            del class_dict[tmp]
        dataspecs = {}
        units_to_add = {}
        for name, prop in class_dict.items():
            if isinstance(prop, Property):
                prop.name = name
                if prop.has_ref:
                    names_with_refs.add(name)
                elif isinstance(prop, ContainerProperty):
                    container_names.add(name)
                names.add(name)
                if isinstance(prop, DataSpec):
                    dataspecs[name] = prop
                if hasattr(prop, '_units_type'):
                    # synthesize a companion "<name>_units" property
                    units_to_add[name+"_units"] = prop._units_type
            elif isinstance(prop, type) and issubclass(prop, Property):
                # Support the user adding a property without using parens,
                # i.e. using just the Property subclass instead of an
                # instance of the subclass
                newprop = prop.autocreate(name=name)
                class_dict[name] = newprop
                newprop.name = name
                names.add(name)
                # Process dataspecs
                if issubclass(prop, DataSpec):
                    dataspecs[name] = newprop
        for name, prop in units_to_add.items():
            prop.name = name
            names.add(name)
            class_dict[name] = prop
        # record the collected sets on the class for HasProps introspection
        class_dict["__properties__"] = names
        class_dict["__properties_with_refs__"] = names_with_refs
        class_dict["__container_props__"] = container_names
        if dataspecs:
            class_dict["_dataspecs"] = dataspecs
        if "__example__" in class_dict:
            # append a rendered example section to the class docstring
            path = class_dict["__example__"]
            class_dict["__doc__"] += _EXAMPLE_TEMPLATE % dict(path=path)
        return type.__new__(cls, class_name, bases, class_dict)
def accumulate_from_subclasses(cls, propname):
    """ Collect the union of the set-valued attribute ``propname`` across
    every HasProps class in the MRO of ``cls``. """
    collected = set()
    for klass in inspect.getmro(cls):
        if issubclass(klass, HasProps):
            collected |= set(getattr(klass, propname))
    return collected
def abstract(cls):
    """ A phony decorator to mark abstract base classes. """
    # Purely a marker: returns the class unchanged, but refuses to be
    # applied to anything that is not a HasProps subclass.
    if issubclass(cls, HasProps):
        return cls
    raise TypeError("%s is not a subclass of HasProps" % cls.__name__)
@add_metaclass(MetaHasProps)
class HasProps(object):
    """ Base class for objects that carry Bokeh Property descriptors.

    Provides initialization from keyword arguments, change tracking, and
    introspection helpers over the property sets collected by MetaHasProps.
    """
    def __init__(self, **properties):
        super(HasProps, self).__init__()
        # names of properties assigned since creation / last reset
        self._changed_vars = set()
        for name, value in properties.items():
            setattr(self, name, value)
    def __setattr__(self, name, value):
        # Only underscore-prefixed names and declared properties may be set;
        # anything else raises with close matches to aid typo hunting.
        props = sorted(self.properties())
        if name.startswith("_") or name in props:
            super(HasProps, self).__setattr__(name, value)
        else:
            matches, text = difflib.get_close_matches(name.lower(), props), "similar"
            if not matches:
                matches, text = props, "possible"
            raise AttributeError("unexpected attribute '%s' to %s, %s attributes are %s" %
                (name, self.__class__.__name__, text, nice_join(matches)))
    def clone(self):
        """ Returns a duplicate of this object with all its properties
        set appropriately. Values which are containers are shallow-copied.
        """
        return self.__class__(**self.changed_properties_with_values())
    @classmethod
    def lookup(cls, name):
        # fetch the Property descriptor object itself (not an instance value)
        return getattr(cls, name)
    @classmethod
    def properties_with_refs(cls):
        """ Returns a set of the names of this object's properties that
        have references. We traverse the class hierarchy and
        pull together the full list of properties.
        """
        # NOTE(review): the assignment below is name-mangled to
        # _HasProps__cached_allprops_with_refs, but the hasattr() string is
        # not, so this cache appears to never hit and the set is recomputed
        # on every call -- verify before relying on caching here.
        if not hasattr(cls, "__cached_allprops_with_refs"):
            s = accumulate_from_subclasses(cls, "__properties_with_refs__")
            cls.__cached_allprops_with_refs = s
        return cls.__cached_allprops_with_refs
    @classmethod
    def properties_containers(cls):
        """ Returns a list of properties that are containers
        """
        # same caching caveat as properties_with_refs() above
        if not hasattr(cls, "__cached_allprops_containers"):
            s = accumulate_from_subclasses(cls, "__container_props__")
            cls.__cached_allprops_containers = s
        return cls.__cached_allprops_containers
    @classmethod
    def properties(cls):
        """ Returns a set of the names of this object's properties. We
        traverse the class hierarchy and pull together the full
        list of properties.
        """
        # same caching caveat as properties_with_refs() above
        if not hasattr(cls, "__cached_allprops"):
            s = cls.class_properties()
            cls.__cached_allprops = s
        return cls.__cached_allprops
    @classmethod
    def dataspecs(cls):
        """ Returns a set of the names of this object's dataspecs (and
        dataspec subclasses). Traverses the class hierarchy.
        """
        # same caching caveat as properties_with_refs() above
        if not hasattr(cls, "__cached_dataspecs"):
            dataspecs = set()
            for c in reversed(inspect.getmro(cls)):
                if hasattr(c, "_dataspecs"):
                    dataspecs.update(c._dataspecs.keys())
            cls.__cached_dataspecs = dataspecs
        return cls.__cached_dataspecs
    @classmethod
    def dataspecs_with_refs(cls):
        # merge _dataspecs dicts along the MRO, subclasses overriding bases
        dataspecs = {}
        for c in reversed(inspect.getmro(cls)):
            if hasattr(c, "_dataspecs"):
                dataspecs.update(c._dataspecs)
        return dataspecs
    def changed_vars(self):
        """ Returns which variables changed since the creation of the object,
        or the last call to reset_changed_vars().
        """
        # ref/container properties are always reported as changed, since
        # in-place mutation cannot be tracked through _changed_vars
        return set.union(self._changed_vars, self.properties_with_refs(),
            self.properties_containers())
    def reset_changed_vars(self):
        # clear the explicit change record (refs/containers still report)
        self._changed_vars = set()
    def properties_with_values(self):
        return dict([ (attr, getattr(self, attr)) for attr in self.properties() ])
    def changed_properties(self):
        return self.changed_vars()
    def changed_properties_with_values(self):
        return dict([ (attr, getattr(self, attr)) for attr in self.changed_properties() ])
    @classmethod
    def class_properties(cls, withbases=True):
        if withbases:
            return accumulate_from_subclasses(cls, "__properties__")
        else:
            return set(cls.__properties__)
    def set(self, **kwargs):
        """ Sets a number of properties at once """
        for kw in kwargs:
            setattr(self, kw, kwargs[kw])
    def pprint_props(self, indent=0):
        """ Prints the properties of this object, nicely formatted """
        for key, value in self.properties_with_values().items():
            print("%s%s: %r" % (" "*indent, key, value))
class PrimitiveProperty(Property):
    """ A base class for simple property types. Subclasses should
    define a class attribute ``_underlying_type`` that is a tuple
    of acceptable type values for the property.
    """
    _underlying_type = None
    def validate(self, value):
        super(PrimitiveProperty, self).validate(value)
        # None is always accepted; otherwise the value must be an instance
        # of one of the underlying types.
        if value is None or isinstance(value, self._underlying_type):
            return
        raise ValueError("expected a value of type %s, got %s of type %s" %
            (nice_join([ cls.__name__ for cls in self._underlying_type ]), value, type(value).__name__))
    def from_json(self, json, models=None):
        # primitives deserialize as themselves
        if json is None or isinstance(json, self._underlying_type):
            return json
        expected = nice_join([ cls.__name__ for cls in self._underlying_type ])
        raise DeserializationError("%s expected %s, got %s" % (self, expected, json))
class Bool(PrimitiveProperty):
    """ Boolean type property. """
    # accepts Python bool plus NumPy's boolean scalar when NumPy is available
    _underlying_type = bokeh_bool_types
class Int(PrimitiveProperty):
    """ Signed integer type property. """
    # numbers.Integral covers int/long and NumPy integer scalars
    _underlying_type = bokeh_integer_types
class Float(PrimitiveProperty):
    """ Floating point type property. """
    # numbers.Real also admits integers, so Int values validate as Float
    _underlying_type = (numbers.Real,)
class Complex(PrimitiveProperty):
    """ Complex floating point type property. """
    _underlying_type = (numbers.Complex,)
class String(PrimitiveProperty):
    """ String type property. """
    # six.string_types: str on Python 3, basestring on Python 2
    _underlying_type = string_types
class Regex(String):
    """ Regex type property validates that text values match the
    given regular expression.
    """
    def __init__(self, regex, default=None, help=None):
        # compile once; reused for every validate() call
        self.regex = re.compile(regex)
        super(Regex, self).__init__(default=default, help=help)
    def validate(self, value):
        super(Regex, self).validate(value)
        if value is None or self.regex.match(value) is not None:
            return
        raise ValueError("expected a string matching %r pattern, got %r" % (self.regex.pattern, value))
    def __str__(self):
        return "%s(%r)" % (self.__class__.__name__, self.regex.pattern)
class JSON(String):
    """ JSON type property validates that text values are valid JSON.

    .. note::
        The string is transmitted and received by BokehJS as a *string*
        containing JSON content. i.e., you must use ``JSON.parse`` to unpack
        the value into a JavaScript hash.

    """
    def validate(self, value):
        super(JSON, self).validate(value)
        if value is None:
            return
        # import kept local so the check stays self-contained; importing
        # cannot raise ValueError, so it lives outside the try block
        import json
        try:
            json.loads(value)
        except ValueError:
            raise ValueError("expected JSON text, got %r" % value)
class ParameterizedProperty(Property):
    """ Base class for Properties that have type parameters, e.g.
    ``List(String)``.
    """
    @staticmethod
    def _validate_type_param(type_param):
        # Property instances pass through unchanged; Property subclasses
        # are instantiated; anything else is rejected.
        if isinstance(type_param, Property):
            return type_param
        if isinstance(type_param, type):
            if issubclass(type_param, Property):
                return type_param()
            # report the class by name in the error below
            type_param = type_param.__name__
        raise ValueError("expected a property as type parameter, got %s" % type_param)
    @property
    def type_params(self):
        raise NotImplementedError("abstract method")
    @property
    def has_ref(self):
        # has a reference if any of the type parameters does
        return any(type_param.has_ref for type_param in self.type_params)
class ContainerProperty(ParameterizedProperty):
    """ Base class for Container-like type properties.

    Used by MetaHasProps as a marker to collect container property names. """
    pass
class Seq(ContainerProperty):
    """ Sequence (list, tuple) type property.

    Accepts any non-mapping container whose items all validate against
    ``item_type``.
    """
    def _is_seq(self, value):
        # ``Container``/``Mapping`` moved to ``collections.abc`` in Python 3.3
        # and were removed from the ``collections`` top level in Python 3.10,
        # so prefer the new location and fall back for Python 2.
        try:
            from collections.abc import Container, Mapping
        except ImportError:
            from collections import Container, Mapping
        return isinstance(value, Container) and not isinstance(value, Mapping)
    def _new_instance(self, value):
        # subclasses may coerce the deserialized list into their own type
        return value
    def __init__(self, item_type, default=None, help=None):
        self.item_type = self._validate_type_param(item_type)
        super(Seq, self).__init__(default=default, help=help)
    @property
    def type_params(self):
        return [self.item_type]
    def validate(self, value):
        super(Seq, self).validate(value)
        if value is not None:
            if not (self._is_seq(value) and all(self.item_type.is_valid(item) for item in value)):
                if self._is_seq(value):
                    # sequence of the right shape, but some items are invalid:
                    # collect them for a precise error message
                    invalid = []
                    for item in value:
                        if not self.item_type.is_valid(item):
                            invalid.append(item)
                    raise ValueError("expected an element of %s, got seq with invalid items %r" % (self, invalid))
                else:
                    raise ValueError("expected an element of %s, got %r" % (self, value))
    def __str__(self):
        return "%s(%s)" % (self.__class__.__name__, self.item_type)
    def from_json(self, json, models=None):
        if json is None:
            return None
        elif isinstance(json, list):
            return self._new_instance([ self.item_type.from_json(item, models) for item in json ])
        else:
            raise DeserializationError("%s expected a list or None, got %s" % (self, json))
class List(Seq):
    """ Python list type property.

    Only true Python lists are accepted (unlike Seq, which accepts any
    non-mapping container). """
    def __init__(self, item_type, default=[], help=None):
        # todo: refactor to not use mutable objects as default values.
        # Left in place for now because we want to allow None to express
        # optional values. Also in Dict.
        super(List, self).__init__(item_type, default=default, help=help)
    def _is_seq(self, value):
        return isinstance(value, list)
class Array(Seq):
    """ NumPy array type property.
    """
    def _is_seq(self, value):
        # imported locally so NumPy remains an optional dependency
        import numpy as np
        return isinstance(value, np.ndarray)
    def _new_instance(self, value):
        # coerce the deserialized list into an ndarray
        import numpy as np
        return np.array(value)
class Dict(ContainerProperty):
    """ Python dict type property.

    If a default value is passed in, then a shallow copy of it will be
    used for each new use of this property.
    """
    def __init__(self, keys_type, values_type, default={}, help=None):
        # keys and values are validated independently against their own types
        self.keys_type = self._validate_type_param(keys_type)
        self.values_type = self._validate_type_param(values_type)
        super(Dict, self).__init__(default=default, help=help)
    @property
    def type_params(self):
        return [self.keys_type, self.values_type]
    def validate(self, value):
        super(Dict, self).validate(value)
        if value is not None:
            # every key and every value must validate
            if not (isinstance(value, dict) and \
                all(self.keys_type.is_valid(key) and self.values_type.is_valid(val) for key, val in iteritems(value))):
                raise ValueError("expected an element of %s, got %r" % (self, value))
    def __str__(self):
        return "%s(%s, %s)" % (self.__class__.__name__, self.keys_type, self.values_type)
    def from_json(self, json, models=None):
        if json is None:
            return None
        elif isinstance(json, dict):
            # deserialize keys and values element-wise
            return { self.keys_type.from_json(key, models): self.values_type.from_json(value, models) for key, value in iteritems(json) }
        else:
            raise DeserializationError("%s expected a dict or None, got %s" % (self, json))
class Tuple(ContainerProperty):
    """ Tuple type property. """
    def __init__(self, tp1, tp2, *type_params, **kwargs):
        # at least two type parameters are required; normalize each to a
        # Property instance
        all_params = (tp1, tp2) + type_params
        self._type_params = [self._validate_type_param(tp) for tp in all_params]
        super(Tuple, self).__init__(default=kwargs.get("default"), help=kwargs.get("help"))
    @property
    def type_params(self):
        return self._type_params
    def validate(self, value):
        super(Tuple, self).validate(value)
        if value is None:
            return
        # length must match and each position must validate against its type
        if isinstance(value, (tuple, list)) and len(self.type_params) == len(value):
            if all(tp.is_valid(item) for tp, item in zip(self.type_params, value)):
                return
        raise ValueError("expected an element of %s, got %r" % (self, value))
    def __str__(self):
        return "%s(%s)" % (self.__class__.__name__, ", ".join(str(tp) for tp in self.type_params))
    def from_json(self, json, models=None):
        if json is None:
            return None
        if not isinstance(json, list):
            raise DeserializationError("%s expected a list or None, got %s" % (self, json))
        return tuple(tp.from_json(item, models) for tp, item in zip(self.type_params, json))
class Instance(Property):
    """ Instance type property, for references to other Models in the object
    graph.
    """
    def __init__(self, instance_type, default=None, help=None):
        # instance_type may be given as a class, or as a dotted string
        # resolved lazily (useful to break import cycles)
        if not isinstance(instance_type, (type,) + string_types):
            raise ValueError("expected a type or string, got %s" % instance_type)
        if isinstance(instance_type, type) and not issubclass(instance_type, HasProps):
            raise ValueError("expected a subclass of HasProps, got %s" % instance_type)
        self._instance_type = instance_type
        super(Instance, self).__init__(default=default, help=help)
    @property
    def instance_type(self):
        # resolve a string spec like "module.ClassName" on first access
        if isinstance(self._instance_type, str):
            module, name = self._instance_type.rsplit(".", 1)
            self._instance_type = getattr(import_module(module, "bokeh"), name)
        return self._instance_type
    @property
    def has_ref(self):
        # instances always reference other model objects
        return True
    def validate(self, value):
        super(Instance, self).validate(value)
        if value is not None:
            if not isinstance(value, self.instance_type):
                raise ValueError("expected an instance of type %s, got %s of type %s" %
                    (self.instance_type.__name__, value, type(value).__name__))
    def __str__(self):
        return "%s(%s)" % (self.__class__.__name__, self.instance_type.__name__)
    def from_json(self, json, models=None):
        if json is None:
            return None
        elif isinstance(json, dict):
            from .plot_object import PlotObject
            if issubclass(self.instance_type, PlotObject):
                # PlotObjects are serialized as references: look up by id
                if models is None:
                    raise DeserializationError("%s can't deserialize without models" % self)
                else:
                    model = models.get(json["id"])
                    if model is not None:
                        return model
                    else:
                        raise DeserializationError("%s failed to deserialize reference to %s" % (self, json))
            else:
                # plain HasProps: rebuild from the attribute dict, field by field
                attrs = {}
                for name, value in iteritems(json):
                    prop = self.instance_type.lookup(name)
                    attrs[name] = prop.from_json(value, models)
                # XXX: this doesn't work when Instance(Superclass) := Subclass()
                # Serialization dict must carry type information to resolve this.
                return self.instance_type(**attrs)
        else:
            raise DeserializationError("%s expected a dict or None, got %s" % (self, json))
class This(Property):
    """ A reference to an instance of the class being defined. """
    # marker type: no extra validation beyond Property
    pass
# Fake types, ABCs
class Any(Property):
    """ Any type property accepts any values. """
    # Property.validate is a no-op, so everything passes
    pass
class Function(Property):
    """ Function type property. """
    # marker type: no validation is performed here
    pass
class Event(Property):
    """ Event type property. """
    # marker type: no validation is performed here
    pass
class Interval(ParameterizedProperty):
    ''' Range type property ensures values are contained inside a given interval. '''
    def __init__(self, interval_type, start, end, default=None, help=None):
        self.interval_type = self._validate_type_param(interval_type)
        # the bounds themselves must be valid values of the interval type
        self.interval_type.validate(start)
        self.interval_type.validate(end)
        self.start = start
        self.end = end
        super(Interval, self).__init__(default=default, help=help)
    @property
    def type_params(self):
        return [self.interval_type]
    def validate(self, value):
        super(Interval, self).validate(value)
        if value is None:
            return
        # must be of the interval type and within the inclusive bounds
        if self.interval_type.is_valid(value) and self.start <= value <= self.end:
            return
        raise ValueError("expected a value of type %s in range [%s, %s], got %r" % (self.interval_type, self.start, self.end, value))
    def __str__(self):
        return "%s(%s, %r, %r)" % (self.__class__.__name__, self.interval_type, self.start, self.end)
class Byte(Interval):
    ''' Byte type property. '''
    def __init__(self, default=0, help=None):
        # an integer constrained to the inclusive range [0, 255]
        super(Byte, self).__init__(Int, 0, 255, default=default, help=help)
class Either(ParameterizedProperty):
    """ Takes a list of valid properties and validates against them in succession. """
    def __init__(self, tp1, tp2, *type_params, **kwargs):
        # at least two type parameters are required; each is normalized to
        # a Property instance
        self._type_params = list(map(self._validate_type_param, (tp1, tp2) + type_params))
        # the default defaults to the first type parameter's default
        default = kwargs.get("default", self._type_params[0].default)
        help = kwargs.get("help")
        super(Either, self).__init__(default=default, help=help)
    @property
    def type_params(self):
        return self._type_params
    def validate(self, value):
        super(Either, self).validate(value)
        # valid when any one of the type parameters accepts the value
        if not (value is None or any(param.is_valid(value) for param in self.type_params)):
            raise ValueError("expected an element of either %s, got %r" % (nice_join(self.type_params), value))
    def transform(self, value):
        # first type parameter that can transform the value wins
        for param in self.type_params:
            try:
                return param.transform(value)
            except ValueError:
                pass
        raise ValueError("Could not transform %r" % value)
    def from_json(self, json, models=None):
        # first type parameter that can deserialize the value wins; the
        # for/else fires only when no parameter succeeded
        for tp in self.type_params:
            try:
                return tp.from_json(json, models)
            except DeserializationError:
                pass
        else:
            raise DeserializationError("%s couldn't deserialize %s" % (self, json))
    def __str__(self):
        return "%s(%s)" % (self.__class__.__name__, ", ".join(map(str, self.type_params)))
    def __or__(self, other):
        # Chaining A | B | C extends the existing Either rather than nesting.
        # BUG FIX: Property.__init__ stores the help text only on
        # self.__doc__ (there is no self.help attribute), so the original
        # `help=self.help` raised AttributeError; pass the stored doc instead.
        return self.__class__(*(self.type_params + [other]), default=self._default, help=self.__doc__)
class Enum(String):
    """ An Enum with a list of allowed values. The first value in the list is
    the default value, unless a default is provided with the "default" keyword
    argument.
    """
    def __init__(self, enum, *values, **kwargs):
        # Accept either a ready-made Enumeration (no extra values given) or
        # the raw values from which one is built on the fly.
        if values or not isinstance(enum, enums.Enumeration):
            enum = enums.enumeration(enum, *values)
        self._enum = enum
        super(Enum, self).__init__(default=kwargs.get("default", enum._default), help=kwargs.get("help"))
    @property
    def allowed_values(self):
        return self._enum._values
    def validate(self, value):
        super(Enum, self).validate(value)
        if value is None or value in self._enum:
            return
        raise ValueError("invalid value for %s: %r; allowed values are %s" % (self.name, value, nice_join(self.allowed_values)))
    def __str__(self):
        return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, self.allowed_values)))
class Auto(Enum):
    """ An Enum property that accepts only the single value "auto". """
    def __init__(self):
        super(Auto, self).__init__("auto")
    def __str__(self):
        # Render as just the class name, with no value list.
        return self.__class__.__name__
# Properties useful for defining visual attributes
class Color(Either):
    """ Accepts color definitions in a variety of ways, and produces an
    appropriate serialization of its value for whatever backend.

    A string value is interpreted as a color if it is one of the SVG named
    colors or a "#rrggbb" hex value. A 3-tuple is treated as RGB (0..255);
    a 4-tuple as RGBa (0..255) with alpha a float in [0, 1], following the
    HTML5 Canvas API.
    """
    def __init__(self, default=None, help=None):
        # The accepted alternatives, in validation order.
        alternatives = (
            Enum(enums.NamedColor),
            Regex("^#[0-9a-fA-F]{6}$"),
            Tuple(Byte, Byte, Byte),
            Tuple(Byte, Byte, Byte, Percent),
        )
        super(Color, self).__init__(*alternatives, default=default, help=help)
    def __str__(self):
        # Render as just the class name, hiding the alternative list.
        return self.__class__.__name__
class Align(Property):
    # Placeholder alignment property: no validation or behavior beyond the
    # base Property class is defined here.
    pass
class DashPattern(Either):
    """ Dash type property.
    Express patterns that describe line dashes. ``DashPattern`` values
    can be specified in a variety of ways:
    * An enum: "solid", "dashed", "dotted", "dotdash", "dashdot"
    * a tuple or list of integers in the `HTML5 Canvas dash specification style`_.
    Note that if the list of integers has an odd number of elements, then
    it is duplicated, and that duplicated list becomes the new dash list.
    To indicate that dashing is turned off (solid lines), specify the empty
    list [].
    .. _HTML5 Canvas dash specification style: http://www.w3.org/html/wg/drafts/2dcontext/html5_canvas/#dash-list
    """
    # Named dash styles mapped to their canvas dash lists.
    _dash_patterns = {
        "solid": [],
        "dashed": [6],
        "dotted": [2,4],
        "dotdash": [2,4,6,4],
        "dashdot": [6,4,2,4],
    }
    # Sentinel so that each construction gets a *fresh* [] default.  The
    # previous signature used the mutable default ``default=[]``, which is
    # shared across all instances (classic mutable-default-argument bug).
    # Using a sentinel also preserves the meaning of an explicit None.
    _unset = object()
    def __init__(self, default=_unset, help=None):
        if default is DashPattern._unset:
            default = []
        types = Enum(enums.DashPattern), Regex(r"^(\d+(\s+\d+)*)?$"), Seq(Int)
        super(DashPattern, self).__init__(*types, default=default, help=help)
    def transform(self, value):
        # Named patterns map through _dash_patterns; whitespace-separated
        # integer strings are parsed into lists; lists pass through as-is.
        value = super(DashPattern, self).transform(value)
        if isinstance(value, string_types):
            try:
                return self._dash_patterns[value]
            except KeyError:
                return [int(x) for x in value.split()]
        else:
            return value
    def __str__(self):
        return self.__class__.__name__
class Size(Float):
    """ Size type property.
    .. note::
        ``Size`` is equivalent to an unsigned int.
    """
    def validate(self, value):
        super(Size, self).validate(value)
        if value is None:
            return
        # Written as ``not (0.0 <= value)`` so NaN is also rejected.
        if not (0.0 <= value):
            raise ValueError("expected a non-negative number, got %r" % value)
class Percent(Float):
    """ Percentage type property.
    Percents are useful for specifying alphas and coverage and extents; more
    semantically meaningful than Float(0..1).
    """
    def validate(self, value):
        super(Percent, self).validate(value)
        if value is None:
            return
        # Written as ``not (0.0 <= value <= 1.0)`` so NaN is also rejected.
        if not (0.0 <= value <= 1.0):
            raise ValueError("expected a value in range [0, 1], got %r" % value)
class Angle(Float):
    """ Angle type property. """
    # No extra validation beyond Float; exists for semantic clarity.
    pass
class Date(Property):
    """ Date (not datetime) type property.

    Accepts ``datetime.date`` instances, date strings (parsed by
    ``dateutil``), and numeric timestamps (seconds, falling back to
    milliseconds).
    """
    # Sentinel so the default is computed when the property is constructed.
    # The previous signature used ``default=datetime.date.today()``, which
    # is evaluated exactly once at import time, freezing "today" for the
    # lifetime of the process.
    _unset = object()
    def __init__(self, default=_unset, help=None):
        if default is Date._unset:
            default = datetime.date.today()
        super(Date, self).__init__(default=default, help=help)
    def validate(self, value):
        super(Date, self).validate(value)
        if not (value is None or isinstance(value, (datetime.date,) + string_types + (float,) + bokeh_integer_types)):
            raise ValueError("expected a date, string or timestamp, got %r" % value)
    def transform(self, value):
        value = super(Date, self).transform(value)
        if isinstance(value, (float,) + bokeh_integer_types):
            try:
                value = datetime.date.fromtimestamp(value)
            except ValueError:
                # Out-of-range timestamps are assumed to be milliseconds.
                value = datetime.date.fromtimestamp(value/1000)
        elif isinstance(value, string_types):
            value = dateutil.parser.parse(value).date()
        return value
class Datetime(Property):
    """ Datetime type property.

    Accepts ``datetime.datetime`` and ``datetime.date`` values, plus
    ``numpy.datetime64`` and ``pandas.Timestamp`` when those libraries are
    installed.
    """
    # Sentinel so the default is computed at construction time.  The previous
    # signature used ``default=datetime.date.today()``, evaluated once at
    # import time, freezing "today" for the lifetime of the process.
    _unset = object()
    def __init__(self, default=_unset, help=None):
        if default is Datetime._unset:
            # NOTE: the default is a date, not a datetime; validate()
            # accepts both, so this preserves the original contract.
            default = datetime.date.today()
        super(Datetime, self).__init__(default=default, help=help)
    def validate(self, value):
        super(Datetime, self).validate(value)
        datetime_types = (datetime.datetime, datetime.date)
        try:
            import numpy as np
            datetime_types += (np.datetime64,)
        except ImportError:
            pass
        if isinstance(value, datetime_types):
            return
        try:
            import pandas
            if isinstance(value, pandas.Timestamp):
                return
        except ImportError:
            pass
        raise ValueError("Expected a datetime instance, got %r" % value)
    def transform(self, value):
        # No transformation beyond the base property's; serialization of
        # datetimes is handled elsewhere (see protocol handling).
        value = super(Datetime, self).transform(value)
        return value
# Handled by serialization in protocol.py for now
class RelativeDelta(Dict):
    """ RelativeDelta type property for time deltas.

    A dict mapping calendar/clock unit names to integer amounts.
    """
    # Sentinel so each construction gets a *fresh* {} default.  The previous
    # signature used the mutable default ``default={}``, which is shared
    # across all instances (classic mutable-default-argument bug).
    _unset = object()
    def __init__(self, default=_unset, help=None):
        if default is RelativeDelta._unset:
            default = {}
        keys = Enum("years", "months", "days", "hours", "minutes", "seconds", "microseconds")
        values = Int
        super(RelativeDelta, self).__init__(keys, values, default=default, help=help)
    def __str__(self):
        return self.__class__.__name__
class DataSpec(Either):
    """ A property that accepts a fixed value of the underlying type, a
    data-source field name (string), or a full spec dict.
    """
    def __init__(self, typ, default, help=None):
        super(DataSpec, self).__init__(String, Dict(String, Either(String, typ)), typ, default=default, help=help)
        self._type = self._validate_type_param(typ)
    def to_dict(self, obj):
        val = getattr(obj, self._name, self.default)
        # None serializes as an explicit null value.
        if val is None:
            return dict(value=None)
        # A value valid for the underlying type serializes as a fixed value.
        try:
            self._type.validate(val)
        except ValueError:
            pass
        else:
            return dict(value=val)
        # Any other string names a data source field.
        if isinstance(val, string_types):
            return dict(field=val)
        # Otherwise it is already a spec dict; pass it through unchanged.
        return val
    def __str__(self):
        val = getattr(self, self._name, self.default)
        return "%s(%r)" % (self.__class__.__name__, val)
class NumberSpec(DataSpec):
    """ A DataSpec whose fixed values are floating-point numbers. """
    def __init__(self, default, help=None):
        super(NumberSpec, self).__init__(Float, default=default, help=help)
class StringSpec(DataSpec):
    """ A DataSpec for string values; a one-element list is accepted as
    shorthand for a fixed value.
    """
    def __init__(self, default, help=None):
        super(StringSpec, self).__init__(List(String), default=default, help=help)
    def __set__(self, obj, value):
        if isinstance(value, list):
            if len(value) != 1:
                raise TypeError("StringSpec convenience list values must have length 1")
            # [v] is convenience shorthand for the fixed value v.
            value = dict(value=value[0])
        super(StringSpec, self).__set__(obj, value)
class FontSizeSpec(DataSpec):
    """ A DataSpec for font sizes.  Bare strings that look like CSS sizes
    (leading digit) are coerced to fixed values, with a deprecation warning.
    """
    def __init__(self, default, help=None):
        super(FontSizeSpec, self).__init__(List(String), default=default, help=help)
    def __set__(self, obj, value):
        if isinstance(value, string_types):
            warn('Setting a fixed font size value as a string %r is deprecated, '
                 'set with value(%r) or [%r] instead' % (value, value, value),
                 DeprecationWarning, stacklevel=2)
            # Strings starting with a digit (e.g. "12pt") become fixed values.
            if value and value[0].isdigit():
                value = dict(value=value)
        super(FontSizeSpec, self).__set__(obj, value)
class UnitsSpec(NumberSpec):
    """ A NumberSpec with an associated units value, stored on the owning
    object as the companion attribute ``<name>_units``.
    """
    def __init__(self, default, units_type, units_default, help=None):
        super(UnitsSpec, self).__init__(default=default, help=help)
        # Validate the units property type and its default eagerly.
        self._units_type = self._validate_type_param(units_type)
        self._units_type.validate(units_default)
        self._units_type._default = units_default
    def to_dict(self, obj):
        d = super(UnitsSpec, self).to_dict(obj)
        # Attach the companion units attribute to the serialized spec.
        d["units"] = getattr(obj, self.name + "_units")
        return d
    def __set__(self, obj, value):
        if isinstance(value, dict):
            # A "units" key in a spec dict updates the companion attribute.
            units = value.pop("units", None)
            if units:
                setattr(obj, self.name + "_units", units)
        super(UnitsSpec, self).__set__(obj, value)
    def __str__(self):
        val = getattr(self, self._name, self.default)
        return "%s(%r, units_default=%r)" % (self.__class__.__name__, val, self._units_type._default)
class AngleSpec(UnitsSpec):
    """ A UnitsSpec for angles, with angle units defaulting to radians. """
    def __init__(self, default, units_default="rad", help=None):
        super(AngleSpec, self).__init__(default=default, units_type=Enum(enums.AngleUnits), units_default=units_default, help=help)
class DistanceSpec(UnitsSpec):
    """ A UnitsSpec for distances in spatial units (default "data").
    Negative fixed numeric values are rejected on assignment.
    """
    def __init__(self, default, units_default="data", help=None):
        super(DistanceSpec, self).__init__(default=default, units_type=Enum(enums.SpatialUnits), units_default=units_default, help=help)
    def __set__(self, obj, value):
        try:
            if value is not None and value < 0:
                raise ValueError("Distances must be positive or None!")
        except TypeError:
            # Non-numeric values (field names, spec dicts) are not range-checked.
            pass
        super(DistanceSpec, self).__set__(obj, value)
class ScreenDistanceSpec(NumberSpec):
    """ A NumberSpec for distances in screen units.
    Negative fixed numeric values are rejected on assignment.
    """
    def to_dict(self, obj):
        d = super(ScreenDistanceSpec, self).to_dict(obj)
        # Screen distances always serialize with fixed "screen" units.
        d["units"] = "screen"
        return d
    def __set__(self, obj, value):
        try:
            if value is not None and value < 0:
                raise ValueError("Distances must be positive or None!")
        except TypeError:
            # Non-numeric values (field names, spec dicts) are not range-checked.
            pass
        super(ScreenDistanceSpec, self).__set__(obj, value)
class DataDistanceSpec(NumberSpec):
    """ A NumberSpec for distances in data units.
    Negative fixed numeric values are rejected on assignment.
    """
    def to_dict(self, obj):
        # BUG FIX: this previously called super(ScreenDistanceSpec, self),
        # but DataDistanceSpec is not a subclass of ScreenDistanceSpec, so
        # that call raises TypeError at runtime.  Use this class itself.
        d = super(DataDistanceSpec, self).to_dict(obj)
        # Data distances always serialize with fixed "data" units.
        d["units"] = "data"
        return d
    def __set__(self, obj, value):
        try:
            if value is not None and value < 0:
                raise ValueError("Distances must be positive or None!")
        except TypeError:
            # Non-numeric values (field names, spec dicts) are not range-checked.
            pass
        super(DataDistanceSpec, self).__set__(obj, value)
class ColorSpec(DataSpec):
    """ A DataSpec for colors: named/hex color strings, RGB(A) tuples, or
    data-source field names.
    """
    def __init__(self, default, help=None):
        super(ColorSpec, self).__init__(Color, default=default, help=help)
    @classmethod
    def isconst(cls, arg):
        """ Return True if *arg* is a literal color: a well-formed
        "#rrggbb" hex value or one of the SVG named colors.
        """
        if not isinstance(arg, string_types):
            return False
        return (len(arg) == 7 and arg[0] == "#") or arg in enums.NamedColor
    @classmethod
    def is_color_tuple(cls, val):
        # A color tuple is any 3- or 4-tuple (RGB or RGBa).
        return isinstance(val, tuple) and len(val) in (3, 4)
    @classmethod
    def format_tuple(cls, colortuple):
        # Render as CSS-style "rgb(...)" or "rgba(...)" text.
        prefix = "rgb" if len(colortuple) == 3 else "rgba"
        return "%s%r" % (prefix, colortuple)
    def to_dict(self, obj):
        val = getattr(obj, self._name, self.default)
        if val is None:
            return dict(value=None)
        # Hexadecimal or named color literal.
        if self.isconst(val):
            return dict(value=val)
        # RGB or RGBa tuple rendered as a CSS-style string.
        if isinstance(val, tuple):
            return dict(value=self.format_tuple(val))
        # Any other string names a data source field.
        if isinstance(val, string_types):
            return dict(field=val)
        # Already a spec dict; return unchanged.
        return val
    def validate(self, value):
        try:
            return super(ColorSpec, self).validate(value)
        except ValueError:
            # Color tuples are accepted here even when the base spec rejects them.
            if self.is_color_tuple(value):
                return True
            raise
    def transform(self, value):
        # Normalize tuples: the first three components become ints; any
        # fourth (alpha) component is left untouched.
        if isinstance(value, tuple):
            value = tuple(int(c) if idx < 3 else c for idx, c in enumerate(value))
        return value
|
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\race.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_RaceWindow(object):
    def setupUi(self, RaceWindow):
        """Build the race window's widget tree (auto-generated by pyuic5).

        Creates the positions table, weather summary, fuel gauges and
        engine-temperature LCDs on *RaceWindow*'s central widget, then
        applies translations via retranslateUi.  All geometry is fixed
        pixel positioning, as emitted by Qt Designer.
        """
        # --- window shell -------------------------------------------------
        RaceWindow.setObjectName("RaceWindow")
        RaceWindow.resize(462, 456)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("logo.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        RaceWindow.setWindowIcon(icon)
        self.centralwidget = QtWidgets.QWidget(RaceWindow)
        self.centralwidget.setObjectName("centralwidget")
        # --- positions frame: per-driver rows (POS / Name / Last Lap /
        # Diff / Laps / Pit) separated by decorative lines -----------------
        self.position_frame = QtWidgets.QFrame(self.centralwidget)
        self.position_frame.setGeometry(QtCore.QRect(0, 0, 457, 235))
        font = QtGui.QFont()
        font.setFamily("Segoe UI Semibold")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.position_frame.setFont(font)
        self.position_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.position_frame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.position_frame.setLineWidth(3)
        self.position_frame.setMidLineWidth(3)
        self.position_frame.setObjectName("position_frame")
        self.label = QtWidgets.QLabel(self.position_frame)
        self.label.setGeometry(QtCore.QRect(10, 6, 31, 16))
        self.label.setObjectName("label")
        # Position number labels, one per driver row (P1..P7).
        self.pos_label_p1 = QtWidgets.QLabel(self.position_frame)
        self.pos_label_p1.setGeometry(QtCore.QRect(8, 28, 31, 16))
        self.pos_label_p1.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.pos_label_p1.setAlignment(QtCore.Qt.AlignCenter)
        self.pos_label_p1.setObjectName("pos_label_p1")
        self.pos_label_p2 = QtWidgets.QLabel(self.position_frame)
        self.pos_label_p2.setGeometry(QtCore.QRect(8, 53, 31, 16))
        self.pos_label_p2.setAlignment(QtCore.Qt.AlignCenter)
        self.pos_label_p2.setObjectName("pos_label_p2")
        self.pos_label_p3 = QtWidgets.QLabel(self.position_frame)
        self.pos_label_p3.setGeometry(QtCore.QRect(8, 78, 31, 16))
        self.pos_label_p3.setAlignment(QtCore.Qt.AlignCenter)
        self.pos_label_p3.setObjectName("pos_label_p3")
        self.pos_label_p4 = QtWidgets.QLabel(self.position_frame)
        self.pos_label_p4.setGeometry(QtCore.QRect(8, 103, 31, 16))
        self.pos_label_p4.setAlignment(QtCore.Qt.AlignCenter)
        self.pos_label_p4.setObjectName("pos_label_p4")
        self.pos_label_p5 = QtWidgets.QLabel(self.position_frame)
        self.pos_label_p5.setGeometry(QtCore.QRect(8, 128, 31, 16))
        self.pos_label_p5.setAlignment(QtCore.Qt.AlignCenter)
        self.pos_label_p5.setObjectName("pos_label_p5")
        self.pos_label_p6 = QtWidgets.QLabel(self.position_frame)
        self.pos_label_p6.setGeometry(QtCore.QRect(8, 153, 31, 16))
        self.pos_label_p6.setAlignment(QtCore.Qt.AlignCenter)
        self.pos_label_p6.setObjectName("pos_label_p6")
        self.pos_label_p7 = QtWidgets.QLabel(self.position_frame)
        self.pos_label_p7.setGeometry(QtCore.QRect(8, 178, 31, 16))
        self.pos_label_p7.setAlignment(QtCore.Qt.AlignCenter)
        self.pos_label_p7.setObjectName("pos_label_p7")
        # Horizontal separator lines between driver rows.
        self.line = QtWidgets.QFrame(self.position_frame)
        self.line.setGeometry(QtCore.QRect(0, 18, 601, 16))
        self.line.setFrameShape(QtWidgets.QFrame.HLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.line_2 = QtWidgets.QFrame(self.position_frame)
        self.line_2.setGeometry(QtCore.QRect(0, 43, 601, 16))
        self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_2.setObjectName("line_2")
        self.line_3 = QtWidgets.QFrame(self.position_frame)
        self.line_3.setGeometry(QtCore.QRect(0, 68, 601, 16))
        self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_3.setObjectName("line_3")
        self.line_4 = QtWidgets.QFrame(self.position_frame)
        self.line_4.setGeometry(QtCore.QRect(0, 93, 601, 16))
        self.line_4.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_4.setObjectName("line_4")
        self.line_5 = QtWidgets.QFrame(self.position_frame)
        self.line_5.setGeometry(QtCore.QRect(0, 118, 601, 16))
        self.line_5.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_5.setObjectName("line_5")
        self.line_6 = QtWidgets.QFrame(self.position_frame)
        self.line_6.setGeometry(QtCore.QRect(0, 143, 601, 16))
        self.line_6.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_6.setObjectName("line_6")
        self.line_8 = QtWidgets.QFrame(self.position_frame)
        self.line_8.setGeometry(QtCore.QRect(0, 168, 601, 16))
        self.line_8.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_8.setObjectName("line_8")
        self.line_7 = QtWidgets.QFrame(self.position_frame)
        self.line_7.setGeometry(QtCore.QRect(0, 193, 601, 16))
        self.line_7.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_7.setObjectName("line_7")
        # Vertical separator between position column and name column.
        self.line_9 = QtWidgets.QFrame(self.position_frame)
        self.line_9.setGeometry(QtCore.QRect(33, 25, 20, 175))
        self.line_9.setFrameShape(QtWidgets.QFrame.VLine)
        self.line_9.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_9.setObjectName("line_9")
        self.label_2 = QtWidgets.QLabel(self.position_frame)
        self.label_2.setGeometry(QtCore.QRect(50, 6, 61, 16))
        self.label_2.setObjectName("label_2")
        # Driver name labels (non-bold), one per row.
        self.name_label_p1 = QtWidgets.QLabel(self.position_frame)
        self.name_label_p1.setGeometry(QtCore.QRect(48, 24, 181, 25))
        font = QtGui.QFont()
        font.setBold(False)
        font.setWeight(50)
        self.name_label_p1.setFont(font)
        self.name_label_p1.setObjectName("name_label_p1")
        self.name_label_p2 = QtWidgets.QLabel(self.position_frame)
        self.name_label_p2.setGeometry(QtCore.QRect(48, 49, 181, 25))
        font = QtGui.QFont()
        font.setBold(False)
        font.setWeight(50)
        self.name_label_p2.setFont(font)
        self.name_label_p2.setObjectName("name_label_p2")
        self.name_label_p3 = QtWidgets.QLabel(self.position_frame)
        self.name_label_p3.setGeometry(QtCore.QRect(48, 74, 181, 25))
        font = QtGui.QFont()
        font.setBold(False)
        font.setWeight(50)
        self.name_label_p3.setFont(font)
        self.name_label_p3.setObjectName("name_label_p3")
        self.name_label_p4 = QtWidgets.QLabel(self.position_frame)
        self.name_label_p4.setGeometry(QtCore.QRect(48, 99, 181, 25))
        font = QtGui.QFont()
        font.setBold(False)
        font.setWeight(50)
        self.name_label_p4.setFont(font)
        self.name_label_p4.setObjectName("name_label_p4")
        self.name_label_p5 = QtWidgets.QLabel(self.position_frame)
        self.name_label_p5.setGeometry(QtCore.QRect(48, 124, 181, 25))
        font = QtGui.QFont()
        font.setBold(False)
        font.setWeight(50)
        self.name_label_p5.setFont(font)
        self.name_label_p5.setObjectName("name_label_p5")
        self.name_label_p6 = QtWidgets.QLabel(self.position_frame)
        self.name_label_p6.setGeometry(QtCore.QRect(48, 149, 181, 25))
        font = QtGui.QFont()
        font.setBold(False)
        font.setWeight(50)
        self.name_label_p6.setFont(font)
        self.name_label_p6.setObjectName("name_label_p6")
        self.name_label_p7 = QtWidgets.QLabel(self.position_frame)
        self.name_label_p7.setGeometry(QtCore.QRect(48, 174, 181, 25))
        font = QtGui.QFont()
        font.setBold(False)
        font.setWeight(50)
        self.name_label_p7.setFont(font)
        self.name_label_p7.setObjectName("name_label_p7")
        self.line_10 = QtWidgets.QFrame(self.position_frame)
        self.line_10.setGeometry(QtCore.QRect(216, 25, 20, 175))
        self.line_10.setFrameShape(QtWidgets.QFrame.VLine)
        self.line_10.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_10.setObjectName("line_10")
        self.label_10 = QtWidgets.QLabel(self.position_frame)
        self.label_10.setGeometry(QtCore.QRect(234, 0, 79, 25))
        self.label_10.setObjectName("label_10")
        # Last-lap time labels per row.
        self.lap_label_p1 = QtWidgets.QLabel(self.position_frame)
        self.lap_label_p1.setGeometry(QtCore.QRect(234, 30, 61, 16))
        self.lap_label_p1.setAlignment(QtCore.Qt.AlignCenter)
        self.lap_label_p1.setObjectName("lap_label_p1")
        self.line_11 = QtWidgets.QFrame(self.position_frame)
        self.line_11.setGeometry(QtCore.QRect(288, 25, 20, 175))
        self.line_11.setFrameShape(QtWidgets.QFrame.VLine)
        self.line_11.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_11.setObjectName("line_11")
        self.label_12 = QtWidgets.QLabel(self.position_frame)
        self.label_12.setGeometry(QtCore.QRect(312, 0, 37, 25))
        self.label_12.setObjectName("label_12")
        # Lap-time difference labels per row.
        self.lap_diff_p1 = QtWidgets.QLabel(self.position_frame)
        self.lap_diff_p1.setGeometry(QtCore.QRect(306, 28, 67, 19))
        self.lap_diff_p1.setToolTipDuration(0)
        self.lap_diff_p1.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.lap_diff_p1.setObjectName("lap_diff_p1")
        self.lap_label_p2 = QtWidgets.QLabel(self.position_frame)
        self.lap_label_p2.setGeometry(QtCore.QRect(234, 55, 61, 16))
        self.lap_label_p2.setAlignment(QtCore.Qt.AlignCenter)
        self.lap_label_p2.setObjectName("lap_label_p2")
        self.lap_label_p3 = QtWidgets.QLabel(self.position_frame)
        self.lap_label_p3.setGeometry(QtCore.QRect(234, 80, 61, 16))
        self.lap_label_p3.setAlignment(QtCore.Qt.AlignCenter)
        self.lap_label_p3.setObjectName("lap_label_p3")
        self.lap_label_p4 = QtWidgets.QLabel(self.position_frame)
        self.lap_label_p4.setGeometry(QtCore.QRect(234, 105, 61, 16))
        self.lap_label_p4.setAlignment(QtCore.Qt.AlignCenter)
        self.lap_label_p4.setObjectName("lap_label_p4")
        self.lap_label_p5 = QtWidgets.QLabel(self.position_frame)
        self.lap_label_p5.setGeometry(QtCore.QRect(234, 130, 61, 16))
        self.lap_label_p5.setAlignment(QtCore.Qt.AlignCenter)
        self.lap_label_p5.setObjectName("lap_label_p5")
        self.lap_label_p6 = QtWidgets.QLabel(self.position_frame)
        self.lap_label_p6.setGeometry(QtCore.QRect(234, 155, 61, 16))
        self.lap_label_p6.setAlignment(QtCore.Qt.AlignCenter)
        self.lap_label_p6.setObjectName("lap_label_p6")
        self.lap_label_p7 = QtWidgets.QLabel(self.position_frame)
        self.lap_label_p7.setGeometry(QtCore.QRect(234, 180, 61, 16))
        self.lap_label_p7.setAlignment(QtCore.Qt.AlignCenter)
        self.lap_label_p7.setObjectName("lap_label_p7")
        self.lap_diff_p2 = QtWidgets.QLabel(self.position_frame)
        self.lap_diff_p2.setGeometry(QtCore.QRect(306, 53, 67, 19))
        self.lap_diff_p2.setToolTipDuration(0)
        self.lap_diff_p2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.lap_diff_p2.setObjectName("lap_diff_p2")
        self.lap_diff_p3 = QtWidgets.QLabel(self.position_frame)
        self.lap_diff_p3.setGeometry(QtCore.QRect(306, 78, 67, 19))
        self.lap_diff_p3.setToolTipDuration(0)
        self.lap_diff_p3.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.lap_diff_p3.setObjectName("lap_diff_p3")
        self.lap_diff_p4 = QtWidgets.QLabel(self.position_frame)
        self.lap_diff_p4.setGeometry(QtCore.QRect(306, 103, 67, 19))
        self.lap_diff_p4.setToolTipDuration(0)
        self.lap_diff_p4.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.lap_diff_p4.setObjectName("lap_diff_p4")
        self.lap_diff_p5 = QtWidgets.QLabel(self.position_frame)
        self.lap_diff_p5.setGeometry(QtCore.QRect(306, 128, 67, 19))
        self.lap_diff_p5.setToolTipDuration(0)
        self.lap_diff_p5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.lap_diff_p5.setObjectName("lap_diff_p5")
        self.lap_diff_p6 = QtWidgets.QLabel(self.position_frame)
        self.lap_diff_p6.setGeometry(QtCore.QRect(306, 153, 67, 19))
        self.lap_diff_p6.setToolTipDuration(0)
        self.lap_diff_p6.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.lap_diff_p6.setObjectName("lap_diff_p6")
        self.lap_diff_p7 = QtWidgets.QLabel(self.position_frame)
        self.lap_diff_p7.setGeometry(QtCore.QRect(306, 178, 67, 19))
        self.lap_diff_p7.setToolTipDuration(0)
        self.lap_diff_p7.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.lap_diff_p7.setObjectName("lap_diff_p7")
        # Status text / session time / OK button row at the frame's bottom.
        self.status_label = QtWidgets.QLabel(self.position_frame)
        self.status_label.setGeometry(QtCore.QRect(6, 204, 337, 25))
        font = QtGui.QFont()
        font.setFamily("Segoe UI Semibold")
        font.setPointSize(10)
        font.setBold(True)
        font.setWeight(75)
        self.status_label.setFont(font)
        self.status_label.setText("")
        self.status_label.setObjectName("status_label")
        self.label_7 = QtWidgets.QLabel(self.position_frame)
        self.label_7.setGeometry(QtCore.QRect(372, 0, 37, 25))
        self.label_7.setObjectName("label_7")
        self.line_12 = QtWidgets.QFrame(self.position_frame)
        self.line_12.setGeometry(QtCore.QRect(360, 25, 13, 175))
        self.line_12.setFrameShape(QtWidgets.QFrame.VLine)
        self.line_12.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_12.setObjectName("line_12")
        # Completed-laps labels per row.
        self.lap_complete_label_p1 = QtWidgets.QLabel(self.position_frame)
        self.lap_complete_label_p1.setGeometry(QtCore.QRect(372, 28, 37, 19))
        self.lap_complete_label_p1.setObjectName("lap_complete_label_p1")
        self.lap_complete_label_p2 = QtWidgets.QLabel(self.position_frame)
        self.lap_complete_label_p2.setGeometry(QtCore.QRect(372, 53, 37, 19))
        self.lap_complete_label_p2.setObjectName("lap_complete_label_p2")
        self.lap_complete_label_p3 = QtWidgets.QLabel(self.position_frame)
        self.lap_complete_label_p3.setGeometry(QtCore.QRect(372, 78, 37, 19))
        self.lap_complete_label_p3.setObjectName("lap_complete_label_p3")
        self.lap_complete_label_p4 = QtWidgets.QLabel(self.position_frame)
        self.lap_complete_label_p4.setGeometry(QtCore.QRect(372, 103, 37, 19))
        self.lap_complete_label_p4.setObjectName("lap_complete_label_p4")
        self.lap_complete_label_p5 = QtWidgets.QLabel(self.position_frame)
        self.lap_complete_label_p5.setGeometry(QtCore.QRect(372, 128, 37, 19))
        self.lap_complete_label_p5.setObjectName("lap_complete_label_p5")
        self.lap_complete_label_p6 = QtWidgets.QLabel(self.position_frame)
        self.lap_complete_label_p6.setGeometry(QtCore.QRect(372, 153, 37, 19))
        self.lap_complete_label_p6.setObjectName("lap_complete_label_p6")
        self.lap_complete_label_p7 = QtWidgets.QLabel(self.position_frame)
        self.lap_complete_label_p7.setGeometry(QtCore.QRect(372, 178, 37, 19))
        self.lap_complete_label_p7.setObjectName("lap_complete_label_p7")
        self.time_label = QtWidgets.QLabel(self.position_frame)
        self.time_label.setGeometry(QtCore.QRect(348, 204, 85, 25))
        font = QtGui.QFont()
        font.setFamily("Tahoma")
        font.setPointSize(10)
        font.setBold(True)
        font.setWeight(75)
        self.time_label.setFont(font)
        self.time_label.setObjectName("time_label")
        self.ok_button = QtWidgets.QPushButton(self.position_frame)
        self.ok_button.setGeometry(QtCore.QRect(192, 204, 85, 25))
        self.ok_button.setStyleSheet("background-color: rgb(255, 0, 0);\n"
"border-color: rgb(0, 85, 255);")
        self.ok_button.setObjectName("ok_button")
        self.line_13 = QtWidgets.QFrame(self.position_frame)
        self.line_13.setGeometry(QtCore.QRect(408, 25, 13, 175))
        self.line_13.setFrameShape(QtWidgets.QFrame.VLine)
        self.line_13.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_13.setObjectName("line_13")
        self.label_18 = QtWidgets.QLabel(self.position_frame)
        self.label_18.setGeometry(QtCore.QRect(420, 0, 31, 25))
        self.label_18.setObjectName("label_18")
        # Pit-stop indicator labels per row (object names are pyuic-generated).
        self.pit_label_p1 = QtWidgets.QLabel(self.position_frame)
        self.pit_label_p1.setGeometry(QtCore.QRect(420, 28, 25, 19))
        self.pit_label_p1.setObjectName("pit_label_p1")
        self.pit_label_p1_2 = QtWidgets.QLabel(self.position_frame)
        self.pit_label_p1_2.setGeometry(QtCore.QRect(420, 53, 25, 19))
        self.pit_label_p1_2.setObjectName("pit_label_p1_2")
        self.pit_label_p1_3 = QtWidgets.QLabel(self.position_frame)
        self.pit_label_p1_3.setGeometry(QtCore.QRect(420, 78, 25, 19))
        self.pit_label_p1_3.setObjectName("pit_label_p1_3")
        self.pit_label_p1_4 = QtWidgets.QLabel(self.position_frame)
        self.pit_label_p1_4.setGeometry(QtCore.QRect(420, 103, 25, 19))
        self.pit_label_p1_4.setObjectName("pit_label_p1_4")
        self.pit_label_p1_5 = QtWidgets.QLabel(self.position_frame)
        self.pit_label_p1_5.setGeometry(QtCore.QRect(420, 128, 25, 19))
        self.pit_label_p1_5.setObjectName("pit_label_p1_5")
        self.pit_label_p1_6 = QtWidgets.QLabel(self.position_frame)
        self.pit_label_p1_6.setGeometry(QtCore.QRect(420, 153, 25, 19))
        self.pit_label_p1_6.setObjectName("pit_label_p1_6")
        self.pit_label_p1_7 = QtWidgets.QLabel(self.position_frame)
        self.pit_label_p1_7.setGeometry(QtCore.QRect(420, 178, 25, 19))
        self.pit_label_p1_7.setObjectName("pit_label_p1_7")
        # --- weather frame: conditions summary and app version ------------
        self.weather_frame = QtWidgets.QFrame(self.centralwidget)
        self.weather_frame.setGeometry(QtCore.QRect(0, 234, 457, 55))
        font = QtGui.QFont()
        font.setFamily("Segoe UI Semibold")
        font.setPointSize(10)
        font.setBold(True)
        font.setWeight(75)
        self.weather_frame.setFont(font)
        self.weather_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.weather_frame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.weather_frame.setObjectName("weather_frame")
        self.label_3 = QtWidgets.QLabel(self.weather_frame)
        self.label_3.setGeometry(QtCore.QRect(10, 8, 55, 19))
        self.label_3.setObjectName("label_3")
        self.w_type_label = QtWidgets.QLabel(self.weather_frame)
        self.w_type_label.setGeometry(QtCore.QRect(72, 8, 57, 19))
        self.w_type_label.setObjectName("w_type_label")
        self.label_4 = QtWidgets.QLabel(self.weather_frame)
        self.label_4.setGeometry(QtCore.QRect(10, 30, 56, 19))
        self.label_4.setFrameShadow(QtWidgets.QFrame.Plain)
        self.label_4.setObjectName("label_4")
        self.w_dir_label = QtWidgets.QLabel(self.weather_frame)
        self.w_dir_label.setGeometry(QtCore.QRect(72, 30, 57, 19))
        self.w_dir_label.setObjectName("w_dir_label")
        self.label_5 = QtWidgets.QLabel(self.weather_frame)
        self.label_5.setGeometry(QtCore.QRect(138, 30, 76, 19))
        self.label_5.setObjectName("label_5")
        self.w_speed_label = QtWidgets.QLabel(self.weather_frame)
        self.w_speed_label.setGeometry(QtCore.QRect(222, 30, 25, 19))
        self.w_speed_label.setObjectName("w_speed_label")
        self.label_6 = QtWidgets.QLabel(self.weather_frame)
        self.label_6.setGeometry(QtCore.QRect(138, 8, 73, 19))
        self.label_6.setObjectName("label_6")
        self.w_temp_label = QtWidgets.QLabel(self.weather_frame)
        self.w_temp_label.setGeometry(QtCore.QRect(216, 8, 37, 19))
        self.w_temp_label.setObjectName("w_temp_label")
        self.label_8 = QtWidgets.QLabel(self.weather_frame)
        self.label_8.setGeometry(QtCore.QRect(258, 30, 67, 19))
        self.label_8.setObjectName("label_8")
        self.air_temp_label = QtWidgets.QLabel(self.weather_frame)
        self.air_temp_label.setGeometry(QtCore.QRect(324, 30, 49, 19))
        self.air_temp_label.setObjectName("air_temp_label")
        self.label_9 = QtWidgets.QLabel(self.weather_frame)
        self.label_9.setGeometry(QtCore.QRect(258, 8, 43, 19))
        self.label_9.setObjectName("label_9")
        self.sky_type_label = QtWidgets.QLabel(self.weather_frame)
        self.sky_type_label.setGeometry(QtCore.QRect(300, 8, 91, 19))
        self.sky_type_label.setObjectName("sky_type_label")
        self.version_label = QtWidgets.QLabel(self.weather_frame)
        self.version_label.setGeometry(QtCore.QRect(402, 0, 25, 16))
        font = QtGui.QFont()
        font.setPointSize(9)
        self.version_label.setFont(font)
        self.version_label.setObjectName("version_label")
        # --- fuel frame: LCD readouts for laps/fuel estimates -------------
        self.fuel_frame = QtWidgets.QFrame(self.centralwidget)
        self.fuel_frame.setGeometry(QtCore.QRect(0, 294, 457, 80))
        self.fuel_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.fuel_frame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.fuel_frame.setObjectName("fuel_frame")
        self.laps_left_lcd = QtWidgets.QLCDNumber(self.fuel_frame)
        self.laps_left_lcd.setGeometry(QtCore.QRect(282, 24, 85, 51))
        font = QtGui.QFont()
        font.setPointSize(36)
        self.laps_left_lcd.setFont(font)
        self.laps_left_lcd.setAutoFillBackground(False)
        self.laps_left_lcd.setDigitCount(3)
        self.laps_left_lcd.setProperty("intValue", 0)
        self.laps_left_lcd.setObjectName("laps_left_lcd")
        self.label_13 = QtWidgets.QLabel(self.fuel_frame)
        self.label_13.setGeometry(QtCore.QRect(144, 0, 131, 20))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_13.setFont(font)
        self.label_13.setObjectName("label_13")
        self.laps_empty_lcd = QtWidgets.QLCDNumber(self.fuel_frame)
        self.laps_empty_lcd.setGeometry(QtCore.QRect(144, 24, 131, 51))
        # Palette gives the "laps to empty" LCD distinct active/inactive/
        # disabled text colors.
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        self.laps_empty_lcd.setPalette(palette)
        font = QtGui.QFont()
        font.setPointSize(36)
        self.laps_empty_lcd.setFont(font)
        self.laps_empty_lcd.setSmallDecimalPoint(False)
        self.laps_empty_lcd.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.laps_empty_lcd.setProperty("value", 0.0)
        self.laps_empty_lcd.setProperty("intValue", 0)
        self.laps_empty_lcd.setObjectName("laps_empty_lcd")
        self.label_11 = QtWidgets.QLabel(self.fuel_frame)
        self.label_11.setGeometry(QtCore.QRect(282, 0, 91, 16))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_11.setFont(font)
        self.label_11.setObjectName("label_11")
        self.fuel_needed_lcd = QtWidgets.QLCDNumber(self.fuel_frame)
        self.fuel_needed_lcd.setGeometry(QtCore.QRect(6, 24, 131, 51))
        font = QtGui.QFont()
        font.setPointSize(36)
        self.fuel_needed_lcd.setFont(font)
        self.fuel_needed_lcd.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.fuel_needed_lcd.setObjectName("fuel_needed_lcd")
        self.label_14 = QtWidgets.QLabel(self.fuel_frame)
        self.label_14.setGeometry(QtCore.QRect(24, 0, 101, 16))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_14.setFont(font)
        self.label_14.setObjectName("label_14")
        self.laps_label_3 = QtWidgets.QLabel(self.fuel_frame)
        self.laps_label_3.setGeometry(QtCore.QRect(378, 0, 91, 16))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.laps_label_3.setFont(font)
        self.laps_label_3.setObjectName("laps_label_3")
        # --- bottom row: pit/fuel/temperature LCDs on the central widget --
        self.laps_since_pit_lcd = QtWidgets.QLCDNumber(self.centralwidget)
        self.laps_since_pit_lcd.setGeometry(QtCore.QRect(282, 396, 85, 51))
        font = QtGui.QFont()
        font.setPointSize(36)
        self.laps_since_pit_lcd.setFont(font)
        self.laps_since_pit_lcd.setAutoFillBackground(False)
        self.laps_since_pit_lcd.setDigitCount(3)
        self.laps_since_pit_lcd.setProperty("intValue", 0)
        self.laps_since_pit_lcd.setObjectName("laps_since_pit_lcd")
        self.label_15 = QtWidgets.QLabel(self.centralwidget)
        self.label_15.setGeometry(QtCore.QRect(276, 375, 103, 16))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_15.setFont(font)
        self.label_15.setObjectName("label_15")
        self.fuel_2_add_lcd = QtWidgets.QLCDNumber(self.centralwidget)
        self.fuel_2_add_lcd.setGeometry(QtCore.QRect(144, 396, 131, 51))
        font = QtGui.QFont()
        font.setPointSize(36)
        self.fuel_2_add_lcd.setFont(font)
        self.fuel_2_add_lcd.setAutoFillBackground(False)
        self.fuel_2_add_lcd.setProperty("intValue", 0)
        self.fuel_2_add_lcd.setObjectName("fuel_2_add_lcd")
        self.label_16 = QtWidgets.QLabel(self.centralwidget)
        self.label_16.setGeometry(QtCore.QRect(174, 375, 97, 16))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_16.setFont(font)
        self.label_16.setObjectName("label_16")
        self.current_fuel_lcd = QtWidgets.QLCDNumber(self.centralwidget)
        self.current_fuel_lcd.setGeometry(QtCore.QRect(6, 396, 131, 51))
        font = QtGui.QFont()
        font.setPointSize(36)
        self.current_fuel_lcd.setFont(font)
        self.current_fuel_lcd.setAutoFillBackground(False)
        self.current_fuel_lcd.setProperty("intValue", 0)
        self.current_fuel_lcd.setObjectName("current_fuel_lcd")
        self.label_17 = QtWidgets.QLabel(self.centralwidget)
        self.label_17.setGeometry(QtCore.QRect(24, 375, 85, 16))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_17.setFont(font)
        self.label_17.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
        self.label_17.setObjectName("label_17")
        self.water_temp_lcd = QtWidgets.QLCDNumber(self.centralwidget)
        self.water_temp_lcd.setGeometry(QtCore.QRect(372, 318, 85, 51))
        font = QtGui.QFont()
        font.setPointSize(36)
        self.water_temp_lcd.setFont(font)
        self.water_temp_lcd.setDigitCount(3)
        self.water_temp_lcd.setProperty("intValue", 0)
        self.water_temp_lcd.setObjectName("water_temp_lcd")
        self.oil_temp_lcd = QtWidgets.QLCDNumber(self.centralwidget)
        self.oil_temp_lcd.setGeometry(QtCore.QRect(372, 396, 85, 51))
        font = QtGui.QFont()
        font.setPointSize(36)
        self.oil_temp_lcd.setFont(font)
        self.oil_temp_lcd.setDigitCount(3)
        self.oil_temp_lcd.setProperty("intValue", 0)
        self.oil_temp_lcd.setObjectName("oil_temp_lcd")
        self.label_19 = QtWidgets.QLabel(self.centralwidget)
        self.label_19.setGeometry(QtCore.QRect(384, 375, 67, 16))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_19.setFont(font)
        self.label_19.setObjectName("label_19")
        RaceWindow.setCentralWidget(self.centralwidget)
        # Apply translated texts, then auto-connect slots by object name.
        self.retranslateUi(RaceWindow)
        QtCore.QMetaObject.connectSlotsByName(RaceWindow)
def retranslateUi(self, RaceWindow):
    """Apply the (translatable) display strings to every widget.

    Originally emitted by pyuic5 as one flat run of setText()/setToolTip()
    calls; restyled here to fold the seven identical per-driver rows and
    the one-off captions into data-driven loops.  The strings applied and
    the widgets they land on are unchanged.
    """
    _translate = QtCore.QCoreApplication.translate

    def tr(text):
        # Every string in this window uses the "RaceWindow" translation context.
        return _translate("RaceWindow", text)

    RaceWindow.setWindowTitle(tr("iRacePal"))

    # Placeholder markup shown in each driver's lap-difference cell.
    diff_placeholder = (
        "<html><head/><body><p><span style=\" color:#ff0000;\">"
        "+0.32</span></p></body></html>"
    )

    # Per-driver rows 1-7: position, name, last-lap time, diff, laps done.
    for row in range(1, 8):
        getattr(self, "pos_label_p%d" % row).setText(tr(str(row)))
        getattr(self, "name_label_p%d" % row).setText(
            tr("-----------------------------"))
        getattr(self, "lap_label_p%d" % row).setText(tr("0:00.00"))
        getattr(self, "lap_diff_p%d" % row).setText(tr(diff_placeholder))
        getattr(self, "lap_complete_label_p%d" % row).setText(tr("10"))

    # Laps-since-pit column.  The generated widget names are irregular:
    # the first row is pit_label_p1, the rest pit_label_p1_2 .. pit_label_p1_7.
    pit_names = ["pit_label_p1"] + ["pit_label_p1_%d" % i for i in range(2, 8)]
    for name in pit_names:
        getattr(self, name).setText(tr("10"))

    # One-off captions and placeholder values, as (widget, text) pairs.
    static_texts = (
        (self.label, "POS"),
        (self.label_2, "Name"),
        (self.label_10, "Last Lap"),
        (self.label_12, "Diff"),
        (self.label_7, "Lap"),
        (self.time_label, "10:00:01 pm"),
        (self.ok_button, "RESTART"),
        (self.label_18, "Pit"),
        (self.label_3, "Weather:"),
        (self.w_type_label, "TextLabel"),
        (self.label_4, "Wind Dir:"),
        (self.w_dir_label, "TextLabel"),
        (self.label_5, "Wind Speed:"),
        (self.w_speed_label, "0"),
        (self.label_6, "Track Temp:"),
        (self.w_temp_label, "0"),
        (self.label_8, "Air Temp:"),
        (self.air_temp_label, "0"),
        (self.label_9, "Skies:"),
        (self.sky_type_label, "Cloudy"),
        (self.version_label, "v0.1"),
        (self.label_13, "Laps Until Empty"),
        (self.label_11, "Laps Remain"),
        (self.label_14, "Fuel to Finish"),
        (self.laps_label_3, "Water Tmp"),
        (self.label_15, "Laps Since Pit"),
        (self.label_16, "Fuel To Add"),
        (self.label_17, "Fuel in Car"),
        (self.label_19, "Oil Tmp"),
    )
    for widget, text in static_texts:
        widget.setText(tr(text))

    # Tooltips.  NOTE(review): the generated .ui reuses "Laps Remaining in
    # Race" on all four LCDs (including the fuel ones) — looks like Designer
    # copy-paste, but it is preserved verbatim to keep behavior identical.
    self.label_18.setToolTip(tr("Laps since last on pit road"))
    for lcd in (self.laps_left_lcd, self.laps_since_pit_lcd,
                self.fuel_2_add_lcd, self.current_fuel_lcd):
        lcd.setToolTip(tr("Laps Remaining in Race"))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.