signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def get_providing_power_source_type(self):
    """Looks through all power supplies in POWER_SUPPLY_PATH.

    If there is an AC adapter online, returns POWER_TYPE_AC.
    If there is a discharging battery, returns POWER_TYPE_BATTERY.
    Since the order of supplies is arbitrary, whatever is found first
    is returned; POWER_TYPE_AC is the fallback when nothing matches.
    """
    for supply in os.listdir(POWER_SUPPLY_PATH):
        supply_path = os.path.join(POWER_SUPPLY_PATH, supply)
        try:
            # 'supply_type' rather than 'type' to avoid shadowing the builtin.
            supply_type = self.power_source_type(supply_path)
            if supply_type == common.POWER_TYPE_AC:
                if self.is_ac_online(supply_path):
                    return common.POWER_TYPE_AC
            elif supply_type == common.POWER_TYPE_BATTERY:
                if self.is_battery_present(supply_path) and self.is_battery_discharging(supply_path):
                    return common.POWER_TYPE_BATTERY
            else:
                warnings.warn("UPS is not supported.")
        except (RuntimeError, IOError) as e:
            warnings.warn("Unable to read properties of {0}: {1}".format(supply_path, e),
                          category=RuntimeWarning)
    return common.POWER_TYPE_AC
|
def connect_generators(self, debug=False):
    """Connect LV generators (graph nodes) to the grid (graph).

    Args:
        debug: bool, defaults to False
            If True, information is printed during the process.
    """
    self._graph = lv_connect.lv_connect_generators(
        self.grid_district, self._graph, debug)
|
def subset(self, logic, update=False):
    """Create a specific phenotype subset based on a logic.

    ``logic`` is a 'SubsetLogic' class.  Takes the union of all the
    phenotypes listed (if none are listed, uses all phenotypes), then the
    intersection of all the scored calls.

    Args:
        logic (SubsetLogic): A subsetlogic object to slice on
        update (bool): (default False) change the name of the phenotype
            according to the label in the subset logic
    Returns:
        CellDataFrame: The CellDataFrame modified.
    """
    pnames = self.phenotypes
    snames = self.scored_names
    data = self.copy()
    values = []
    phenotypes = logic.phenotypes
    if len(phenotypes) == 0:
        # An empty phenotype list means "select across all phenotypes".
        phenotypes = pnames
    # Phenotypes that were NOT selected get stripped from each row's calls.
    removing = set(self.phenotypes) - set(phenotypes)
    for k in phenotypes:
        if k not in pnames:
            raise ValueError("phenotype must exist in defined")
        # Rows where this phenotype is called positive.
        temp = data.loc[data['phenotype_calls'].apply(lambda x: x[k] == 1)].copy()
        if len(removing) > 0 and temp.shape[0] > 0:
            # Drop the un-selected phenotypes from each row's call dict.
            temp['phenotype_calls'] = temp.apply(lambda x: dict([(k, v) for k, v in x['phenotype_calls'].items() if k not in removing]), 1)
        values.append(temp)
    # Union over the selected phenotypes.
    data = pd.concat(values)
    for k, v in logic.scored_calls.items():
        if k not in snames:
            raise ValueError("Scored name must exist in defined")
        # '-' requires the scored call to be negative (0); anything else positive (1).
        myfilter = 0 if v == '-' else 1
        data = data.loc[data['scored_calls'].apply(lambda x: x[k] == myfilter)]
    data.microns_per_pixel = self.microns_per_pixel
    if update:
        # Collapse the phenotype calls to the single label from the logic.
        data['phenotype_calls'] = data['phenotype_calls'].apply(lambda x: {logic.label: 1})
    data.fill_phenotype_label(inplace=True)
    data.db = self.db
    return data
|
def delete(name, region=None, key=None, keyid=None, profile=None):
    '''Delete an ELB.
    CLI example to delete an ELB:
    .. code-block:: bash
        salt myminion boto_elb.delete myelb region=us-east-1'''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if not exists(name, region, key, keyid, profile):
        # An absent ELB is treated as already deleted.
        return True
    try:
        conn.delete_load_balancer(name)
    except boto.exception.BotoServerError:
        log.error('Failed to delete ELB %s', name, exc_info_on_loglevel=logging.DEBUG)
        return False
    log.info('Deleted ELB %s.', name)
    return True
|
def generate_create_view(self):
    """Generate class based view for CreateView.

    Builds the view class dynamically with ``type()``, wiring in the form
    class, template, permissions and success URL, registers it in
    ``self.classes`` under the generated name, and returns it.
    """
    name = model_class_form(self.model + 'CreateView')
    # Class attributes carried by the generated view.
    create_args = dict(form_class=self.get_actual_form('create'), model=self.get_model_class, template_name=self.get_template('create'), permissions=self.view_permission('create'), permission_required=self.check_permission_required, login_required=self.check_login_required, inlineformset=self.inlineformset, success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)), custom_form=self.createupdate_forms or self.custom_modelform, custom_postfix_url=self.custom_postfix_url)
    parent_classes = [self.get_createupdate_mixin(), CreateView]
    if self.custom_create_view_mixin:
        # A user-supplied mixin takes precedence in the MRO.
        parent_classes.insert(0, self.custom_create_view_mixin)
    create_class = type(name, tuple(parent_classes), create_args)
    self.classes[name] = create_class
    return create_class
|
def certs(self, entity_id, descriptor, use="signing"):
    '''Returns certificates for the given Entity.

    :param entity_id: the id of the entity to look up
    :param descriptor: which metadata descriptor to search, or "any" to
        search all known descriptor types
    :param use: certificate use ("signing" by default); keys that declare
        no "use" attribute match any use
    :return: list of unique (repacked) certificates
    '''
    ent = self[entity_id]

    def extract_certs(srvs):
        res = []
        for srv in srvs:
            if "key_descriptor" not in srv:
                continue
            for key in srv["key_descriptor"]:
                # A key matches when its "use" equals the requested use, or
                # when it declares no "use" at all (usable for anything).
                # (Previously both cases carried identical, duplicated
                # extraction code.)
                if "use" in key and key["use"] != use:
                    continue
                for dat in key["key_info"]["x509_data"]:
                    cert = repack_cert(dat["x509_certificate"]["text"])
                    if cert not in res:
                        res.append(cert)
        return res

    if descriptor == "any":
        res = []
        for descr in ["spsso", "idpsso", "role", "authn_authority",
                      "attribute_authority", "pdp"]:
            try:
                srvs = ent["%s_descriptor" % descr]
            except KeyError:
                continue
            res.extend(extract_certs(srvs))
    else:
        srvs = ent["%s_descriptor" % descriptor]
        res = extract_certs(srvs)
    return res
|
def staticMovingAverage(arr, fineness=10):
    """Smooth ``arr`` using a centered moving average.

    The window length is ``len(arr) / fineness`` (at least 1).  The signal
    is mirrored at both ends before convolving so the output has the same
    length as the input, and forward and backward passes are averaged to
    cancel the phase shift.

    :param arr: 1-D numpy array to smooth
    :param fineness: higher values give a shorter window (less smoothing)
    :return: smoothed array of the same length as ``arr``
    """
    s0 = arr.shape[0]
    # Guard against a zero-length window for short arrays: previously
    # s0 < fineness/2 produced window_len == 0, an empty kernel and a
    # division by zero in w.sum().
    window_len = max(1, int(round(s0 / fineness)))
    # Mirror the edges so the convolution is well defined at the boundaries.
    start = arr[0] + arr[0] - arr[window_len - 1:0:-1]
    end = arr[-1] + arr[-1] - arr[-1:-window_len:-1]
    s = np.r_[start, arr, end]
    w = np.ones(window_len, 'd')
    w /= w.sum()
    a0 = np.convolve(w, s, mode='valid')[:s0]
    a1 = np.convolve(w, s[::-1], mode='valid')[:s0][::-1]
    return 0.5 * (a0 + a1)
|
def eval_objfn(self):
    r"""Compute components of the objective function as well as the total
    contribution to the objective function.  The objective function is
    :math:`\|\mathbf{x}\|_1` and the constraint violation measure is
    :math:`P(\mathbf{x}) - \mathbf{x}` where :math:`P(\mathbf{x})` is the
    projection into the constraint set.
    """
    weighted = (self.wl1 * self.obfn_g0var()).ravel()
    obj = np.linalg.norm(weighted, 1)
    projected = sl.proj_l2ball(self.obfn_g1var(), self.S, self.epsilon, axes=0)
    cns = np.linalg.norm(projected - self.obfn_g1var())
    return (obj, cns)
|
def plot_images(self, outfile):
    """Generates a POSCAR with the calculated diffusion path with respect
    to the first endpoint.

    :param outfile: Output file for the POSCAR
    """
    # Copy the site list so appending does not mutate the first image's
    # internal state if ``.sites`` aliases it.
    sum_struct = list(self.__images[0].sites)
    for image in self.__images:
        for site_i in self.__relax_sites:
            sum_struct.append(PeriodicSite(image.sites[site_i].specie,
                                           image.sites[site_i].frac_coords,
                                           self.__images[0].lattice,
                                           to_unit_cell=True,
                                           coords_are_cartesian=False))
    sum_struct = Structure.from_sites(sum_struct, validate_proximity=False)
    p = Poscar(sum_struct)
    p.write_file(outfile)
|
def write_case_data(self, file):
    """Write the case name and base MVA to ``file`` as two CSV rows."""
    writer = self._get_writer(file)
    header = ["Name", "base_mva"]
    row = [self.case.name, self.case.base_mva]
    writer.writerow(header)
    writer.writerow(row)
|
def curve_to(self, x, y, x2, y2, x3, y3):
    """Record a curve-drawing instruction; (x2, y2) is the middle point of
    the curve."""
    instruction = ("curve_to", x, y, x2, y2, x3, y3)
    self._add_instruction(*instruction)
|
def set_file_encoding(self, path, encoding):
    """Cache encoding for the specified file path.

    :param path: path of the file to cache
    :param encoding: encoding to cache
    """
    try:
        # The cache is stored as a JSON-encoded dict under one settings key.
        # ('cached' rather than 'map' to avoid shadowing the builtin.)
        cached = json.loads(self._settings.value('cachedFileEncodings'))
    except TypeError:
        # No cache yet: the settings value is None and json.loads rejects it.
        cached = {}
    cached[path] = encoding
    self._settings.setValue('cachedFileEncodings', json.dumps(cached))
|
def execute_with_scope(expr, scope, aggcontext=None, clients=None, **kwargs):
    """Execute an expression ``expr``, with data provided in ``scope``.

    Parameters
    ----------
    expr : ibis.expr.types.Expr
        The expression to execute.
    scope : collections.Mapping
        A dictionary mapping :class:`~ibis.expr.operations.Node` subclass
        instances to concrete data such as a pandas DataFrame.
    aggcontext : Optional[ibis.pandas.aggcontext.AggregationContext]
        Aggregation context; defaults to Summarize when not given.

    Returns
    -------
    result : scalar, pd.Series, pd.DataFrame
    """
    op = expr.op()
    # Call pre_execute, to allow clients to intercept the expression before
    # computing anything *and* before associating leaf nodes with data. This
    # allows clients to provide their own data for each leaf.
    if clients is None:
        clients = list(find_backends(expr))
    if aggcontext is None:
        aggcontext = agg_ctx.Summarize()
    pre_executed_scope = pre_execute(
        op, *clients, scope=scope, aggcontext=aggcontext, **kwargs
    )
    new_scope = toolz.merge(scope, pre_executed_scope)
    result = execute_until_in_scope(
        expr,
        new_scope,
        aggcontext=aggcontext,
        clients=clients,
        # XXX: we *explicitly* pass in scope and not new_scope here so that
        # post_execute sees the scope of execute_with_scope, not the scope of
        # execute_until_in_scope
        post_execute_=functools.partial(
            post_execute,
            scope=scope,
            aggcontext=aggcontext,
            clients=clients,
            **kwargs,
        ),
        **kwargs,
    )
    return result
|
def proxy_upload(self, path, filename, content_type=None, content_encoding=None,
                 cb=None, num_cb=None):
    """This is the main function that uploads.  We assume the bucket and
    key (== path) exists.  What we do here is simple.  Calculate the
    headers we will need, (e.g. md5, content-type, etc).  Then we ask the
    self.get_proxy_config method to fill in the authentication information
    and tell us which remote host we should talk to for the upload.  From
    there, the rest is ripped from boto.key.Key.send_file.

    NOTE(review): this appears to be Python-2-era code -- the file is
    opened in text mode, md5.update is fed str, and base64.encodestring
    was removed in Python 3.9; porting would need 'rb' mode and
    base64.encodebytes.  Confirm target interpreter before touching.

    :param path: remote key path to upload to
    :param filename: local file to upload
    :param content_type: MIME type; guessed from the filename when None
    :param content_encoding: optional Content-Encoding header value
    :param cb: optional progress callback cb(bytes_sent, total_size)
    :param num_cb: number of progress callbacks to make (-1 for every chunk)
    """
    from boto.connection import AWSAuthConnection
    import mimetypes
    from hashlib import md5
    import base64
    BufferSize = 65536  ## set to something very small to make sure
                        ## chunking is working properly
    fp = open(filename)
    headers = {'Content-Type': content_type}
    if content_type is None:
        content_type = mimetypes.guess_type(filename)[0] or "text/plain"
        headers['Content-Type'] = content_type
    if content_encoding is not None:
        headers['Content-Encoding'] = content_encoding
    # One full pass over the file to compute size and MD5 for the headers.
    m = md5()
    fp.seek(0)
    s = fp.read(BufferSize)
    while s:
        m.update(s)
        s = fp.read(BufferSize)
    self.size = fp.tell()
    fp.seek(0)
    self.md5 = m.hexdigest()
    headers['Content-MD5'] = base64.encodestring(m.digest()).rstrip('\n')
    headers['Content-Length'] = str(self.size)
    headers['Expect'] = '100-Continue'
    host, headers = self.get_proxy_config(headers, path)
    ### how to do this same thing with curl instead...
    print("curl -i --trace-ascii foo.log -T %s -H %s https://%s%s" % (filename, " -H ".join("'%s: %s'" % (k, v) for k, v in headers.items()), host, path))

    def sender(http_conn, method, path, data, headers):
        # Callback handed to boto's _mexe: writes the request line and
        # headers, then streams the file body in BufferSize chunks with
        # optional progress callbacks.
        http_conn.putrequest(method, path)
        for key in headers:
            http_conn.putheader(key, headers[key])
        http_conn.endheaders()
        fp.seek(0)
        http_conn.set_debuglevel(0)  ### XXX set to e.g. 4 to see what going on
        if cb:
            if num_cb > 2:
                cb_count = self.size / BufferSize / (num_cb - 2)
            elif num_cb < 0:
                cb_count = -1
            else:
                cb_count = 0
            i = total_bytes = 0
            cb(total_bytes, self.size)
        l = fp.read(BufferSize)
        while len(l) > 0:
            http_conn.send(l)
            if cb:
                total_bytes += len(l)
                i += 1
                if i == cb_count or cb_count == -1:
                    cb(total_bytes, self.size)
                    i = 0
            l = fp.read(BufferSize)
        if cb:
            cb(total_bytes, self.size)
        response = http_conn.getresponse()
        body = response.read()
        fp.seek(0)
        if response.status == 500 or response.status == 503 or response.getheader('location'):
            # we'll try again
            return response
        elif response.status >= 200 and response.status <= 299:
            self.etag = response.getheader('etag')
            if self.etag != '"%s"' % self.md5:
                raise Exception('ETag from S3 did not match computed MD5')
            return response
        else:
            # raise provider.storage_response_error(
            #     response.status, response.reason, body)
            raise Exception(response.status, response.reason, body)

    awsc = AWSAuthConnection(host, aws_access_key_id="key_id",
                             aws_secret_access_key="secret")
    awsc._mexe('PUT', path, None, headers, sender=sender)
|
def n_at_a_time(items: List[int], n: int, fillvalue: str) -> Iterator[Tuple[Union[int, str]]]:
    """Group *items* into tuples of *n* elements at a time.

    Any final partial tuple is padded with *fillvalue*.

    >>> list(n_at_a_time([1, 2, 3, 4, 5], 2, 'X'))
    [(1, 2), (3, 4), (5, 'X')]
    """
    # n references to one shared iterator: zip_longest pulls n consecutive
    # items for each output tuple.
    shared = [iter(items)] * n
    return itertools.zip_longest(*shared, fillvalue=fillvalue)
|
def assemble_phi5_works_filepaths():
    """Reads PHI5 index and builds a list of absolute filepaths."""
    plaintext_dir = os.path.expanduser('~/cltk_data/latin/text/phi5/individual_works/')
    return [os.path.join(plaintext_dir, author_code + '.TXT' + '-' + work + '.txt')
            for author_code in PHI5_WORKS_INDEX
            for work in PHI5_WORKS_INDEX[author_code]['works']]
|
def update(self, initiation_actions=values.unset):
    """Update the AssistantInitiationActionsInstance.

    :param dict initiation_actions: The initiation_actions
    :returns: Updated AssistantInitiationActionsInstance
    :rtype: twilio.rest.preview.understand.assistant.assistant_initiation_actions.AssistantInitiationActionsInstance
    """
    # Delegate to the proxy context object.
    return self._proxy.update(initiation_actions=initiation_actions)
|
def add_remote_link(self, issue, destination, globalId=None, application=None, relationship=None):
    """Add a remote link from an issue to an external application and returns a remote link Resource for it.

    ``object`` should be a dict containing at least ``url`` to the linked external URL and
    ``title`` to display for the link inside JIRA.

    For definitions of the allowable fields for ``object`` and the keyword arguments ``globalId``, ``application``
    and ``relationship``, see https://developer.atlassian.com/display/JIRADEV/JIRA+REST+API+for+Remote+Issue+Links.

    :param issue: the issue to add the remote link to
    :param destination: the link details to add (see the above link for details)
    :param globalId: unique ID for the link (see the above link for details)
    :param application: application information for the link (see the above link for details)
    :param relationship: relationship description for the link (see the above link for details)
    """
    try:
        applicationlinks = self.applicationlinks()
    except JIRAError as e:
        applicationlinks = []
        # In many (if not most) configurations, non-admin users are
        # not allowed to list applicationlinks; if we aren't allowed,
        # let's let people try to add remote links anyway, we just
        # won't be able to be quite as helpful.
        warnings.warn("Unable to gather applicationlinks; you will not be able "
                      "to add links to remote issues: (%s) %s" % (e.status_code, e.text),
                      Warning)
    data = {}
    if isinstance(destination, Issue):
        data['object'] = {'title': str(destination), 'url': destination.permalink()}
        for x in applicationlinks:
            if x['application']['displayUrl'] == destination._options['server']:
                data['globalId'] = "appId=%s&issueId=%s" % (x['application']['id'], destination.raw['id'])
                data['application'] = {'name': x['application']['name'], 'type': "com.atlassian.jira"}
                break
        if 'globalId' not in data:
            raise NotImplementedError("Unable to identify the issue to link to.")
    else:
        if globalId is not None:
            data['globalId'] = globalId
        if application is not None:
            data['application'] = application
        data['object'] = destination
    if relationship is not None:
        data['relationship'] = relationship
    # check if the link comes from one of the configured application links
    # NOTE(review): destination.raw assumes an Issue here; confirm this
    # loop's placement relative to the else-branch above.
    for x in applicationlinks:
        if x['application']['displayUrl'] == self._options['server']:
            data['globalId'] = "appId=%s&issueId=%s" % (x['application']['id'], destination.raw['id'])
            data['application'] = {'name': x['application']['name'], 'type': "com.atlassian.jira"}
            break
    url = self._get_url('issue/' + str(issue) + '/remotelink')
    r = self._session.post(url, data=json.dumps(data))
    remote_link = RemoteLink(self._options, self._session, raw=json_loads(r))
    return remote_link
|
def set_transition_down(self, p_self):
    '''Set the downbeat-tracking transition matrix according to self-loop
    probabilities.

    Parameters
    ----------
    p_self : None, float in (0, 1), or np.ndarray [shape=(2,)]
        Optional self-loop probability(ies), used for Viterbi decoding
    '''
    # No probability given -> disable Viterbi transition weighting.
    self.down_transition = None if p_self is None else transition_loop(2, p_self)
|
def check_results(tmp_):
    """Classify a result-count dict into a severity code.

    Inspects the counters in ``tmp_`` (keys ``t``, ``l``, ``rr``/``ra``,
    ``cr``/``ca``, ``mr``/``ma``, ``sr``/``sa``) and returns 3, 2, 0 or -1,
    printing the matched rule number, the code, and the dict.

    NOTE(review): the single-letter key names are opaque; the original TODO
    to rename them still stands.  The Python-2 ``print`` statements have
    been converted to ``print()`` calls (the file uses Python-3 syntax
    elsewhere).

    :param tmp_: dict of integer counters
    :return: severity code 3, 2, 0, or -1 when no rule matches
    """
    # TODO: Fix this to work with more meaningful names
    if tmp_['t'] > 0:
        if tmp_['l'] > 0:
            if tmp_['rr'] > 0 or tmp_['ra'] > 1:
                print(1, 3, tmp_)
                return 3
            elif tmp_['cr'] > 0 or tmp_['ca'] > 1:
                print(2, 3, tmp_)
                return 3
            elif tmp_['mr'] > 0 or tmp_['ma'] > 1:
                print(3, 2, tmp_)
                return 2
        if tmp_['cr'] > 1 or tmp_['ca'] > 2:
            print(4, 2, tmp_)
            return 2
        if tmp_['mr'] > 0 or tmp_['ma'] > 1:
            if tmp_['cr'] > 0 or tmp_['ca'] > 1:
                print(6, 0, tmp_)
                return 0
        if tmp_['rr'] > 1 or tmp_['ra'] > 2:
            print(7, 0, tmp_)
            return 0
        if tmp_['sr'] > 1 or tmp_['sa'] > 2:
            print(8, 0, tmp_)
            return 0
    if tmp_['l'] > 0:
        if tmp_['rr'] > 0 or tmp_['ra'] > 1:
            print(9, 2, tmp_)
            return 2
        if tmp_['cr'] > 0 or tmp_['ca'] > 1:
            print(10, 0, tmp_)
            return 0
    return -1
|
def restrict(self, index_array):
    """Generate a view restricted to a subset of indices."""
    size = index_array.shape[0]
    return OnFlySymMatrix(self.get_row, (size, size),
                          DC_start=self.DC_start, DC_end=self.DC_end,
                          rows=self.rows, restrict_array=index_array)
|
def expect(p, prefixes, confidential=False):
    """Read a line from ``p``'s stdout and return it without the required
    prefix.

    Raises UnexpectedError when the line matches none of ``prefixes``.
    """
    resp = p.stdout.readline()
    log.debug('%s -> %r', p.args, '********' if confidential else resp)
    for prefix in prefixes:
        if resp.startswith(prefix):
            return resp[len(prefix):]
    raise UnexpectedError(resp)
|
def arnoldi_projected(H, P, k, ortho='mgs'):
    """Compute (perturbed) Arnoldi relation for projected operator.

    Assume that you have computed an Arnoldi relation

    .. math::
        A V_n = V_{n+1} \\underline{H}_n

    where :math:`V_{n+1} \\in \\mathbb{C}^{N,n+1}` has orthogonal columns
    (with respect to an inner product :math:`\\langle\\cdot,\\cdot\\rangle`)
    and :math:`\\underline{H}_n \\in \\mathbb{C}^{n+1,n}` is an extended
    upper Hessenberg matrix.  This method computes for :math:`i < n-k` the
    perturbed Arnoldi relation

    .. math::
        (\\tilde{P} A + E_i) W_i = W_{i+1} \\underline{G}_i

    for the projection :math:`\\tilde{P}` induced by ``P`` (see the module
    documentation for the full derivation), where
    :math:`W_{i+1} = V_n U_{i+1}` has orthogonal columns,
    :math:`\\underline{G}_i` is an extended upper Hessenberg matrix and
    :math:`E_i x = v_{n+1} F_i \\langle W_i, x\\rangle`.  In a sense, the
    perturbed relation is the best prediction for the behavior of the
    Krylov subspace :math:`K_i(\\tilde{P}A, \\tilde{P}v_1)` obtainable from
    :math:`K_{n+1}(A, v_1)` without further matrix-vector multiplications
    with A.

    :param H: the extended upper Hessenberg matrix
        :math:`\\underline{H}_n` with ``shape==(n+1,n)``.
    :param P: the projection
        :math:`P:\\mathbb{C}^n \\longrightarrow \\mathbb{C}^n` (has to be
        compatible with :py:meth:`get_linearoperator`).
    :param k: the dimension of the null space of P.
    :param ortho: orthogonalization scheme passed to Arnoldi (default 'mgs').
    :returns: U, G, F where

        * U is the coefficient matrix :math:`U_{i+1}` with ``shape==(n,i+1)``,
        * G is the extended upper Hessenberg matrix :math:`\\underline{G}_i`
          with ``shape==(i+1,i)``,
        * F is the error matrix :math:`F_i` with ``shape==(1,i)``.
    """
    n = H.shape[1]
    dtype = find_common_dtype(H, P)
    # A square H means the Krylov subspace is invariant (no residual row).
    invariant = H.shape[0] == n
    hlast = 0 if invariant else H[-1, -1]
    H = get_linearoperator((n, n), H if invariant else H[:-1, :])
    P = get_linearoperator((n, n), P)
    v = P * numpy.eye(n, 1)
    maxiter = n - k + 1
    F = numpy.zeros((1, maxiter), dtype=dtype)
    PH = lambda x: P * (H * x)
    PH = LinearOperator((n, n), dtype, PH)
    _arnoldi = Arnoldi(PH, v, maxiter=maxiter, ortho=ortho)
    while _arnoldi.iter < _arnoldi.maxiter and not _arnoldi.invariant:
        u, _ = _arnoldi.get_last()
        # Error term coupling the projected recurrence back to v_{n+1}.
        F[0, _arnoldi.iter] = hlast * u[-1, 0]
        _arnoldi.advance()
    U, G = _arnoldi.get()
    return U, G, F[[0], :_arnoldi.iter]
|
def create_sitemap(app, exception):
    """Generates the sitemap.xml from the collected HTML page links.

    Does nothing when no base_url is configured, the build raised an
    exception, or no links were collected.
    """
    base_url = app.config['html_theme_options'].get('base_url', '')
    if not base_url or exception is not None or not app.sitemap_links:
        return
    filename = app.outdir + "/sitemap.xml"
    print("Generating sitemap.xml in %s" % filename)
    root = ET.Element("urlset")
    root.set("xmlns", "http://www.sitemaps.org/schemas/sitemap/0.9")
    for link in app.sitemap_links:
        entry = ET.SubElement(root, "url")
        ET.SubElement(entry, "loc").text = link
    ET.ElementTree(root).write(filename)
|
def insert_self_to_empty_and_insert_all_intemediate(self, optimized):
    """For each state qi of the PDA, add the rule Aii -> e.
    For each triplet of states qi, qj and qk, add the rule Aij -> Aik Akj.

    Args:
        optimized (bool): Enable or Disable optimization - when truthy,
            do not produce the O(n^3) triplet rules
    """
    for src in self.statediag:
        self.rules.append('A' + repr(src.id) + ',' + repr(src.id) + ': @empty_set')
        # If CFG is not requested, avoid the following O(n^3) rule.
        # It can be solved and a string can be generated faster with BFS of DFS
        if optimized == 0:
            for mid in self.statediag:
                if mid.id == src.id:
                    continue
                for dst in self.statediag:
                    if dst.id == src.id or dst.id == mid.id:
                        continue
                    self.rules.append('A' + repr(src.id) + ',' + repr(dst.id) +
                                      ': A' + repr(src.id) + ',' + repr(mid.id) +
                                      ' A' + repr(mid.id) + ',' + repr(dst.id) + '')
|
def get_fun(fun):
    '''Return a dict of the last function called for all minions'''
    serv = _get_serv(ret=None)
    results = {}
    for minion in serv.smembers('minions'):
        try:
            jid = serv.get('{0}:{1}'.format(minion, fun))
        except Exception:
            continue
        if not jid:
            # This minion never ran the requested function.
            continue
        data = serv.get('{0}:{1}'.format(minion, jid))
        if data:
            results[minion] = salt.utils.json.loads(data)
    return results
|
def temporary_file(mode):
    """Cross platform temporary file creation.

    This is an alternative to ``tempfile.NamedTemporaryFile`` that also
    works on windows and avoids the "file being used by another process"
    error.
    """
    full_filename = os.path.join(tempfile.gettempdir(),
                                 'tmpfile-%s' % (uuid.uuid4()))
    if 'w' not in mode:
        # We need to create the file before we can open it in 'r' mode.
        open(full_filename, 'w').close()
    try:
        with open(full_filename, mode) as f:
            yield f
    finally:
        os.remove(f.name)
|
def serialized_task(self, task: Task) -> Tuple[str, str]:
    """Returns the name of the task definition file and its contents."""
    definition_filename = f"{task.hash}.json"
    return definition_filename, task.json
|
def load_from_file(swag_path, swag_type='yml', root_path=None):
    """Load specs from YAML file and return the raw file contents.

    :param swag_path: path to the YAML file
    :param swag_type: file extension; must be 'yaml' or 'yml'
    :param root_path: optional base directory used when the path is relative
    :raises AttributeError: when swag_type is not a YAML extension
    """
    if swag_type not in ('yaml', 'yml'):
        raise AttributeError("Currently only yaml or yml supported")
    # TODO: support JSON
    try:
        enc = detect_by_bom(swag_path)
        with codecs.open(swag_path, encoding=enc) as yaml_file:
            return yaml_file.read()
    except IOError:
        # not in the same dir, add dirname
        swag_path = os.path.join(root_path or os.path.dirname(__file__), swag_path)
        try:
            enc = detect_by_bom(swag_path)
            with codecs.open(swag_path, encoding=enc) as yaml_file:
                return yaml_file.read()
        except IOError:  # pragma: no cover
            # if package dir
            # see https://github.com/rochacbruno/flasgger/pull/104
            # Still not able to reproduce this case
            # test are in examples/package_example
            # need more detail on how to reproduce IOError here
            swag_path = swag_path.replace("/", os.sep).replace("\\", os.sep)
            path = swag_path.replace((root_path or os.path.dirname(__file__)), '').split(os.sep)[1:]
            site_package = imp.find_module(path[0])[1]
            swag_path = os.path.join(site_package, os.sep.join(path[1:]))
            with open(swag_path) as yaml_file:
                return yaml_file.read()
|
def create_node(hostname, username, password, name, address, trans_label=None):
    '''A function to connect to a bigip device and create a node.
    hostname
        The host/address of the bigip device
    username
        The iControl REST username
    password
        The iControl REST password
    name
        The name of the node
    address
        The address of the node
    trans_label
        The label of the transaction stored within the grain:
        ``bigip_f5_trans:<label>``
    CLI Example::
        salt '*' bigip.create_node bigip admin admin 10.1.1.2'''
    # build session
    bigip_session = _build_session(username, password, trans_label)
    # construct the payload
    payload = {'name': name, 'address': address}
    # post to REST
    try:
        response = bigip_session.post(
            BIG_IP_URL_BASE.format(host=hostname) + '/ltm/node',
            data=salt.utils.json.dumps(payload))
    except requests.exceptions.ConnectionError as e:
        return _load_connection_error(hostname, e)
    return _load_response(response)
|
def load_yaml(task: Task, file: str) -> Result:
    """Loads a yaml file.

    Arguments:
        file: path to the file containing the yaml file to load

    Examples:
        Simple example with ``ordered_dict``::

            > nr.run(task=load_yaml,
                     file="mydata.yaml")

    Returns:
        Result object with the following attributes set:
          * result (``dict``): dictionary with the contents of the file
    """
    parser = ruamel.yaml.YAML(typ="safe")
    with open(file, "r") as f:
        data = parser.load(f)
    return Result(host=task.host, result=data)
|
def with_name(self, name):
    """Sets the name scope for future operations."""
    with self.g.as_default(), tf.variable_scope(name) as var_scope:
        current_name_scope = scopes.get_current_name_scope()
        return _DeferredLayer(self.bookkeeper,
                              None,
                              (),
                              {},
                              scope=(current_name_scope, var_scope),
                              defaults=self._defaults,
                              pass_through=self,
                              partial_context=self._partial_context)
|
def _set_enabled_zone(self, v, load=False):
    """Setter method for enabled_zone, mapped from YANG variable
    /brocade_zone_rpc/show_zoning_enabled_configuration/output/enabled_configuration/enabled_zone (list)

    If this variable is read-only (config: false) in the source YANG file,
    then _set_enabled_zone is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_enabled_zone() directly.

    NOTE(review): this looks like pyangbind-generated code -- keep
    byte-identical; regenerate from the YANG model rather than hand-edit.
    """
    # Unwrap a typed value coming from another binding, when applicable.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the value into the generated list type; raises on mismatch.
        t = YANGDynClass(v, base=YANGListType("zone_name", enabled_zone.enabled_zone, yang_name="enabled-zone", rest_name="enabled-zone", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='zone-name', extensions={u'tailf-common': {u'info': u'List of enabled Zones'}}), is_container='list', yang_name="enabled-zone", rest_name="enabled-zone", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'List of enabled Zones'}}, namespace='urn:brocade.com:mgmt:brocade-zone', defining_module='brocade-zone', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """enabled_zone must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("zone_name",enabled_zone.enabled_zone, yang_name="enabled-zone", rest_name="enabled-zone", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='zone-name', extensions={u'tailf-common': {u'info': u'List of enabled Zones'}}), is_container='list', yang_name="enabled-zone", rest_name="enabled-zone", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'List of enabled Zones'}}, namespace='urn:brocade.com:mgmt:brocade-zone', defining_module='brocade-zone', yang_type='list', is_config=True)""", })
    self.__enabled_zone = t
    if hasattr(self, '_set'):
        self._set()
|
def add_entity_info(data_api, struct_inflator):
    """Add the entity info to the structure.

    :param data_api: the interface to the decoded data
    :param struct_inflator: the interface to put the data into the client object
    """
    for record in data_api.entity_list:
        struct_inflator.set_entity_info(record["chainIndexList"],
                                        record["sequence"],
                                        record["description"],
                                        record["type"])
|
def available(self, context):
    """Determine if this action is available in this `context`.

    :param context: a dict whose content is left to application needs; if
        :attr:`.condition` is a callable it receives `context` in parameter.
    :return: False when disabled or when evaluating either condition raises;
        otherwise the combined condition result.
    """
    if not self._enabled:
        return False
    try:
        # Short-circuit: _check_condition only runs when pre_condition passes.
        result = self.pre_condition(context) and self._check_condition(context)
    except Exception:
        # Any failure while evaluating conditions means "not available".
        return False
    return result
|
def convolutional_barycenter2d(A, reg, weights=None, numItermax=10000, stopThr=1e-9, stabThr=1e-30, verbose=False, log=False):
    r"""Compute the entropic regularized Wasserstein barycenter of distributions A,
    where A is a collection of 2D images.

    The function solves

    .. math::
       \mathbf{a} = arg\min_\mathbf{a} \sum_i W_{reg}(\mathbf{a},\mathbf{a}_i)

    where:

    - :math:`W_{reg}(\cdot,\cdot)` is the entropic regularized Wasserstein
      distance (see ot.bregman.sinkhorn)
    - :math:`\mathbf{a}_i` are training distributions (2D images) in the last
      two dimensions of matrix :math:`\mathbf{A}`
    - reg is the regularization strength scalar value

    The algorithm is the convolutional Sinkhorn-Knopp matrix scaling scheme
    proposed in [21]_: the Gibbs kernel of the squared-distance cost on a
    regular grid is separable, so each kernel application is two dense
    matrix products instead of one (w*h) x (w*h) product.

    Parameters
    ----------
    A : np.ndarray (n, w, h)
        n distributions (2D images) of size w x h
    reg : float
        Regularization term > 0
    weights : np.ndarray (n,), optional
        Weights of each image on the simplex (barycentric coordinates);
        defaults to uniform weights.
    numItermax : int, optional
        Max number of iterations
    stopThr : float, optional
        Stop threshold on error (> 0)
    stabThr : float, optional
        Stabilization threshold to avoid numerical precision issues
    verbose : bool, optional
        Print information along iterations
    log : bool, optional
        record log if True

    Returns
    -------
    a : (w, h) ndarray
        2D Wasserstein barycenter
    log : dict
        log dictionary, returned only if ``log == True``

    References
    ----------
    .. [21] Solomon, J., De Goes, F., Peyre, G., Cuturi, M., Butscher, A.,
       Nguyen, A. & Guibas, L. (2015). Convolutional wasserstein distances:
       Efficient optimal transportation on geometric domains. ACM
       Transactions on Graphics (TOG), 34(4), 66
    """
    if weights is None:
        # default: uniform barycentric coordinates
        weights = np.ones(A.shape[0]) / A.shape[0]
    else:
        assert (len(weights) == A.shape[0])
    if log:
        log = {'err': []}
    b = np.zeros_like(A[0, :, :])
    U = np.ones_like(A)
    KV = np.ones_like(A)
    cpt = 0
    err = 1
    # build the convolution operator
    # NOTE(review): the grid is normalized to [0, 1] along axis 1 and the same
    # 1D kernel is applied on both axes -- this assumes square images (w == h);
    # confirm for non-square inputs.
    t = np.linspace(0, 1, A.shape[1])
    [Y, X] = np.meshgrid(t, t)
    xi1 = np.exp(-(X - Y) ** 2 / reg)

    def K(x):
        # separable Gibbs kernel: convolve along rows, then along columns
        return np.dot(np.dot(xi1, x), xi1)
    while (err > stopThr and cpt < numItermax):
        bold = b
        cpt = cpt + 1
        b = np.zeros_like(A[0, :, :])
        for r in range(A.shape[0]):
            # Sinkhorn scaling step per input image; stabThr guards the division
            KV[r, :, :] = K(A[r, :, :] / np.maximum(stabThr, K(U[r, :, :])))
            # weighted geometric mean accumulated in the log domain
            b += weights[r] * np.log(np.maximum(stabThr, U[r, :, :] * KV[r, :, :]))
        b = np.exp(b)
        for r in range(A.shape[0]):
            U[r, :, :] = b / np.maximum(stabThr, KV[r, :, :])
        if cpt % 10 == 1:
            # convergence is only checked every 10 iterations to save time
            err = np.sum(np.abs(bold - b))
            # log and verbose print
            if log:
                log['err'].append(err)
            if verbose:
                if cpt % 200 == 0:
                    print('{:5s}|{:12s}'.format('It.', 'Err') + '\n' + '-' * 19)
                print('{:5d}|{:8e}|'.format(cpt, err))
    if log:
        log['niter'] = cpt
        log['U'] = U
        return b, log
    else:
        return b
|
def create_dbinstance(self, id, allocated_storage, instance_class, master_username, master_password, port=3306, engine='MySQL5.1', db_name=None, param_group=None, security_groups=None, availability_zone=None, preferred_maintenance_window=None, backup_retention_period=None, preferred_backup_window=None, multi_az=False, engine_version=None, auto_minor_version_upgrade=True):
    """Create a new DBInstance.

    :type id: str
    :param id: Unique identifier for the new instance (1-63 alphanumeric
        characters, starting with a letter, no trailing hyphen or
        consecutive hyphens).
    :type allocated_storage: int
    :param allocated_storage: Initially allocated storage size in GBs [5-1024].
    :type instance_class: str
    :param instance_class: Compute/memory capacity of the DBInstance, one of
        db.m1.small, db.m1.large, db.m1.xlarge, db.m2.xlarge,
        db.m2.2xlarge, db.m2.4xlarge.
    :type engine: str
    :param engine: Name of database engine. Must be MySQL5.1 for now.
    :type master_username: str
    :param master_username: Name of the master user (1-15 alphanumeric
        characters, first must be a letter).
    :type master_password: str
    :param master_password: Password of the master user (4-16 alphanumeric
        characters).
    :type port: int
    :param port: Port on which the database accepts connections
        [1115-65535]; defaults to 3306.
    :type db_name: str
    :param db_name: Name of a database to create at instance creation time;
        by default no database is created.
    :type param_group: str
    :param param_group: Name of a DBParameterGroup to associate, if any.
    :type security_groups: list of str or of DBSecurityGroup objects
    :param security_groups: DBSecurityGroups to authorize on this instance.
    :type availability_zone: str
    :param availability_zone: Availability zone to place the instance into.
    :type preferred_maintenance_window: str
    :param preferred_maintenance_window: Weekly UTC time range during which
        maintenance can occur; default is Sun:05:00-Sun:09:00.
    :type backup_retention_period: int
    :param backup_retention_period: Days automated backups are retained;
        zero disables automated backups.
    :type preferred_backup_window: str
    :param preferred_backup_window: Daily UTC time range (hh24:mi-hh24:mi)
        during which automated backups are created, if enabled.
    :type multi_az: bool
    :param multi_az: If True, deploy across multiple availability zones.
    :type engine_version: str
    :param engine_version: Database engine version number to use.
    :type auto_minor_version_upgrade: bool
    :param auto_minor_version_upgrade: Apply minor engine upgrades
        automatically during the maintenance window; defaults to True.
    :rtype: :class:`boto.rds.dbinstance.DBInstance`
    :return: The new db instance.
    """
    params = {
        'DBInstanceIdentifier': id,
        'AllocatedStorage': allocated_storage,
        'DBInstanceClass': instance_class,
        'Engine': engine,
        'MasterUsername': master_username,
        'MasterUserPassword': master_password,
        'Port': port,
        'MultiAZ': str(multi_az).lower(),
        'AutoMinorVersionUpgrade': str(auto_minor_version_upgrade).lower(),
    }
    # Optional parameters that are only sent when truthy.
    for key, value in (
            ('DBName', db_name),
            ('DBParameterGroupName', param_group),
            ('AvailabilityZone', availability_zone),
            ('PreferredMaintenanceWindow', preferred_maintenance_window),
            ('PreferredBackupWindow', preferred_backup_window),
            ('EngineVersion', engine_version)):
        if value:
            params[key] = value
    # Zero is a meaningful retention period (disables backups), so test None.
    if backup_retention_period is not None:
        params['BackupRetentionPeriod'] = backup_retention_period
    if security_groups:
        # Accept either plain group names or DBSecurityGroup objects.
        group_names = [g.name if isinstance(g, DBSecurityGroup) else g
                       for g in security_groups]
        self.build_list_params(params, group_names, 'DBSecurityGroups.member')
    return self.get_object('CreateDBInstance', params, DBInstance)
|
def wait_to_start(self, allow_failure=False):
    """Block until the worker thread has actually started.

    :param allow_failure: when True, do not raise even if the thread
        crashed during setup.
    :raises RuntimeError: if the thread crashed during setup and
        ``allow_failure`` is False.
    """
    self._started.wait()
    if self._crashed and not allow_failure:
        # Reap the dead thread before reporting the failure.
        self._thread.join()
        # BUG FIX: the implicit string concatenation previously produced
        # "Tracebackfor details." (missing space between the two literals).
        raise RuntimeError('Setup failed, see {} Traceback '
                           'for details.'.format(self._thread.name))
|
def assign(self, variables=None, **variables_kwargs):
    """Assign new data variables to a Dataset, returning a new object
    with all the original variables in addition to the new ones.

    Parameters
    ----------
    variables : mapping, value pairs
        Mapping from variable names to the new values. Callable values are
        computed on the Dataset and assigned to new data variables;
        non-callable values (e.g. a DataArray, scalar, or array) are
        assigned as-is.
    **variables_kwargs:
        The keyword-argument form of ``variables``. One of ``variables``
        or ``variables_kwargs`` must be provided.

    Returns
    -------
    ds : Dataset
        A new Dataset with the new variables in addition to all the
        existing variables.

    Notes
    -----
    Since ``kwargs`` is a dictionary, the order of your arguments may not
    be preserved, so the order of the new variables is not well defined.
    Multiple variables may be assigned in one ``assign`` call, but they
    cannot reference each other.

    See Also
    --------
    pandas.DataFrame.assign
    """
    new_vars = either_dict_or_kwargs(variables, variables_kwargs, 'assign')
    ds = self.copy()
    # Evaluate everything against the unmodified copy first, then assign,
    # so callables cannot observe each other's results.
    computed = ds._calc_assign_results(new_vars)
    ds.update(computed)
    return ds
|
def save_load(jid, load, minions=None):
    '''Save the load to the specified jid'''
    log.debug('sqlite3 returner <save_load> called jid: %s load: %s', jid, load)
    conn = _get_conn(ret=None)
    cursor = conn.cursor()
    # Store the load as a JSON blob keyed by jid.
    cursor.execute(
        '''INSERT INTO jids (jid, load) VALUES (:jid, :load)''',
        {'jid': jid, 'load': salt.utils.json.dumps(load)})
    _close_conn(conn)
|
def compute(self, runner_results, setup=False, poll=False, ignore_errors=False):
    '''Walk through all runner results and increment per-host stats.

    :param runner_results: dict with ``contacted`` (host -> result dict)
        and ``dark`` (host -> result dict) sections.
    :param setup: when True, "changed" results do not count as changed.
    :param poll: when True, only finished results count as ok.
    :param ignore_errors: when True, failures are not recorded as failures.
    '''
    # BUG FIX: dict.iteritems() is Python-2-only and raises AttributeError
    # on Python 3; .items() behaves correctly on both.
    for host, value in runner_results.get('contacted', {}).items():
        if not ignore_errors and (value.get('failed') or value.get('rc', 0) != 0):
            self._increment('failures', host)
        elif value.get('skipped'):
            self._increment('skipped', host)
        elif value.get('changed'):
            # During setup/poll runs a change is not counted as "changed".
            if not setup and not poll:
                self._increment('changed', host)
            self._increment('ok', host)
        else:
            if not poll or value.get('finished'):
                self._increment('ok', host)
    for host in runner_results.get('dark', {}):
        self._increment('dark', host)
|
def connect(nickname, server_def, debug=None, timeout=10):
    """Connect and confirm the server works by testing for a known
    qualifier in the default namespace, or in 'interop' when no default
    namespace is defined.

    :param nickname: display name of the server (used in messages only)
    :param server_def: object with url, user, password,
        implementation_namespace and no_verification attributes
    :param debug: when truthy, enable request/response capture on the connection
    :param timeout: connection timeout in seconds
    :return: a WBEMConnection object, or None if the connection fails.
    """
    url = server_def.url
    conn = pywbem.WBEMConnection(
        url, (server_def.user, server_def.password),
        default_namespace=server_def.implementation_namespace,
        no_verification=server_def.no_verification,
        timeout=timeout)
    if debug:
        conn.debug = True
    ns = server_def.implementation_namespace if server_def.implementation_namespace else 'interop'
    try:
        conn.GetQualifier('Association', namespace=ns)
        return conn
    except pywbem.ConnectionError as exc:
        print("Test server {0} at {1!r} cannot be reached. {2}: {3}".format(
            nickname, url, exc.__class__.__name__, exc))
        return None
    except pywbem.AuthError as exc:
        print("Test server {0} at {1!r} cannot be authenticated with. "
              "{2}: {3}".format(nickname, url, exc.__class__.__name__, exc))
        return None
    except pywbem.CIMError as ce:
        # BUG FIX: the second comparison read ``ce.status.code``, which is an
        # AttributeError at runtime; the attribute is ``status_code`` as used
        # in the first comparison.
        if ce.status_code in (pywbem.CIM_ERR_NAMESPACE_NOT_FOUND,
                              pywbem.CIM_ERR_NOT_FOUND):
            # The server answered, even though the qualifier is missing.
            return conn
        return None
    except pywbem.Error as exc:
        print("Test server {0} at {1!r} returned exception. {2}: {3}".format(
            nickname, url, exc.__class__.__name__, exc))
        return None
|
def state_nums():
    """Get a dictionary of state names mapped to their 'legend' value.

    Returns:
        dict mapping each state name (first CSV column) to its row index.
    """
    st_nums = {}
    fname = pkg_resources.resource_filename(__name__, 'resources/States.csv')
    # BUG FIX: mode 'rU' was deprecated and removed in Python 3.11
    # (ValueError). newline='' is the documented way to open files for csv.
    with open(fname, 'r', newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        for i, row in enumerate(reader):
            st_nums[row[0]] = i
    return st_nums
|
def get_hashed_filename(name, file, suffix=None):
    """Build a new filename of the form "oldfilename.hash.ext".

    If the old filename already appears to contain a hash, it is replaced
    (so names like "pic.hash.hash.ext" cannot occur).

    :param name: the original filename
    :param file: a seekable file-like object whose content is hashed
    :param suffix: optional suffix appended to the base name before the hash
    """
    basename, _old_hash, ext = split_filename(name)
    # Hash the whole file content from the start.
    file.seek(0)
    digest = '.%s' % md5(file.read()).hexdigest()[:12]
    if suffix is not None:
        basename = '%s_%s' % (basename, suffix)
    return '%s%s%s' % (basename, digest, ext)
|
def validate(self, vat_deets):
    """Validate an existing VAT identification number against VIES."""
    response = self._get('validation', vat_deets)
    return self.responder(response)
|
def _thread_listen(self):
    """The main listen loop.

    Long-polls the device at URL_LISTEN and pushes each decoded JSON
    payload onto ``self._queue`` while ``self._running`` is truthy.
    When the loop exits, an empty dict is enqueued as a shutdown sentinel
    for the consumer.
    """
    while self._running:
        try:
            rest = requests.get(URL_LISTEN.format(self._url), timeout=self._timeout)
            if rest.status_code == 200:
                self._queue.put(rest.json())
            else:
                # Unexpected HTTP status: log and back off before retrying.
                _LOGGER.error('QSUSB response code %s', rest.status_code)
                sleep(30)
        # Received for "Read timed out" and "Connection refused"
        except requests.exceptions.ConnectionError as err:
            if str(err).find('timed') > 0:
                # "Read timed out": normal long-poll expiry; request an update.
                self._queue.put({QS_CMD: CMD_UPDATE})
            else:
                # "Connection refused": QSUSB is down; back off longer.
                _LOGGER.error(str(err))
                sleep(60)
        except Exception as err:  # pylint: disable=broad-except
            # Catch-all so a transient error never kills the listener thread.
            _LOGGER.error("%s - %s", str(type(err)), str(err))
            sleep(5)
    # Sentinel: tells the queue consumer the listener has stopped.
    self._queue.put({})
|
def init(app_id, app_key=None, master_key=None, hook_key=None):
    """Initialize the LeanCloud AppId / AppKey / MasterKey globals.

    :type app_id: string_types
    :param app_id: the application's Application ID
    :type app_key: None or string_types
    :param app_key: the application's Application Key
    :type master_key: None or string_types
    :param master_key: the application's Master Key
    :type hook_key: None or string_types
    :param hook_key: the application's hook key; falls back to the
        LEANCLOUD_APP_HOOK_KEY environment variable when omitted
    :raises RuntimeError: when neither app_key nor master_key is given
    """
    global APP_ID, APP_KEY, MASTER_KEY, HOOK_KEY
    if not (app_key or master_key):
        raise RuntimeError('app_key or master_key must be specified')
    APP_ID = app_id
    APP_KEY = app_key
    MASTER_KEY = master_key
    HOOK_KEY = hook_key if hook_key else os.environ.get('LEANCLOUD_APP_HOOK_KEY')
|
def datetime_to_str(dt='now', no_fractions=False):
    """Render a unix timestamp in ISO8601 syntax, Z notation.

    The timestamp is treated as UTC. When a fraction of a second is
    present, six decimal digits are emitted.

    Special cases:
    - called with no argument, the current time is rendered;
    - ``None`` in yields ``None`` out.

    :param dt: unix timestamp, 'now', or None
    :param no_fractions: when True, truncate to whole seconds
    """
    if dt is None:
        return None
    if dt == 'now':
        dt = time.time()
    if no_fractions:
        dt = int(dt)
    else:
        # Nudge upward so microsecond rounding is stable.
        dt += 0.0000001
    return datetime.utcfromtimestamp(dt).isoformat() + 'Z'
|
def can_create_objectives(self):
    """Tests if this user can create Objectives.

    A return of true does not guarantee successful authorization. A
    return of false indicates that it is known creating an Objective
    will result in a PermissionDenied. This is intended as a hint to an
    application that may opt not to offer create operations to an
    unauthorized user.

    return: (boolean) - false if Objective creation is not authorized,
        true otherwise
    compliance: mandatory - This method must be implemented.
    """
    url_path = construct_url('authorization', bank_id=self._catalog_idstr)
    hints = self._get_request(url_path)['objectiveHints']
    return hints['canCreate']
|
def validate_input(self, validation_definition):
    """Externally validate that the configured min is less than max.

    If this method does not raise, the input is assumed valid; otherwise
    splunkd reports the exception message as a configuration error.
    splunkd calls the modular input with --validate-arguments (feeding XML
    on stdin) for each configured instance and whenever its configuration
    is edited.

    :param validation_definition: a ValidationDefinition object
    :raises ValueError: when min >= max (or a parameter is not numeric)
    """
    # Get the parameters from the ValidationDefinition object,
    # then typecast the values as floats.
    minimum = float(validation_definition.parameters["min"])
    maximum = float(validation_definition.parameters["max"])
    if minimum >= maximum:
        # BUG FIX: the original "... % minimum, maximum" applied % only to
        # `minimum` (and passed `maximum` as a second ValueError argument),
        # raising TypeError instead of the intended message. The format
        # arguments must be a tuple.
        raise ValueError(
            "min must be less than max; found min=%f, max=%f"
            % (minimum, maximum))
|
def render_hidden(name, value):
    """Render ``value`` as a hidden form widget.

    Lists are rendered as multiple hidden inputs; anything else as one.
    """
    widget = MultipleHiddenInput() if isinstance(value, list) else HiddenInput()
    return widget.render(name, value)
|
def app_token(vault_client, app_id, user_id):
    """Return a vault token based on the app and user id.

    :raises aomi.exceptions.AomiCredentials: when the response carries
        no client token.
    """
    resp = vault_client.auth_app_id(app_id, user_id)
    # Guard clause: anything other than a well-formed auth payload is invalid.
    if 'auth' not in resp or 'client_token' not in resp['auth']:
        raise aomi.exceptions.AomiCredentials('invalid apptoken')
    return resp['auth']['client_token']
|
def _report_container_count(self, containers_by_id):
    """Report the number of containers per state (excluded ones skipped)."""
    metric = FUNC_MAP[GAUGE][self.use_histogram]
    per_state = defaultdict(int)
    for ctr in containers_by_id.values():
        if self._is_container_excluded(ctr):
            continue
        per_state[ctr.get('State', '')] += 1
    for state, count in per_state.items():
        # Skip containers whose state string is empty.
        if state:
            metric(self, 'docker.container.count', count,
                   tags=['container_state:%s' % state.lower()])
|
def recv_file_from_host(src_file, dst_filename, filesize, dst_mode='wb'):
    """Function which runs on the pyboard. Matches up with send_file_to_remote.

    Receives ``filesize`` bytes over the REPL's stdin and writes them to
    ``dst_filename`` on the board, acking each chunk with 0x06 as flow
    control. When HAS_BUFFER is false, the sender hexlifies the data, so
    twice as many characters arrive and are unhexlified before writing.
    Returns True on success, False on any error (``src_file`` is unused
    here; it mirrors the sender's signature).
    """
    import sys
    import ubinascii  # MicroPython binascii; needed for the hex-encoded path
    if HAS_BUFFER:
        try:
            import pyb
            usb = pyb.USB_VCP()
        except:
            try:
                import machine
                usb = machine.USB_VCP()
            except:
                # No USB VCP available on this port; proceed without it.
                usb = None
        if usb and usb.isconnected():
            # We don't want 0x03 bytes in the data to be interpreted as a Control-C
            # This gets reset each time the REPL runs a line, so we don't need to
            # worry about resetting it ourselves
            usb.setinterrupt(-1)
    try:
        with open(dst_filename, dst_mode) as dst_file:
            bytes_remaining = filesize
            if not HAS_BUFFER:
                bytes_remaining *= 2
            # hexlify makes each byte into 2
            buf_size = BUFFER_SIZE
            write_buf = bytearray(buf_size)
            read_buf = bytearray(buf_size)
            while bytes_remaining > 0:  # Send back an ack as a form of flow control
                sys.stdout.write('\x06')
                read_size = min(bytes_remaining, buf_size)
                buf_remaining = read_size
                buf_index = 0
                while buf_remaining > 0:
                    if HAS_BUFFER:
                        bytes_read = sys.stdin.buffer.readinto(read_buf, read_size)
                    else:
                        bytes_read = sys.stdin.readinto(read_buf, read_size)
                    if bytes_read > 0:
                        # NOTE(review): the slice end is ``bytes_read`` rather than
                        # ``buf_index + bytes_read`` -- looks off whenever a chunk
                        # arrives in more than one read; confirm against the
                        # sender's chunking before changing.
                        write_buf[buf_index:bytes_read] = read_buf[0:bytes_read]
                        buf_index += bytes_read
                        buf_remaining -= bytes_read
                if HAS_BUFFER:
                    dst_file.write(write_buf[0:read_size])
                else:
                    # Data arrived hex-encoded; decode before writing.
                    dst_file.write(ubinascii.unhexlify(write_buf[0:read_size]))
                bytes_remaining -= read_size
            return True
    except:
        # Any failure (I/O, decode) is reported to the host as False.
        return False
|
def cancel_orders(self, market_id, instructions, customer_ref=None):
    """Cancel all bets OR cancel all bets on a market OR fully or
    partially cancel particular orders on a market.

    :param str market_id: If not supplied all bets are cancelled
    :param list instructions: List of `CancelInstruction` objects
    :param str customer_ref: Optional order identifier string
    """
    # utils.get_kwargs(locals()) snapshots this function's parameters as the
    # request kwargs, so no local variables may be introduced before this
    # call -- they would leak into the request payload.
    return self.make_api_request(
        'Sports', 'cancelOrders', utils.get_kwargs(locals()),
        model=models.CancelExecutionReport,
    )
|
def _register_update(self, replot=False, fmt=None, force=False, todefault=False):
    """Register new formatoptions for updating.

    Parameters
    ----------
    replot : bool
        Boolean that determines whether the data specific formatoptions
        shall be updated in any case or not. Note, if `dims` is not empty
        or any coordinate keyword is in ``**kwargs``, this will be set to
        True automatically.
    fmt : dict, optional
        Keys may be any valid formatoption of the formatoptions in the
        :attr:`plotter`. Defaults to an empty mapping.
    force : str, list of str or bool
        If formatoption key (i.e. string) or list of formatoption keys,
        they are definitely updated whether they changed or not.
        If True, all the given formatoptions in this call of the
        :meth:`update` method are updated.
    todefault : bool
        If True, all changed formatoptions (except the registered ones)
        are updated to their default value as stored in the
        :attr:`~psyplot.plotter.Plotter.rc` attribute.

    See Also
    --------
    start_update
    """
    # BUG FIX: ``fmt={}`` was a shared mutable default argument; downstream
    # code receiving it could mutate the default for every later call.
    if fmt is None:
        fmt = {}
    self.replot = self.replot or replot
    if self.plotter is not None:
        self.plotter._register_update(replot=self.replot, fmt=fmt,
                                      force=force, todefault=todefault)
|
def _parse_command_line():
    """Configure the argument parser and parse our command line flags."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--portserver_static_pool',
        type=str,
        default='15000-24999',
        help='Comma separated N-P Range(s) of ports to manage (inclusive).')
    parser.add_argument(
        '--portserver_unix_socket_address',
        type=str,
        default='@unittest-portserver',
        help='Address of AF_UNIX socket on which to listen (first @ is a NUL).')
    parser.add_argument(
        '--verbose',
        action='store_true',
        default=False,
        help='Enable verbose messages.')
    parser.add_argument(
        '--debug',
        action='store_true',
        default=False,
        help='Enable full debug messages.')
    return parser.parse_args(sys.argv[1:])
|
def Nu_Kitoh(Re, Pr, H=None, G=None, q=None):
    r'''Calculates internal convection Nusselt number for turbulent vertical
    upward flow in a pipe under supercritical conditions according to [1]_,
    also shown in [2]_, [3]_ and [4]_. Depends on fluid enthalpy, mass flux,
    and heat flux.

    .. math::
        Nu_b = 0.015Re_b^{0.85} Pr_b^m

        m = 0.69 - \frac{81000}{q_{dht}} + f_c q

        q_{dht} = 200 G^{1.2}

        f_c = 2.9\times10^{-8} + \frac{0.11}{q_{dht}} \text{ for }
        H_b < 1500 \text{ kJ/kg}

        f_c = -8.7\times10^{-8} - \frac{0.65}{q_{dht}} \text{ for }
        1500 \text{ kJ/kg} < H_b < 3300 \text{ kJ/kg}

        f_c = -9.7\times10^{-7} + \frac{1.3}{q_{dht}} \text{ for }
        H_b > 3300 \text{ kJ/kg}

    Parameters
    ----------
    Re : float
        Reynolds number with bulk fluid properties, [-]
    Pr : float
        Prandtl number with bulk fluid properties, [-]
    H : float, optional
        Enthalpy of water (if the fluid is water), [J/kg]
    G : float, optional
        Mass flux of the fluid, [kg/m^2/s]
    q : float, optional
        Heat flux to wall, [W/m^2]

    Returns
    -------
    Nu : float
        Nusselt number, [-]

    Notes
    -----
    The reference point for the enthalpy values is not stated in [1]_. The
    upper and lower enthalpy limits for this correlation are 4000 kJ/kg and
    0 kJ/kg, but these are not enforced in this function.

    If not all of H, G, and q are provided, the correlation is used without
    the correction.

    This correlation was ranked 6th best in [3]_, and found 4th best for
    enhanced heat transfer in [2]_ with a MAD of 12.3%. For the data used
    to develop the correlation, G varied from 100-1750 kg/m^2/s, q varied
    from 0 to 1800 kW/m^2, and bulk temperature varied from 20 to 550
    degrees Celsius. This correlation does not have realistic behavior for
    values outside those used in the study, and should not be used.

    Examples
    --------
    >>> Nu_Kitoh(1E5, 1.2, 1.3E6, 1500, 5E6)
    331.80234139591306

    References
    ----------
    .. [1] Kitoh, Kazuaki, Seiichi Koshizuka, and Yoshiaki Oka. "Refinement of
       Transient Criteria and Safety Analysis for a High-Temperature Reactor
       Cooled by Supercritical Water." Nuclear Technology 135, no. 3
       (September 1, 2001): 252-64.
    .. [2] Chen, Weiwei, Xiande Fang, Yu Xu, and Xianghui Su. "An Assessment of
       Correlations of Forced Convection Heat Transfer to Water at
       Supercritical Pressure." Annals of Nuclear Energy 76 (February 2015):
       451-60. doi:10.1016/j.anucene.2014.10.027.
    .. [3] Yu, Jiyang, Baoshan Jia, Dan Wu, and Daling Wang. "Optimization of
       Heat Transfer Coefficient Correlation at Supercritical Pressure Using
       Genetic Algorithms." Heat and Mass Transfer 45, no. 6 (January 8, 2009):
       757-66. doi:10.1007/s00231-008-0475-4.
    .. [4] Jaeger, Wadim, Victor Hugo Sanchez Espinoza, and Antonio Hurtado.
       "Review and Proposal for Heat Transfer Predictions at Supercritical
       Water Conditions Using Existing Correlations and Experiments." Nuclear
       Engineering and Design 241, no. 6 (June 2011): 2184-2203.
       doi:10.1016/j.nucengdes.2011.03.022.
    '''
    if H and G and q:
        # Deteriorated-heat-transfer threshold heat flux, W/m^2.
        q_dht = 200.*G**1.2
        # Enthalpy-dependent correction factor.
        if H < 1.5E6:
            fc = 2.9E-8 + 0.11/q_dht
        elif H <= 3.3E6:
            fc = -8.7E-8 - 0.65/q_dht
        else:
            fc = -9.7E-7 + 1.3/q_dht
        m = 0.69 - 81000./q_dht + fc*q
    else:
        # Not enough information for the correction; plain exponent.
        m = 0.69
    return 0.015*Re**0.85*Pr**m
|
def create_profile(hostname, username, password, profile_type, name, **kwargs):
    r'''A function to connect to a bigip device and create a profile.

    hostname
        The host/address of the bigip device
    username
        The iControl REST username
    password
        The iControl REST password
    profile_type
        The type of profile to create
    name
        The name of the profile to create
    kwargs
        [arg=val]...
        Consult the F5 BIGIP user guide for specific options for each
        profile type. Typically, tmsh arg names are used.
        Special characters ``|``, ``,`` and ``:`` must be escaped with
        ``\`` when used within strings.
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    if __opts__['test']:
        return _test_output(ret, 'create', params={
            'hostname': hostname,
            'username': username,
            'password': password,
            'profile_type': profile_type,
            'name': name,
        })
    # Is this profile currently configured?
    existing = __salt__['bigip.list_profile'](hostname, username, password,
                                              profile_type, name)
    if existing['code'] == 200:
        # Already present -- nothing to do.
        ret['result'] = True
        ret['comment'] = 'A profile by this name currently exists. No change made.'
        return ret
    if existing['code'] == 404:
        # Not present -- attempt the creation.
        response = __salt__['bigip.create_profile'](hostname, username, password,
                                                    profile_type, name, **kwargs)
        if response['code'] != 200:
            return _load_result(response, ret)
        ret['result'] = True
        ret['changes'] = {'old': {}, 'new': response['content']}
        ret['comment'] = 'Profile was successfully created.'
        return ret
    # Anything else from the list call is an error.
    return _load_result(existing, ret)
|
def iterate_dictionary(d, path, squash_single=False):
    """Breadth-first search of a dict along a slash-delimited path like "A/B/C/D".

    Each node may be (1) an arbitrary non-list, non-dict object, (2) a dict,
    or (3) a list of arbitrary objects. Lists are flattened at every level;
    only dict items are expanded further, and non-dict items are returned
    only when they sit at the leaf (search-path) level.

    Returns
    -------
    1) The original dict if ``path`` is an empty string.
    2) A list of all items found at the search path. If ``squash_single``
       is True and exactly one item was found, that item itself is
       returned instead of a one-element list.
    3) None when nothing is found at the search path (or traversal failed).
    """
    # BUG FIX: ''.split('/') yields [''] (never an empty list), so the
    # documented "empty path returns the original dict" case could never
    # trigger; test the path string directly instead.
    if not path:
        return d
    path_parts = path.split("/")
    found = []
    try:
        frontier = [d]  # BFS frontier of dict nodes at the current depth
        for depth, part in enumerate(path_parts):
            next_frontier = []
            for node in frontier:
                if part not in node:
                    continue
                value = node[part]
                # Lists are flattened; scalars/dicts are treated as one item.
                candidates = value if isinstance(value, list) else [value]
                for item in candidates:
                    if depth < len(path_parts) - 1:
                        # Not yet at the leaf level: only dicts are expanded.
                        if isinstance(item, dict):
                            next_frontier.append(item)
                    else:
                        # Leaf node at the desired path.
                        found.append(item)
            frontier = next_frontier
    except Exception:
        # Traversal hit a non-mapping node; treat as "not found".
        return None
    if not found:
        return None
    if squash_single and len(found) == 1:
        return found[0]
    return found
|
def _aggregate_one_result(self, sock_info, slave_ok, cmd, collation=None, session=None):
    """Internal helper to run an aggregate that returns a single result."""
    result = self._command(
        sock_info, cmd, slave_ok,
        codec_options=self.__write_response_codec_options,
        read_concern=self.read_concern,
        collation=collation,
        session=session)
    first_batch = result['cursor']['firstBatch']
    # An empty first batch means the aggregation produced no document.
    if not first_batch:
        return None
    return first_batch[0]
|
def one(self, default=None, as_dict=False, as_ordereddict=False):
    """Return the single record in the RecordCollection, or ``default``.

    Ensures the collection holds at most one record. If ``default`` is an
    instance or subclass of Exception, ``first`` raises it instead of
    returning it.
    """
    try:
        # Probe for a second row; IndexError means zero or one row.
        self[1]
    except IndexError:
        return self.first(default=default, as_dict=as_dict,
                          as_ordereddict=as_ordereddict)
    raise ValueError('RecordCollection contained more than one row. '
                     'Expects only one row when using '
                     'RecordCollection.one')
|
def adjust_locations(ast_node, first_lineno, first_offset):
    """Shift source locations of *ast_node* and all of its descendants.

    Line numbers are offset so that original line 1 maps to
    ``first_lineno``; column offsets are shifted by ``first_offset``, but
    only for nodes that sat on the original first line.
    """
    shift = first_lineno - 1

    def visit(node):
        if 'lineno' in node._attributes:
            # Columns only move for nodes on the original first line.
            if node.lineno == 1:
                node.col_offset = node.col_offset + first_offset
            node.lineno = node.lineno + shift
        for kid in iter_child_nodes(node):
            visit(kid)

    visit(ast_node)
|
def _check_logged_user ( self ) :
"""Check if a user is logged . Otherwise , an error is raised ."""
|
if not self . _env or not self . _password or not self . _login :
raise error . InternalError ( "Login required" )
|
def generate(env):
    """Add Builders and construction variables for SunPRO C++."""
    path, cxx, shcxx, version = get_cppc(env)
    if path:
        # Prefer the compilers found on the detected installation path.
        cxx = os.path.join(path, cxx)
        shcxx = os.path.join(path, shcxx)
    cplusplus.generate(env)
    for key, value in (
        ('CXX', cxx),
        ('SHCXX', shcxx),
        ('CXXVERSION', version),
        ('SHCXXFLAGS', SCons.Util.CLVar('$CXXFLAGS -KPIC')),
        ('SHOBJPREFIX', 'so_'),
        ('SHOBJSUFFIX', '.o'),
    ):
        env[key] = value
|
def get_default_ca_certs():
    """Return a system path holding CA certificates, or None.

    The first existing candidate path is cached on the function object,
    so the filesystem is probed only once per process.
    """
    # pylint: disable=protected-access
    if not hasattr(get_default_ca_certs, '_path'):
        found = None
        for candidate in get_default_ca_cert_paths():
            if os.path.exists(candidate):
                found = candidate
                break
        get_default_ca_certs._path = found
    return get_default_ca_certs._path
|
def has_child_logs(self, log_id):
    """Tests if a log has any children.

    arg:    log_id (osid.id.Id): the ``Id`` of a log
    return: (boolean) - ``true`` if the ``log_id`` has children,
            ``false`` otherwise
    raise:  NotFound - ``log_id`` is not found
    raise:  NullArgument - ``log_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinHierarchySession.has_child_bins
    catalog_session = self._catalog_session
    if catalog_session is None:
        return self._hierarchy_session.has_children(id_=log_id)
    return catalog_session.has_child_catalogs(catalog_id=log_id)
|
def certify_date(value, required=True):
    """Certifier for datetime.date values.

    :param value:
        The value to be certified.
    :param bool required:
        Whether the value can be `None`. Defaults to True.
    :raises CertifierTypeError:
        The type is invalid
    """
    if certify_required(value=value, required=required, ):
        return
    if not isinstance(value, date):
        # BUGFIX: the error message previously contained a mis-encoded
        # character ("date\u2202"); fixed to plain "date".
        raise CertifierTypeError(
            message="expected timestamp (date), but value is of type {cls!r}".format(
                cls=value.__class__.__name__),
            value=value,
            required=required,
        )
|
def updateNodeCapabilitiesResponse(self, nodeId, node, vendorSpecific=None):
    """CNRegister.updateNodeCapabilities(session, nodeId, node) → boolean
    https://releases.dataone.org/online/api-documentation-v2.0.1/apis/CN_AP
    Is.html#CNRegister.updateNodeCapabilities.

    Args:
      nodeId: identifier of the node whose capabilities are updated.
      node: node description object; serialized to XML for the request body.
      vendorSpecific: optional extra request headers.

    Returns:
      The raw PUT response.
    """
    fields = {'node': ('node.xml', node.toxml('utf-8'))}
    return self.PUT(['node', nodeId], fields=fields, headers=vendorSpecific)
|
def plot_ts(fignum, dates, ts):
    """plot the geomagnetic polarity time scale

    Parameters
    __________
    fignum : matplotlib figure number
    dates : bounding dates for plot
    ts : time scale ck95, gts04, or gts12
    """
    # Wide, short figure suited to a horizontal polarity bar.
    vertical_plot_init(fignum, 10, 3)
    # TS: chron boundary ages; Chrons: (name, age) label pairs.
    TS, Chrons = pmag.get_ts(ts)
    p = 1  # parity counter: p % 2 alternates the bar between 0 and 1
    X, Y = [], []
    for d in TS:
        if d <= dates[1]:
            if d >= dates[0]:
                if len(X) == 0:
                    # First in-range boundary: start the trace at the
                    # preceding boundary so the leading chron is drawn.
                    ind = TS.index(d)
                    X.append(TS[ind - 1])
                    Y.append(p % 2)
                # Vertical step at this boundary: same x twice with the
                # parity flipped in between.
                X.append(d)
                Y.append(p % 2)
                p += 1
                X.append(d)
                Y.append(p % 2)
        else:
            # Past the requested window: clamp the trace to dates[1].
            X.append(dates[1])
            Y.append(p % 2)
    plt.plot(X, Y, 'k')
    # Frame the bar with white vertical/horizontal boundary lines.
    plot_vs(fignum, dates, 'w', '-')
    plot_hs(fignum, [1.1, -.1], 'w', '-')
    plt.xlabel("Age (Ma): " + ts)
    isign = -1
    for c in Chrons:
        # Alternate labels below (-.1) and above (1.05) the bar.
        off = -.1
        isign = -1 * isign
        if isign > 0:
            off = 1.05
        # Only label chrons whose age falls inside the plotted range.
        if c[1] >= X[0] and c[1] < X[-1]:
            plt.text(c[1] - .2, off, c[0])
    return
|
def geom(self):
    """Geometry information.

    :class:`_Geometry` instance holding geometry information. It is
    issued from binary files holding field information. It is set to
    None if not available for this time step.
    """
    # Lazily read the header the first time it is requested; the
    # UNDETERMINED sentinel distinguishes "not read yet" from "absent".
    if self._header is UNDETERMINED:
        binfiles = self.step.sdat.binfiles_set(self.step.isnap)
        if binfiles:
            # Any one binary file of the snapshot carries the header.
            self._header = stagyyparsers.fields(binfiles.pop(), only_header=True)
        elif self.step.sdat.hdf5:
            # Fall back to the HDF5/XDMF output when no binary file exists.
            xmf = self.step.sdat.hdf5 / 'Data.xmf'
            self._header, _ = stagyyparsers.read_geom_h5(xmf, self.step.isnap)
        else:
            # No data source at all for this step.
            self._header = None
    # Build the geometry object once from the cached header.
    if self._geom is UNDETERMINED:
        if self._header is None:
            self._geom = None
        else:
            self._geom = _Geometry(self._header, self.step.sdat.par)
    return self._geom
|
def is_cms_app(app_name):
    """Return whether the given application is a CMS app."""
    patterns = appsettings.FLUENT_DASHBOARD_CMS_APP_NAMES
    return any(fnmatch(app_name, pattern) for pattern in patterns)
|
def verify_address(self, addr1="", addr2="", city="", fname="", lname="", phone="", province="", postal="", country="", email="", recordID="", freeform=""):
    """verify_address

    Build a JSON request for the Melissa Data contact-verification service
    from the supplied address fields and return the parsed result codes.

    Args:
        addr1/addr2/city/fname/lname/phone/province/postal/country/email/
        recordID/freeform (str): fields forwarded to Melissa Data.

    Returns:
        result, a string containing the result codes from MelissaData
    """
    record = {
        "RecordID": recordID,
        "CompanyName": "",
        "FullName": fname + " " + lname,
        "AddressLine1": addr1,
        "AddressLine2": addr2,
        "Suite": "",
        "City": city,
        "State": province,
        "PostalCode": postal,
        "Country": country,
        "PhoneNumber": phone,
        "EmailAddress": email,
        "FreeForm": freeform,
    }
    payload = {
        "TransmissionReference": "",
        "CustomerID": self.custID,
        "Actions": "Check",
        "Options": "",
        "Columns": "",
        "Records": [record],
    }
    self.country = country
    response = requests.post(
        "https://personator.melissadata.net/v3/WEB/ContactVerify/doContactVerify",
        data=json.dumps(payload),
    )
    return self.parse_results(json.loads(response.text))
|
def permanently_delete(self, tickets):
    """Permanently delete ticket. `See Zendesk API docs <https://developer.zendesk.com/rest_api/docs/support/tickets#delete-ticket-permanently>`_

    Ticket should be softly deleted first with regular `delete` method.

    :param tickets: Ticket object or list of tickets objects
    :return: JobStatus object
    """
    # BUGFIX: ``collections.Iterable`` was removed in Python 3.10; the ABC
    # lives in ``collections.abc``.
    from collections.abc import Iterable

    endpoint_kwargs = dict()
    if isinstance(tickets, Iterable):
        endpoint_kwargs['destroy_ids'] = [i.id for i in tickets]
    else:
        endpoint_kwargs['id'] = tickets.id
    url = self._build_url(self.endpoint.deleted(**endpoint_kwargs))
    deleted_ticket_job_id = self._delete(url)
    self.cache.delete(tickets)
    return deleted_ticket_job_id
|
def ball_count(cls, ball_tally, strike_tally, pitch_res):
    """Ball/Strike counter.

    :param ball_tally: current ball tally
    :param strike_tally: current strike tally
    :param pitch_res: pitching result (Retrosheet format)
    :return: ball count, strike count
    """
    balls, strikes = ball_tally, strike_tally
    if pitch_res == "B" and ball_tally < 4:
        # A ball, capped at four.
        balls += 1
    elif pitch_res in ("S", "C", "X") and strike_tally < 3:
        # Swinging/called strike or ball in play, capped at three.
        strikes += 1
    elif pitch_res == "F" and strike_tally < 2:
        # Fouls never push the count past two strikes.
        strikes += 1
    return balls, strikes
|
def implicitly_declare_ro(instructions: List[AbstractInstruction]):
    """Implicitly declare a register named ``ro`` for backwards compatibility with Quil 1.

    Quil 1 had a single un-named hunk of classical memory; Quil 2 requires
    declared, named registers. ``MEASURE 0 [0]`` becomes ``MEASURE 0 ro[0]``
    plus a ``DECLARE ro BIT[n]``. When a program still measures into bare
    ``ro`` offsets but contains no DECLARE, this function prepends the
    missing ``ro`` declaration (with a deprecation warning). If the program
    already declares memory, or measures into any other named register, it
    is returned unchanged.

    This backwards-compatibility shim will be removed in future releases of
    PyQuil. Please DECLARE all memory including ``ro``.
    """
    ro_offsets: List[int] = []
    for instruction in instructions:
        if isinstance(instruction, Declare):
            # The user declared memory themselves; they own all
            # declarations and memory references.
            return instructions
        if not isinstance(instruction, Measurement):
            continue
        reg = instruction.classical_reg
        if reg is None:
            continue
        if reg.name != 'ro':
            # A register named something other than "ro": the user owns
            # all declarations and memory references.
            return instructions
        ro_offsets.append(reg.offset)
    if not ro_offsets:
        return instructions
    warnings.warn("Please DECLARE all memory. I'm adding a declaration for the `ro` register, "
                  "but I won't do this for you in the future.")
    patched = instructions.copy()
    patched.insert(0, Declare(name='ro', memory_type='BIT',
                              memory_size=max(ro_offsets) + 1))
    return patched
|
def bfloat16_activations_var_getter(getter, *args, **kwargs):
    """A custom getter keeping parameters float32 with bfloat16 activations.

    Args:
      getter: custom getter
      *args: arguments
      **kwargs: keyword arguments

    Returns:
      variables with the correct dtype.

    Raises:
      KeyError: if "dtype" is not provided as a kwarg.
    """
    requested_dtype = kwargs["dtype"]
    if requested_dtype == tf.bfloat16:
        # Store the variable itself in float32 even when bfloat16 is requested.
        kwargs["dtype"] = tf.float32
    var = getter(*args, **kwargs)
    # Guard the cast: batch norm assigns directly to the value returned by
    # this getter, and a cast result is not a variable so it cannot be
    # assigned. Batch-norm variables are always fp32, so the cast never
    # triggers for them.
    if var.dtype.base_dtype == requested_dtype:
        return var
    return tf.cast(var, requested_dtype)
|
def subarc_between_points(self, p_from=None, p_to=None):
    '''Given two points on the arc, extract a sub-arc between those points.
    No check is made to verify the points are actually on the arc.
    It is basically a wrapper around subarc(point_as_angle(p_from), point_as_angle(p_to)).
    Either p_from or p_to may be None to denote first or last arc endpoints.

    >>> a = Arc((0, 0), 1, 0, 90, True)
    >>> a.subarc_between_points((1, 0), (np.cos(np.pi/4), np.sin(np.pi/4)))
    Arc([0.000, 0.000], 1.000, 0.000, 45.000, True, degrees=45.000)
    >>> a.subarc_between_points(None, None)
    Arc([0.000, 0.000], 1.000, 0.000, 90.000, True, degrees=90.000)
    >>> a.subarc_between_points((np.cos(np.pi/4), np.sin(np.pi/4)))
    Arc([0.000, 0.000], 1.000, 45.000, 90.000, True, degrees=45.000)
    '''
    # Convert each given point to its angle; None passes through to mean
    # "keep that endpoint of the arc".
    angles = [None if p is None else self.point_as_angle(p)
              for p in (p_from, p_to)]
    return self.subarc(angles[0], angles[1])
|
def plot_fit(self, **kwargs):
    """Plots the fit of the model.

    Returns
    -------
    None (plots data and the fit)
    """
    import matplotlib.pyplot as plt
    import seaborn as sns

    figsize = kwargs.get('figsize', (10, 7))
    if self.latent_variables.estimated is False:
        raise Exception("No latent variables estimated!")
    date_index = self.index[self.lags:self.data[0].shape[0]]
    mu, Y = self._model(self.latent_variables.get_z_values())
    # One figure per data series, showing the data and the filtered fit.
    for series_idx in range(Y.shape[0]):
        plt.figure(figsize=figsize)
        plt.plot(date_index, Y[series_idx], label='Data ' + str(series_idx))
        plt.plot(date_index, mu[series_idx], label='Filter' + str(series_idx), c='black')
        plt.title(self.data_name[series_idx])
        plt.legend(loc=2)
        plt.show()
|
def getPysamVariants(self, referenceName, startPosition, endPosition):
    """Returns an iterator over the pysam VCF records corresponding to the
    specified query."""
    if referenceName not in self._chromFileMap:
        # Unknown reference: the generator yields nothing.
        return
    varFileName = self._chromFileMap[referenceName]
    referenceName, startPosition, endPosition = self.sanitizeVariantFileFetch(
        referenceName, startPosition, endPosition)
    yield from self.getFileHandle(varFileName).fetch(
        referenceName, startPosition, endPosition)
|
def _get_column_type(self, column):
    """Return 'numeric' if the column is of type integer or
    real, otherwise return 'string'."""
    numeric_types = (ogr.OFTInteger, ogr.OFTReal)
    return 'numeric' if column.GetType() in numeric_types else 'string'
|
def crop(self, height, width, crop_ci, crop_cj):
    """Convert to new camera intrinsics for crop of image from original camera.

    Parameters
    ----------
    height : int
        height of crop window
    width : int
        width of crop window
    crop_ci : int
        row of crop window center
    crop_cj : int
        col of crop window center

    Returns
    -------
    :obj:`CameraIntrinsics`
        camera intrinsics for cropped window
    """
    # Shift the principal point into the cropped window's pixel frame;
    # focal lengths and skew are unchanged by cropping.
    half_w = float(width - 1) / 2
    half_h = float(height - 1) / 2
    return CameraIntrinsics(
        frame=self.frame,
        fx=self.fx,
        fy=self.fy,
        skew=self.skew,
        cx=self.cx + half_w - crop_cj,
        cy=self.cy + half_h - crop_ci,
        height=height,
        width=width,
    )
|
def from_file(self, fname, binary=False, **kwargs):
    """Initialize the class instance from gridded data in a file.

    Usage
    -----
    x = SHGrid.from_file(fname, [binary, **kwargs])

    Returns
    -------
    x : SHGrid class instance

    Parameters
    ----------
    fname : str
        The filename containing the gridded data. For text files (default)
        the file is read using the numpy routine loadtxt(), whereas for
        binary files, the file is read using numpy.load(). The dimensions
        of the array must be nlon=nlat or nlon=2*nlat for Driscoll and
        Healy grids, or nlon=2*nlat-1 for Gauss-Legendre Quadrature grids.
    binary : bool, optional, default = False
        If False, read a text file. If True, read a binary 'npy' file.
    **kwargs : keyword arguments, optional
        Keyword arguments of numpy.loadtxt() or numpy.load().
    """
    if binary is False:
        data = _np.loadtxt(fname, **kwargs)
    elif binary is True:
        data = _np.load(fname, **kwargs)
    else:
        # BUGFIX: the original used the '{:s}' format code, which raises an
        # unrelated "Unknown format code" error for non-str arguments and
        # masks the intended message; '{!s}' works for any type.
        raise ValueError('binary must be True or False. '
                         'Input value is {!s}'.format(binary))
    kind = 'complex' if _np.iscomplexobj(data) else 'real'
    nlat, nlon = data.shape[0], data.shape[1]
    if nlon == nlat or nlon == 2 * nlat:
        grid = 'DH'
    elif nlon == 2 * nlat - 1:
        grid = 'GLQ'
    else:
        raise ValueError('Input grid must be dimensioned as ' +
                         '(nlat, nlon). For DH grids, nlon = nlat or ' +
                         'nlon = 2 * nlat. For GLQ grids, nlon = ' +
                         '2 * nlat - 1. Input dimensions are nlat = ' +
                         '{:d}, nlon = {:d}'.format(nlat, nlon))
    # Dispatch to the concrete subclass matching the data kind and grid.
    for cls in self.__subclasses__():
        if cls.istype(kind) and cls.isgrid(grid):
            return cls(data)
|
def set_group_member_unorphan(self, member_id, unorphan_info):
    """Make an orphan member trigger into an group trigger.

    :param member_id: Orphan Member Trigger id to be assigned into a group trigger
    :param unorphan_info: Only context and dataIdMap are used when changing back to a non-orphan.
    :type unorphan_info: UnorphanMemberInfo
    :return: Trigger for the group
    """
    data = self._serialize_object(unorphan_info)
    # BUGFIX: the URL was previously assigned to ``data``, clobbering the
    # serialized payload and leaving ``url`` undefined (NameError).
    url = self._service_url(['triggers', 'groups', 'members', member_id, 'unorphan'])
    return Trigger(self._put(url, data))
|
def primary_from_id(self, tax_id):
    """Returns primary taxonomic name associated with tax_id"""
    query = select(
        [self.names.c.tax_name],
        and_(self.names.c.tax_id == tax_id, self.names.c.is_primary),
    )
    row = query.execute().fetchone()
    if row:
        return row[0]
    raise ValueError('value "{}" not found in names.tax_id'.format(tax_id))
|
def load_collided_alias(self):
    """Load (create, if not exist) the collided alias file."""
    # 'w+' creates the alias config file if it does not exist.
    mode = 'r+' if os.path.exists(GLOBAL_COLLIDED_ALIAS_PATH) else 'w+'
    with open(GLOBAL_COLLIDED_ALIAS_PATH, mode) as alias_file:
        raw = alias_file.read()
    try:
        self.collided_alias = json.loads(raw or '{}')
    except Exception:  # pylint: disable=broad-except
        # A corrupt file falls back to an empty mapping.
        self.collided_alias = {}
|
def _read_header ( stream , decoder , strict = False ) :
"""Read AMF L { Message } header from the stream .
@ type stream : L { BufferedByteStream < pyamf . util . BufferedByteStream > }
@ param decoder : An AMF0 decoder .
@ param strict : Use strict decoding policy . Default is C { False } . Will raise a
L { pyamf . DecodeError } if the data that was read from the stream does not
match the header length .
@ return : A C { tuple } containing the name of the header , a C { bool }
determining if understanding this header is required and the decoded
data .
@ note : Quite what understanding required headers actually means is unknown ."""
|
name_len = stream . read_ushort ( )
name = stream . read_utf8_string ( name_len )
required = bool ( stream . read_uchar ( ) )
data_len = stream . read_ulong ( )
pos = stream . tell ( )
data = decoder . readElement ( )
if strict and pos + data_len != stream . tell ( ) :
raise pyamf . DecodeError ( "Data read from stream does not match header length" )
return ( name , required , data )
|
def _update_table_fallbacks(self, table_toplevels):
    """Updates the fallbacks on all the table elements to make relative table access possible.

    Raises DuplicateKeysError if appropriate.
    """
    if len(self.elements) <= 1:
        return

    def find_parent(toplevel):
        # The parent's qualified name is this entry's name minus its last part.
        prefix = toplevel.name.sub_names[:-1]
        for candidate in table_toplevels:
            if candidate.name.sub_names == prefix:
                return candidate
        return None

    for entry in table_toplevels:
        if not entry.name.is_qualified:
            continue
        parent = find_parent(entry)
        if parent is None:
            continue
        # Register this table under its relative name on the parent.
        child_name = entry.name.without_prefix(parent.name)
        parent.table_element.set_fallback(
            {child_name.sub_names[0]: entry.table_element})
|
def get_child_repositories(self, repository_id):
    """Gets the children of the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` to query
    return: (osid.repository.RepositoryList) - the children of the
            repository
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinHierarchySession.get_child_bins
    if self._catalog_session is not None:
        return self._catalog_session.get_child_catalogs(catalog_id=repository_id)
    lookup = RepositoryLookupSession(self._proxy, self._runtime)
    child_ids = list(self.get_child_repository_ids(repository_id))
    return lookup.get_repositories_by_ids(child_ids)
|
def add_op_create_replicated_pool(self, name, replica_count=3, pg_num=None,
                                  weight=None, group=None, namespace=None,
                                  app_name=None, max_bytes=None,
                                  max_objects=None):
    """Adds an operation to create a replicated pool.

    :param name: Name of pool to create
    :type name: str
    :param replica_count: Number of copies Ceph should keep of your data.
    :type replica_count: int
    :param pg_num: Request specific number of Placement Groups to create
                   for pool.
    :type pg_num: int
    :param weight: The percentage of data that is expected to be contained
                   in the pool from the total available space on the OSDs.
                   Used to calculate number of Placement Groups to create
                   for pool. Mutually exclusive with ``pg_num``.
    :type weight: float
    :param group: Group to add pool to
    :type group: str
    :param namespace: Group namespace
    :type namespace: str
    :param app_name: (Optional) Tag pool with application name. Note that
                     there is certain protocols emerging upstream with
                     regard to meaningful application names to use.
                     Examples are ``rbd`` and ``rgw``.
    :type app_name: str
    :param max_bytes: Maximum bytes quota to apply
    :type max_bytes: int
    :param max_objects: Maximum objects quota to apply
    :type max_objects: int
    :raises ValueError: if both ``pg_num`` and ``weight`` are given
    """
    if pg_num and weight:
        raise ValueError('pg_num and weight are mutually exclusive')
    operation = {
        'op': 'create-pool',
        'name': name,
        'replicas': replica_count,
        'pg_num': pg_num,
        'weight': weight,
        'group': group,
        'group-namespace': namespace,
        'app-name': app_name,
        'max-bytes': max_bytes,
        'max-objects': max_objects,
    }
    self.ops.append(operation)
|
def _set_community(self, v, load=False):
    """Setter method for community, mapped from YANG variable /routing_system/route_map/content/set/community (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_community is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_community() directly.
    """
    # Normalize values carrying a YANG union type before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the generated container class; this raises on
        # a type mismatch, which is translated into a descriptive error.
        t = YANGDynClass(v, base=community.community, is_container='container', presence=False, yang_name="community", rest_name="community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP community attribute.', u'cli-full-no': None, u'cli-break-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """community must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=community.community, is_container='container', presence=False, yang_name="community", rest_name="community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP community attribute.', u'cli-full-no': None, u'cli-break-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)""", })
    self.__community = t
    # Notify the parent object of the change, when supported.
    if hasattr(self, '_set'):
        self._set()
def aggressive_tree_merge(odb, tree_shas):
    """:return: list of BaseIndexEntries representing the aggressive merge of the given
        trees. All valid entries are on stage 0, whereas the conflicting ones are left
        on stage 1, 2 or 3, whereas stage 1 corresponds to the common ancestor tree,
        2 to our tree and 3 to 'their' tree.
    :param tree_shas: 1, 2 or 3 trees as identified by their binary 20 byte shas
        If 1 or two, the entries will effectively correspond to the last given tree
        If 3 are given, a 3 way merge is performed"""
    out = []
    # Bind the bound method once; it is called in a tight loop below.
    out_append = out.append
    # one and two way is the same for us, as we don't have to handle an existing
    # index, instrea
    if len(tree_shas) in (1, 2):
        for entry in traverse_tree_recursive(odb, tree_shas[-1], ''):
            out_append(_tree_entry_to_baseindexentry(entry, 0))
        # END for each entry
        return out
    # END handle single tree
    if len(tree_shas) > 3:
        raise ValueError("Cannot handle %i trees at once" % len(tree_shas))
    # three trees
    # Each tuple is (base, ours, theirs); an item is None when the path is
    # absent from that tree. entry[0] is the sha, entry[1] the mode.
    for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ''):
        if base is not None:  # base version exists
            if ours is not None:  # ours exists
                if theirs is not None:
                    # it exists in all branches, if it was changed in both
                    # its a conflict, otherwise we take the changed version
                    # This should be the most common branch, so it comes first
                    if (base[0] != ours[0] and base[0] != theirs[0] and ours[0] != theirs[0]) or (base[1] != ours[1] and base[1] != theirs[1] and ours[1] != theirs[1]):
                        # changed by both
                        out_append(_tree_entry_to_baseindexentry(base, 1))
                        out_append(_tree_entry_to_baseindexentry(ours, 2))
                        out_append(_tree_entry_to_baseindexentry(theirs, 3))
                    elif base[0] != ours[0] or base[1] != ours[1]:
                        # only we changed it
                        out_append(_tree_entry_to_baseindexentry(ours, 0))
                    else:
                        # either nobody changed it, or they did. In either
                        # case, use theirs
                        out_append(_tree_entry_to_baseindexentry(theirs, 0))
                    # END handle modification
                else:
                    if ours[0] != base[0] or ours[1] != base[1]:
                        # they deleted it, we changed it, conflict
                        out_append(_tree_entry_to_baseindexentry(base, 1))
                        out_append(_tree_entry_to_baseindexentry(ours, 2))
                    # else:
                    #   we didn't change it, ignore
                    #   pass
                    # END handle our change
                # END handle theirs
            else:
                if theirs is None:
                    # deleted in both, its fine - its out
                    pass
                else:
                    if theirs[0] != base[0] or theirs[1] != base[1]:
                        # deleted in ours, changed theirs, conflict
                        out_append(_tree_entry_to_baseindexentry(base, 1))
                        out_append(_tree_entry_to_baseindexentry(theirs, 3))
                    # END theirs changed
                    # else:
                    #   theirs didn't change
                    #   pass
                # END handle theirs
            # END handle ours
        else:
            # all three can't be None
            if ours is None:
                # added in their branch
                out_append(_tree_entry_to_baseindexentry(theirs, 0))
            elif theirs is None:
                # added in our branch
                out_append(_tree_entry_to_baseindexentry(ours, 0))
            else:
                # both have it, except for the base, see whether it changed
                if ours[0] != theirs[0] or ours[1] != theirs[1]:
                    out_append(_tree_entry_to_baseindexentry(ours, 2))
                    out_append(_tree_entry_to_baseindexentry(theirs, 3))
                else:
                    # it was added the same in both
                    out_append(_tree_entry_to_baseindexentry(ours, 0))
                # END handle two items
            # END handle heads
        # END handle base exists
    # END for each entries tuple
    return out
|
def anneal(args):
    """%prog anneal agpfile contigs.fasta

    Merge adjacent overlapping contigs and make new AGP file.

    By default it will also anneal lines like these together (unless --nozipshreds):
    scaffold4 1 1608 1 W ca-bacs.5638.frag11.22000-23608 1 1608 -
    scaffold4 1609 1771 2 N 163 scaffold yes paired-ends
    scaffold4 1772 3771 3 W ca-bacs.5638.frag10.20000-22000 1 2000 -

    These are most likely shreds, which we look for based on names.
    """
    p = OptionParser(anneal.__doc__)
    p.set_align(pctid=GoodPct, hitlen=GoodOverlap)
    p.add_option("--hang", default=GoodOverhang, type="int", help="Maximum overhang length [default: %default]")
    p.set_outdir(outdir="outdir")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    agpfile, contigs = args
    outdir = opts.outdir
    if not op.exists(outdir):
        mkdir(outdir)
        # Split the multi-FASTA into one file per contig so pairwise
        # component overlaps can be computed.
        cmd = "faSplit byname {0} {1}/".format(contigs, outdir)
        sh(cmd)
    cutoff = Cutoff(opts.pctid, opts.hitlen, opts.hang)
    logging.debug(str(cutoff))
    agp = AGP(agpfile)
    blastfile = agpfile.replace(".agp", ".blast")
    if not op.exists(blastfile):
        # Compute all pairwise component overlaps up front (expensive).
        populate_blastfile(blastfile, agp, outdir, opts)
    assert op.exists(blastfile)
    logging.debug("File `{0}` found. Start loading.".format(blastfile))
    blast = BlastSlow(blastfile).to_dict()
    annealedagp = "annealed.agp"
    annealedfasta = "annealed.fasta"
    newagp = deepcopy(agp)
    # Maps component id -> CLR (clear range), updated as pairs anneal.
    clrstore = {}
    for a, b, qreverse in agp.iter_paired_components():
        aid = a.component_id
        bid = b.component_id
        pair = (aid, bid)
        if pair in blast:
            bl = blast[pair]
        else:
            # No precomputed hit; run the overlap aligner on demand.
            oopts = get_overlap_opts(aid, bid, qreverse, outdir, opts)
            o = overlap(oopts)
            if not o:
                continue
            bl = o.blastline
        o = Overlap(bl, a.component_span, b.component_span, cutoff, qreverse=qreverse)
        if aid not in clrstore:
            clrstore[aid] = CLR.from_agpline(a)
        if bid not in clrstore:
            clrstore[bid] = CLR.from_agpline(b)
        aclr, bclr = clrstore[aid], clrstore[bid]
        o.print_graphic()
        if o.anneal(aclr, bclr):
            newagp.delete_between(aid, bid, verbose=True)
        if o.otype == 2:  # b ~ a
            # Retry annealing with the components swapped.
            o = o.swapped
            o.print_graphic()
            if o.anneal(bclr, aclr):
                newagp.switch_between(bid, aid, verbose=True)
                newagp.delete_between(bid, aid, verbose=True)
    logging.debug("A total of {0} components with modified CLR.".format(len(clrstore)))
    # Components whose clear range shrank to nothing become gaps.
    for cid, c in clrstore.items():
        if c.is_valid:
            continue
        print("Remove {0}".format(c), file=sys.stderr)
        newagp.convert_to_gap(cid, verbose=True)
    # Update all ranges that has modified clr
    for a in newagp:
        if a.is_gap:
            continue
        aid = a.component_id
        if aid in clrstore:
            c = clrstore[aid]
            a.component_beg = c.start
            a.component_end = c.end
    newagp.print_to_file(annealedagp)
    tidyagp = tidy([annealedagp, contigs])
    build([tidyagp, contigs, annealedfasta])
    return annealedfasta
|
def unset(self, key):
    """Delete object indexed by <key>.

    A missing key is treated as success (nothing to delete); any other
    memcached error propagates to the caller.
    """
    # BUGFIX: converted the Python 2-only ``except X, inst`` syntax to
    # ``except X as inst`` (the old form is a SyntaxError on Python 3),
    # and removed two nested try/except wrappers that only re-raised.
    try:
        self.bucket.delete(key)
    except couchbase.exception.MemcachedError as inst:
        # For some reason the py cb client raises an error when a key
        # isn't found, instead we just want a none value.
        if str(inst) != "Memcached error #1: Not found":
            raise
|
def trace_inspect(self):
    """A decorator that allows to inspect/change the trace data."""
    def register(func):
        # Install the decorated callable as the tracer's inspector hook.
        self.tracer.inspector = func
        return func
    return register
|
def get_data(self, collection):
    """Return serialized list of data objects on collection that user has `view` permission on."""
    visible = self._filter_queryset('view_data', collection.data.all())
    return self._serialize_data(visible)
|
def inserir(self, name):
    """Inserts a new Brand and returns its identifier.

    :param name: Brand name. String with a minimum 3 and maximum of 100 characters

    :return: Dictionary with the following structure:
        {'marca': {'id': < id_brand >}}

    :raise InvalidParameterError: Name is null and invalid.
    :raise NomeMarcaDuplicadoError: There is already a registered Brand with the value of name.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    code, xml = self.submit({'brand': {'name': name}}, 'POST', 'brand/')
    return self.response(code, xml)
|
def validate_access_permission(self, valid_permissions):
    """Verify the connection's access mode against an allow-list.

    :param valid_permissions: Permissions for which access is allowed.
    :type valid_permissions: |list| / |tuple|
    :raises ValueError: If the |attr_mode| is invalid.
    :raises IOError: If the |attr_mode| not in the ``valid_permissions``.
    :raises simplesqlite.NullDatabaseConnectionError: |raises_check_connection|
    """
    self.check_connection()
    mode = self.mode
    if typepy.is_null_string(mode):
        raise ValueError("mode is not set")
    if mode in valid_permissions:
        return
    raise IOError(
        "invalid access: expected-mode='{}', current-mode='{}'".format(
            "' or '".join(valid_permissions), mode
        )
    )
|
def get_payments_of_credit_note_per_page(self, credit_note_id, per_page=1000, page=1):
    """Get payments of credit note per page.

    :param credit_note_id: the credit note id
    :param per_page: How many objects per page. Default: 1000
    :param page: Which page. Default: 1
    :return: list
    """
    # Delegate paging to the generic resource fetcher, filtered by note id.
    query = {'credit_note_id': credit_note_id}
    return self._get_resource_per_page(
        resource=CREDIT_NOTE_PAYMENTS,
        per_page=per_page,
        page=page,
        params=query,
    )
|
def cube(target, pore_diameter='pore.diameter', throat_area='throat.area'):
    r"""Calculates internal surface area of pore bodies assuming they are cubes
    then subtracts the area of the neighboring throats.

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated. This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.
    pore_diameter : string
        The dictionary key to the pore diameter array.
    throat_area : string
        The dictionary key to the throat area array. Throat areas are needed
        since their intersection with the pore are removed from the computation.
    """
    network = target.project.network
    diam = target[pore_diameter]
    # Per-pore lists of adjacent throats (flatten=False keeps one list per pore).
    neighbor_throats = network.find_neighbor_throats(pores=target.Ps, flatten=False)
    # Total throat cross-section touching each pore, to subtract from the cube faces.
    throat_totals = _np.array(
        [_np.sum(network[throat_area][ts]) for ts in neighbor_throats]
    )
    return 6 * diam ** 2 - throat_totals
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.