| signature (string, lengths up to 44.1k) | implementation (string, lengths up to 85.2k) |
|---|---|
def bdd(*keywords):
    """Run tests matching the given keywords."""
    params = _personal_settings().data["params"]
    storybook = _storybook().with_params(**{"python version": params["python version"]})
    storybook.only_uninherited().shortcut(*keywords).play()
|
def _GetSerializedAttributeContainerList ( self , container_type ) :
"""Retrieves a serialized attribute container list .
Args :
container _ type ( str ) : attribute container type .
Returns :
SerializedAttributeContainerList : serialized attribute container list ."""
|
container_list = self . _serialized_attribute_containers . get ( container_type , None )
if not container_list :
container_list = SerializedAttributeContainerList ( )
self . _serialized_attribute_containers [ container_type ] = container_list
return container_list
|
def method_already_there(object_type, method_name, this_class_only=False):
    """Return True if `method_name` is already implemented by `object_type`,
    that is, its implementation differs from the one in `object`.

    :param object_type: the class to inspect
    :param method_name: name of the method to look for
    :param this_class_only: when True, only look at the class' own namespace
        (ignore inherited definitions)
    :return: bool
    """
    if this_class_only:
        # vars(object_type) is equivalent to object_type.__dict__
        return method_name in vars(object_type)
    try:
        candidate = getattr(object_type, method_name)
    except AttributeError:
        return False
    # Inherited-from-object implementations do not count as "already there".
    return candidate is not None and candidate is not getattr(object, method_name, None)
|
def get_many(self, type: Type[T], query: Mapping[str, Any], streaming: bool = False) -> Iterable[T]:
    """Gets a query from the data pipeline, which contains a request for multiple objects.

    1) Extracts the query from the sequence of data sources.
    2) Inserts the results into the data sinks (if appropriate).
    3) Transforms the results into the requested type if it wasn't already.
    4) Inserts the transformed result into any data sinks.

    Args:
        query: The query being requested (contains a request for multiple objects).
        streaming: Specifies whether the results should be returned as a generator (default False).

    Returns:
        The requested objects or a generator of the objects if streaming is True.
    """
    LOGGER.info("Getting SourceHandlers for \"{type}\"".format(type=type.__name__))
    # The handler cache may hold None for types with no conversion path, so a
    # membership check (not .get) distinguishes "unknown" from "known-impossible".
    if type not in self._get_types:
        LOGGER.info("Building new SourceHandlers for \"{type}\"".format(type=type.__name__))
        try:
            built = self._get_handlers(type)
        except NoConversionError:
            built = None
        self._get_types[type] = built
    handlers = self._get_types[type]
    if handlers is None:
        raise NoConversionError("No source can provide \"{type}\"".format(type=type.__name__))
    LOGGER.info("Creating new PipelineContext")
    context = self._new_context()
    LOGGER.info("Querying SourceHandlers for \"{type}\"".format(type=type.__name__))
    # First handler that does not raise NotFoundError wins.
    for handler in handlers:
        try:
            return handler.get_many(query, context, streaming)
        except NotFoundError:
            continue
    raise NotFoundError("No source returned a query result!")
|
def process_response(self, request, response):
    """Disconnects the signal receiver to prevent it from staying active."""
    try:
        signal_duid = threadlocal.auditlog['signal_duid']
    except AttributeError:
        # No auditlog state was set up for this thread; nothing to disconnect.
        return response
    pre_save.disconnect(sender=LogEntry, dispatch_uid=signal_duid)
    return response
|
def attr_list(label=None, kwargs=None, attributes=None):
    """Return assembled DOT attribute list string.

    Sorts ``kwargs`` and ``attributes`` if they are plain dicts (to avoid
    unpredictable order from hash randomization in Python 3 versions).

    >>> attr_list()
    ''
    >>> attr_list('spam spam', kwargs={'eggs': 'eggs', 'ham': 'ham ham'})
    ' [label="spam spam" eggs=eggs ham="ham ham"]'
    >>> attr_list(kwargs={'spam': None, 'eggs': ''})
    ' [eggs=""]'
    """
    body = a_list(label, kwargs, attributes)
    # An empty attribute list renders as nothing at all.
    return ' [%s]' % body if body else ''
|
def isMethodAllowed(self, method):
    """Checks if the analysis can follow the method specified, either if
    the method was assigned directly (by using "Allows manual entry of
    results") or indirectly via Instrument ("Allows instrument entry of
    results") in Analysis Service Edit view.

    :param method: a UID string or a Method object
    :return: True if the analysis can follow the method specified
    :rtype: bool
    """
    # Accept either the UID itself or an object that can report its UID.
    uid = method if isinstance(method, str) else method.UID()
    return uid in self.getAllowedMethodUIDs()
|
def space_exists(args):
    """Determine if the named space exists in the given project (namespace)."""
    # NOTE: the return value is the INVERSE of UNIX exit status semantics
    # (where 0 = good/true, 1 = bad/false), so to check existence in UNIX:
    #   if ! fissfc space_exists blah ; then ... fi
    exists = True
    try:
        response = fapi.get_workspace(args.project, args.workspace)
        fapi._check_response_code(response, 200)
    except FireCloudServerError as err:
        # 404 simply means "not there"; anything else is a real error.
        if err.code != 404:
            raise
        exists = False
    if fcconfig.verbosity:
        verdict = "DOES" if exists else "DOES NOT"
        eprint('Space <%s> %s exist in project <%s>' % (args.workspace, verdict, args.project))
    return exists
|
def get_file(self, key, file):
    """Write contents of key to file.

    Like :meth:`.KeyValueStore.put_file`, this method allows backends to
    implement a specialized function if data needs to be written to disk or
    streamed.

    If *file* is a string, contents of *key* are written to a newly created
    file with the filename *file*. Otherwise, the data will be written using
    the *write* method of *file*.

    :param key: The key to be read
    :param file: Output filename or an object with a *write* method.
    :raises exceptions.ValueError: If the key is not valid.
    :raises exceptions.IOError: If there was a problem reading or writing data.
    :raises exceptions.KeyError: If the key was not found.
    """
    self._check_valid_key(key)
    if not isinstance(file, str):
        return self._get_file(key, file)
    return self._get_filename(key, file)
|
def parsePropertyValue(self):
    """Called when "[" encountered (but not consumed); ends when the next
    property, node, or variation is encountered.  Parses and returns a list
    of property values.  Raises 'PropertyValueParseError' if there is a
    problem."""
    pvlist = []
    # Each loop iteration consumes one "[...]" value starting at self.index.
    while self.index < self.datalen:
        match = self.rePropertyStart.match(self.data, self.index)
        if match:
            self.index = match.end()
            v = ""  # accumulated, unescaped value text
            # Scan for escaped characters (using '\'), unescape them
            # (remove linebreaks).
            mend = self.rePropertyEnd.search(self.data, self.index)
            mesc = self.reEscape.search(self.data, self.index)
            # Handle every escape that occurs before the closing delimiter.
            while mesc and mend and (mesc.end() < mend.end()):
                # Copy up to '\', but remove the '\' itself.
                v = v + self.data[self.index:mesc.start()]
                mbreak = self.reLineBreak.match(self.data, mesc.end())
                if mbreak:
                    self.index = mbreak.end()
                    # remove linebreak (an escaped newline is a soft wrap)
                else:
                    v = v + self.data[mesc.end()]
                    # copy escaped character
                    self.index = mesc.end() + 1
                    # move to point after escaped char
                # Re-search from the new index: the old matches are stale now.
                mend = self.rePropertyEnd.search(self.data, self.index)
                mesc = self.reEscape.search(self.data, self.index)
            if mend:
                # Copy the remainder up to the closing delimiter, consume it,
                # and normalize control characters in the finished value.
                v = v + self.data[self.index:mend.start()]
                self.index = mend.end()
                pvlist.append(self._convertControlChars(v))
            else:
                # Unterminated property value.
                raise PropertyValueParseError
        else:
            # reached end of Property
            break
    if len(pvlist) >= 1:
        return pvlist
    else:
        # A property must carry at least one value.
        raise PropertyValueParseError
|
def getnodefieldname(idfobject, endswith, fluid=None, startswith=None):
    """Return the field name of the node.

    fluid is only needed if there are air and water nodes.
    fluid is Air or Water or ''.
    If the fluid is Steam, use Water.
    """
    if startswith is None:
        startswith = ''
    if fluid is None:
        fluid = ''
    candidates = [name
                  for name in getfieldnamesendswith(idfobject, endswith)
                  if name.startswith(startswith)]
    # Prefer names that mention the fluid, keeping the prefix constraint.
    fluid_matches = [name for name in candidates
                     if fluid in name and name.startswith(startswith)]
    if fluid_matches:
        return fluid_matches[0]
    return candidates[0]
|
def logpdf(self, mu):
    """Log PDF for Normal prior.

    Parameters
    ----------
    mu : float
        Latent variable for which the prior is being formed over

    Returns
    -------
    log(p(mu)) up to the constant term (the -0.5*log(2*pi) is omitted).
    """
    # Apply the optional reparameterization first.
    value = mu if self.transform is None else self.transform(mu)
    return -np.log(float(self.sigma0)) - (0.5 * (value - self.mu0) ** 2) / float(self.sigma0 ** 2)
|
def publish(self, topic, message=None, qos=0, retain=False):
    """Publish a message to a topic with specified qos and retained flag.

    It is required that a connection has been established using `Connect`
    keyword before using this keyword.

    ``topic``   topic to which the message will be published
    ``message`` message payload to publish
    ``qos``     qos of the message
    ``retain``  retained flag

    Examples:
    | Publish | test/test | test message | 1 | ${false} |
    """
    logger.info('Publish topic: %s, message: %s, qos: %s, retain: %s' % (topic, message, qos, retain))
    self._mid = -1
    self._mqttc.on_publish = self._on_publish
    result, mid = self._mqttc.publish(topic, message, int(qos), retain)
    if result != 0:
        raise RuntimeError('Error publishing: %s' % result)
    # Pump the network loop until the publish is acknowledged or we time out.
    deadline = time.time() + self._loop_timeout
    while time.time() < deadline:
        if mid == self._mid:
            break
        self._mqttc.loop()
    if mid != self._mid:
        logger.warn('mid wasn\'t matched: %s' % mid)
|
def list_feeds(self):
    """Return a list of all feed names (section names of the data file)."""
    parser = configparser.ConfigParser()
    parser.read(self.data_filename)
    return parser.sections()
|
def get_bytes(self, *args, **kwargs):
    """Fetch a value with no string decoding performed (raw bytes).

    Delegates to the parent class' ``get`` with identical arguments.
    """
    return super(XClient, self).get(*args, **kwargs)
|
def log_results():
    """Callback run once the Async task is finished.

    Takes the output from grep and logs it.
    """
    from furious.context import get_current_async
    # `async` is a reserved keyword since Python 3.7, so the finished task
    # must not be bound to that name (the original code was a SyntaxError).
    finished_task = get_current_async()
    # Pull out the result data and log it.
    for result in finished_task.result:
        logging.info(result)
|
def parse_definition(self, definition):
    """Parse the basic structure of both provided and requested entities.

    :param definition: sequence encoding an entity; the first element is the
        entity type, remaining elements are either a single alias or
        ``key=value[,value...]`` attribute assignments.
    :return: tuple of entity type, alias and attributes
    :raises ValueError: if more than one alias is given
    """
    entity_type = definition[0]  # renamed: don't shadow the builtin `type`
    alias = None
    attributes = {}
    for part in definition[1:]:
        if '=' in part:
            # Attribute: split only on the first '=' so values may contain '='.
            key, value = part.split('=', 1)
            attributes[key] = set(value.split(','))
        elif alias is None:
            alias = part
        else:
            raise ValueError('entity name already defined: {}'.format(part))
    return entity_type, alias, attributes
|
def delete_fastqs(job, patient_dict):
    """Delete the fastqs from the jobStore once their purpose has been
    achieved (i.e. after all mapping steps).

    :param dict patient_dict: Dict of list of input fastqs
    """
    fastq_keys = (k for k in patient_dict.keys() if 'fastq' in k)
    for key in fastq_keys:
        job.fileStore.logToMaster('Deleting "%s:%s" ' % (patient_dict['patient_id'], key) + 'from the filestore.')
        job.fileStore.deleteGlobalFile(patient_dict[key])
    return None
|
def add_star(G, nodes, t, **attr):
    """Add a star at time t.

    The first node in nodes is the middle of the star.  It is connected
    to all other nodes.

    Parameters
    ----------
    G : graph
        A DyNetx graph
    nodes : iterable container
        A container of nodes.
    t : snapshot id (default=None)
        snapshot id

    See Also
    --------
    add_path, add_cycle

    Examples
    --------
    >>> G = dn.DynGraph()
    >>> dn.add_star(G, [0, 1, 2, 3], t=0)
    """
    node_iter = iter(nodes)
    center = next(node_iter)
    # Lazily pair the hub with every remaining node.
    spokes = ((center, other) for other in node_iter)
    G.add_interactions_from(spokes, t, **attr)
|
def hamiltonian(Ep, epsilonp, detuning_knob, rm, omega_level, omega_laser, xi, RWA=True, RF=True):
    r"""Return symbolic Hamiltonian.

    Builds the dipole-interaction Hamiltonian for Ne levels and Nl lasers,
    optionally in the rotating frame (RF) and/or under the rotating wave
    approximation (RWA).  RF may be True, a list of phase functions theta,
    or a sympy Matrix column of them.

    >>> from sympy import zeros, pi, pprint, symbols
    >>> Ne = 3
    >>> Nl = 2
    >>> Ep, omega_laser = define_laser_variables(Nl)
    >>> epsilonp = [polarization_vector(0, -pi/2, 0, 0, 1) for l in range(Nl)]
    >>> detuning_knob = symbols("delta1 delta2", real=True)
    >>> xi = [zeros(Ne, Ne) for l in range(Nl)]
    >>> coup = [[(1, 0)], [(2, 0)]]
    >>> for l in range(Nl):
    ...     for pair in coup[l]:
    ...         xi[l][pair[0], pair[1]] = 1
    ...         xi[l][pair[1], pair[0]] = 1
    >>> rm = define_r_components(Ne, xi, explicitly_hermitian=True,
    ...                          helicity=True, p=-1)
    >>> rm = helicity_to_cartesian(rm)
    >>> omega_level, omega, gamma = define_frequencies(Ne, True)
    >>> H = hamiltonian(Ep, epsilonp, detuning_knob, rm, omega_level,
    ...                 omega_laser, xi, RWA=True, RF=False)
    >>> print H[1, 0]
    -E_{01}*e*r_{0;21}*exp(-I*t*varpi_1)/2
    >>> print H[2, 0]
    -E_{02}*e*r_{0;31}*exp(-I*t*varpi_2)/2
    >>> print H[2, 2]
    hbar*omega_3
    """
    # We check what RF is: a list/Matrix of phase functions means "rotating
    # frame with these thetas"; plain True would require computing them.
    if type(RF) == list:
        theta = RF[:]
        RF = True
    elif type(RF) == Matrix:
        theta = [RF[i, 0] for i in range(RF.shape[0])]
        RF = True
    elif RF:
        # theta should be calculated here!
        s = "We are still missing automatic calculation of phase "
        s += "transformations."
        raise ValueError(s)
    if not RWA and RF:
        s = "The rotating frame does not exist without the rotating wave \
approximation, as far as I know."
        raise ValueError(s)
    # We check that the epsilonp is a list of vectors.
    if not isinstance(epsilonp, list):
        raise ValueError("epsilonp must be a list of polarization vectors.")
    if not isinstance(epsilonp[0], Matrix):
        raise ValueError("epsilonp must be a list of polarization vectors.")
    Ne = len(omega_level)
    Nl = len(omega_laser)
    H = zeros(Ne, Ne)
    hbar, e = symbols("hbar e", positive=True)
    t = symbols("t", real=True)
    for i in range(Ne):
        for j in range(Ne):
            # Matrix elements of the position operator and its conjugate.
            rmij = vector_element(rm, i, j)
            rpij = vector_element(rm, j, i).conjugate()
            for l in range(Nl):
                epsilonpl = epsilonp[l]
                epsilonml = epsilonpl.conjugate()
                if RF:
                    # Field amplitudes dressed by the frame phases theta.
                    Epl = xi[l][i, j] * Ep[l]
                    Epl *= exp(-I * (theta[j] - theta[i] - t * omega_laser[l]))
                    Eml = xi[l][i, j] * Ep[l].conjugate()
                    Eml *= exp(-I * (theta[j] - theta[i] + t * omega_laser[l]))
                else:
                    Epl = Ep[l] * xi[l][i, j] * exp(-I * omega_laser[l] * t)
                    Eml = Epl.conjugate()
                # The E^(+) r^(-) term
                H[i, j] += -e * Epl / 2 * cartesian_dot_product(epsilonpl, rmij)
                # The E^(-) r^(+) term
                H[i, j] += -e * Eml / 2 * cartesian_dot_product(epsilonml, rpij)
                if not RWA:
                    # The E^(+) r^(+) term
                    H[i, j] += -e * Epl / 2 * cartesian_dot_product(epsilonpl, rpij)
                    # The E^(-) r^(-) term
                    H[i, j] += -e * Eml / 2 * cartesian_dot_product(epsilonml, rmij)
            if i == j:
                # Diagonal: bare level energies, shifted by theta-dot in RF.
                if RF:
                    H[i, j] += hbar * (omega_level[i] + diff(theta[i], t))
                else:
                    H[i, j] += hbar * omega_level[i]
    return H
|
def validate_current_versions(self):  # type: () -> bool
    """Can a version be found? Are all versions currently the same? Are they
    valid sem ver?

    :return: True when the discovered versions are usable and in sync.
    """
    versions = self.all_current_versions()
    for _, version in versions.items():
        if "Invalid Semantic Version" in version:
            logger.error("Invalid versions, can't compare them, can't determine if in sync")
            return False
    if not versions:
        logger.warning("Found no versions, will use default 0.1.0")
        return True
    if not self.all_versions_equal(versions):
        # TODO: disable with strict option
        if self.almost_the_same_version(list(versions.values())):
            logger.warning("Versions vary by a patch level, will use greater.")
            return True
        logger.error("Found various versions, how can we rationally pick?")
        # `unicode()` was Python 2 only (NameError on Python 3); use str().
        logger.error(str(versions))
        return False
    # versions is non-empty and all equal at this point.
    return True
|
def get_object_by_natural_key(self, app_label, model_name, object_id):
    """Return a model based on a natural key.

    This is a utility function for :func:`get_edited_object`.
    """
    try:
        content_type = ContentType.objects.get_by_natural_key(app_label, model_name)
    except ContentType.DoesNotExist:
        return None
    # Pointless to fetch the object if there is no URL to generate;
    # this avoids another database query.
    if not hasattr(content_type.model_class(), 'get_absolute_url'):
        return None
    try:
        return content_type.get_object_for_this_type(pk=object_id)
    except ObjectDoesNotExist:
        return None
|
def get_from_ipfs_and_checkhash(ipfs_client, ipfs_hash_base58, validate=True):
    """Get a file from IPFS.

    We must check the hash ourselves because we cannot assume that
    ipfs_client hasn't been compromised.

    :param ipfs_client: IPFS client used to fetch blocks/content.
    :param ipfs_hash_base58: base58-encoded IPFS hash of the file.
    :param validate: when True, re-verify the multihash against the raw
        block bytes before trusting the content.
    """
    if validate:
        from snet_cli.resources.proto.unixfs_pb2 import Data
        from snet_cli.resources.proto.merckledag_pb2 import MerkleNode
        # No nice Python library to parse ipfs blocks, so do it ourselves.
        block_data = ipfs_client.block_get(ipfs_hash_base58)
        mn = MerkleNode()
        mn.ParseFromString(block_data)
        unixfs_data = Data()
        unixfs_data.ParseFromString(mn.Data)
        # Only plain files are supported (not directories or other DAG nodes).
        assert unixfs_data.Type == unixfs_data.DataType.Value('File'), "IPFS hash must be a file"
        data = unixfs_data.Data
        # multihash has a badly registered base58 codec, overwrite it...
        multihash.CodecReg.register('base58', base58.b58encode, base58.b58decode)
        # create a multihash object from our ipfs hash
        mh = multihash.decode(ipfs_hash_base58.encode('ascii'), 'base58')
        # Convenience method lets us directly use a multihash to verify data
        if not mh.verify(block_data):
            raise Exception("IPFS hash mismatch with data")
    else:
        # Caller opted out of validation; trust the client's cat() output.
        data = ipfs_client.cat(ipfs_hash_base58)
    return data
|
def _synchronize_node(configfile, node):
    """Performs the Synchronize step of a Chef run:

    Uploads all cookbooks, all roles and all databags to a node and adds the
    patch for data bags.

    Returns the node object of the node which is about to be configured,
    or None if this node object cannot be found.
    """
    msg = "Synchronizing nodes, environments, roles, cookbooks and data bags..."
    if env.parallel:
        msg = "[{0}]: {1}".format(env.host_string, msg)
    print(msg)
    # First upload node.json
    remote_file = '/etc/chef/node.json'
    # Fix: the file mode must be the octal 0o400, not the decimal 400.
    put(configfile, remote_file, use_sudo=True, mode=0o400)
    with hide('stdout'):
        sudo('chown root:$(id -g -n root) {0}'.format(remote_file))
    # Remove local temporary node file
    os.remove(configfile)
    # Synchronize kitchen
    extra_opts = "-q"
    if env.follow_symlinks:
        extra_opts += " --copy-links"
    ssh_opts = ""
    if env.ssh_config_path:
        ssh_opts += " -F %s" % os.path.expanduser(env.ssh_config_path)
    if env.encrypted_data_bag_secret:
        # Fix: 0600 was a Python 2 octal literal (SyntaxError on Python 3).
        put(env.encrypted_data_bag_secret, "/etc/chef/encrypted_data_bag_secret",
            use_sudo=True, mode=0o600)
        sudo('chown root:$(id -g -n root) /etc/chef/encrypted_data_bag_secret')
    paths_to_sync = ['./data_bags', './roles', './environments']
    for cookbook_path in cookbook_paths:
        paths_to_sync.append('./{0}'.format(cookbook_path))
    # Add berksfile directory to sync_list
    if env.berksfile:
        paths_to_sync.append(env.berksfile_cookbooks_directory)
    # Fix: compare strings with ==, not the identity operator `is`.
    if env.loglevel == "debug":
        extra_opts = ""
    if env.gateway:
        ssh_key_file = '.ssh/' + os.path.basename(' '.join(env.ssh_config.lookup(env.host_string)['identityfile']))
        ssh_opts += " " + env.gateway + " ssh -o StrictHostKeyChecking=no -i "
        ssh_opts += ssh_key_file
    rsync_project(env.node_work_path, ' '.join(paths_to_sync),
                  exclude=('*.svn', '.bzr*', '.git*', '.hg*'),
                  delete=True, extra_opts=extra_opts, ssh_opts=ssh_opts)
    if env.sync_packages_dest_dir and env.sync_packages_local_dir:
        # Fix: .format must be applied to the string, not to print()'s
        # return value (which is None on Python 3).
        print("Uploading packages from {0} to remote server {2} directory "
              "{1}".format(env.sync_packages_local_dir,
                           env.sync_packages_dest_dir, env.host_string))
        try:
            rsync_project(env.sync_packages_dest_dir,
                          env.sync_packages_local_dir + "/*",
                          exclude=('*.svn', '.bzr*', '.git*', '.hg*'),
                          delete=True, extra_opts=extra_opts, ssh_opts=ssh_opts)
        except Exception:
            # Best-effort: a failed package upload should not abort the run.
            print("Warning: package upload failed. Continuing cooking...")
    _add_environment_lib()
|
def cvxEDA(eda, sampling_rate=1000, tau0=2., tau1=0.7, delta_knot=10.,
           alpha=8e-4, gamma=1e-2, solver=None, verbose=False, options=None):
    """A convex optimization approach to electrodermal activity processing (CVXEDA).

    This function implements the cvxEDA algorithm described in "cvxEDA: a
    Convex Optimization Approach to Electrodermal Activity Processing"
    (Greco et al., 2015).

    Parameters
    ----------
    eda : list or array
        raw EDA signal array.
    sampling_rate : int
        Sampling rate (samples/second).
    tau0 : float
        Slow time constant of the Bateman function.
    tau1 : float
        Fast time constant of the Bateman function.
    delta_knot : float
        Time between knots of the tonic spline function.
    alpha : float
        Penalization for the sparse SMNA driver.
    gamma : float
        Penalization for the tonic spline coefficients.
    solver : bool
        Sparse QP solver to be used, see cvxopt.solvers.qp
    verbose : bool
        Print progress?
    options : dict, optional
        Solver options, see
        http://cvxopt.org/userguide/coneprog.html#algorithm-parameters.
        Defaults to {'reltol': 1e-9}.

    Returns
    -------
    (tonic, phasic) where phasic is a numpy.array of the phasic component.

    References
    ----------
    - Greco, A., Valenza, G., Lanata, A., Scilingo, E. P., & Citi, L. (2016).
      cvxEDA: A convex optimization approach to electrodermal activity
      processing. IEEE Transactions on Biomedical Engineering, 63(4), 797-804.
    """
    # Fix: the old signature used a mutable default (options={'reltol': 1e-9})
    # which the body mutates below -- that dict was shared across calls.
    # Copy whatever we are given so the caller's dict is never modified.
    if options is None:
        options = {'reltol': 1e-9}
    else:
        options = dict(options)
    frequency = 1 / sampling_rate
    # Normalizing signal
    eda = z_score(eda)
    eda = np.array(eda)[:, 0]
    n = len(eda)
    eda = eda.astype('double')
    eda = cv.matrix(eda)
    # bateman ARMA model
    a1 = 1. / min(tau1, tau0)
    # a1 > a0
    a0 = 1. / max(tau1, tau0)
    ar = np.array([(a1 * frequency + 2.) * (a0 * frequency + 2.),
                   2. * a1 * a0 * frequency ** 2 - 8.,
                   (a1 * frequency - 2.) * (a0 * frequency - 2.)]) / ((a1 - a0) * frequency ** 2)
    ma = np.array([1., 2., 1.])
    # matrices for ARMA model
    i = np.arange(2, n)
    A = cv.spmatrix(np.tile(ar, (n - 2, 1)), np.c_[i, i, i], np.c_[i, i - 1, i - 2], (n, n))
    M = cv.spmatrix(np.tile(ma, (n - 2, 1)), np.c_[i, i, i], np.c_[i, i - 1, i - 2], (n, n))
    # spline
    delta_knot_s = int(round(delta_knot / frequency))
    spl = np.r_[np.arange(1., delta_knot_s), np.arange(delta_knot_s, 0., -1.)]
    # order 1
    spl = np.convolve(spl, spl, 'full')
    spl /= max(spl)
    # matrix of spline regressors
    i = np.c_[np.arange(-(len(spl) // 2), (len(spl) + 1) // 2)] + np.r_[np.arange(0, n, delta_knot_s)]
    nB = i.shape[1]
    j = np.tile(np.arange(nB), (len(spl), 1))
    p = np.tile(spl, (nB, 1)).T
    valid = (i >= 0) & (i < n)
    B = cv.spmatrix(p[valid], i[valid], j[valid])
    # trend
    C = cv.matrix(np.c_[np.ones(n), np.arange(1., n + 1.) / n])
    nC = C.size[1]
    # Solve the problem:
    # .5*(M*q + B*l + C*d - eda)^2 + alpha*sum(A,1)*p + .5*gamma*l'*l
    # s.t. A*q >= 0
    if verbose is False:
        options["show_progress"] = False
    old_options = cv.solvers.options.copy()
    cv.solvers.options.clear()
    cv.solvers.options.update(options)
    if solver == 'conelp':
        # Use conelp
        z = lambda m, n: cv.spmatrix([], [], [], (m, n))
        G = cv.sparse([[-A, z(2, n), M, z(nB + 2, n)],
                       [z(n + 2, nC), C, z(nB + 2, nC)],
                       [z(n, 1), -1, 1, z(n + nB + 2, 1)],
                       [z(2 * n + 2, 1), -1, 1, z(nB, 1)],
                       [z(n + 2, nB), B, z(2, nB), cv.spmatrix(1.0, range(nB), range(nB))]])
        h = cv.matrix([z(n, 1), .5, .5, eda, .5, .5, z(nB, 1)])
        c = cv.matrix([(cv.matrix(alpha, (1, n)) * A).T, z(nC, 1), 1, gamma, z(nB, 1)])
        res = cv.solvers.conelp(c, G, h, dims={'l': n, 'q': [n + 2, nB + 2], 's': []})
        obj = res['primal objective']
    else:
        # Use qp
        Mt, Ct, Bt = M.T, C.T, B.T
        H = cv.sparse([[Mt * M, Ct * M, Bt * M],
                       [Mt * C, Ct * C, Bt * C],
                       [Mt * B, Ct * B, Bt * B + gamma * cv.spmatrix(1.0, range(nB), range(nB))]])
        f = cv.matrix([(cv.matrix(alpha, (1, n)) * A).T - Mt * eda, -(Ct * eda), -(Bt * eda)])
        res = cv.solvers.qp(H, f, cv.spmatrix(-A.V, A.I, A.J, (n, len(f))),
                            cv.matrix(0., (n, 1)), solver=solver)
        obj = res['primal objective'] + .5 * (eda.T * eda)
    # Restore the solver options we clobbered above.
    cv.solvers.options.clear()
    cv.solvers.options.update(old_options)
    l = res['x'][-nB:]
    d = res['x'][n:n + nC]
    tonic = B * l + C * d
    q = res['x'][:n]
    p = A * q
    phasic = M * q
    e = eda - phasic - tonic
    phasic = np.array(phasic)[:, 0]
    # results = (np.array(a).ravel() for a in (r, t, p, l, d, e, obj))
    return (tonic, phasic)
|
def _add_gainloss_to_output(out, data):
    """Add gainloss based on genes, helpful for identifying changes in smaller genes."""
    out_file = "%s-gainloss.txt" % os.path.splitext(out["cns"])[0]
    if not utils.file_exists(out_file):
        with file_transaction(data, out_file) as tx_out_file:
            cnvkit_exe = os.path.join(os.path.dirname(sys.executable), "cnvkit.py")
            cmd = [cnvkit_exe, "gainloss", "-s", out["cns"], "-o", tx_out_file, out["cnr"]]
            sample_sex = _get_batch_gender([data])
            if sample_sex:
                cmd += ["--sample-sex", sample_sex]
            do.run(cmd, "CNVkit gainloss")
    out["gainloss"] = out_file
    return out
|
def get(self):
    """Get a JSON-ready representation of this SubscriptionTracking.

    :returns: This SubscriptionTracking, ready for use in a request body.
    :rtype: dict
    """
    payload = {}
    if self.enable is not None:
        payload["enable"] = self.enable
    # The remaining fields are objects that serialize themselves via .get().
    for field in ("text", "html", "substitution_tag"):
        value = getattr(self, field)
        if value is not None:
            payload[field] = value.get()
    return payload
|
def unpack_bitstring(length, is_float, is_signed, bits):
    # type: (int, bool, bool, typing.Any) -> typing.Union[float, int]
    """Return a value calculated from bits.

    :param length: length of signal in bits
    :param is_float: value is float
    :param bits: value as bits (array/iterable)
    :param is_signed: value is signed
    :return: the decoded float or int
    """
    if is_float:
        # Only IEEE754 single (32) and double (64) precision are supported.
        fmt = {32: '>f', 64: '>d'}[length]
        payload = bytearray(int(''.join(octet), 2) for octet in grouper(bits, 8))
        return struct.unpack(fmt, payload)[0]
    value = int(bits, 2)
    if is_signed and bits[0] == '1':
        # Two's complement: a set MSB means the value is negative.
        value -= 1 << len(bits)
    return value
|
def _async_raise ( tid , exctype ) :
"""raises the exception , performs cleanup if needed
参考 : https : / / www . oschina . net / question / 172446_2159505"""
|
tid = ctypes . c_long ( tid )
if not inspect . isclass ( exctype ) :
exctype = type ( exctype )
res = ctypes . pythonapi . PyThreadState_SetAsyncExc ( tid , ctypes . py_object ( exctype ) )
if res == 0 :
raise ValueError ( "invalid thread id" )
elif res != 1 : # " " " if it returns a number greater than one , you ' re in trouble ,
# and you should call it again with exc = NULL to revert the effect " " "
ctypes . pythonapi . PyThreadState_SetAsyncExc ( tid , None )
raise SystemError ( "PyThreadState_SetAsyncExc failed" )
print ( 'force close: {} {}' . format ( tid , datetime . datetime . now ( ) . strftime ( '%Y-%m-%d %H:%M:%S' ) ) )
|
def from_disk(self, path, **kwargs):
    """Load the entity ruler from a file. Expects a file containing
    newline-delimited JSON (JSONL) with one entry per line.

    path (unicode / Path): The JSONL file to load.
    **kwargs: Other config parameters, mostly for consistency.
    RETURNS (EntityRuler): The loaded entity ruler.

    DOCS: https://spacy.io/api/entityruler#from_disk
    """
    jsonl_path = ensure_path(path).with_suffix(".jsonl")
    self.add_patterns(srsly.read_jsonl(jsonl_path))
    return self
|
def plot_by_correct(self, y, is_correct):
    """Plot the images which correspond to the selected class (y) and to the
    specific case (prediction is correct - is_correct=True, prediction is
    wrong - is_correct=False).

    Arguments:
        y (int): the selected class
        is_correct (boolean): True to show the most correct samples,
            False to show the most incorrect ones.
    """
    return self.plot_val_with_title(self.most_by_correct(y, is_correct), y)
|
def create_node(self, network, participant):
    """Make a new node for participants.

    Founders are used for practice/catch networks and while the current
    generation is still being filled; otherwise a regular agent is created.
    """
    # The first two original branches returned the identical expression, so
    # they are merged; `or` short-circuits, preserving that network.size()
    # is only evaluated for non practice/catch networks.
    if network.role in ("practice", "catch") or network.size(type=Agent) < network.generation_size:
        return RogersAgentFounder(network=network, participant=participant)
    return RogersAgent(network=network, participant=participant)
|
def cublasDspr(handle, uplo, n, alpha, x, incx, AP):
    """Rank-1 operation on real symmetric-packed matrix.

    Thin ctypes wrapper around ``cublasDspr_v2``: ``alpha`` is passed by
    reference as a C double, while ``x`` and ``AP`` are passed as integer
    device pointers.  Raises via ``cublasCheckStatus`` on failure.
    """
    status = _libcublas.cublasDspr_v2(handle, _CUBLAS_FILL_MODE[uplo], n,
                                      ctypes.byref(ctypes.c_double(alpha)),
                                      int(x), incx, int(AP))
    cublasCheckStatus(status)
|
def _build_paths ( ) :
"""Prepare paths for distlib . wheel . Wheel to install into ."""
|
paths = sysconfig . get_paths ( )
return { "prefix" : sys . prefix , "data" : paths [ "data" ] , "scripts" : paths [ "scripts" ] , "headers" : paths [ "include" ] , "purelib" : paths [ "purelib" ] , "platlib" : paths [ "platlib" ] , }
|
def get_one(self, request, **kwargs):
    """Load a resource by its primary key taken from the request path."""
    resource_id = request.match_info.get(self.name)
    if not resource_id:
        return None
    try:
        return self.collection.where(self.meta.model_pk == resource_id).get()
    except Exception:
        # Any lookup failure is surfaced uniformly as a 404.
        raise RESTNotFound(reason='Resource not found.')
|
def PushState(self, **_):
    """Push the current state on the state stack."""
    current = self.state
    if self.verbose:
        logging.debug("Storing state %r", current)
    self.state_stack.append(current)
|
def shift_and_scale(matrix, shift, scale):
    """Shift and scale matrix so its minimum value is placed at `shift` and
    its maximum value is scaled to `scale`."""
    offset_free = matrix - matrix.min()
    span = scale - shift
    # Normalize to [0, 1], stretch to the requested span, then translate.
    return span * (offset_free / offset_free.max()) + shift
|
def fixup_simple_stmt(parent, i, stmt_node):
    """if there is a semi-colon all the parts count as part of the same
    simple_stmt. We just want the __metaclass__ part so we move
    everything efter the semi-colon into its own simple_stmt node"""
    # Find the first semicolon; if there is none, nothing to split.
    for semi_ind, node in enumerate(stmt_node.children):
        if node.type == token.SEMI:  # *sigh*
            break
    else:
        return
    node.remove()  # kill the semicolon
    # Everything after the (removed) semicolon is re-homed under a fresh
    # expr_stmt wrapped in its own simple_stmt, inserted at index ``i``.
    new_expr = Node(syms.expr_stmt, [])
    new_stmt = Node(syms.simple_stmt, [new_expr])
    # Children shift left as each one is removed, so keep pulling from
    # the same index until the tail is exhausted.
    while stmt_node.children[semi_ind:]:
        move_node = stmt_node.children[semi_ind]
        new_expr.append_child(move_node.clone())
        move_node.remove()
    parent.insert_child(i, new_stmt)
    # Carry the original leading whitespace/comments over to the new
    # statement so formatting is preserved.
    new_leaf1 = new_stmt.children[0].children[0]
    old_leaf1 = stmt_node.children[0].children[0]
    new_leaf1.prefix = old_leaf1.prefix
|
def call_sphinx(out_type, build_dir="build"):
    """Run ``sphinx-build`` for the given output type, then ``make`` when
    the target supports it.

    Parameters
    ----------
    out_type :
        A builder name for ``sphinx-build``. See the full list at
        `<http://sphinx-doc.org/invocation.html>`_.
    build_dir :
        Directory for storing the output. Defaults to "build".
    """
    sphinx_cmd = sphinx_template.format(build_dir=build_dir, out_type=out_type)
    exit_code = sphinx.main(shlex.split(sphinx_cmd))
    if exit_code != 0:
        raise RuntimeError("Something went wrong while building '{0}'".format(out_type))
    # Some builders have a follow-up `make` step (e.g. latexpdf).
    if out_type in make_target:
        make_cmd = make_template.format(
            build_dir=build_dir, out_type=out_type, make_param=make_target[out_type])
        call(shlex.split(make_cmd))
|
def _reload_version ( self ) :
"""Packages installed by distutils ( e . g . numpy or scipy ) ,
which uses an old safe _ version , and so
their version numbers can get mangled when
converted to filenames ( e . g . , 1.11.0 . dev0 + 2329eae to
1.11.0 . dev0_2329eae ) . These distributions will not be
parsed properly
downstream by Distribution and safe _ version , so
take an extra step and try to get the version number from
the metadata file itself instead of the filename ."""
|
md_version = self . _get_version ( )
if md_version :
self . _version = md_version
return self
|
def meryl(args):
    """%prog meryl folder
    Run meryl on Illumina reads."""
    p = OptionParser(meryl.__doc__)
    p.add_option("-k", default=19, type="int", help="Kmer size")
    p.set_cpus()
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    (folder,) = args
    kmer_size = opts.k
    cpus = opts.cpus
    mm = MakeManager()
    # One counting job per read file, then merge the two partial tables.
    for pair, prefix in iter_project(folder):
        cmds = []
        partial_tables = []
        for idx, read_file in enumerate(pair):
            ms = "{}{}.ms{}".format(prefix, idx + 1, kmer_size)
            partial_tables.append(ms)
            count_cmd = "meryl -B -C -m {} -threads {}".format(kmer_size, cpus)
            count_cmd += " -s {} -o {}".format(read_file, ms)
            cmds.append(count_cmd)
        ams, bms = partial_tables
        merged = "{}.ms{}".format(prefix, kmer_size)
        cmds.append("meryl -M add -s {} -s {} -o {}".format(ams, bms, merged))
        # Clean up the partial tables once merged.
        cmds.append("rm -f {}.mcdat {}.mcidx {}.mcdat {}.mcidx".format(ams, ams, bms, bms))
        mm.add(pair, merged + ".mcdat", cmds)
    mm.write()
|
def _shock_create ( self , h , shock_type , shock_index , shock_value , shock_dir , irf_intervals ) :
"""Function creates shocks based on desired specification
Parameters
h : int
How many steps ahead to forecast
shock _ type : None or str
Type of shock ; options include None , ' Cov ' ( simulate from covariance matrix ) , ' IRF ' ( impulse response shock )
shock _ index : int
Which latent variables to apply the shock to if using an IRF .
shock _ value : None or float
If specified , applies a custom - sized impulse response shock .
shock _ dir : str
Direction of the IRF shock . One of ' positive ' or ' negative ' .
irf _ intervals : Boolean
Whether to have intervals for the IRF plot or not
Returns
A h - length list which contains np . arrays containing shocks for each variable"""
|
if shock_type is None :
random = [ np . zeros ( self . ylen ) for i in range ( 0 , h ) ]
elif shock_type == 'IRF' :
if self . use_ols_covariance is False :
cov = self . custom_covariance ( self . latent_variables . get_z_values ( ) )
else :
cov = self . ols_covariance ( )
post = ss . multivariate_normal ( np . zeros ( self . ylen ) , cov )
if irf_intervals is False :
random = [ np . zeros ( self . ylen ) for i in range ( 0 , h ) ]
else :
random = [ post . rvs ( ) for i in range ( 0 , h ) ]
random [ 0 ] = np . zeros ( self . ylen )
if shock_value is None :
if shock_dir == 'positive' :
random [ 0 ] [ shock_index ] = cov [ shock_index , shock_index ] ** 0.5
elif shock_dir == 'negative' :
random [ 0 ] [ shock_index ] = - cov [ shock_index , shock_index ] ** 0.5
else :
raise ValueError ( "Unknown shock direction!" )
else :
random [ 0 ] [ shock_index ] = shock_value
elif shock_type == 'Cov' :
if self . use_ols_covariance is False :
cov = self . custom_covariance ( self . latent_variables . get_z_values ( ) )
else :
cov = self . ols_covariance ( )
post = ss . multivariate_normal ( np . zeros ( self . ylen ) , cov )
random = [ post . rvs ( ) for i in range ( 0 , h ) ]
return random
|
def ComputeLightTravelTime(Det1Pos, Det2Pos):
    """Compute the light travel time between two GW detectors.

    Det1Pos - (3,) array. Position vector of detector 1.
    Det2Pos - (3,) array. Position vector of detector 2.

    Returns travelTime - light travel time between the two detectors [s].

    Sarah Gossan 2012. Adapted from TimeDelay.c, written by Jolien
    Creighton, David Chin, Steven Fairhurst, Kipp Cannon, Alexander Dietz,
    Drew Keppel 2007.
    """
    separation = Det2Pos - Det1Pos
    # |separation| via the dot product, then normalise by the speed of light.
    distance = np.sqrt(np.dot(separation, separation))
    return distance / c
|
def boolean_add(self, mesh, inplace=False):
    """Add a mesh to the current mesh.  Does not attempt to "join" the meshes.

    Parameters
    ----------
    mesh : vtki.PolyData
        The mesh to add.
    inplace : bool, optional
        Updates mesh in-place while returning nothing.

    Returns
    -------
    joinedmesh : vtki.PolyData
        Initial mesh and the new mesh when inplace=False.
    """
    appender = vtk.vtkAppendPolyData()
    appender.AddInputData(self)
    appender.AddInputData(mesh)
    appender.Update()
    joined = _get_output(appender)
    if inplace:
        self.overwrite(joined)
        return None
    return joined
|
def quote(string, safe='/', encoding=None, errors=None):
    """quote('abc def') -> 'abc%20def'

    Percent-encode the reserved characters of a URL path component.
    By default '/' is left alone since it is normally a path separator.

    ``string`` and ``safe`` may be either str or bytes objects.
    ``encoding`` and ``errors`` control how a str is encoded before
    quoting (defaults: 'utf-8' and 'strict'); they must not be supplied
    for bytes input.
    """
    if not isinstance(string, str):
        # bytes input: encoding/errors make no sense here.
        if encoding is not None:
            raise TypeError("quote() doesn't support 'encoding' for bytes")
        if errors is not None:
            raise TypeError("quote() doesn't support 'errors' for bytes")
        return quote_from_bytes(string, safe)
    if not string:
        return string
    encoded = string.encode(
        'utf-8' if encoding is None else encoding,
        'strict' if errors is None else errors,
    )
    return quote_from_bytes(encoded, safe)
|
def get_email_regex(self):
    """Return a regex pattern matching valid email addresses.

    Uses the same logic as the django validator, with the following
    exceptions:
    - Internationalized domain names not supported
    - IP addresses not supported
    - Strips lookbehinds (not supported in javascript regular expressions)
    """
    validator = self.default_validators[0]
    user_part = validator.user_regex.pattern.replace('\\Z', '@')
    domain_alternatives = [re.escape(d) + '$' for d in validator.domain_whitelist]
    domain_alternatives.append(validator.domain_regex.pattern.replace('\\Z', '$'))
    domain_part = '({0})'.format('|'.join(domain_alternatives))
    # Remove lookbehind groups, which javascript regexes cannot handle.
    return re.sub(r'\(\?\<[^()]*?\)', '', user_part + domain_part)
|
def get_domain_connect_template_sync_url(self, domain, provider_id, service_id, redirect_uri=None, params=None, state=None, group_ids=None):
    """Makes full Domain Connect discovery of a domain and returns full url to request sync consent.

    :param domain: str
    :param provider_id: str
    :param service_id: str
    :param redirect_uri: str
    :param params: dict
    :param state: str
    :param group_ids: list(str)
    :return: str
        URL which shall be used to redirect the browser to
    :raises: NoDomainConnectRecordException
        when no _domainconnect record found
    :raises: NoDomainConnectSettingsException
        when settings are not found
    :raises: InvalidDomainConnectSettingsException
        when settings contain missing fields
    """
    # TODO: support for signatures
    # TODO: support for provider_name (for shared templates)
    if params is None:
        params = {}
    # Discover the Domain Connect configuration for the domain and verify
    # the provider/service template is supported before building the URL.
    config = self.get_domain_config(domain)
    self.check_template_supported(config, provider_id, service_id)
    if config.urlSyncUX is None:
        raise InvalidDomainConnectSettingsException("No sync URL in config")
    sync_url_format = '{}/v2/domainTemplates/providers/{}/services/{}/' 'apply?domain={}&host={}&{}'
    if redirect_uri is not None:
        params["redirect_uri"] = redirect_uri
    if state is not None:
        params["state"] = state
    if group_ids is not None:
        params["groupId"] = ",".join(group_ids)
    # Query parameters are sorted by key so the generated URL is deterministic.
    return sync_url_format.format(config.urlSyncUX, provider_id, service_id, config.domain_root, config.host, urllib.parse.urlencode(sorted(params.items(), key=lambda val: val[0])))
|
def get_result(self):
    """Get the result of this transfer, blocking until it is available."""
    # Drain queued packets (or flush the current command) until the
    # transfer produces a result.
    while self._result is None:
        if len(self.daplink._commands_to_read) > 0:
            self.daplink._read_packet()
        else:
            assert not self.daplink._crnt_cmd.get_empty()
            self.daplink.flush()
    if self._error is not None:
        # Pylint thinks self._error is always None (its initial value),
        # so suppress the bogus warning.
        # pylint: disable=raising-bad-type
        raise self._error
    assert self._result is not None
    return self._result
|
def fetchmany(self, *args, **kwargs):
    """Analogous to :any:`sqlite3.Cursor.fetchmany`"""
    # Entering ``self`` manages the underlying connection/transaction.
    with self:
        rows = self._cursor.fetchmany(*args, **kwargs)
    return rows
|
def create_script_acl(self, id_vlan, network_type):
    '''Generate the script acl.

    :param id_vlan: Vlan Id
    :param network_type: v4 or v6

    :raise InvalidValueError: Attrs invalids.
    :raise XMLError: Networkapi failed to generate the XML response.
    :raise VlanACLDuplicatedError: ACL name duplicate.
    :raise VlanNotFoundError: Vlan not registered.

    :return: dictionary keyed by 'vlan' with id, nome, num_vlan,
        descricao, acl_file_name, ativada, acl_valida,
        acl_file_name_v6, redeipv6, acl_valida_v6, redeipv4, ambiente.
    '''
    payload = {
        'id_vlan': id_vlan,
        'network_type': network_type,
    }
    code, xml = self.submit({'vlan': payload}, 'POST', 'vlan/create/script/acl/')
    return self.response(code, xml)
|
def upload_large_items(self):
    """Upload files that were too large."""
    for local_file, parent in self.large_items:
        # Skip files that no longer need sending.
        if not local_file.need_to_send:
            continue
        self.process_large_file(local_file, parent)
|
def doc(obj, of_class=None):
    """Get parsed documentation for an object as a dict-like Docstring.

    Supports modules, classes, functions/methods and properties.  The
    result carries the argument specification plus the parsed docstring
    data (summary, arguments with types, return value, raised exceptions).

    For a class, the class docstring is parsed and merged into the
    constructor's documentation.

    Note: in Python 3, when documenting a method of a class, pass the class
    as the second argument (``doc(cls.method, cls)``) because methods are
    plain functions there and the class cannot be recovered otherwise.

    :type obj: ModuleType | type | Callable | property
    :param of_class: A class whose method is being documented.
    :type of_class: class | None
    :rtype: Docstring | FDocstring
    """
    # Special care about properties
    if isinstance(obj, property):
        # Document the getter; the property object itself has no argspec.
        docstr = doc(obj.fget)
        # Some hacks for properties
        docstr.signature = docstr.qsignature = obj.fget.__name__
        # Drop `self` from the getter's argument list.
        docstr.args = docstr.args[1:]
        return docstr
    # Module
    module = inspect.getmodule(obj)
    if module:
        module = module.__name__
    # Not callable: e.g. modules
    if not callable(obj):
        if hasattr(obj, '__name__'):
            return data.Docstring(qualname=obj.__name__, doc=getdoc(obj))
        else:
            return None
    # Callables
    qualname, fun, of_class = _get_callable(obj, of_class)
    docstr = _docspec(fun, module=module, qualname=qualname, of_class=of_class)
    # Class? Get doc
    if inspect.isclass(obj):
        # Get class doc
        clsdoc = getdoc(obj)
        # Parse docstring and merge into constructor doc
        if clsdoc:
            # Parse docstring
            clsdoc = _doc_parse(clsdoc, module=module, qualname=qualname)
            # Store clsdoc always
            docstr.clsdoc = clsdoc.doc
            # Merge exceptions list
            docstr.exc.extend(clsdoc.exc)
            # If constructor does not have it's own docstr -- copy it from the clsdoc
            if not docstr.doc:
                docstr.doc = docstr.clsdoc
            # Merge arguments: type, doc.  Names are compared with leading
            # '*'/'**' markers stripped so `*args` matches `args`.
            for a_class in clsdoc.args:
                for a_constructor in docstr.args:
                    if a_class.name.lstrip('*') == a_constructor.name.lstrip('*'):
                        a_constructor.type = a_class.type
                        a_constructor.doc = a_class.doc
    # Finish
    return docstr
|
def _call_pyfftw(self, x, out, **kwargs):
    """Implement ``self(x[, out, **kwargs])`` using pyfftw.

    Parameters
    ----------
    x : `domain` element
        Input element to be transformed.
    out : `range` element
        Output element storing the result.
    flags : sequence of strings, optional
        Flags for the transform. ``'FFTW_UNALIGNED'`` is not supported,
        and ``'FFTW_DESTROY_INPUT'`` is enabled by default. See the
        `pyfftw API documentation`_ for further information.
        Default: ``('FFTW_MEASURE',)``
    threads : positive int, optional
        Number of threads to use. Default: 1
    planning_timelimit : float, optional
        Rough upper limit in seconds for the planning step of the
        transform. The default is no limit.

    Returns
    -------
    out : `range` element
        Result of the transform. If ``out`` was given, the returned
        object is a reference to it.

    References
    ----------
    .. _pyfftw API documentation:
       https://pyfftw.readthedocs.io
    """
    # These options are fixed by this operator's configuration, so any
    # user-supplied values are discarded.
    kwargs.pop('normalise_idft', None)
    # Using `True` here
    kwargs.pop('axes', None)
    kwargs.pop('halfcomplex', None)
    flags = list(_pyfftw_to_local(flag) for flag in kwargs.pop('flags', ('FFTW_MEASURE',)))
    # 'unaligned' is unsupported and 'destroy_input' is implied; strip both.
    try:
        flags.remove('unaligned')
    except ValueError:
        pass
    try:
        flags.remove('destroy_input')
    except ValueError:
        pass
    # First remaining flag determines the planning effort.
    effort = flags[0] if flags else 'measure'
    direction = 'forward' if self.sign == '-' else 'backward'
    # The plan is cached on self so repeated calls reuse it.
    self._fftw_plan = pyfftw_call(x, out, direction=direction, axes=self.axes, halfcomplex=self.halfcomplex, planning_effort=effort, fftw_plan=self._fftw_plan, normalise_idft=True)
    # Need to normalize for 'forward', no way to force pyfftw
    if self.sign == '-':
        out /= np.prod(np.take(self.domain.shape, self.axes))
    return out
|
def add_schema_spec(self, spec: Dict[str, Any], fully_qualified_parent_name: str = None) -> Optional[str]:
    """Add a schema dictionary to the schema loader.

    The given schema is stored against
    ``fully_qualified_parent_name + '.' + spec['name']`` (or just the name
    when no parent is given), and nested specs are added recursively.

    :param spec: Schema specification.
    :param fully_qualified_parent_name: Fully qualified name of the parent.
        If None is passed then the schema is stored against the schema name.
    :return: The fully qualified name against which the spec is stored.
        None is returned if the given spec is not a dictionary or the spec
        does not contain a 'name' key.
    """
    if not isinstance(spec, dict) or ATTRIBUTE_NAME not in spec:
        return None
    name = spec[ATTRIBUTE_NAME]
    fully_qualified_name = (
        name
        if fully_qualified_parent_name is None
        else self.get_fully_qualified_name(fully_qualified_parent_name, name))
    # Basic validation of this spec part before it enters the spec cache.
    # (The guard above already guarantees ``spec`` is a dict, so the
    # original redundant isinstance re-check is dropped.)
    self._error_cache.add(
        validate_required_attributes(fully_qualified_name, spec, ATTRIBUTE_NAME, ATTRIBUTE_TYPE))
    if ATTRIBUTE_TYPE in spec and not Type.contains(spec[ATTRIBUTE_TYPE]):
        self._error_cache.add(
            InvalidTypeError(fully_qualified_name, spec, ATTRIBUTE_TYPE,
                             InvalidTypeError.Reason.TYPE_NOT_DEFINED))
    self._spec_cache[fully_qualified_name] = spec
    # Recurse into nested specs: lists of child specs and directly nested
    # values (non-dict values are rejected by the guard in the recursion).
    for key, val in spec.items():
        if isinstance(val, list):
            for item in val:
                self.add_schema_spec(item, fully_qualified_name)
        self.add_schema_spec(val, fully_qualified_name)
    return spec[ATTRIBUTE_NAME]
|
def fetch(self):
    """Fetch the data from hddtemp daemon.

    Returns the raw response as bytes; an empty bytes object signals that
    the daemon could not be reached (the module is then disabled).
    """
    data = b""
    # Create the socket outside the try block so ``sck`` is always bound
    # when the finally clause closes it (the original could raise
    # NameError in ``finally`` if socket creation itself failed).
    sck = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Taking care of sudden deaths/stops of hddtemp daemon
    try:
        sck.connect((self.host, self.port))
        while True:
            received = sck.recv(4096)
            if not received:
                break
            data += received
    except Exception as e:
        logger.debug("Cannot connect to an HDDtemp server ({}:{} => {})".format(self.host, self.port, e))
        logger.debug("Disable the HDDtemp module. Use the --disable-hddtemp to hide the previous message.")
        if self.args is not None:
            self.args.disable_hddtemp = True
        # Keep the failure sentinel as *bytes* so the emptiness check below
        # is type-consistent (the original mixed str "" with bytes data,
        # making ``data != ""`` always true for bytes).
        data = b""
    finally:
        sck.close()
    if data != b"":
        logger.debug("Received data from the HDDtemp server: {}".format(data))
    return data
|
def define_from_header(cls, image_header):
    """Define class members directly from FITS header.

    Parameters
    ----------
    image_header : instance of hdulist.header
        Header content from a FITS file.

    Returns
    -------
    CsuConfiguration
        New instance (of ``cls``) populated from the CSUP* keywords.

    Raises
    ------
    ValueError
        If an expected CSUP keyword is missing from the header.
    """
    # Instantiate via ``cls`` so subclasses get an instance of their own
    # type (the original ignored ``cls`` and hard-coded CsuConfiguration).
    self = cls()
    # declare lists to store configuration of CSU bars
    self._csu_bar_left = []
    self._csu_bar_right = []
    self._csu_bar_slit_center = []
    self._csu_bar_slit_width = []
    for i in range(EMIR_NBARS):
        ibar = i + 1
        keyword = 'CSUP{}'.format(ibar)
        if keyword not in image_header:
            raise ValueError("Expected keyword " + keyword + " not found!")
        self._csu_bar_left.append(image_header[keyword])
        keyword = 'CSUP{}'.format(ibar + EMIR_NBARS)
        if keyword not in image_header:
            raise ValueError("Expected keyword " + keyword + " not found!")
        # set the same origin as the one employed for _csu_bar_left
        self._csu_bar_right.append(341.5 - image_header[keyword])
        self._csu_bar_slit_center.append((self._csu_bar_left[i] + self._csu_bar_right[i]) / 2)
        self._csu_bar_slit_width.append(self._csu_bar_right[i] - self._csu_bar_left[i])
    return self
|
def get_analyzer_for(language_code, default='snowball'):
    """Get the available language analyzer for the given language code,
    falling back to the bare language prefix and finally to ``default``.

    :param language_code: Django language code
    :param default: The analyzer to return if no language analyzer has
        been found. Defaults to 'snowball'.
    :return: The Haystack language name, e.g. 'german', or the default.
    """
    languages = {
        'ar': 'arabic',
        # '': 'armenian',
        'eu': 'basque',
        'pt-br': 'brazilian',
        'bg': 'bulgarian',
        'ca': 'catalan',
        'zh-hans': 'chinese',
        'zh-hant': 'chinese',
        # 'cjk',
        'cs': 'czech',
        'da': 'danish',
        'nl': 'dutch',
        'en': 'english',
        'fi': 'finnish',
        'fr': 'french',
        'gl': 'galician',
        'de': 'german',
        'el': 'greek',
        'hi': 'hindi',
        'hu': 'hungarian',
        'id': 'indonesian',
        'ga': 'irish',
        'it': 'italian',
        'lv': 'latvian',
        'no': 'norwegian',
        'fa': 'persian',
        'pt': 'portuguese',
        'ro': 'romanian',
        'ru': 'russian',
        # 'sorani',
        'es': 'spanish',
        'sv': 'swedish',
        'tr': 'turkish',
        'th': 'thai',
    }
    # Try the exact code first, then the two-letter prefix.
    for candidate in (language_code, language_code[:2]):
        if candidate in languages:
            return languages[candidate]
    return default
|
def set_context(pid_file, context_info):
    """Set context of running notebook.

    :param pid_file: pid file of the notebook process
    :param context_info: dict of extra context parameters, see comm.py comments
    :raises TypeError: if ``context_info`` is not a dict
    """
    # Validate explicitly instead of using ``assert`` (asserts are stripped
    # under ``python -O``) and compare with isinstance, not ``type ==``.
    if not isinstance(context_info, dict):
        raise TypeError(
            "context_info must be a dict, got {}".format(type(context_info).__name__))
    context_file = get_context_file_name(pid_file)
    with open(context_file, "wt") as f:
        json.dump(context_info, f)
|
def delete_os_in_nwk(self, tenant_id, fw_dict, is_fw_virt=False):
    """Deletes the Openstack In network and update the DB."""
    tenant_name = fw_dict.get('tenant_name')
    try:
        ret = self._delete_os_nwk(tenant_id, tenant_name, "in", is_fw_virt=is_fw_virt)
    except Exception as exc:
        LOG.error("Deletion of In Openstack Network failed tenant "
                  "%(tenant)s Exception %(exc)s",
                  {'tenant': tenant_id, 'exc': str(exc)})
        ret = False
    # Updating the FW DB
    status = fw_const.OS_IN_NETWORK_DEL_SUCCESS if ret else fw_const.OS_IN_NETWORK_DEL_FAIL
    self.update_fw_db_result(tenant_id, os_status=status)
    return ret
|
def predict(self, u=0):
    """Predict next position.

    Parameters
    ----------
    u : ndarray
        Optional control vector. If non-zero, it is multiplied by `B`
        to create the control input into the system.
    """
    # State propagation: x = Fx + Bu
    state_term = dot(self.F, self.x)
    control_term = dot(self.B, u)
    self.x = state_term + control_term
|
def url_is_from_any_domain(url, domains):
    """Return True if the url belongs to any of the given domains"""
    host = parse_url(url).netloc.lower()
    if not host:
        return False
    # Exact match, or a subdomain of one of the given domains.
    return any(
        host == domain.lower() or host.endswith('.%s' % domain.lower())
        for domain in domains
    )
|
def purity(rho: Density) -> bk.BKTensor:
    """Calculate the purity of a mixed quantum state.

    Purity, defined as tr(rho^2), has an upper bound of 1 for a pure state,
    and a lower bound of 1/D (where D is the Hilbert space dimension) for a
    completely mixed state.

    Two closely related measures are the linear entropy, 1 - purity, and the
    participation ratio, 1/purity.
    """
    dim = 2 ** rho.qubit_nb
    matrix = bk.reshape(rho.tensor, [dim, dim])
    return bk.trace(bk.matmul(matrix, matrix))
|
def from_dynacRepr(cls, pynacRepr):
    """Construct a ``AccGap`` instance from the Pynac lattice element"""
    fields = pynacRepr[1][0]
    # Constructor arguments, in positional order.
    gap = cls(
        float(fields[3]),   # L
        float(fields[4]),   # TTF
        float(fields[5]),   # TTF'
        float(fields[13]),  # TTF''
        float(fields[10]),  # E-field
        float(fields[11]),  # phase
        float(fields[14]),  # F
        float(fields[15]),  # attenuation
    )
    # Remaining fields become Param attributes on the gap.
    gap.gapID = Param(val=int(fields[0]), unit=None)
    gap.energy = Param(val=float(fields[1]), unit='MeV')
    gap.beta = Param(val=float(fields[2]), unit=None)
    gap.S = Param(val=float(fields[6]), unit=None)
    gap.SP = Param(val=float(fields[7]), unit=None)
    gap.quadLength = Param(val=float(fields[8]), unit='cm')
    gap.quadStrength = Param(val=float(fields[9]), unit='kG/cm')
    gap.accumLen = Param(val=float(fields[12]), unit='cm')
    return gap
|
def OnSelectAll(self, event):
    """Select all cells event handler"""
    entry_line = self.main_window.entry_line_panel.entry_line_panel.entry_line
    # When the entry line has focus, select its text; otherwise select
    # every cell in the grid.
    if wx.Window.FindFocus() == entry_line:
        entry_line.SelectAll()
    else:
        self.main_window.grid.SelectAll()
|
def commands(self, event):
    """Lists all available commands."""
    names = " ".join(sorted(self.commands_dict().keys()))
    return "Available commands: %s" % names
|
def make_relative_timing_fn():
    """Make a function that logs the duration since it was made."""
    start_time = time.time()

    def log_relative_time():
        # Log the elapsed time as H:MM:SS.ffffff.
        elapsed = time.time() - start_time
        tf.logging.info("Timing: %s", str(datetime.timedelta(seconds=elapsed)))

    return log_relative_time
|
def __configure_annotations(mapper, cls):
    """Run through attributes of the class looking for annotations from
    :func:`annotation_wrapper` and add them to :attr:`cls.__annotations__`
    and :attr:`cls.__annotations_by_attr__`"""
    # annotation name -> list of attribute names carrying it
    annotations = {}
    # attribute name -> list of annotation names on it
    annotations_by_attr = {}
    # An attribute may be defined more than once in base classes. Only handle the first
    processed = set()
    # Loop through all attributes in the class and its base classes, looking for annotations
    for base in cls.__mro__:
        for name, attr in base.__dict__.items():
            if name in processed or name.startswith('__'):
                continue
            # 'data' is a list of string annotations.  Annotation data may be
            # stashed in the module-level __cache__ (keyed by the attribute
            # itself or by its SQLAlchemy property) or stored directly on the
            # attribute; __cache__ entries are consumed (deleted) once read.
            if isinstance(attr, collections.Hashable) and attr in __cache__:
                data = __cache__[attr]
                del __cache__[attr]
            elif isinstance(attr, InstrumentedAttribute) and attr.property in __cache__:
                data = __cache__[attr.property]
                del __cache__[attr.property]
            elif hasattr(attr, '_coaster_annotations'):
                data = attr._coaster_annotations
            else:
                data = None
            if data is not None:
                annotations_by_attr.setdefault(name, []).extend(data)
                for a in data:
                    annotations.setdefault(a, []).append(name)
                processed.add(name)
    # Classes specifying ``__annotations__`` directly isn't supported,
    # so we don't bother preserving existing content, if any.
    if annotations:
        cls.__annotations__ = annotations
    if annotations_by_attr:
        cls.__annotations_by_attr__ = annotations_by_attr
    # Notify listeners that annotation configuration for this class is done.
    annotations_configured.send(cls)
|
def cc(self, cc):
    ''':param cc: Email addresses for the 'Cc' API field.
    :type cc: :keyword:`list` or `str`'''
    # A comma-separated string is split into a list; lists pass through.
    value = cc.split(',') if isinstance(cc, basestring) else cc
    self._cc = value
|
def WritePathStatHistory(self, client_path, stat_entries):
    """Writes a collection of `StatEntry` observed for particular path.

    Args:
      client_path: A `ClientPath` instance.
      stat_entries: A dictionary with timestamps as keys and `StatEntry`
        instances as values.
    """
    history = ClientPathHistory()
    for timestamp, stat_entry in iteritems(stat_entries):
        history.AddStatEntry(timestamp, stat_entry)
    self.MultiWritePathHistory({client_path: history})
|
def stop(self, io_loop):
    """Asynchronously stop the application.

    :param tornado.ioloop.IOLoop io_loop: loop to run until all
        callbacks, timeouts, and queued calls are complete

    Call this method to start the application shutdown process.
    The IOLoop will be stopped once the application is completely
    shut down.
    """
    # True when any shutdown callback returned awaitable work that must
    # finish before the loop can be stopped.
    running_async = False
    shutdown = _ShutdownHandler(io_loop)
    for callback in self.on_shutdown_callbacks:
        try:
            maybe_future = callback(self.tornado_application)
            # Coroutines must be scheduled as tasks so they can be tracked
            # as futures below.
            if asyncio.iscoroutine(maybe_future):
                maybe_future = asyncio.create_task(maybe_future)
            if concurrent.is_future(maybe_future):
                shutdown.add_future(maybe_future)
                running_async = True
        except Exception as error:
            # A failing callback must not abort shutdown of the remaining
            # callbacks; log and continue.
            self.logger.warning('exception raised from shutdown '
                                'callback %r, ignored: %s', callback, error, exc_info=1)
    if not running_async:
        # No asynchronous work pending: signal readiness immediately.
        shutdown.on_shutdown_ready()
|
async def vcx_messages_update_status(msg_json: str):
    """Update the status of messages from the specified connection.

    :param msg_json: JSON string identifying the messages to update.
    :return: result of the underlying ``vcx_messages_update_status`` call.
    """
    logger = logging.getLogger(__name__)
    # The native callback is created once and cached on the function object
    # so repeated calls reuse the same ctypes callback.
    if not hasattr(vcx_messages_update_status, "cb"):
        logger.debug("vcx_messages_update_status: Creating callback")
        vcx_messages_update_status.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32))
    c_msg_json = c_char_p(msg_json.encode('utf-8'))
    # Target status is hard-coded to "MS-106" — presumably the "reviewed"
    # state; TODO confirm against the VCX message-state documentation.
    c_status = c_char_p("MS-106".encode('utf-8'))
    result = await do_call('vcx_messages_update_status', c_status, c_msg_json, vcx_messages_update_status.cb)
    logger.debug("vcx_messages_update_status completed")
    return result
|
def add_subtrack(self, subtrack):
    """Register ``subtrack`` as a child :class:`Track` of this track."""
    # Attach to the generic child tree first, then record it as a subtrack.
    self.add_child(subtrack)
    self.subtracks.append(subtrack)
|
def filter_catalog(catalog, **kwargs):
    """Create a new catalog selected from input based on photometry.

    Parameters
    ----------
    bright_limit : float
        Fraction of catalog based on brightness that should be retained.
        Value of 1.00 means full catalog.
    max_bright : int
        Maximum number of sources to keep regardless of `bright_limit`.
    min_bright : int
        Minimum number of sources to keep regardless of `bright_limit`.
    colname : str
        Name of column to use for selection/sorting.

    Returns
    -------
    new_catalog : `~astropy.table.Table`
        New table which only has the sources that meet the selection criteria.
    """
    # interpret input pars
    bright_limit = kwargs.get('bright_limit', 1.00)
    max_bright = kwargs.get('max_bright', None)
    min_bright = kwargs.get('min_bright', 20)
    colname = kwargs.get('colname', 'vegamag')
    # sort by magnitude (brightest first in magnitude ordering)
    magnitudes = catalog[colname]
    total = len(magnitudes)
    order = np.argsort(magnitudes)
    if max_bright is None:
        max_bright = total
    # apply limits, ensuring no more than the full catalog gets selected
    keep = max(int(total * bright_limit), min_bright)
    keep = min(max_bright, keep, total)
    return catalog[order[:keep]]
|
def insert_after(sequence, offset, new_residues):
    """Return a new sequence with ``new_residues`` inserted right after
    position ``offset`` of the given sequence.

    Parameters
    ----------
    sequence : sequence
        String of amino acids or DNA bases.
    offset : int
        Base 0 offset from start of sequence, after which we should insert
        ``new_residues``.
    new_residues : sequence
    """
    assert 0 <= offset < len(sequence), \
        "Invalid position %d for sequence of length %d" % (offset, len(sequence))
    cut = offset + 1
    return sequence[:cut] + new_residues + sequence[cut:]
|
def tree_entries_from_data(data):
    """Read the binary representation of a git tree and return its items.

    :param data: data block with tree data (as bytes)
    :return: list(tuple(binsha, mode, tree_relative_path), ...)
    """
    ord_zero = ord('0')
    space_ord = ord(' ')
    end = len(data)
    entries = []
    pos = 0
    while pos < end:
        # Parse the octal mode digit by digit up to the separating space.
        # Some git versions write a leading 0, some do not; the object
        # type is extracted from the mode later, so both forms parse the
        # same way here.
        mode = 0
        while byte_ord(data[pos]) != space_ord:
            mode = (mode << 3) + (byte_ord(data[pos]) - ord_zero)
            pos += 1
        pos += 1  # skip the space separator

        # The path is NULL-terminated; git stores names as utf-8, so only
        # decode to unicode when the byte stream really was encoded.
        name_start = pos
        while byte_ord(data[pos]) != 0:
            pos += 1
        name = safe_decode(data[name_start:pos])
        pos += 1  # skip the NULL

        # The raw 20-byte binary SHA-1 follows immediately.
        sha = data[pos:pos + 20]
        pos += 20
        entries.append((sha, mode, name))
    return entries
|
def _cached_pages(self, target_page=-1):
    """Get a page or all pages from page generator, caching results.

    This is necessary because PDFMiner searches recursively for pages,
    so we won't know how many there are until we parse the whole document,
    which we don't want to do until we need to.

    :param target_page: 0-based index of the page to fetch; any negative
        value means exhaust the generator and return the full page list.
    :return: a single page object, ``None`` when ``target_page`` is out of
        range, or the complete list of pages for a negative target.
    """

    # Lazily create the page iterator on first use; the API for getting
    # pages differs between pdfminer releases.
    try:  # pdfminer < 20131022
        self._pages_iter = self._pages_iter or self.doc.get_pages()
    except AttributeError:  # pdfminer >= 20131022
        self._pages_iter = self._pages_iter or PDFPage.create_pages(self.doc)
    if target_page >= 0:
        # Consume the iterator only as far as needed; pages pulled so far
        # stay cached in self._pages for later calls.
        while len(self._pages) <= target_page:
            next_page = next(self._pages_iter)
            if not next_page:
                return None
            # NOTE(review): every fetched page is given page_number 0 here;
            # looks like it should be the page's index -- confirm intent.
            next_page.page_number = 0
            self._pages += [next_page]
        try:
            return self._pages[target_page]
        except IndexError:
            return None
    # Negative target: drain the iterator and return all cached pages.
    self._pages += list(self._pages_iter)
    return self._pages
|
def delegate_method(other, method, name=None):
    """Add a method to the current class that delegates to another method.

    The *other* argument must be a property that returns the instance to
    delegate to. Due to an implementation detail, the property must be defined
    in the current class. The *method* argument specifies a method to delegate
    to. It can be any callable as long as it takes the instances as its first
    argument.

    It is a common paradigm in Gruvi to expose protocol methods onto clients.
    This keeps most of the logic into the protocol, but prevents the user from
    having to type ``'client.protocol.*methodname*'`` all the time.

    For example::

        class MyClient(Client):
            protocol = Client.protocol
            delegate_method(protocol, MyProtocol.method)

    :param other: property on the class under construction that yields the
        delegation target.
    :param method: callable invoked with the resolved target as first arg.
    :param name: optional name for the generated method; defaults to
        ``method.__name__``.
    """

    # This function is meant to be called from inside a class body: the
    # caller's frame locals *are* the class dict under construction, so we
    # can install the generated method directly into it.
    frame = sys._getframe(1)
    classdict = frame.f_locals

    @functools.wraps(method)
    def delegate(self, *args, **kwargs):
        # Resolve the property against this instance to find the target.
        other_self = other.__get__(self)
        return method(other_self, *args, **kwargs)
    # Copy the '__switchpoint__' marker (if set) onto the wrapper so it is
    # flagged the same way as the wrapped method.
    if getattr(method, '__switchpoint__', False):
        delegate.__switchpoint__ = True
    if name is None:
        name = method.__name__
    # Find the attribute name under which *other* is stored in the class
    # body, so the generated docstring can reference it.
    propname = None
    for key in classdict:
        if classdict[key] is other:
            propname = key
            break
    # If we know the property name, replace the docstring with a small
    # reference instead of copying the function docstring.
    if propname:
        qname = getattr(method, '__qualname__', method.__name__)
        if '.' in qname:
            delegate.__doc__ = 'A shorthand for ``self.{propname}.{name}()``.'.format(name=name, propname=propname)
        else:
            delegate.__doc__ = 'A shorthand for ``{name}({propname}, ...)``.'.format(name=name, propname=propname)
    classdict[name] = delegate
|
def adapter_remove_nio_binding(self, adapter_number):
    """Removes an adapter NIO binding.

    Tears down the uBridge bridge for the adapter, takes the VirtualBox
    link down when the VM is running, releases any UDP port held by the
    NIO and detaches it from the adapter.

    :param adapter_number: adapter number
    :returns: NIO instance
    :raises VirtualBoxError: if the adapter number does not exist
    """

    try:
        adapter = self._ethernet_adapters[adapter_number]
    except KeyError:
        raise VirtualBoxError("Adapter {adapter_number} doesn't exist on VirtualBox VM '{name}'".format(name=self.name, adapter_number=adapter_number))
    if self.is_running():
        # Remove the uBridge bridge associated with this adapter.
        yield from self._ubridge_send("bridge delete {name}".format(name="VBOX-{}-{}".format(self._id, adapter_number)))
        vm_state = yield from self._get_vm_state()
        if vm_state == "running":
            # VirtualBox numbers links starting at 1, adapters at 0.
            yield from self._control_vm("setlinkstate{} off".format(adapter_number + 1))
    nio = adapter.get_nio(0)
    if isinstance(nio, NIOUDP):
        # Return the UDP port to the project's port pool.
        self.manager.port_manager.release_udp_port(nio.lport, self._project)
    adapter.remove_nio(0)
    log.info("VirtualBox VM '{name}' [{id}]: {nio} removed from adapter {adapter_number}".format(name=self.name, id=self.id, nio=nio, adapter_number=adapter_number))
    return nio
|
def is_reassignment_pending(self):
    """Return True if there are reassignment tasks pending."""
    pending = self.zk.get_pending_plan()
    if not pending:
        return False
    partitions = pending['partitions']
    self.log.info(
        'Previous re-assignment in progress for {count} partitions.'
        ' Current partitions in re-assignment queue: {partitions}'.format(
            count=len(partitions),
            partitions=partitions,
        )
    )
    return True
|
def p2sh_input_and_witness(outpoint, stack_script, redeem_script, sequence=None):
    '''OutPoint, str, str, int -> (TxIn, InputWitness)
    Create a signed legacy TxIn from a p2pkh prevout
    Create an empty InputWitness for it
    Useful for transactions spending some witness and some legacy prevouts'''
    # When no sequence is given, infer it from the redeem script.
    if sequence is None:
        sequence = guess_sequence(redeem_script)
    serialized_stack = script_ser.serialize(stack_script)
    # The redeem script is hex-serialized first, then serialized as a
    # script blob, matching the original two-step conversion.
    serialized_redeem = script_ser.serialize(script_ser.hex_serialize(redeem_script))
    return tb.make_legacy_input_and_empty_witness(
        outpoint=outpoint,
        stack_script=serialized_stack,
        redeem_script=serialized_redeem,
        sequence=sequence)
|
def update_qualification_score(self, qualification_type_id, worker_id, value):
    """Set a worker's integer score for a qualification type.

    Issues an ``UpdateQualificationScore`` request for the given worker
    (the subject) and qualification type, setting the score to ``value``.
    """
    return self._process_request(
        'UpdateQualificationScore',
        {
            'QualificationTypeId': qualification_type_id,
            'SubjectId': worker_id,
            'IntegerValue': value,
        },
    )
|
def fixminimized(self, alphabet):
    """Re-complete the automaton after pyfst minimization.

    After pyfst minimization, all unused arcs are removed, and all sink
    states are removed. However this may break compatibility. This method
    rebuilds the input/output symbol tables and adds a single non-final
    sink state so every state has an outgoing arc for every symbol.

    Args:
        alphabet (list): The input alphabet
    Returns:
        None
    """

    # Rebuild the symbol tables, assigning consecutive ids starting at 1
    # (presumably id 0 is kept free for epsilon -- OpenFst convention;
    # confirm against the rest of the class).
    insymbols = fst.SymbolTable()
    outsymbols = fst.SymbolTable()
    num = 1
    for char in self.alphabet:
        self.isyms.__setitem__(char, num)
        self.osyms.__setitem__(char, num)
        insymbols.add_symbol(char, num)
        outsymbols.add_symbol(char, num)
        num = num + 1
    self.automaton.set_input_symbols(insymbols)
    self.automaton.set_output_symbols(outsymbols)
    # Add a sink state that absorbs every missing transition.
    endstate = self.add_state()
    for state in self.states:
        for char in alphabet:
            # Does this state already have an arc labelled ``char``?
            found = 0
            for arc in state.arcs:
                if self.isyms.find(arc.ilabel) == char:
                    found = 1
                    break
            if found == 0:
                # Missing transition: route it to the sink state.
                self.add_arc(state.stateid, endstate, char)
    # The sink state is non-accepting and loops on every symbol.
    self[endstate].final = False
    for char in alphabet:
        self.add_arc(endstate, endstate, char)
|
def _gill_king(mat, beta, delta):
    """Backend function for the Gill-King algorithm.

    Performs a modified Cholesky-style factorization of the symmetric
    matrix ``mat``, perturbing the implicit diagonal ``d_vec`` (each entry
    bounded below by ``delta`` and by ``(theta/beta)**2``) so the
    factorization goes through even for indefinite input.

    Args:
        mat: square symmetric matrix (dense numpy array or scipy sparse).
        beta: bound used to keep sub-diagonal entries of the factor small.
        delta: lower bound applied to every diagonal entry of ``d_vec``.

    Returns:
        The lower-triangular factor, already scaled column-wise by
        ``sqrt(d_vec)``.
    """

    size = mat.shape[0]
    # initialize d_vec and lowtri
    if scipy.sparse.issparse(mat):
        # NOTE(review): scipy.sparse.eye returns a DIA-format matrix by
        # default, which does not support the item assignment performed
        # below -- verify the sparse code path is actually exercised.
        lowtri = scipy.sparse.eye(*mat.shape)
    else:
        lowtri = numpy.eye(size)
    d_vec = numpy.zeros(size, dtype=float)
    # there are no inner for loops, everything implemented with
    # vector operations for a reasonable level of efficiency
    for idx in range(size):
        if idx == 0:
            idz = []
            # column index: all columns to left of diagonal
            # d_vec(idz) doesn't work in case idz is empty
        else:
            idz = numpy.s_[:idx]
        djtemp = mat[idx, idx] - numpy.dot(lowtri[idx, idz], d_vec[idz] * lowtri[idx, idz].T)
        # C(idx, idx) in book
        if idx < size - 1:
            idy = numpy.s_[idx + 1:size]
            # row index: all rows below diagonal
            ccol = mat[idy, idx] - numpy.dot(lowtri[idy, idz], d_vec[idz] * lowtri[idx, idz].T)
            # C(idy, idx) in book
            theta = abs(ccol).max()
            # guarantees d_vec(idx) not too small and lowtri(idy, idx) not too
            # big in sufficiently positive definite case, d_vec(idx) = djtemp
            d_vec[idx] = max(abs(djtemp), (theta / beta) ** 2, delta)
            lowtri[idy, idx] = ccol / d_vec[idx]
        else:
            # Last row: no sub-diagonal entries remain to bound.
            d_vec[idx] = max(abs(djtemp), delta)
    # convert to usual output format: replace lowtri by lowtri * sqrt(D) and
    # transpose
    for idx in range(size):
        lowtri[:, idx] = lowtri[:, idx] * numpy.sqrt(d_vec[idx])
    # lowtri = lowtri * diag(sqrt(d_vec)) bad in sparse case
    return lowtri
|
def _get_queue_lock(self, queue, log):
    """Get queue lock for max worker queues.

    For max worker queues it returns a Lock if acquired and whether
    it failed to acquire the lock.

    :param queue: dotted queue name to lock.
    :param log: structured logger used for debug output.
    :returns: tuple ``(queue_lock, failed)`` -- ``queue_lock`` is the
        acquired Semaphore (or None when the queue is unlimited) and
        ``failed`` is True when the lock could not be acquired.
    """

    max_workers = self.max_workers_per_queue
    # Check if this is single worker queue: if any dotted prefix of the
    # queue name is registered as single-worker, cap it at one worker.
    for part in dotted_parts(queue):
        if part in self.single_worker_queues:
            log.debug('single worker queue')
            max_workers = 1
            break
    # Max worker queues require us to get a queue lock before
    # moving tasks
    if max_workers:
        queue_lock = Semaphore(self.connection, self._key(LOCK_REDIS_KEY, queue), self.id, max_locks=max_workers, timeout=self.config['ACTIVE_TASK_UPDATE_TIMEOUT'])
        acquired, locks = queue_lock.acquire()
        if not acquired:
            # Lock contention: signal the caller to skip this queue.
            return None, True
        log.debug('acquired queue lock', locks=locks)
    else:
        queue_lock = None
    return queue_lock, False
|
def generate_templates(self, exercise_questions=False):
    """Create empty .csv files with the right headers.

    Files are placed as siblings of directory `channeldir`.

    :param exercise_questions: when True, also generate the exercise and
        exercise-question templates.
    """
    plan = [
        (self.channelinfo, CHANNEL_INFO_HEADER),
        (self.contentinfo, CONTENT_INFO_HEADER),
    ]
    if exercise_questions:
        plan.append((self.exercisesinfo, EXERCISE_INFO_HEADER))
        plan.append((self.questionsinfo, EXERCISE_QUESTIONS_INFO_HEADER))
    for filename, header in plan:
        self.generate_template(channeldir=self.channeldir,
                               filename=filename,
                               header=header)
|
def _onShortcutPasteLine(self):
    """Paste lines from the clipboard.

    If there is a selection, the selected lines are replaced with the
    clipboard text; otherwise the text is inserted as a new line below
    the current one (or at the current line when the cursor sits at
    column 0). Does nothing when the clipboard is empty.
    """
    # NOTE: the previous version also read self.lines[self._selectedLinesSlice()]
    # into an unused local; that dead read has been removed.
    text = QApplication.clipboard().text()
    if text:
        with self:
            if self.textCursor().hasSelection():
                startBlockNumber, endBlockNumber = self._selectedBlockNumbers()
                del self.lines[self._selectedLinesSlice()]
                self.lines.insert(startBlockNumber, text)
            else:
                line, col = self.cursorPosition
                if col > 0:
                    # Mid-line cursor: insert below the current line.
                    line = line + 1
                self.lines.insert(line, text)
|
def _mantissa ( dval ) :
"""Extract the _ mantissa bits from a double - precision floating
point value ."""
|
bb = _double_as_bytes ( dval )
mantissa = bb [ 1 ] & 0x0f << 48
mantissa += bb [ 2 ] << 40
mantissa += bb [ 3 ] << 32
mantissa += bb [ 4 ]
return mantissa
|
def get_form_class(self):
    """Returns the form class to use in this view.

    Makes sure that the form_field_callback is set to use the
    `formfield_for_dbfield` method and that any custom form classes are
    prepared by the `customize_form_widgets` method.

    The field list comes from (in order of preference) the view's
    fieldsets, an explicit ``form_class.Meta.fields``, or is left empty so
    ``modelform_factory`` falls back to ``'__all__'``. The parent field
    and any read-only fields are excluded from the generated form.
    """

    # Work out which fields the form should contain.
    if self.fieldsets:
        fields = flatten_fieldsets(self.get_fieldsets())
    else:
        if (self.form_class and getattr(self.form_class, 'Meta', None) and getattr(self.form_class.Meta, 'fields', None)):
            fields = self.form_class.Meta.fields
        else:
            fields = []
    # The field pointing back at the parent object is never edited here.
    exclude = None
    if self.parent_field:
        exclude = (self.parent_field,)
    readonly_fields = self.get_readonly_fields()
    if readonly_fields:
        if exclude:
            exclude = list(exclude)
        else:
            exclude = []
        # Strip read-only fields from the form: drop them from an explicit
        # field list when there is one, otherwise add them to the excludes.
        for field in readonly_fields:
            try:
                try:
                    # get_field() is called only as a probe: a miss raises
                    # FieldDoesNotExist; the returned field is unused.
                    f = self.model._meta.get_field(field)
                    if fields:
                        fields.remove(field)
                    else:
                        exclude.append(field)
                except models.FieldDoesNotExist:
                    # Not a model field (e.g. a callable); it can only be
                    # removed from an explicit field list.
                    if fields:
                        fields.remove(field)
            except ValueError:
                # list.remove() raised: the field was not listed anyway.
                pass
    params = {'fields': fields or '__all__', 'exclude': exclude, 'formfield_callback': self.formfield_for_dbfield}
    if self.form_class:
        if issubclass(self.form_class, forms.ModelForm) and getattr(self.form_class._meta, 'model', None):
            model = self.form_class.Meta.model
        else:
            model = self.model
        fc = self.customize_form_widgets(self.form_class, fields=fields)
        params['form'] = fc
    else:
        if self.model is not None:  # If a model has been explicitly provided, use it
            model = self.model
        elif hasattr(self, 'object') and self.object is not None:  # If this view is operating on a single object, use
            # the class of that object
            model = self.object.__class__
        else:  # Try to get a queryset and extract the model class
            # from that
            model = self.get_queryset().model
    return model_forms.modelform_factory(model, **params)
|
def show_progress(self, message=None):
    """If we are in a progress scope, and no log messages have been
    shown, write out another '.' -- or, when *message* is given, rewrite
    the current progress line in place with that message."""
    if not self.in_progress_hanging:
        return
    if message is None:
        sys.stdout.write('.')
        sys.stdout.flush()
        return
    # Pad with spaces so a shorter message fully overwrites the longer
    # message previously shown on this line.
    padding = ''
    if self.last_message:
        padding = ' ' * max(0, len(self.last_message) - len(message))
    sys.stdout.write('\r%s%s%s%s' % (' ' * self.indent, self.in_progress, message, padding))
    sys.stdout.flush()
    self.last_message = message
|
def _canViewChange ( self , proposedViewNo : int ) -> ( bool , str ) :
"""Return whether there ' s quorum for view change for the proposed view
number and its view is less than or equal to the proposed view"""
|
msg = None
quorum = self . quorums . view_change . value
if not self . instance_changes . has_quorum ( proposedViewNo , quorum ) :
msg = '{} has no quorum for view {}' . format ( self , proposedViewNo )
elif not proposedViewNo > self . view_no :
msg = '{} is in higher view more than {}' . format ( self , proposedViewNo )
return not bool ( msg ) , msg
|
def collapse(cls, holomap, ranges=None, mode='data'):
    """Given a map of Overlays, apply all applicable compositors."""
    # Nothing to do when no compositors are registered.
    if cls.definitions == []:
        return holomap
    # Collapse each overlay into a clone of the input map.
    clone = holomap.clone(shared_data=False)
    if ranges:
        pairs = zip(ranges[1], holomap.data.values())
    else:
        pairs = holomap.data.items()
    for key, overlay in pairs:
        clone[key] = cls.collapse_element(overlay, ranges, mode)
    return clone
|
def literal_array(cls, elems):
    """Construct a literal array constant made of the given members.

    All members must share the same type; the result is a constant of
    the corresponding ArrayType.
    """
    elem_types = [el.type for el in elems]
    if len(elem_types) == 0:
        raise ValueError("need at least one element")
    first = elem_types[0]
    if any(t != first for t in elem_types):
        raise TypeError("all elements must have the same type")
    return cls(types.ArrayType(first, len(elem_types)), elems)
|
def from_legacy_urlsafe(cls, urlsafe):
    """Convert urlsafe string to :class:`~google.cloud.datastore.key.Key`.

    This is intended to work with the "legacy" representation of a
    datastore "Key" used within Google App Engine (a so-called
    "Reference"). This assumes that ``urlsafe`` was created within an App
    Engine app via something like ``ndb.Key(...).urlsafe()``.

    :type urlsafe: bytes or unicode
    :param urlsafe: The base64 encoded (ASCII) string corresponding to a
        datastore "Key" / "Reference".

    :rtype: :class:`~google.cloud.datastore.key.Key`.
    :returns: The key corresponding to ``urlsafe``.
    """

    urlsafe = _to_bytes(urlsafe, encoding="ascii")
    # urlsafe keys are generated without '=' padding; restore it so
    # base64 decoding succeeds.
    padding = b"=" * (-len(urlsafe) % 4)
    urlsafe += padding
    raw_bytes = base64.urlsafe_b64decode(urlsafe)
    # Parse the decoded bytes as an App Engine "Reference" protobuf.
    reference = _app_engine_key_pb2.Reference()
    reference.ParseFromString(raw_bytes)
    project = _clean_app(reference.app)
    namespace = _get_empty(reference.name_space, u"")
    # Validate the database id (presumably rejects non-default databases
    # -- confirm against _check_database_id).
    _check_database_id(reference.database_id)
    flat_path = _get_flat_path(reference.path)
    return cls(*flat_path, project=project, namespace=namespace)
|
def epsilon_rules_restore(root):  # type: (Nonterminal) -> Nonterminal
    """Transform parsed tree to contain epsilon rules originally removed from the grammar.

    Walks the tree post-order looking for EpsilonRemovedRule markers and,
    for each one, splices the original (pre-removal) rule back in: the
    parents and existing children are re-wired onto the recreated rule,
    and the subtree that originally rewrote to epsilon is rebuilt at the
    recorded replace_index.

    :param root: Root of the parsed tree.
    :return: Modified tree including epsilon rules.
    """

    items = Traversing.post_order(root)
    items = filter(lambda x: isinstance(x, EpsilonRemovedRule), items)
    for rule in items:
        # create original rule
        created_rule = rule.from_rule()  # type: Rule
        # attach parents
        for s in rule.from_symbols:  # type: Nonterminal
            s._set_to_rule(created_rule)
            created_rule._from_symbols.append(s)
        # attach children up to replace index (that will contain epsilon)
        for i in range(rule.replace_index):
            ch = rule.to_symbols[i]  # type: Nonterminal
            ch._set_from_rule(created_rule)
            created_rule._to_symbols.append(ch)
        # add symbols originally rewrote to epsilon: rebuild the removed
        # subtree from the recorded backtrack information.
        symb = _restore_tree_for(created_rule.right[rule.replace_index], rule.backtrack)  # type: Nonterminal
        created_rule._to_symbols.append(symb)
        symb._set_from_rule(created_rule)
        # attach rest of children
        for i in range(rule.replace_index, len(rule.to_symbols)):
            ch = rule.to_symbols[i]  # type: Nonterminal
            ch._set_from_rule(created_rule)
            created_rule._to_symbols.append(ch)
    return root
|
def clear(self):
    """Remove all sources from this configuration."""
    super(LazyConfig, self).clear()
    # Also drop any lazily queued prefix/suffix sources.
    self._lazy_suffix = []
    self._lazy_prefix = []
|
def _schema_line ( args ) :
"""Implements the BigQuery schema magic used to display table / view schemas .
Args :
args : the arguments following ' % bigquery schema ' .
Returns :
The HTML rendering for the schema ."""
|
# TODO ( gram ) : surely we could just return the schema itself ?
name = args [ 'table' ] if args [ 'table' ] else args [ 'view' ]
if name is None :
raise Exception ( 'No table or view specified; cannot show schema' )
schema = _get_schema ( name )
if schema :
html = _repr_html_table_schema ( schema )
return IPython . core . display . HTML ( html )
else :
raise Exception ( '%s is not a schema and does not appear to have a schema member' % name )
|
def create(backbone: ModelFactory, input_block: typing.Optional[ModelFactory] = None):
    """Vel factory function.

    Builds a StochasticPolicyModelFactory from the given backbone; when no
    explicit input block factory is supplied, an identity factory is used.
    """
    effective_input = input_block if input_block is not None else IdentityFactory()
    return StochasticPolicyModelFactory(input_block=effective_input, backbone=backbone)
|
def group_remove(name, **kwargs):
    """Remove routing group from the storage."""
    ctx = Context(**kwargs)
    action_kwargs = {
        'storage': ctx.repo.create_secure_service('storage'),
        'name': name,
    }
    ctx.execute_action('group:remove', **action_kwargs)
|
def enable_save_reply_handlers(self, delay=120, filename="./.handler-saves/reply.save"):
    """Enable saving of reply handlers (saving is disabled by default).

    :param delay: Delay between changes in handlers and saving
    :param filename: Filename of save file
    """
    self.reply_saver = Saver(self.reply_handlers, filename, delay)
|
def create_node_rating_counts_settings(sender, **kwargs):
    """Create node rating count and participation settings for new nodes."""
    created = kwargs['created']
    node = kwargs['instance']
    if created:
        # Tasks run in background unless settings.CELERY_ALWAYS_EAGER is
        # True; if it is False a celery worker must be running, otherwise
        # the tasks won't be executed.
        for related_model in (NodeRatingCount, NodeParticipationSettings):
            create_related_object.delay(related_model, {'node': node})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.