signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def get_metrics(self):
    """Return the Model metrics.

    Walks the node tree rooted at ``self.root_node`` and counts nodes of
    the "SearchFile" and "SearchOccurence" families.

    :return: Nodes metrics, keyed by family name.
    :rtype: dict
    """
    counts = {"SearchFile": 0, "SearchOccurence": 0}
    for node in foundations.walkers.nodes_walker(self.root_node):
        if node.family in counts:
            counts[node.family] += 1
    return counts
def _x_tune_ok(self, channel_max, frame_max, heartbeat):
    """Negotiate connection tuning parameters (AMQP connection.tune-ok).

    Sends the client's connection tuning parameters to the server.
    Certain fields are negotiated, others provide capability information.

    :param channel_max: short — negotiated maximum number of channels the
        client will use per connection; may not be higher than the value
        specified by the server.
    :param frame_max: long — negotiated maximum frame size; zero means the
        client does not impose any specific limit. Until negotiated, both
        peers must accept frames of up to 4096 octets (the minimum
        non-zero value for frame-max).
    :param heartbeat: short — desired heartbeat delay in seconds; zero (or
        None) means the client does not want a heartbeat.
    """
    payload = AMQPWriter()
    payload.write_short(channel_max)
    payload.write_long(frame_max)
    # Treat a None/falsy heartbeat as "no heartbeat wanted" (0).
    payload.write_short(heartbeat if heartbeat else 0)
    # (10, 31) is the class/method id pair for connection.tune-ok.
    self._send_method((10, 31), payload)
    self._wait_tune_ok = False
def physical_conversion(quantity, pop=False):
    """Decorator to convert a method's output to physical coordinates.

    :param quantity: kind of quantity the decorated method returns, one of
        ['position', 'velocity', 'time', ...]; a name containing '_' (e.g.
        'position_kpc') means the value is already in that physical unit
        and always carries units.
    :param pop: if True, remove 'use_physical', 'ro' and 'vo' from kwargs
        before calling the wrapped method.
    """
    def wrapper(method):
        @wraps(method)
        def wrapped(*args, **kwargs):
            use_physical = kwargs.get('use_physical', True) \
                and not kwargs.get('log', False)
            # Parse whether ro or vo should be considered to be set, because
            # the return value will have units anyway
            # (like in Orbit methods that return numbers with units, like ra)
            roSet = '_' in quantity  # _ in quantity name means always units
            voSet = '_' in quantity  # _ in quantity name means always units
            use_physical = use_physical or '_' in quantity  # _ in quantity name means always units
            # Resolve ro: explicit kwarg > object's _ro > first list element's
            # _ro (for lists of Potentials); convert Quantities to kpc floats.
            ro = kwargs.get('ro', None)
            if ro is None and \
                    (roSet or (hasattr(args[0], '_roSet') and args[0]._roSet)):
                ro = args[0]._ro
            if ro is None and isinstance(args[0], list) \
                    and hasattr(args[0][0], '_roSet') and args[0][0]._roSet:
                # For lists of Potentials
                ro = args[0][0]._ro
            if _APY_LOADED and isinstance(ro, units.Quantity):
                ro = ro.to(units.kpc).value
            # Resolve vo the same way; convert Quantities to km/s floats.
            vo = kwargs.get('vo', None)
            if vo is None and \
                    (voSet or (hasattr(args[0], '_voSet') and args[0]._voSet)):
                vo = args[0]._vo
            if vo is None and isinstance(args[0], list) \
                    and hasattr(args[0][0], '_voSet') and args[0][0]._voSet:
                # For lists of Potentials
                vo = args[0][0]._vo
            if _APY_LOADED and isinstance(vo, units.Quantity):
                vo = vo.to(units.km / units.s).value
            # Override Quantity output?
            _apy_units = kwargs.get('quantity', _APY_UNITS)
            # Remove ro and vo kwargs if necessary
            if pop and 'use_physical' in kwargs:
                kwargs.pop('use_physical')
            if pop and 'ro' in kwargs:
                kwargs.pop('ro')
            if pop and 'vo' in kwargs:
                kwargs.pop('vo')
            # Only convert when physical output is requested AND every scale
            # the quantity needs (ro and/or vo) is available.
            if use_physical and \
                    not (_voNecessary[quantity.lower()] and vo is None) and \
                    not (_roNecessary[quantity.lower()] and ro is None):
                from galpy.orbit import Orbit
                if isinstance(args[0], Orbit):
                    print_physical_warning()
                # Pick the conversion factor ``fac`` (internal -> physical)
                # and, if astropy Quantity output is on, the unit ``u``.
                if quantity.lower() == 'time':
                    fac = time_in_Gyr(vo, ro)
                    if _apy_units:
                        u = units.Gyr
                elif quantity.lower() == 'position':
                    fac = ro
                    if _apy_units:
                        u = units.kpc
                elif quantity.lower() == 'position_kpc':
                    # already in kpc
                    fac = 1.
                    if _apy_units:
                        u = units.kpc
                elif quantity.lower() == 'velocity':
                    fac = vo
                    if _apy_units:
                        u = units.km / units.s
                elif quantity.lower() == 'velocity2':
                    fac = vo ** 2.
                    if _apy_units:
                        u = (units.km / units.s) ** 2
                elif quantity.lower() == 'velocity_kms':
                    # already in km/s
                    fac = 1.
                    if _apy_units:
                        u = units.km / units.s
                elif quantity.lower() == 'frequency':
                    if kwargs.get('kmskpc', False) and not _apy_units:
                        fac = freq_in_kmskpc(vo, ro)
                    else:
                        fac = freq_in_Gyr(vo, ro)
                        if _apy_units:
                            u = units.Gyr ** -1.
                elif quantity.lower() == 'frequency-kmskpc':
                    fac = freq_in_kmskpc(vo, ro)
                    if _apy_units:
                        u = units.km / units.s / units.kpc
                elif quantity.lower() == 'action':
                    fac = ro * vo
                    if _apy_units:
                        u = units.kpc * units.km / units.s
                elif quantity.lower() == 'energy':
                    fac = vo ** 2.
                    if _apy_units:
                        u = units.km ** 2. / units.s ** 2.
                elif quantity.lower() == 'angle':
                    # in rad
                    fac = 1.
                    if _apy_units:
                        u = units.rad
                elif quantity.lower() == 'angle_deg':
                    # already in deg
                    fac = 1.
                    if _apy_units:
                        u = units.deg
                elif quantity.lower() == 'proper-motion_masyr':
                    # already in mas/yr
                    fac = 1.
                    if _apy_units:
                        u = units.mas / units.yr
                elif quantity.lower() == 'force':
                    fac = force_in_kmsMyr(vo, ro)
                    if _apy_units:
                        u = units.km / units.s / units.Myr
                elif quantity.lower() == 'density':
                    fac = dens_in_msolpc3(vo, ro)
                    if _apy_units:
                        u = units.Msun / units.pc ** 3
                elif quantity.lower() == 'numberdensity':
                    fac = 1 / ro ** 3.
                    if _apy_units:
                        u = 1 / units.kpc ** 3
                elif quantity.lower() == 'velocity2surfacedensity':
                    fac = surfdens_in_msolpc2(vo, ro) * vo ** 2
                    if _apy_units:
                        u = units.Msun / units.pc ** 2 * (units.km / units.s) ** 2
                elif quantity.lower() == 'surfacedensity':
                    fac = surfdens_in_msolpc2(vo, ro)
                    if _apy_units:
                        u = units.Msun / units.pc ** 2
                elif quantity.lower() == 'numbersurfacedensity':
                    fac = 1. / ro ** 2.
                    if _apy_units:
                        u = 1 / units.kpc ** 2
                elif quantity.lower() == 'surfacedensitydistance':
                    # * 1000. converts the distance part from kpc to pc
                    fac = surfdens_in_msolpc2(vo, ro) * ro * 1000.
                    if _apy_units:
                        u = units.Msun / units.pc
                elif quantity.lower() == 'mass':
                    fac = mass_in_msol(vo, ro)
                    if _apy_units:
                        u = units.Msun
                elif quantity.lower() == 'forcederivative':
                    fac = freq_in_Gyr(vo, ro) ** 2.
                    if _apy_units:
                        u = units.Gyr ** -2.
                elif quantity.lower() == 'phasespacedensity':
                    fac = 1. / vo ** 3. / ro ** 3.
                    if _apy_units:
                        u = 1 / (units.km / units.s) ** 3 / units.kpc ** 3
                elif quantity.lower() == 'phasespacedensity2d':
                    fac = 1. / vo ** 2. / ro ** 2.
                    if _apy_units:
                        u = 1 / (units.km / units.s) ** 2 / units.kpc ** 2
                elif quantity.lower() == 'phasespacedensityvelocity':
                    fac = 1. / vo ** 2. / ro ** 3.
                    if _apy_units:
                        u = 1 / (units.km / units.s) ** 2 / units.kpc ** 3
                elif quantity.lower() == 'phasespacedensityvelocity2':
                    fac = 1. / vo / ro ** 3.
                    if _apy_units:
                        u = 1 / (units.km / units.s) / units.kpc ** 3
                elif quantity.lower() == 'dimensionless':
                    fac = 1.
                    if _apy_units:
                        u = units.dimensionless_unscaled
                out = method(*args, **kwargs)
                if out is None:
                    return out
                if _apy_units:
                    return units.Quantity(out * fac, unit=u)
                else:
                    return out * fac
            else:
                # No conversion requested/possible: return internal units.
                return method(*args, **kwargs)
        return wrapped
    return wrapper
def account_unpin(self, id):
    """Unpin / un-endorse a user.

    Returns a `relationship dict`_ containing the updated relationship to
    the user.
    """
    user_id = self.__unpack_id(id)
    endpoint = '/api/v1/accounts/{0}/unpin'.format(str(user_id))
    return self.__api_request('POST', endpoint)
def prepare_fcma_data(images, conditions, mask1, mask2=None,
                      random=RandomType.NORANDOM, comm=MPI.COMM_WORLD):
    """Prepare data for correlation-based computation and analysis.

    Generate epochs of interest on rank 0, then broadcast to all workers.

    Parameters
    ----------
    images: Iterable[SpatialImage]
        Data.
    conditions: List[UniqueLabelConditionSpec]
        Condition specification.
    mask1: np.ndarray
        Mask to apply to each image.
    mask2: Optional[np.ndarray]
        Mask to apply to each image. If not specified, the returned
        raw_data2 is None and the self-correlation on raw_data1 will be
        computed.
    random: Optional[RandomType]
        Randomize the image data within subject or not.
    comm: MPI.Comm
        MPI communicator to use for MPI operations.

    Returns
    -------
    raw_data1: list of 2D array in shape [epoch length, nVoxels]
        The data organized in epochs, specified by the first mask;
        len(raw_data1) equals the number of epochs.
    raw_data2: Optional, list of 2D array in shape [epoch length, nVoxels]
        The data organized in epochs, specified by the second mask if any.
    labels: list of 1D array
        The condition labels of the epochs.
    """
    rank = comm.Get_rank()
    labels = []
    raw_data1 = []
    raw_data2 = []
    if rank == 0:
        # Only the root rank does the (expensive) masking and epoch split.
        logger.info('start to apply masks and separate epochs')
        if mask2 is not None:
            masks = (mask1, mask2)
            activity_data1, activity_data2 = zip(*multimask_images(images,
                                                                   masks,
                                                                   np.float32))
            _randomize_subject_list(activity_data2, random)
            raw_data2, _ = _separate_epochs(activity_data2, conditions)
        else:
            activity_data1 = list(mask_images(images, mask1, np.float32))
        _randomize_subject_list(activity_data1, random)
        raw_data1, labels = _separate_epochs(activity_data1, conditions)
    time1 = time.time()
    # Non-root ranks have an empty list here; the broadcast fixes that up.
    raw_data_length = len(raw_data1)
    raw_data_length = comm.bcast(raw_data_length)
    # broadcast the data subject by subject to prevent size overflow
    for i in range(raw_data_length):
        if rank != 0:
            # Placeholder slots that the broadcast below fills in.
            raw_data1.append(None)
            if mask2 is not None:
                raw_data2.append(None)
        raw_data1[i] = comm.bcast(raw_data1[i], root=0)
        if mask2 is not None:
            raw_data2[i] = comm.bcast(raw_data2[i], root=0)
    if comm.Get_size() > 1:
        labels = comm.bcast(labels, root=0)
    if rank == 0:
        time2 = time.time()
        logger.info('data broadcasting done, takes %.2f s' % (time2 - time1))
    if mask2 is None:
        raw_data2 = None
    return raw_data1, raw_data2, labels
def from_line(cls, line, lineno=None):
    """Parse one line of an OpenSSH known_hosts file.

    Extracts the host names, the key type, and the key data. Lines are
    expected to have no leading/trailing whitespace; comments and empty
    lines must be filtered out before calling.

    :param str line: a line from an OpenSSH known_hosts file
    :param lineno: optional line number, used only for log messages
    :return: a new entry instance, or ``None`` if the line is unusable
    """
    log = get_logger("paramiko.hostkeys")
    fields = line.split(" ")
    if len(fields) < 3:
        # Bad number of fields
        msg = "Not enough fields found in known_hosts in line {} ({!r})"
        log.info(msg.format(lineno, line))
        return None
    names, keytype, key = fields[:3]
    names = names.split(",")
    # Decide what kind of key we're looking at and create an object
    # to hold it accordingly.
    try:
        key_bytes = b(key)
        if keytype == "ssh-rsa":
            parsed_key = RSAKey(data=decodebytes(key_bytes))
        elif keytype == "ssh-dss":
            parsed_key = DSSKey(data=decodebytes(key_bytes))
        elif keytype in ECDSAKey.supported_key_format_identifiers():
            parsed_key = ECDSAKey(data=decodebytes(key_bytes),
                                  validate_point=False)
        elif keytype == "ssh-ed25519":
            parsed_key = Ed25519Key(data=decodebytes(key_bytes))
        else:
            log.info("Unable to handle key of type {}".format(keytype))
            return None
    except binascii.Error as e:
        raise InvalidHostKey(line, e)
    return cls(names, parsed_key)
def auth_add(user, auth):
    '''Add authorization to user.

    user : string
        username
    auth : string
        authorization name (comma-separated list accepted)

    CLI Example:

    .. code-block:: bash

        salt '*' rbac.auth_add martine solaris.zone.manage
        salt '*' rbac.auth_add martine solaris.zone.manage,solaris.mail.mailq
    '''
    ret = {}

    ## validate auths
    auths = auth.split(',')
    known_auths = auth_list().keys()
    # Silently drop auth names the system does not know about; they are
    # reported as 'Unknown' in the return value below.
    valid_auths = [r for r in auths if r in known_auths]
    log.debug('rbac.auth_add - auths=%s, known_auths=%s, valid_auths=%s',
              auths, known_auths, valid_auths,)

    ## update user auths
    if valid_auths:
        # usermod -A replaces the auth list, so merge the user's current
        # auths with the new ones (set() de-duplicates).
        res = __salt__['cmd.run_all']('usermod -A "{auths}" {login}'.format(
            login=user,
            auths=','.join(set(auth_get(user, False) + valid_auths)),
        ))
        if res['retcode'] > 0:
            ret['Error'] = {
                'retcode': res['retcode'],
                'message': res['stderr'] if 'stderr' in res else res['stdout'],
            }
            return ret

    ## update return value
    # Re-read the user's auths to verify what actually got applied.
    active_auths = auth_get(user, False)
    for a in auths:
        if a not in valid_auths:
            ret[a] = 'Unknown'
        elif a in active_auths:
            ret[a] = 'Added'
        else:
            ret[a] = 'Failed'
    return ret
def create(self, _attributes=None, **attributes):
    """Create, persist and return a new instance of the related model.

    The new instance gets its foreign key set to the parent's key before
    being saved.

    :param attributes: The attributes
    :type attributes: dict
    :rtype: Model
    """
    # Merge the positional attribute dict (if given) over the kwargs.
    if _attributes is not None:
        attributes.update(_attributes)
    new_model = self._related.new_instance(attributes)
    new_model.set_attribute(self.get_plain_foreign_key(),
                            self.get_parent_key())
    new_model.save()
    return new_model
def notify_one(correlation_id, component, args):
    """Notify a specific component.

    To be notified, components must implement the [[INotifiable]] interface.
    If they don't, the call to this method has no effect.

    :param correlation_id: (optional) transaction id to trace execution
        through the call chain.
    :param component: the component that is to be notified.
    :param args: notification arguments.
    """
    # Identity check is the idiomatic None test (was ``== None``).
    if component is None:
        return
    if isinstance(component, INotifiable):
        component.notify(correlation_id, args)
def y_offset(self, container):
    """Vertical baseline offset (up is positive)."""
    parent = self.parent
    base = parent.y_offset(container) if hasattr(parent, 'y_offset') else 0
    if not self.is_script(container):
        return base
    style = self._style(container)
    # The Y offset should only change once for the nesting level
    # where the position style is set, hence we don't recursively
    # get the position style using self.get_style('position')
    return base + parent.height(container) * self.position[style.position]
def parse_status(status_log, encoding='utf-8'):
    """Parse the status log of OpenVPN.

    :param status_log: The content of the status log.
    :type status_log: :class:`str`
    :param encoding: Optional. The encoding of the status log, used only
        when ``status_log`` is given as bytes.
    :type encoding: :class:`str`
    :return: The instance of :class:`.models.Status`
    """
    text = (status_log.decode(encoding)
            if isinstance(status_log, bytes) else status_log)
    return LogParser.fromstring(text).parse()
def fill(duration, point):
    """Fill the point's subsequence with repetitions of itself and set the
    ``duration`` of each point.

    :param duration: duration (in 1/64 notes) to assign to each point
    :param point: mapping with a 'sequence' and a total DURATION_64
    :return: the mutated ``point``
    """
    # Floor division: the repetition count must be an integer. The original
    # used ``/``, which yields a float on Python 3 and makes
    # ``sequence * count`` raise TypeError for list-like sequences.
    repeats = point[DURATION_64] // (8 * duration)
    point['sequence'] = point['sequence'] * repeats | add({DURATION_64: duration})
    return point
def parse(self, filepath, content):
    """Parse opened settings content using the YAML parser.

    Args:
        filepath (str): Settings object, depends from backend.
        content (str): Settings content from opened file, depends from
            backend.

    Raises:
        boussole.exceptions.SettingsBackendError: If parser can not decode
            a valid YAML object.

    Returns:
        dict: Dictionnary containing parsed setting elements.
    """
    try:
        # safe_load: plain yaml.load without an explicit Loader is
        # deprecated and can construct arbitrary Python objects from
        # untrusted settings files.
        parsed = yaml.safe_load(content)
    except yaml.YAMLError as exc:
        msg = "No YAML object could be decoded from file: {}\n{}"
        raise SettingsBackendError(msg.format(filepath, exc))
    return parsed
def update(self, currentTemp, targetTemp):
    """Calculate the PID output value for the given setpoint and feedback.

    :param currentTemp: measured process value (PV)
    :param targetTemp: desired set point (SP)
    :return: clamped controller output
    """
    # In this implementation, Ki includes the dt multiplier term and Kd
    # includes the dt divisor term. This is typical practice in industry.
    self.targetTemp = targetTemp
    self.error = targetTemp - currentTemp
    self.P_value = self.Kp * self.error
    # It is common practice to compute the derivative term against PV
    # instead of de/dt, because de/dt spikes when the set point changes.
    # Note 'previous' - 'current': that's desired, how the math works out.
    self.D_value = self.Kd * (self.Derivator - currentTemp)
    self.Derivator = currentTemp
    # Accumulate the integral term with anti-windup clamping.
    self.Integrator = self.Integrator + self.error
    if self.Integrator > self.Integrator_max:
        self.Integrator = self.Integrator_max
    elif self.Integrator < self.Integrator_min:
        self.Integrator = self.Integrator_min
    self.I_value = self.Integrator * self.Ki
    result = self.P_value + self.I_value + self.D_value
    # Clamp the final output to the actuator limits.
    if result > self.Output_max:
        result = self.Output_max
    if result < self.Output_min:
        result = self.Output_min
    return result
def from_path(klass, path, tabix_path=None, record_checks=None,
              parsed_samples=None):
    """Create new :py:class:`Reader` from path.

    .. note::
        If you use the ``parsed_samples`` feature and you write out
        records then you must not change the ``FORMAT`` of the record.

    :param path: the path to load from (converted to ``str`` for
        compatibility with ``path.py``)
    :param tabix_path: optional string with path to TBI index; automatic
        inferral from ``path`` will be tried on the fly if not given
    :param list record_checks: record checks to perform, can contain
        'INFO' and 'FORMAT'
    """
    checks = record_checks or []
    path_str = str(path)
    if path_str.endswith(".gz"):
        stream = gzip.open(path_str, "rt")
        if not tabix_path:
            guessed = path_str + ".tbi"
            # Guessing the index path failed if the file does not exist.
            tabix_path = guessed if os.path.exists(guessed) else None
    else:
        stream = open(path_str, "rt")
    return klass.from_stream(
        stream=stream,
        path=path_str,
        tabix_path=tabix_path,
        record_checks=checks,
        parsed_samples=parsed_samples,
    )
def json_description_metadata(description):
    """Return metadata from a JSON-formatted image description as dict.

    Also accepts the old-style ``shape=(...)`` form. Raise ValueError if
    description is of unknown format.

    >>> description = '{"shape": [256, 256, 3], "axes": "YXS"}'
    >>> json_description_metadata(description)  # doctest: +SKIP
    {'shape': [256, 256, 3], 'axes': 'YXS'}
    >>> json_description_metadata('shape=(256, 256, 3)')
    {'shape': (256, 256, 3)}
    """
    if description.startswith('shape='):
        # Old-style 'shaped' description; not JSON.
        # Strip the leading "shape=(" and trailing ")".
        dims = tuple(int(part) for part in description[7:-1].split(','))
        return dict(shape=dims)
    if description.startswith('{') and description.endswith('}'):
        # JSON description.
        return json.loads(description)
    raise ValueError('invalid JSON image description', description)
def get_records_for_code(self, meth_code, incl=True, use_slice=False,
                         sli=None, strict_match=True):
    """Use regex to see if meth_code is in the method_codes ":"-delimited list.

    If incl == True, return all records WITH meth_code.
    If incl == False, return all records WITHOUT meth_code.
    If strict_match == True, return only records with the exact meth_code.
    If strict_match == False, return records that contain the meth_code
    partial string (i.e., "DE-").
    Not inplace.
    """
    # (must use fillna to replace np.nan with False for indexing)
    df = sli.copy() if use_slice else self.df.copy()
    # If meth_code is not provided, return the unchanged dataframe.
    if not meth_code:
        return df
    if not strict_match:
        # Grab any record that contains any part of meth_code.
        cond = df['method_codes'].str.contains(meth_code).fillna(False)
    else:
        # Grab only an exact match: the code must be followed by the ':'
        # delimiter, whitespace, or the end of the string. Raw string
        # avoids invalid-escape warnings for \s and \Z.
        pattern = re.compile(r'{}(?=:|\s|\Z)'.format(meth_code))
        cond = df['method_codes'].str.contains(pattern).fillna(False)
    # Return a copy of records with (or without) that method code.
    return df[cond] if incl else df[~cond]
def _setup_redis(self):
    """Set up the Redis client connection.

    On success, sets ``self.redis_conn`` and ``self.redis_connected``.
    A missing setting is logged and swallowed; any other failure is
    logged and re-raised. Does nothing when ``self.closed`` is set.
    """
    if not self.closed:
        try:
            self.logger.debug("Creating redis connection to host " +
                              str(self.settings['REDIS_HOST']))
            self.redis_conn = redis.StrictRedis(host=self.settings['REDIS_HOST'],
                                                port=self.settings['REDIS_PORT'],
                                                db=self.settings['REDIS_DB'])
            # info() forces an actual round-trip, so a bad host/port fails here
            # rather than on first use.
            self.redis_conn.info()
            self.redis_connected = True
            self.logger.info("Successfully connected to redis")
        except KeyError as e:
            # A required REDIS_* setting is missing from self.settings.
            self.logger.error('Missing setting named ' + str(e),
                              {'ex': traceback.format_exc()})
        except:
            # Any other connection error: log with traceback, then re-raise.
            self.logger.error("Couldn't initialize redis client.",
                              {'ex': traceback.format_exc()})
            raise
def bezier_value_check(coeffs, s_val, rhs_val=0.0):
    r"""Check if a polynomial in the Bernstein basis evaluates to a value.

    Intended for root checking: for a polynomial :math:`f(s)` and a value
    :math:`s_{\ast}`, is :math:`f\left(s_{\ast}\right) = r`?

    Restated as a matrix-rank problem: rewrite

    .. math::

        f(s) = (1 - s)^n g\left(\sigma\right)

    for :math:`\sigma = \frac{s}{1 - s}` with :math:`g` in the
    power/monomial basis; then :math:`g(\sigma) = 0` iff
    :math:`C_g - \sigma I` is singular (:math:`C_g` the companion matrix
    of :math:`g`). For numerical stability we test singularity to
    precision via the (cheap-to-compute) 1-norm reciprocal condition
    number: singular if :math:`1 / \kappa_1 < m \varepsilon`, which is a
    stronger requirement than the usual 2-norm criterion since
    :math:`\kappa_2 < \kappa_1`.

    Args:
        coeffs (numpy.ndarray): A 1D array of coefficients in
            the Bernstein basis representing a polynomial.
        s_val (float): The value to check on the polynomial:
            :math:`f(s) = r`.
        rhs_val (Optional[float]): The value to check that the polynomial
            evaluates to. Defaults to ``0.0``.

    Returns:
        bool: Indicates if :math:`f\left(s_{\ast}\right) = r` (where
        :math:`s_{\ast}` is ``s_val`` and :math:`r` is ``rhs_val``).
    """
    if s_val == 1.0:
        # f(1) is exactly the last Bernstein coefficient; also avoids the
        # division by (1 - s_val) below.
        return coeffs[-1] == rhs_val
    # Shift so the check becomes a root check: (f - r)(s) == 0.
    shifted_coeffs = coeffs - rhs_val
    sigma_coeffs, _, effective_degree = _get_sigma_coeffs(shifted_coeffs)
    if effective_degree == 0:
        # This means that all coefficients except the ``(1 - s)^n``
        # term are zero, so we have ``f(s) = C (1 - s)^n``. Since we know
        # ``s != 1``, this can only be zero if ``C == 0``.
        return shifted_coeffs[0] == 0.0
    sigma_val = s_val / (1.0 - s_val)
    lu_mat, one_norm = lu_companion(-sigma_coeffs[::-1], sigma_val)
    rcond = _reciprocal_condition_number(lu_mat, one_norm)
    # "Is a root?" IFF singular IF ``1 / kappa_1 < m epsilon``
    return rcond < effective_degree * _SINGULAR_EPS
def logpt(self, t, xp, x):
    """Log-density of X_t given X_{t-1}.

    Abstract: subclasses must override this transition density.
    """
    cls_name = self.__class__.__name__
    raise NotImplementedError(err_msg_missing_trans % cls_name)
def __update_mouse(self, milliseconds):
    """Use the mouse to control selection of the buttons.

    :param milliseconds: elapsed time, forwarded to each button's update.
    """
    for button in self.gui_buttons:
        was_hovering = button.is_mouse_hovering
        button.update(milliseconds)
        # Provides capabilities for the mouse to select a button if the
        # mouse is the focus of input.
        if was_hovering == False and button.is_mouse_hovering:
            # The user has just moved the mouse over the button.
            # Set it as active.
            old_index = self.current_index
            self.current_index = self.gui_buttons.index(button)
            self.__handle_selections(old_index, self.current_index)
        elif Ragnarok.get_world().Mouse.is_clicked(self.mouse_select_button) and button.is_mouse_hovering:
            # The main mouse button has just depressed; click the
            # currently hovered button.
            button.clicked_action()
def padded_variance_explained(predictions, labels,
                              weights_fn=common_layers.weights_all):
    """Explained variance, also known as R^2.

    Pads predictions and labels to a common shape, then computes the
    weighted R^2 = 1 - SS_res / SS_tot along with the total weight
    (the usual (metric, weight) pair expected by the metrics framework).
    """
    predictions, labels = common_layers.pad_with_zeros(predictions, labels)
    targets = labels
    weights = weights_fn(targets)
    # Weighted mean of the targets.
    y_bar = tf.reduce_mean(weights * targets)
    # Total and residual sums of squares.
    tot_ss = tf.reduce_sum(weights * tf.pow(targets - y_bar, 2))
    res_ss = tf.reduce_sum(weights * tf.pow(targets - predictions, 2))
    r2 = 1. - res_ss / tot_ss
    return r2, tf.reduce_sum(weights)
def _validate_dict(self, input_dict, schema_dict, path_to_root, object_title=''):
    '''a helper method for recursively validating keys in dictionaries

    :param input_dict: dictionary from the input being validated
    :param schema_dict: corresponding dictionary from the schema
    :param path_to_root: dot/bracket path of this dict within the model
    :param object_title: optional title used in error reports
    :return: input_dict (with defaults filled in for empty optional fields)
    '''
    # reconstruct key path to current dictionary in model
    # (list indexes are normalized to [0] because rules are stored per-item)
    rules_top_level_key = re.sub('\[\d+\]', '[0]', path_to_root)
    map_rules = self.keyMap[rules_top_level_key]
    # construct list error report template
    map_error = {
        'object_title': object_title,
        'model_schema': self.schema,
        'input_criteria': map_rules,
        'failed_test': 'value_datatype',
        'input_path': path_to_root,
        'error_value': 0,
        'error_code': 4001,
    }
    # validate map size
    # (-51 compensates for the overhead of the serialized wrapper string)
    if 'min_size' in map_rules.keys():
        input_size = sys.getsizeof(json.dumps(str(input_dict)).replace(' ', '')) - 51
        if input_size < map_rules['min_size']:
            map_error['failed_test'] = 'min_size'
            map_error['error_value'] = input_size
            map_error['error_code'] = 4031
            raise InputValidationError(map_error)
    if 'max_size' in map_rules.keys():
        input_size = sys.getsizeof(json.dumps(str(input_dict)).replace(' ', '')) - 51
        if input_size > map_rules['max_size']:
            map_error['failed_test'] = 'max_size'
            map_error['error_value'] = input_size
            map_error['error_code'] = 4032
            raise InputValidationError(map_error)
    # construct lists of keys in input dictionary
    # (input_keys holds full paths, input_key_list the bare key names)
    input_keys = []
    input_key_list = []
    for key in input_dict.keys():
        error_dict = {
            'object_title': object_title,
            'model_schema': self.schema,
            'input_criteria': self.keyMap[rules_top_level_key],
            'failed_test': 'key_datatype',
            'input_path': path_to_root,
            'error_value': key,
            'error_code': 4004,
        }
        error_dict['input_criteria']['key_datatype'] = 'string'
        # all keys must be strings; non-string keys fail immediately
        if path_to_root == '.':
            if not isinstance(key, str):
                input_key_name = path_to_root + str(key)
                error_dict['input_path'] = input_key_name
                raise InputValidationError(error_dict)
            input_key_name = path_to_root + key
        else:
            if not isinstance(key, str):
                input_key_name = path_to_root + '.' + str(key)
                error_dict['input_path'] = input_key_name
                raise InputValidationError(error_dict)
            input_key_name = path_to_root + '.' + key
        input_keys.append(input_key_name)
        input_key_list.append(key)
    # TODO: validate top-level key and values against identical to reference
    # TODO: run lambda function and call validation
    # construct lists of keys in schema dictionary
    # (max_* = all schema keys, req_* = only the required ones)
    max_keys = []
    max_key_list = []
    req_keys = []
    req_key_list = []
    for key in schema_dict.keys():
        if path_to_root == '.':
            schema_key_name = path_to_root + key
        else:
            schema_key_name = path_to_root + '.' + key
        max_keys.append(schema_key_name)
        max_key_list.append(key)
        rules_schema_key_name = re.sub('\[\d+\]', '[0]', schema_key_name)
        if self.keyMap[rules_schema_key_name]['required_field']:
            req_keys.append(schema_key_name)
            req_key_list.append(key)
    # validate existence of required fields
    missing_keys = set(req_keys) - set(input_keys)
    if missing_keys:
        error_dict = {
            'object_title': object_title,
            'model_schema': self.schema,
            'input_criteria': self.keyMap[rules_top_level_key],
            'failed_test': 'required_field',
            'input_path': path_to_root,
            'error_value': list(missing_keys),
            'error_code': 4002,
        }
        error_dict['input_criteria']['required_keys'] = req_keys
        raise InputValidationError(error_dict)
    # validate existence of extra fields (only when extra_fields is disallowed)
    extra_keys = set(input_keys) - set(max_keys)
    if extra_keys and not self.keyMap[rules_top_level_key]['extra_fields']:
        extra_key_list = []
        for key in extra_keys:
            # strip the path prefix so the report shows bare key names
            pathless_key = re.sub(rules_top_level_key, '', key, count=1)
            extra_key_list.append(pathless_key)
        error_dict = {
            'object_title': object_title,
            'model_schema': self.schema,
            'input_criteria': self.keyMap[rules_top_level_key],
            'failed_test': 'extra_fields',
            'input_path': path_to_root,
            'error_value': extra_key_list,
            'error_code': 4003,
        }
        error_dict['input_criteria']['maximum_scope'] = max_key_list
        raise InputValidationError(error_dict)
    # validate datatype of value
    for key, value in input_dict.items():
        if path_to_root == '.':
            input_key_name = path_to_root + key
        else:
            input_key_name = path_to_root + '.' + key
        rules_input_key_name = re.sub('\[\d+\]', '[0]', input_key_name)
        if input_key_name in max_keys:
            input_criteria = self.keyMap[rules_input_key_name]
            error_dict = {
                'object_title': object_title,
                'model_schema': self.schema,
                'input_criteria': input_criteria,
                'failed_test': 'value_datatype',
                'input_path': input_key_name,
                'error_value': value,
                'error_code': 4001,
            }
            # map the value's class to one of the model's datatype names;
            # unrecognized classes fail validation
            try:
                value_index = self._datatype_classes.index(value.__class__)
            except:
                error_dict['error_value'] = value.__class__.__name__
                raise InputValidationError(error_dict)
            value_type = self._datatype_names[value_index]
            if input_criteria['value_datatype'] == 'null':
                # 'null' criteria accepts any datatype
                pass
            else:
                if value_type != input_criteria['value_datatype']:
                    raise InputValidationError(error_dict)
            # call appropriate validation sub-routine for datatype of value
            if value_type == 'boolean':
                input_dict[key] = self._validate_boolean(value, input_key_name, object_title)
            elif value_type == 'number':
                input_dict[key] = self._validate_number(value, input_key_name, object_title)
            elif value_type == 'string':
                input_dict[key] = self._validate_string(value, input_key_name, object_title)
            elif value_type == 'map':
                input_dict[key] = self._validate_dict(value, schema_dict[key], input_key_name, object_title)
            elif value_type == 'list':
                input_dict[key] = self._validate_list(value, schema_dict[key], input_key_name, object_title)
    # set default values for empty optional fields
    for key in max_key_list:
        if key not in input_key_list:
            indexed_key = max_keys[max_key_list.index(key)]
            if indexed_key in self.components.keys():
                if 'default_value' in self.components[indexed_key]:
                    input_dict[key] = self.components[indexed_key]['default_value']
    return input_dict
def nlmsg_alloc(len_=default_msg_size):
    """Allocate a new Netlink message with maximum payload size specified.

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/msg.c#L299

    Allocates a new Netlink message without any further payload. The maximum
    payload size defaults to resource.getpagesize() or as otherwise specified
    with nlmsg_set_default_size().

    Returns:
        Newly allocated Netlink message (nl_msg class instance).
    """
    # Never allocate less than the size of the fixed netlink header.
    size = max(libnl.linux_private.netlink.nlmsghdr.SIZEOF, len_)
    msg = nl_msg()
    msg.nm_refcnt = 1
    msg.nm_nlh = libnl.linux_private.netlink.nlmsghdr(bytearray(b'\0') * size)
    msg.nm_protocol = -1
    msg.nm_size = size
    msg.nm_nlh.nlmsg_len = nlmsg_total_size(0)
    _LOGGER.debug('msg 0x%x: Allocated new message, maxlen=%d', id(msg), size)
    return msg
def reorder(self, dst_order, arr, src_order=None):
    """Reorder the output array to match that needed by the viewer."""
    # Fall back to the orders known by the viewer / ourselves when unspecified.
    if dst_order is None:
        dst_order = self.viewer.rgb_order
    if src_order is None:
        src_order = self.rgb_order
    # Nothing to do when source and destination channel orders already agree.
    if src_order == dst_order:
        return arr
    return trcalc.reorder_image(dst_order, arr, src_order)
def to_json_(self) -> str:
    """Convert the main dataframe to json.

    :return: json data
    :rtype: str
    :example: ``ds.to_json_()``
    """
    try:
        # Delegate rendering to the shared export helper.
        return self._build_export(pytablewriter.JsonTableWriter)
    except Exception as e:
        self.err(e, "Can not convert data to json")
def EnsureGdbPosition(self, pid, tid, frame_depth):
    """Make sure our position matches the request.

    Args:
        pid: The process ID of the target process
        tid: The python thread ident of the target thread
        frame_depth: The 'depth' of the requested frame in the frame stack

    Raises:
        PositionUnavailableException: If the requested process, thread or frame
            can't be found or accessed.
    """
    position = [pid, tid, frame_depth]
    if not pid:
        return
    if not self.IsAttached():
        try:
            self.Attach(position)
        except gdb.error as exc:
            # BUG FIX: exceptions have no `.message` attribute in Python 3;
            # use str(exc) to get the error text.
            raise PositionUnavailableException(str(exc))
    if gdb.selected_inferior().pid != pid:
        # Attached to the wrong inferior: reattach to the requested one.
        self.Detach()
        try:
            self.Attach(position)
        except gdb.error as exc:
            raise PositionUnavailableException(str(exc))
    if tid:
        # Walk the interpreter's thread-state list looking for the thread.
        tstate_head = GdbCache.INTERP_HEAD['tstate_head']
        for tstate in self._IterateChainedList(tstate_head, 'next'):
            if tid == tstate['thread_id']:
                self.selected_tstate = tstate
                break
        else:
            raise PositionUnavailableException('Thread %s does not exist.' % str(tid))
        stack_head = self.selected_tstate['frame']
        if frame_depth is not None:
            # Frames are chained newest-first via f_back; reverse so that
            # index 0 is the outermost frame.
            frames = list(self._IterateChainedList(stack_head, 'f_back'))
            frames.reverse()
            try:
                self.selected_frame = frames[frame_depth]
            except IndexError:
                raise PositionUnavailableException('Stack is not %s frames deep' % str(frame_depth + 1))
def read_response(self):
    "Read the response from a previously sent command"
    try:
        response = self._parser.read_response()
    except socket.timeout:
        # A timed-out socket is unusable; drop it before reporting.
        self.disconnect()
        raise TimeoutError("Timeout reading from %s:%s" % (self.host, self.port))
    except socket.error:
        self.disconnect()
        err = sys.exc_info()[1]
        raise ConnectionError("Error while reading from %s:%s : %s" % (self.host, self.port, err.args))
    except:  # noqa: E722
        # Any other failure also invalidates the connection; re-raise as-is.
        self.disconnect()
        raise
    # Server-side errors are returned as objects; surface them as exceptions.
    if isinstance(response, ResponseError):
        raise response
    return response
def popup(self, title, callfn, initialdir=None, filename=None):
    """Let user select and load file."""
    self.cb = callfn
    dialog = self.filew
    dialog.set_title(title)
    if initialdir:
        dialog.set_current_folder(initialdir)
    if filename:
        # set_current_name pre-fills the name entry (vs. set_filename).
        dialog.set_current_name(filename)
    dialog.show()
def query(cls, file, offset=None, limit=None, api=None):
    """Queries genome markers on a file.

    :param file: Genome file - Usually bam file.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :return: Collection object.
    """
    # Fall back to the class-level API instance when none is supplied.
    api = api or cls._API
    file = Transform.to_file(file)
    return super(Marker, cls)._query(
        url=cls._URL['query'],
        offset=offset,
        limit=limit,
        file=file,
        fields='_all',
        api=api,
    )
def connection_from_list(data, args=None, **kwargs):
    '''A simple function that accepts an array and connection arguments, and
    returns a connection object for use in GraphQL. It uses array offsets as
    pagination, so pagination will only work if the array is static.'''
    length = len(data)
    return connection_from_list_slice(
        data,
        args,
        slice_start=0,
        list_length=length,
        list_slice_length=length,
        **kwargs
    )
def get_all_roles(self, view=None):
    """Get all roles in the service.

    Delegates to the module-level ``roles.get_all_roles`` with this
    service's resource root, name and cluster name.

    @param view: View to materialize ('full' or 'summary')
    @return: A list of ApiRole objects.
    """
    return roles.get_all_roles(self._get_resource_root(), self.name, self._get_cluster_name(), view)
def goto_definitions(self):
    """Return the definition of a the symbol under the cursor via exact match.

    Goes to that definition with a buffer.
    """
    element = self._evaluator.get_definition()
    if element is None:
        return None
    return BaseDefinition(self._user_context, element)
def parse_name(name):
    """Parse a gs:// URL into the bucket and item names.

    Args:
        name: a GCS URL of the form gs://bucket or gs://bucket/item
    Returns:
        The bucket name (with no gs:// prefix), and the item name if present.
        If the name could not be parsed returns None for both.
    """
    bucket = None
    item = None
    match = re.match(_STORAGE_NAME, name)
    if match:
        # The last two groups hold bucket and item; the first group is the
        # optional 'gs://' prefix.
        bucket = match.group(1)
        item = match.group(2)
        if item is not None:
            # Drop the leading '/' separating bucket from item.
            item = item[1:]
    else:
        # Not a full URL; try to interpret the whole string as an object name.
        match = re.match('(' + _OBJECT_NAME + ')', name)
        if match:
            item = match.group(1)
    return bucket, item
def fit_mle(self, data, k_array=None):
    """%(super)s

    In addition to data, gives an optional keyword argument k_array
    containing the values to search for k_agg. A brute force search is then
    used to find the parameter k_agg. When omitted, the default search grid
    is ``np.arange(0.1, 100, 0.1)``.
    """
    # todo: check and mention in docstring biases of mle for k_agg
    if k_array is None:
        # BUG FIX: the original used `k_array=np.arange(...)` as the default
        # argument — evaluated once at import time and shared across every
        # call (classic mutable-default pitfall). Build the grid per call.
        k_array = np.arange(0.1, 100, 0.1)
    data = np.array(data)
    mu = np.mean(data)
    return mu, _solve_k_from_mu(data, k_array, nbinom_nll, mu)
def stdformD(D, Cd, M, dimN=2):
    """Reshape dictionary array (`D` in :mod:`.admm.cbpdn` module, `X` in
    :mod:`.admm.ccmod` module) to internal standard form.

    Parameters
    ----------
    D : array_like
        Dictionary array
    Cd : int
        Size of dictionary channel index
    M : int
        Number of filters in dictionary
    dimN : int, optional (default 2)
        Number of problem spatial indices

    Returns
    -------
    Dr : ndarray
        Reshaped dictionary array
    """
    # Keep the first dimN (spatial) axes, then channel, a singleton axis,
    # and the filter axis.
    spatial_shape = D.shape[0:dimN]
    return D.reshape(spatial_shape + (Cd, 1, M))
def connection(self, connection):
    """Change the dynamo connection.

    Subscribes the capacity listener on the new connection, unsubscribes it
    from the previous connection (if any), and resets all connection-derived
    caches.
    """
    if connection is not None:
        connection.subscribe("capacity", self._on_capacity_data)
        connection.default_return_capacity = True
    if self._connection is not None:
        # BUG FIX: the original called `connection.unsubscribe(...)` here,
        # immediately removing the subscription it had just added to the NEW
        # connection and leaving the OLD connection's listener in place.
        self._connection.unsubscribe("capacity", self._on_capacity_data)
    self._connection = connection
    self._cloudwatch_connection = None
    self.cached_descriptions = {}
def symlink_to(self, target, target_is_directory=False):
    """Make this path a symlink pointing to the given path.

    Note the order of arguments (self, target) is the reverse of os.symlink's.
    """
    # Delegate to the filesystem accessor; target_is_directory is forwarded
    # as-is (self is the link being created, target is what it points to).
    self._accessor.symlink(target, self, target_is_directory)
def generate_key_pair(secret=None):
    """Generates a cryptographic key pair.

    Args:
        secret (:class:`string`): A secret that serves as a seed

    Returns:
        :class:`~bigchaindb.common.crypto.CryptoKeypair`: A
        :obj:`collections.namedtuple` with named fields
        :attr:`~bigchaindb.common.crypto.CryptoKeypair.private_key` and
        :attr:`~bigchaindb.common.crypto.CryptoKeypair.public_key`.
    """
    if not secret:
        # No seed given: produce a fully random keypair.
        return generate_keypair()
    # Derive a "stable" keypair deterministically from the secret seed.
    raw_pair = ed25519_generate_key_pair_from_secret(secret)
    return CryptoKeypair(*(part.decode() for part in raw_pair))
def corr_heatmap(x, mask_half=True, cmap="RdYlGn_r", vmin=-1, vmax=1,
                 linewidths=0.5, square=True, figsize=(10, 10), **kwargs):
    """Wrapper around seaborn.heatmap for visualizing correlation matrix.

    Parameters
    ----------
    x : DataFrame
        Underlying data (not a correlation matrix)
    mask_half : bool, default True
        If True, mask (whiteout) the upper right triangle of the matrix

    All other parameters passed to seaborn.heatmap:
    https://seaborn.pydata.org/generated/seaborn.heatmap.html

    Example
    -------
    # Generate some correlated data
    >>> import numpy as np
    >>> import pandas as pd
    >>> k = 10
    >>> size = 400
    >>> mu = np.random.randint(0, 10, k).astype(float)
    >>> r = np.random.ranf(k ** 2).reshape((k, k)) * 5
    >>> df = pd.DataFrame(np.random.multivariate_normal(mu, r, size=size))
    >>> corr_heatmap(df, figsize=(6, 6))
    """
    # Compute the correlation matrix once (the original computed it twice).
    corr = x.corr()
    if mask_half:
        # Hide the upper-right triangle; it mirrors the lower-left.
        mask = np.zeros_like(corr.values)
        mask[np.triu_indices_from(mask)] = True
    else:
        mask = None
    with sns.axes_style("white"):
        return sns.heatmap(corr, cmap=cmap, vmin=vmin, vmax=vmax,
                           linewidths=linewidths, square=square, mask=mask,
                           **kwargs)
def get_inventory(self, keys=None):
    """Create an Ansible inventory based on python dicts and lists.

    The returned value is a dict in which every key represents a group
    and every value is a list of entries for that group.

    Args:
        keys (list of str): Path to the keys that will be used to
            create groups.

    Returns:
        dict: dict based Ansible inventory
    """
    keys = keys or ['vm-type', 'groups', 'vm-provider']
    inventory = defaultdict(list)
    for vm in self.prefix.get_vms().values():
        entry = self._generate_entry(vm)
        spec = vm.spec
        for key in keys:
            value = self.get_key(key, spec)
            if value is None:
                continue
            # Normalize scalars to a one-element list so both cases produce
            # one 'key=value' group per value.
            values = value if isinstance(value, list) else [value]
            for sub_value in values:
                inventory['{}={}'.format(key, sub_value)].append(entry)
        # Special case for the group key: hosts belong to the bare group
        # name "group", not to "group=something".
        for group in spec.get('groups', []):
            inventory[group].append(entry)
    return inventory
def process_python(self, path):
    """Process a python file."""
    pylint_stdout, pylint_stderr = epylint.py_run(
        ' '.join([str(path)] + self.pylint_opts), return_std=True)
    print(pylint_stderr.read())
    # Count occurrences of each interesting pylint message category.
    counts = {}
    for line in pylint_stdout:
        sys.stderr.write(line)
        key = line.split(':')[-1].split('(')[0].strip()
        if key not in self.pylint_cats:
            continue
        counts[key] = counts.get(key, 0) + 1
    sys.stderr.write('\n')
    self.python_map[str(path)] = counts
def retention_policy_exists(database, name, user=None, password=None, host=None, port=None):
    '''Check if a retention policy exists.

    database
        The database to operate on.
    name
        Name of the policy to modify.

    CLI Example:

    .. code-block:: bash

        salt '*' influxdb08.retention_policy_exists metrics default
    '''
    # A missing policy is reported as None by retention_policy_get.
    return retention_policy_get(database, name, user, password, host, port) is not None
def preview_data(self, flow_id, key=None, count=True, total=True):
    '''Get keys or number of series for a prospective dataset query allowing for
    keys with multiple values per dimension.

    It downloads the complete list of series keys for a dataflow rather than
    using constraints and DSD. This feature is, however, not supported by all
    data providers. ECB and UNSD are known to work.

    Args:
        flow_id (str): dataflow id
        key (dict): optional key mapping dimension names to values or lists of
            values. Must have been validated before. It is not checked if key
            values are actually valid dimension names and values. Default: {}
        count (bool): if True (default), return the number of series
            of the dataset designated by flow_id and key. If False,
            the actual keys are returned as a pandas DataFrame or dict of
            dataframes, depending on the value of 'total'.
        total (bool): if True (default), return the aggregate number
            of series or a single dataframe (depending on the value of
            'count'). If False, return a dict mapping keys to dataframes of
            series keys. E.g., if key={'COUNTRY': 'IT+CA+AU'}, the dict will
            have 3 items describing the series keys for each country
            respectively. If 'count' is True, dict values will be int rather
            than PD.DataFrame.
    '''
    all_keys = self.series_keys(flow_id)
    # Handle the special case that no key is provided: the whole dataflow.
    if not key:
        if count:
            return all_keys.shape[0]
        else:
            return all_keys
    # So there is a key specifying at least one dimension value.
    # Wrap single values in 1-elem list for uniform treatment.
    key_l = {k: [v] if isinstance(v, str_type) else v for k, v in key.items()}
    # Order dim_names that are present in the key (preserving column order
    # of all_keys, not the dict order of `key`).
    dim_names = [k for k in all_keys if k in key]
    # Drop columns that are not in the key.
    key_df = all_keys.loc[:, dim_names]
    if total:
        # Boolean mask: a row matches iff every keyed dimension's value is
        # in the allowed set for that dimension (AND across columns).
        bool_series = reduce(and_, (key_df.isin(key_l)[col] for col in dim_names))
        if count:
            # NOTE(review): value_counts()[True] raises KeyError when no row
            # matches — presumably callers only query non-empty selections.
            return bool_series.value_counts()[True]
        else:
            return all_keys[bool_series]
    else:
        # Dict of value combinations as dict keys: one entry per element of
        # the cartesian product of the per-dimension value lists.
        key_product = product(*(key_l[k] for k in dim_names))
        # Replace key tuples by namedtuples for readable dict keys.
        PartialKey = namedtuple_factory('PartialKey', dim_names)
        matches = {PartialKey(k): reduce(and_, (key_df.isin({k1: [v1] for k1, v1 in zip(dim_names, k)})[col] for col in dim_names)) for k in key_product}
        if not count:
            # dict mapping each key to DataFrame with selected key-set
            return {k: all_keys[v] for k, v in matches.items()}
        else:
            # Number of series per key
            return {k: v.value_counts()[True] for k, v in matches.items()}
def signal_handler(sig, frame):
    """SIGINT handler.

    Disconnect all active clients and then invoke the original signal handler.
    """
    # Iterate over a copy: disconnecting mutates connected_clients.
    for client in list(connected_clients):
        if client.is_asyncio_based():
            client.start_background_task(client.disconnect, abort=True)
        else:
            client.disconnect(abort=True)
    return original_signal_handler(sig, frame)
def _get_device_id(self, bus):
    """Find the device id"""
    service = bus.get(SERVICE_BUS, PATH)
    device_ids = service.devices()
    # With exactly one device and no selection configured, use it directly.
    if self.device is None and self.device_id is None and len(device_ids) == 1:
        return device_ids[0]
    # Otherwise look the device up by its configured name.
    for dev_id in device_ids:
        self._dev = bus.get(SERVICE_BUS, DEVICE_PATH + "/%s" % dev_id)
        if self.device == self._dev.name:
            return dev_id
    return None
def _cond(self, unused_x, unused_cumul_out, unused_prev_state, unused_cumul_state, cumul_halting, unused_iteration, unused_remainder):
    """The `cond` of the `tf.while_loop`.

    Keeps looping while any element's cumulative halting value is still
    below 1; the other loop variables are unused by the condition.
    """
    return tf.reduce_any(cumul_halting < 1)
def _compute_non_linear_term ( self , pga4nl , bnl ) :
"""Compute non - linear term ,
equation ( 8a ) to ( 8c ) , pag 108.""" | fnl = np . zeros ( pga4nl . shape )
a1 = 0.03
a2 = 0.09
pga_low = 0.06
# equation ( 8a )
idx = pga4nl <= a1
fnl [ idx ] = bnl [ idx ] * np . log ( pga_low / 0.1 )
# equation ( 8b )
idx = np . where ( ( pga4nl > a1 ) & ( pga4nl <= a2 ) )
delta_x = np . log ( a2 / a1 )
delta_y = bnl [ idx ] * np . log ( a2 / pga_low )
c = ( 3 * delta_y - bnl [ idx ] * delta_x ) / delta_x ** 2
d = - ( 2 * delta_y - bnl [ idx ] * delta_x ) / delta_x ** 3
fnl [ idx ] = bnl [ idx ] * np . log ( pga_low / 0.1 ) + c * ( np . log ( pga4nl [ idx ] / a1 ) ** 2 ) + d * ( np . log ( pga4nl [ idx ] / a1 ) ** 3 )
# equation ( 8c )
idx = pga4nl > a2
fnl [ idx ] = np . squeeze ( bnl [ idx ] ) * np . log ( pga4nl [ idx ] / 0.1 )
return fnl |
def settrace_forked():
    '''When creating a fork from a process in the debugger, we need to reset the whole debugger environment!'''
    # Drop the inherited debugger singleton and per-thread info: the child
    # must not reuse the parent's debugger state.
    from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder
    GlobalDebuggerHolder.global_dbg = None
    threading.current_thread().additional_info = None
    from _pydevd_frame_eval.pydevd_frame_eval_main import clear_thread_local_info
    # dispatch() obtains the host/port the child should reconnect to.
    host, port = dispatch()
    import pydevd_tracing
    pydevd_tracing.restore_sys_set_trace_func()
    if port is not None:
        # Mark this process as a fresh, not-yet-connected fork.
        global connected
        connected = False
        global forked
        forked = True
        custom_frames_container_init()
        if clear_thread_local_info is not None:
            clear_thread_local_info()
        # Re-enable tracing against the debugger server without suspending.
        settrace(host, port=port, suspend=False, trace_only_current_thread=False, overwrite_prev_trace=True, patch_multiprocessing=True, )
def collect_gallery_files(examples_dirs):
    """Collect python files from the gallery example directories."""
    # Flatten every .py file found by walking each example directory.
    return [
        os.path.join(base, fname)
        for example_dir in examples_dirs
        for base, _dirs, fnames in os.walk(example_dir)
        for fname in fnames
        if fname.endswith('.py')
    ]
def existence_check(text, list, err, msg, ignore_case=True, str=False,
                    max_errors=float("inf"), offset=0, require_padding=True,
                    dotall=False, excluded_topics=None, join=False):
    """Build a checker that blacklists certain words.

    Scans `text` for any word in `list`, emitting one error tuple
    (start, end, err, formatted msg, None) per match, truncated to
    `max_errors`. With `require_padding`, matches must be delimited by
    start/end of text or non-word characters. Texts whose topic is in
    `excluded_topics` are skipped entirely.
    (NOTE: parameters `list` and `str` shadow builtins but are kept for
    interface compatibility.)
    """
    flags = 0
    # Collapse all whitespace runs in the message template.
    msg = " ".join(msg.split())
    if ignore_case:
        flags = flags | re.IGNORECASE
    if str:
        flags = flags | re.UNICODE
    if dotall:
        flags = flags | re.DOTALL
    if require_padding:
        # BUG FIX: the original pattern ended with "[\W$]" — a character
        # class matching a non-word char or a LITERAL '$', so a blacklisted
        # word at the very end of the text never matched. "(?:\W|$)"
        # correctly matches a non-word character OR end-of-string.
        regex = u"(?:^|\\W){}(?:\\W|$)"
    else:
        regex = u"{}"
    errors = []
    # If the topic of the text is in the excluded list, return immediately.
    if excluded_topics:
        tps = topics(text)
        if any(t in excluded_topics for t in tps):
            return errors
    rx = "|".join(regex.format(w) for w in list)
    for m in re.finditer(rx, text, flags=flags):
        txt = m.group(0).strip()
        errors.append((m.start() + 1 + offset, m.end() + offset, err,
                       msg.format(txt), None))
    errors = truncate_to_max(errors, max_errors)
    return errors
def certify_set(value, certifier=None, min_len=None, max_len=None, include_collections=False, required=True, ):
    """Certifier for a set.

    :param set value:
        The set to be certified.
    :param func certifier:
        A function to be called on each value in the list to check that it is valid.
    :param int min_len:
        The minimum acceptable length for the list. If None, the minimum length is not checked.
    :param int max_len:
        The maximum acceptable length for the list. If None, the maximum length is not checked.
    :param bool include_collections:
        Include types from collections.
    :param bool required:
        Whether the value can be `None`. Defaults to True.
    :return:
        The certified set.
    :rtype:
        set
    :raises CertifierTypeError:
        The type is invalid
    :raises CertifierValueError:
        The valid is invalid
    """
    certify_bool(include_collections, required=True)
    certify_iterable(
        value=value,
        types=tuple([set, MutableSet, Set]) if include_collections else tuple([set]),
        certifier=certifier,
        min_len=min_len,
        max_len=max_len,
        schema=None,
        required=required,
    )
    # BUG FIX: the docstring promises the certified set is returned, but the
    # original returned None. Returning `value` is backward-compatible.
    return value
def connect(self, pattern, presenter, **kwargs):
    """Connect the given pattern with the given presenter

    :param pattern: URI pattern
    :param presenter: target presenter name
    :param kwargs: route arguments (see :class:`.WWebRoute`)

    :return: None
    """
    # Routes are matched in registration order; append preserves priority.
    self.__routes.append(WWebRoute(pattern, presenter, **kwargs))
def mark_best_classification(text_log_error, classified_failure):
    """Wrapper for setting best_classification on both TextLogError and FailureLine.

    Set the given ClassifiedFailure as best_classification for the given
    TextLogError. Handles the duplication of best_classification on FailureLine
    so you don't have to!
    """
    text_log_error.metadata.best_classification = classified_failure
    # Persist only the changed column.
    text_log_error.metadata.save(update_fields=['best_classification'])
    # Keep the search index in sync with the updated classification.
    text_log_error.metadata.failure_line.elastic_search_insert()
def plot_phens_blits(phen_grid, patches, **kwargs):
    """A version of plot_phens designed to be used in animations. Takes a 2D
    array of phenotypes and a list of matplotlib patch objects that have
    already been added to the current axes and recolors the patches based on
    the array."""
    denom, palette = get_kwargs(phen_grid, kwargs)
    grid = color_grid(phen_grid, palette, denom)
    for i, row in enumerate(grid):
        for j, cell in enumerate(row):
            # Patches are stored row-major, matching the grid layout.
            patch = patches[i * len(row) + j]
            if cell == -1:
                # Sentinel -1 marks an empty cell: hide its patch.
                patch.set_visible(False)
            else:
                patch.set_facecolor(cell)
                patch.set_visible(True)
    return patches
def list(self, product, store_view=None, identifierType=None):
    """Retrieve product image list

    :param product: ID or SKU of product
    :param store_view: Code or ID of store view
    :param identifierType: Defines whether the product or SKU value is
        passed in the "product" parameter.

    :return: `list` of `dict`
    """
    # Thin wrapper over the Magento XML-RPC API call.
    return self.call('catalog_product_attribute_media.list', [product, store_view, identifierType])
def stop_refresh(self):
    """Stop redrawing the canvas at the previously set timed interval."""
    self.logger.debug("stopping timed refresh")
    # Signal the refresh loop to exit, then cancel the pending timer.
    self.rf_flags['done'] = True
    self.rf_timer.clear()
def set_scene_color(self, scene_id, color):
    """reconfigure a scene by scene ID"""
    # Reject unknown scene ids up front.
    if scene_id not in self.state.scenes:
        err_msg = "Requested to recolor scene {sceneNum}, which does not exist".format(sceneNum=scene_id)
        logging.info(err_msg)
        return (False, 0, err_msg)
    old_scene = self.state.scenes[scene_id]
    self.state.scenes[scene_id] = old_scene._replace(color=color)
    sequence_number = self.zmq_publisher.publish_scene_color(scene_id, color)
    logging.debug("Recolored scene {sceneNum}".format(sceneNum=scene_id))
    if scene_id == self.state.activeSceneId:
        # Apply the new color to the running animation right away.
        self.state.activeAnimation.set_color(color)
        # TODO: make it more sensible, e.g. call only if static scene
        self._do_next_frame()
    return (True, sequence_number, "OK")
def _get_ann ( dbs , features ) :
"""Gives format to annotation for html table output""" | value = ""
for db , feature in zip ( dbs , features ) :
value += db + ":" + feature
return value |
def add(self, name, obj):
    '''Register a new feature serializer.

    The feature type should be one of the fixed set of feature
    representations, and `name` should be one of ``StringCounter``,
    ``SparseVector``, or ``DenseVector``. `obj` is a describing
    object with three fields: `constructor` is a callable that
    creates an empty instance of the representation; `dumps` is
    a callable that takes an instance of the representation and
    returns a JSON-compatible form made of purely primitive
    objects (lists, dictionaries, strings, numbers); and `loads`
    is a callable that takes the response from `dumps` and recreates
    the original representation.

    Note that ``obj.constructor()`` *must* return an
    object that is an instance of one of the following
    types: ``unicode``, :class:`dossier.fc.StringCounter`,
    :class:`dossier.fc.SparseVector` or
    :class:`dossier.fc.DenseVector`. If it isn't, a
    :exc:`ValueError` is raised.
    '''
    ro = obj.constructor()
    if name not in cbor_names_to_tags:
        # BUG FIX: removed a stray debug `print(name)` left before the
        # raise; the name is already included in the exception message.
        raise ValueError('Unsupported feature type name: "%s". ' 'Allowed feature type names: %r' % (name, cbor_names_to_tags.keys()))
    if not is_valid_feature_instance(ro):
        raise ValueError('Constructor for "%s" returned "%r" which has an unknown ' 'sub type "%r". (mro: %r). Object must be an instance of ' 'one of the allowed types: %r' % (name, ro, type(ro), type(ro).mro(), ALLOWED_FEATURE_TYPES))
    # Keep a reference instance ('ro') alongside the serializer object.
    self._registry[name] = {'obj': obj, 'ro': obj.constructor()}
    self._inverse[obj.constructor] = name
def fastas(self, download=False):
    """Dict of filepaths for all fasta files associated with code.

    Parameters
    ----------
    download : bool
        If True, downloads the fasta file from the PDB.
        If False, uses the ampal Protein.fasta property.
        Defaults to False - this is definitely the recommended behaviour.

    Notes
    -----
    Calls self.mmols, and so downloads mmol files if not already present.
    See .fasta property of isambard.ampal.base_ampal.Protein for more information.

    Returns
    -------
    fastas_dict : dict, or None.
        Keys: int
            mmol number
        Values: str
            Filepath for the corresponding fasta file.
    """
    fastas_dict = {}
    fasta_dir = os.path.join(self.parent_dir, 'fasta')
    if not os.path.exists(fasta_dir):
        os.makedirs(fasta_dir)
    for i, mmol_file in self.mmols.items():
        mmol_name = os.path.basename(mmol_file)
        fasta_file_name = '{0}.fasta'.format(mmol_name)
        fasta_file = os.path.join(fasta_dir, fasta_file_name)
        if not os.path.exists(fasta_file):
            if download:
                pdb_url = "http://www.rcsb.org/pdb/files/fasta.txt?structureIdList={0}".format(self.code.upper())
                r = requests.get(pdb_url)
                fasta_string = r.text if r.status_code == 200 else None
            else:
                a = convert_pdb_to_ampal(mmol_file)
                # Take first object if AmpalContainer (i.e. NMR structure).
                if type(a) == AmpalContainer:
                    a = a[0]
                fasta_string = a.fasta
            # BUG FIX: the original wrote fasta_string unconditionally and
            # crashed with TypeError when the download failed (fasta_string
            # is None). Skip writing (and the dict entry) in that case.
            if fasta_string is None:
                continue
            with open(fasta_file, 'w') as foo:
                foo.write(fasta_string)
        fastas_dict[i] = fasta_file
    return fastas_dict
def format_info_response(value):
    """Format the response from redis

    :param str value: The return response from redis
    :rtype: dict
    """
    info = {}
    for line in value.decode('utf-8').splitlines():
        # Skip blank lines and section headers (starting with '#').
        if not line or line.startswith('#'):
            continue
        if ':' not in line:
            continue
        key, _, raw_value = line.partition(':')
        info[key] = parse_info_value(raw_value)
    return info
def close(self, connection, *, commit=True):
    """Close the connection using the closer method passed to the constructor.

    :param connection: the DB connection to finalize and close
    :param commit: commit the pending transaction when True (default),
        otherwise roll it back before closing
    """
    if commit:
        connection.commit()
    else:
        connection.rollback()
    # Delegate actual closing to the injected closer callable.
    self.closer(connection)
def _setup_ulimit_time_limit(self, hardtimelimit, cgroups):
    """Setup time limit with ulimit for the current process."""
    if hardtimelimit is not None:
        # Also use ulimit for CPU time limit as a fallback if cgroups don't work.
        if CPUACCT in cgroups:
            # Use a slightly higher limit to ensure cgroups get used
            # (otherwise we cannot detect the timeout properly).
            ulimit = hardtimelimit + _ULIMIT_DEFAULT_OVERHEAD
        else:
            ulimit = hardtimelimit
        # Set both soft and hard RLIMIT_CPU to the same value.
        resource.setrlimit(resource.RLIMIT_CPU, (ulimit, ulimit))
def with_args(self, *args, **kwargs):
    """Declares that the double can only be called with the provided arguments.

    :param args: Any positional arguments required for invocation.
    :param kwargs: Any keyword arguments required for invocation.
    """
    self.args = args
    self.kwargs = kwargs
    self.verify_arguments()
    # Return self so declarations can be chained fluently.
    return self
def from_file(filename=None, io='auto', prefix_dir=None, omit_facets=False):
    """Read a mesh from a file.

    Parameters
    ----------
    filename : string or function or MeshIO instance or Mesh instance
        The name of file to read the mesh from. For convenience, a
        mesh creation function or a MeshIO instance or directly a Mesh
        instance can be passed in place of the file name.
    io : *MeshIO instance
        Passing *MeshIO instance has precedence over filename.
    prefix_dir : str
        If not None, the filename is relative to that directory.
    omit_facets : bool
        If True, do not read cells of lower dimension than the space
        dimension (faces and/or edges). Only some MeshIO subclasses
        support this!
    """
    if isinstance(filename, Mesh):
        return filename
    if io == 'auto':
        if filename is None:
            output('filename or io must be specified!')
            raise ValueError
        else:
            io = MeshIO.any_from_filename(filename, prefix_dir=prefix_dir)
    output('reading mesh (%s)...' % (io.filename))
    # BUG FIX: time.clock() was removed in Python 3.8; perf_counter() is
    # the recommended replacement for measuring elapsed time.
    tt = time.perf_counter()
    trunk = io.get_filename_trunk()
    mesh = Mesh(trunk)
    mesh = io.read(mesh, omit_facets=omit_facets)
    output('...done in %.2f s' % (time.perf_counter() - tt))
    mesh._set_shape_info()
    return mesh
def _run_progress_callbacks(self, bytes_transferred):
    '''pass the number of bytes process to progress callbacks'''
    # Zero / falsy transfer counts are not reported.
    if not bytes_transferred:
        return
    for callback in self._progress_callbacks:
        try:
            callback(bytes_transferred=bytes_transferred)
        except Exception as ex:
            # A failing callback must not abort the transfer or the
            # remaining callbacks; just log it.
            logger.error("Exception: %s" % str(ex))
def to_resolvers(sweepable: Sweepable) -> List[ParamResolver]:
    """Convert a Sweepable to a list of ParamResolvers.

    Accepts a single ParamResolver, a Sweep, or an iterable of either;
    raises TypeError for anything else.
    """
    if isinstance(sweepable, ParamResolver):
        return [sweepable]
    elif isinstance(sweepable, Sweep):
        return list(sweepable)
    elif isinstance(sweepable, collections.abc.Iterable):
        # BUG FIX (two issues): `collections.Iterable` was removed in
        # Python 3.10 — use `collections.abc.Iterable`. Also, the original
        # peeked with `next(iter(iterable))` and then re-iterated, which
        # silently dropped the first element of one-shot iterators;
        # materialize the items once instead.
        items = list(cast(collections.abc.Iterable, sweepable))
        if not items:
            return []
        if isinstance(items[0], ParamResolver):
            return items
        # An iterable of Sweeps: flatten each sweep's resolvers.
        return [resolver for sweep in items for resolver in sweep]
    raise TypeError('Unexpected Sweepable type.')
def unassign_activity_from_objective_bank(self, activity_id, objective_bank_id):
    """Removes a ``Activity`` from a ``ObjectiveBank``.

    arg:    activity_id (osid.id.Id): the ``Id`` of the ``Activity``
    arg:    objective_bank_id (osid.id.Id): the ``Id`` of the
            ``ObjectiveBank``
    raise:  NotFound - ``activity_id`` or ``objective_bank_id`` not
            found or ``activity_id`` not mapped to
            ``objective_bank_id``
    raise:  NullArgument - ``activity_id`` or ``objective_bank_id``
            is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
    mgr = self._get_provider_manager('LEARNING', local=True)
    lookup_session = mgr.get_objective_bank_lookup_session(proxy=self._proxy)
    # Looking the bank up first raises NotFound for an unknown bank id.
    lookup_session.get_objective_bank(objective_bank_id)
    self._unassign_object_from_catalog(activity_id, objective_bank_id)
def shell_call(command, **kwargs):
    """Calls shell command with argument substitution.

    Args:
        command: command represented as a list. Each element of the list is one
            token of the command. For example "cp a b" becomes ['cp', 'a', 'b']
            If any element of the list looks like '${NAME}' then it will be
            replaced by value from **kwargs with key 'NAME'.
        **kwargs: dictionary with argument substitution

    Returns:
        output of the command

    Raises:
        subprocess.CalledProcessError if command return value is not zero

    This function is useful when you need to do variable substitution prior
    running the command. Below are few examples of how it works:

        shell_call(['cp', 'a', 'b'], a='asd') calls command 'cp a b'
        shell_call(['cp', '${a}', 'b'], a='asd') calls command 'cp asd b',
            '${a}' was replaced with 'asd' before calling the command
    """
    # Tokens of the exact form '${NAME}' are substitution placeholders.
    CMD_VARIABLE_RE = re.compile('^\\$\\{(\\w+)\\}$')

    def _substitute(token):
        m = CMD_VARIABLE_RE.match(token)
        if m and m.group(1) in kwargs:
            return kwargs[m.group(1)]
        return token

    resolved = [_substitute(token) for token in command]
    logging.debug('Executing shell command: %s' % ' '.join(resolved))
    return subprocess.check_output(resolved)
def guid(valu=None):
    '''Get a 16 byte guid value.

    By default, this is a random guid value.

    Args:
        valu: Object used to construct the guid valu from.  This must be
              able to be msgpack'd.

    Returns:
        str: 32 character, lowercase ascii string.
    '''
    if valu is None:
        # Random guid: 16 random bytes rendered as lowercase hex.
        return binascii.hexlify(os.urandom(16)).decode('utf8')
    # "Stable" guid: MD5 of the msgpack encoding of the given item.
    return hashlib.md5(s_msgpack.en(valu)).hexdigest()
def read_sps(path):
    """Read a LibSVM file line-by-line.

    Args:
        path (str): A path to the LibSVM file to read.

    Yields:
        data (list) and target (int).
    """
    # Use a context manager so the file handle is closed deterministically,
    # even if the consumer abandons the generator early (the original
    # leaked the handle by calling open() without closing it).
    with open(path) as f:
        for line in f:
            # First token is the integer label; the rest are the features.
            xs = line.rstrip().split(' ')
            yield xs[1:], int(xs[0])
def get_sound(self, title, group):
    '''Retrieve sound @title from group @group.'''
    # Lookups are case-insensitive: both keys are normalized to lowercase.
    group_key = group.lower()
    title_key = title.lower()
    return self.sounds[group_key][title_key]
def main():
    """Main entry point for gunicorn_console.

    Sets up a curses screen, runs the update/draw event loop until the
    user quits or interrupts, then restores the terminal state.
    """
    # Set up curses.
    stdscr = curses.initscr()
    curses.start_color()
    curses.init_pair(1, foreground_colour, background_colour)
    curses.noecho()
    stdscr.keypad(True)
    stdscr.nodelay(True)
    try:
        curses.curs_set(False)
    except curses.error:
        # Narrowed from a bare `except:`: some terminals can't change
        # cursor visibility and curses signals that with curses.error.
        # A bare except would also have swallowed KeyboardInterrupt here.
        pass
    try:
        # Run main event loop until quit.
        while True:
            try:
                update_gunicorns()
                handle_keypress(stdscr)
                display_output(stdscr)
                curses.napms(int(screen_delay * 1000))
            except KeyboardInterrupt:
                break
    finally:
        # Tear down curses so the terminal is left in a usable state.
        curses.nocbreak()
        stdscr.keypad(False)
        curses.echo()
        curses.endwin()
def axis_as_object(arr, axis=-1):
    """Cast the given axis of an array to a void object.

    If the axis to be cast is contiguous, a view is returned, otherwise a
    copy is made.  This is useful for efficiently sorting by the content
    of an axis, for instance.

    Parameters
    ----------
    arr : ndarray
        array to view as void object type
    axis : int
        axis to view as a void object type

    Returns
    -------
    ndarray
        array with the given axis viewed as a void object
    """
    original_shape = arr.shape
    # Move the target axis to the end and force contiguity so each slice
    # along it occupies a single contiguous run of bytes.
    contiguous = np.ascontiguousarray(np.rollaxis(arr, axis, arr.ndim))
    # One void item spans all the bytes of the axis being collapsed.
    item_bytes = contiguous.dtype.itemsize * original_shape[axis]
    void_dtype = np.dtype((np.void, item_bytes))
    reduced_shape = np.delete(original_shape, axis)
    return contiguous.view(void_dtype).reshape(reduced_shape)
def mon_hosts(mons):
    """Iterate through list of MON hosts, return tuples of (name, host)."""
    for entry in mons:
        # Entries may be 'name:host' pairs or a bare hostname used as both.
        if ':' in entry:
            name, host = entry.split(':')
        else:
            name = host = entry
        # Keep only the short name, stripping any domain suffix.
        name = name.partition('.')[0]
        yield (name, host)
def enable(identifier=None, *args, **kwargs):
    '''Enables a specific cache for the current session.

    Remember that it has to be registered.  Without an ``identifier``,
    the configured default caches (plus ``NoCache``) are tried in order
    until one loads successfully.
    '''
    global cache
    if not identifier:
        for item in (config['default-caches'] + ['NoCache']):
            # `dict.has_key()` was removed in Python 3; `in` is the
            # correct (and Python-2-compatible) membership test.
            if item in caches:
                debug('Enabling default cache %s...' % (item,))
                cache = caches[item](*args, **kwargs)
                if not cache.status():
                    warning('%s could not be loaded. Is the backend running (%s:%d)?' % (item, cache.server, cache.port))
                    continue
                # This means that the cache backend was set up successfully
                break
            else:
                debug('Cache backend %s is not registered. Are all requirements satisfied?' % (item,))
    elif identifier in caches:
        debug('Enabling cache %s...' % (identifier,))
        # Keep the previous cache so we can roll back if loading fails.
        previouscache = cache
        cache = caches[identifier](*args, **kwargs)
        if not cache.status():
            warning('%s could not be loaded. Is the backend running (%s:%d)?' % (identifier, cache.server, cache.port))
            cache = previouscache
    else:
        debug('Cache backend %s is not registered. Are all requirements satisfied?' % (identifier,))
def set_file_properties(self, share_name, directory_name, file_name, content_settings, timeout=None):
    '''
    Sets system properties on the file. If one property is set for the
    content_settings, all properties will be overriden.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of existing file.
    :param ~azure.storage.file.models.ContentSettings content_settings:
        ContentSettings object used to set the file properties.
    :param int timeout:
        The timeout parameter is expressed in seconds.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('content_settings', content_settings)
    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = [('comp', 'properties'), ('timeout', _int_to_str(timeout)), ]
    # Removed a dead `request.headers = None` store that was immediately
    # overwritten by the assignment below.
    request.headers = content_settings._to_headers()
    self._perform_request(request)
def hsl2rgb(hsl):
    """Convert an HSL representation to RGB.

    :param hsl: 3-tuple (h, s, l) where
        h is the hue, position around the chromatic circle (h=1 equiv h=0),
        s is the saturation (0 = full gray, 1 = full color),
        l is the lightness (0 = full black, 1 = full white).
    :rtype: 3-tuple of RGB float values between 0 and 1

    Hue may be any value: as it is a rotation around the chromatic circle,
    values above 1 or below 0 are equivalent to some value in [0, 1].
    Saturation and lightness must lie in [0, 1] or a ValueError is raised.

    Algorithm from:
    http://www.easyrgb.com/index.php?X=MATH&H=19#text19

    >>> hsl2rgb((0.0, 0.0, 0.0))
    (0.0, 0.0, 0.0)
    >>> hsl2rgb((0.5, 0.0, 1.0))
    (1.0, 1.0, 1.0)
    >>> hsl2rgb((0.5, 0.0, 0.5))
    (0.5, 0.5, 0.5)
    >>> hsl2rgb((0, 1.0, 0.5))
    (1.0, 0.0, 0.0)
    """
    h, s, l = (float(component) for component in hsl)
    if not (0.0 - FLOAT_ERROR <= s <= 1.0 + FLOAT_ERROR):
        raise ValueError("Saturation must be between 0 and 1.")
    if not (0.0 - FLOAT_ERROR <= l <= 1.0 + FLOAT_ERROR):
        raise ValueError("Lightness must be between 0 and 1.")
    if s == 0:
        # Achromatic: every channel equals the lightness.
        return l, l, l
    # Intermediate chroma bounds used by the easyrgb formulation.
    v2 = l * (1.0 + s) if l < 0.5 else (l + s) - (s * l)
    v1 = 2.0 * l - v2
    return (
        _hue2rgb(v1, v2, h + (1.0 / 3)),
        _hue2rgb(v1, v2, h),
        _hue2rgb(v1, v2, h - (1.0 / 3)),
    )
def list_subnetpools(self, retrieve_all=True, **_params):
    """Fetches a list of all subnetpools for a project."""
    # Delegate to the generic list helper with the subnetpools endpoint.
    return self.list('subnetpools', self.subnetpools_path,
                     retrieve_all, **_params)
def extractSNPs(prefixes, snpToExtractFileNames, outPrefixes, runSGE):
    """Extract a list of SNPs using Plink.

    :param prefixes: list of Plink binary file prefixes to extract from.
    :param snpToExtractFileNames: list of file names (one per prefix),
        each containing the SNPs to extract.
    :param outPrefixes: list of output prefixes (one per input prefix).
    :param runSGE: if True, submit each Plink job to SGE through DRMAA and
        wait for all of them; otherwise run the jobs sequentially.

    :raises ProgramError: if the drmaa module is not installed, or if any
        SGE job exits with a non-zero status.
    """
    s = None
    jobIDs = []
    jobTemplates = []
    if runSGE:
        # Add the environment variable for DRMAA package
        if "DRMAA_LIBRARY_PATH" not in os.environ:
            t = "/shares/data/sge/lib/lx24-amd64/libdrmaa.so.1.0"
            os.environ["DRMAA_LIBRARY_PATH"] = t
        # Import the python drmaa library (deferred so non-SGE runs don't
        # require the package at all)
        try:
            import drmaa
        except ImportError:
            raise ProgramError("drmaa is not install, install drmaa")
        # Initializing a session
        s = drmaa.Session()
        s.initialize()
    for k, prefix in enumerate(prefixes):
        # One Plink invocation per input prefix / SNP-list pair.
        plinkCommand = ["plink", "--noweb", "--bfile", prefix, "--extract", snpToExtractFileNames[k], "--make-bed", "--out", outPrefixes[k]]
        if runSGE:
            # We run using SGE
            # Creating the job template
            jt = s.createJobTemplate()
            jt.remoteCommand = plinkCommand[0]
            jt.workingDirectory = os.getcwd()
            jt.jobEnvironment = os.environ
            jt.args = plinkCommand[1:]
            jt.jobName = "_plink_extract_snps"
            # Running the job
            jobID = s.runJob(jt)
            # Storing the job template and the job ID
            jobTemplates.append(jt)
            jobIDs.append(jobID)
        else:
            # We run normal
            runCommand(plinkCommand)
    if runSGE:
        # We wait for all the jobs to be over
        hadProblems = []
        for jobID in jobIDs:
            retVal = s.wait(jobID, drmaa.Session.TIMEOUT_WAIT_FOREVER)
            # NOTE(review): despite its name, `hadProblems` records True on
            # SUCCESS (exit status 0) and False on failure — the check
            # below raises when any element is False.
            hadProblems.append(retVal.exitStatus == 0)
        # The jobs should be finished, so we clean everything
        # Deleating the job template, and exiting the session
        for jt in jobTemplates:
            s.deleteJobTemplate(jt)
        # Closing the connection
        s.exit()
        for hadProblem in hadProblems:
            if not hadProblem:
                msg = "Some SGE jobs had errors..."
                raise ProgramError(msg)
def seekend(self):
    """Set the current record position past the last vdata record.

    Subsequent write() calls will append records to the vdata.

    Args::

      no argument

    Returns::

      index of the last record plus 1

    C library equivalent : no equivalent
    """
    try:
        # Seek to the last record, then consume it, so the internal
        # offset ends up just past the final record.
        self.seek(self._nrecs - 1)  # updates _offset
        self.read(1)                # updates _offset
        return self._nrecs
    except HDF4Error:
        raise HDF4Error("seekend: cannot execute")
def main_restore(directory, conn_name):
    """Restore your database dumped with the dump command.

    Just a wrapper around `dump restore` https://github.com/Jaymon/dump

    :param directory: directory containing the dump to restore from
    :param conn_name: name of the connection/interface to restore into
    """
    inter = get_interface(conn_name)
    # Removed the unused `conn = inter.connection_config` local; the
    # interface object itself carries everything get_base_cmd needs.
    cmd = get_base_cmd("restore", inter, directory)
    run_cmd(cmd)
def build_permutation_matrix(permutation):
    """Build a permutation matrix for a permutation."""
    size = len(permutation)
    matrix = lil_matrix((size, size))
    # Column j gets a 1 in the row given by permutation[j].
    for column, row in enumerate(permutation):
        matrix[row, column] = 1
    return matrix
def find_show_premium_by_ids(self, show_ids, page=1, count=20):
    """doc: http://open.youku.com/docs/doc?id=61"""
    url = 'https://openapi.youku.com/v2/shows/show_premium.json'
    # Assemble the query payload expected by the Youku open API.
    params = {
        'client_id': self.client_id,
        'show_ids': show_ids,
        'page': page,
        'count': count,
    }
    response = requests.get(url, params=params)
    check_error(response)
    return response.json()
def connect_container_to_network(container, net_id, **kwargs):
    '''
    .. versionadded:: 2015.8.3
    .. versionchanged:: 2017.7.0
        Support for ``ipv4_address`` argument added
    .. versionchanged:: 2018.3.0
        All arguments are now passed through to
        `connect_container_to_network()`_, allowing for any new arguments
        added to this function to be supported automagically.

    Connect container to network. See the `connect_container_to_network()`_
    docs for information on supported arguments.

    container
        Container name or ID

    net_id
        Network name or ID

    CLI Examples:

    .. code-block:: bash

        salt myminion docker.connect_container_to_network web-1 mynet
        salt myminion docker.connect_container_to_network web-1 mynet ipv4_address=10.20.0.10
        salt myminion docker.connect_container_to_network web-1 1f9d2454d0872b68dd9e8744c6e7a4c66b86f10abaccc21e14f7f014f729b2bc
    '''
    cleaned = __utils__['args.clean_kwargs'](**kwargs)
    log.debug(
        'Connecting container \'%s\' to network \'%s\' with the following '
        'configuration: %s', container, net_id, cleaned
    )
    _client_wrapper('connect_container_to_network', container, net_id, **cleaned)
    log.debug(
        'Successfully connected container \'%s\' to network \'%s\'',
        container, net_id
    )
    # Invalidate cached state so subsequent lookups see the new attachment.
    _clear_context()
    return True
def dump(self, force=False):
    """Encodes the value using DER.

    :param force:
        If the encoded contents already exist, clear them and regenerate
        to ensure they are in DER format instead of BER format

    :return:
        A byte string of the DER-encoded value
    """
    # Lazily parse on first use, then delegate to the parsed value.
    if self._parsed is None:
        self.parse()
    parsed_value = self._parsed[0]
    return parsed_value.dump(force=force)
def _upd_unused(self, what):
    """Make sure to have exactly one copy of every valid function in the
    "unused" pile on the right.

    Doesn't read from the database.

    :param what: a string, 'trigger', 'prereq', or 'action'
    """
    builder = getattr(self, '_{}_builder'.format(what))
    updtrig = getattr(self, '_trigger_upd_unused_{}s'.format(what))
    # Unbind the update trigger while mutating the decks so the
    # rearrangement below doesn't recursively re-invoke this handler.
    builder.unbind(decks=updtrig)
    funcs = OrderedDict()
    # NOTE(review): the decks are always read from _action_builder here,
    # even when `what` names a different builder — confirm this is
    # intentional and not a copy/paste slip.
    # Walk the unused deck in reverse so that, after the final reversal
    # below, the original ordering is preserved.
    cards = list(self._action_builder.decks[1])
    cards.reverse()
    for card in cards:
        funcs[card.ud['funcname']] = card
    # Add a copy of any in-use card whose function isn't represented yet,
    # so every valid function appears exactly once in the unused pile.
    for card in self._action_builder.decks[0]:
        if card.ud['funcname'] not in funcs:
            funcs[card.ud['funcname']] = card.copy()
    unused = list(funcs.values())
    unused.reverse()
    builder.decks[1] = unused
    # Re-attach the trigger now that the decks are consistent again.
    builder.bind(decks=updtrig)
def get_country_name_from_iso3(cls, iso3, use_live=True, exception=None):
    # type: (str, bool, Optional[ExceptionUpperBound]) -> Optional[str]
    """Get country name from ISO3 code

    Args:
        iso3 (str): ISO3 code for which to get country name
        use_live (bool): Try to get use latest data from web rather than file in package. Defaults to True.
        exception (Optional[ExceptionUpperBound]): An exception to raise if country not found. Defaults to None.

    Returns:
        Optional[str]: Country name
    """
    info = cls.get_country_info_from_iso3(iso3, use_live=use_live,
                                          exception=exception)
    if info is None:
        return None
    # The preferred-name field may itself be absent, in which case
    # dict.get returns None.
    return info.get('#country+name+preferred')
def _cleanAndRebuildIfNeeded(portal, cleanrebuild):
    """Rebuild the given catalogs.

    :portal: the Plone portal object
    :cleanrebuild: a list with catalog ids
    """
    for cat in cleanrebuild:
        catalog = getToolByName(portal, cat)
        if catalog:
            # Prefer the non-destructive variant when the catalog offers it.
            if hasattr(catalog, "softClearFindAndRebuild"):
                catalog.softClearFindAndRebuild()
            else:
                catalog.clearFindAndRebuild()
        else:
            # Fixed the warning message grammar ("%s do not found").
            logger.warning('%s not found' % cat)
def add_cache_entry(self, key, entry):
    """Add the given `entry` (which must be a :class:`~.disco.xso.InfoQuery`
    instance) to the user-level database keyed with the hash function type
    `hash_` and the `node` URL.  The `entry` is **not** validated to
    actually map to `node` with the given `hash_` function; it is expected
    that the caller performs the validation.
    """
    # Store a shallow copy so later mutation of `entry` doesn't leak in.
    self._memory_overlay[key] = copy.copy(entry)
    if self._user_db_path is None:
        return
    # Persist asynchronously via an executor so the event loop isn't
    # blocked by disk I/O.
    loop = asyncio.get_event_loop()
    asyncio.ensure_future(loop.run_in_executor(
        None, writeback, self._user_db_path / key.path,
        entry.captured_events))
def merge_duplicates(self):
    """Merge and remove duplicate entries.

    Compares each entry ('name') in `stubs` to all later entries to check
    for duplicates in name or alias.  If a duplicate is found, they are
    merged and written to file.
    """
    if len(self.entries) == 0:
        self.log.error("WARNING: `entries` is empty, loading stubs")
        if self.args.update:
            self.log.warning("No sources changed, entry files unchanged in update." " Skipping merge.")
            return
        self.entries = self.load_stubs()
    task_str = self.get_current_task_str()
    keys = list(sorted(self.entries.keys()))
    n1 = 0
    mainpbar = tqdm(total=len(keys), desc=task_str)
    # `keys` can grow while we iterate (the surviving name of a merge is
    # re-appended below), so a while-loop over an index is used instead of
    # a plain for-loop.
    while n1 < len(keys):
        name1 = keys[n1]
        if name1 not in self.entries:
            # Already removed by an earlier merge; skip it.
            self.log.info("Entry for {} not found, likely already " "deleted in merging process.".format(name1))
            n1 = n1 + 1
            mainpbar.update(1)
            continue
        allnames1 = set(self.entries[name1].get_aliases() + self.entries[name1].extra_aliases())
        # Search all later names
        for name2 in keys[n1 + 1:]:
            if name1 == name2:
                continue
            if name1 not in self.entries:
                self.log.info("Entry for {} not found, likely already " "deleted in merging process.".format(name1))
                continue
            if name2 not in self.entries:
                self.log.info("Entry for {} not found, likely already " "deleted in merging process.".format(name2))
                continue
            allnames2 = set(self.entries[name2].get_aliases() + self.entries[name2].extra_aliases())
            # If there are any common names or aliases, merge
            if len(allnames1 & allnames2):
                self.log.warning("Found two entries with common aliases " "('{}' and '{}'), merging.".format(name1, name2))
                # Reload both entries in full (stubs are partial).
                load1 = self.proto.init_from_file(self, name=name1)
                load2 = self.proto.init_from_file(self, name=name2)
                if load1 is not None and load2 is not None:
                    # Delete old files
                    self._delete_entry_file(entry=load1)
                    self._delete_entry_file(entry=load2)
                    self.entries[name1] = load1
                    self.entries[name2] = load2
                    # Count aliases bearing a "priority" prefix; the entry
                    # with more keeps its name after the merge.
                    priority1 = 0
                    priority2 = 0
                    for an in allnames1:
                        if an.startswith(self.entries[name1].priority_prefixes()):
                            priority1 += 1
                    for an in allnames2:
                        if an.startswith(self.entries[name2].priority_prefixes()):
                            priority2 += 1
                    if priority1 > priority2:
                        self.copy_to_entry_in_catalog(name2, name1)
                        # Re-queue the surviving name so it is re-checked
                        # against the remaining entries.
                        keys.append(name1)
                        del self.entries[name2]
                    else:
                        self.copy_to_entry_in_catalog(name1, name2)
                        keys.append(name2)
                        del self.entries[name1]
                else:
                    self.log.warning('Duplicate already deleted')
                # if len(self.entries) != 1:
                #     self.log.error(
                #         "WARNING: len(entries) = {}, expected 1.  "
                #         "Still journaling...".format(len(self.entries)))
                self.journal_entries()
            if self.args.travis and n1 > self.TRAVIS_QUERY_LIMIT:
                # Cap the amount of work done per CI run.
                break
        n1 = n1 + 1
        mainpbar.update(1)
    mainpbar.close()
def get_keypair_dict():
    """Returns dictionary of {keypairname: keypair}"""
    client = get_ec2_client()
    response = client.describe_key_pairs()
    assert is_good_response(response)
    ec2 = get_ec2_resource()
    result = {}
    for keypair in response['KeyPairs']:
        keypair_name = keypair.get('KeyName', '')
        # Warn on duplicate key names; optionally hard-fail when
        # duplicate checking is enabled.
        if keypair_name in result:
            util.log(f"Warning: Duplicate key {keypair_name}")
            if DUPLICATE_CHECKING:
                assert keypair_name not in result, "Duplicate key " + keypair_name
        result[keypair_name] = ec2.KeyPair(keypair_name)
    return result
def touch(name, atime=None, mtime=None):
    '''
    .. versionadded:: 0.9.5

    Just like the ``touch`` command, create a file if it doesn't exist or
    simply update the atime and mtime if it already does.

    atime:
        Access time in Unix epoch time. Set it to 0 to set atime of the
        file with Unix date of birth. If this parameter isn't set, atime
        will be set with current time.
    mtime:
        Last modification in Unix epoch time. Set it to 0 to set mtime of
        the file with Unix date of birth. If this parameter isn't set,
        mtime will be set with current time.

    CLI Example:

    .. code-block:: bash

        salt '*' file.touch /var/log/emptyfile
    '''
    name = os.path.expanduser(name)
    # The CLI passes times as strings; coerce numeric strings to ints.
    if atime and atime.isdigit():
        atime = int(atime)
    if mtime and mtime.isdigit():
        mtime = int(mtime)
    try:
        if not os.path.exists(name):
            # Create an empty file; 'a' mode avoids truncating a racer.
            with salt.utils.files.fopen(name, 'a'):
                pass
        # Fill in "now" for whichever timestamp wasn't supplied.
        if atime is None and mtime is None:
            times = None
        elif mtime is None:
            times = (atime, time.time())
        elif atime is None:
            times = (time.time(), mtime)
        else:
            times = (atime, mtime)
        os.utime(name, times)
    except TypeError:
        raise SaltInvocationError('atime and mtime must be integers')
    except (IOError, OSError) as exc:
        raise CommandExecutionError(exc.strerror)
    return os.path.exists(name)
def save(self, filename=None, *, gzipped=None, byteorder=None):
    """Write the file at the specified location.

    The `gzipped` keyword only argument indicates if the file should
    be gzipped.  The `byteorder` keyword only argument lets you
    specify whether the file should be big-endian or little-endian.

    If the method is called without any argument, it will default to
    the instance attributes and use the file's `filename`, `gzipped`
    and `byteorder` attributes.  Calling the method without a
    `filename` will raise a `ValueError` if the `filename` of the
    file is `None`.
    """
    # Fall back to instance attributes for any argument left unset.
    effective_gzip = self.gzipped if gzipped is None else gzipped
    target = filename if filename is not None else self.filename
    if target is None:
        raise ValueError('No filename specified')
    opener = gzip.open if effective_gzip else open
    with opener(target, 'wb') as buff:
        self.write(buff, byteorder or self.byteorder)
def commit_hash(self):
    """Return the current commit hash if available.

    This is not a required task so best effort is fine.  In other words
    this is not guaranteed to work 100% of the time.
    """
    branch_file = '.git/HEAD'
    branch = None
    # Resolve the current branch from .git/HEAD
    # (expected format: "ref: refs/heads/<branch>").
    if os.path.isfile(branch_file):
        with open(branch_file, 'r') as f:
            try:
                branch = f.read().strip().split('/')[2]
            except IndexError:
                # Detached HEAD or unexpected format; leave branch unset.
                pass
    if not branch:
        return None
    # Read the commit hash for that branch, if the ref file exists.
    hash_file = '.git/refs/heads/{}'.format(branch)
    if not os.path.isfile(hash_file):
        return None
    with open(hash_file, 'r') as f:
        return f.read().strip()
def _adjust_object_lists(obj):
    '''For creation or update of object that have attribute which contains a list Zabbix awaits plain list of IDs while
    querying Zabbix for same object returns list of dicts

    :param obj: Zabbix object parameters
    '''
    for subcomp, definition in TEMPLATE_COMPONENT_DEF.items():
        if subcomp in obj and definition['adjust']:
            # Collapse the list of dicts into a plain list of IDs.
            id_key = definition['qidname']
            obj[subcomp] = [item[id_key] for item in obj[subcomp]]
def md5(self):
    """An MD5 hash of the current vertices and entities.

    Returns
    -------
    md5 : str, two appended MD5 hashes
    """
    # Hash the raw bytes of every entity first, then append the
    # vertex-array hash to form the combined digest string.
    entity_bytes = bytes().join(e._bytes() for e in self.entities)
    return '{}{}'.format(util.md5_object(entity_bytes),
                         self.vertices.md5())
def verify_calling_thread(self, should_be_emulation, message=None):
    """Verify if the calling thread is or is not the emulation thread.

    This method can be called to make sure that an action is being taken
    in the appropriate context, such as not blocking the event loop thread
    or modifying emulated state outside of the event loop thread.

    If the verification fails an InternalError exception is raised,
    allowing this method to be used to protect other methods from being
    called in a context that could deadlock or cause race conditions.

    Args:
        should_be_emulation (bool): True if this call should be taking
            place on the emulation thread, False if it must not take place
            on the emulation thread.
        message (str): Optional message to include when raising the
            exception. Otherwise a generic message is used.

    Raises:
        InternalError: When called from the wrong thread.
    """
    # Fast path: the caller is on the expected thread.
    if should_be_emulation == self._on_emulation_thread():
        return
    raise InternalError(message if message is not None
                        else "Operation performed on invalid thread")
def cancelall(self):
    """Cancel all orders of this bot"""
    if not self.orders:
        # Nothing to cancel; return None implicitly like a no-op.
        return None
    order_ids = [order["id"] for order in self.orders]
    return self.bitshares.cancel(order_ids, account=self.account)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.