signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def eventFilter(self, widget, event):
    """Qt event filter: emit ``sig_canvas_clicked`` when the figure canvas
    receives a left-button mouse press.

    All events are forwarded to the base-class filter afterwards, so normal
    event processing is unaffected.
    """
    left_click = (event.type() == QEvent.MouseButtonPress
                  and event.button() == Qt.LeftButton)
    if left_click:
        self.sig_canvas_clicked.emit(self)
    return super(FigureThumbnail, self).eventFilter(widget, event)
def bytesize(arr):
    """Return the memory size of a NumPy array's element data in bytes.

    Parameters
    ----------
    arr : numpy.ndarray
        Array whose element storage is measured (the ndarray object
        overhead itself is not counted).

    Returns
    -------
    int
        Number of bytes occupied by the array's elements.
    """
    # ndarray.nbytes == prod(shape) * itemsize, but is computed by NumPy
    # itself and is always a plain Python int.  The previous
    # np.prod(arr.shape) * itemsize form returned a NumPy scalar -- and a
    # *float* for 0-d arrays (np.prod(()) == 1.0) -- contradicting the
    # documented "as an integer" contract.
    return arr.nbytes
def apply_weight_drop(block, local_param_regex, rate, axes=(),
                      weight_dropout_mode='training'):
    """Apply weight drop to matching parameters of a block.

    Every parameter whose local name matches ``local_param_regex`` is
    replaced -- in the block's ParameterDicts, in its ``reg_params`` and in
    any matching instance attribute -- by a ``WeightDropParameter`` that
    drops weights with probability ``rate``.

    Parameters
    ----------
    block : Block or HybridBlock
        The block whose parameters will have weight-drop applied.
    local_param_regex : str
        Regex for local parameter names as used in ``self.params.get()``,
        e.g. ``'.*h2h_weight'``.
    rate : float
        Fraction of the input units to drop.  Must be between 0 and 1.
    axes : tuple of int, default ()
        Axes on which the dropout mask is shared.  If empty, regular
        dropout is applied.
    weight_dropout_mode : {'training', 'always'}, default 'training'
        Whether weight dropout is applied only at training time or always.

    Examples
    --------
    >>> net = gluon.rnn.LSTM(10, num_layers=2, bidirectional=True)
    >>> gluonnlp.model.apply_weight_drop(net, r'.*h2h_weight', 0.5)
    """
    if not rate:
        # A rate of 0 (or None/0.0) is a no-op; leave the block untouched.
        return
    existing_params = _find_params(block, local_param_regex)
    for (local_param_name, param), (ref_params_list, ref_reg_params_list) \
            in existing_params.items():
        dropped_param = WeightDropParameter(param, rate,
                                            weight_dropout_mode, axes)
        # Swap the parameter in every ParameterDict that references it...
        for ref_params in ref_params_list:
            ref_params[param.name] = dropped_param
        # ...and in every reg_params dict (keyed by the local name).
        for ref_reg_params in ref_reg_params_list:
            ref_reg_params[local_param_name] = dropped_param
        # Finally, rebind any instance attribute that still points at the
        # original parameter (directly, or inside a list/tuple/dict).
        if hasattr(block, local_param_name):
            local_attr = getattr(block, local_param_name)
            if local_attr == param:
                local_attr = dropped_param
            elif isinstance(local_attr, (list, tuple)):
                if isinstance(local_attr, tuple):
                    local_attr = list(local_attr)
                for i, v in enumerate(local_attr):
                    if v == param:
                        local_attr[i] = dropped_param
            elif isinstance(local_attr, dict):
                # BUG FIX: iterating a dict directly yields only its keys,
                # so ``for k, v in local_attr:`` raised (or misbehaved) for
                # any dict-valued attribute; ``.items()`` is required.
                for k, v in local_attr.items():
                    if v == param:
                        local_attr[k] = dropped_param
            else:
                continue
            if local_attr:
                # Bypass Block.__setattr__ so the parameter-registration
                # machinery is not triggered a second time.
                super(Block, block).__setattr__(local_param_name, local_attr)
def reduce(self, sum1, sum2, *args):
    """Accumulate partial results from worker processors into the global
    accumulators held on ``self``.
    """
    # The first moment is always accumulated in place.
    self.sum1g[...] += sum1
    # The second moment is only tracked when more than point counts matter.
    if not self.pts_only:
        self.sum2g[...] += sum2
    # Optional per-bin mean coordinates: args carries (N, centers_sum).
    if self.compute_mean_coords:
        N, centers_sum = args
        self.N[...] += N
        for dim in range(self.bins.Ndim):
            self.centers[dim][...] += centers_sum[dim]
def query(self, query_criteria, valid_record=None):
    '''a core method for querying model valid data with criteria

    **NOTE: input is only returned if all fields & qualifiers are valid
    for the model

    :param query_criteria: dictionary with model field names and query qualifiers
    :param valid_record: dictionary with model valid record
    :return: boolean (or raises QueryValidationError)

    an example of how to construct the query_criteria argument:

    query_criteria = {
        '.path.to.number': {
            'min_value': 4.5
        '.path.to.string': {
            'must_contain': [ '\\regex' ]

    **NOTE: for a full list of operators for query_criteria based upon field
    datatype, see either the query-rules.json file or REFERENCE file'''
    # names used only to build readable error messages
    __name__ = '%s.query' % self.__class__.__name__
    _query_arg = '%s(query_criteria={...})' % __name__
    _record_arg = '%s(valid_record={...})' % __name__
    # validate input
    if not isinstance(query_criteria, dict):
        raise ModelValidationError('%s must be a dictionary.' % _query_arg)
    # convert javascript dot_path to class dot_path
    criteria_copy = {}
    equal_fields = []
    dot_fields = []
    for key, value in query_criteria.items():
        copy_key = key
        if not key:
            copy_key = '.'
        else:
            if key[0] != '.':
                copy_key = '.%s' % key
                # remember which keys we prefixed, so error messages can
                # show the user's original spelling
                dot_fields.append(copy_key)
        criteria_copy[copy_key] = value
        # a bare scalar value is shorthand for an equal_to qualifier
        if value.__class__ in self._datatype_classes[0:4]:
            criteria_copy[copy_key] = {'equal_to': value}
            equal_fields.append(copy_key)
    # validate query criteria against query rules
    query_kwargs = {'fields_dict': criteria_copy,
                    'fields_rules': self.queryRules,
                    'declared_value': False}
    try:
        self._validate_fields(**query_kwargs)
    except ModelValidationError as err:
        message = err.error['message']
        # rewrite shorthand equal_to errors to name only the field
        for field in equal_fields:
            equal_error = 'field %s qualifier equal_to' % field
            if message.find(equal_error) > -1:
                message = message.replace(equal_error, 'field %s' % field)
                break
        # BUG FIX: the pattern is now a raw string; '\s' inside a plain
        # string literal is an invalid escape sequence (SyntaxWarning on
        # modern Python, scheduled to become an error).
        field_pattern = re.compile(r'ield\s(\..*?)\s')
        field_name = field_pattern.findall(message)
        if field_name:
            if field_name[0] in dot_fields:
                # strip the dot prefix that was added during conversion
                def _replace_field(x):
                    return 'ield %s ' % x.group(1)[1:]
                message = field_pattern.sub(_replace_field, message)
        raise QueryValidationError(message)
    # query test record
    if valid_record:
        if not isinstance(valid_record, dict):
            raise ModelValidationError('%s must be a dictionary.' % _record_arg)
        for key, value in criteria_copy.items():
            eval_outcome = self._evaluate_field(valid_record, key, value)
            if not eval_outcome:
                return False
    return True
def setHintColor(self, color):
    """Set the hint color for this combo box's line edit.

    Has no effect unless the line edit is an ``XLineEdit`` instance.

    :param color: | <QColor>
    """
    editor = self.lineEdit()
    if isinstance(editor, XLineEdit):
        editor.setHintColor(color)
def isPlantOrigin(taxid):
    """Return True if the organism with this taxid is a plant.

    Fetches the expanded taxonomy tree for ``taxid`` and checks it for the
    Viridiplantae clade.

    >>> isPlantOrigin(29760)
    True

    :param taxid: NCBI taxonomy identifier (int).
    :raises TypeError: if ``taxid`` is not an int.
    :raises ValueError: if ``taxid`` is not a valid taxonomy ID.
    """
    # Validate with an explicit exception: ``assert`` is stripped when
    # Python runs with -O, which would silently disable the check.
    if not isinstance(taxid, int):
        raise TypeError("taxid must be an int, got {0!r}".format(taxid))
    t = TaxIDTree(taxid)
    try:
        return "Viridiplantae" in str(t)
    except AttributeError:
        # TaxIDTree leaves the tree attribute unset for unknown IDs.
        raise ValueError("{0} is not a valid ID".format(taxid))
def master_config(opts, vm_):
    '''Return a salt master's configuration for the provided options and VM.'''
    # Start from a copy of the salt master defaults...
    master = copy.deepcopy(salt.config.DEFAULT_MASTER_OPTS)
    # ...fill in reasonable values for options that default to null...
    master.update(log_level='info',
                  log_level_logfile='info',
                  hash_type='sha256')
    # ...then overlay any user-defined 'master' setting, merged in order:
    #   1. VM config   2. Profile config   3. Global configuration
    cloud_master = salt.config.get_cloud_config_value(
        'master', vm_, opts, default={}, search_global=True)
    master.update(cloud_master)
    return master
def annot_boxplot(ax, dmetrics, xoffwithin=0.85, xoff=1.6, yoff=0,
                  annotby='xs', test=False):
    """Annotate a boxplot with the values held in ``dmetrics``.

    :param ax: matplotlib axes holding the boxplot.
    :param dmetrics: DataFrame with hue in the index and x in the columns.
    :param xoffwithin: x offset between hues within one x position.
    :param xoff: overall x offset.
    :param yoff: y offset of each annotation.
    :param annotby: 'xs' (default) or 'ys' to swap the annotation axes.
    :param test: if True, first draw a dummy boxplot built from ``dmetrics``.
    :return: the axes, with its original axis labels restored.

    # todos
    # express the x|y offsets as percentages, e.g.
    #   xmin, xmax = ax.get_xlim()
    #   (xmax - xmin) + (xmax - xmin) * 0.35 + xmin
    """
    # Remember the labels: the seaborn call below may overwrite them.
    xlabel = ax.get_xlabel()
    ylabel = ax.get_ylabel()
    if test:
        dmetrics.index.name = 'index'
        dmetrics.columns.name = 'columns'
        dm = dmetrics.melt()
        dm['value'] = 1
        ax = sns.boxplot(data=dm, x='columns', y='value')
    for huei, hue in enumerate(dmetrics.index):
        for xi, x in enumerate(dmetrics.columns):
            # Skip missing cells; annotate each (hue, x) value near its box.
            if not pd.isnull(dmetrics.loc[hue, x]):
                xco = xi + (huei * xoffwithin / len(dmetrics.index) + (xoff / len(dmetrics.index)))
                yco = ax.get_ylim()[1] + yoff
                if annotby == 'ys':
                    # Swap coordinates to annotate along the y axis instead.
                    xco, yco = yco, xco
                ax.text(xco, yco, dmetrics.loc[hue, x], ha='center')
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    return ax
def Bankoff(m, x, rhol, rhog, mul, mug, D, roughness=0, L=1):
    r'''Calculates two-phase pressure drop with the Bankoff (1960)
    correlation, as shown in [2]_, [3]_, and [4]_.

    .. math::
        \Delta P_{tp} = \phi_{l}^{7/4} \Delta P_{l}

    .. math::
        \phi_l = \frac{1}{1-x}\left[1 - \gamma\left(1 -
        \frac{\rho_g}{\rho_l}\right)\right]^{3/7}\left[1 + x\left(
        \frac{\rho_l}{\rho_g} - 1\right)\right]

    .. math::
        \gamma = \frac{0.71 + 2.35\left(\frac{\rho_g}{\rho_l}\right)}
        {1 + \frac{1-x}{x}\cdot\frac{\rho_g}{\rho_l}}

    Parameters
    ----------
    m : float
        Mass flow rate of fluid, [kg/s]
    x : float
        Quality of fluid, [-]
    rhol : float
        Liquid density, [kg/m^3]
    rhog : float
        Gas density, [kg/m^3]
    mul : float
        Viscosity of liquid, [Pa*s]
    mug : float
        Viscosity of gas, [Pa*s]
    D : float
        Diameter of pipe, [m]
    roughness : float, optional
        Roughness of pipe for use in calculating friction factor, [m]
    L : float, optional
        Length of pipe, [m]

    Returns
    -------
    dP : float
        Pressure drop of the two-phase flow, [Pa]

    Notes
    -----
    This correlation is not actually shown in [1]_. Its origin is unknown.
    The author recommends against using this.

    Examples
    --------
    >>> Bankoff(m=0.6, x=0.1, rhol=915., rhog=2.67, mul=180E-6, mug=14E-6,
    ... D=0.05, roughness=0, L=1)
    4746.059442453399

    References
    ----------
    .. [1] Bankoff, S. G. "A Variable Density Single-Fluid Model for
       Two-Phase Flow With Particular Reference to Steam-Water Flow."
       Journal of Heat Transfer 82, no. 4 (November 1, 1960): 265-72.
       doi:10.1115/1.3679930.
    .. [2] Thome, John R. "Engineering Data Book III." Wolverine Tube Inc
       (2004). http://www.wlv.com/heat-transfer-databook/
    .. [3] Moreno Quibén, Jesús. "Experimental and Analytical Study of
       Two-Phase Pressure Drops during Evaporation in Horizontal Tubes,"
       2005. doi:10.5075/epfl-thesis-3337.
    .. [4] Mekisso, Henock Mateos. "Comparison of Frictional Pressure Drop
       Correlations for Isothermal Two-Phase Horizontal Flow." Thesis,
       Oklahoma State University, 2013.
       https://shareok.org/handle/11244/11109.
    '''
    # Pressure drop as if the entire mass flow were liquid (dP_lo):
    # superficial liquid velocity, Reynolds number, Darcy friction factor.
    v_lo = m / rhol / (pi / 4 * D ** 2)
    Re_lo = Reynolds(V=v_lo, rho=rhol, mu=mul, D=D)
    fd_lo = friction_factor(Re=Re_lo, eD=roughness / D)
    dP_lo = fd_lo * L / D * (0.5 * rhol * v_lo ** 2)
    # Bankoff flow parameter gamma and the two-phase multiplier phi.
    gamma = (0.71 + 2.35 * rhog / rhol) / (1. + (1. - x) / x * rhog / rhol)
    phi_Bf = (1. / (1. - x)
              * (1 - gamma * (1 - rhog / rhol)) ** (3 / 7.)
              * (1. + x * (rhol / rhog - 1.)))
    # Scale the liquid-only pressure drop by phi^(7/4).
    return dP_lo * phi_Bf ** (7 / 4.)
def directions(self, features, profile='mapbox/driving', alternatives=None,
               geometries=None, overview=None, steps=None,
               continue_straight=None, waypoint_snapping=None,
               annotations=None, language=None, **kwargs):
    """Request directions for waypoints encoded as GeoJSON features.

    Parameters
    ----------
    features : iterable
        A collection of GeoJSON features.
    profile : str
        Name of a Mapbox profile such as 'mapbox/driving'.
    alternatives : bool
        Whether to try to return alternative routes, default: False.
    geometries : string
        Type of geometry returned (geojson, polyline, polyline6).
    overview : string or False
        Type of returned overview geometry: 'full', 'simplified',
        or False.
    steps : bool
        Whether to return steps and turn-by-turn instructions,
        default: False.
    continue_straight : bool
        Direction of travel when departing intermediate waypoints.
    waypoint_snapping : list
        Controls snapping of waypoints.  The list is zipped with the
        features collection and must have the same length.  Elements of
        the list must be one of:
        - A number (interpreted as a snapping radius)
        - The string 'unlimited' (unlimited snapping radius)
        - A 3-element tuple consisting of (radius, angle, range)
        - None (no snapping parameters specified for that waypoint)
    annotations : str
        Whether or not to return additional metadata along the route.
        Possible values are: 'duration', 'distance', 'speed', and
        'congestion'.  Several annotations can be used by joining them
        with ','.
    language : str
        Language of returned turn-by-turn text instructions,
        default: 'en'.

    Returns
    -------
    requests.Response
        The response object has a geojson() method for access to the
        route(s) as a GeoJSON-like FeatureCollection dictionary.
    """
    # backwards compatible, deprecated
    if 'geometry' in kwargs and geometries is None:
        geometries = kwargs['geometry']
        warnings.warn('Use `geometries` instead of `geometry`',
                      errors.MapboxDeprecationWarning)
    annotations = self._validate_annotations(annotations)
    coordinates = encode_coordinates(features, precision=6,
                                     min_limit=2, max_limit=25)
    geometries = self._validate_geom_encoding(geometries)
    overview = self._validate_geom_overview(overview)
    profile = self._validate_profile(profile)
    bearings, radii = self._validate_snapping(waypoint_snapping, features)
    params = {}
    if alternatives is not None:
        params['alternatives'] = 'true' if alternatives is True else 'false'
    if geometries is not None:
        params['geometries'] = geometries
    if overview is not None:
        params['overview'] = 'false' if overview is False else overview
    if steps is not None:
        params['steps'] = 'true' if steps is True else 'false'
    if continue_straight is not None:
        # BUG FIX: this previously tested ``steps`` instead of
        # ``continue_straight``, so the emitted value depended on an
        # unrelated argument.
        params['continue_straight'] = ('true' if continue_straight is True
                                       else 'false')
    if annotations is not None:
        params['annotations'] = ','.join(annotations)
    if language is not None:
        params['language'] = language
    if radii is not None:
        params['radiuses'] = ';'.join(str(r) for r in radii)
    if bearings is not None:
        params['bearings'] = ';'.join(
            self._encode_bearing(b) for b in bearings)
    profile_ns, profile_name = profile.split('/')
    uri = URITemplate(
        self.baseuri + '/{profile_ns}/{profile_name}/{coordinates}.json'
    ).expand(profile_ns=profile_ns, profile_name=profile_name,
             coordinates=coordinates)
    resp = self.session.get(uri, params=params)
    self.handle_http_error(resp)

    def geojson():
        # Bound to this response; lets callers do resp.geojson().
        return self._geojson(resp.json(), geom_format=geometries)

    resp.geojson = geojson
    return resp
def power_status_update(self, POWER_STATUS):
    '''update POWER_STATUS warnings level, announcing low servo or Vcc
    voltage at most once every 30 seconds.'''
    now = time.time()
    # Voltages arrive in millivolts.
    servo_volts = POWER_STATUS.Vservo * 0.001
    vcc_volts = POWER_STATUS.Vcc * 0.001
    self.high_servo_voltage = max(self.high_servo_voltage, servo_volts)
    servo_was_powered = self.high_servo_voltage > 1
    if servo_was_powered and servo_volts < self.settings.servowarn:
        if now - self.last_servo_warn_time > 30:
            self.last_servo_warn_time = now
            self.say("Servo volt %.1f" % servo_volts)
            if servo_volts < 1:
                # Looks like a power down; reset the high-water mark to
                # prevent continuous announcements.
                self.high_servo_voltage = servo_volts
    if 0 < vcc_volts < self.settings.vccwarn:
        if now - self.last_vcc_warn_time > 30:
            self.last_vcc_warn_time = now
            self.say("Vcc %.1f" % vcc_volts)
def log_progress(lbl='Progress: ', length=0, flushfreq=4, startafter=-1,
                 start=True, repl=False, approx=False, disable=False,
                 writefreq=1, with_time=False, backspace=True,
                 pad_stdout=False, wfreq=None, ffreq=None, freq=None,
                 total=None, num=None, with_totaltime=None):
    """DEPRICATE
    FIXME: depricate for ProgressIter.
    still used in util_dev

    Build and return a ``(mark_progress, end_progress)`` pair of callbacks
    for reporting loop progress on stdout.  ``mark_progress(count)`` is
    called per iteration; ``end_progress()`` once at the end.
    """
    global AGGROFLUSH
    # Alias kwargs with simpler names (num/total/wfreq/ffreq/freq/
    # with_totaltime are legacy spellings that win over the primary args).
    if num is not None:
        length = num
    if total is not None:
        length = total
    if wfreq is not None:
        writefreq = wfreq
    if ffreq is not None:
        flushfreq = ffreq
    if freq is not None:
        writefreq = flushfreq = freq
    if with_totaltime is not None:
        with_time = with_totaltime
    # flush frequency must be a multiple of write frequency
    flushfreq = max(int(round(flushfreq / writefreq)), 1) * writefreq
    if length < startafter or disable:
        # Do not mark progress if only executing a small number of tasks:
        # return inert no-op callbacks instead.
        def mark_progress(*args):
            pass

        def end_progress(*args):
            pass
        return mark_progress, end_progress
    else:
        write_fn = util_logging._utool_write()
        flush_fn = util_logging._utool_flush()
        # build format string for displaying progress
        fmt_str = progress_str(length, lbl=lbl, repl=repl, approx=approx,
                               backspace=backspace)
        if AGGROFLUSH:
            # Progress function which automatically flushes on every call.
            def mark_progress(count, flush_fn=flush_fn):
                count_ = count + 1
                write_fn(fmt_str % (count_))
                flush_fn()
        else:
            # Progress function flushes every <flushfreq> times.
            # Defaults bind the current values so later rebinding of the
            # module-level names cannot change this closure's behavior.
            def mark_progress(count, fmt_str=fmt_str, flushfreq=flushfreq,
                              writefreq=writefreq, write_fn=write_fn,
                              flush_fn=flush_fn):
                count_ = count + 1
                if count_ % writefreq == 0:
                    write_fn(fmt_str % count_)
                if count_ % flushfreq == 0:
                    flush_fn()
        if pad_stdout:
            write_fn('\n')
            write_fn('\n')
            flush_fn()
        if with_time:
            # Start a labeled timer; end_progress will report the total.
            tt = util_time.tic(lbl)

        def end_progress(count_=length, write_fn=write_fn, flush_fn=flush_fn):
            # Write the final count and terminate the progress line.
            write_fn(fmt_str % (count_))
            write_fn('\n')
            flush_fn()
            if with_time:
                util_time.toc(tt)
            if pad_stdout:
                write_fn('\n\n')
                flush_fn()
        # mark_progress(0)
        if start:
            # Prime the display with the zeroth mark (count of -1 -> 0).
            mark_progress(-1)
        return mark_progress, end_progress
def recoSurface(points, bins=256):
    """Surface reconstruction from a scattered cloud of points.

    :param points: input points (N x 3 array-like, or a ``vtkActor``
        whose coordinates are used).
    :param int bins: number of voxels in x, y and z.
    :return: an ``Actor`` wrapping the reconstructed surface, or None if
        fewer than 50 points were supplied.

    .. hint:: |recosurface| |recosurface.py|_
    """
    if isinstance(points, vtk.vtkActor):
        points = points.coordinates()
    N = len(points)
    if N < 50:
        print("recoSurface: Use at least 50 points.")
        return None
    points = np.array(points)
    # Build a vtkPolyData holding the input points (vtkPointSource is used
    # only as a convenient allocator; its random points are overwritten).
    ptsSource = vtk.vtkPointSource()
    ptsSource.SetNumberOfPoints(N)
    ptsSource.Update()
    vpts = ptsSource.GetOutput().GetPoints()
    for i, p in enumerate(points):
        vpts.SetPoint(i, p)
    polyData = ptsSource.GetOutput()
    # Signed distance field, computed on bounds padded by 10% per side.
    distance = vtk.vtkSignedDistance()
    f = 0.1
    x0, x1, y0, y1, z0, z1 = polyData.GetBounds()
    distance.SetBounds(x0 - (x1 - x0) * f, x1 + (x1 - x0) * f,
                       y0 - (y1 - y0) * f, y1 + (y1 - y0) * f,
                       z0 - (z1 - z0) * f, z1 + (z1 - z0) * f)
    if polyData.GetPointData().GetNormals():
        distance.SetInputData(polyData)
    else:
        # No point normals available: estimate them first via PCA on
        # neighborhoods of ~N/50 points.
        normals = vtk.vtkPCANormalEstimation()
        normals.SetInputData(polyData)
        normals.SetSampleSize(int(N / 50))
        normals.SetNormalOrientationToGraphTraversal()
        distance.SetInputConnection(normals.GetOutputPort())
        print("Recalculating normals for", N, "Points, sample size=", int(N / 50))
    # Search radius proportional to the cloud's bounding-box diagonal and
    # the voxel size.
    b = polyData.GetBounds()
    diagsize = np.sqrt((b[1] - b[0]) ** 2 + (b[3] - b[2]) ** 2 + (b[5] - b[4]) ** 2)
    radius = diagsize / bins * 5
    distance.SetRadius(radius)
    distance.SetDimensions(bins, bins, bins)
    distance.Update()
    print("Calculating mesh from points with R =", radius)
    # Extract the zero isosurface of the signed distance field.
    surface = vtk.vtkExtractSurface()
    surface.SetRadius(radius * 0.99)
    surface.HoleFillingOn()
    surface.ComputeNormalsOff()
    surface.ComputeGradientsOff()
    surface.SetInputConnection(distance.GetOutputPort())
    surface.Update()
    return Actor(surface.GetOutput(), "gold", 1, 0, "tomato")
def validate_words(word_list):
    '''Check each edited word in ``word_list`` for membership in the known
    word distribution.

    :param word_list: list of candidate words, or None.
    :return: set of the words that are valid English words.
    :raises InputError: if ``word_list`` is neither None nor a list.
    '''
    if word_list is None:
        # BUG FIX: previously returned {} -- an empty *dict* -- although the
        # function promises a set; callers doing set operations would break.
        return set()
    elif isinstance(word_list, list):
        if not word_list:
            return set()
        # Keep only the words present in the known-word distribution.
        return set(word for word in word_list if word in WORD_DISTRIBUTION)
    else:
        raise InputError("list variable not passed as argument to validate_words")
def write(self, oprot):
    '''Write this object to the given output protocol and return self.

    :type oprot: thryft.protocol._output_protocol._OutputProtocol
    :rtype: pastpy.gen.database.impl.online.online_database_objects_list_item.OnlineDatabaseObjectsListItem
    '''
    oprot.write_struct_begin('OnlineDatabaseObjectsListItem')
    # Required string fields, written in declaration order.
    for field_name, field_value in (('detail_href', self.detail_href),
                                    ('record_type', self.record_type),
                                    ('title', self.title)):
        oprot.write_field_begin(name=field_name, type=11, id=None)
        oprot.write_string(field_value)
        oprot.write_field_end()
    # Optional field: skipped entirely when unset.
    if self.thumbnail_url is not None:
        oprot.write_field_begin(name='thumbnail_url', type=11, id=None)
        oprot.write_string(self.thumbnail_url)
        oprot.write_field_end()
    oprot.write_field_stop()
    oprot.write_struct_end()
    return self
def _create_bvals_bvecs(multiframe_dicom, bval_file, bvec_file, nifti, nifti_file):
    """Write the bvals from the sorted dicom files to a bval file.

    Inspired by https://github.com/IBIC/ibicUtils/blob/master/ibicBvalsBvecs.py

    Returns ``(bvals, bvecs, bval_file, bvec_file)``; all four are set to
    None when no diffusion information was found.
    """
    # create the empty arrays: one entry per stack (timepoint).
    # (2001,105f)/(2001,102d) is a private Philips tag holding the number
    # of slices per stack -- TODO confirm against the vendor dict.
    number_of_stack_slices = common.get_ss_value(multiframe_dicom[Tag(0x2001, 0x105f)][0][Tag(0x2001, 0x102d)])
    number_of_stacks = int(int(multiframe_dicom.NumberOfFrames) / number_of_stack_slices)
    bvals = numpy.zeros([number_of_stacks], dtype=numpy.int32)
    bvecs = numpy.zeros([number_of_stacks, 3])
    # loop over all timepoints and create a list with all bvals and bvecs
    for stack_index in range(0, number_of_stacks):
        # (5200,9230) is the per-frame functional groups sequence.
        stack = multiframe_dicom[Tag(0x5200, 0x9230)][stack_index]
        # Only DIRECTIONAL frames ((0018,9075) diffusion directionality)
        # carry a b-value (0018,9087) and a gradient orientation (0018,9089).
        if str(stack[Tag(0x0018, 0x9117)][0][Tag(0x0018, 0x9075)].value) == 'DIRECTIONAL':
            bvals[stack_index] = common.get_fd_value(stack[Tag(0x0018, 0x9117)][0][Tag(0x0018, 0x9087)])
            bvecs[stack_index, :] = common.get_fd_array_value(stack[Tag(0x0018, 0x9117)][0][Tag(0x0018, 0x9076)][0][Tag(0x0018, 0x9089)], 3)
    # truncate nifti if needed
    nifti, bvals, bvecs = _fix_diffusion_images(bvals, bvecs, nifti, nifti_file)
    # save the found bvecs to the file
    if numpy.count_nonzero(bvals) > 0 or numpy.count_nonzero(bvecs) > 0:
        common.write_bval_file(bvals, bval_file)
        common.write_bvec_file(bvecs, bvec_file)
    else:
        # nothing diffusion-weighted found: signal that no files were written
        bval_file = None
        bvec_file = None
        bvals = None
        bvecs = None
    return bvals, bvecs, bval_file, bvec_file
def get_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False,
             version_id=None, override_num_retries=None,
             response_headers=None, callback=None):
    """Retrieves a file from an S3 Key.

    :type fp: file
    :param fp: File pointer to put the data into

    :type headers: string
    :param headers: headers to send when retrieving the files

    :type cb: function
    :param cb: a callback function that will be called to report progress.
        The callback should accept two integer parameters: the number of
        bytes successfully transmitted and the total size of the object.

    :type num_cb: int
    :param num_cb: (optional) If a callback is specified with the cb
        parameter, this determines the granularity of the callback by
        defining the maximum number of times it will be called during the
        transfer.

    :type torrent: bool
    :param torrent: Flag for whether to get a torrent for the file

    :type override_num_retries: int
    :param override_num_retries: If not None will override configured
        num_retries parameter for underlying GET.

    :type response_headers: dict
    :param response_headers: A dictionary containing HTTP headers/values
        that will override any headers associated with the stored object
        in the response.  See http://goo.gl/EWOPb for details.

    :param callback: (optional) called with the HTTP response once the
        download has completed.
    """
    if cb:
        # Report roughly num_cb times; the extra slots account for the
        # initial and final callback invocations.
        if num_cb > 2:
            cb_count = self.size / self.BufferSize / (num_cb - 2)
        elif num_cb < 0:
            cb_count = -1
        else:
            cb_count = 0
        i = total_bytes = 0
        cb(total_bytes, self.size)
    # Temporarily silence per-request debug logging during the transfer.
    save_debug = self.bucket.connection.debug
    if self.bucket.connection.debug == 1:
        self.bucket.connection.debug = 0
    query_args = []
    if torrent:
        query_args.append('torrent')
    # If a version_id is passed in, use that.  If not, check to see
    # if the Key object has an explicit version_id and, if so, use that.
    # Otherwise, don't pass a version_id query param.
    if version_id is None:
        version_id = self.version_id
    if version_id:
        query_args.append('versionId=%s' % version_id)
    if response_headers:
        for key in response_headers:
            query_args.append('%s=%s' % (key, response_headers[key]))
    query_args = '&'.join(query_args)

    def file_got(response):
        # NOTE(review): reads self.resp rather than the ``response``
        # argument -- presumably self.open() stores the HTTP response on
        # self; confirm before changing.
        body = self.resp.read()
        fp.write(body)
        if cb:
            cb(total_bytes, self.size)
        self.close()
        # Restore the debug level saved above.
        self.bucket.connection.debug = save_debug
        if callable(callback):
            callback(response)

    # Asynchronous open: file_got runs once the GET completes.
    self.open('r', headers, query_args=query_args,
              override_num_retries=override_num_retries, callback=file_got)
def format_private_ip_address(result):
    '''Formats the PrivateIPAddress object removing arguments that are empty.'''
    from collections import OrderedDict
    formatted = OrderedDict()
    # Only copy over the attributes that actually have content.
    for key, value in (('ipAddress', result.ip_address),
                       ('subnetResourceId', result.subnet_resource_id)):
        if value is not None:
            formatted[key] = value
    return formatted
def add_row_range_from_keys(self, start_key=None, end_key=None,
                            start_inclusive=True, end_inclusive=False):
    """Append a :class:`RowRange` built from row keys to ``row_ranges``.

    For example:

    .. literalinclude:: snippets_table.py
        :start-after: [START bigtable_row_range_from_keys]
        :end-before: [END bigtable_row_range_from_keys]

    :type start_key: bytes
    :param start_key: (Optional) Start key of the row range.  If left
                      empty, will be interpreted as the empty string.

    :type end_key: bytes
    :param end_key: (Optional) End key of the row range.  If left empty,
                    will be interpreted as the empty string and the range
                    will be unbounded on the high end.

    :type start_inclusive: bool
    :param start_inclusive: (Optional) Whether the ``start_key`` should be
                            considered inclusive.  The default is True.

    :type end_inclusive: bool
    :param end_inclusive: (Optional) Whether the ``end_key`` should be
                          considered inclusive.  The default is False.
    """
    self.row_ranges.append(
        RowRange(start_key, end_key, start_inclusive, end_inclusive))
def set_mode(self, mode):
    """Request a new alarm mode for this device and refresh local state.

    :param mode: a str, one of [home, away, night]
    :return: nothing
    """
    desired = {"desired_state": {"mode": mode}}
    response = self.api_interface.set_device_state(self, desired)
    self._update_state_from_response(response)
def _request_get(self, path, params=None, json=True, url=BASE_URL):
    """Perform an HTTP GET request, retrying with exponential backoff on
    5xx responses.

    :param path: path joined onto ``url``.
    :param params: optional mapping of query parameters.
    :param json: when True (default) return the parsed JSON body,
        otherwise return the raw response object.
    :param url: base URL to resolve ``path`` against.
    :raises requests.HTTPError: if the final response has an error status.
    """
    url = urljoin(url, path)
    headers = self._get_request_headers()
    # ROBUSTNESS FIX: the initial request previously had no timeout while
    # the retries did, so a stalled server could hang the caller forever.
    response = requests.get(url, params=params, headers=headers,
                            timeout=DEFAULT_TIMEOUT)
    if response.status_code >= 500:
        # Retry with exponentially growing sleeps; keep the last response
        # if every retry also fails.
        backoff = self._initial_backoff
        for _ in range(self._max_retries):
            time.sleep(backoff)
            backoff_response = requests.get(url, params=params,
                                            headers=headers,
                                            timeout=DEFAULT_TIMEOUT)
            if backoff_response.status_code < 500:
                response = backoff_response
                break
            backoff *= 2
    response.raise_for_status()
    return response.json() if json else response
def get_nonparametric_sources(self):
    """:returns: list of the non-parametric sources in the composite
        source model (i.e. those carrying a ``data`` attribute)."""
    nonparametric = []
    for source_model in self.source_models:
        for group in source_model.src_groups:
            for source in group:
                if hasattr(source, 'data'):
                    nonparametric.append(source)
    return nonparametric
def get(self, sid):
    """Constructs a RoleContext for the identified resource.

    :param sid: The unique string that identifies the resource
    :returns: twilio.rest.chat.v2.service.role.RoleContext
    :rtype: twilio.rest.chat.v2.service.role.RoleContext
    """
    service_sid = self._solution['service_sid']
    return RoleContext(self._version, service_sid=service_sid, sid=sid)
def discard(self, element):
    """Removes an element from the set.

    .. note: You may remove elements from the set that are not
        present, but a context from the server is required.

    :param element: the element to remove
    :type element: str
    """
    # Validate the value before touching any state.
    _check_element(element)
    # Removal is only well-defined with a server context.
    self._require_context()
    # Queue the removal; it is applied when the set is synced.
    self._removes.add(element)
def count_generator(generator, memory_efficient=True):
    """Count the number of items yielded by a generator.

    memory_efficient=True: ~3x slower, but O(1) memory.
    memory_efficient=False: faster, but materializes every item first.
    """
    if memory_efficient:
        # Stream through the generator without retaining any items.
        return sum(1 for _ in generator)
    # Fast path: pull everything into a list and take its length.
    return len(list(generator))
def pause(self):
    """Pauses a running pipeline.  This will stop retrieving results from
    the pipeline.  Parallel parts of the pipeline will stop after the
    ``NuMap`` buffer has been filled.  A paused pipeline can be run or
    stopped.

    :raises PlumberError: if the pipeline is not currently running, or a
        pause is already in progress.
    """
    # 1. stop the plumbing thread by raising a StopIteration on a stride
    # boundary
    if self._started.isSet() and self._running.isSet() and not self._pausing.isSet():
        self._pausing.set()
        # Wait for the plumbing thread to exit before discarding it.
        self._plunger.join()
        del self._plunger
        self._pausing.clear()
        self._running.clear()
    else:
        raise PlumberError
def vertex_enumeration_gen(g, qhull_options=None):
    """Generator version of `vertex_enumeration`.

    Parameters
    ----------
    g : NormalFormGame
        NormalFormGame instance with 2 players.
    qhull_options : str, optional(default=None)
        Options to pass to `scipy.spatial.ConvexHull`. See the `Qhull
        manual <http://www.qhull.org>`_ for details.

    Yields
    ------
    tuple(ndarray(float, ndim=1))
        Tuple of Nash equilibrium mixed actions.
    """
    try:
        N = g.N
    except AttributeError:
        raise TypeError('input must be a 2-player NormalFormGame')
    if N != 2:
        raise NotImplementedError('Implemented only for 2-player games')

    # Player i's best-response polytope is built from the opponent's payoffs.
    brps = []
    for i in range(N):
        brps.append(_BestResponsePolytope(
            g.players[1 - i], idx=i, qhull_options=qhull_options))

    labelings_bits_tup = tuple(_ints_arr_to_bits(brp.labelings) for brp in brps)
    equations_tup = tuple(brp.equations for brp in brps)
    trans_recips = tuple(brp.trans_recip for brp in brps)

    return _vertex_enumeration_gen(labelings_bits_tup, equations_tup,
                                   trans_recips)
def send_query(query_dict):
    """Query ChEMBL API

    Parameters
    ----------
    query_dict : dict
        'query': string of the endpoint to query
        'params': dict of params for the query

    Returns
    -------
    js : dict
        dict parsed from json that is unique to the submitted query
    """
    endpoint = query_dict['query']
    params = query_dict['params']
    url = 'https://www.ebi.ac.uk/chembl/api/data/' + endpoint + '.json'
    response = requests.get(url, params=params)
    # Raise on any HTTP error before attempting to parse the body.
    response.raise_for_status()
    return response.json()
def _escape_arg(self, arg):
    '''Properly escape argument to protect special characters from shell
    interpretation. This avoids having to do tricky argument quoting.

    Effectively just escape all characters in the argument that are not
    alphanumeric!
    '''
    # WinRM commands do not go through a POSIX shell, so no escaping.
    if self.winrm:
        return arg
    escaped = []
    for ch in arg:
        if re.match(r'\W', ch):
            escaped.append('\\' + ch)
        else:
            escaped.append(ch)
    return ''.join(escaped)
def loop_write(self, max_packets=1):
    """Process write network events. Use in place of calling loop() if you
    wish to handle your client writes as part of your own application.

    Use socket() to obtain the client socket to call select() or equivalent
    on.

    Use want_write() to determine if there is data waiting to be written.

    Do not use if you are using the threaded interface loop_start().

    :param max_packets: kept for backward compatibility only; the value is
        ignored and recomputed from the outgoing packet queue below.
    :returns: an MQTT_ERR_* status code (MQTT_ERR_NO_CONN when there is no
        open socket, MQTT_ERR_SUCCESS otherwise unless a write fails).
    """
    if self._sock is None and self._ssl is None:
        return MQTT_ERR_NO_CONN
    # NOTE: the caller-supplied max_packets is deliberately clobbered;
    # one write attempt is made per queued outgoing packet (+1).
    max_packets = len(self._out_packet) + 1
    if max_packets < 1:
        max_packets = 1
    for i in range(0, max_packets):
        rc = self._packet_write()
        if rc > 0:
            return self._loop_rc_handle(rc)
        elif rc == MQTT_ERR_AGAIN:
            # The socket would block: not an error, retry on a later call.
            return MQTT_ERR_SUCCESS
    return MQTT_ERR_SUCCESS
def constantrotating_to_static(frame_r, frame_i, w, t=None):
    """Transform from a constantly rotating frame to a static, inertial frame.

    Parameters
    ----------
    frame_i : `~gala.potential.StaticFrame`
    frame_r : `~gala.potential.ConstantRotatingFrame`
    w : `~gala.dynamics.PhaseSpacePosition`, `~gala.dynamics.Orbit`
    t : quantity_like (optional)
        Required if input coordinates are just a phase-space position.

    Returns
    -------
    pos : `~astropy.units.Quantity`
        Position in static, inertial frame.
    vel : `~astropy.units.Quantity`
        Velocity in static, inertial frame.
    """
    # sign=-1 selects the rotating -> static direction of the shared helper.
    helper_kwargs = dict(frame_r=frame_r, frame_i=frame_i, w=w, t=t, sign=-1.)
    return _constantrotating_static_helper(**helper_kwargs)
def _realpath(self, relpath):
    """Follow symlinks to find the real path to a file or directory in the repo.

    :returns: if the expanded path points to a file, the relative path
        to that file; if a directory, the relative path + '/'; if
        a symlink outside the repo, a path starting with / or ../.
    """
    target, resolved_path = self._read_object(relpath, MAX_SYMLINKS_IN_REALPATH)
    # Still a symlink after the maximum number of hops: treat as a loop.
    if isinstance(target, self.Symlink):
        raise self.SymlinkLoopException(self.rev, relpath)
    return resolved_path
def remember(self, user_name):
    '''Remember the authenticated identity.

    This method simply delegates to another IIdentifier plugin if configured.
    '''
    log.debug('Repoze OAuth remember')
    environ = toolkit.request.environ
    rememberer = self._get_rememberer(environ)
    identity = {'repoze.who.userid': user_name}
    # Copy every header produced by the rememberer onto the response.
    for header, value in rememberer.remember(environ, identity):
        toolkit.response.headers.add(header, value)
def put(self, resource, obj, operation_timeout=None, max_envelope_size=None, locale=None):
    """resource can be a URL or a ResourceLocator

    NOTE(review): ``resource``, ``operation_timeout``,
    ``max_envelope_size`` and ``locale`` are never used and ``headers``
    is always ``None``; only ``obj`` is forwarded to the service.
    Confirm whether this is intentional or an unfinished implementation.
    """
    headers = None
    return self.service.invoke(headers, obj)
def create_api_v4_virtual_interface(self):
    """Get an instance of Api Virtual Interface services facade."""
    # Credentials and endpoint come straight from this client's state.
    return ApiV4VirtualInterface(
        self.networkapi_url,
        self.user,
        self.password,
        self.user_ldap,
    )
def make3d(self, forcefield="mmff94", steps=50):
    """A wrapper to pybel's make3D method generate a 3D structure from a
    2D or 0D structure.

    The 3D structure is made very quickly using a combination of rules
    (e.g. sp3 atoms should have four bonds arranged in a tetrahedron) and
    ring templates (e.g. cyclohexane is shaped like a chair). Once 3D
    coordinates are generated, hydrogens are added and a quick local
    optimization is carried out as default.

    The generated 3D structure can have clashes or have high energy
    structures due to some strain. Please consider to use the conformer
    search or geometry optimization to further optimize the structure.

    Args:
        forcefield: Default is mmff94. Options are 'gaff', 'ghemical',
            'mmff94', 'mmff94s', and 'uff'.
        steps: Default is 50.
    """
    # Round-trip through pybel: wrap, generate coordinates, unwrap.
    molecule = pb.Molecule(self._obmol)
    molecule.make3D(forcefield=forcefield, steps=steps)
    self._obmol = molecule.OBMol
def get_mesh(self, var, coords=None):
    """Get the mesh variable for the given `var`

    Parameters
    ----------
    var : xarray.Variable
        The data source with the ``'mesh'`` attribute
    coords : dict
        The coordinates to use. If None, the coordinates of the dataset of
        this decoder is used

    Returns
    -------
    xarray.Coordinate
        The mesh coordinate
    """
    mesh_name = var.attrs.get('mesh')
    if mesh_name is None:
        return None
    if coords is None:
        coords = self.ds.coords
    # Fall back to the decoder dataset's own coordinates when the mesh
    # variable is absent from the given coords.
    fallback = self.ds.coords.get(mesh_name)
    return coords.get(mesh_name, fallback)
def serialize(self):
    """Serialize this record into a plain dict.

    The stored ``request``/``response`` JSON strings are parsed back into
    Python objects; ``None`` values pass through unchanged.
    """
    request = None if self.request is None else json.loads(self.request)
    response = None if self.response is None else json.loads(self.response)
    return {
        'id': self.id,
        'name': self.name,
        'request': request,
        'response': response,
    }
def query_other_gene_name():
    """Returns list of alternative short name by query query parameters

    tags:
      - Query functions
    parameters:
      - name: type_
        in: query
        type: string
        required: false
        description: Alternative short name
        default: CVAP
      - name: name
        in: query
        type: string
        required: false
        description: Alternative short name
        default: CVAP
      - name: entry_name
        in: query
        type: string
        required: false
        description: UniProt entry name
        default: A4_HUMAN
      - name: limit
        in: query
        type: integer
        required: false
        description: limit of results numbers
        default: 10
    """
    # NOTE: the docstring above doubles as the swagger/flasgger API spec,
    # so its content is kept verbatim.
    allowed_str = ['name', 'entry_name']
    allowed_int = ['limit']
    args = get_args(
        request_args=request.args,
        allowed_str_args=allowed_str,
        allowed_int_args=allowed_int,
    )
    return jsonify(query.other_gene_name(**args))
def get_trips(feed: "Feed", date: Optional[str] = None, time: Optional[str] = None) -> DataFrame:
    """Return a subset of ``feed.trips``.

    Parameters
    ----------
    feed : Feed
    date : string
        YYYYMMDD date string
    time : string
        HH:MM:SS time string, possibly with HH > 23

    Returns
    -------
    DataFrame
        The subset of ``feed.trips`` containing trips active (starting)
        on the given date at the given time.
        If no date or time are specified, then return the entire
        ``feed.trips``.
    """
    if feed.trips is None or date is None:
        return feed.trips
    f = feed.trips.copy()
    # Keep only trips active on the given date.
    f["is_active"] = f["trip_id"].map(
        lambda trip_id: feed.is_active_trip(trip_id, date))
    f = f[f["is_active"]].copy()
    del f["is_active"]
    if time is not None:
        # Get trips active during given time
        g = pd.merge(f, feed.stop_times[["trip_id", "departure_time"]])

        def F(group):
            # A trip is "active" at `time` if it falls between the trip's
            # first and last non-null departure times; plain string
            # comparison works because HH:MM:SS sorts lexicographically.
            d = {}
            start = group["departure_time"].dropna().min()
            end = group["departure_time"].dropna().max()
            try:
                result = start <= time <= end
            except TypeError:
                # All departure times for this trip were null.
                result = False
            d["is_active"] = result
            return pd.Series(d)

        h = g.groupby("trip_id").apply(F).reset_index()
        f = pd.merge(f, h[h["is_active"]])
        del f["is_active"]
    return f
def add(self, value, session=None, **kwargs):
    '''Add ``value``, an instance of :attr:`formodel` to the
    :attr:`through` model. This method can only be accessed by an instance of the
    model for which this related manager is an attribute.'''
    # Resolve the session and through-model instance, then stage the add.
    sess, through_instance = self.session_instance('add', value, session, **kwargs)
    return sess.add(through_instance)
def class_name_to_resource_name(class_name: str) -> str:
    """Converts a camel case class name to a resource name with spaces.

    >>> class_name_to_resource_name('FooBarObject')
    'Foo Bar Object'

    :param class_name: The name to convert.
    :returns: The resource name.
    """
    # Pass 1: split any character from a following capitalized word
    # (handles acronym boundaries like 'HTTPServer' -> 'HTTP Server').
    spaced = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', class_name)
    # Pass 2: split a lowercase letter or digit from a following uppercase.
    return re.sub('([a-z0-9])([A-Z])', r'\1 \2', spaced)
def progress_color(current, total, name, style='normal', when='auto'):
    """Display a simple, colored progress report."""
    # Build the "[current/total] " prefix, then delegate the colored output.
    prefix = '[%d/%d] ' % (current, total)
    update_color(prefix, name, style, when)
def login(self, role, jwt, use_token=True, mount_point=DEFAULT_MOUNT_POINT):
    """Login to retrieve a Vault token via the GCP auth method.

    This endpoint takes a signed JSON Web Token (JWT) and a role name for
    some entity. It verifies the JWT signature with Google Cloud to
    authenticate that entity and then authorizes the entity for the given
    role.

    Supported methods:
        POST: /auth/{mount_point}/login. Produces: 200 application/json

    :param role: The name of the role against which the login is being attempted.
    :type role: str | unicode
    :param jwt: A signed JSON web token
    :type jwt: str | unicode
    :param use_token: if True, uses the token in the response received from the
        auth request to set the "token" attribute on the
        :py:meth:`hvac.adapters.Adapter` instance under the _adapter Client
        attribute.
    :type use_token: bool
    :param mount_point: The "path" the method/backend was mounted on.
    :type mount_point: str | unicode
    :return: The JSON response of the request.
    :rtype: dict
    """
    api_path = '/v1/auth/{mount_point}/login'.format(mount_point=mount_point)
    payload = {
        'role': role,
        'jwt': jwt,
    }
    return self._adapter.login(url=api_path, use_token=use_token, json=payload)
def parse(fp):
    """Parse the contents of the `~io.IOBase.readline`-supporting file-like object
    ``fp`` as a simple line-oriented ``.properties`` file and return a
    generator of ``(key, value, original_lines)`` triples for every entry in
    ``fp`` (including duplicate keys) in order of occurrence. The third
    element of each triple is the concatenation of the unmodified lines in
    ``fp`` (including trailing newlines) from which the key and value were
    extracted. The generator also includes comments and blank/all-whitespace
    lines found in ``fp``, one triple per line, with the first two elements of
    the triples set to `None`. This is the only way to extract comments from a
    ``.properties`` file with this library.

    ``fp`` may be either a text or binary filehandle, with or without universal
    newlines enabled. If it is a binary filehandle, its contents are decoded
    as Latin-1.

    .. versionchanged:: 0.5.0
        Invalid ``\\uXXXX`` escape sequences will now cause an
        `InvalidUEscapeError` to be raised

    :param fp: the file from which to read the ``.properties`` document
    :type fp: file-like object
    :rtype: generator of triples of text strings
    :raises InvalidUEscapeError: if an invalid ``\\uXXXX`` escape sequence
        occurs in the input
    """
    def lineiter():
        # Yield one physical line at a time, decoding bytes as Latin-1 and
        # normalizing any newline convention via ascii_splitlines.
        while True:
            ln = fp.readline()
            if isinstance(ln, binary_type):
                ln = ln.decode('iso-8859-1')
            if ln == '':
                return
            for l in ascii_splitlines(ln):
                yield l
    liter = lineiter()
    for source in liter:
        line = source
        # Comment lines (starting with # or !) and blank lines pass through
        # with key and value set to None.
        if re.match(r'^[ \t\f]*(?:[#!]|\r?\n?$)', line):
            yield (None, None, source)
            continue
        line = line.lstrip(' \t\f').rstrip('\r\n')
        # An odd number of trailing backslashes marks a line continuation:
        # drop the final backslash and splice in the next physical line.
        while re.search(r'(?<!\\)(?:\\\\)*\\$', line):
            line = line[:-1]
            nextline = next(liter, '')
            source += nextline
            line += nextline.lstrip(' \t\f').rstrip('\r\n')
        if line == '':
            # series of otherwise-blank lines with continuations
            yield (None, None, source)
            continue
        # The first unescaped separator (optionally-padded '=' or ':', or a
        # bare whitespace run) splits the key from the value.
        m = re.search(r'(?<!\\)(?:\\\\)*([ \t\f]*[=:]|[ \t\f])[ \t\f]*', line)
        if m:
            yield (unescape(line[:m.start(1)]), unescape(line[m.end():]), source)
        else:
            # No separator: the whole line is the key, value is empty.
            yield (unescape(line), '', source)
def filename(self, appendix=None, create_if_not_existing=False):
    """Build the full path for a data file/folder based on the settings.

    The name is ``<start_time>_<tag>`` inside ``settings['path']``; a
    relative configured path is resolved against ``self.data_path``.

    Args:
        appendix: optional suffix appended to the file name inside the
            timestamped folder.
        create_if_not_existing: create the directory if it does not exist.

    Returns:
        The assembled filename (or directory path when ``appendix`` is None).
    """
    # If provided path is relative and self.data_path exists, build the
    # path against it; 'not isabs(...)' replaces the '== False' anti-idiom.
    if not os.path.isabs(self.settings['path']) and self.data_path is not None:
        path = os.path.join(self.data_path, self.settings['path'])
    else:
        path = self.settings['path']
    tag = self.settings['tag']
    timestamp = self.start_time.strftime('%y%m%d-%H_%M_%S')
    filename = os.path.join(path, "{:s}_{:s}".format(timestamp, tag))
    if create_if_not_existing and not os.path.exists(filename):
        os.makedirs(filename)
    if appendix is not None:
        filename = os.path.join(
            filename, "{:s}_{:s}{:s}".format(timestamp, tag, appendix))
    # windows can't deal with long filenames so we would have to use the
    # prefix '\\\\?\\' -- left disabled as in the original implementation.
    return filename
def cipher(self):
    """Applies the Caesar shift cipher.

    Based on the attributes of the object, applies the Caesar shift cipher
    to the message attribute. Accepts positive and negative integers as
    offsets.

    Required attributes:
        message
        offset

    Returns:
        String with cipher applied.
    """
    # If no offset is selected, pick random one with sufficient distance
    # from original.
    if self.offset is False:
        self.offset = randrange(5, 25)
        logging.info("Random offset selected: {0}".format(self.offset))
    logging.debug("Offset set: {0}".format(self.offset))
    # Cipher
    chars = list(self.message)
    for position, symbol in enumerate(chars):
        if not symbol.isalpha():
            continue
        # Use default upper and lower case characters if alphabet
        # not supplied by user.
        if symbol.isupper():
            working_alphabet = [character.upper() for character in self.alphabet]
        else:
            working_alphabet = self.alphabet
        logging.debug("Letter: {0}".format(symbol))
        logging.debug("Alphabet: {0}".format(working_alphabet))
        shifted = working_alphabet.index(symbol) + self.offset
        if not 0 <= shifted <= 25:
            shifted %= 26
        logging.debug("Cipher value: {0}".format(shifted))
        chars[position] = working_alphabet[shifted]
        logging.debug("Ciphered letter: {0}".format(symbol))
    self.message = ''.join(chars)
    return self.message
def _get_order_clause(archive_table):
    """Returns an ascending order clause on the versioned unique constraint as well as the
    version column."""
    # Order by each column of the versioned unique constraint first, then
    # by the version id, all ascending.
    ordered_columns = [getattr(archive_table, col_name)
                       for col_name in archive_table._version_col_names]
    ordered_columns.append(archive_table.version_id)
    return [sa.asc(column) for column in ordered_columns]
def step(self, action):
    """Run one timestep of the environment's dynamics. When end of
    episode is reached, you are responsible for calling `reset()`
    to reset this environment's state.

    Accepts an action and returns a tuple (observation, reward, done, info).
    In the case of multi-agent environments, these are lists.

    Args:
        action (object/list): an action provided by the environment
    Returns:
        observation (object/list): agent's observation of the current environment
        reward (float/list): amount of reward returned after previous action
        done (boolean/list): whether the episode has ended.
        info (dict): contains auxiliary diagnostic information, including BrainInfo.
    """
    # Use random actions for all other agents in environment.
    if self._multiagent:
        # Multi-agent mode requires one action per agent, as a list.
        if not isinstance(action, list):
            raise UnityGymException("The environment was expecting `action` to be a list.")
        if len(action) != self._n_agents:
            raise UnityGymException(
                "The environment was expecting a list of {} actions.".format(self._n_agents))
        else:
            if self._flattener is not None:
                # Action space is discrete and flattened - we expect a list of scalars
                action = [self._flattener.lookup_action(_act) for _act in action]
            action = np.array(action)
    else:
        if self._flattener is not None:
            # Translate action into list
            action = self._flattener.lookup_action(action)
    # Advance the Unity simulation and pull this brain's BrainInfo.
    info = self._env.step(action)[self.brain_name]
    n_agents = len(info.agents)
    self._check_agents(n_agents)
    self._current_state = info
    if not self._multiagent:
        obs, reward, done, info = self._single_step(info)
        self.game_over = done
    else:
        obs, reward, done, info = self._multi_step(info)
        # The multi-agent episode ends only when every agent is done.
        self.game_over = all(done)
    return obs, reward, done, info
def list_repo(self):
    """Returns info about all Repos."""
    request = proto.ListRepoRequest()
    reply = self.stub.ListRepo(request, metadata=self.metadata)
    # Some server responses may lack the repo_info field entirely.
    return reply.repo_info if hasattr(reply, 'repo_info') else []
def _lowerAsn(asnfile):
    """Create a copy of the original asn file and change
    the case of all members to lower-case."""
    # Derive the new name by inserting '_pipeline' before '_asn.fits'.
    idx = asnfile.find('_asn.fits')
    new_asn = asnfile[:idx] + '_pipeline' + asnfile[idx:]
    if os.path.exists(new_asn):
        os.remove(new_asn)
    # copy original ASN table to new table
    shutil.copy(asnfile, new_asn)
    # Open the copy in update mode and convert every MEMNAME to lower-case.
    fasn = fits.open(new_asn, mode='update', memmap=False)
    for i in range(len(fasn[1].data)):
        fasn[1].data[i].setfield('MEMNAME', fasn[1].data[i].field('MEMNAME').lower())
    fasn.close()
    return new_asn
def obj_from_file(filename='annotation.yaml', filetype='auto'):
    '''Read object from file.

    :param filename: path of the file to read.
    :param filetype: 'auto' (derive from the extension), 'yaml'/'yml', or
        one of 'pickle', 'pkl', 'pklz', 'picklezip'.
    :returns: the deserialized object; an empty dict for an empty YAML
        file, or None when the filetype is unknown (previously this raised
        an UnboundLocalError because ``obj`` was never assigned).
    '''
    if filetype == 'auto':
        _, ext = os.path.splitext(filename)
        filetype = ext[1:]
    # Initialize so an unrecognized filetype returns None instead of
    # crashing with UnboundLocalError at the final return.
    obj = None
    if filetype in ('yaml', 'yml'):
        from ruamel.yaml import YAML
        yaml = YAML(typ="unsafe")
        with open(filename, encoding="utf-8") as f:
            obj = yaml.load(f)
        if obj is None:
            obj = {}
    elif filetype in ('pickle', 'pkl', 'pklz', 'picklezip'):
        fcontent = read_pkl_and_pklz(filename)
        if sys.version_info[0] < 3:
            import cPickle as pickle
        else:
            import _pickle as pickle
        if sys.version_info.major == 2:
            obj = pickle.loads(fcontent)
        else:
            # Pickles written by Python 2 need latin1 to decode str payloads.
            obj = pickle.loads(fcontent, encoding="latin1")
    else:
        logger.error('Unknown filetype ' + filetype)
    return obj
def get_resources_nodes(call=None, resFilter=None):
    '''Retrieve all hypervisors (nodes) available on this environment

    CLI Example:

    .. code-block:: bash

        salt-cloud -f get_resources_nodes my-proxmox-config
    '''
    log.debug('Getting resource: nodes.. (filter: %s)', resFilter)
    resources = query('get', 'cluster/resources')
    ret = {}
    for resource in resources:
        # Only cluster resources of type 'node' are hypervisors.
        if 'type' in resource and resource['type'] == 'node':
            ret[resource['node']] = resource
    if resFilter is not None:
        log.debug('Filter given: %s, returning requested '
                  'resource: nodes', resFilter)
        return ret[resFilter]
    log.debug('Filter not given: %s, returning all resource: nodes', ret)
    return ret
def reconfigure(working_dir):
    """Reconfigure blockstackd.

    Runs the interactive configuration for the given working directory and
    exits the process with status 0.

    :param working_dir: blockstack working directory to (re)configure.
    """
    configure(working_dir, force=True, interactive=True)
    # print() replaces the Python 2-only print statement so this module
    # can at least be parsed under Python 3.
    print("Blockstack successfully reconfigured.")
    sys.exit(0)
def disconnect_child(self, sprite, *handlers):
    """disconnects from child event. if handler is not specified, will
    disconnect from all the child sprite events

    :param sprite: the child sprite whose handlers should be disconnected.
    :param handlers: specific handler ids to disconnect; when omitted, all
        handlers registered for ``sprite`` are disconnected.
    """
    handlers = handlers or self._child_handlers.get(sprite, [])
    for handler in list(handlers):
        if sprite.handler_is_connected(handler):
            sprite.disconnect(handler)
        if handler in self._child_handlers.get(sprite, []):
            self._child_handlers[sprite].remove(handler)
    # Use .get()/.pop(default) so explicitly passed handlers for a sprite
    # that was never registered no longer raise KeyError.
    if not self._child_handlers.get(sprite):
        self._child_handlers.pop(sprite, None)
def show_instance(name=None, instance_id=None, call=None, kwargs=None):
    '''Show the details from EC2 concerning an AMI.

    Can be called as an action (which requires a name):

    .. code-block:: bash

        salt-cloud -a show_instance myinstance

    ...or as a function (which requires either a name or instance_id):

    .. code-block:: bash

        salt-cloud -f show_instance my-ec2 name=myinstance
        salt-cloud -f show_instance my-ec2 instance_id=i-d34db33f
    '''
    if call == 'action' and not name:
        raise SaltCloudSystemExit('The show_instance action requires a name.')
    if call == 'function':
        # In function mode, identifiers come in via kwargs.
        name = kwargs.get('name', None)
        instance_id = kwargs.get('instance_id', None)
    if not name and not instance_id:
        raise SaltCloudSystemExit('The show_instance function requires '
                                  'either a name or an instance_id')
    node = _get_node(name=name, instance_id=instance_id)
    __utils__['cloud.cache_node'](node, __active_provider_name__, __opts__)
    return node
def display_start(self):
    """Set up status display if option selected. NB: this method
    assumes that the first entry is the iteration count and the last
    is the rho value."""
    if not self.opt['Verbose']:
        # Display disabled: empty format string, zero separator width.
        return '', 0
    # If AutoRho option enabled rho is included in iteration status
    cls = type(self)
    hdrtxt = cls.hdrtxt()
    if not self.opt['AutoRho', 'Enabled']:
        # Drop the trailing rho column from the header.
        hdrtxt = hdrtxt[0:-1]
    # Call utility function to construct status display formatting
    hdrstr, fmtstr, nsep = common.solve_status_str(
        hdrtxt, fwdth0=cls.fwiter, fprec=cls.fpothr)
    # Print header and separator strings
    if self.opt['StatusHeader']:
        print(hdrstr)
        print("-" * nsep)
    return fmtstr, nsep
def metadata(request, config_loader_path=None, valid_for=None):
    """Returns an XML with the SAML 2.0 metadata for this
    SP as configured in the settings.py file."""
    conf = get_config(config_loader_path, request)
    xml_payload = text_type(entity_descriptor(conf)).encode('utf-8')
    return HttpResponse(content=xml_payload,
                        content_type="text/xml; charset=utf8")
def opt_func(options, check_mandatory=True):
    """Restore argument checks for functions that take options dicts as arguments.

    Functions that take the option dictionary produced by :meth:`Options.parse`
    as ``kwargs`` lose the argument checking usually performed by the python
    interpreter. They also lose the ability to take default values for
    their keyword arguments. Such a function is basically unusable without the
    full dictionary produced by the option parser.

    This is a decorator that restores argument checking and default-value
    assignment on the basis of an :class:`Options` instance::

        options = Options([
            (0, "-f", "input", str, 1, None, MULTI, "Input file"),
            (0, "-o", "output", str, 1, None, MA, "Output file"),
            (0, "-p", "topology", str, 1, None, 0, "Optional topology"),
        ])

        @opt_func(options)
        def process_things(**arguments):
            ...

        # Callable with the full parser output, or with only part of the
        # arguments (the rest take their Options defaults). Unknown
        # arguments, missing mandatory arguments (unless
        # check_mandatory=False), and positional arguments raise TypeError.

    Note that the decorator cannot be used on functions that accept other
    arguments than the ones defined in the :class:`Options` instance, and
    all arguments must be passed as keywords.

    :param options: the :class:`Options` instance describing defaults and
        mandatory keys.
    :param check_mandatory: when True, missing mandatory keys raise TypeError.
    :returns: the decorator to apply to a ``**kwargs`` function.
    """
    def validate_arguments(func):
        @functools.wraps(func)
        def wrap(*args, **kwargs):
            # Positional arguments are never accepted.
            if args:
                raise TypeError('{0.__name__}() takes 0 positional arguments '
                                'but {1} was given'.format(func, len(args)))
            provided = set(kwargs.keys())
            if check_mandatory:
                missing = options.mandatory_keys - provided
                if missing:
                    raise TypeError('{0.__name__}() is missing the following '
                                    'mandatory keyword arguments: {1}'
                                    .format(func, ', '.join(missing)))
            # Start from the defaults and overlay the provided values.
            arguments = options._default_dict()
            unknown = provided - set(arguments.keys())
            if unknown:
                raise TypeError('{0.__name__}() received the following '
                                'unexpected arguments: {1}'
                                .format(func, ', '.join(unknown)))
            arguments.update(**kwargs)
            return func(**arguments)
        return wrap
    return validate_arguments
def ntp_authentication_key_md5(self, **kwargs):
    """Build the NETCONF payload for an NTP MD5 authentication key.

    Required kwargs: 'keyid' and 'md5'. An optional 'callback' overrides
    the instance default for submitting the generated config element.
    """
    config = ET.Element("config")
    ntp = ET.SubElement(config, "ntp", xmlns="urn:brocade.com:mgmt:brocade-ntp")
    auth_key = ET.SubElement(ntp, "authentication-key")
    ET.SubElement(auth_key, "keyid").text = kwargs.pop('keyid')
    ET.SubElement(auth_key, "md5").text = kwargs.pop('md5')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def highlightBlock(self, text):
    """Apply syntax highlighting to the given block of text.

    Iterates the (QRegExp, capture-group, format) triples in ``self.rules``
    and formats every occurrence, then delegates multi-line string
    handling to ``match_multiline``.
    """
    # Do other syntax formatting
    for expression, nth, format in self.rules:
        index = expression.indexIn(text, 0)
        while index >= 0:
            # We actually want the index of the nth match
            index = expression.pos(nth)
            length = len(expression.cap(nth))
            self.setFormat(index, length, format)
            # Resume scanning just past the end of this match.
            index = expression.indexIn(text, index + length)
    self.setCurrentBlockState(0)
    # Do multi-line strings: try single-quote triples first, then
    # double-quote triples only if the former did not match.
    in_multiline = self.match_multiline(text, *self.tri_single)
    if not in_multiline:
        in_multiline = self.match_multiline(text, *self.tri_double)
def __hammingDistance(s1, s2):
    '''Finds the Hamming distance between two strings.

    @param s1: string
    @param s2: string
    @return: the distance
    @raise ValueError: if the length of the strings differ
    '''
    if len(s1) != len(s2):
        raise ValueError("Hamming distance requires strings of same size.")
    # Count positions where the characters disagree.
    return sum(a != b for a, b in zip(s1, s2))
def simxCreateDummy(clientID, size, color, operationMode):
    '''Please have a look at the function description/documentation in the V-REP user manual

    :param clientID: remote API client id.
    :param size: size of the dummy to create.
    :param color: optional 12-byte color specification, or None for default.
    :param operationMode: remote API call mode.
    :returns: tuple of (remote API return code, handle of the created dummy).
    '''
    handle = ct.c_int()
    # 'is not None' replaces the '!= None' anti-idiom (identity, not equality).
    if color is not None:
        c_color = (ct.c_ubyte * 12)(*color)
    else:
        c_color = None
    return c_CreateDummy(clientID, size, c_color, ct.byref(handle), operationMode), handle.value
def salt_spm():
    '''The main function for spm, the Salt Package Manager

    .. versionadded:: 2015.8.0
    '''
    # Import lazily so the CLI module is only loaded when actually invoked.
    import salt.cli.spm
    client = salt.cli.spm.SPM()  # pylint: disable=E1120
    client.run()
def execute(st, **kwargs):
    """Work around for the Python 3 exec function which doesn't allow changes
    to the local namespace because of scope. This runs just like exec except
    that it returns the result of the input statement to the caller.

    SECURITY NOTE: `st` is executed via exec() -- never pass untrusted input.

    Parameters
    ----------
    st : the statement you want executed and for which you want the return
    kwargs : anything that may need to be in this namespace to execute st

    Returns
    -------
    The return value of executing the input statement
    """
    scope = kwargs
    # Evaluate the statement inside `scope` and pull the result back out;
    # exec() cannot otherwise leak bindings into the caller's locals.
    exec("b = {}".format(st), scope)
    return scope['b']
def write_batch(self, batch):
    """Buffer a batch of items to be written and update internal counters.

    Calling this method doesn't guarantee that all items have been written.
    To ensure everything has been written you need to call flush().
    """
    buffer = self.write_buffer
    for item in batch:
        buffer.buffer(item)
        key = buffer.get_key_from_item(item)
        # Flush a group's buffer as soon as it reports itself full.
        if buffer.should_write_buffer(key):
            self._write_current_buffer_for_group_key(key)
        self.increment_written_items()
        self._check_items_limit()
def python_value(self, value):
    """Return the value in the database as a Pendulum object.

    Returns:
        pendulum.Pendulum:
            An instance of Pendulum with the field filled in.
    """
    value = super(PendulumDateTimeField, self).python_value(value)
    # Check datetime before date: datetime is a subclass of date.
    if isinstance(value, datetime.datetime):
        return pendulum.instance(value)
    if isinstance(value, datetime.date):
        midnight = datetime.datetime.combine(value, datetime.datetime.min.time())
        return pendulum.instance(midnight)
    if isinstance(value, string_types):
        return pendulum.parse(value)
    return value
def count_by_tag(stack, descriptor):
    """Returns the count of currently running or pending instances
    that match the given stack and deployer combo.

    :param stack: value of the 'stack' tag to match.
    :param descriptor: value of the 'descriptor' tag to match.
    :returns: number of matching instances passing state_filter.
    """
    ec2_conn = boto.ec2.connection.EC2Connection()
    reservations = ec2_conn.get_all_instances(
        filters={'tag:stack': stack, 'tag:descriptor': descriptor})
    # Flatten reservations into instances with a plain loop instead of the
    # original nested list comprehension used purely for side effects.
    all_instances = []
    for reservation in reservations:
        all_instances.extend(reservation.instances)
    return sum(1 for inst in all_instances if state_filter(inst))
def setup(app):
    """Called by Sphinx during phase 0 (initialization).

    :param sphinx.application.Sphinx app: Sphinx application object.
    :returns: Extension version.
    :rtype: dict
    """
    # Used internally. For rebuilding all pages when one or more versions fail.
    app.add_config_value('sphinxcontrib_versioning_versions', SC_VERSIONING_VERSIONS, 'html')
    # Static resources needed for the banner.
    app.config.html_static_path.append(STATIC_DIR)
    app.add_stylesheet('banner.css')
    # Expose every Config entry to users as an "scv_"-prefixed config value.
    for option, default in Config():
        app.add_config_value('scv_{}'.format(option), default, 'html')
    # Event handlers.
    handlers = (
        ('builder-inited', EventHandlers.builder_inited),
        ('env-updated', EventHandlers.env_updated),
        ('html-page-context', EventHandlers.html_page_context),
    )
    for event, handler in handlers:
        app.connect(event, handler)
    return {'version': __version__}
def get_google_drive_folder_location():
    """Try to locate the Google Drive folder.

    Reads the sync root path recorded in Google Drive's ``sync_config.db``,
    preferring the Yosemite-era ``user_default`` location when present.

    Returns:
        (str) Full path to the current Google Drive folder, or None if it
        could not be determined (an error is reported in that case).
    """
    gdrive_db_path = 'Library/Application Support/Google/Drive/sync_config.db'
    yosemite_gdrive_db_path = ('Library/Application Support/Google/Drive/'
                               'user_default/sync_config.db')
    yosemite_gdrive_db = os.path.join(os.environ['HOME'], yosemite_gdrive_db_path)
    if os.path.isfile(yosemite_gdrive_db):
        gdrive_db_path = yosemite_gdrive_db
    googledrive_home = None
    gdrive_db = os.path.join(os.environ['HOME'], gdrive_db_path)
    if os.path.isfile(gdrive_db):
        con = sqlite3.connect(gdrive_db)
        if con:
            try:
                cur = con.cursor()
                query = ("SELECT data_value "
                         "FROM data "
                         "WHERE entry_key = 'local_sync_root_path';")
                cur.execute(query)
                data = cur.fetchone()
                # Guard against an empty result: the original code crashed
                # with a TypeError on data[0] when no row was found.
                if data is not None:
                    googledrive_home = str(data[0])
            finally:
                # Always release the connection, even if the query raises.
                con.close()
    if not googledrive_home:
        error("Unable to find your Google Drive install =(")
    return googledrive_home
def get_dict(self):
    """Return a dict containing the host's attributes.

    The following keys are contained:

        - hostname
        - address
        - protocol
        - port

    :rtype: dict
    :return: The resulting dictionary.
    """
    keys_to_getters = (
        ('hostname', self.get_name),
        ('address', self.get_address),
        ('protocol', self.get_protocol),
        ('port', self.get_tcp_port),
    )
    return {key: getter() for key, getter in keys_to_getters}
def apply_transforms(self, data, rot_deg):
    """Apply transformations to the given data.

    These include flip/swap X/Y, invert Y, and rotation.

    Parameters
    ----------
    data : ndarray
        Data to be transformed.
    rot_deg : float
        Rotate the data by the given degrees.

    Returns
    -------
    data : ndarray
        Transformed data.
    """
    start_time = time.time()
    wd, ht = self.get_dims(data)
    xoff, yoff = self._org_xoff, self._org_yoff
    # Do transforms as necessary
    flip_x, flip_y = self.t_['flip_x'], self.t_['flip_y']
    swap_xy = self.t_['swap_xy']
    data = trcalc.transform(data, flip_x=flip_x, flip_y=flip_y, swap_xy=swap_xy)
    # Mirror the origin offset through the same flip/swap operations so it
    # keeps referring to the same pixel in the transformed array.
    if flip_y:
        yoff = ht - yoff
    if flip_x:
        xoff = wd - xoff
    if swap_xy:
        xoff, yoff = yoff, xoff
    split_time = time.time()
    self.logger.debug("reshape time %.3f sec" % (split_time - start_time))
    # Rotate the image as necessary
    if rot_deg != 0:
        # This is the slowest part of the rendering--install the OpenCv or
        # pyopencl packages to speed it up
        data = np.ascontiguousarray(data)
        data = trcalc.rotate_clip(data, -rot_deg, out=data, logger=self.logger)
    split2_time = time.time()
    # apply other transforms
    if self._invert_y:
        # Flip Y for natural Y-axis inversion between FITS coords
        # and screen coords
        data = np.flipud(data)
    self.logger.debug("rotate time %.3f sec, total reshape %.3f sec" % (
        split2_time - split_time, split2_time - start_time))
    # dimensions may have changed in transformations
    wd, ht = self.get_dims(data)
    ctr_x, ctr_y = self._ctr_x, self._ctr_y
    # Destination position: offset is measured from the bottom of the cutout,
    # hence the (ht - yoff) term when converting to screen coordinates.
    dst_x, dst_y = ctr_x - xoff, ctr_y - (ht - yoff)
    self._dst_x, self._dst_y = dst_x, dst_y
    self.logger.debug("ctr=%d,%d off=%d,%d dst=%d,%d cutout=%dx%d" % (
        ctr_x, ctr_y, xoff, yoff, dst_x, dst_y, wd, ht))
    win_wd, win_ht = self.get_window_size()
    self.logger.debug("win=%d,%d coverage=%d,%d" % (
        win_wd, win_ht, dst_x + wd, dst_y + ht))
    return data
def create_shape(self):
    """Create the toolkit shape for the proxy object.

    This method is called during the top-down pass, just before the
    'init_shape()' method is called. This method should create the
    toolkit widget and assign it to the 'widget' attribute.
    """
    d = self.declaration
    # Only build the combined shape when both operands are present.
    has_operands = bool(d.shape1 and d.shape2)
    self.shape = self._do_operation(d.shape1, d.shape2) if has_operands else None
def convert(model, feature_names, target):
    """Convert a Support Vector Classifier (SVC) model to the protobuf spec.

    Parameters
    ----------
    model : SVC
        A trained SVC encoder model.
    feature_names : [str], optional (default=None)
        Name of the input columns.
    target : str, optional (default=None)
        Name of the output column.

    Returns
    -------
    model_spec : An object of type Model_pb.
        Protobuf representation of the model
    """
    if not (_HAS_SKLEARN):
        raise RuntimeError('scikit-learn not found. scikit-learn conversion API is disabled.')
    spec = _generate_base_svm_classifier_spec(model)
    spec = set_classifier_interface_params(spec, feature_names, model.classes_,
                                           'supportVectorClassifier', output_features=target)
    svm = spec.supportVectorClassifier
    # Record how many support vectors belong to each class.
    for i in model.n_support_:
        svm.numberOfSupportVectorsPerClass.append(int(i))
    # probA_/probB_ are only populated when the model was fit with
    # probability estimates enabled; warn about the binary-class case.
    if len(model.probA_) != 0 and len(model.classes_) == 2:
        print("[WARNING] Scikit Learn uses a technique to normalize pairwise probabilities even for binary classification. "
              "This can cause differences in predicted probabilities, usually less than 0.5%.")
    # If this is an empty list, then model.probA_ will be an empty list.
    if len(model.probA_) != 0:
        for i in model.probA_:
            svm.probA.append(i)
    for i in model.probB_:
        svm.probB.append(i)
    return _MLModel(spec)
def _add_point_scalar(self, scalars, name, set_active=False, deep=True):
    """Add point scalars to the mesh.

    Parameters
    ----------
    scalars : numpy.ndarray
        Numpy array of scalars.  Must match number of points.
    name : str
        Name of point scalars to add.
    set_active : bool, optional
        Sets the scalars to the active plotting scalars.  Default False.
    deep : bool, optional
        Does not copy scalars when False.  A reference to the scalars
        must be kept to avoid a segfault.

    Raises
    ------
    TypeError
        If ``scalars`` is not a numpy.ndarray.
    Exception
        If the number of scalars does not match the number of points.
    """
    if not isinstance(scalars, np.ndarray):
        raise TypeError('Input must be a numpy.ndarray')
    if scalars.shape[0] != self.n_points:
        raise Exception('Number of scalars must match the number of points')
    # need to track which arrays are boolean as all boolean arrays
    # must be stored as uint8
    # NOTE: np.bool (an alias of the builtin) was removed in NumPy >= 1.24;
    # np.bool_ is the actual boolean dtype and compares identically here.
    if scalars.dtype == np.bool_:
        scalars = scalars.view(np.uint8)
        if name not in self._point_bool_array_names:
            self._point_bool_array_names.append(name)
    if not scalars.flags.c_contiguous:
        # VTK requires C-contiguous memory.
        scalars = np.ascontiguousarray(scalars)
    vtkarr = numpy_to_vtk(scalars, deep=deep)
    vtkarr.SetName(name)
    self.GetPointData().AddArray(vtkarr)
    if set_active or self.active_scalar_info[1] is None:
        self.GetPointData().SetActiveScalars(name)
        self._active_scalar_info = [POINT_DATA_FIELD, name]
def moveEvent(self, event):
    """Reimplement Qt method.

    Remember the window position for normal (non-maximized, non-fullscreen)
    windows, forward the event to Qt, and notify listeners.
    """
    # De Morgan's restatement of: not maximized and not fullscreen.
    if not (self.isMaximized() or self.fullscreen_flag):
        self.window_position = self.pos()
    QMainWindow.moveEvent(self, event)
    # To be used by the tour to be able to move
    self.sig_moved.emit(event)
def editPerson(self, person, nickname, edits):
    """Change the name and contact information associated with the given
    L{Person}.

    @type person: L{Person}
    @param person: The person which will be modified.

    @type nickname: C{unicode}
    @param nickname: The new value for L{Person.name}

    @type edits: C{list}
    @param edits: list of tuples of L{IContactType} providers and
        corresponding L{ListChanges} objects or dictionaries of parameter
        values.

    @raise ValueError: if another person already has the requested nickname.
    """
    # Reject the rename if any *other* person already uses the nickname.
    for existing in self.store.query(Person, Person.name == nickname):
        if existing is person:
            continue
        raise ValueError("A person with the name %r exists already." % (nickname,))
    oldname = person.name
    person.name = nickname
    # Let organizer plugins react to the rename (they receive the old name).
    self._callOnOrganizerPlugins('personNameChanged', person, oldname)
    for contactType, submission in edits:
        if contactType.allowMultipleContactItems:
            # submission is a ListChanges: apply edits, creations, deletions.
            for edit in submission.edit:
                self.editContactItem(contactType, edit.object, edit.values)
            for create in submission.create:
                create.setter(self.createContactItem(contactType, person, create.values))
            for delete in submission.delete:
                delete.deleteFromStore()
        else:
            # Single-item contact type: submission is a plain dict of values;
            # exactly one existing item is expected.
            (contactItem,) = contactType.getContactItems(person)
            self.editContactItem(contactType, contactItem, submission)
def font_width(self):
    """Return the badge font width."""
    name, size = self.font_name, self.font_size
    return self.get_font_width(font_name=name, font_size=size)
def cudnnDestroy(handle):
    """Release cuDNN resources.

    Release hardware resources used by cuDNN.

    Parameters
    ----------
    handle : cudnnHandle
        cuDNN context.
    """
    # Any non-success status is raised as an exception by cudnnCheckStatus.
    cudnnCheckStatus(_libcudnn.cudnnDestroy(ctypes.c_void_p(handle)))
def get_collection(self, collection, filter=None, fields=None, page_size=None):
    """Return a specific collection from the asset service with
    the given collection endpoint.

    Supports passing through parameters such as...

    - filters such as "name=Vesuvius" following GEL spec
    - fields such as "uri,description" comma delimited
    - page_size such as "100" (the default)
    """
    # Only include parameters the caller actually provided (truthy values).
    params = {key: value for key, value in (
        ('filter', filter),
        ('fields', fields),
        ('pageSize', page_size),
    ) if value}
    uri = self.uri + '/v1' + collection
    return self.service._get(uri, params=params)
def group_members(self, group_id, include_orphans=False):
    """Find all group member trigger definitions.

    :param group_id: group trigger id
    :param include_orphans: If True, include orphan members
    :return: list of associated group members as trigger objects
    """
    # The REST API expects a lowercase string flag ("true"/"false"), not a bool.
    orphans_flag = str(include_orphans).lower()
    url = self._service_url(['triggers', 'groups', group_id, 'members'],
                            params={'includeOrphans': orphans_flag})
    return Trigger.list_to_object_list(self._get(url))
def roc_curve(df, col_true=None, col_pred=None, col_scores=None, pos_label=1):
    r"""Compute true positive rate (TPR), false positive rate (FPR) and
    threshold from predicted DataFrame.

    Note that this method will trigger the defined flow to execute.

    :param df: predicted data frame
    :type df: DataFrame
    :param pos_label: positive label
    :type pos_label: str
    :param col_true: true column
    :type col_true: str
    :param col_pred: predicted column, 'prediction_result' if absent.
    :type col_pred: str
    :param col_scores: score column, 'prediction_score' if absent.
    :type col_scores: str
    :return: False positive rate, true positive rate and threshold, in numpy array format.

    :Example:

    >>> import matplotlib.pyplot as plt
    >>> fpr, tpr, thresh = roc_curve(predicted, "class")
    >>> plt.plot(fpr, tpr)
    """
    # Fall back to role-derived column names when not supplied.
    col_pred = col_pred or get_field_name_by_role(df, FieldRole.PREDICTED_CLASS)
    col_scores = col_scores or get_field_name_by_role(df, FieldRole.PREDICTED_SCORE)
    thresh, tp, fn, tn, fp = _run_roc_node(df, pos_label, col_true, col_pred, col_scores)
    if np is not None:
        # Vectorized rates when numpy is available.
        tpr = tp * 1.0 / (tp + fn)
        fpr = fp * 1.0 / (fp + tn)
    else:
        # Pure-Python fallback, element by element.
        tpr = [t * 1.0 / (t + f) for t, f in zip(tp, fn)]
        fpr = [f * 1.0 / (f + t) for f, t in zip(fp, tn)]
    roc_result = namedtuple('ROCResult', 'fpr tpr thresh')
    return roc_result(fpr=fpr, tpr=tpr, thresh=thresh)
def read_stream_stats(self):
    """:return: dictionary {stream index {stat name: value}}.
    See XenaStream.stats_captions.
    """
    # Preserve the stream ordering of self.streams in the result.
    return OrderedDict((stream, stream.read_stats())
                       for stream in self.streams.values())
def collect_variables(self, g_scope='gen', d_scope='discrim'):
    """Assign `self.g_vars` to the parameters under scope `g_scope`,
    and same with `self.d_vars`.

    Both scopes must contain at least one trainable variable.
    """
    trainable = tf.GraphKeys.TRAINABLE_VARIABLES
    self.g_vars = tf.get_collection(trainable, g_scope)
    assert self.g_vars
    self.d_vars = tf.get_collection(trainable, d_scope)
    assert self.d_vars
def _check_metrics ( cls , schema , metrics ) :
"""Ensure that returned metrics are properly exposed""" | for name , value in metrics . items ( ) :
metric = schema . get ( name )
if not metric :
message = "Unexpected metric '{}' returned" . format ( name )
raise Exception ( message )
cls . _check_metric ( schema , metric , name , value ) |
def check_with_pyflakes(source_code, filename=None):
    """Check source code with pyflakes.

    Returns a list of (message, line_number) tuples; returns an empty list
    if pyflakes is not installed or any unexpected error occurs.
    """
    try:
        if filename is None:
            filename = '<string>'
        try:
            # compile() needs a trailing newline.
            source_code += '\n'
        except TypeError:
            # Python 3
            source_code += to_binary_string('\n')
        import _ast
        from pyflakes.checker import Checker
        # First, compile into an AST and handle syntax errors.
        try:
            tree = compile(source_code, filename, "exec", _ast.PyCF_ONLY_AST)
        except SyntaxError as value:
            # If there's an encoding problem with the file, the text is None.
            if value.text is None:
                results = []
            else:
                results = [(value.args[0], value.lineno)]
        except (ValueError, TypeError):
            # Example of ValueError: file contains invalid \x escape character
            # (see https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=674797)
            # Example of TypeError: file contains null character
            # (see https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=674796)
            results = []
        else:
            # Okay, it's syntactically valid.  Now check it.
            w = Checker(tree, filename)
            w.messages.sort(key=lambda x: x.lineno)
            results = []
            coding = encoding.get_coding(source_code)
            lines = source_code.splitlines()
            for warning in w.messages:
                # Honor the inline 'analysis:ignore' marker on the flagged line.
                if 'analysis:ignore' not in to_text_string(lines[warning.lineno - 1], coding):
                    results.append((warning.message % warning.message_args, warning.lineno))
    except Exception:
        # Never return None to avoid lock in spyder/widgets/editor.py
        # See Issue 1547
        results = []
        if DEBUG_EDITOR:
            # Print exception in internal console
            traceback.print_exc()
    return results
def save_dir_list(key, *dirs_refs):
    """Convert the given parameters to a special JSON object.

    Each parameter is a dir-refs specification of the form:

        <dir-path>:<reference1>,<reference2>,...,

    where the colon ':' and the list of references are optional.

    JSON object is of the form:
        {key: {"dir": dir_path}}, or
        {key: {"dir": dir_path, "refs": [refs[0], refs[1], ...]}}
    """
    dir_list = []
    for dir_refs in dirs_refs:
        refs = None
        if ':' in dir_refs:
            try:
                dir_path, refs = dir_refs.split(':')
            except ValueError:
                return error("Only one colon ':' allowed in dir-refs specification.")
        else:
            dir_path = dir_refs
        if not os.path.isdir(dir_path):
            return error("Output '{}' set to a missing directory: '{}'.".format(key, dir_path))
        dir_obj = {'dir': dir_path}
        if refs:
            ref_paths = [ref_path.strip() for ref_path in refs.split(',')]
            missing_refs = [ref for ref in ref_paths
                            if not (os.path.isfile(ref) or os.path.isdir(ref))]
            if len(missing_refs) > 0:
                return error("Output '{}' set to missing references: '{}'.".format(
                    key, ', '.join(missing_refs)))
            dir_obj['refs'] = ref_paths
        dir_list.append(dir_obj)
    return json.dumps({key: dir_list})
def recycle_view(self, position):
    """Tell the view to render the item at the given position.

    Positions past the end of the item list clear the declaration
    (index -1, item None).
    """
    d = self.declaration
    items = d.parent.items
    in_range = position < len(items)
    d.index = position if in_range else -1
    d.item = items[position] if in_range else None
def mkpart(self, disk, start, end, part_type='primary'):
    """Make partition on disk.

    :param disk: device path (/dev/sda, /dev/sdb, etc...)
    :param start: partition start as accepted by parted mkpart
    :param end: partition end as accepted by parted mkpart
    :param part_type: partition type as accepted by parted mkpart
    :raises RuntimeError: if the remote command does not report SUCCESS.
    """
    args = {
        'disk': disk,
        'start': start,
        'end': end,
        'part_type': part_type,
    }
    # Validate the argument shape before dispatching to the remote agent.
    self._mkpart_chk.check(args)
    result = self._client.raw('disk.mkpart', args).get()
    if result.state != 'SUCCESS':
        raise RuntimeError('failed to create partition: %s' % result.stderr)
def linkCustomerToVerifiedUser(sender, **kwargs):
    """If a Registration is processed in which the associated Customer does not yet
    have a User, then check to see if the Customer's email address has been
    verified as belonging to a specific User, and if that User has an associated
    Customer.  If such a User is found, then associate this Customer with that
    User.  This way, if a new User verifies their email account before they have
    submitted any Registrations, their Customer account is seamlessly linked when
    they do complete their first Registration.
    """
    registration = kwargs.get('registration', None)
    # Nothing to do when there is no registration or the customer already has a user.
    if not registration or (hasattr(registration.customer, 'user') and registration.customer.user):
        return
    logger.debug('Checking for User for Customer with no associated registration.')
    customer = registration.customer
    try:
        # Only match verified, primary addresses whose user has no customer yet.
        verified_email = EmailAddress.objects.get(email=customer.email, verified=True,
                                                  primary=True, user__customer__isnull=True)
        logger.info("Found user %s to associate with customer %s.",
                    verified_email.user.id, customer.id)
        customer.user = verified_email.user
        customer.save()
        # Backfill the user's name from the customer when it is entirely blank.
        if not customer.user.first_name and not customer.user.last_name:
            customer.user.first_name = customer.first_name
            customer.user.last_name = customer.last_name
            customer.user.save()
    except ObjectDoesNotExist:
        logger.info("No user found to associate with customer %s.", customer.id)
    except MultipleObjectsReturned:
        # This should never happen, as email should be unique in the db table account_emailaddress.
        # If it does, something's broken in the database or Django.
        errmsg = "Something's not right with the database: more than one entry found on the database for the email %s. \
This duplicate key value violates unique constraint \"account_emailaddress_email_key\". \
The email field should be unique for each account.\n"
        logger.exception(errmsg, customer.email)
def check_process(pidfile):
    """Read pid file and check process status.

    :param pidfile: path to a file containing a PID as decimal text.
    :return: tuple ``(running, pid)``; ``pid`` is 0 when the pid file
        does not exist.
    :raises EnvironmentError: if the pid file exists but does not contain
        a valid integer.
    """
    # Read the pid file; the context manager guarantees the handle is closed.
    try:
        with open(pidfile, 'r') as handle:
            contents = handle.read()
    except IOError as exc:
        if exc.errno == errno.ENOENT:
            # pid file disappeared
            return False, 0
        raise
    try:
        pid = int(contents.strip(), 10)
    except (TypeError, ValueError) as exc:
        raise EnvironmentError("Invalid PID file '%s' (%s), won't start!" % (pidfile, exc))
    # Signal 0 performs error checking only -- it probes for process existence.
    try:
        os.kill(pid, 0)
    except OSError as exc:
        # EPERM means the process exists but belongs to another user; the
        # original code wrongly reported that case as "not running".
        return exc.errno == errno.EPERM, pid
    return True, pid
async def redis(self) -> aioredis.RedisConnection:
    """Get Redis connection, creating it lazily on first use.

    This property is awaitable.
    """
    # Use a thread-safe asyncio Lock because this method without it is not
    # safe: two concurrent awaiters could both observe ``self._redis is None``
    # and open two connections.
    async with self._connection_lock:
        if self._redis is None:
            self._redis = await aioredis.create_connection(
                (self._host, self._port), db=self._db, password=self._password,
                ssl=self._ssl, loop=self._loop, **self._kwargs)
        return self._redis
def get_package_version(self, feed_id, package_id, package_version_id, project=None, include_urls=None, is_listed=None, is_deleted=None):
    """GetPackageVersion.

    [Preview API] Get details about a specific package version.

    :param str feed_id: Name or Id of the feed.
    :param str package_id: Id of the package (GUID Id, not name).
    :param str package_version_id: Id of the package version (GUID Id, not name).
    :param str project: Project ID or project name
    :param bool include_urls: True to include urls for each version. Default is true.
    :param bool is_listed: Only applicable for NuGet packages. If false, delisted package versions will be returned.
    :param bool is_deleted: This does not have any effect on the requested package version, for other versions returned specifies whether to return only deleted or non-deleted versions of packages in the response. Default is unset (return all versions).
    :rtype: :class:`<PackageVersion> <azure.devops.v5_1.feed.models.PackageVersion>`
    """
    # Serialize only the route values the caller actually supplied.
    route_values = {}
    for route_key, param_name, value in (
            ('project', 'project', project),
            ('feedId', 'feed_id', feed_id),
            ('packageId', 'package_id', package_id),
            ('packageVersionId', 'package_version_id', package_version_id)):
        if value is not None:
            route_values[route_key] = self._serialize.url(param_name, value, 'str')
    # Likewise for the optional boolean query parameters.
    query_parameters = {}
    for query_key, param_name, value in (
            ('includeUrls', 'include_urls', include_urls),
            ('isListed', 'is_listed', is_listed),
            ('isDeleted', 'is_deleted', is_deleted)):
        if value is not None:
            query_parameters[query_key] = self._serialize.query(param_name, value, 'bool')
    response = self._send(http_method='GET',
                          location_id='3b331909-6a86-44cc-b9ec-c1834c35498f',
                          version='5.1-preview.1',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('PackageVersion', response)
def auth_creds(cls, username, password):
    """Validate a username & password.

    A token is returned if auth is successful & can be used to authorize
    future requests or ignored entirely if the authorization mechanism
    does not need it.

    :return: string token
    :raises AuthRejected: if the username is unknown, the account is
        locked, or the password does not match.
    """
    store = goldman.sess.store
    login = store.find(cls.RTYPE, 'username', username)
    # Guard clauses replace the original if/elif ladder.
    if not login:
        raise AuthRejected(detail='No login found by that username. Spelling error?')
    if login.locked:
        raise AuthRejected(detail='The login account is currently locked out.')
    if not cmp_val_salt_hash(password, login.salt, login.password):
        raise AuthRejected(detail='The password provided is incorrect. Spelling error?')
    # Mint a token lazily on the first successful authentication.
    if not login.token:
        login.token = random_str()
    login.post_authenticate()
    return login.token
def get_img_tag(self, title='', alt_text='', **kwargs):
    """Build a <img> tag for the image with the specified options.

    Returns: an HTML fragment.
    """
    try:
        # Collect inline CSS from either 'img_style' or 'style' kwargs;
        # each may be a single value or a list/tuple/set of values.
        style = []
        for key in ('img_style', 'style'):
            if key in kwargs:
                if isinstance(kwargs[key], (list, tuple, set)):
                    style += list(kwargs[key])
                else:
                    style.append(kwargs[key])
        if 'shape' in kwargs:
            shape = self._get_shape_style(**kwargs)
            if shape:
                style.append("shape-outside: url('{}')".format(shape))
        attrs = {'alt': alt_text, 'title': title, **self.get_img_attrs(style, **kwargs)}
        # Markup marks the fragment as pre-escaped HTML for the template engine.
        return flask.Markup(
            self._wrap_link_target(
                kwargs,
                utils.make_tag('img', attrs, start_end=kwargs.get('xhtml')),
                title))
    except FileNotFoundError as error:
        # Render an inline error fragment instead of failing the whole page.
        text = '<span class="error">Image not found: <code>{}</code>'.format(
            html.escape(error.filename))
        if ' ' in error.filename:
            text += ' (Did you forget a <code>|</code>?)'
        text += '</span>'
        return flask.Markup(text)
def routing_area_2_json(self):
    """Transform ariane_clip3 routing area object to Ariane server JSON obj.

    :return: Ariane JSON obj
    """
    LOGGER.debug("RoutingArea.routing_area_2_json")
    return json.dumps({
        'routingAreaID': self.id,
        'routingAreaName': self.name,
        'routingAreaDescription': self.description,
        'routingAreaType': self.type,
        'routingAreaMulticast': self.multicast,
        'routingAreaLocationsID': self.loc_ids,
        'routingAreaSubnetsID': self.subnet_ids,
    })
def expand_dependencies_section(section, kwargs):
    """Expand a dependency section, e.g. substitute "use: foo" for its
    contents, but don't evaluate conditions nor substitute variables."""
    expanded = []
    for entry in section:
        for dep_type, dep_list in entry.items():
            if dep_type in ('call', 'use'):
                # Inline the referenced section's dependencies.
                expanded.extend(Command(dep_type, dep_list, kwargs).run())
            elif dep_type == 'else' or dep_type.startswith('if '):
                # Recurse into conditional branches without evaluating them.
                expanded.append({dep_type: expand_dependencies_section(dep_list, kwargs)})
            else:
                expanded.append({dep_type: dep_list})
    return expanded
def append_to_history(self, filename, command, go_to_eof):
    """Append an entry to history filename.

    Args:
        filename (str): file to be updated in a new tab.
        command (str): line to be added.
        go_to_eof (bool): scroll to the end of file.
    """
    if not is_text_string(filename):
        # filename is a QString
        filename = to_text_string(filename.toUtf8(), 'utf-8')
    index = self.filenames.index(filename)
    editor = self.editors[index]
    editor.append(to_text_string(command))
    if go_to_eof:
        editor.set_cursor_position('eof')
    self.tabwidget.setCurrentIndex(index)
def _handle_is_dag_stopped(self, request):
    """The handler for the dag_stopped request.

    The dag_stopped request checks whether a dag is flagged to be terminated.

    Args:
        request (Request): Reference to a request object containing the
            incoming request. The payload has to contain the following
            fields:

            'dag_name': the name of the dag that should be checked

    Returns:
        Response: A response object containing the following fields:
            - is_stopped: True if the dag is flagged to be stopped.
    """
    dag_name = request.payload['dag_name']
    return Response(success=True,
                    uid=request.uid,
                    payload={'is_stopped': dag_name in self._stop_dags})
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.