signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def _to_string(val):
    """Return *val* as text, decoding UTF-8 bytes when necessary."""
    if not isinstance(val, binary_type):
        # Anything that is not bytes must already be text.
        assert isinstance(val, text_type)
        return val
    return val.decode('utf-8')
def predict(self, data, output_margin=False, ntree_limit=None, validate_features=True):
    """Predict with `data`.

    .. note:: This function is not thread safe.
        For each booster object, predict can only be called from one
        thread.  If you want to run prediction using multiple threads,
        call ``xgb.copy()`` to make copies of the model object and then
        call ``predict()``.

    .. note:: Using ``predict()`` with DART booster
        If the booster object is DART type, ``predict()`` will perform
        dropouts, i.e. only some of the trees will be evaluated.  This
        will produce incorrect results if ``data`` is not the training
        data.  To obtain correct results on test sets, set
        ``ntree_limit`` to a nonzero value, e.g.

        .. code-block:: python

            preds = bst.predict(dtest, ntree_limit=num_round)

    Parameters
    ----------
    data : DMatrix
        The dmatrix storing the input.
    output_margin : bool
        Whether to output the raw untransformed margin value.
    ntree_limit : int
        Limit number of trees in the prediction; defaults to
        best_ntree_limit if defined (i.e. it has been trained with
        early stopping), otherwise 0 (use all trees).
    validate_features : bool
        When this is True, validate that the Booster's and data's
        feature_names are identical.  Otherwise, it is assumed that
        the feature_names are the same.

    Returns
    -------
    prediction : numpy array
    """
    # pylint: disable=missing-docstring,invalid-name
    # Wrap the raw input in a DMatrix, propagating this estimator's
    # missing-value marker and thread count.
    test_dmatrix = DMatrix(data, missing=self.missing, nthread=self.n_jobs)
    # get ntree_limit to use - if none specified, default to
    # best_ntree_limit if defined, otherwise 0.
    if ntree_limit is None:
        ntree_limit = getattr(self, "best_ntree_limit", 0)
    return self.get_booster().predict(test_dmatrix,
                                      output_margin=output_margin,
                                      ntree_limit=ntree_limit,
                                      validate_features=validate_features)
def unregister_editor(self, editor_node, raise_exception=False):
    """Unregister the given EditorNode from the Model.

    :param editor_node: EditorNode to unregister.
    :type editor_node: EditorNode
    :param raise_exception: Raise an exception on unknown nodes.
    :type raise_exception: bool
    :return: EditorNode.
    :rtype: EditorNode
    """
    # Strict mode: refuse to unregister a node the model never saw.
    if raise_exception and editor_node not in self.list_editor_nodes():
        raise foundations.exceptions.ProgrammingError(
            "{0} | '{1}' editor 'EditorNode' isn't registered!".format(
                self.__class__.__name__, editor_node))

    LOGGER.debug("> Unregistering '{0}' editor 'EditorNode'.".format(editor_node))

    node_parent = editor_node.parent
    node_row = editor_node.row()
    # Qt model bookkeeping: announce the removal, detach, then commit.
    self.beginRemoveRows(self.get_node_index(node_parent), node_row, node_row)
    node_parent.remove_child(node_row)
    self.endRemoveRows()
    self.editor_unregistered.emit(editor_node)
    return editor_node
def _create_produce_requests(self, collated):
    """Transfer the record batches into a list of produce requests on a
    per-node basis.

    Arguments:
        collated: {node_id: [RecordBatch]}

    Returns:
        dict: {node_id: ProduceRequest} (version depends on api_version)
    """
    # Hoist config lookups out of the loop; they are identical per node.
    acks = self.config['acks']
    timeout_ms = self.config['request_timeout_ms']
    return {
        node_id: self._produce_request(node_id, acks, timeout_ms, batches)
        for node_id, batches in six.iteritems(collated)
    }
def getActionHandle(self, pchActionName):
    """Returns a handle for an action.  This handle is used for all
    performance-sensitive calls."""
    # The C API writes the handle through an out-parameter.
    handle = VRActionHandle_t()
    error = self.function_table.getActionHandle(pchActionName, byref(handle))
    return error, handle
def thresh(dset, p, positive_only=False, prefix=None):
    '''returns a string containing an inline ``3dcalc`` command that
    thresholds the given dataset at the specified *p*-value, or will
    create a new dataset if a suffix or prefix is given'''
    # Dispatch to whichever backend implementation is available.
    impl = available_method('thresh')
    return impl(dset, p, positive_only, prefix)
def gelman_rubin(mcmc):
    """Compute the Gelman-Rubin convergence diagnostic (R-hat).

    Args:
        mcmc (MCMCResults): Pre-sliced MCMC samples to compute
            diagnostics for.
    """
    # The diagnostic compares between-chain and within-chain variance,
    # so a single chain is not enough.
    if mcmc.n_chains < 2:
        raise ValueError('Gelman-Rubin diagnostic requires multiple chains '
                         'of the same length.')
    # get Vhat and within-chain variance
    Vhat, W = _vhat_w(mcmc)
    # compute and return Gelman-Rubin statistic
    return pd.DataFrame({'gelman_rubin': np.sqrt(Vhat / W)},
                        index=mcmc.levels)
def forward(self, x):
    """Returns a list of outputs for tasks 0, ..., t-1

    Args:
        x: a [batch_size, ...] batch from X
    """
    head_outputs = [None] * self.t
    # Execute input layer
    if isinstance(self.input_layer, list):  # One input_module per task
        # `x` is expected to be a parallel sequence of per-task inputs
        # here; each task's input module consumes its own slice.
        input_outputs = [mod(x) for mod, x in zip(self.input_layer, x)]
        x = torch.stack(input_outputs, dim=1)
        # Execute level-0 task heads from their respective input modules
        for t in self.task_map[0]:
            head = self.heads[t]
            head_outputs[t] = head(input_outputs[t])
    else:  # One input_module for all tasks
        x = self.input_layer(x)
        # Execute level-0 task heads from the single input module
        for t in self.task_map[0]:
            head = self.heads[t]
            head_outputs[t] = head(x)
    # Execute middle layers
    for i, layer in enumerate(self.middle_layers, start=1):
        x = layer(x)
        # Attach level-i task heads from the ith middle module
        for t in self.task_map[i]:
            head = self.heads[t]
            # Optionally include as input the predictions of parent tasks
            if self.config["pass_predictions"] and bool(self.task_graph.parents[t]):
                task_input = [x]
                for p in self.task_graph.parents[t]:
                    task_input.append(head_outputs[p])
                # NOTE(review): parents appear in task_map order before
                # their children, so head_outputs[p] is already computed
                # here — TODO confirm against TaskGraph invariants.
                task_input = torch.stack(task_input, dim=1)
            else:
                task_input = x
            head_outputs[t] = head(task_input)
    return head_outputs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.

    Applies the A08 adjustment factor to the superclass mean.
    """
    # get mean and std using the superclass
    mean, stddevs = super().get_mean_and_stddevs(sites, rup, dists, imt,
                                                 stddev_types)
    A08 = self.A08_COEFFS[imt]
    # Adjustment factor is f_ena = 10 ** (c + d * rjb).  Apply it
    # directly in natural-log space: log(exp(mean) * f_ena) is
    # mathematically mean + ln(10) * (c + d * rjb), but the original
    # exp/log round-trip can overflow for large means and loses
    # precision needlessly.
    ln_f_ena = np.log(10.0) * (A08["c"] + A08["d"] * dists.rjb)
    return mean + ln_f_ena, stddevs
def findNode(class_, hot_map, targetNode, parentNode=None):
    '''Find the target node in the hot_map.

    Returns a ``(parentNode, hot_map, index)`` triple for the first
    match, or ``None`` when the target is not present.
    '''
    for position, (rect, node, children) in enumerate(hot_map):
        if node == targetNode:
            return parentNode, hot_map, position
        # Depth-first search into this entry's children, with `node`
        # becoming the parent of any match found below.
        found = class_.findNode(children, targetNode, node)
        if found:
            return found
    return None
async def register_service(self, short_name, long_name, allow_duplicate=True):
    """Register a new service with the service manager.

    Args:
        short_name (string): A unique short name for this service that
            functions as an id.
        long_name (string): A user facing name for this service.
        allow_duplicate (boolean): Don't throw an error if this service
            is already registered.  This is important if the service is
            preregistered for example.

    Raises:
        ArgumentError: if the short_name is already taken
    """
    payload = dict(name=short_name, long_name=long_name)
    try:
        await self.send_command(OPERATIONS.CMD_REGISTER_SERVICE, payload,
                                MESSAGES.RegisterServiceResponse)
    except ArgumentError:
        # A duplicate registration is only an error when the caller
        # asked for strict behavior.
        if not allow_duplicate:
            raise
def identify_pycbc_live(origin, filepath, fileobj, *args, **kwargs):
    """Identify a PyCBC Live file as an HDF5 with the correct name"""
    # Must be a valid HDF5 file first.
    if not identify_hdf5(origin, filepath, fileobj, *args, **kwargs):
        return False
    # ...and carry the PyCBC Live filename pattern.
    return filepath is not None and bool(PYCBC_FILENAME.match(basename(filepath)))
def close(self, cursor_id, address):
    """Kill a cursor.

    Raises TypeError if cursor_id is not an instance of (int, long).

    :Parameters:
      - `cursor_id`: cursor id to close
      - `address`: the cursor's server's (host, port) pair

    .. versionchanged:: 3.0
       Now requires an `address` argument.
    """
    if not isinstance(cursor_id, integer_types):
        raise TypeError("cursor_id must be an integer")
    # `self.__client` is name-mangled to the enclosing class; it yields
    # the owning client, which performs the actual server-side kill.
    self.__client().kill_cursors([cursor_id], address)
def as_dict(self):
    """Additionally encodes headers.

    :return: The serialized message payload.
    """
    data = super(BaseEmail, self).as_dict()
    # Headers are serialized as a list of {"Name": ..., "Value": ...} pairs.
    data["Headers"] = [
        {"Name": name, "Value": value}
        for name, value in data["Headers"].items()
    ]
    # Recipient fields become comma-separated strings.
    for recipient_field in ("To", "Cc", "Bcc"):
        if recipient_field in data:
            data[recipient_field] = list_to_csv(data[recipient_field])
    data["Attachments"] = [
        prepare_attachments(attachment) for attachment in data["Attachments"]
    ]
    return data
def invalidate_cache(user, size=None):
    """Function to be called when saving or changing an user's avatars."""
    sizes = set(settings.AVATAR_AUTO_GENERATE_SIZES)
    if size is not None:
        sizes.add(size)
    # Drop every cached entry for each (function-prefix, size) pair.
    for prefix in cached_funcs:
        for cached_size in sizes:
            cache.delete(get_cache_key(user, cached_size, prefix))
def total(self):
    """Total cost of the order"""
    # Sum the per-item totals across every item in the order
    # (an empty order totals 0).
    return sum(item.total for item in self.items.all())
def console_from_file(filename: str) -> tcod.console.Console:
    """Return a new console object from a filename.

    The file format is automatically determined.  This can load
    REXPaint `.xp`, ASCII Paint `.apf`, or Non-delimited ASCII `.asc`
    files.

    Args:
        filename (Text): The path to the file, as a string.

    Returns: A new :any`Console` instance.
    """
    # The C API expects a UTF-8 encoded byte-string path.
    return tcod.console.Console._from_cdata(
        lib.TCOD_console_from_file(filename.encode("utf-8")))
def get_extrema ( self , normalize_rxn_coordinate = True ) :
"""Returns the positions of the extrema along the MEP . Both local
minimums and maximums are returned .
Args :
normalize _ rxn _ coordinate ( bool ) : Whether to normalize the
reaction coordinate to between 0 and 1 . Defaults to True .
Returns :
( min _ extrema , max _ extrema ) , where the extrema are given as
[ ( x1 , y1 ) , ( x2 , y2 ) , . . . ] .""" | x = np . arange ( 0 , np . max ( self . r ) , 0.01 )
y = self . spline ( x ) * 1000
scale = 1 if not normalize_rxn_coordinate else 1 / self . r [ - 1 ]
min_extrema = [ ]
max_extrema = [ ]
for i in range ( 1 , len ( x ) - 1 ) :
if y [ i ] < y [ i - 1 ] and y [ i ] < y [ i + 1 ] :
min_extrema . append ( ( x [ i ] * scale , y [ i ] ) )
elif y [ i ] > y [ i - 1 ] and y [ i ] > y [ i + 1 ] :
max_extrema . append ( ( x [ i ] * scale , y [ i ] ) )
return min_extrema , max_extrema |
def check_rights(self, resources, request=None):
    """Check rights for resources.

    :return bool: True if operation is success else HTTP_403_FORBIDDEN
    """
    # No authenticator configured means access is unrestricted.
    if not self.auth:
        return True
    try:
        if not self.auth.test_rights(resources, request=request):
            raise AssertionError()
    except AssertionError as e:
        # Fixed: the original used Python 2 `except AssertionError, e:`
        # syntax, which is a SyntaxError on Python 3 (and misspelled
        # "forbidden" in the message).
        raise HttpError("Access forbidden. {0}".format(e),
                        status=status.HTTP_403_FORBIDDEN)
def network_expansion(network, method='rel', ext_min=0.1, ext_width=False,
                      filename=None, boundaries=[]):
    """Plot relative or absolute network extension of AC- and DC-lines.

    Parameters
    ----------
    network : PyPSA network container
        Holds topology of grid including results from powerflow analysis
    method : str
        Choose 'rel' for extension relative to s_nom and 'abs' for
        absolute extensions.
    ext_min : float
        Choose minimum relative line extension shown in plot in p.u..
    ext_width : float or bool
        Choose if line_width respects line extension.  Turn off with
        'False' or set linear factor to decremise extension line_width.
    filename : str or None
        Save figure in this direction
    boundaries : array
        Set boundaries of heatmap axis
    """
    cmap = plt.cm.jet
    # Work on a copy restricted to extendable lines/links that were
    # actually extended by at least `ext_min` (relative to s_nom/p_nom).
    overlay_network = network.copy()
    overlay_network.lines = overlay_network.lines[
        overlay_network.lines.s_nom_extendable &
        ((overlay_network.lines.s_nom_opt -
          overlay_network.lines.s_nom_min) /
         overlay_network.lines.s_nom >= ext_min)]
    overlay_network.links = overlay_network.links[
        overlay_network.links.p_nom_extendable &
        ((overlay_network.links.p_nom_opt -
          overlay_network.links.p_nom_min) /
         overlay_network.links.p_nom >= ext_min)]
    # For anti-parallel link pairs, show the larger optimal capacity on
    # both directions.
    for i, row in overlay_network.links.iterrows():
        linked = overlay_network.links[
            (row['bus1'] == overlay_network.links.bus0) &
            (row['bus0'] == overlay_network.links.bus1)]
        if not linked.empty:
            if row['p_nom_opt'] < linked.p_nom_opt.values[0]:
                overlay_network.links.p_nom_opt[i] = linked.p_nom_opt.values[0]
    # MultiIndex arrays distinguishing AC lines from DC links.
    array_line = [['Line'] * len(overlay_network.lines),
                  overlay_network.lines.index]
    array_link = [['Link'] * len(overlay_network.links),
                  overlay_network.links.index]
    if method == 'rel':
        # Extension as a percentage of nominal capacity.
        extension_lines = pd.Series(
            (100 * (overlay_network.lines.s_nom_opt -
                    overlay_network.lines.s_nom_min) /
             overlay_network.lines.s_nom).data, index=array_line)
        extension_links = pd.Series(
            (100 * (overlay_network.links.p_nom_opt -
                    overlay_network.links.p_nom_min) /
             (overlay_network.links.p_nom)).data, index=array_link)
    if method == 'abs':
        # Absolute extension in MW.
        extension_lines = pd.Series(
            (overlay_network.lines.s_nom_opt -
             overlay_network.lines.s_nom_min).data, index=array_line)
        extension_links = pd.Series(
            (overlay_network.links.p_nom_opt -
             overlay_network.links.p_nom_min).data, index=array_link)
    extension = extension_lines.append(extension_links)
    # Plot whole network in backgroud of plot
    network.plot(
        line_colors=pd.Series("grey", index=[['Line'] * len(network.lines),
                                             network.lines.index]).append(
            pd.Series("grey", index=[['Link'] * len(network.links),
                                     network.links.index])),
        bus_sizes=0,
        line_widths=pd.Series(0.5, index=[['Line'] * len(network.lines),
                                          network.lines.index]).append(
            pd.Series(0.55, index=[['Link'] * len(network.links),
                                   network.links.index])))
    if not ext_width:
        line_widths = pd.Series(0.8, index=array_line).append(
            pd.Series(0.8, index=array_link))
    else:
        # Scale line width linearly with the extension magnitude.
        line_widths = 0.5 + (extension / ext_width)
    ll = overlay_network.plot(
        line_colors=extension, line_cmap=cmap, bus_sizes=0,
        title="Optimized AC- and DC-line expansion",
        line_widths=line_widths)
    if not boundaries:
        v = np.linspace(min(extension), max(extension), 101)
        boundaries = [min(extension), max(extension)]
    else:
        v = np.linspace(boundaries[0], boundaries[1], 101)
    # Clamp the link collection's color scale via a throwaway colorbar
    # so both collections share one scale, then remove it again.
    if not extension_links.empty:
        cb_Link = plt.colorbar(ll[2], boundaries=v, ticks=v[0:101:10])
        cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1])
        cb_Link.remove()
    cb = plt.colorbar(ll[1], boundaries=v, ticks=v[0:101:10],
                      fraction=0.046, pad=0.04)
    cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])
    if method == 'rel':
        cb.set_label('line expansion relative to s_nom in %')
    if method == 'abs':
        cb.set_label('line expansion in MW')
    if filename is None:
        plt.show()
    else:
        plt.savefig(filename)
        plt.close()
def log_of_array_ignoring_zeros(M):
    """Returns an array containing the logs of the nonzero elements of
    M.  Zeros are left alone since log(0) isn't defined.

    Parameters
    ----------
    M : array-like

    Returns
    -------
    array-like
        Shape matches `M`
    """
    result = M.copy()
    positive = result > 0
    # Only strictly positive entries are replaced by their logarithm;
    # everything else passes through unchanged.
    result[positive] = np.log(result[positive])
    return result
def get_setting_list(self, key, default_value=None, delimiter=',', value_type=str):
    """Get the setting stored at the given key and split it to a list.

    Args:
        key (str): the setting key
        default_value (list, optional): The default value, if none is
            found.  Defaults to None.
        delimiter (str, optional): The delimiter to break on.
            Defaults to ','.
        value_type (function, optional): The type of a setting value in
            the list.  Defaults to `str`.

    Returns:
        list of str: The values of the setting if found, default_value
        otherwise.  If a value is found, it is split using the given
        delimiter.
    """
    raw = self.get_setting(key)
    if raw is None:
        return default_value
    # Split and coerce each element in one pass.
    return [value_type(part) for part in raw.split(delimiter)]
def _generate_signature_for_function ( self , func ) :
"""Given a function , returns a string representing its args .""" | args_list = [ ]
argspec = inspect . getargspec ( func )
first_arg_with_default = ( len ( argspec . args or [ ] ) - len ( argspec . defaults or [ ] ) )
for arg in argspec . args [ : first_arg_with_default ] :
if arg == "self" : # Python documentation typically skips ` self ` when printing method
# signatures .
continue
args_list . append ( arg )
# TODO ( mrry ) : This is a workaround for documenting signature of
# functions that have the @ contextlib . contextmanager decorator .
# We should do something better .
if argspec . varargs == "args" and argspec . keywords == "kwds" :
original_func = func . __closure__ [ 0 ] . cell_contents
return self . _generate_signature_for_function ( original_func )
if argspec . defaults :
for arg , default in zip ( argspec . args [ first_arg_with_default : ] , argspec . defaults ) :
if callable ( default ) :
args_list . append ( "%s=%s" % ( arg , default . __name__ ) )
else :
args_list . append ( "%s=%r" % ( arg , default ) )
if argspec . varargs :
args_list . append ( "*" + argspec . varargs )
if argspec . keywords :
args_list . append ( "**" + argspec . keywords )
return "(" + ", " . join ( args_list ) + ")" |
def set_uint_info(self, field, data):
    """Set uint type property into the DMatrix.

    Parameters
    ----------
    field : str
        The field name of the information
    data : numpy array
        The array of data to be set
    """
    # A non-contiguous view of another ndarray forces an extra copy when
    # converting to the C-typed buffer below, so warn the caller and
    # copy explicitly.
    if getattr(data, 'base', None) is not None and data.base is not None \
            and isinstance(data, np.ndarray) and isinstance(data.base, np.ndarray) \
            and (not data.flags.c_contiguous):
        warnings.warn("Use subset (sliced data) of np.ndarray is not recommended " +
                      "because it will generate extra copies and increase memory consumption")
        data = np.array(data, copy=True, dtype=ctypes.c_uint)
    else:
        # Contiguous input can be reinterpreted without copying.
        data = np.array(data, copy=False, dtype=ctypes.c_uint)
    _check_call(_LIB.XGDMatrixSetUIntInfo(self.handle,
                                          c_str(field),
                                          c_array(ctypes.c_uint, data),
                                          c_bst_ulong(len(data))))
def _count_pixels_on_line(self, y, p):
    """Count the number of pixels rendered on this line."""
    # Rasterize the line at the effective thickness, then total the
    # resulting coverage.
    rendered = line(y, self._effective_thickness(p), 0.0)
    return rendered.sum()
async def slowlog_get(self, num=None):
    """Get the entries from the slowlog.  If ``num`` is specified, get
    the most recent ``num`` items."""
    # Only append the count argument when the caller limited the query.
    args = ['SLOWLOG GET'] if num is None else ['SLOWLOG GET', num]
    return await self.execute_command(*args)
def _update_triangles(self, triangles_list):
    """From a set of variables forming a triangle in the model, we form
    the corresponding Clusters.  These clusters are then appended to the
    code.

    Parameters
    ----------
    triangle_list : list
        The list of variables forming the triangles to be updated.  It
        is of the form of
        [['var_5', 'var_8', 'var_7'], ['var_4', 'var_5', 'var_7']]
    """
    new_intersection_set = []
    for triangle_vars in triangles_list:
        cardinalities = [self.cardinality[variable] for variable in triangle_vars]
        # Every pair of triangle variables forms an intersection set.
        current_intersection_set = [frozenset(intersect)
                                    for intersect in it.combinations(triangle_vars, 2)]
        # Zero-initialized factor over the full triangle scope.
        current_factor = DiscreteFactor(triangle_vars, cardinalities,
                                        np.zeros(np.prod(cardinalities)))
        self.cluster_set[frozenset(triangle_vars)] = self.Cluster(
            current_intersection_set, current_factor)
        # add new factors
        self.model.factors.append(current_factor)
        # add new intersection sets
        new_intersection_set.extend(current_intersection_set)
        # add new factors in objective
        self.objective[frozenset(triangle_vars)] = current_factor
def quick_search(limit, pretty, sort, **kw):
    '''Execute a quick search.'''
    search_request = search_req_from_opts(**kw)
    client = clientv1()
    # Never request pages larger than the API maximum of 250 items.
    page_size = min(limit, 250)
    response = call_and_wrap(client.quick_search, search_request,
                             page_size=page_size, sort=sort)
    echo_json_response(response, pretty, limit)
def view_task_hazard(token, dstore):
    """Display info about a given task.  Here are a few examples of usage::

     $ oq show task_hazard:0  # the fastest task
     $ oq show task_hazard:-1  # the slowest task
    """
    tasks = set(dstore['task_info'])
    if 'source_data' not in dstore:
        return 'Missing source_data'
    # Pick whichever task-info table this calculation produced.
    if 'classical_split_filter' in tasks:
        data = dstore['task_info/classical_split_filter'].value
    else:
        data = dstore['task_info/compute_gmfs'].value
    # Sorting by duration makes index 0 the fastest task and -1 the
    # slowest, matching the `task_hazard:<index>` token convention.
    data.sort(order='duration')
    rec = data[int(token.split(':')[1])]
    taskno = rec['taskno']
    arr = get_array(dstore['source_data'].value, taskno=taskno)
    st = [stats('nsites', arr['nsites']), stats('weight', arr['weight'])]
    sources = dstore['task_sources'][taskno - 1].split()
    # Source identifiers look like "<src_id>:<extra>"; keep the unique
    # src_id prefixes.
    srcs = set(decode(s).split(':', 1)[0] for s in sources)
    res = 'taskno=%d, weight=%d, duration=%d s, sources="%s"\n\n' % (
        taskno, rec['weight'], rec['duration'], ' '.join(sorted(srcs)))
    return res + rst_table(st, header='variable mean stddev min max n'.split())
def get_size(vm_):
    '''Return the VM's size object'''
    vm_size = config.get_cloud_config_value('size', vm_, __opts__)
    sizes = avail_sizes()
    # Default to the provider's "Small Instance" when no size is set.
    if not vm_size:
        return sizes['Small Instance']
    wanted = six.text_type(vm_size)
    for size_name in sizes:
        # Match either the numeric id or the human-readable name.
        candidates = (six.text_type(sizes[size_name]['id']),
                      six.text_type(size_name))
        if wanted in candidates:
            return sizes[size_name]
    raise SaltCloudNotFound(
        'The specified size, \'{0}\', could not be found.'.format(vm_size))
def _validate ( self , val ) :
"""Checks that the value is numeric and that it is within the hard
bounds ; if not , an exception is raised .""" | if self . allow_None and val is None :
return
if not isinstance ( val , dt_types ) and not ( self . allow_None and val is None ) :
raise ValueError ( "Date '%s' only takes datetime types." % self . name )
if self . step is not None and not isinstance ( self . step , dt_types ) :
raise ValueError ( "Step parameter can only be None or a datetime type" )
self . _checkBounds ( val ) |
def digicam_configure_encode(self, target_system, target_component, mode,
                             shutter_speed, aperture, iso, exposure_type,
                             command_id, engine_cut_off, extra_param,
                             extra_value):
    '''Configure on-board Camera Control System.

    target_system    : System ID (uint8_t)
    target_component : Component ID (uint8_t)
    mode             : Mode enumeration from 1 to N //P, TV, AV, M, Etc
                       (0 means ignore) (uint8_t)
    shutter_speed    : Divisor number //e.g. 1000 means 1/1000 (0 means
                       ignore) (uint16_t)
    aperture         : F stop number x 10 //e.g. 28 means 2.8 (0 means
                       ignore) (uint8_t)
    iso              : ISO enumeration from 1 to N //e.g. 80, 100, 200,
                       Etc (0 means ignore) (uint8_t)
    exposure_type    : Exposure type enumeration from 1 to N (0 means
                       ignore) (uint8_t)
    command_id       : Command Identity (incremental loop: 0 to 255)
                       //A command sent multiple times will be executed
                       or pooled just once (uint8_t)
    engine_cut_off   : Main engine cut-off time before camera trigger in
                       seconds/10 (0 means no cut-off) (uint8_t)
    extra_param      : Extra parameters enumeration (0 means ignore)
                       (uint8_t)
    extra_value      : Correspondent value to given extra_param (float)
    '''
    # Pure message construction; transmission is handled by the caller.
    return MAVLink_digicam_configure_message(
        target_system, target_component, mode, shutter_speed, aperture,
        iso, exposure_type, command_id, engine_cut_off, extra_param,
        extra_value)
def compress(data, mode=DEFAULT_MODE, quality=lib.BROTLI_DEFAULT_QUALITY,
             lgwin=lib.BROTLI_DEFAULT_WINDOW, lgblock=0, dictionary=b''):
    """Compress a string using Brotli.

    .. versionchanged:: 0.5.0
       Added ``mode``, ``quality``, ``lgwin``, ``lgblock``, and
       ``dictionary`` parameters.

    :param data: A bytestring containing the data to compress.
    :type data: ``bytes``
    :param mode: The encoder mode.
    :type mode: :class:`BrotliEncoderMode` or ``int``
    :param quality: Controls the compression-speed vs
        compression-density tradeoffs.  The higher the quality, the
        slower the compression.  The range of this value is 0 to 11.
    :type quality: ``int``
    :param lgwin: The base-2 logarithm of the sliding window size.  The
        range of this value is 10 to 24.
    :type lgwin: ``int``
    :param lgblock: The base-2 logarithm of the maximum input block
        size.  The range of this value is 16 to 24.  If set to 0, the
        value will be set based on ``quality``.
    :type lgblock: ``int``
    :param dictionary: A pre-set dictionary for LZ77.  Please use this
        with caution: if a dictionary is used for compression, the same
        dictionary **must** be used for decompression!
    :type dictionary: ``bytes``
    :returns: The compressed bytestring.
    :rtype: ``bytes``
    """
    # This method uses private variables on the Compressor object, and
    # generally does a whole lot of stuff that's not supported by the
    # public API.  The goal here is to minimise the number of
    # allocations and copies we have to do.  Users should prefer this
    # method over the Compressor if they know they have single-shot
    # data.
    compressor = Compressor(mode=mode, quality=quality, lgwin=lgwin,
                            lgblock=lgblock, dictionary=dictionary)
    compressed_data = compressor._compress(data, lib.BROTLI_OPERATION_FINISH)
    # The FINISH operation must leave the encoder fully drained; these
    # asserts catch misuse of the private single-shot path.
    assert lib.BrotliEncoderIsFinished(compressor._encoder) == lib.BROTLI_TRUE
    assert (lib.BrotliEncoderHasMoreOutput(compressor._encoder) ==
            lib.BROTLI_FALSE)
    return compressed_data
def has_predecessor(self, graph, dest, orig, branch, turn, tick, *, forward=None):
    """Return whether an edge connects the destination to the origin at
    the given time.

    Doesn't require the edge's index, which makes it slower than
    retrieving a particular edge.
    """
    # Fall back to the database-wide forward-caching policy by default.
    use_forward = self.db._forward if forward is None else forward
    origins = self._get_origcache(graph, dest, branch, turn, tick,
                                  forward=use_forward)
    return orig in origins
def logical_chassis_fwdl_status_output_cluster_fwdl_entries_fwdl_entries_blade_swbd(self, **kwargs):
    """Auto Generated Code.

    Builds the XML payload
    logical_chassis_fwdl_status/output/cluster-fwdl-entries/
    fwdl-entries/blade-swbd and dispatches it through the callback.
    """
    # The RPC root is the status element itself.
    root = ET.Element("logical_chassis_fwdl_status")
    output = ET.SubElement(root, "output")
    fwdl_entries = ET.SubElement(
        ET.SubElement(output, "cluster-fwdl-entries"), "fwdl-entries")
    ET.SubElement(fwdl_entries, "blade-swbd").text = kwargs.pop('blade_swbd')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
def cache_if_needed(cacheKey, result, menu, context, meta):
    """Cache the result, if needed"""
    if not cacheKey:
        return
    # Flatten the layered template context into a single dict.
    # This will be a method in django 1.7
    flat_context = {}
    for layer in context.dicts:
        flat_context.update(layer)
    # The CSRF token must never be persisted in the cache.
    del flat_context['csrf_token']
    payload = {'result': result, 'menu': menu, 'context': flat_context}
    cache.set('plugit-cache-' + cacheKey, payload, meta['cache_time'])
def dump(self):
    """Print a formatted summary of the current solve state."""
    from rez.utils.formatting import columnise

    rows = []
    for i, phase in enumerate(self.phase_stack):
        rows.append((self._depth_label(i), phase.status, str(phase)))

    # Python 3 compatible output: the original used Python 2 print
    # statements, which are a SyntaxError on Python 3.  print() calls
    # produce identical output on both major versions here.
    print("status: %s (%s)" % (self.status.name, self.status.description))
    print("initial request: %s" % str(self.request_list))
    print()
    print("solve stack:")
    print('\n'.join(columnise(rows)))

    if self.failed_phase_list:
        rows = []
        for i, phase in enumerate(self.failed_phase_list):
            rows.append(("#%d" % i, phase.status, str(phase)))
        print()
        print("previous failures:")
        print('\n'.join(columnise(rows)))
def pull(self):
    """Pull topics from Transifex and push the completed translations
    to Desk."""
    # Per-locale completion statistics for the topic strings resource.
    topic_stats = txlib.api.statistics.Statistics.get(
        project_slug=self.tx_project_slug,
        resource_slug=self.TOPIC_STRINGS_SLUG,
    )
    translated = {}
    # for each language
    for locale in self.enabled_locales:
        if not self._process_locale(locale):
            continue
        locale_stats = getattr(topic_stats, locale, None)
        if locale_stats is None:
            self.log.debug('Locale %s not present when pulling topics.'
                           % (locale,))
            continue
        # Only fully translated locales are pulled.
        if locale_stats['completed'] == '100%':
            # get the resource from Tx
            translation = txlib.api.translations.Translation.get(
                project_slug=self.tx_project_slug,
                slug=self.TOPIC_STRINGS_SLUG,
                lang=locale,
            )
            translated[locale] = babel.messages.pofile.read_po(
                StringIO(translation.content.encode('utf-8')))
    # now that we've pulled everything from Tx, upload to Desk
    for topic in self.desk.topics():
        for locale in translated:
            if topic.name in translated[locale]:
                self.log.debug('Updating topic (%s) for locale (%s)'
                               % (topic.name, locale),)
                if locale in topic.translations:
                    topic.translations[locale].update(
                        name=translated[locale][topic.name].string,)
                else:
                    topic.translations.create(
                        locale=locale,
                        name=translated[locale][topic.name].string,)
            else:
                # Fixed: the original logged `topic['name']` here while
                # every other access in this method uses the `topic.name`
                # attribute, which would raise at runtime on this branch.
                self.log.error('Topic name (%s) does not exist in locale (%s)'
                               % (topic.name, locale),)
def echo_with_markers(text, marker='=', marker_color='blue', text_color=None):
    """Print a text to the screen with markers surrounding it.

    The output looks like::

        ===== text =====

    with marker='=' right now.  In the event that the terminal window is
    too small, the text is printed without markers.

    :param str text: the text to echo
    :param str marker: the marker to surround the text
    :param str marker_color: one of ('black'|'red'|'green'|'yellow'|'blue'|'magenta'|'cyan'|'white')
    :param str text_color: one of ('black'|'red'|'green'|'yellow'|'blue'|'magenta'|'cyan'|'white')
    """
    text = ' ' + text + ' '
    width, _ = click.get_terminal_size()
    if len(text) >= width:
        # this is probably never the case
        click.echo(text)
    else:
        leftovers = width - len(text)
        # Fixed: use integer division.  On Python 3, `leftovers / 2` is
        # a float and `str * float` raises TypeError.
        click.secho(marker * (leftovers // 2), fg=marker_color, nl=False)
        click.secho(text, nl=False, fg=text_color)
        # The right side absorbs the odd leftover column, if any.
        click.secho(marker * (leftovers // 2 + leftovers % 2), fg=marker_color)
def send_backspace(self, count):
    """Sends the given number of backspace key presses."""
    # The loop index is unused; only the repetition count matters.
    for _ in range(count):
        self.interface.send_key(Key.BACKSPACE)
def _addRule(self, isWhitelist, rule):
    """Add an (isWhitelist, rule) pair to the rule list."""
    # Rules are either glob-pattern strings or arbitrary callables.
    acceptable = isinstance(rule, six.string_types) or hasattr(rule, '__call__')
    if not acceptable:
        raise TypeError(
            'Graphite logging rules must be glob pattern or callable. '
            'Invalid: %r' % rule)
    self.rules.append((isWhitelist, rule))
def clear_all():
    """Clears all parameters, variables, and shocks defined previously"""
    caller = inspect.currentframe().f_back
    try:
        # Drop the bookkeeping lists from the caller's globals so symbols can
        # be declared again without tripping the duplicate-declaration guard.
        for key in ('variables_order', 'parameters_order'):
            if caller.f_globals.get(key):
                del caller.f_globals[key]
    finally:
        # Break the reference cycle created by holding a frame object.
        del caller
def _gql(cls, query_string, *args, **kwds):
    """Run a GQL query."""
    # Import late to avoid circular imports.
    from .query import gql
    full_query = 'SELECT * FROM %s %s' % (cls._class_name(), query_string)
    return gql(full_query, *args, **kwds)
def setup_session(endpoint_context, areq, uid, client_id='', acr='', salt='salt', authn_event=None):
    """Set up a user session.

    :param endpoint_context: server endpoint context (provides the session DB)
    :param areq: the authorization request
    :param uid: user identifier
    :param acr: authentication class reference; used to build an AuthnEvent
        when none is supplied
    :param client_id: requesting client; taken from ``areq`` when empty
    :param salt: salt handed to the AuthnEvent
    :param authn_event: an already made AuthnEvent
    :return: the session identifier
    """
    if authn_event is None and acr:
        authn_event = AuthnEvent(uid=uid, salt=salt, authn_info=acr, authn_time=time.time())
    client_id = client_id or areq['client_id']
    sdb = endpoint_context.sdb
    sid = sdb.create_authz_session(authn_event, areq, client_id=client_id, uid=uid)
    sdb.do_sub(sid, uid, '')
    return sid
def get_instance(cls, device):
    """Return the cached poco instance for ``device``.

    This is only a slot to store and get an already initialized poco instance
    rather than initializing again. Simply pass the ``current device
    instance`` provided by ``airtest``; on first use a new
    AndroidUiautomationPoco is created and cached.

    Args:
        device (:py:obj:`airtest.core.device.Device`): more details refer to ``airtest doc``

    Returns:
        poco instance
    """
    instance = cls._nuis.get(device)
    if instance is None:
        instance = AndroidUiautomationPoco(device)
        cls._nuis[device] = instance
    return instance
def competition_leaderboard_cli(self, competition, competition_opt=None, path=None, view=False, download=False, csv_display=False, quiet=False):
    """Wrapper for competition_leaderboard_view that prints the results
    as a table or comma separated values, and/or downloads them.

    Parameters
    ==========
    competition: the competition name to view leaderboard for
    competition_opt: an alternative competition option provided by cli
    path: a path to download to, if download is True
    view: if True, show the results in the terminal as csv or table
    download: if True, download the entire leaderboard
    csv_display: if True, print comma separated values instead of table
    quiet: suppress verbose output (default is False)
    """
    competition = competition or competition_opt
    if not (view or download):
        raise ValueError('Either --show or --download must be specified')
    if competition is None:
        # Fall back to the competition configured in the local config file.
        competition = self.get_config_value(self.CONFIG_NAME_COMPETITION)
        if competition is not None and not quiet:
            print('Using competition: ' + competition)
    if competition is None:
        raise ValueError('No competition specified')
    if download:
        self.competition_leaderboard_download(competition, path, quiet)
    if view:
        rows = self.competition_leaderboard_view(competition)
        columns = ['teamId', 'teamName', 'submissionDate', 'score']
        if not rows:
            print('No results found')
        elif csv_display:
            self.print_csv(rows, columns)
        else:
            self.print_table(rows, columns)
def create_expanded_design_for_mixing(design, draw_list, mixing_pos, rows_to_mixers):
    """Build a 3D design array with one slice per draw of the mixed variables.

    Parameters
    ----------
    design : 2D ndarray.
        One row per available alternative per individual; one column per
        index coefficient being estimated.
    draw_list : list of 2D ndarrays.
        All arrays share the same shape ``(num_mixers, num_draws)``. Must have
        as many elements as ``mixing_pos``.
    mixing_pos : list of ints.
        Columns of ``design`` whose index coefficients are treated as random
        variables.
    rows_to_mixers : 2D scipy sparse array.
        Zero/one matrix mapping design rows to the units the mixing is
        performed over (differs from ``rows_to_obs`` for panel data).

    Returns
    -------
    design_3d : 3D numpy array.
        Each slice along the second axis is a copy of the (widened) design
        matrix for one draw of the random variables.
    """
    if len(mixing_pos) != len(draw_list):
        msg = "mixing_pos == {}".format(mixing_pos)
        msg_2 = "len(draw_list) == {}".format(len(draw_list))
        raise ValueError(msg + "\n" + msg_2)
    # Every mixing distribution is assumed to supply the same number of draws.
    num_draws = draw_list[0].shape[1]
    orig_num_vars = design.shape[1]
    # Append copies of the mixed columns to the right of the design matrix.
    mixed_columns = design[:, mixing_pos]
    widened = np.concatenate((design, mixed_columns), axis=1).copy()
    # Replicate the widened matrix once per draw along a new middle axis.
    design_3d = np.repeat(widened[:, None, :], repeats=num_draws, axis=1)
    # Multiply each appended column by its draws from the mixing distribution.
    # NOTE THE IMPLICIT ASSUMPTION THAT ONLY INDEX COEFFICIENTS ARE MIXED.
    for offset, _ in enumerate(mixing_pos):
        # Dense (num_rows, num_draws) array of per-row draws.
        per_row_draws = rows_to_mixers.dot(draw_list[offset])
        # Appended columns start right after the original variables, in the
        # same order as mixing_pos, hence orig_num_vars + offset.
        design_3d[:, :, orig_num_vars + offset] *= per_row_draws
    return design_3d
def pythag(a, b):
    """Compute c = (a^2 + b^2)^0.5 without destructive underflow or overflow.

    It solves the Pythagorean theorem a^2 + b^2 = c^2 by factoring the larger
    magnitude out of the square root so the squared ratio stays <= 1.
    """
    hi = abs(a)
    lo = abs(b)
    if hi > lo:
        return hi * sqrt(1.0 + (lo / float(hi)) ** 2)
    if lo == 0.0:
        # hi <= lo == 0, so both operands are zero.
        return 0.0
    return lo * sqrt(1.0 + (hi / float(lo)) ** 2)
def workon(ctx, issue_id, new, base_branch):
    """Start work on a given issue.

    This command retrieves the issue from the issue tracker, creates and checks
    out a new aptly-named branch, puts the issue in the configured active
    status, assigns it to you and starts a correctly linked Harvest timer.

    If a branch with the same name as the one to be created already exists, it
    is checked out instead. Variations in the branch name occurring after the
    issue ID are accounted for and the branch renamed to match the new issue
    summary.

    If the `default_project` directive is correctly configured, it is enough to
    give the issue ID (instead of the full project prefix + issue ID).
    """
    lancet = ctx.obj
    # Exactly one of issue_id / --new must be supplied.
    if not issue_id and not new:
        raise click.UsageError("Provide either an issue ID or the --new flag.")
    elif issue_id and new:
        raise click.UsageError("Provide either an issue ID or the --new flag, but not both.")
    if new:
        # Create a new issue
        summary = click.prompt("Issue summary")
        issue = create_issue(lancet, summary=summary, add_to_active_sprint=True)
    else:
        issue = get_issue(lancet, issue_id)
    username = lancet.tracker.whoami()
    active_status = lancet.config.get("tracker", "active_status")
    if not base_branch:
        # Fall back to the repository's configured base branch.
        base_branch = lancet.config.get("repository", "base_branch")
    # Get the working branch
    branch = get_branch(lancet, issue, base_branch)
    # Make sure the issue is in a correct status
    transition = get_transition(ctx, lancet, issue, active_status)
    # Make sure the issue is assigned to us
    assign_issue(lancet, issue, username, active_status)
    # Activate environment
    set_issue_status(lancet, issue, active_status, transition)
    with taskstatus("Checking out working branch") as ts:
        lancet.repo.checkout(branch.name)
        ts.ok('Checked out working branch based on "{}"'.format(base_branch))
    with taskstatus("Starting harvest timer") as ts:
        lancet.timer.start(issue)
        ts.ok("Started harvest timer")
def add_ldap_group_link(self, cn, group_access, provider, **kwargs):
    """Add an LDAP group link.

    Args:
        cn (str): CN of the LDAP group
        group_access (int): Minimum access level for members of the LDAP group
        provider (str): LDAP provider for the LDAP group
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabCreateError: If the server cannot perform the request
    """
    payload = {'cn': cn, 'group_access': group_access, 'provider': provider}
    endpoint = '/groups/%s/ldap_group_links' % self.get_id()
    self.manager.gitlab.http_post(endpoint, post_data=payload, **kwargs)
def get_stockprices(chart_range='1y'):
    '''This is a proxy to the main fetch function to cache
    the result based on the chart range parameter.'''
    symbols = list_symbols()

    # The cache file name carries the chart range so each range is cached
    # independently, one refresh per day.
    @daily_cache(filename='iex_chart_{}'.format(chart_range))
    def get_stockprices_cached(all_symbols):
        return _get_stockprices(all_symbols, chart_range)

    return get_stockprices_cached(symbols)
def get_plaintext_citations(bibtex):
    """Parse a BibTeX file to get a clean list of plaintext citations.

    :param bibtex: Either the path to the BibTeX file or the content of a
        BibTeX file.
    :returns: A list of cleaned plaintext citations.
    """
    parser = BibTexParser()
    parser.customization = convert_to_unicode
    # Accept either a filename on disk or raw BibTeX content.
    if os.path.isfile(bibtex):
        with open(bibtex) as fh:
            bib_database = bibtexparser.load(fh, parser=parser)
    else:
        bib_database = bibtexparser.loads(bibtex, parser=parser)
    # Render every entry as cleaned plaintext.
    return [bibentry_as_plaintext(entry) for entry in bib_database.entries]
def length(time_flags):
    # type: (int) -> int
    '''Static method to return the length of the Rock Ridge Time Stamp
    record.

    Parameters:
     time_flags - Integer representing the flags to use.
    Returns:
     The length of this record in bytes.
    '''
    # Bit 7 selects the long (17-byte) timestamp form; otherwise 7 bytes each.
    per_stamp = 17 if time_flags & (1 << 7) else 7
    # Each set bit among the low 7 flags contributes one timestamp.
    num_stamps = bin(time_flags & 0x7f).count('1')
    # 5 bytes of record header plus the timestamps themselves.
    return 5 + per_stamp * num_stamps
def check_session_id_signature(session_id, secret_key=settings.secret_key_bytes(), signed=settings.sign_sessions()):
    """Check the signature of a session ID, returning True if it's valid.

    The server uses this function to check whether a session ID was generated
    with the correct secret key. If signed sessions are disabled, this
    function always returns True.

    Args:
        session_id (str): The session ID to check
        secret_key (str, optional): Secret key (default: value of 'BOKEH_SECRET_KEY' env var)
        signed (bool, optional): Whether to check anything (default: value of
            'BOKEH_SIGN_SESSIONS' env var)
    """
    secret_key = _ensure_bytes(secret_key)
    if not signed:
        return True
    pieces = session_id.split('-', 1)
    if len(pieces) != 2:
        # Not in "<id>-<signature>" form, so it cannot be valid.
        return False
    base_id, provided_signature = pieces
    expected_signature = _signature(base_id, secret_key)
    # hmac.compare_digest() uses a string compare algorithm that doesn't
    # short-circuit so we don't allow timing analysis; encode_utf8 ensures
    # both sides share the same encoding.
    return hmac.compare_digest(encode_utf8(expected_signature), encode_utf8(provided_signature))
def _matching_string(matched, string):
    """Return the string as byte or unicode depending
    on the type of matched, assuming string is an ASCII string."""
    if string is None:
        return string
    if IS_PY2:  # pylint: disable=undefined-variable
        # On Python 2, coerce to unicode only when the match target is unicode.
        if isinstance(matched, text_type):
            return text_type(string)
        return string
    # On Python 3, encode only when matching bytes against a str input.
    if isinstance(matched, bytes) and isinstance(string, str):
        return string.encode(locale.getpreferredencoding(False))
    return string
def _file_md5(file_):
    """Compute the md5 digest of a file in base64 encoding."""
    hasher = hashlib.md5()
    # Read in multiples of the hash block size to keep memory bounded.
    block = 128 * hasher.block_size
    while True:
        chunk = file_.read(block)
        if not chunk:
            break
        hasher.update(chunk)
    # Rewind so callers can re-read the file afterwards.
    file_.seek(0)
    return base64.b64encode(hasher.digest()).decode()
def rate_limited(max_per_second):
    """Decorator factory that enforces a minimum delay between calls.

    NOTE(review): despite the name, ``max_per_second`` is used as the minimum
    number of *seconds between calls* (it is passed straight to
    ``timedelta(seconds=...)``) — confirm against callers.

    Sort of based off of an answer about rate limiting on Stack Overflow.
    Definitely **not** thread safe, so don't even think about it, buddy.

    :raises chrw.exceptions.TimeIsBackToFront: if the clock went backwards
        between calls.
    :raises chrw.exceptions.RequestRateTooHigh: if called again too soon.
    """
    import datetime
    min_request_time = datetime.timedelta(seconds=max_per_second)
    # Single-element list so the nested function can rebind the value.
    last_time_called = [None]

    def decorate(func):
        def rate_limited_function(*args, **kwargs):
            if last_time_called[0]:
                delta = datetime.datetime.now() - last_time_called[0]
                # BUG FIX: the original compared against timedelta.min (the
                # most negative representable timedelta), which can never be
                # true; a backwards clock jump produces a delta below zero.
                if delta < datetime.timedelta(0):
                    # BUG FIX: Python-3-compatible raise (the original used
                    # the py2-only ``raise Exc, msg`` statement form).
                    raise chrw.exceptions.TimeIsBackToFront("Call the Doc!")
                elif delta < min_request_time:
                    msg = "Last request was {0}, should be at least {1}".format(delta, min_request_time)
                    raise chrw.exceptions.RequestRateTooHigh(msg)
            ret = func(*args, **kwargs)
            last_time_called[0] = datetime.datetime.now()
            return ret
        return functools.update_wrapper(rate_limited_function, func)
    return decorate
def _on_context_disconnect(self, context):
    """Respond to Context disconnect event by deleting any record of the no
    longer reachable context. This method runs in the Broker thread and
    must not block."""
    # ``with`` gives the same acquire/release-in-finally semantics as the
    # original explicit try/finally.
    with self._lock:
        LOG.info('%r: Forgetting %r due to stream disconnect', self, context)
        self._forget_context_unlocked(context)
def _handle_compound(self, node, scope, ctxt, stream):
    """Handle Compound nodes by dispatching every child node in order.

    :node: the compound AST node
    :scope: current scope
    :ctxt: current context
    :stream: the input stream
    :returns: None
    """
    self._dlog("handling compound statement")
    # scope.push()
    try:
        for child in node.children():
            self._handle_node(child, scope, ctxt, stream)
    finally:
        # In case a return occurs (returns are implemented by raising an
        # exception), this is where a scope.pop() would belong.
        pass
def isAuthorized(self, pid, action, vendorSpecific=None):
    """Return True if user is allowed to perform ``action`` on ``pid``, else
    False."""
    return self._read_boolean_401_response(
        self.isAuthorizedResponse(pid, action, vendorSpecific))
def get(cls, sha1=''):
    # type: (str) -> CommitDetails
    """Return details about a given commit.

    Args:
        sha1 (str):
            The sha1 of the commit to query. If not given, it will return
            the details for the latest commit.

    Returns:
        CommitDetails: Commit details. You can use the instance of the
        class to query git tree further.
    """
    with conf.within_proj_dir():
        # '||' separates hash/author-name/author-email/subject/body/parents.
        cmd = 'git show -s --format="%H||%an||%ae||%s||%b||%P" {}'.format(sha1)
        result = shell.run(cmd, capture=True, never_pretend=True).stdout
        sha1, name, email, title, desc, parents = result.split('||')
        return CommitDetails(
            sha1=sha1,
            author=Author(name, email),
            title=title,
            desc=desc,
            parents_sha1=parents.split(),
        )
def uncontract_general(basis, use_copy=True):
    """Removes the general contractions from a basis set.

    The returned basis may have functions with coefficients of zero and may
    have duplicate shells.

    :param basis: basis set dictionary containing an ``'elements'`` mapping.
    :param use_copy: if True, the input basis set is not modified (a deep
        copy is taken first).
    :returns: the uncontracted, pruned basis set.
    """
    if use_copy:
        basis = copy.deepcopy(basis)
    # Idiom: iterate values directly since the element key is unused.
    for el in basis['elements'].values():
        # Idiom fix: ``'x' not in d`` instead of ``not 'x' in d``.
        if 'electron_shells' not in el:
            continue
        newshells = []
        for sh in el['electron_shells']:
            # See if we actually have to uncontract.
            # Also, don't uncontract sp, spd, .... orbitals
            # (leave that to uncontract_spdf)
            if len(sh['coefficients']) == 1 or len(sh['angular_momentum']) > 1:
                newshells.append(sh)
            elif len(sh['angular_momentum']) == 1:
                # One shell per general contraction: shallow-copy the shell
                # and keep a single coefficient row in each copy.
                for c in sh['coefficients']:
                    newsh = sh.copy()
                    newsh['coefficients'] = [c]
                    newshells.append(newsh)
        el['electron_shells'] = newshells
    # If use_copy is True, we already made our deep copy above, so tell
    # prune_basis not to copy again.
    return prune_basis(basis, False)
def get_method(self, cls_name):
    """Generator that yields all registered authenticators of a given
    authentication class.

    :param cls_name: Name of the authentication class (the ``__name__`` of
        the method instance's class).
    :return: generator of matching authentication method instances
    """
    # NOTE: the loop variable is named so as not to shadow the builtin ``id``.
    for _method_id, spec in self.db.items():
        if spec["method"].__class__.__name__ == cls_name:
            yield spec["method"]
def legends(value):
    """list or KeyedList of ``Legends``: Legend definitions

    Legends visualize scales, and take one or more scales as their input.
    They can be customized via a LegendProperty object."""
    # Validate that every entry is a Legend instance.
    for idx, item in enumerate(value):
        _assert_is_type('legends[{0}]'.format(idx), item, Legend)
def name(self):
    """Give back tab name if is set else generate name by code"""
    # An explicit (truthy) name wins; otherwise derive one from the code.
    return self._name or self.code.replace('_', ' ').capitalize()
def _StrftimeLocal(value, unused_context, args):
    """Convert a timestamp in seconds to a string based on the format string.
    Returns local time."""
    return _StrftimeHelper(args, time.localtime(value))
def find_wheels(projects, search_dirs):
    """Find wheels from which we can import PROJECTS.

    Scan through SEARCH_DIRS for a wheel for each PROJECT in turn. Return
    a list of the first wheel found for each PROJECT.
    """
    found = []
    # No version checking here: any matching wheel is good enough to
    # bootstrap with, and we assume "universal" wheels are available.
    for project in projects:
        pattern = project + '-*.whl'
        for dirname in search_dirs:
            matches = glob.glob(os.path.join(dirname, pattern))
            if matches:
                found.append(os.path.abspath(matches[0]))
                break
        else:
            # No directory contained a wheel for this project.
            logger.fatal('Cannot find a wheel for %s' % (project,))
    return found
def point_to_line(point, segment_start, segment_end):
    """Given a point and a line segment, return the vector from the point to
    the closest point on the segment."""
    # TODO: Needs unittests.
    direction = segment_end - segment_start
    # Parametric position of the projection along the segment's line.
    t = -(segment_start - point).dot(direction) / (direction.length_squared())
    nearest = segment_start + scale_v3(direction, t)
    return point - nearest
def request_object(self):
    """Grab an object from the pool. If the pool is empty, a new object will
    be generated and returned."""
    if self.queue.count > 0:
        obj_to_return = self.__dequeue()
    else:
        # The queue is empty, generate a new item.
        self.__init_object()
        # BUG FIX: the original assigned to a misspelled ``object_to_return``
        # here and then returned ``obj_to_return`` (still None) on this path.
        obj_to_return = self.__dequeue()
    self.active_objects += 1
    return obj_to_return
def _get(url, headers={}, params=None):
    """Tries to GET data from an endpoint using retries.

    NOTE(review): the mutable default ``headers={}`` is shared across calls;
    it appears never to be mutated here, but confirm before relying on that.

    :param url: full endpoint URL to fetch
    :param headers: HTTP headers to send with the request
    :param params: query parameters, encoded via _foursquare_urlencode
    :raises FoursquareException: on non-retryable API errors or once the
        final retry has failed
    """
    param_string = _foursquare_urlencode(params)
    for i in xrange(NUM_REQUEST_RETRIES):
        try:
            try:
                response = requests.get(url, headers=headers, params=param_string, verify=VERIFY_SSL)
                return _process_response(response)
            except requests.exceptions.RequestException as e:
                # Network-level failure: wrap and re-raise as a FoursquareException.
                _log_and_raise_exception('Error connecting with foursquare API', e)
        except FoursquareException as e:
            # Some errors don't bear repeating
            if e.__class__ in [InvalidAuth, ParamError, EndpointError, NotAuthorized, Deprecated]:
                raise
            # If we've reached our last try, re-raise
            if ((i + 1) == NUM_REQUEST_RETRIES):
                raise
        # Brief back-off before the next attempt.
        time.sleep(1)
def find_config_file(self, project=None, extension='.conf'):
    """Return the config file.

    :param project: e.g. "zvmsdk"
    :param extension: the type (suffix) of the config file
    """
    # Search every known config directory for matching files.
    return self._search_dirs(self._get_config_dirs(), project, extension)
def expect_column_values_to_be_decreasing(self, column, strictly=None, parse_strings_as_datetimes=None, mostly=None, result_format=None, include_config=False, catch_exceptions=None, meta=None):
    """Expect column values to be decreasing.

    By default, this expectation only works for numeric or datetime data.
    When `parse_strings_as_datetimes=True`, it can also parse strings to datetimes.

    If `strictly=True`, then this expectation is only satisfied if each consecutive value
    is strictly decreasing--equal values are treated as failures.

    expect_column_values_to_be_decreasing is a :func:`column_map_expectation <great_expectations.data_asset.dataset.Dataset.column_map_expectation>`.

    Args:
        column (str): The column name.

    Keyword Args:
        strictly (Boolean or None): If True, values must be strictly less than previous values
        parse_strings_as_datetimes (boolean or None): If True, all non-null column values to datetimes before making comparisons
        mostly (None or a float between 0 and 1): Return `"success": True` if at least mostly percent of values match the expectation. For more detail, see :ref:`mostly`.

    Other Parameters:
        result_format (str or None): Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`.
            For more detail, see :ref:`result_format <result_format>`.
        include_config (boolean): If True, then include the expectation config as part of the result object. For more detail, see :ref:`include_config`.
        catch_exceptions (boolean or None): If True, then catch exceptions and include them as part of the result object. For more detail, see :ref:`catch_exceptions`.
        meta (dict or None): A JSON-serializable dictionary (nesting allowed) that will be included in the output without modification. For more detail, see :ref:`meta`.

    Returns:
        A JSON-serializable expectation result object.

        Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and
        :ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.

    See Also:
        expect_column_values_to_be_increasing
    """
    # Abstract: concrete Dataset backends must implement this expectation.
    raise NotImplementedError
def _update_marshallers(self):
    """Update the full marshaller list and other data structures.

    Makes a full list of both builtin and user marshallers and rebuilds
    internal data structures used for looking up which marshaller to use
    for reading/writing Python objects to/from file.

    Also checks for whether the required modules are present or not,
    loading the required modules (if not doing lazy loading), and whether
    the required modules are imported already or not.
    """
    # Combine all sets of marshallers, in the configured priority order.
    self._marshallers = []
    for v in self._priority:
        if v == 'builtin':
            self._marshallers.extend(self._builtin_marshallers)
        elif v == 'plugin':
            self._marshallers.extend(self._plugin_marshallers)
        elif v == 'user':
            self._marshallers.extend(self._user_marshallers)
        else:
            raise ValueError('priority attribute has an illegal '
                             'element value.')
    # Determine whether the required modules are present, do module
    # loading, and determine whether the required modules are
    # imported. Both flag lists are indexed in step with self._marshallers.
    self._has_required_modules = len(self._marshallers) * [False]
    self._imported_required_modules = len(self._marshallers) * [False]
    for i, m in enumerate(self._marshallers):
        # Check if the required modules are here.
        try:
            for name in m.required_parent_modules:
                if name not in sys.modules and pkgutil.find_loader(name) is None:
                    raise ImportError('module not present')
        except ImportError:
            self._has_required_modules[i] = False
        except:
            raise
        else:
            self._has_required_modules[i] = True
        # Modules obviously can't be fully loaded if not all are
        # present.
        if not self._has_required_modules[i]:
            self._imported_required_modules[i] = False
            continue
        # Check if all modules are loaded or not, and load them if
        # doing lazy loading.
        try:
            for name in m.required_modules:
                if name not in sys.modules:
                    raise ImportError('module not loaded yet.')
        except ImportError:
            if self._lazy_loading:
                self._imported_required_modules[i] = False
            else:
                success = self._import_marshaller_modules(m)
                self._has_required_modules[i] = success
                self._imported_required_modules[i] = success
        except:
            raise
        else:
            self._imported_required_modules[i] = True
    # Construct the dictionary to look up the appropriate marshaller
    # by type, the equivalent one to read data types given type
    # strings needs to be created from it (basically, we have to
    # make the key be the python_type_string from it), and the
    # equivalent one to read data types given MATLAB class strings
    # needs to be created from it (basically, we have to make the
    # key be the matlab_class from it).
    # Marshallers earlier in the list have priority (means that the
    # builtins have the highest). Since the types can be specified
    # as strings as well, duplicates will be checked for by running
    # each type through str if it isn't str.
    types_as_str = set()
    self._types = dict()
    self._type_strings = dict()
    self._matlab_classes = dict()
    for i, m in enumerate(self._marshallers):
        # types.
        for tp in m.types:
            if isinstance(tp, str):
                tp_as_str = tp
            else:
                # Use the fully qualified "module.name" form as the key.
                tp_as_str = tp.__module__ + '.' + tp.__name__
            if tp_as_str not in types_as_str:
                self._types[tp_as_str] = i
                types_as_str.add(tp_as_str)
        # type strings
        for type_string in m.python_type_strings:
            if type_string not in self._type_strings:
                self._type_strings[type_string] = i
        # matlab classes.
        for matlab_class in m.matlab_classes:
            if matlab_class not in self._matlab_classes:
                self._matlab_classes[matlab_class] = i
def addnot(self, action=None, subject=None, **conditions):
    """Defines an ability which cannot be done."""
    # A Rule whose first argument is False denies the action.
    denial = Rule(False, action, subject, **conditions)
    self.add_rule(denial)
def ParsedSections(file_val):
    """Get the sections and options of a file returned as a dictionary"""
    try:
        parsed = {}
        section = ''
        for raw_line in file_val.split('\n'):
            line = raw_line.strip()
            if line == '':
                continue
            header = re.match(r'\[.+\]', line)
            if header:
                # "[name]" -> "name"
                section = header.group()[1:-1]
                parsed[section] = {}
            else:
                option, value = line.split('=', 1)
                option = option.strip()
                value = value.strip()
                if option.startswith('#'):
                    # Commented-out options keep the whole line as the key.
                    parsed[section][line] = ''
                else:
                    parsed[section][option] = value
    except Exception:  # pragma: no cover
        # Best effort: any malformed input yields an empty result.
        parsed = {}
    return parsed
def get_data_files(top):
    """Get data files"""
    files = []
    # Number of leading characters to strip so paths are relative to `here`.
    trim = len(here + os.path.sep)
    for dirpath, _, filenames in os.walk(top):
        entry = (dirpath[trim:], [os.path.join(dirpath, fname) for fname in filenames])
        files.append(entry)
    return files
def get_preds(model: nn.Module, dl: DataLoader, pbar: Optional[PBar] = None, cb_handler: Optional[CallbackHandler] = None, activ: nn.Module = None, loss_func: OptLossFunc = None, n_batch: Optional[int] = None) -> List[Tensor]:
    "Tuple of predictions and targets, and optional losses (if `loss_func`) using `dl`, max batches `n_batch`."
    # validate(..., average=False) yields per-batch (preds, targets); zip/cat
    # concatenates each stream across batches and moves it to the CPU.
    res = [torch.cat(o).cpu() for o in zip(*validate(model, dl, cb_handler=cb_handler, pbar=pbar, average=False, n_batch=n_batch))]
    if loss_func is not None:
        # Append un-reduced per-sample losses computed from (preds, targets).
        with NoneReduceOnCPU(loss_func) as lf:
            res.append(lf(res[0], res[1]))
    if activ is not None:
        # Apply the final activation (e.g. softmax) to the raw predictions.
        res[0] = activ(res[0])
    return res
def client_port(self):
    """Client connection's TCP port."""
    peer = self._client.getpeername()
    # AF_INET/AF_INET6 peers are (host, port[, ...]) tuples; anything else
    # (maybe a Unix domain socket address) has no meaningful port.
    return peer[1] if isinstance(peer, tuple) else 0
def assignment_action(self, text, loc, assign):
    """Code executed after recognising an assignment statement.

    :param text: full input text being parsed
    :param loc: location of the match within ``text``
    :param assign: parse results carrying ``var`` (lvalue) and ``exp``
    :raises SemanticException: for an undefined lvalue or a type mismatch
    """
    exshared.setpos(loc, text)
    if DEBUG > 0:
        print("ASSIGN:", assign)
        if DEBUG == 2:
            self.symtab.display()
        if DEBUG > 2:
            return
    var_index = self.symtab.lookup_symbol(assign.var, [SharedData.KINDS.GLOBAL_VAR, SharedData.KINDS.PARAMETER, SharedData.KINDS.LOCAL_VAR])
    # Idiom fix: identity comparison with None (was ``== None``).
    if var_index is None:
        raise SemanticException("Undefined lvalue '%s' in assignment" % assign.var)
    if not self.symtab.same_types(var_index, assign.exp[0]):
        raise SemanticException("Incompatible types in assignment")
    self.codegen.move(assign.exp[0], var_index)
def nsx_controller_name(self, **kwargs):
    """Get/Set nsx controller name.

    Args:
        name (str): Name of the nsx controller
        get (bool): Get nsx controller config (True, False)
        callback (function): A function executed upon completion of the
            method.

    Returns:
        Return value of `callback`.

    Raises:
        None
    """
    name = kwargs.pop('name')
    # Build the configuration payload for the named controller.
    config = self._brocade_tunnels.nsx_controller_name(name=name)
    if kwargs.pop('get', False):
        return self._callback(config, handler='get_config')
    return self._callback(config)
def save_config(self, config_file_name):
    """Save configuration file as prt or str.

    Configuration file type is extracted from the file suffix - prt or str.

    :param config_file_name: full path to the configuration file.
        IxTclServer must have access to the file location. either:
        The config file is on shared folder.
        IxTclServer run on the client machine.
    """
    config_file_name = config_file_name.replace('\\', '/')
    suffix = path.splitext(config_file_name)[-1].lower()
    if suffix == '.prt':
        command = 'port export "{}" {}'
    elif suffix == '.str':
        # self.reset()
        command = 'stream export "{}" {}'
    else:
        raise ValueError('Configuration file type {} not supported.'.format(suffix))
    self.api.call_rc(command.format(config_file_name, self.uri))
def move_tab(self, index_from, index_to):
    """Move tab."""
    # Remove the client from its old slot and re-insert it at the new one.
    moved = self.clients.pop(index_from)
    self.clients.insert(index_to, moved)
def get_alignak_status(self, details=False):  # pylint: disable=too-many-locals, too-many-branches
    """Push the alignak overall state as a passive check.

    Build all the daemons overall state as a passive check that can be
    notified to the Alignak WS.

    The Alignak Arbiter is considered as an host which services are all the
    Alignak running daemons. An Alignak daemon is considered as a service of
    an Alignak host. As such, it reports its status as a passive service
    check formatted as defined for the Alignak WS module
    (see http://alignak-module-ws.readthedocs.io)

    :param details: whether to request detailed daemon statistics
    :return: A dict with the following structure::

        {
            'name': 'type and name of the daemon',
            'livestate': {
                'state': "ok",
                'output': "state message",
                'long_output': "state message - longer... if any",
                'perf_data': "daemon metrics (if any...)"
            },
            'services': [
                # one entry per Alignak daemon, same livestate structure
            ]
        }

    :rtype: dict
    """
    now = int(time.time())
    # Get the arbiter statistics
    inner_stats = self.get_daemon_stats(details=details)

    # Base passive-check payload for the arbiter "host": template metadata,
    # an empty (unknown) livestate and an empty services list. The livestate
    # is refined below once daemon states are known.
    res = {
        "name": inner_stats['alignak'],
        "template": {
            "_templates": ["alignak", "important"],
            "alias": inner_stats['alignak'],
            "active_checks_enabled": False,
            "passive_checks_enabled": True,
            "notes": ''
        },
        "variables": {},
        "livestate": {
            "timestamp": now,
            "state": "unknown",
            "output": "",
            "long_output": "",
            "perf_data": ""
        },
        "services": []
    }
    if details:
        # NOTE(review): this branch rebuilds exactly the same dict as above;
        # the detailed variant looks unimplemented - confirm intent.
        res = {
            "name": inner_stats['alignak'],
            "template": {
                "_templates": ["alignak", "important"],
                "alias": inner_stats['alignak'],
                "active_checks_enabled": False,
                "passive_checks_enabled": True,
                "notes": ''
            },
            "variables": {},
            "livestate": {
                "timestamp": now,
                "state": "unknown",
                "output": "",
                "long_output": "",
                "perf_data": ""
            },
            "services": []
        }

    # Create self arbiter service - I am now considered as a service for my
    # Alignak monitor!
    if 'livestate' in inner_stats:
        livestate = inner_stats['livestate']
        res['services'].append({
            "name": inner_stats['name'],
            "livestate": {
                "timestamp": now,
                # livestate['state'] indexes: 0=ok, 1=warning, 2=critical, 3=unknown
                "state": ["ok", "warning", "critical", "unknown"][livestate['state']],
                "output": livestate['output'],
                "long_output": livestate['long_output'] if 'long_output' in livestate else "",
                "perf_data": livestate['perf_data'] if 'perf_data' in livestate else ""
            }
        })

    # Alignak performance data are:
    # 1/ the monitored items counters
    if 'counters' in inner_stats:
        metrics = []
        my_counters = [strclss for _, _, strclss, _, _
                       in list(self.conf.types_creations.values())
                       if strclss not in ['hostescalations', 'serviceescalations']]
        for counter in inner_stats['counters']:
            # Only the arbiter created objects...
            if counter not in my_counters:
                continue
            metrics.append("'%s'=%d" % (counter, inner_stats['counters'][counter]))
        res['livestate']['perf_data'] = ' '.join(metrics)

    # Report the arbiter daemons states, but only if they exist...
    if 'daemons_states' in inner_stats:
        state = 0
        long_output = []
        for daemon_id in sorted(inner_stats['daemons_states']):
            daemon = inner_stats['daemons_states'][daemon_id]
            # Ignore daemons that are not active in the configuration
            if not daemon['active']:
                continue
            res['services'].append({
                "name": daemon_id,
                "livestate": {
                    "timestamp": now,
                    "name": "%s_%s" % (daemon['type'], daemon['name']),
                    # daemon['livestate'] indexes: 0=alive, 1=not reachable,
                    # 2=not alive
                    "state": ["ok", "warning", "critical", "unknown"][daemon['livestate']],
                    "output": [u"daemon is alive and reachable.",
                               u"daemon is not reachable.",
                               u"daemon is not alive."][daemon['livestate']],
                    "long_output": "Realm: %s (%s). Listening on: %s" % (
                        daemon['realm_name'], daemon['manage_sub_realms'], daemon['uri']),
                    "perf_data": "last_check=%.2f" % daemon['last_check']
                }
            })
            # Overall state is the worst daemon state seen so far
            state = max(state, daemon['livestate'])
            long_output.append("%s - %s" % (
                daemon_id, [u"daemon is alive and reachable.",
                            u"daemon is not reachable.",
                            u"daemon is not alive."][daemon['livestate']]))
        res['livestate'].update({
            "state": "up",  # Always Up ;)
            "output": [u"All my daemons are up and running.",
                       u"Some of my daemons are not reachable.",
                       u"Some of my daemons are not responding!"][state],
            "long_output": '\n'.join(long_output)
        })
        log_level = 'info'
        # NOTE(review): labels look swapped w.r.t. the outputs above (state 1
        # is "not reachable", state 2 is "not alive") and state 2 gets a
        # *lower* severity than state 1 - confirm the intended mapping.
        if state == 1:  # DOWN
            log_level = 'error'
        if state == 2:  # UNREACHABLE
            log_level = 'warning'
        if self.conf.log_alignak_checks or state > 0:
            # Periodic overall-state monitoring log
            self.add(make_monitoring_log(log_level, 'ALIGNAK CHECK;%s;%d;%s;%s' % (
                self.alignak_name, state, res['livestate']['output'],
                res['livestate']['long_output'])))
        if self.my_status != state:
            # State changed since the last report: raise an alert log
            self.my_status = state
            self.add(make_monitoring_log(log_level, 'ALIGNAK ALERT;%s;%d;%s;%s' % (
                self.alignak_name, state, res['livestate']['output'],
                res['livestate']['long_output'])))

    if self.alignak_monitor:
        # Push the passive check to the configured external Alignak monitor,
        # creating and authenticating the connection lazily.
        logger.debug("Pushing Alignak passive check to %s: %s",
                     self.alignak_monitor, res)
        if self.my_monitor is None:
            self.my_monitor = MonitorConnection(self.alignak_monitor)
        if not self.my_monitor.authenticated:
            self.my_monitor.login(self.alignak_monitor_username,
                                  self.alignak_monitor_password)
        result = self.my_monitor.patch('host', res)
        logger.debug("Monitor reporting result: %s", result)
    else:
        logger.debug("No configured Alignak monitor to receive: %s", res)
    return res
def add_word(self, word):
    """Add a <word> token element to the graph as a token node.

    Parameters
    ----------
    word : etree.Element
        etree representation of a <word> element
        (i.e. a token, which might contain child elements)
    """
    word_id = self.get_element_id(word)
    if word.getparent().tag in ('node', 'sentence'):
        parent_id = self.get_parent_id(word)
    else:
        # ExportXML is an inline XML format. Therefore, a <word> might be
        # embedded in weird elements. If this is the case, attach it
        # directly to the closest <node> or <sentence> ancestor.
        try:
            # Use the next() builtin instead of the Python-2-only
            # ``.next()`` iterator method, so this works on Python 3 too.
            parent = next(word.iterancestors(tag=('node', 'sentence')))
            parent_id = self.get_element_id(parent)
        except StopIteration:
            # There's at least one weird edge case, where a <word> is
            # embedded like this: (text (topic (edu (word)))).
            # Here, we guess the sentence ID from the word ID prefix.
            parent_id = self.get_element_id(word).split('_')[0]
    self.tokens.append(word_id)
    # use all attributes except for the ID
    word_attribs = self.element_attribs_to_dict(word)
    # add the token string under the key namespace:token
    token_str = word_attribs[self.ns + ':form']
    word_attribs.update({self.ns + ':token': token_str, 'label': token_str})
    self.add_node(word_id, layers={self.ns, self.ns + ':token'},
                  attr_dict=word_attribs)
    self.add_edge(parent_id, word_id, edge_type=dg.EdgeTypes.dominance_relation)
    self.parse_child_elements(word)
def unicode(expr, cache=None, **settings):
    """Return a unicode representation of the given object/expression.

    Args:
        expr: Expression to print
        cache (dict or None): dictionary to use for caching
        show_hs_label (bool or str): Whether to show a label for the Hilbert
            space of `expr`. By default (``show_hs_label=True``), the label
            is shown as a superscript. It can be shown as a subscript with
            ``show_hs_label='subscript'`` or suppressed entirely
            (``show_hs_label=False``)
        sig_as_ketbra (bool): Whether to render instances of
            :class:`.LocalSigma` as a ket-bra (default), or as an operator
            symbol
        unicode_sub_super (bool): Whether to try to use unicode symbols for
            sub- or superscripts if possible
        unicode_op_hats (bool): Whether to draw unicode hats on single-letter
            operator symbols

    Note that the accepted parameters and their default values may be changed
    through :func:`init_printing` or :func:`configure_printing`
    """
    try:
        if cache is None and len(settings) == 0:
            # Fast path: reuse the shared default printer instance that is
            # stored as an attribute on this very function.
            return unicode.printer.doprint(expr)
        else:
            # A custom cache or custom settings require a dedicated printer
            # instance so the defaults are not polluted.
            printer = unicode._printer_cls(cache, settings)
            return printer.doprint(expr)
    except AttributeError:
        # init_printing was not called, so the ``printer``/``_printer_cls``
        # function attributes do not exist yet. Set up the defaults, then
        # retry the call.
        unicode._printer_cls = QnetUnicodePrinter
        unicode.printer = unicode._printer_cls()
        return unicode(expr, cache, **settings)
def _root_mean_square_error ( y , y_pred , w ) :
"""Calculate the root mean square error .""" | return np . sqrt ( np . average ( ( ( y_pred - y ) ** 2 ) , weights = w ) ) |
def list_feeds():
    """List all feeds in plain text and give their aliases."""
    with Database("feeds") as feeds, Database("aliases") as aliases_db:
        # Snapshot the alias mapping once; keys()/values() come from the
        # same dict so their order corresponds.
        alias_names = list(aliases_db.keys())
        alias_targets = list(aliases_db.values())
        for name in feeds:
            url = feeds[name]
            aliases = [alias for alias, target in zip(alias_names, alias_targets)
                       if target == name]
            if aliases:
                print(name, " : %s Aliases: %s" % (url, aliases))
            else:
                print(name, " : %s" % url)
def update(self, stats, duration=3, cs_status=None, return_to_browser=False):
    """Update the screen and wait up to *duration* seconds for a key press.

    :param stats: stats database to display
    :param duration: duration of the wait loop (seconds)
    :param cs_status:
        "None": standalone or server mode;
        "Connected": client is connected to the server;
        "Disconnected": client is disconnected from the server
    :param return_to_browser:
        True: do not exit, return to the browser list;
        False: exit and return to the shell
    :return: True if an exit key has been pressed, False otherwise
    """
    # Flush display
    self.flush(stats, cs_status=cs_status)
    # A non-positive duration means update + export took longer than
    # refresh_time: display anyway and log a message.
    if duration <= 0:
        logger.warning('Update and export time higher than refresh_time.')
        duration = 0.1
    countdown = Timer(duration)
    # Default timeout (in ms) for the getch method
    self.term_window.timeout(int(duration * 1000))
    exit_requested = False
    while not countdown.finished() and not exit_requested:
        pressed = self.__catch_key(return_to_browser=return_to_browser)
        # ESC or 'q' ends the loop
        exit_requested = pressed in (ord('\x1b'), ord('q'))
        if not exit_requested and pressed > -1:
            # A non-exit key was hit: redraw the display
            self.flush(stats, cs_status=cs_status)
        # Shrink the getch timeout to the remaining countdown
        self.term_window.timeout(int(countdown.get() * 1000))
    return exit_requested
def _get_storage ( cls , uri ) :
"""Given a URI like local : / / / srv / repo or s3 : / / key : secret @ apt . example . com ,
return a libcloud storage or container object .""" | driver = cls . _get_driver ( uri . scheme )
key = uri . username
secret = uri . password
container = uri . netloc
driver_kwargs = { }
if uri . scheme . startswith ( 's3' ) :
if not key :
key = os . environ . get ( 'AWS_ACCESS_KEY_ID' )
if not secret :
secret = os . environ . get ( 'AWS_SECRET_ACCESS_KEY' )
if not ( key and secret and container ) :
raise ValueError ( 'For S3 you must provide an access key ID, secret access key, and bucket name' )
# No way to store this in the URI , what about a CLI option too ?
if 'AWS_TOKEN' in os . environ :
driver_kwargs [ 'token' ] = os . environ [ 'AWS_TOKEN' ]
elif uri . scheme == 'local' :
parts = [ ]
if uri . netloc :
parts . append ( uri . netloc )
if uri . path :
parts . append ( uri . path )
if not parts :
parts . append ( '.' )
base_path = os . path . abspath ( '' . join ( parts ) )
key = os . path . dirname ( base_path )
container = os . path . basename ( base_path )
storage = driver ( key , secret , ** driver_kwargs )
try :
return storage . get_container ( container )
except ContainerDoesNotExistError :
return storage . create_container ( container ) |
def convert_camel_case_string(name: str) -> str:
    """Convert a camelCase/PascalCase string to snake_case."""
    # First pass: split a run of capitals followed by lowercase letters.
    partially_split = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", name)
    # Second pass: split a lowercase/digit followed by a capital, then lower.
    return re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", partially_split).lower()
def _collected_label ( collect , label ) :
"""Label of a collected column .""" | if not collect . __name__ . startswith ( '<' ) :
return label + ' ' + collect . __name__
else :
return label |
def _pseudoinverse ( self , A , tol = 1.0e-10 ) :
"""Compute the Moore - Penrose pseudoinverse , wraps np . linalg . pinv
REQUIRED ARGUMENTS
A ( np KxK matrix ) - the square matrix whose pseudoinverse is to be computed
RETURN VALUES
Ainv ( np KxK matrix ) - the pseudoinverse
OPTIONAL VALUES
tol - the tolerance ( relative to largest magnitude singlular value ) below which singular values are to not be include in forming pseudoinverse ( default : 1.0e - 10)
NOTES
In previous versions of pymbar / Numpy , we wrote our own pseudoinverse
because of a bug in Numpy .""" | return np . linalg . pinv ( A , rcond = tol ) |
def odd_digit_product(num: int) -> int:
    """Calculate the product of the odd digits of a positive integer.

    Args:
        num (int): Input positive integer.

    Returns:
        int: Product of the odd digits if there are any, 0 otherwise.

    Examples:
        >>> odd_digit_product(1)
        1
        >>> odd_digit_product(4)
        0
        >>> odd_digit_product(235)
        15
    """
    odd_digits = [int(ch) for ch in str(num) if int(ch) % 2]
    if not odd_digits:
        return 0
    result = 1
    for digit in odd_digits:
        result *= digit
    return result
def clear_stats(self):
    """Reset server stat counters.

    Rebuilds ``self.stats``: plain counters are reset to ``0``/``False``,
    while derived entries are lambdas taking the stats dict itself (``s``)
    and computing their value on demand.
    """
    self._start_time = None
    self._run_time = 0
    self.stats = {
        'Enabled': False,
        'Bind Address': lambda s: repr(self.bind_addr),
        # ``(not s['Enabled']) and -1 or X`` is an old-style conditional:
        # -1 when stats are disabled, X otherwise.
        'Run time': lambda s: (not s['Enabled']) and - 1 or self.runtime(),
        'Accepts': 0,
        'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
        'Queue': lambda s: getattr(self.requests, 'qsize', None),
        'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
        'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
        'Socket Errors': 0,
        # Aggregated entries sum the per-worker-thread counters.
        'Requests': lambda s: (not s['Enabled']) and - 1 or sum(
            [w['Requests'](w) for w in s['Worker Threads'].values()], 0,
        ),
        'Bytes Read': lambda s: (not s['Enabled']) and - 1 or sum(
            [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0,
        ),
        'Bytes Written': lambda s: (not s['Enabled']) and - 1 or sum(
            [w['Bytes Written'](w) for w in s['Worker Threads'].values()], 0,
        ),
        'Work Time': lambda s: (not s['Enabled']) and - 1 or sum(
            [w['Work Time'](w) for w in s['Worker Threads'].values()], 0,
        ),
        # Throughput: bytes per second of actual work time; ``or 1e-6``
        # guards against division by zero when no work was recorded.
        'Read Throughput': lambda s: (not s['Enabled']) and - 1 or sum(
            [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
             for w in s['Worker Threads'].values()], 0,
        ),
        'Write Throughput': lambda s: (not s['Enabled']) and - 1 or sum(
            [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
             for w in s['Worker Threads'].values()], 0,
        ),
        'Worker Threads': {},
    }
    # Publish into the process-wide statistics registry - an attribute
    # attached to the ``logging`` module; presumably consumed by a stats
    # reporting page (confirm against the caller).
    logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats
def generate_mavlink(directory, xml):
    '''generate MVMavlink header and implementation

    Writes MVMavlink.h and MVMavlink.m into *directory*, expanding the
    Objective-C templates with the parsed *xml* definition via ``t.write``.
    Files are opened with ``with`` so they are closed even if template
    expansion raises.
    '''
    # Header: delegate protocol and MVMavlink interface declaration.
    with open(os.path.join(directory, "MVMavlink.h"), mode='w') as f:
        t.write(f, '''
//
// MVMavlink.h
// MAVLink communications protocol built from ${basename}.xml
//
// Created on ${parse_time} by mavgen_objc.py
// http://qgroundcontrol.org/mavlink
//
#import "MVMessage.h"
${{message_definition_files:#import "MV${name_camel_case}Messages.h"
}}
@class MVMavlink;
@protocol MVMessage;
@protocol MVMavlinkDelegate <NSObject>
/*!
Method called on the delegate when a full message has been received. Note that this may be called multiple times when parseData: is called, if the data passed to parseData: contains multiple messages.
@param mavlink The MVMavlink object calling this method
@param message The id<MVMessage> class containing the parsed message
*/
- (void)mavlink:(MVMavlink *)mavlink didGetMessage:(id<MVMessage>)message;
/*!
Method called on the delegate when data should be sent.
@param mavlink The MVMavlink object calling this method
@param data NSData object containing the bytes to be sent
*/
- (BOOL)mavlink:(MVMavlink *)mavlink shouldWriteData:(NSData *)data;
@end
/*!
Class for parsing and sending instances of id<MVMessage>
@discussion MVMavlink receives a stream of bytes via the parseData: method and calls the delegate method mavlink:didGetMessage: each time a message is fully parsed. Users of MVMavlink can call parseData: anytime they get new data, even if that data does not contain a complete message.
*/
@interface MVMavlink : NSObject
@property (weak, nonatomic) id<MVMavlinkDelegate> delegate;
/*!
Parse byte data received from a MAVLink byte stream.
@param data NSData containing the received bytes
*/
- (void)parseData:(NSData *)data;
/*!
Compile MVMessage object into a bytes and pass to the delegate for sending.
@param message Object conforming to the MVMessage protocol that represents the data to be sent
@return YES if message sending was successful
*/
- (BOOL)sendMessage:(id<MVMessage>)message;
@end
''', xml)
    # Implementation: byte-stream parsing and message sending.
    with open(os.path.join(directory, "MVMavlink.m"), mode='w') as f:
        t.write(f, '''
//
// MVMavlink.m
// MAVLink communications protocol built from ${basename}.xml
//
// Created by mavgen_objc.py
// http://qgroundcontrol.org/mavlink
//
#import "MVMavlink.h"
@implementation MVMavlink
- (void)parseData:(NSData *)data {
mavlink_message_t msg;
mavlink_status_t status;
char *bytes = (char *)[data bytes];
for (NSInteger i = 0; i < [data length]; ++i) {
if (mavlink_parse_char(MAVLINK_COMM_0, bytes[i], &msg, &status)) {
// Packet received
id<MVMessage> message = [MVMessage messageWithCMessage:msg];
[_delegate mavlink:self didGetMessage:message];
}
}
}
- (BOOL)sendMessage:(id<MVMessage>)message {
return [_delegate mavlink:self shouldWriteData:[message data]];
}
@end
''', xml)
def get_agile_board(self, board_id):
    """Get agile board info by id.

    :param board_id: id of the agile board
    :return: response of the REST GET call
    """
    endpoint = 'rest/agile/1.0/board/{}'.format(str(board_id))
    return self.get(endpoint)
def VerifyServerPEM(self, http_object):
    """Check the server PEM for validity.

    This is used to determine connectivity to the server. Sometimes captive
    portals return a valid HTTP status, but the data is corrupted.

    Args:
        http_object: The response received from the server.

    Returns:
        True if the response contains a valid server certificate.
    """
    # Initialize before the try block so the error log below can never hit
    # an unbound variable (previously, a failure while reading
    # ``http_object.data`` raised a NameError inside the handler).
    server_url = None
    try:
        server_url = http_object.url
        server_pem = http_object.data
        if b"BEGIN CERTIFICATE" in server_pem:
            # Now we know that this proxy is working. We still have to verify
            # the certificate. This will raise if the server cert is invalid.
            server_certificate = rdf_crypto.RDFX509Cert(server_pem)
            self.communicator.LoadServerCertificate(
                server_certificate=server_certificate,
                ca_certificate=self.ca_cert)
            logging.info("Server PEM re-keyed.")
            return True
    except Exception as e:  # pylint: disable=broad-except
        logging.info("Unable to verify server certificate at %s: %s",
                     server_url, e)
        return False
def serialize_encryption_context(encryption_context):
    """Serializes the contents of a dictionary into a byte string.

    :param dict encryption_context: Dictionary of encryption context keys/values.
    :returns: Serialized encryption context
    :rtype: bytes
    """
    if not encryption_context:
        return bytes()
    entry_count = len(encryption_context)
    if entry_count > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
        raise SerializationError("The encryption context contains too many elements.")
    # Serialized layout: entry count, then for each entry (sorted by key):
    # key length, key bytes, value length, value bytes.
    serialized = bytearray(struct.pack(">H", entry_count))
    # Encode strings first to catch bad values.
    normalized_pairs = []
    for key, value in encryption_context.items():
        try:
            if isinstance(key, bytes):
                key = codecs.decode(key)
            if isinstance(value, bytes):
                value = codecs.decode(value)
            normalized_pairs.append((
                aws_encryption_sdk.internal.str_ops.to_bytes(key),
                aws_encryption_sdk.internal.str_ops.to_bytes(value),
            ))
        except Exception:
            raise SerializationError(
                "Cannot encode dictionary key or value using {}.".format(
                    aws_encryption_sdk.internal.defaults.ENCODING))
    for key, value in sorted(normalized_pairs, key=lambda pair: pair[0]):
        serialized.extend(struct.pack(
            ">H{key_size}sH{value_size}s".format(key_size=len(key), value_size=len(value)),
            len(key), key, len(value), value,
        ))
    if len(serialized) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
        raise SerializationError("The serialized context is too large.")
    return bytes(serialized)
def get_area_def(self, key, info=None):
    """Create AreaDefinition for the specified product.

    Projection information is hard coded for a 0-degree geos projection:
    the test dataset doesn't provide the values in the file container,
    only fill values are inserted.
    """
    # TODO: get the projection information from the input file instead.
    semi_major_axis = 6378169.
    semi_minor_axis = 6356583.8
    satellite_height = 35785831.
    subsatellite_lon = 0.
    area_extent = (-5570248.4773392612, -5567248.074173444,
                   5567248.074173444, 5570248.4773392612)
    proj_dict = {'a': float(semi_major_axis),
                 'b': float(semi_minor_axis),
                 'lon_0': float(subsatellite_lon),
                 'h': float(satellite_height),
                 'proj': 'geos',
                 'units': 'm'}
    area = geometry.AreaDefinition('LI_area_name', "LI area", 'geosli',
                                   proj_dict, self.ncols, self.nlines,
                                   area_extent)
    self.area = area
    logger.debug("Dataset area definition: \n {}".format(area))
    return area
def camelHump(text):
    """Converts the inputted text to camel humps by joining all
    capital letters together (The Quick, Brown,
    Fox.Tail -> TheQuickBrownFoxTail)

    :param:     text        <str>       text to be changed
    :return:    <str>

    :usage:     |import projex.text
                |print projex.text.camelHump('The,Quick, Brown, Fox.Tail')
    """
    # Capitalize each word, then glue them together.
    capitalized = [token[0].upper() + token[1:] for token in words(text)]
    output = ''.join(capitalized)
    # First character is lowered, giving lowerCamelCase.
    if output:
        output = output[0].lower() + output[1:]
    return output
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.