signature (string, lengths 29–44.1k) | implementation (string, lengths 0–85.2k)
|---|---|
def register_on_guest_keyboard(self, callback):
    """Register *callback* to consume guest-keyboard events.

    The callback receives an IGuestKeyboardEvent object.

    Example:
        def callback(event):
            print(event.scancodes)
    """
    event_type = library.VBoxEventType.on_guest_keyboard
    return self.event_source.register_callback(callback, event_type)
|
def get_and_clear_context(self):
    """Grab and reset all broks, actions, external commands and homerun.

    :return: tuple (broks, actions, wait_homerun, pushed_commands)
    """
    snapshot = (self.broks, self.actions, self.wait_homerun, self.pushed_commands)
    # Reset every container to a fresh empty instance of the same kind.
    self.broks, self.pushed_commands = [], []
    self.actions, self.wait_homerun = {}, {}
    return snapshot
|
def _check_module_is_image_embedding(module_spec):
    """Raise ValueError if `module_spec` is not usable as an image embedding.

    Args:
      module_spec: A `_ModuleSpec` to test.

    Raises:
      ValueError: if the `module_spec` default signature is not compatible
        with mapping an "images" input to a Tensor(float32, shape=(_, K)).
    """
    problems = []
    # Check the "default" signature inputs: a single float32 input named
    # "images", from which the expected image size can be inferred.
    inputs = module_spec.get_input_info_dict()
    if (list(inputs.keys()) != ["images"] or inputs["images"].dtype != tf.float32):
        problems.append("Module 'default' signature must require a single input, "
                        "which must have type float32 and name 'images'.")
    else:
        try:
            image_util.get_expected_image_size(module_spec)
        except ValueError as err:
            # Re-raised (wrapped) below.
            problems.append("Module does not support hub.get_expected_image_size(); "
                            "original error was:\n" + str(err))
    # Check the "default" signature outputs: dtype and shape must be suitable
    # for use in input_layer(), i.e. a rank-2 float32 with known inner size.
    outputs = module_spec.get_output_info_dict()
    if "default" not in outputs:
        problems.append("Module 'default' signature must have a 'default' output.")
    else:
        out_dtype = outputs["default"].dtype
        out_shape = outputs["default"].get_shape()
        if not (out_dtype == tf.float32 and out_shape.ndims == 2 and out_shape.dims[1].value):
            problems.append("Module 'default' signature must have a 'default' output "
                            "of tf.Tensor(shape=(_,K), dtype=float32).")
    if problems:
        raise ValueError("Module is not usable as image embedding: %r" % problems)
|
def get_processing_block_ids(self):
    """Return the sorted list of processing block ids found in the database."""
    # Keys look like '<prefix>:processing_block:<id>'; the id is the last
    # colon-separated field.
    pattern = '*:processing_block:*'
    return sorted(key.split(':')[-1] for key in self._db.get_ids(pattern))
|
def has_credentials(self):
    """Does this session have valid credentials?

    :rtype: bool
    """
    # Every field listed for this provider must have a non-None '_<field>'
    # attribute on the session.
    required = self.CredentialMap.get(self.provider_name)
    return all(getattr(self, '_%s' % name, None) is not None for name in required)
|
def save(self, *args, **kwargs):
    """When creating a new record, fill CPU and RAM info if available.

    After saving, if this is a new record and the device model has an
    integrated antenna, an Antenna object is created for the device too.
    """
    adding_new = False
    # Treat the record as "new" when it has no primary key yet, or when
    # neither cpu nor ram has been filled in.
    if not self.pk or (not self.cpu and not self.ram):
        # if self.model.cpu is not empty
        if self.model.cpu:
            self.cpu = self.model.cpu
        # if self.model.ram is not empty
        if self.model.ram:
            self.ram = self.model.ram
        # mark to add a new antenna
        adding_new = True
    # perform save
    super(DeviceToModelRel, self).save(*args, **kwargs)
    # after Device2Model object has been saved
    try:
        # does the device model have an integrated antenna?
        antenna_model = self.model.antennamodel
    except AntennaModel.DoesNotExist:
        # if not, antenna_model is False
        antenna_model = False
    # if we are adding a new device2model and the device model has an integrated antenna
    if adding_new and antenna_model:
        # create new Antenna object
        antenna = Antenna(device=self.device, model=self.model.antennamodel)
        # retrieve wireless interfaces and assign the first one to the antenna
        # object if possible (type=2 presumably means wireless — TODO confirm)
        wireless_interfaces = self.device.interface_set.filter(type=2)
        if len(wireless_interfaces) > 0:
            antenna.radio = wireless_interfaces[0]
        # save antenna
        antenna.save()
|
def pca_ellipse(data, loc=None, ax=None, **ellipse_kwargs):
    '''Finds the 2d PCA ellipse of given data and plots it.

    loc: center of the ellipse [default: mean of the data]
    '''
    # Late import; sklearn is optional
    from sklearn.decomposition import PCA
    pca = PCA(n_components=2).fit(data)
    loc = pca.mean_ if loc is None else loc
    ax = plt.gca() if ax is None else ax
    # Scale principal axes by explained variance; the SVD of the result gives
    # the ellipse half-axis lengths (singular values) and orientation (u).
    cov = pca.explained_variance_ * pca.components_.T
    u, s, v = np.linalg.svd(cov)
    width, height = 2 * np.sqrt(s[:2])
    angle = np.rad2deg(np.arctan2(u[1, 0], u[0, 0]))
    ellipse = Ellipse(xy=loc, width=width, height=height, angle=angle, **ellipse_kwargs)
    ax.add_patch(ellipse)
    return ellipse
|
def partitioned_iterator(self, partition_size, shuffle=True, seed=None):
    """Return a partitioning :class:`audiomate.feeding.FrameIterator` for the dataset.

    Args:
        partition_size (str): Size of the partitions in bytes. The units ``k`` (kibibytes),
                              ``m`` (mebibytes) and ``g`` (gibibytes) are supported, i.e. a
                              ``partition_size`` of ``1g`` equates :math:`2^{30}` bytes.
        shuffle (bool): Indicates whether the data should be returned in
                        random order (``True``) or not (``False``).
        seed (int): Seed to be used for the random number generator.

    Returns:
        FrameIterator: A partition iterator over the dataset.
    """
    return iterator.FrameIterator(self.utt_ids, self.containers, partition_size,
                                  shuffle=shuffle, seed=seed)
|
def check(self):
    """Determine how long until the next scheduled time for a Task.

    Returns the number of seconds until the next scheduled time, or zero
    if the task needs to be run immediately.
    If it's an hourly task and it's never been run, run it now.
    If it's a daily task and it's never been run and the hour is right, run it now.
    """
    boto.log.info('checking Task[%s]-now=%s, last=%s' % (self.name, self.now, self.last_executed))
    # Never-run hourly tasks fire immediately.
    if self.hourly and not self.last_executed:
        return 0
    # Never-run daily tasks fire only in their scheduled hour; otherwise wait
    # the absolute hour difference (NOTE(review): this does not wrap around
    # midnight — confirm that is intended).
    if self.daily and not self.last_executed:
        if int(self.hour) == self.now.hour:
            return 0
        else:
            return max((int(self.hour) - self.now.hour), (self.now.hour - int(self.hour))) * 60 * 60
    delta = self.now - self.last_executed
    if self.hourly:
        # Hourly task: due once at least an hour has passed since the last run.
        if delta.seconds >= 60 * 60:
            return 0
        else:
            return 60 * 60 - delta.seconds
    else:
        if int(self.hour) == self.now.hour:
            # Daily task in the right hour: due if a full day has passed.
            if delta.days >= 1:
                return 0
            else:
                # 23 hours, just to be safe
                return 82800
        else:
            return max((int(self.hour) - self.now.hour), (self.now.hour - int(self.hour))) * 60 * 60
|
def percentile_between(self, min_percentile, max_percentile, mask=NotSpecified):
    """Construct a new Filter representing entries from the output of this
    Factor that fall within the percentile range defined by min_percentile
    and max_percentile.

    Parameters
    ----------
    min_percentile : float [0.0, 100.0]
        Return True for assets falling above this percentile in the data.
    max_percentile : float [0.0, 100.0]
        Return True for assets falling below this percentile in the data.
    mask : zipline.pipeline.Filter, optional
        A Filter representing assets to consider when percentile
        calculating thresholds. If mask is supplied, percentile cutoffs
        are computed each day using only assets for which ``mask`` returns
        True. Assets for which ``mask`` produces False will produce False
        in the output of this Factor as well.

    Returns
    -------
    out : zipline.pipeline.filters.PercentileFilter
        A new filter that will compute the specified percentile-range mask.

    See Also
    --------
    zipline.pipeline.filters.filter.PercentileFilter
    """
    return PercentileFilter(
        self,
        min_percentile=min_percentile,
        max_percentile=max_percentile,
        mask=mask,
    )
|
def logEndTime():
    """Write end info (banner, status and timestamp) to the log."""
    banner = '#' * 70
    logger.info('\n' + banner)
    logger.info('Complete')
    logger.info(datetime.today().strftime("%A, %d %B %Y %I:%M%p"))
    logger.info(banner + '\n')
|
def _valid_packet ( raw_packet ) :
"""Validate incoming packet ."""
|
if raw_packet [ 0 : 1 ] != b'\xcc' :
return False
if len ( raw_packet ) != 19 :
return False
checksum = 0
for i in range ( 1 , 17 ) :
checksum += raw_packet [ i ]
if checksum != raw_packet [ 18 ] :
return False
return True
|
def _get_label_encoder_and_max(self, x):
    """Return a mapping from values of a column to integer labels, plus the max label.

    Args:
        x (pandas.Series): a categorical column to encode.

    Returns:
        label_encoder (dict): mapping from values of features to integers
        max_label (int): maximum label
    """
    # NaN cannot be used as a key for dict. So replace it with a random integer.
    counts = x.fillna(NAN_INT).value_counts()
    n_uniq = counts.shape[0]
    counts = counts[counts >= self.min_obs]
    n_uniq_new = counts.shape[0]
    # If every label appears more than min_obs, new labels start from 0.
    # Otherwise, new labels start from 1 and 0 is used for all old labels
    # that appear less than min_obs.
    offset = 0 if n_uniq == n_uniq_new else 1
    encoder = pd.Series(np.arange(n_uniq_new) + offset, index=counts.index)
    max_label = encoder.max()
    return encoder.to_dict(), max_label
|
def from_pandas(cls, index):
    """Create a baloo Index from a pandas Index.

    Parameters
    ----------
    index : pandas.base.Index

    Returns
    -------
    Index
    """
    # Imported lazily so pandas stays an optional dependency.
    from pandas import Index as PandasIndex
    check_type(index, PandasIndex)
    return Index(index.values, index.dtype, index.name)
|
def _get_key_info ( self ) :
"""Get key info appropriate for signing ."""
|
if self . _key_info_cache is None :
self . _key_info_cache = agent_key_info_from_key_id ( self . key_id )
return self . _key_info_cache
|
def _observe_keep_screen_on(self, change):
    """Sets or clears the flag that keeps the device screen on."""
    def apply_flag(window):
        from .android_window import Window
        window = Window(__id__=window)
        # Toggle FLAG_KEEP_SCREEN_ON according to the current attribute value.
        if self.keep_screen_on:
            window.addFlags(Window.FLAG_KEEP_SCREEN_ON)
        else:
            window.clearFlags(Window.FLAG_KEEP_SCREEN_ON)
    # getWindow() resolves asynchronously; apply the flag once it is available.
    self.widget.getWindow().then(apply_flag)
|
def open_if_exists(filename, mode='rb'):
    """Return an open file object for `filename` if that file exists,
    otherwise `None`.

    Errors other than "file not found" / "is a directory" propagate.
    """
    try:
        return open(filename, mode)
    except IOError as e:
        # Bug fix: the original used the Python 2-only `except IOError, e`
        # syntax, which is a SyntaxError on Python 3.
        # ENOENT: file missing; EISDIR: path is a directory -> treat as absent.
        if e.errno not in (errno.ENOENT, errno.EISDIR):
            raise
        return None
|
def get_rva_from_offset(self, offset):
    """Get the RVA corresponding to this file offset.

    If the offset does not fall inside any section, it is assumed to lie in
    the headers (or before where the earliest section starts) and is returned
    unchanged as the RVA. The case illustrating this behavior can be found at:
    http://corkami.blogspot.com/2010/01/hey-hey-hey-whats-in-your-head.html
    where the import table is not contained by any section, hence the RVA
    needs to be resolved to a raw offset.
    """
    section = self.get_section_by_offset(offset)
    if not section:
        # Fix: the original computed the lowest section RVA and compared the
        # offset against it, but BOTH branches of that comparison returned
        # `offset` — the computation was dead work, so it is dropped here.
        # A stricter implementation could instead raise
        # PEFormatError("specified offset (0x%x) doesn't belong to any section." % offset)
        return offset
    return section.get_rva_from_offset(offset)
|
def mode_yubikey_otp(self, private_uid, aes_key):
    """Set the YubiKey up for standard OTP validation.

    private_uid may be raw bytes or hex-encoded with an 'h:' prefix.
    Raises YubiKeyVersionError if the device lacks Yubico OTP support,
    and InputError if the UID has the wrong length.
    """
    if not self.capabilities.have_yubico_OTP():
        raise yubikey_base.YubiKeyVersionError('Yubico OTP not available in %s version %d.%d' % (self.capabilities.model, self.ykver[0], self.ykver[1]))
    # Accept 'h:<hex>' encoded UIDs and convert them to raw bytes.
    if private_uid.startswith(b'h:'):
        private_uid = binascii.unhexlify(private_uid[2:])
    if len(private_uid) != yubikey_defs.UID_SIZE:
        raise yubico_exception.InputError('Private UID must be %i bytes' % (yubikey_defs.UID_SIZE))
    self._change_mode('YUBIKEY_OTP', major=0, minor=9)
    self.uid = private_uid
    # NOTE(review): aes_key appears to be a setter method here (it is called,
    # not assigned) — confirm against the class definition.
    self.aes_key(aes_key)
|
def calculate_normals(vertices):
    """Return Nx3 normal array from Nx3 vertex array."""
    verts = np.array(vertices, dtype=float)
    normals = np.zeros_like(verts)
    # Walk the vertices three at a time; each complete triple is one triangle.
    for start in range(0, verts.shape[0] - 2, 3):
        end = start + 3
        # Edge vectors 2-1 and 3-1 of the triangle.
        edges = np.vstack((verts[start + 1] - verts[start],
                           verts[start + 2] - verts[start]))
        # Normalize the edge vectors.
        edges /= np.linalg.norm(edges, axis=1, keepdims=True)
        # The triangle normal is the cross product of the two edges,
        # normalized to unit length and shared by all three vertices.
        n = np.cross(*edges)
        normals[start:end, :] = n / np.linalg.norm(n)
    return normals
|
def generate_make_string(out_f, max_step):
    """Generate the make_string template.

    Emits make_string specialisations for chunk sizes that are powers of two
    from max_step down to 1, inside the versioned impl namespace. Chunks
    larger than 64 characters are wrapped in an #ifndef __SUNPRO_CC guard.
    """
    # Powers of two from max_step down to 1, e.g. max_step=8 -> [8, 4, 2, 1].
    steps = [2 ** n for n in xrange(int(math.log(max_step, 2)), -1, -1)]
    with Namespace(out_f, ['boost', 'metaparse', 'v{0}'.format(VERSION), 'impl']) as nsp:
        generate_take(out_f, steps, nsp.prefix())
        # Primary template plus the terminating <0, 0> specialisation.
        out_f.write(
            '{0}template <int LenNow, int LenRemaining, char... Cs>\n'
            '{0}struct make_string;\n'
            '\n'
            '{0}template <char... Cs>'
            ' struct make_string<0, 0, Cs...> : string<> {{}};\n'
            .format(nsp.prefix())
        )
        disable_sun = False
        # Emit specialisations from the smallest chunk size up; open the Sun
        # compiler guard just before the first chunk larger than 64.
        for i in reversed(steps):
            if i > 64 and not disable_sun:
                out_f.write('#ifndef __SUNPRO_CC\n')
                disable_sun = True
            out_f.write(
                '{0}template <int LenRemaining,{1}char... Cs>'
                ' struct make_string<{2},LenRemaining,{3}Cs...> :'
                ' concat<string<{4}>,'
                ' typename make_string<take(LenRemaining),'
                'LenRemaining-take(LenRemaining),Cs...>::type> {{}};\n'
                .format(
                    nsp.prefix(),
                    ''.join('char {0},'.format(n) for n in unique_names(i)),
                    i,
                    ''.join('{0},'.format(n) for n in unique_names(i)),
                    ','.join(unique_names(i))
                )
            )
        if disable_sun:
            out_f.write('#endif\n')
|
def make_ndarray(tensor):
    """Create a numpy ndarray from a tensor.

    Create a numpy ndarray with the same shape and data as the tensor.

    Args:
      tensor: A TensorProto.

    Returns:
      A numpy array with the tensor contents.

    Raises:
      TypeError: if tensor has unsupported type.
    """
    shape = [d.size for d in tensor.tensor_shape.dim]
    num_elements = np.prod(shape, dtype=np.int64)
    tensor_dtype = dtypes.as_dtype(tensor.dtype)
    dtype = tensor_dtype.as_numpy_dtype
    # Dense packed content takes precedence over the typed *_val fields.
    if tensor.tensor_content:
        return np.frombuffer(tensor.tensor_content, dtype=dtype).copy().reshape(shape)
    elif tensor_dtype == dtypes.float16 or tensor_dtype == dtypes.bfloat16:
        # the half_val field of the TensorProto stores the binary representation
        # of the fp16: we need to reinterpret this as a proper float16
        if len(tensor.half_val) == 1:
            # A single stored value is broadcast to the full shape.
            tmp = np.array(tensor.half_val[0], dtype=np.uint16)
            tmp.dtype = tensor_dtype.as_numpy_dtype
            return np.repeat(tmp, num_elements).reshape(shape)
        else:
            tmp = np.fromiter(tensor.half_val, dtype=np.uint16)
            tmp.dtype = tensor_dtype.as_numpy_dtype
            return tmp.reshape(shape)
    elif tensor_dtype == dtypes.float32:
        if len(tensor.float_val) == 1:
            return np.repeat(np.array(tensor.float_val[0], dtype=dtype), num_elements).reshape(shape)
        else:
            return np.fromiter(tensor.float_val, dtype=dtype).reshape(shape)
    elif tensor_dtype == dtypes.float64:
        if len(tensor.double_val) == 1:
            return np.repeat(np.array(tensor.double_val[0], dtype=dtype), num_elements).reshape(shape)
        else:
            return np.fromiter(tensor.double_val, dtype=dtype).reshape(shape)
    elif tensor_dtype in [dtypes.int32, dtypes.uint8, dtypes.uint16, dtypes.int16, dtypes.int8, dtypes.qint32, dtypes.quint8, dtypes.qint8, dtypes.qint16, dtypes.quint16, ]:
        # All small integer and quantized types share the int_val field.
        if len(tensor.int_val) == 1:
            return np.repeat(np.array(tensor.int_val[0], dtype=dtype), num_elements).reshape(shape)
        else:
            return np.fromiter(tensor.int_val, dtype=dtype).reshape(shape)
    elif tensor_dtype == dtypes.int64:
        if len(tensor.int64_val) == 1:
            return np.repeat(np.array(tensor.int64_val[0], dtype=dtype), num_elements).reshape(shape)
        else:
            return np.fromiter(tensor.int64_val, dtype=dtype).reshape(shape)
    elif tensor_dtype == dtypes.string:
        if len(tensor.string_val) == 1:
            return np.repeat(np.array(tensor.string_val[0], dtype=dtype), num_elements).reshape(shape)
        else:
            return np.array([x for x in tensor.string_val], dtype=dtype).reshape(shape)
    elif tensor_dtype == dtypes.complex64:
        # Complex values are stored as interleaved (real, imag) pairs.
        it = iter(tensor.scomplex_val)
        if len(tensor.scomplex_val) == 2:
            return np.repeat(np.array(complex(tensor.scomplex_val[0], tensor.scomplex_val[1]), dtype=dtype), num_elements, ).reshape(shape)
        else:
            return np.array([complex(x[0], x[1]) for x in zip(it, it)], dtype=dtype).reshape(shape)
    elif tensor_dtype == dtypes.complex128:
        it = iter(tensor.dcomplex_val)
        if len(tensor.dcomplex_val) == 2:
            return np.repeat(np.array(complex(tensor.dcomplex_val[0], tensor.dcomplex_val[1]), dtype=dtype), num_elements, ).reshape(shape)
        else:
            return np.array([complex(x[0], x[1]) for x in zip(it, it)], dtype=dtype).reshape(shape)
    elif tensor_dtype == dtypes.bool:
        if len(tensor.bool_val) == 1:
            return np.repeat(np.array(tensor.bool_val[0], dtype=dtype), num_elements).reshape(shape)
        else:
            return np.fromiter(tensor.bool_val, dtype=dtype).reshape(shape)
    else:
        raise TypeError("Unsupported tensor type: %s" % tensor.dtype)
|
def poll(target, step, args=(), kwargs=None, timeout=None, max_tries=None,
         check_success=is_truthy, step_function=step_constant,
         ignore_exceptions=(), poll_forever=False, collect_values=None, *a, **k):
    """Poll by calling a target function until a certain condition is met.

    You must specify at least a target function to be called and the step --
    base wait time between each function call.

    :param step: Step defines the amount of time to wait (in seconds)
    :param args: Arguments to be passed to the target function
    :type kwargs: dict
    :param kwargs: Keyword arguments to be passed to the target function
    :param timeout: The target function will be called until the time elapsed is greater than the
        maximum timeout (in seconds). NOTE that the actual execution time of the function *can*
        exceed the time specified in the timeout.
    :param max_tries: Maximum number of times the target function will be called before failing
    :param check_success: A callback that accepts the return value of the target function and
        returns true when polling should stop and return that value. The default tests for
        truthiness.
    :param step_function: A callback that accepts each iteration's "step" and returns the next
        one; constant by default.
    :type ignore_exceptions: tuple
    :param ignore_exceptions: Exceptions of these types raised by the target are caught and the
        exception instance is pushed to the queue of collected values; any other exception
        propagates.
    :param poll_forever: If true, retry until an exception is raised or check_success passes.
        If not set, a timeout or max_tries must be set.
    :type collect_values: Queue
    :param collect_values: Optional queue used to collect all of the target's return values;
        a new Queue is created by default.
    :return: The first value from the target function that satisfies check_success.
    """
    # Guard against a configuration that would silently loop forever.
    assert (timeout is not None or max_tries is not None) or poll_forever, \
        ('You did not specify a maximum number of tries or a timeout. Without either of these set, the polling '
         'function will poll forever. If this is the behavior you want, pass "poll_forever=True"')
    assert not ((timeout is not None or max_tries is not None) and poll_forever), \
        'You cannot specify both the option to poll_forever and max_tries/timeout.'
    kwargs = kwargs or dict()
    values = collect_values or Queue()
    # Absolute deadline, if a timeout was requested.
    max_time = time.time() + timeout if timeout else None
    tries = 0
    last_item = None
    while True:
        if max_tries is not None and tries >= max_tries:
            raise MaxCallException(values, last_item)
        try:
            val = target(*args, **kwargs)
            last_item = val
        except ignore_exceptions as e:
            # Ignored exceptions are recorded like ordinary return values.
            last_item = e
        else:
            # Condition passes, this is the only "successful" exit from the polling function
            if check_success(val):
                return val
        values.put(last_item)
        tries += 1
        # Check the time after to make sure the poll function is called at least once
        if max_time is not None and time.time() >= max_time:
            raise TimeoutException(values, last_item)
        time.sleep(step)
        step = step_function(step)
|
def visit_and_update(self, visitor_fn):
    """Create an updated version (if needed) of BetweenClause via the visitor pattern."""
    lower = self.lower_bound.visit_and_update(visitor_fn)
    upper = self.upper_bound.visit_and_update(visitor_fn)
    # Rebuild the clause only if either bound was actually replaced.
    unchanged = lower is self.lower_bound and upper is self.upper_bound
    if unchanged:
        return visitor_fn(self)
    return visitor_fn(BetweenClause(self.field, lower, upper))
|
def get_channel(self, name):
    """Get a channel by name. To get the names, use get_channels.

    :param string name: Name of channel to get
    :returns dict conn: A channel attribute dictionary.
    """
    # URL-encode the name (including '/') before substituting it into the path.
    encoded = quote(name, '')
    path = Client.urls['channels_by_name'] % encoded
    return self._call(path, 'GET')
|
def relative_to(self, *other):
    """Return the relative path to another path identified by the passed
    arguments. If the operation is not possible (because this is not
    a subpath of the other path), raise ValueError.
    """
    # For the purpose of this method, drive and root are considered
    # separate parts, i.e.:
    #   Path('c:/').relative_to('c:')  gives Path('/')
    #   Path('c:/').relative_to('/')   raises ValueError
    if not other:
        raise TypeError("need at least one argument")
    parts = self._parts
    drv = self._drv
    root = self._root
    if root:
        # Anchored path: compare as [drive, root, parts...].
        abs_parts = [drv, root] + parts[1:]
    else:
        abs_parts = parts
    to_drv, to_root, to_parts = self._parse_args(other)
    if to_root:
        to_abs_parts = [to_drv, to_root] + to_parts[1:]
    else:
        to_abs_parts = to_parts
    n = len(to_abs_parts)
    cf = self._flavour.casefold_parts
    # Fail when the target is not a (case-folded) prefix of this path, or
    # when the target is empty but this path is anchored.
    if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):
        formatted = self._format_parsed_parts(to_drv, to_root, to_parts)
        raise ValueError("{!r} does not start with {!r}".format(str(self), str(formatted)))
    return self._from_parsed_parts('', root if n == 1 else '', abs_parts[n:])
|
def create_cloud_user(cfg, args):
    """Attempt to create the user on the cloud node.

    :param cfg: mapping with 'api_server', 'email' and 'api_key' entries.
    :param args: namespace carrying user_email, user_name and user_role.
    :returns: the decoded JSON response from the cloud node.
    :raises Exception: if the cloud node answers with a non-2xx status.
    """
    url = cfg['api_server'] + "admin/add-user"
    params = {
        'user_email': args.user_email,
        'user_name': args.user_name,
        'user_role': args.user_role,
        'email': cfg['email'],
        'api_key': cfg['api_key'],
    }
    headers = {'Content-Type': 'application/json'}
    response = requests.post(url, data=json.dumps(params), headers=headers)
    # Accept the whole 2xx success class. The original used
    # `range(200, 299)`, which wrongly rejected status 299.
    if not (200 <= response.status_code < 300):
        raise Exception("Errors contacting the cloud node: %s" % (response.content))
    return json.loads(response.content)
|
def fetch(self, transfer_id, data={}, **kwargs):
    """Fetch Transfer for given Id.

    Args:
        transfer_id: Id for which transfer object has to be retrieved

    Returns:
        Transfer dict for given transfer Id
    """
    # Delegate straight to the base resource implementation.
    parent = super(Transfer, self)
    return parent.fetch(transfer_id, data, **kwargs)
|
def set_keywords(self, keywords):
    """Pass an array to filter the result by keywords.

    :param keywords
    """
    # Keywords are '+'-joined and appended after the KEYWORDS parameter name.
    joined = '+'.join(keywords)
    self._query_params += str(QueryParam.KEYWORDS) + joined
|
def get_osds(service, device_class=None):
    """Return a list of all Ceph Object Storage Daemons currently in the
    cluster (optionally filtered by storage device class).

    :param device_class: Class of storage device for OSD's
    :type device_class: str
    """
    luminous_or_later = cmp_pkgrevno('ceph-common', '12.0.0') >= 0
    base_cmd = ['ceph', '--id', service, 'osd']
    if luminous_or_later and device_class:
        # Device-class filtering needs the Luminous CRUSH class commands.
        cmd = base_cmd + ['crush', 'class', 'ls-osd', device_class, '--format=json']
    else:
        cmd = base_cmd + ['ls', '--format=json']
    out = check_output(cmd)
    if six.PY3:
        out = out.decode('UTF-8')
    return json.loads(out)
|
def vertices(self):
    '''A dictionary of four points where the axes intersect the ellipse, dict.'''
    points = {'a': self.a, 'b': self.b}
    points['a_neg'] = self.a_neg
    points['b_neg'] = self.b_neg
    return points
|
def rot_rads(self, rads):
    """Rotate vector by angle in radians."""
    cos_t, sin_t = math.cos(rads), math.sin(rads)
    # Compute both components from the pre-rotation state, then assign.
    self.x, self.y = (self.x * cos_t - self.y * sin_t,
                      self.x * sin_t + self.y * cos_t)
|
def set_position_target_local_ned_send(self, time_boot_ms, target_system, target_component, coordinate_frame, type_mask, x, y, z, vx, vy, vz, afx, afy, afz, yaw, yaw_rate, force_mavlink1=False):
    '''Sets a desired vehicle position in a local north-east-down coordinate
    frame. Used by an external controller to command the vehicle (manual
    controller or other system).

    time_boot_ms      : Timestamp in milliseconds since system boot (uint32_t)
    target_system     : System ID (uint8_t)
    target_component  : Component ID (uint8_t)
    coordinate_frame  : MAV_FRAME_LOCAL_NED=1, MAV_FRAME_LOCAL_OFFSET_NED=7,
                        MAV_FRAME_BODY_NED=8, MAV_FRAME_BODY_OFFSET_NED=9 (uint8_t)
    type_mask         : Bitmask of dimensions the vehicle should ignore;
                        bits 1-3: x/y/z, bits 4-6: vx/vy/vz, bits 7-9: ax/ay/az,
                        bit 10: afx/afy/afz are force instead of acceleration,
                        bit 11: yaw, bit 12: yaw rate (uint16_t)
    x, y, z           : Position in NED frame in meters (z negative for altitude) (float)
    vx, vy, vz        : Velocity in NED frame in meter/s (float)
    afx, afy, afz     : Acceleration (or force if bit 10 of type_mask is set)
                        in NED frame in meter/s^2 or N (float)
    yaw               : yaw setpoint in rad (float)
    yaw_rate          : yaw rate setpoint in rad/s (float)
    '''
    # Encode the message first, then hand it to the transport layer.
    message = self.set_position_target_local_ned_encode(
        time_boot_ms, target_system, target_component, coordinate_frame,
        type_mask, x, y, z, vx, vy, vz, afx, afy, afz, yaw, yaw_rate)
    return self.send(message, force_mavlink1=force_mavlink1)
|
def projection(self, exprs):
    """Like mutate, but do not include existing table columns."""
    w = self._get_window()
    resolved = self.table._resolve(exprs)
    # Wrap each expression in the window before projecting.
    windowed = [L.windowize_function(e, w=w) for e in resolved]
    return self.table.projection(windowed)
|
def replace_negative_size_with_batch_size(shape, batch_size):
    """Replace all dimensions with negative values by the batch size."""
    # A negative dimension is the placeholder for the batch size.
    dims = [batch_size if d < 0 else d for d in shape.dim]
    out_shape = nnabla_pb2.Shape()
    out_shape.dim.extend(dims)
    return out_shape
|
def deprecated_opts_signature(args, kwargs):
    """Utility to help with the deprecation of the old .opts method signature.

    Returns whether opts.apply_groups should be used (as a bool) and the
    corresponding options.
    """
    from .options import Options
    groups = set(Options._option_groups)
    # Keyword names other than 'clone' that the caller supplied.
    opts = {kw for kw in kwargs if kw != 'clone'}
    apply_groups = False
    options = None
    new_kwargs = {}
    if len(args) > 0 and isinstance(args[0], dict):
        apply_groups = True
        # A dict positional argument is treated as grouped options unless
        # neither its top-level keys nor all of its nested dicts' keys are
        # option-group names.
        if (not set(args[0]).issubset(groups) and
                all(isinstance(v, dict) and not set(v).issubset(groups)
                    for v in args[0].values())):
            apply_groups = False
        elif set(args[0].keys()) <= groups:
            new_kwargs = args[0]
        else:
            options = args[0]
    elif opts and opts.issubset(set(groups)):
        # All supplied keywords are group names.
        apply_groups = True
    elif kwargs.get('options', None) is not None:
        apply_groups = True
    elif not args and not kwargs:
        apply_groups = True
    return apply_groups, options, new_kwargs
|
def reduce_hierarchy(x, depth):
    """Reduce the hierarchy ('|'-separated) string to the specified level."""
    parts = x.split('|')
    if depth < 0:
        # Negative depths count back from the full depth of the string.
        depth = len(parts) + depth - 1
    return '|'.join(parts[:depth + 1])
|
def unique(seq):
    """Return the unique values in a sequence.

    Parameters
    ----------
    seq : sequence
        Sequence with (possibly duplicate) elements.

    Returns
    -------
    unique : list
        Unique elements of ``seq``; order is the same as in ``seq``.

    Examples
    --------
    >>> unique([1, 2, 3, 3])
    [1, 2, 3]
    >>> unique((1, [1], [1]))  # also works with unhashable types
    [1, [1]]
    """
    try:
        # Hashable elements: OrderedDict keys de-duplicate in O(n) while
        # preserving first-seen order.
        return list(OrderedDict.fromkeys(seq))
    except TypeError:
        # Unhashable elements: fall back to an O(n^2) membership scan.
        seen = []
        for item in seq:
            if item in seen:
                continue
            seen.append(item)
        return seen
|
def get_authorizations_ids_by_vault(self, vault_ids):
    """Gets the list of ``Authorization Ids`` corresponding to a list of ``Vault`` objects.

    arg:    vault_ids (osid.id.IdList): list of vault ``Ids``
    return: (osid.id.IdList) - list of authorization ``Ids``
    raise:  NullArgument - ``vault_ids`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceBinSession.get_resource_ids_by_bin
    return IdList([authz.get_id()
                   for authz in self.get_authorizations_by_vault(vault_ids)])
|
def generate_all_kmers(k, alphabet='ACGT'):
    """Generate all possible k-mers over an alphabet (DNA bases by default).

    Args:
        k: Length of each k-mer.
        alphabet: Iterable of single-character symbols (default 'ACGT',
            preserving the original DNA behavior).

    Returns:
        List of all ``len(alphabet) ** k`` k-mer strings, ordered
        lexicographically by the alphabet's own order.

    Example:
        >>> generate_all_kmers(2)
        ['AA', 'AC', 'AG', 'AT', 'CA', 'CC', 'CG', 'CT', 'GA', 'GC', 'GG', 'GT', 'TA', 'TC', 'TG', 'TT']
    """
    return [''.join(p) for p in itertools.product(alphabet, repeat=k)]
|
def do_prune(self):
    """Return True if prune_table, prune_column, and prune_date are all set.

    If only a subset of the prune variables is overridden, an exception is
    raised to remind the user to implement all or none.
    Prune (data newer than prune_date deleted) before copying new data in.
    """
    prune_settings = (self.prune_table, self.prune_column, self.prune_date)
    if all(prune_settings):
        return True
    if any(prune_settings):
        # Partial configuration is almost certainly a mistake.
        raise Exception('override zero or all prune variables')
    return False
|
def row(values, width=WIDTH, format_spec=FMT, align=ALIGN, style=STYLE):
    """Returns a formatted row of data.

    Parameters
    ----------
    values : array_like
        Iterable of data (numbers or strings); each value is printed in a
        separate column.
    width : int
        The width of each column (Default: 11)
    format_spec : string or list of strings
        The precision format string used to format numbers in the values
        array (Default: '5g')
    align : string
        The alignment to use ('left', 'center', or 'right'). (Default: 'right')
    style : namedtuple, optional
        A line formatting style

    Returns
    -------
    rowstr : string
        A string consisting of the full row of data to print
    """
    tablestyle = STYLES[style]
    # Materialize once: the previous len(list(values)) exhausted generator
    # inputs so the subsequent zip() produced an empty row.
    values = list(values)
    widths = parse_width(width, len(values))
    # Use boolean `or`, not bitwise `|`, to combine the isinstance checks.
    assert isinstance(format_spec, string_types) or isinstance(format_spec, list), \
        "format_spec must be a string or list of strings"
    if isinstance(format_spec, string_types):
        format_spec = [format_spec] * len(values)

    def mapdata(val):
        # Unpack the (column width, datum, precision) triple.
        width, datum, prec = val
        if isinstance(datum, string_types):
            # Widen by the ANSI escape length so colored text aligns correctly.
            return ('{:%s%i}' % (ALIGNMENTS[align], width + ansi_len(datum))).format(datum)
        elif isinstance(datum, Number):
            return ('{:%s%i.%s}' % (ALIGNMENTS[align], width, prec)).format(datum)
        else:
            raise ValueError('Elements in the values array must be strings, ints, or floats')

    # string formatter
    data = map(mapdata, zip(widths, values, format_spec))
    # build the row string
    return format_line(data, tablestyle.row)
|
def do_cp(self, line):
    """cp SOURCE DEST               Copy a single SOURCE file to DEST file.
    cp SOURCE... DIRECTORY          Copy multiple SOURCE files to a directory.
    cp [-r|--recursive] [SOURCE|SOURCE_DIR]... DIRECTORY
    cp [-r] PATTERN DIRECTORY       Copy matching files to DIRECTORY.

    The destination must be a directory except in the case of
    copying a single file. To copy directories -r must be specified.
    This will cause directories and their contents to be recursively
    copied.
    """
    args = self.line_to_args(line)
    if len(args.filenames) < 2:
        print_err('Missing destination file')
        return
    dst_dirname = resolve_path(args.filenames[-1])
    dst_mode = auto(get_mode, dst_dirname)
    d_dst = {}  # Destination directory: lookup stat by basename
    if args.recursive:
        dst_files = auto(listdir_stat, dst_dirname)
        if dst_files is None:
            err = "cp: target {} is not a directory"
            print_err(err.format(dst_dirname))
            return
        for name, stat in dst_files:
            d_dst[name] = stat

    src_filenames = args.filenames[:-1]

    # Process PATTERN: a single glob expands to the matching source files.
    sfn = src_filenames[0]
    if is_pattern(sfn):
        if len(src_filenames) > 1:
            print_err("Usage: cp [-r] PATTERN DIRECTORY")
            return
        src_filenames = process_pattern(sfn)
        if src_filenames is None:
            return

    for src_filename in src_filenames:
        if is_pattern(src_filename):
            print_err("Only one pattern permitted.")
            return
        src_filename = resolve_path(src_filename)
        src_mode = auto(get_mode, src_filename)
        if not mode_exists(src_mode):
            print_err("File '{}' doesn't exist".format(src_filename))
            return
        if mode_isdir(src_mode):
            if args.recursive:  # Copying a directory
                src_basename = os.path.basename(src_filename)
                dst_filename = dst_dirname + '/' + src_basename
                if src_basename in d_dst:
                    # Destination entry already exists: it must be a directory.
                    dst_stat = d_dst[src_basename]
                    dst_mode = stat_mode(dst_stat)
                    if not mode_isdir(dst_mode):
                        err = "Destination {} is not a directory"
                        print_err(err.format(dst_filename))
                        return
                else:
                    if not mkdir(dst_filename):
                        err = "Unable to create directory {}"
                        print_err(err.format(dst_filename))
                        return
                # Silent (print_func discards output), non-mirroring copy.
                rsync(src_filename, dst_filename, mirror=False, dry_run=False,
                      print_func=lambda *args: None, recursed=False,
                      sync_hidden=args.all)
            else:
                print_err("Omitting directory {}".format(src_filename))
            continue
        # Plain file: copy into the directory, or onto the single DEST file.
        if mode_isdir(dst_mode):
            dst_filename = dst_dirname + '/' + os.path.basename(src_filename)
        else:
            dst_filename = dst_dirname
        if not cp(src_filename, dst_filename):
            err = "Unable to copy '{}' to '{}'"
            print_err(err.format(src_filename, dst_filename))
            break
|
def get(self, deviceId, measurementId):
    """Details the specific measurement, or None if the device or
    measurement is unknown."""
    device_measurements = self.measurements.get(deviceId)
    if device_measurements is None:
        return None
    return device_measurements.get(measurementId)
|
def face_normals(self):
    """Return the unit normal vector for each face.

    If a face is degenerate and a normal can't be generated
    a zero magnitude unit vector will be returned for that face.

    Returns
    -------
    normals : (len(self.faces), 3) np.float64
        Normal vectors of each face
    """
    # check shape of cached normals against the face array
    cached = self._cache['face_normals']
    if np.shape(cached) == np.shape(self._data['faces']):
        return cached
    log.debug('generating face normals')
    # use cached triangle cross products to generate normals
    # this will always return the correct shape but some values
    # will be zero or an arbitrary vector if the inputs had
    # a cross product below machine epsilon
    normals, valid = triangles.normals(
        triangles=self.triangles,
        crosses=self.triangles_cross)
    if valid.all():
        # all triangles valid: shape already correct,
        # put calculated face normals into cache manually
        self._cache['face_normals'] = normals
        return normals
    # make a padded array of normals for correct shape;
    # degenerate faces keep the zero vector
    padded = np.zeros((len(self.triangles), 3), dtype=np.float64)
    padded[valid] = normals
    # put calculated face normals into cache manually
    self._cache['face_normals'] = padded
    return padded
|
def heartbeat(request):
    """Runs all the Django checks and returns a JsonResponse with either
    a status code of 200 or 500 depending on the results of the checks.

    Any check that returns a warning or worse (error, critical) will
    return a 500 response.
    """
    # Deployment checks are only included when DEBUG is off.
    all_checks = checks.registry.registry.get_checks(
        include_deployment_checks=not settings.DEBUG,
    )
    details = {}
    statuses = {}
    level = 0
    for check in all_checks:
        detail = heartbeat_check_detail(check)
        statuses[check.__name__] = detail['status']
        # Track the worst (highest) level seen across all checks.
        level = max(level, detail['level'])
        # Only include full details for checks that reported problems.
        if detail['level'] > 0:
            details[check.__name__] = detail
    if level < checks.messages.WARNING:
        status_code = 200
        heartbeat_passed.send(sender=heartbeat, level=level)
    else:
        status_code = 500
        heartbeat_failed.send(sender=heartbeat, level=level)
    payload = {
        'status': level_to_text(level),
        'checks': statuses,
        'details': details,
    }
    return JsonResponse(payload, status=status_code)
|
def get_overlays(self, **kw):
    """Return the overlays matching the given criteria.

    See Overlay.match() for arguments.
    """
    matched = []
    for overlay in self.overlays:
        if overlay.match(**kw):
            matched.append(overlay)
    return matched
|
def check_move(new, old, t):
    """Determines if a model will be accepted (Metropolis criterion).

    Always rejects at zero/negative temperature; always accepts an
    improvement; otherwise accepts with Boltzmann probability.
    """
    # Reject outright at zero or (numerically) near-zero/negative temperature.
    if t <= 0 or numpy.isclose(t, 0.0):
        return False
    K_BOLTZ = 1.9872041E-003  # kcal/mol.K
    if new < old:
        # Strictly better score: always accept.
        return True
    acceptance = math.exp(-(new - old) / (K_BOLTZ * t))
    return acceptance > random.uniform(0, 1)
|
def write_element(elem_to_parse, file_or_path, encoding=DEFAULT_ENCODING):
    """Writes the contents of the parsed element to file_or_path.

    :see: get_element(parent_to_parse, element_path)
    """
    declaration = '<?xml version="1.0" encoding="{0}"?>'.format(encoding)
    tree = get_element_tree(elem_to_parse)
    tree.write(file_or_path, encoding, declaration)
|
def open(self):
    """Open the URL with urllib.request.

    Returns a (Request, OpenerDirector) pair, or None on IOError.
    """
    try:
        request = urllib.request.Request(self.url)
        opener = urllib.request.build_opener()
    except IOError:
        return None
    return (request, opener)
|
def to_text(self):
    """Render a Table MessageElement as plain text.

    :returns: The text representation of the Table MessageElement
    :rtype: basestring
    """
    table = ''
    if self.caption is not None:
        # Plain-text rendering: the previous version leaked a stray
        # '</caption>' HTML tag into the text output.
        table += '%s\n' % self.caption
    table += '\n'
    for row in self.rows:
        table += row.to_text()
    return table
|
def set_computable_distance(self, value):
    '''Setter for the distance computation strategy.

    :param value: a ComputableDistance instance.
    :raises TypeError: if value is not a ComputableDistance.
    '''
    # Idiomatic negation instead of `isinstance(...) is False`; include a
    # message so the failure is self-explanatory.
    if not isinstance(value, ComputableDistance):
        raise TypeError('value must be a ComputableDistance, got %s.' % type(value))
    self.__computable_distance = value
|
def CheckRegistryKey(javaKey):
    """Method checks for the java in the registry entries.

    Returns the JavaHome path string, or None if the registry is
    inaccessible.
    """
    # NOTE: Python 2 / Windows-only code (_winreg module, `except X, e`
    # syntax). Will not run on Python 3 without porting to winreg.
    from _winreg import ConnectRegistry, HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
    path = None
    try:
        aReg = ConnectRegistry(None, HKEY_LOCAL_MACHINE)
        rk = OpenKey(aReg, javaKey)
        # NOTE(review): the 1024-iteration loop looks defensive; QueryValueEx
        # of "CurrentVersion" does not vary per iteration — confirm intent.
        for i in range(1024):
            currentVersion = QueryValueEx(rk, "CurrentVersion")
            if currentVersion != None:
                key = OpenKey(rk, currentVersion[0])
                if key != None:
                    path = QueryValueEx(key, "JavaHome")
                    return path[0]
    except Exception, err:
        # TODO: Add Warning/Error messages in Logger.
        WriteUcsWarning("Not able to access registry.")
        return None
|
def packet_meta_data(self):
    """Pull out the metadata about each packet from the input_stream.

    Args:
        None
    Yields:
        dict: packet meta data (timestamp, ethernet header fields,
        network-layer fields, and placeholder transport/application
        entries) for each raw packet in the stream.
    """
    # For each packet in the pcap process the contents
    for item in self.input_stream:
        # Output object
        output = {}
        # Grab the fields I need
        timestamp = item['timestamp']
        buf = item['raw_buf']
        # Print out the timestamp in UTC
        output['timestamp'] = datetime.datetime.utcfromtimestamp(timestamp)
        # Unpack the Ethernet frame (mac src/dst, ethertype)
        eth = dpkt.ethernet.Ethernet(buf)
        output['eth'] = {'src': eth.src, 'dst': eth.dst, 'type': eth.type, 'len': len(eth)}
        # Grab packet data
        packet = eth.data
        # Packet Type ('EtherType') (IP, ARP, PPPoE, IP6...
        # see http://en.wikipedia.org/wiki/EtherType)
        if hasattr(packet, 'data'):
            output['packet'] = {'type': packet.__class__.__name__, 'data': packet.data}
        else:
            output['packet'] = {'type': None, 'data': None}
        # Is this an IP packet?
        if output['packet']['type'] == 'IP':
            # Pull out fragment information (flags and offset all packed into
            # the off field, so use bitmasks)
            df = bool(packet.off & dpkt.ip.IP_DF)
            mf = bool(packet.off & dpkt.ip.IP_MF)
            offset = packet.off & dpkt.ip.IP_OFFMASK
            # Pulling out src, dst, length, fragment info, TTL, checksum and Protocol
            output['packet'].update({'src': packet.src, 'dst': packet.dst, 'p': packet.p,
                                     'len': packet.len, 'ttl': packet.ttl, 'df': df,
                                     'mf': mf, 'offset': offset, 'checksum': packet.sum})
        # Is this an IPv6 packet?
        elif output['packet']['type'] == 'IP6':
            # Pulling out the IP6 fields
            output['packet'].update({'src': packet.src, 'dst': packet.dst, 'p': packet.p,
                                     'len': packet.plen, 'ttl': packet.hlim})
        # If the packet isn't IP or IPV6 just pack it as a dictionary
        else:
            output['packet'].update(data_utils.make_dict(packet))
        # For the transport layer we're going to set the transport to None and
        # hopefully a 'link' upstream will manage the transport functionality
        output['transport'] = None
        # For the application layer we're going to set the application to None and
        # hopefully a 'link' upstream will manage the application functionality
        output['application'] = None
        # All done
        yield output
|
def match_entries(entries, pattern):
    """A drop-in replacement for fnmatch.filter that supports pattern
    variants (i.e. {foo,bar}baz = foobaz or barbaz)."""
    # Collect matches for every brace-expanded variant, in variant order.
    matching = [entry
                for variant in expand_braces(pattern)
                for entry in fnmatch.filter(entries, variant)]
    return list(_deduplicate(matching))
|
def check_valence(self):
    """Check valences of all atoms.

    :return: list of atoms with invalid valence
    """
    invalid = []
    for idx, atom in self.atoms():
        if not atom.check_valence(self.environment(idx)):
            invalid.append(idx)
    return invalid
|
def last_bookmark(bookmarks):
    """The bookmark returned by the last :class:`.Transaction`.

    Reduces the sequence pairwise with `_last_bookmark`; returns None for
    an empty sequence.
    """
    result = None
    for bookmark in bookmarks:
        result = bookmark if result is None else _last_bookmark(result, bookmark)
    return result
|
def findConnectedDevices(self):
    """Find all the devices on the serial bus and store the results in a
    class member object.
    """
    # Temporarily shorten the serial timeout so probing absent device
    # addresses fails quickly; restored at the end.
    tmpTimeout = self._serial.timeout
    self._serial.timeout = 0.01
    # Probe every possible address (0-127).
    for dev in range(128):
        device = self._getDeviceID(dev)
        if device is not None and int(device) not in self._deviceConfig:
            # Register a fresh (empty) config and notify the callback.
            config = self._deviceConfig.setdefault(int(device), {})
            self._deviceCallback(device, config)
            self._log and self._log.info(
                "Found device '%s' with configuration: %s", device, config)
    self._serial.timeout = tmpTimeout
|
def U(self):
    """Property to support lazy evaluation of residuals.

    Computes and caches `_U` on first access.
    """
    if self._U is None:
        # Diagonal matrix of reciprocal singular values.
        sinv = N.diag(1 / self.singular_values)
        # NOTE(review): `dot` is called with three arguments; numpy.dot's
        # third positional argument is `out`, not a third operand — confirm
        # `dot` here is a project helper that chains matrix products.
        self._U = dot(self.arr, self.V.T, sinv)
    return self._U
|
def add_arg(self, arg):
    """Add an argument, stringifying anything that is not a File."""
    if not isinstance(arg, File):
        arg = str(arg)
    self._args.append(arg)
|
def _gen_explain_command(coll, spec, projection, skip, limit, batch_size, options, read_concern):
    """Generate an explain command document."""
    find_cmd = _gen_find_command(coll, spec, projection, skip, limit, batch_size, options)
    pairs = [('explain', find_cmd)]
    # Only attach a readConcern when one is actually set.
    if read_concern.level:
        pairs.append(('readConcern', read_concern.document))
    return SON(pairs)
|
def client(host='localhost', port=2379, ca_cert=None, cert_key=None, cert_cert=None, timeout=None, user=None, password=None, grpc_options=None):
    """Return an instance of an Etcd3Client."""
    connection_options = dict(
        host=host,
        port=port,
        ca_cert=ca_cert,
        cert_key=cert_key,
        cert_cert=cert_cert,
        timeout=timeout,
        user=user,
        password=password,
        grpc_options=grpc_options,
    )
    return Etcd3Client(**connection_options)
|
def nodes_simple_info(self, params={}, **kwargs):
    """Return a list of per-node info dicts keyed by column name,
    such as [{"http_address": "192.111.111.111", "name": "test", ...}, ...]
    """
    columns = ['name', 'pid', 'http_address', 'version', 'jdk',
               'disk.total', 'disk.used_percent', 'heap.current',
               'heap.percent', 'ram.current', 'ram.percent',
               'uptime', 'node.role']
    raw = self.client.cat.nodes(v=True, h=columns, **kwargs, params=params)
    # split() with no argument collapses runs of whitespace, so the manual
    # empty-field filtering of the previous version is unnecessary.
    rows = [line.split() for line in raw.split('\n')]
    # Drop the trailing empty row produced by the final newline
    # (the old `result.remove(result[-1])` removed the *first* equal row).
    rows.pop()
    header, data_rows = rows[0], rows[1:]
    # Avoid shadowing the builtin `dict` as the previous version did.
    node_dicts = [dict(zip(header, row)) for row in data_rows]
    logger.info('Acquire simple information of the nodes is done succeeded: %s' % len(node_dicts))
    return node_dicts
|
def clear_grade_system(self):
    """Clears the grading system.

    raise:  NoAccess - ``Metadata.isRequired()`` or
            ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.resource.ResourceForm.clear_avatar_template
    metadata = self.get_grade_system_metadata()
    if metadata.is_read_only() or metadata.is_required():
        raise errors.NoAccess()
    self._my_map['gradeSystemId'] = self._grade_system_default
|
def close(self):
    """Collects the result from the workers and closes the thread pool."""
    # Stop accepting new work.
    self.pool.close()
    # NOTE(review): terminate() immediately after close() stops workers
    # without waiting, which may discard in-flight tasks — confirm this
    # is intended rather than a plain close()+join() shutdown.
    self.pool.terminate()
    # Wait for the worker processes/threads to exit.
    self.pool.join()
|
def find_library_linux(cls):
    """Loads the SEGGER DLL from the root directory.

    On Linux, the SEGGER tools are installed under the ``/opt/SEGGER``
    directory with versioned directories having the suffix ``_VERSION``.

    Args:
      cls (Library): the ``Library`` class

    Returns:
      The paths to the J-Link library files in the order that they are
      found.
    """
    dll = Library.JLINK_SDK_NAME
    root = os.path.join('/', 'opt', 'SEGGER')
    for (directory_name, subdirs, files) in os.walk(root):
        fnames = []
        x86_found = False
        # Collect candidate library files in this directory and note
        # whether any explicit 32-bit ('_x86') builds exist.
        for f in files:
            path = os.path.join(directory_name, f)
            if os.path.isfile(path) and f.startswith(dll):
                fnames.append(f)
                if '_x86' in path:
                    x86_found = True
        for fname in fnames:
            fpath = os.path.join(directory_name, fname)
            if util.is_os_64bit():
                # 64-bit host: skip the 32-bit variants.
                if '_x86' not in fname:
                    yield fpath
            elif x86_found:
                # 32-bit host with explicit x86 builds: use only those.
                if '_x86' in fname:
                    yield fpath
            else:
                # 32-bit host without explicit x86 builds: yield everything.
                yield fpath
|
def report_ports(target, ports):
    """Portscan a target and output a LaTeX table.

    report_ports(target, ports) -> string
    """
    # Send TCP SYN probes; `ans` holds answered (sent, received) pairs,
    # `unans` the unanswered probes.
    ans, unans = sr(IP(dst=target) / TCP(dport=ports), timeout=5)
    rep = "\\begin{tabular}{|r|l|l|}\n\\hline\n"
    # Open ports: SYN/ACK replies (TCP flags 0x12) without an ICMP layer.
    for s, r in ans:
        if not r.haslayer(ICMP):
            if r.payload.flags == 0x12:
                rep += r.sprintf("%TCP.sport% & open & SA \\\\\n")
    rep += "\\hline\n"
    # Closed ports: ICMP errors, or TCP replies that are not SYN/ACK.
    for s, r in ans:
        if r.haslayer(ICMP):
            rep += r.sprintf("%TCPerror.dport% & closed & ICMP type %ICMP.type%/%ICMP.code% from %IP.src% \\\\\n")  # noqa: E501
        elif r.payload.flags != 0x12:
            rep += r.sprintf("%TCP.sport% & closed & TCP %TCP.flags% \\\\\n")
    rep += "\\hline\n"
    # Filtered/unknown: probes that received no answer at all.
    for i in unans:
        rep += i.sprintf("%TCP.dport% & ? & unanswered \\\\\n")
    rep += "\\hline\n\\end{tabular}\n"
    return rep
|
def set_params(self, subs=None, numticks=None):
    """Set parameters within this locator.

    Parameters
    ----------
    subs : array, optional
        Subtick values, as multiples of the main ticks.
    numticks : array, optional
        Number of ticks.
    """
    # Only overwrite the attributes that were explicitly provided.
    if subs is not None:
        self._subs = subs
    if numticks is not None:
        self.numticks = numticks
|
def json_conversion(obj):
    """Encode additional objects to JSON."""
    try:
        # numpy isn't an explicit dependency of bowtie,
        # so we can't assume it's available
        import numpy as np
    except ImportError:
        pass
    else:
        if isinstance(obj, (np.ndarray, np.generic)):
            return obj.tolist()
    try:
        # pandas isn't an explicit dependency of bowtie,
        # so we can't assume it's available
        import pandas as pd
    except ImportError:
        pass
    else:
        if isinstance(obj, pd.Index):
            return obj.tolist()
    if isinstance(obj, (datetime, time, date)):
        return obj.isoformat()
    raise TypeError('Not sure how to serialize {} of type {}'.format(obj, type(obj)))
|
def safe_filepath(file_path_name, dir_sep=None):
    '''Input the full path and filename, splits on directory separator and calls
    safe_filename_leaf for each part of the path. dir_sep allows the caller to
    force a directory separator to a particular character.

    .. versionadded:: 2017.7.2

    :codeauthor: Damon Atkins <https://github.com/damon-atkins>
    '''
    if not dir_sep:
        dir_sep = os.sep
    # Normally if file_path_name or dir_sep is Unicode then the output will be Unicode
    # This code ensures the output type is the same as file_path_name
    if not isinstance(file_path_name, six.text_type) and isinstance(dir_sep, six.text_type):
        dir_sep = dir_sep.encode('ascii')  # This should not be executed under PY3
    # splitdrive only sets drive on the Windows platform
    (drive, path) = os.path.splitdrive(file_path_name)
    # Sanitize each path component individually, then rejoin.
    path = dir_sep.join([safe_filename_leaf(file_section) for file_section in path.rsplit(dir_sep)])
    if drive:
        path = dir_sep.join([drive, path])
    return path
|
async def dump_variant(obj, elem, elem_type=None, params=None, field_archiver=None):
    """Transform variant to the popo object representation.

    :param obj: unused here (kept for archiver signature parity)
    :param elem: variant value (or wrapped raw value) to dump
    :param elem_type: variant type descriptor; consulted when `elem` is not
        itself a VariantType instance
    :param params: unused here
    :param field_archiver: coroutine used to dump the inner field;
        defaults to `dump_field`
    :return: single-key dict mapping the variant field name to its dumped value
    """
    field_archiver = field_archiver if field_archiver else dump_field
    if isinstance(elem, x.VariantType) or elem_type.WRAPS_VALUE:
        # The element carries its own active variant member name/type.
        return {elem.variant_elem: await field_archiver(None, getattr(elem, elem.variant_elem), elem.variant_elem_type)}
    else:
        # Raw value: look up the matching field definition on the type.
        fdef = elem_type.find_fdef(elem_type.f_specs(), elem)
        return {fdef[0]: await field_archiver(None, elem, fdef[1])}
|
def process_pgturl(self, params):
    """Handle PGT request.

    :param dict params: A template context dict
    :raises ValidateError: if pgtUrl is invalid or if TLS validation of the
        pgtUrl fails
    :return: The rendering of ``cas_server/serviceValidate.xml``, using ``params``
    :rtype: django.http.HttpResponse
    """
    try:
        pattern = ServicePattern.validate(self.pgt_url)
        if pattern.proxy_callback:
            # Generate the PGTIOU/PGT pair and persist the granting ticket.
            proxyid = utils.gen_pgtiou()
            pticket = ProxyGrantingTicket.objects.create(
                user=self.ticket.user,
                service=self.pgt_url,
                service_pattern=pattern,
                single_log_out=pattern.single_log_out)
            url = utils.update_url(self.pgt_url, {'pgtIou': proxyid, 'pgtId': pticket.value})
            try:
                # Transmit the PGT to the callback; TLS is verified against
                # the configured CA certificate.
                ret = requests.get(url, verify=settings.CAS_PROXY_CA_CERTIFICATE_PATH)
                if ret.status_code == 200:
                    params['proxyGrantingTicket'] = proxyid
                else:
                    # Callback refused the ticket: discard it.
                    pticket.delete()
                logger.info((
                    "ValidateService: ticket %s validated for user %s on service %s. "
                    "Proxy Granting Ticket transmited to %s.") % (
                        self.ticket.value, self.ticket.user.username,
                        self.ticket.service, self.pgt_url))
                logger.debug("ValidateService: User attributs are:\n%s" % (pprint.pformat(self.ticket.attributs),))
                return render(self.request, "cas_server/serviceValidate.xml", params, content_type="text/xml; charset=utf-8")
            except requests.exceptions.RequestException as error:
                error = utils.unpack_nested_exception(error)
                raise ValidateError(u'INVALID_PROXY_CALLBACK', u"%s: %s" % (type(error), str(error)))
        else:
            raise ValidateError(u'INVALID_PROXY_CALLBACK', u"callback url not allowed by configuration")
    except ServicePattern.DoesNotExist:
        raise ValidateError(u'INVALID_PROXY_CALLBACK', u'callback url not allowed by configuration')
|
def toProtocolElement(self):
    """Converts this rnaQuant into its GA4GH protocol equivalent."""
    element = protocol.RnaQuantificationSet()
    element.id = self.getId()
    element.dataset_id = self._parentContainer.getId()
    element.name = self._name
    self.serializeAttributes(element)
    return element
|
def delete_group(self, name):
    """Delete contact group.

    :param name: of group
    :type name: ``str``, ``unicode``
    :rtype: ``bool``
    """
    group = self.get_group(name)
    method, url = get_URL('group_delete')
    payload = {
        'apikey': self.config.get('apikey'),
        'logintoken': self.session.cookies.get('logintoken'),
        'contactgroupid': group['contactgroupid'],
    }
    response = getattr(self.session, method)(url, params=payload)
    if response.status_code == 200:
        return True
    # Non-200: delegate error handling (typically raises).
    hellraiser(response)
|
def addSquigglyAnnot(self, rect):
    """Wavy underline content in a rectangle or quadrilateral."""
    CheckParent(self)
    annot = _fitz.Page_addSquigglyAnnot(self, rect)
    if not annot:
        return
    # Transfer ownership to Python and link back to this page weakly.
    annot.thisown = True
    annot.parent = weakref.proxy(self)
    self._annot_refs[id(annot)] = annot
    return annot
|
def get_F1_EM(dataset, predict_data):
    """Calculate the F1 and EM scores of the predicted results.
    Use only with the SQuAD1.1 dataset.

    Parameters
    ----------
    dataset : iterable
        Evaluation records; record[1] is the question id and record[4]
        the ground-truth answers.
    predict_data : dict
        All final predictions, keyed by question id.

    Returns
    -------
    scores : dict
        F1 and EM scores (percentages).
    """
    f1 = exact_match = total = 0
    for record in dataset:
        total += 1
        if record[1] not in predict_data:
            # Missing predictions score 0 but still count towards the total.
            message = 'Unanswered question ' + record[1] + ' will receive score 0.'
            print(message)
            continue
        ground_truths = record[4]
        prediction = predict_data[record[1]]
        # Score against the best-matching ground-truth answer.
        exact_match += metric_max_over_ground_truths(exact_match_score, prediction, ground_truths)
        f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths)
    # Convert to percentages over all records.
    exact_match = 100.0 * exact_match / total
    f1 = 100.0 * f1 / total
    scores = {'exact_match': exact_match, 'f1': f1}
    return scores
|
def get_archs(libname):
    """Return architecture types from library `libname`.

    Parameters
    ----------
    libname : str
        filename of binary for which to return arch codes

    Returns
    -------
    arch_names : frozenset
        Empty (frozen) set if no arch codes. If not empty, contains one or
        more of 'ppc', 'ppc64', 'i386', 'x86_64'

    Raises
    ------
    RuntimeError
        If `libname` does not exist.
    ValueError
        If the `lipo -info` output cannot be parsed.
    """
    if not exists(libname):
        raise RuntimeError(libname + " is not a file")
    try:
        stdout = back_tick(['lipo', '-info', libname])
    except RuntimeError:
        # lipo failed (e.g. not a Mach-O binary): report no architectures.
        return frozenset()
    lines = [line.strip() for line in stdout.split('\n') if line.strip()]
    # For some reason, output from lipo -info on a .a file generates this line
    if lines[0] == "input file {0} is not a fat file".format(libname):
        line = lines[1]
    else:
        assert len(lines) == 1
        line = lines[0]
    for reggie in (
            'Non-fat file: {0} is architecture: (.*)'.format(libname),
            'Architectures in the fat file: {0} are: (.*)'.format(libname)):
        reggie = re.compile(reggie)
        match = reggie.match(line)
        # Idiomatic `is not None` (was `not match is None`).
        if match is not None:
            return frozenset(match.groups()[0].split(' '))
    raise ValueError("Unexpected output: '{0}' for {1}".format(stdout, libname))
|
def install(self):
    """Install the generated config file alongside the library files."""
    installed = super(MyInstallLib, self).install()
    conf_path = self.create_conf_file()
    target = os.path.join(self.install_dir, os.path.basename(conf_path))
    self.copy_file(conf_path, target)
    installed.append(target)
    return installed
|
def get_repo_parent(path):
    """Returns parent repo or input path if none found.

    :return: grit.Local or path
    """
    # path is a repository
    if is_repo(path):
        return Local(path)
    # path is a file inside a repository: walk up towards the root.
    # (The previous version also accumulated a relative-path `_rel`
    # string that was never used; that dead code is removed.)
    elif not os.path.isdir(path):
        while path and path != '/':
            if is_repo(path):
                return Local(path)
            path = os.path.dirname(path)
    return path
|
def reset(self):
    '''Reset list of terms and y-variable.'''
    # Fresh containers for the model's accumulated state.
    self.terms = OrderedDict()
    self.added_terms = []
    self._added_priors = {}
    self.completes = []
    # Scalar/optional attributes all reset to None.
    for attr in ('y', 'backend', 'clean_data'):
        setattr(self, attr, None)
|
def calculate_digit_distance(number1, number2):
    """Calculate the digit distance between two numbers: the sum of the
    digits of the absolute difference between them.

    Example usage:
        calculate_digit_distance(1, 2) -> 1
        calculate_digit_distance(23, 56) -> 6
        calculate_digit_distance(123, 256) -> 7

    Args:
        number1: First integer.
        number2: Second integer.

    Returns:
        The digit distance between the two numbers.
    """
    difference = abs(number1 - number2)
    total = 0
    # Peel off decimal digits arithmetically instead of via str().
    while difference:
        total += difference % 10
        difference //= 10
    return total
|
def set_all_pattern_variables(self, patternnumber, sp0, ti0, sp1, ti1, sp2, ti2, sp3, ti3, sp4, ti4, sp5, ti5, sp6, ti6, sp7, ti7, actual_step, additional_cycles, link_pattern):
    """Set all variables for a given pattern at one time.

    Args:
        * patternnumber (integer): 0-7
        * sp[*n*] (float): setpoint value for step *n*
        * ti[*n*] (integer??): step time for step *n*, 0-900
        * actual_step (int): ?
        * additional_cycles (int): ?
        * link_pattern (int): ?
    """
    _checkPatternNumber(patternnumber)
    setpoints = (sp0, sp1, sp2, sp3, sp4, sp5, sp6, sp7)
    steptimes = (ti0, ti1, ti2, ti3, ti4, ti5, ti6, ti7)
    # Write all setpoints first, then all step times — the same call order
    # as the previous unrolled implementation.
    for step, setpoint in enumerate(setpoints):
        self.set_pattern_step_setpoint(patternnumber, step, setpoint)
    for step, steptime in enumerate(steptimes):
        self.set_pattern_step_time(patternnumber, step, steptime)
    self.set_pattern_additional_cycles(patternnumber, additional_cycles)
    self.set_pattern_link_topattern(patternnumber, link_pattern)
    self.set_pattern_actual_step(patternnumber, actual_step)
|
def find_behind_subscriptions():
    """Finds any subscriptions that are behind according to where they should be,
    and creates a BehindSubscription entry for them.
    """
    # Fetch only the ids (values_list + iterator) to keep memory usage low
    # on large subscription tables.
    subscriptions = Subscription.objects.filter(
        active=True, completed=False, process_status=0).values_list("id", flat=True)
    for subscription_id in subscriptions.iterator():
        # Delegate the per-subscription work to an asynchronous task.
        calculate_subscription_lifecycle.delay(str(subscription_id))
|
def _gatherDataFromLookups(gpos, scriptOrder):
    """Gather kerning and classes from the applicable lookups
    and return them in script order.
    """
    lookupIndexes = _gatherLookupIndexes(gpos)
    seenLookups = set()
    kerningDictionaries = []
    leftClassDictionaries = []
    rightClassDictionaries = []
    for script in scriptOrder:
        kerning = []
        leftClasses = []
        rightClasses = []
        for lookupIndex in lookupIndexes[script]:
            # Each lookup is only processed once, attributed to the first
            # script (in scriptOrder) that references it.
            if lookupIndex in seenLookups:
                continue
            seenLookups.add(lookupIndex)
            result = _gatherKerningForLookup(gpos, lookupIndex)
            if result is None:
                continue
            k, lG, rG = result
            kerning.append(k)
            leftClasses.append(lG)
            rightClasses.append(rG)
        # Only scripts that contributed kerning appear in the output lists,
        # keeping the three lists index-aligned.
        if kerning:
            kerningDictionaries.append(kerning)
            leftClassDictionaries.append(leftClasses)
            rightClassDictionaries.append(rightClasses)
    return kerningDictionaries, leftClassDictionaries, rightClassDictionaries
|
def constant_fold(code, silent=True, ignore_errors=True):
    """Constant-folds simple expressions like 2 3 + to 5.

    Repeatedly scans ``code`` and rewrites foldable sequences (arithmetic on
    two number constants, casts of constants, dup/drop/swap/over on
    constants, nop removal) until a full pass makes no change.

    Args:
        code: Code in non-native types.
        silent: Flag that controls whether to print optimizations made.
        ignore_errors: Whether to raise exceptions on found errors.

    Returns:
        The optimized code list (the input list may be mutated, and a new
        list object may be returned for the swap rewrite).

    Raises:
        errors.CompileError: on a detected division by zero, when
            ``ignore_errors`` is False.
    """
    # Loop until we haven't done any optimizations.  E.g., "2 3 + 5 *" will be
    # optimized to "5 5 *" and in the next iteration to 25.  Yes, this is
    # extremely slow, big-O wise.  We'll fix that some other time. (TODO)
    arithmetic = list(map(instructions.lookup, [
        instructions.add,
        instructions.bitwise_and,
        instructions.bitwise_or,
        instructions.bitwise_xor,
        instructions.div,
        instructions.equal,
        instructions.greater,
        instructions.less,
        instructions.mod,
        instructions.mul,
        instructions.sub,
    ]))
    # BUG FIX: materialize to a list.  In Python 3 a bare map() is a one-shot
    # iterator, so the first `c in divzero` membership test would exhaust it
    # and every subsequent division-by-zero check would silently be False.
    divzero = list(map(instructions.lookup, [
        instructions.div,
        instructions.mod,
    ]))
    lookup = instructions.lookup

    def isfunction(op):
        # An op is a function iff the instruction table knows it.
        try:
            instructions.lookup(op)
            return True
        except KeyError:
            return False

    def isconstant(op):
        # None (out-of-range lookahead), quoted constants, and any
        # non-function token all count as constants for folding purposes.
        return op is None or interpreter.isconstant(op, quoted=True) or not isfunction(op)

    keep_running = True
    while keep_running:
        keep_running = False
        # Find two consecutive numbes and an arithmetic operator
        for i, a in enumerate(code):
            b = code[i + 1] if i + 1 < len(code) else None
            c = code[i + 2] if i + 2 < len(code) else None

            # Constant fold arithmetic operations (TODO: Move to check-func)
            if interpreter.isnumber(a, b) and c in arithmetic:
                # Although we can detect division by zero at compile time, we
                # don't report it here, because the surrounding system doesn't
                # handle that very well.  So just leave it for now.  (NOTE: If
                # we had an "error" instruction, we could actually transform
                # the expression to an error, or exit instruction perhaps)
                if b == 0 and c in divzero:
                    if ignore_errors:
                        continue
                    else:
                        raise errors.CompileError(ZeroDivisionError("Division by zero"))

                # Calculate result by running on a machine (lambda vm: ... is
                # embedded pushes, see compiler)
                result = interpreter.Machine([lambda vm: vm.push(a),
                                              lambda vm: vm.push(b),
                                              instructions.lookup(c)]).run().top
                del code[i:i + 3]
                code.insert(i, result)

                if not silent:
                    print("Optimizer: Constant-folded %s %s %s to %s" % (a, b, c, result))
                keep_running = True
                break

            # Translate <constant> dup to <constant> <constant>
            if isconstant(a) and b == lookup(instructions.dup):
                code[i + 1] = a
                if not silent:
                    print("Optimizer: Translated %s %s to %s %s" % (a, b, a, a))
                keep_running = True
                break

            # Dead code removal: <constant> drop
            if isconstant(a) and b == lookup(instructions.drop):
                del code[i:i + 2]
                if not silent:
                    print("Optimizer: Removed dead code %s %s" % (a, b))
                keep_running = True
                break

            # Dead code removal: nop
            if a == lookup(instructions.nop):
                del code[i]
                if not silent:
                    print("Optimizer: Removed dead code %s" % a)
                keep_running = True
                break

            # Dead code removal: <integer> cast_int
            # NOTE(review): isinstance(a, int) also matches bool — True
            # cast_int takes this branch before the bool branch below.
            # Preserved as-is; confirm whether that is intended.
            if isinstance(a, int) and b == lookup(instructions.cast_int):
                del code[i + 1]
                if not silent:
                    print("Optimizer: Translated %s %s to %s" % (a, b, a))
                keep_running = True
                break

            # Dead code removal: <float> cast_float
            if isinstance(a, float) and b == lookup(instructions.cast_float):
                del code[i + 1]
                if not silent:
                    print("Optimizer: Translated %s %s to %s" % (a, b, a))
                keep_running = True
                break

            # Dead code removal: <string> cast_str
            if isinstance(a, str) and b == lookup(instructions.cast_str):
                del code[i + 1]
                if not silent:
                    print("Optimizer: Translated %s %s to %s" % (a, b, a))
                keep_running = True
                break

            # Dead code removal: <boolean> cast_bool
            if isinstance(a, bool) and b == lookup(instructions.cast_bool):
                del code[i + 1]
                if not silent:
                    print("Optimizer: Translated %s %s to %s" % (a, b, a))
                keep_running = True
                break

            # <c1> <c2> swap -> <c2> <c1>
            if isconstant(a) and isconstant(b) and c == lookup(instructions.swap):
                del code[i:i + 3]
                # Rebinds `code` to a fresh list; safe because we return it.
                code = code[:i] + [b, a] + code[i:]
                if not silent:
                    print("Optimizer: Translated %s %s %s to %s %s" % (a, b, c, b, a))
                keep_running = True
                break

            # a b over -> a b a
            if isconstant(a) and isconstant(b) and c == lookup(instructions.over):
                code[i + 2] = a
                if not silent:
                    print("Optimizer: Translated %s %s %s to %s %s %s" % (a, b, c, a, b, a))
                keep_running = True
                break

            # "123" cast_int -> 123
            if interpreter.isstring(a) and b == lookup(instructions.cast_int):
                try:
                    number = int(a)
                    del code[i:i + 2]
                    code.insert(i, number)
                    if not silent:
                        print("Optimizer: Translated %s %s to %s" % (a, b, number))
                    keep_running = True
                    break
                except ValueError:
                    # Not a parseable integer; leave for runtime.
                    pass

            if isconstant(a) and b == lookup(instructions.cast_str):
                del code[i:i + 2]
                code.insert(i, str(a))
                # TODO: Try-except here
                if not silent:
                    print("Optimizer: Translated %s %s to %s" % (a, b, str(a)))
                keep_running = True
                break

            if isconstant(a) and b == lookup(instructions.cast_bool):
                del code[i:i + 2]
                code.insert(i, bool(a))
                # TODO: Try-except here
                if not silent:
                    print("Optimizer: Translated %s %s to %s" % (a, b, bool(a)))
                keep_running = True
                break

            if isconstant(a) and b == lookup(instructions.cast_float):
                try:
                    v = float(a)
                    del code[i:i + 2]
                    code.insert(i, v)
                    if not silent:
                        print("Optimizer: Translated %s %s to %s" % (a, b, v))
                    keep_running = True
                    break
                except ValueError:
                    # Not a parseable float; leave for runtime.
                    pass
    return code
|
def setup(self):
    """Collect watchdog information.

    Collect configuration files, custom executables for test-binary
    and repair-binary, and stdout/stderr logs.
    """
    conf_file = self.get_option('conf_file')
    log_dir = '/var/log/watchdog'

    # Get service configuration and sysconfig files
    self.add_copy_spec([conf_file, '/etc/sysconfig/watchdog', ])
    # Get custom executables
    self.add_copy_spec(['/etc/watchdog.d', '/usr/libexec/watchdog/scripts', ])

    # Prefer the log directory configured in the conf file, falling back
    # to the default when it cannot be read or is not set.
    try:
        configured = self.get_log_dir(conf_file)
        if configured:
            log_dir = configured
    except IOError as err:
        self._log_warn("Could not read %s: %s" % (conf_file, err))

    if self.get_option('all_logs'):
        patterns = ['*']
    else:
        patterns = ['*.stdout', '*.stderr']
    log_files = []
    for pattern in patterns:
        log_files.extend(glob(os.path.join(log_dir, pattern)))
    self.add_copy_spec(log_files)

    # Get output of "wdctl <device>" for each /dev/watchdog*
    for dev in glob('/dev/watchdog*'):
        self.add_cmd_output("wdctl %s" % dev)
|
def make(self):
    """Make the lock file."""
    # Creation failure is fatal: abort with a diagnostic message.
    try:
        self.mkfile(self.lock_file)
    except Exception as err:
        self.die('Failed to generate lock file: {}'.format(str(err)))
|
def set_instrumentation_callback(self, callback):
    """Assign a method to invoke when a request has completed gathering
    measurements.

    :param method callback: The method to invoke
    """
    # Log first so the handoff is traceable, then store the callback.
    self.logger.debug('Setting instrumentation callback: %r', callback)
    self._instrumentation_callback = callback
|
def schema(self):
    """The DQL syntax for creating this item"""
    # Non-key attributes render as "<name> <type>"; key attributes append
    # the key type and the KEY keyword.
    parts = [self.name, self.data_type]
    if self.key_type is not None:
        parts.append(self.key_type)
        parts.append("KEY")
    return " ".join(parts)
|
def execute(self, input_data):
    '''Okay this worker is going build graphs from PCAP Bro output logs'''
    # Grab the Bro log handles from the input
    bro_logs = input_data['pcap_bro']

    # Weird log is optional: only graph it when present.
    if 'weird_log' in bro_logs:
        self.weird_log_graph(self.workbench.stream_sample(bro_logs['weird_log']))

    # HTTP log (gsleep yields between graphs)
    gsleep()
    self.http_log_graph(self.workbench.stream_sample(bro_logs['http_log']))

    # Files log
    gsleep()
    self.files_log_graph(self.workbench.stream_sample(bro_logs['files_log']))

    return {'output': 'go to http://localhost:7474/browser and execute this query "match (s:origin), (t:file), p=allShortestPaths((s)--(t)) return p"'}
|
def generate_sub_codons_left(codons_dict):
    """Generate the sub_codons_left dictionary of codon prefixes.

    Parameters
    ----------
    codons_dict : dict
        Dictionary, keyed by the allowed 'amino acid' symbols with the values
        being lists of codons corresponding to the symbol.

    Returns
    -------
    sub_codons_left : dict
        Dictionary of the 1 and 2 nucleotide prefixes (read from 5') for
        each codon in an 'amino acid' grouping.
    """
    sub_codons_left = {}
    for aa, codons in codons_dict.items():
        # Union of single-nucleotide and two-nucleotide 5' prefixes,
        # deduplicated via sets (list order is unspecified, as before).
        prefixes = {codon[0] for codon in codons} | {codon[:2] for codon in codons}
        sub_codons_left[aa] = list(prefixes)
    return sub_codons_left
|
def initialize_from_string(content: str) -> 'HomusSymbol':
    """Create and initializes a new symbol from a string.

    :param content: The content of a symbol as read from the text-file
    :return: The initialized symbol, or None for missing/empty content
    :rtype: HomusSymbol
    """
    # BUG FIX: the original used `content is ""`, which compares object
    # identity rather than value and is unreliable for strings (and a
    # SyntaxWarning on Python 3.8+).  A truthiness test rejects both None
    # and the empty string.
    if not content:
        return None

    lines = content.splitlines()
    min_x = sys.maxsize
    max_x = 0
    min_y = sys.maxsize
    max_y = 0
    symbol_name = lines[0]
    strokes = []
    for stroke_string in lines[1:]:
        stroke = []
        for point_string in stroke_string.split(";"):
            # Skip the last element, that is due to a trailing ; in each line
            # (BUG FIX: was `point_string is ""` — identity comparison).
            if point_string == "":
                continue
            point_x, point_y = point_string.split(",")
            x = int(point_x)
            y = int(point_y)
            stroke.append(Point2D(x, y))
            # Track the bounding box of all points across all strokes.
            max_x = max(max_x, x)
            min_x = min(min_x, x)
            max_y = max(max_y, y)
            min_y = min(min_y, y)
        strokes.append(stroke)
    dimensions = Rectangle(Point2D(min_x, min_y), max_x - min_x + 1, max_y - min_y + 1)
    return HomusSymbol(content, strokes, symbol_name, dimensions)
|
def update(self):
    """Calculate the smoothing parameter value.

    The following example is explained in some detail in module
    |smoothtools|:

    >>> from hydpy.models.dam import *
    >>> parameterstep()
    >>> highestremotedischarge(1.0)
    >>> highestremotetolerance(0.0)
    >>> derived.highestremotesmoothpar.update()
    >>> from hydpy.cythons.smoothutils import smooth_min1
    >>> from hydpy import round_
    >>> round_(smooth_min1(-4.0, 1.5, derived.highestremotesmoothpar))
    -4.0
    >>> highestremotetolerance(2.5)
    >>> derived.highestremotesmoothpar.update()
    >>> round_(smooth_min1(-4.0, -1.5, derived.highestremotesmoothpar))
    -4.01

    Note that the example above corresponds to the example on function
    |calc_smoothpar_min1|, due to the value of parameter
    |HighestRemoteDischarge| being 1 m3/s.  Doubling the value of
    |HighestRemoteDischarge| also doubles the value of
    |HighestRemoteSmoothPar| proportionally.  This leads to the following
    result:

    >>> highestremotedischarge(2.0)
    >>> derived.highestremotesmoothpar.update()
    >>> round_(smooth_min1(-4.0, 1.0, derived.highestremotesmoothpar))
    -4.02

    This relationship between |HighestRemoteDischarge| and
    |HighestRemoteSmoothPar| prevents from any smoothing when
    the value of |HighestRemoteDischarge| is zero:

    >>> highestremotedischarge(0.0)
    >>> derived.highestremotesmoothpar.update()
    >>> round_(smooth_min1(1.0, 1.0, derived.highestremotesmoothpar))
    1.0

    In addition, |HighestRemoteSmoothPar| is set to zero if
    |HighestRemoteDischarge| is infinity (because no actual value
    will ever come in the vicinity of infinity, which is why no
    value would be changed through smoothing anyway):

    >>> highestremotedischarge(inf)
    >>> derived.highestremotesmoothpar.update()
    >>> round_(smooth_min1(1.0, 1.0, derived.highestremotesmoothpar))
    1.0
    """
    control = self.subpars.pars.control
    if numpy.isinf(control.highestremotedischarge):
        # Infinite discharge: disable smoothing entirely (see docstring).
        self(0.0)
    else:
        # Smoothing parameter scales proportionally with the discharge.
        self(control.highestremotedischarge *
             smoothtools.calc_smoothpar_min1(control.highestremotetolerance))
|
def broadcast_transaction(hex_tx, blockchain_client):
    """Dispatches a raw hex transaction to the network.

    :param hex_tx: the raw transaction as a hex string
    :param blockchain_client: the client object used to publish the
        transaction; dispatch is by concrete client type
    :return: whatever the selected client's broadcast call returns
    :raises Exception: if the client type is recognized but unsupported,
        or if no usable client object was supplied
    """
    # NOTE(review): this chain is order-sensitive — the generic hasattr
    # fallback and the BlockchainClient catch-all must stay after the
    # concrete client branches so those take priority.
    if isinstance(blockchain_client, BlockcypherClient):
        return blockcypher.broadcast_transaction(hex_tx, blockchain_client)
    elif isinstance(blockchain_client, BlockchainInfoClient):
        return blockchain_info.broadcast_transaction(hex_tx, blockchain_client)
    elif isinstance(blockchain_client, ChainComClient):
        return chain_com.broadcast_transaction(hex_tx, blockchain_client)
    elif isinstance(blockchain_client, (BitcoindClient, AuthServiceProxy)):
        return bitcoind.broadcast_transaction(hex_tx, blockchain_client)
    elif hasattr(blockchain_client, "broadcast_transaction"):
        # Duck-typed fallback: any object exposing broadcast_transaction.
        return blockchain_client.broadcast_transaction(hex_tx)
    elif isinstance(blockchain_client, BlockchainClient):
        raise Exception('That blockchain interface is not supported.')
    else:
        raise Exception('A BlockchainClient object is required')
|
def call_use_cached_files(tup):
    """Importable helper for multi-proc calling of ArtifactCache.use_cached_files on a cache instance.

    Multiprocessing map/apply/etc require functions which can be imported, not bound methods.
    To call a bound method, instead call a helper like this and pass tuple of the instance and args.
    The helper can then call the original method on the deserialized instance.

    :param tup: A tuple of an ArtifactCache and args (eg CacheKey) for ArtifactCache.use_cached_files.
    """
    try:
        cache, key, results_dir = tup
        hit = cache.use_cached_files(key, results_dir)
        # Progress indicator on stderr: '.' for a cache hit, ' ' for a miss.
        sys.stderr.write('.' if hit else ' ')
        sys.stderr.flush()
        return hit
    except NonfatalArtifactCacheError as e:
        logger.warn('Error calling use_cached_files in artifact cache: {0}'.format(e))
        return False
|
def print_version():
    """Print get_version() return value in a readable format.

    Params:
        None
    Returns:
        None
    """
    v = get_version()
    try:
        s = _STR_WIN[v]
    except KeyError:
        # BUG FIX: corrected the user-facing typo "Unknow OS".
        s = "Unknown OS"
    print("-----------------------------------------------------------")
    print("###################### WinVer Report ######################")
    print("Python Version : {}.{}.{}".format(*sys.version_info[:3]))
    print("Windows Version String : {}".format(s))
    print("Windows Major Version : {}".format(v[0]))
    print("Windows Minor Version : {}".format(v[1]))
    print("Windows Service Pack (or Build) Version : {}".format(v[2]))
    print("Is Windows Server : {}".format('Yes' if v[3] == 1 else 'No'))
    print("Is Windows 10 (or Windows Server 2016) : {}".format('Yes' if v >= WIN_10 else 'No'))
    print("-----------------------------------------------------------")
|
def prime_factors(n):
    """Lists prime factors of a given natural integer, from greatest to smallest.

    :param n: Natural integer
    :rtype: list of all prime factors of the given natural n
    """
    i = 2
    # `i * i <= n` is the integer-exact equivalent of `i <= sqrt(n)` and
    # avoids recomputing sqrt(n) on every iteration.
    while i * i <= n:
        if n % i == 0:
            # BUG FIX: the original recursed on `n / i`, which in Python 3
            # is true division and produced float factors; floor division
            # keeps everything in the integer domain.
            factors = prime_factors(n // i)
            factors.append(i)
            return factors
        i += 1
    # n itself is prime (or 1, preserving the original's behavior for n=1).
    return [n]
|
def create_proxy_zip(proxy_string, proxy_user, proxy_pass):
    """Implementation of https://stackoverflow.com/a/35293284 for
    https://stackoverflow.com/questions/12848327/
    (Run Selenium on a proxy server that requires authentication.)
    Solution involves creating & adding a Chrome extension on the fly.
    *CHROME-ONLY for now!*
    """
    # proxy_string is "host:port"
    parts = proxy_string.split(':')
    proxy_host = parts[0]
    proxy_port = parts[1]
    background_js = (
        """var config = {\n"""
        """ mode: "fixed_servers",\n"""
        """ rules: {\n"""
        """ singleProxy: {\n"""
        """ scheme: "http",\n"""
        """ host: "%s",\n"""
        """ port: parseInt("%s")\n"""
        """ },\n"""
        """ }\n"""
        """ };\n"""
        """chrome.proxy.settings.set("""
        """{value: config, scope: "regular"}, function() {"""
        """});\n"""
        """function callbackFn(details) {\n"""
        """ return {\n"""
        """ authCredentials: {\n"""
        """ username: "%s",\n"""
        """ password: "%s"\n"""
        """ }\n"""
        """ };\n"""
        """}\n"""
        """chrome.webRequest.onAuthRequired.addListener(\n"""
        """ callbackFn,\n"""
        """ {urls: ["<all_urls>"]},\n"""
        """ ['blocking']\n"""
        """);""" % (proxy_host, proxy_port, proxy_user, proxy_pass))
    manifest_json = (
        '''{\n'''
        '''"version": "1.0.0",\n'''
        '''"manifest_version": 2,\n'''
        '''"name": "Chrome Proxy",\n'''
        '''"permissions": [\n'''
        ''' "proxy",\n'''
        ''' "tabs",\n'''
        ''' "unlimitedStorage",\n'''
        ''' "storage",\n'''
        ''' "<all_urls>",\n'''
        ''' "webRequest",\n'''
        ''' "webRequestBlocking"\n'''
        '''],\n'''
        '''"background": {\n'''
        ''' "scripts": ["background.js"]\n'''
        '''},\n'''
        '''"minimum_chrome_version":"22.0.0"\n'''
        '''}''')
    # Support multi-threaded test runs with Pytest
    lock = threading.RLock()
    with lock:
        try:
            zf = zipfile.ZipFile(PROXY_ZIP_PATH, mode='w')
        except IOError:
            # Handle "Permission denied" on the default proxy.zip path
            downloads_path = os.path.join(os.path.abspath('.'), DOWNLOADS_DIR)
            if not os.path.exists(downloads_path):
                os.mkdir(downloads_path)
            zf = zipfile.ZipFile(PROXY_ZIP_PATH_2, mode='w')
        zf.writestr("background.js", background_js)
        zf.writestr("manifest.json", manifest_json)
        zf.close()
|
def configs(self, filters=None):
    """List configs

    Args:
        filters (dict): A map of filters to process on the configs
            list. Available filters: ``names``

    Returns (list): A list of configs
    """
    params = {}
    if filters:
        # Filters are serialized into the API's expected query format.
        params['filters'] = utils.convert_filters(filters)
    return self._result(self._get(self._url('/configs'), params=params), True)
|
def system_monitor_LineCard_threshold_marginal_threshold(self, **kwargs):
    """Auto Generated Code"""
    # Build the NETCONF config tree:
    # config/system-monitor/LineCard/threshold/marginal-threshold
    config = ET.Element("config")
    system_monitor = ET.SubElement(
        config, "system-monitor",
        xmlns="urn:brocade.com:mgmt:brocade-system-monitor")
    line_card = ET.SubElement(system_monitor, "LineCard")
    threshold = ET.SubElement(line_card, "threshold")
    marginal = ET.SubElement(threshold, "marginal-threshold")
    marginal.text = kwargs.pop('marginal_threshold')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.