signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def parity(x):
    """Return the parity of the permutation ``x``: 1 if even, -1 if odd.

    A permutation is even when it can be written as an even number of
    transpositions.  Each cycle of length ``c`` contributes ``c - 1``
    transpositions, so the permutation is even iff the sum over all
    cycles is even.

    For example ``(1, 2, 0, 3)`` has the cycle ``0 -> 1 -> 2 -> 0`` plus
    the fixed point ``3 -> 3``; two transpositions suffice, so the
    parity is even and 1 is returned.
    """
    perm = np.lexsort((x,))
    seen = np.zeros(len(x), bool)
    transpositions = 0
    for start in range(len(x)):
        if seen[start]:
            continue
        # Walk the cycle containing ``start``, marking every member.
        length = 1
        node = perm[start]
        while node != start:
            seen[node] = True
            node = perm[node]
            length += 1
        transpositions += length - 1
    return 1 if transpositions % 2 == 0 else -1
|
def iter_grants(self, as_json=True):
    """Yield grant records stored in the SQLite database.

    :param as_json: when True, XML rows are converted and JSON rows are
        parsed so every yielded record is a JSON object; when False, rows
        are yielded raw and JSON-format rows raise, since JSON cannot be
        converted back to XML.
    """
    self._connect()
    cursor = self.db_connection.cursor()
    for data, data_format in cursor.execute("SELECT data, format FROM grants"):
        if as_json:
            if data_format == 'xml':
                data = self.grantxml2json(data)
            elif data_format == 'json':
                data = json.loads(data)
        elif data_format == 'json':
            raise Exception("Cannot convert JSON source to XML output.")
        yield data
    self._disconnect()
|
def make_bubble_surface(dims=DEFAULT_DIMS, repeat=3):
    """Build a surface as the product of a sine wave along each axis.

    Args:
        dims (pair): the dimensions of the surface to create
        repeat (int): number of repetitions of the sine function across
            the surface (sets the wave frequency)

    Returns:
        surface: A surface.
    """
    grads = make_gradients(dims)
    freq = repeat * np.pi
    # Centre each gradient around zero before applying the sine.
    return np.sin((grads[0] - 0.5) * freq) * np.sin((grads[1] - 0.5) * freq)
|
def _build_int_array_el(el_name, parent, list_):
    """Build a soapenc:Array of ints named ``el_name`` as a child of ``parent``."""
    array_el = parent.add_child(el_name)
    array_el.add_attribute('xmlns:soapenc', 'http://schemas.xmlsoap.org/soap/encoding/')
    array_el.add_attribute('xsi:type', 'soapenc:Array')
    # SOAP encoding requires the element count in the arrayType attribute.
    array_el.add_attribute('soapenc:arrayType', 'xsd:int[{:d}]'.format(len(list_)))
    for value in list_:
        child = array_el.add_child('item', str(value))
        child.add_attribute('xsi:type', 'xsd:int')
    return array_el
|
def _fix_bias ( op_name , attrs , num_inputs ) :
"""A workaround for ' use _ bias ' attribute since onnx don ' t provide this attribute ,
we have to check the number of inputs to decide it ."""
|
if num_inputs == 3 :
attrs [ 'no_bias' ] = False
elif num_inputs == 2 :
attrs [ 'no_bias' ] = True
else :
raise ValueError ( "Unexpected number of inputs for: {}" . format ( op_name ) )
return attrs
|
def unproject(self, image_points):
    """Find (up to scale) the 3D coordinates of image points.

    Inverse of the `project` function; the resulting 3D points are only
    valid up to an unknown scale factor.

    Parameters
        image_points : (2, N) ndarray
            Image points
    Returns
        points : (3, N) ndarray
            3D coordinates (valid up to scale)
    """
    # Back-project through the inverse intrinsics, then normalize so the
    # third homogeneous coordinate is 1 before undistorting.
    rays = np.dot(self.inv_camera_matrix, to_homogeneous(image_points))
    rays = rays / rays[2]
    return self.invert(rays)
|
def on_menu(self, event):
    """Handle a menu selection event."""
    state = self.state
    selection = self.menu.find_selected(event)
    if selection is None:
        # Nothing selected; ignore the event.
        return
    selection.call_handler()
    # Forward the selection to the parent process.
    state.child_pipe_send.send(selection)
|
def getNeoParam(fn, FrameIndReq=None, ut1req=None, kineticsec=None, startUTC=None, cmosinit=None, verbose=False):
    """Extract frame-size, frame-index and timing parameters from an Andor
    Neo sCMOS FITS/TIFF file (Solis breaks recordings into smaller files).

    Verify that this timing estimate makes sense for your application!
    Regexp parsing of the filename or USERTXT1 is kept minimal as it is
    prone to error.

    inputs:
    cmosinit = {'firstrawind', 'lastrawind'}
    """
    if cmosinit is None:
        # Avoid a shared mutable default argument.
        cmosinit = {}
    fn = Path(fn).expanduser()
    nHeadBytes = 0
    # NOTE: the previous code used substring tests (``suffix in '.tiff'``)
    # which also matched '' and fragments such as 'f'; use explicit tuples.
    suffix = fn.suffix.lower()
    if suffix in ('.tif', '.tiff'):
        if tifffile is None:
            raise ImportError('pip install tifffile')
        # FIXME didn't the 2011 TIFFs have headers? maybe not.
        with tifffile.TiffFile(str(fn)) as f:
            Y, X = f[0].shape
            cmosinit = {'firstrawind': 1, 'lastrawind': len(f)}
    elif suffix in ('.fit', '.fits'):
        with fits.open(fn, mode='readonly', memmap=False) as f:
            kineticsec = f[0].header['KCT']
            # TODO start of night's recording (with some Solis versions)
            startseries = parse(f[0].header['DATE'] + 'Z')
            # TODO wish there was a better way
            try:
                frametxt = f[0].header['USERTXT1']
                m = re.search(r'(?<=Images\:)\d+-\d+(?=\.)', frametxt)
                inds = m.group(0).split('-')
            except KeyError:  # just a single file?
                # yes start with 1, end without adding 1 for Andor Solis
                inds = [1, f[0].shape[0]]
            cmosinit = {'firstrawind': int(inds[0]), 'lastrawind': int(inds[1])}
            # start = parse(f[0].header['FRAME'] + 'Z')  No, incorrect by
            # several hours with some 2015 Solis versions!
            Y, X = f[0].shape[-2:]
        startUTC = startseries.timestamp()
    # %% FrameInd relative to this file
    PixelsPerImage, BytesPerImage, BytesPerFrame = howbig(X, Y, nHeadBytes)
    FrameIndRel = whichframes(fn, FrameIndReq, kineticsec, ut1req, startUTC,
                              cmosinit['firstrawind'], cmosinit['lastrawind'],
                              BytesPerImage, BytesPerFrame, verbose)
    assert isinstance(FrameIndReq, int) or FrameIndReq is None, 'TODO: add multi-frame request case'
    rawFrameInd = arange(cmosinit['firstrawind'], cmosinit['lastrawind'] + 1, FrameIndReq, dtype=int64)
    finf = {'superx': X,
            'supery': Y,
            'nframeextract': FrameIndRel.size,
            'nframe': rawFrameInd.size,
            'frameindrel': FrameIndRel,
            'frameind': rawFrameInd,
            'kineticsec': kineticsec}
    # %% absolute frame timing (software, yikes)
    finf['ut1'] = frame2ut1(startUTC, kineticsec, rawFrameInd)
    return finf
|
def add(self, defn):
    """Adds the given Packet Definition to this Telemetry Dictionary.

    :raises util.YAMLError: if a packet with the same name already exists.
    """
    if defn.name in self:
        msg = "Duplicate packet name '%s'" % defn.name
        log.error(msg)
        raise util.YAMLError(msg)
    self[defn.name] = defn
|
def check_auto_merge_labeler(repo: GithubRepository, pull_id: int) -> Optional[CannotAutomergeError]:
    """Verify the automerge label was added by a collaborator with write access.

    References:
        https://developer.github.com/v3/issues/events/#list-events-for-an-issue
    """
    url = ("https://api.github.com/repos/{}/{}/issues/{}/events"
           "?access_token={}".format(repo.organization, repo.name, pull_id, repo.access_token))
    response = requests.get(url)
    if response.status_code != 200:
        raise RuntimeError(
            'Event check failed. Code: {}. Content: {}.'.format(
                response.status_code, response.content))
    events = json.loads(response.content.decode())
    label_events = [
        event for event in events
        if event['event'] == 'labeled' and event['label']['name'] in AUTO_MERGE_LABELS
    ]
    if not label_events:
        return CannotAutomergeError('"automerge" label was never added.')
    # Only the most recent labeling event matters.
    return check_collaborator_has_write(repo, label_events[-1]['actor']['login'])
|
def pop_message(self, till=None):
    """RETURN TUPLE (message, payload); CALLER IS RESPONSIBLE FOR CALLING
    message.delete() WHEN DONE.  DUMMY IMPLEMENTATION FOR DEBUGGING.
    """
    if not (till is None or isinstance(till, Signal)):
        Log.error("Expecting a signal")
    payload = self.pop(till=till)
    # Dummy message object: nothing to delete.
    return Null, payload
|
def _title_for_slice(self, truncate=50):
    """Build a plot title from the size-1 coordinates of this dataarray.

    If the dataarray has 1-dimensional coordinates or comes from a slice
    we can show that info in the title.

    Parameters
        truncate : integer
            maximum number of characters for title
    Returns
        title : string
            Can be used for plot titles
    """
    parts = [
        '{dim} = {v}'.format(dim=name, v=format_item(coord.values))
        for name, coord in self.coords.items()
        if coord.size == 1
    ]
    title = ', '.join(parts)
    if len(title) > truncate:
        # Leave room for the ellipsis so the result is exactly `truncate` long.
        title = title[:truncate - 3] + '...'
    return title
|
def get_mysqldump_args_and_env_from_url(url):
    """Derive `mysqldump` CLI arguments and environment from a database URL.

    :param url: Parsed database URL.
    :type url: urllib.urlparse.ParseResult
    :return: List of command line arguments as well as dictionary of
        environment variables that can be used to launch the MySQL dump
        process to obtain dump of the database.
    :rtype: tuple[list[str], dict[str, str]]
    """
    args = [
        # Column names are required for sanitation, so force them into
        # the generated `INSERT INTO` statements.
        "--complete-insert",
        # Multi-row ("extended") inserts shrink the dump and speed up
        # sanitation.
        "--extended-insert",
        # Cap single output lines at ~10 megabytes to bound memory use.
        "--net_buffer_length=10240",
        # The parsed URL is expected to always carry a hostname.
        "-h",
        url.hostname,
    ]
    env = {}
    if url.port is not None:
        args += ["-P", six.text_type(url.port)]
    if url.username:
        args += ["-u", url.username]
    if url.password:
        # Pass the password via the environment, never the command line.
        env["MYSQL_PWD"] = url.password
    if not url.path.startswith("/") or len(url.path) < 2:
        raise ValueError("Name of the database is missing from the URL")
    args.append(url.path[1:])
    return args, env
|
def get_children(self, *types):
    """Read (getList) children from IXN.

    Use this method to align with the current IXN configuration.

    :param types: requested child types; when empty, all child types are read.
    :return: list of all children objects of the requested types.
    """
    requested = types or self.get_all_child_types(self.obj_ref())
    found = OrderedDict()
    for child_type in requested:
        refs = self.api.getList(self.obj_ref(), child_type)
        found.update(self._build_children_objs(child_type, refs))
    return list(found.values())
|
def localeselector():
    """Default locale selector used in abilian applications."""
    # Prefer the locale stored on the logged-in user, when available.
    user = getattr(g, "user", None)
    locale = getattr(user, "locale", None) if user is not None else None
    if locale:
        return locale
    # Otherwise guess from the browser's Accept-Language header; the
    # supported languages come from BABEL_ACCEPT_LANGUAGES and the best
    # match wins.
    supported = current_app.config["BABEL_ACCEPT_LANGUAGES"]
    return request.accept_languages.best_match(supported)
|
def iter_annotation_value_pairs(graph) -> Iterable[Tuple[str, str]]:
    """Iterate over the key/value pairs, with duplicates, for each annotation used in a BEL graph.

    :param pybel.BELGraph graph: A BEL graph
    """
    for _, _, data in graph.edges(data=True):
        if ANNOTATIONS not in data:
            continue
        for key, values in data[ANNOTATIONS].items():
            for value in values:
                yield key, value
|
def lmfit_parameters(self):
    """Build a [`lmfit.Parameters`][1] object from
    `scipy_data_fitting.Fit.fitting_parameters`.

    Each parameter is assigned a key of the form `p_00000`, `p_00001`,
    etc., so `sorted(self.lmfit_parameters)` preserves the order of
    `scipy_data_fitting.Fit.fitting_parameters`.  Guess values and any
    `min`/`max` bounds given in the `lmfit` key are scaled by the
    parameter's prefix before being added.

    [1]: http://lmfit.github.io/lmfit-py/parameters.html#the-parameters-class
    """
    entries = []
    for param in self.fitting_parameters:
        options = dict(param['lmfit']) if 'lmfit' in param else {}
        scale = prefix_factor(param)
        # Bounds must live in the same scaled units as the value.
        for bound in ('min', 'max'):
            if bound in options:
                options[bound] = scale * options[bound]
        entries.append((scale * param['guess'], options))
    parameters = lmfit.Parameters()
    for index, (value, options) in enumerate(entries):
        parameters.add('p_' + "%05d" % index, value=value, **options)
    return parameters
|
def impulse(dur=None, one=1., zero=0.):
    """Impulse stream generator.

    Parameters
        dur:
            Duration, in number of samples; endless if not given.
    Returns
        Stream that starts with one (and only one) "1.0" and then repeats
        "0.0" for the given duration (if any) or endlessly.
    """
    endless = dur is None or (isinf(dur) and dur > 0)
    if endless:
        yield one
        while True:
            yield zero
    elif dur >= .5:
        yield one
        # Round the duration to the nearest whole number of samples;
        # the leading "one" accounts for the first sample.
        for _ in xrange(int(dur - .5)):
            yield zero
|
def edgepaths(self):
    """Returns the fixed EdgePaths or computes direct connections
    between supplied nodes."""
    paths = super(TriMesh, self).edgepaths
    # Propagate this element's coordinate reference system to the paths.
    paths.crs = self.crs
    return paths
|
def add_waveform(self, waveform):
    """Add a waveform to the plot.

    :param waveform: the waveform to be added
    :type waveform: :class:`~aeneas.plotter.PlotWaveform`
    :raises TypeError: if ``waveform`` is not an instance of
        :class:`~aeneas.plotter.PlotWaveform`
    """
    valid = isinstance(waveform, PlotWaveform)
    if not valid:
        self.log_exc(u"waveform must be an instance of PlotWaveform", None, True, TypeError)
    self.waveform = waveform
    self.log(u"Added waveform")
|
def getChannelColRowList(self, ra, dec, wantZeroOffset=False, allowIllegalReturnValues=True):
    """similar to getChannelColRow() but takes lists as input"""
    try:
        channels = self.pickAChannelList(ra, dec)
    except ValueError:
        logger.warning("WARN: %.7f %.7f not on any channel" % (ra, dec))
        return (0, 0, 0)
    count = len(channels)
    cols = np.zeros(count)
    rows = np.zeros(count)
    # Resolve col/row per channel, filling only the matching entries.
    for channel in set(channels):
        sel = (channels == channel)
        cols[sel], rows[sel] = self.getColRowWithinChannelList(
            ra[sel], dec[sel], channel, wantZeroOffset, allowIllegalReturnValues)
    return (channels, cols, rows)
|
def hyperrectangle(lower, upper, bdy=True):
    '''Returns the indicator function of a hyperrectangle.

    :param lower:
        Vector-like numpy array, the lower boundary of the hyperrectangle.
        len(lower) fixes the dimension.
    :param upper:
        Vector-like numpy array, the upper boundary of the hyperrectangle.
    :param bdy:
        Bool. When ``x`` lies exactly on the hyperrectangle's boundary,
        ``hr_indicator(x)`` returns ``True`` if and only if ``bdy=True``.
    '''
    # Copy the inputs so later mutation by the caller has no effect.
    lower = _np.array(lower)
    upper = _np.array(upper)
    dim = len(lower)
    if (upper <= lower).any():
        raise ValueError('invalid input; found upper <= lower')

    def hr_indicator(x):
        if len(x) != dim:
            raise ValueError('input has wrong dimension (%i instead of %i)' % (len(x), dim))
        if bdy:
            if (lower <= x).all() and (x <= upper).all():
                return True
        else:
            if (lower < x).all() and (x < upper).all():
                return True
        return False

    # Generate a descriptive docstring for the returned closure.
    hr_indicator.__doc__ = 'automatically generated hyperrectangle indicator function:'
    hr_indicator.__doc__ += '\nlower = ' + repr(lower)[6:-1]
    hr_indicator.__doc__ += '\nupper = ' + repr(upper)[6:-1]
    hr_indicator.__doc__ += '\nbdy = ' + str(bdy)
    return hr_indicator
|
def __command(self, ttype, tvalue):
    """Command parsing method

    Entry point for command parsing. Here is expected behaviour:
     * Handle command beginning if detected,
     * Call the appropriate sub-method (specified by __cstate) to
       handle the body,
     * Handle command ending or block opening if detected.

    Syntax:
        identifier arguments (";" / block)

    :param ttype: current token type
    :param tvalue: current token value
    :return: False if an error is encountered, True otherwise
    """
    if self.__cstate is None:
        # Not inside a command body: expect either the end of an open
        # block or the start of a new command.
        if ttype == "right_cbracket":
            # Close the innermost open block and move back up the tree.
            self.__up()
            self.__opened_blocks -= 1
            self.__cstate = None
            return True
        if ttype != "identifier":
            # A command must begin with an identifier.
            return False
        command = get_command_instance(tvalue.decode("ascii"), self.__curcommand)
        if command.get_type() == "test":
            # Test commands are only valid nested inside other commands.
            raise ParseError("%s may not appear as a first command" % command.name)
        if command.get_type() == "control" and command.accept_children and command.has_arguments():
            self.__set_expected("identifier")
        if self.__curcommand is not None:
            if not self.__curcommand.addchild(command):
                raise ParseError("%s unexpected after a %s" % (tvalue, self.__curcommand.name))
        # The new command becomes current; switch to argument parsing.
        self.__curcommand = command
        self.__cstate = self.__arguments
        return True
    # Inside a command: delegate the token to the current state handler.
    if self.__cstate(ttype, tvalue):
        return True
    # The state handler rejected the token: it may still legitimately
    # terminate the command, either by opening a block or via ";".
    if ttype == "left_cbracket":
        self.__opened_blocks += 1
        self.__cstate = None
        return True
    if ttype == "semicolon":
        self.__cstate = None
        if not self.__check_command_completion(testsemicolon=False):
            return False
        self.__curcommand.complete_cb()
        self.__up()
        return True
    return False
|
def _validate_hue ( df , hue ) :
"""The top - level ` ` hue ` ` parameter present in most plot types accepts a variety of input types . This method
condenses this variety into a single preferred format - - - an iterable - - - which is expected by all submethods working
with the data downstream of it .
Parameters
df : GeoDataFrame
The full data input , from which standardized ` ` hue ` ` information may need to be extracted .
hue : Series , GeoSeries , iterable , str
The data column whose entries are being discretely colorized , as ( loosely ) passed by the top - level ` ` hue ` `
variable .
required : boolean
Whether or not this parameter is required for the plot in question .
Returns
hue : iterable
The ` ` hue ` ` parameter input as an iterable ."""
|
if hue is None :
return None
elif isinstance ( hue , str ) :
hue = df [ hue ]
return hue
else :
return gpd . GeoSeries ( hue )
|
def pil2tensor(image: Union[NPImage, NPArray], dtype: np.dtype) -> TensorImage:
    "Convert PIL style `image` array to torch style image tensor."
    arr = np.asarray(image)
    if arr.ndim == 2:
        # Give grayscale input an explicit trailing channel axis.
        arr = np.expand_dims(arr, 2)
    # HxWxC -> CxHxW: the original pair of transposes (1,0,2) then (2,1,0)
    # composes to this single axis permutation.
    arr = np.transpose(arr, (2, 0, 1))
    return torch.from_numpy(arr.astype(dtype, copy=False))
|
def zlma(series, window=20, min_periods=None, kind="ema"):
    """John Ehlers' Zero lag (exponential) moving average
    https://en.wikipedia.org/wiki/Zero_lag_exponential_moving_average
    """
    if min_periods is None:
        min_periods = window
    lag = (window - 1) // 2
    # De-lag the series: 2 * price - price(lag bars ago).
    delagged = 2 * series - series.shift(lag)
    if kind == "hma":
        return hma(delagged, lag, min_periods)
    if kind in ('ewm', 'ema'):
        return wma(delagged, lag, min_periods)
    return sma(delagged, lag, min_periods)
|
def generate(bits, progress_func=None):
    """Generate a new private ECDSA key. This factory function can be used to
    generate a new host key or authentication key.

    NOTE(review): despite the historical RSA wording of this docstring,
    the body generates an ECDSA key, and both parameters are ignored by
    the underlying ``ECDSA.generate()`` call; they are kept only for
    interface compatibility with other key types.

    @param bits: number of bits the generated key should be (unused here).
    @type bits: int
    @param progress_func: an optional function to call at key points in
        key generation (unused here).
    @type progress_func: function
    @return: new private key
    @rtype: L{ECDSAKey}
    """
    signing_key = ECDSA.generate()
    key = ECDSAKey(vals=(signing_key, signing_key.get_verifying_key()))
    return key
|
def hit_expire(self, all_hits, hit_ids=None):
    '''Expire HITs: either every currently active HIT (``all_hits`` truthy)
    or the HITs named in ``hit_ids``.

    NOTE(review): when ``all_hits`` is falsy, ``hit_ids`` must be an
    iterable of HIT ids; leaving it as the default ``None`` raises a
    TypeError in the loop below -- confirm callers always pass it.
    '''
    if all_hits:
        # Replace any caller-supplied ids with every active HIT.
        hits_data = self.get_active_hits()
        hit_ids = [hit.options['hitid'] for hit in hits_data]
    for hit in hit_ids:
        success = self.amt_services.expire_hit(hit)
        if success:
            # Python 2 print statements: this module targets Python 2.
            if self.sandbox:
                print "expiring sandbox HIT", hit
            else:
                print "expiring live HIT", hit
|
def prepare_feats(feat_type, org_wav_dir=ORG_WAV_DIR, feat_dir=FEAT_DIR, tgt_wav_dir=TGT_WAV_DIR, org_xml_dir=ORG_XML_DIR, label_dir=LABEL_DIR):
    """Prepare the input features.

    Trims utterance WAVs out of the original recordings, then for
    ``feat_type == "phonemes_onehot"`` writes one-hot phoneme label arrays
    (.npy); otherwise converts each utterance WAV to 16k mono and runs
    acoustic feature extraction over the WORDLIST and TEXT subdirectories.

    NOTE(review): the first two directory checks use the module-level
    constants TGT_DIR / FEAT_DIR rather than the ``feat_dir`` parameter --
    confirm this is intentional when ``feat_dir`` is overridden.
    """
    if not os.path.isdir(TGT_DIR):
        os.makedirs(TGT_DIR)
    if not os.path.isdir(FEAT_DIR):
        os.makedirs(FEAT_DIR)
    if not os.path.isdir(os.path.join(feat_dir, "WORDLIST")):
        os.makedirs(os.path.join(feat_dir, "WORDLIST"))
    if not os.path.isdir(os.path.join(feat_dir, "TEXT")):
        os.makedirs(os.path.join(feat_dir, "TEXT"))
    # Extract utterances from WAVS.
    trim_wavs(org_wav_dir=org_wav_dir, tgt_wav_dir=tgt_wav_dir, org_xml_dir=org_xml_dir)
    # TODO Currently assumes that the wav trimming from XML has already been
    # done.
    # Collect utterance prefixes (subdir-relative paths without extension).
    prefixes = []
    for fn in os.listdir(os.path.join(tgt_wav_dir, "WORDLIST")):
        if fn.endswith(".wav"):
            pre, _ = os.path.splitext(fn)
            prefixes.append(os.path.join("WORDLIST", pre))
    for fn in os.listdir(os.path.join(tgt_wav_dir, "TEXT")):
        if fn.endswith(".wav"):
            pre, _ = os.path.splitext(fn)
            prefixes.append(os.path.join("TEXT", pre))
    if feat_type == "phonemes_onehot":
        import numpy as np
        # prepare_labels("phonemes")
        for prefix in prefixes:
            label_fn = os.path.join(label_dir, "%s.phonemes" % prefix)
            out_fn = os.path.join(feat_dir, "%s.phonemes_onehot" % prefix)
            try:
                with open(label_fn) as label_f:
                    # Labels are whitespace-separated on the first line.
                    labels = label_f.readlines()[0].split()
            except FileNotFoundError:
                # Skip utterances without phoneme labels.
                continue
            indices = [PHONEMES_TO_INDICES[label] for label in labels]
            one_hots = [[0] * len(PHONEMES) for _ in labels]
            for i, index in enumerate(indices):
                one_hots[i][index] = 1
            one_hots = np.array(one_hots)
            np.save(out_fn, one_hots)
    else:  # Otherwise,
        for prefix in prefixes:  # Convert the wave to 16k mono.
            wav_fn = os.path.join(tgt_wav_dir, "%s.wav" % prefix)
            mono16k_wav_fn = os.path.join(feat_dir, "%s.wav" % prefix)
            if not os.path.isfile(mono16k_wav_fn):
                logging.info("Normalizing wav {} to a 16k 16KHz mono {}".format(wav_fn, mono16k_wav_fn))
                feat_extract.convert_wav(wav_fn, mono16k_wav_fn)
        # Extract features from the wavs.
        feat_extract.from_dir(Path(os.path.join(feat_dir, "WORDLIST")), feat_type=feat_type)
        feat_extract.from_dir(Path(os.path.join(feat_dir, "TEXT")), feat_type=feat_type)
|
def _merge_summary(in_files, out_file, data):
    """Create one big summary file for disambiguation from multiple splits.

    The header line is copied from the first input file only; every
    subsequent (non-header) line of each input is appended, yielding a
    single summary with exactly one header row.

    :param in_files: per-split summary files to merge
    :param out_file: path of the merged summary to create
    :param data: bcbio sample data, passed to the transactional writer
    :return: path to the merged summary file
    """
    if not utils.file_exists(out_file):
        with file_transaction(data, out_file) as tx_out_file:
            with open(tx_out_file, "w") as out_handle:
                for i, in_file in enumerate(in_files):
                    with open(in_file) as in_handle:
                        for j, line in enumerate(in_handle):
                            if j == 0:
                                # Keep the header from the first file only.
                                if i == 0:
                                    out_handle.write(line)
                            else:
                                out_handle.write(line)
    return out_file
|
def _get_tap_file_path ( self , test_case ) :
"""Get the TAP output file path for the test case ."""
|
sanitized_test_case = test_case . translate ( self . _sanitized_table )
tap_file = sanitized_test_case + ".tap"
if self . outdir :
return os . path . join ( self . outdir , tap_file )
return tap_file
|
def build_payload(self):
    '''Builds the payload that will be sent in tracker_request'''
    # Identify the torrent by the SHA-1 of its bencoded info dict.
    info_hash = hashlib.sha1(tparser.bencode(self.torrent_dict['info']))
    self.hash_string = info_hash.digest()
    # Azureus-style peer id: '-DR' + version + 13 random alphanumerics.
    self.peer_id = '-DR' + VERSION + ''.join(random.sample(ALPHANUM, 13))
    assert len(self.peer_id) == 20
    return {
        'info_hash': self.hash_string,
        'peer_id': self.peer_id,
        'port': self.port,
        'uploaded': 0,
        'downloaded': 0,
        'left': self.length,
        'compact': 1,
        'supportcrypto': 1,
        'event': 'started',
    }
|
def get_fields(self, request, obj=None):
    """For subclasses of ``Orderable``, the ``_order`` field must
    always be present and be the last field."""
    fields = super(BaseDynamicInlineAdmin, self).get_fields(request, obj)
    if not issubclass(self.model, Orderable):
        return fields
    fields = list(fields)
    # Move (or add) "_order" so it ends up last.
    if "_order" in fields:
        fields.remove("_order")
    fields.append("_order")
    return fields
|
def create_app(name, site, sourcepath, apppool=None):
    '''Create an IIS application.

    .. note:
        This function only validates against the application name, and will
        return True even if the application already exists with a different
        configuration. It will not modify the configuration of an existing
        application.

    :param str name: The IIS application.
    :param str site: The IIS site name.
    :param str sourcepath: The physical path.
    :param str apppool: The name of the IIS application pool.

    Example of usage with only the required arguments:

    .. code-block:: yaml

        site0-v1-app:
          win_iis.create_app:
            - name: v1
            - site: site0
            - sourcepath: C:\\inetpub\\site0\\v1

    Example of usage specifying all available arguments:

    .. code-block:: yaml

        site0-v1-app:
          win_iis.create_app:
            - name: v1
            - site: site0
            - sourcepath: C:\\inetpub\\site0\\v1
            - apppool: site0
    '''
    ret = {'name': name, 'changes': {}, 'comment': '', 'result': None}
    current_apps = __salt__['win_iis.list_apps'](site)
    if name in current_apps:
        # Nothing to do; do not touch an existing app's configuration.
        ret['result'] = True
        ret['comment'] = 'Application already present: {0}'.format(name)
    elif __opts__['test']:
        # Dry run: report what would change without applying it.
        ret['comment'] = 'Application will be created: {0}'.format(name)
        ret['changes'] = {'old': None, 'new': name}
    else:
        ret['comment'] = 'Created application: {0}'.format(name)
        ret['changes'] = {'old': None, 'new': name}
        ret['result'] = __salt__['win_iis.create_app'](name, site, sourcepath, apppool)
    return ret
|
def filter_pages(pages, pagenum, pagename):
    """Select pages by page number and/or page name.

    :param pages: iterable of page objects (each exposing a ``name`` attribute)
    :param pagenum: 1-based page number to select, or a falsy value to skip
    :param pagename: page name to select, or a falsy value to skip
    :return: list of matching pages
    :raises IndexError: if ``pagenum`` is out of range or no page matches
        ``pagename``
    """
    if pagenum:
        pages = list(pages)
        # Reject non-positive numbers explicitly: a negative value would
        # otherwise index silently from the end of the list and return
        # the wrong page.
        if not 1 <= pagenum <= len(pages):
            raise IndexError('Invalid page number: %d' % pagenum)
        pages = [pages[pagenum - 1]]
    if pagename:
        pages = [page for page in pages if page.name == pagename]
        if not pages:
            raise IndexError('Page not found: pagename=%s' % pagename)
    return pages
|
def _children_(self):
    """get children objects

    :rtype: a dict of children {child_name: child_object}
    """
    found = {}

    def visit(name, obj):
        # Record BaseObj instances (skipping weak proxies, which are
        # back-references) and recurse into lists and dicts, composing
        # JSON-pointer style names as we descend.
        if isinstance(obj, BaseObj):
            if not isinstance(obj, weakref.ProxyTypes):
                found[name] = obj
        elif isinstance(obj, list):
            for idx, item in enumerate(obj):
                visit(jp_compose(str(idx), name), item)
        elif isinstance(obj, dict):
            for key, item in six.iteritems(obj):
                visit(jp_compose(key, name), item)

    for field in self._field_names_:
        visit(jp_compose(field), getattr(self, field))
    return found
|
def base36encode(number):
    """Converts an integer into a base36 string."""
    digits = "0123456789abcdefghijklmnopqrstuvwxyz"
    if number < 0:
        # Encode the magnitude and prepend the sign.
        return '-' + base36encode(-number)
    if number < len(digits):
        # Single digit (also covers zero, which the loop below would miss).
        return digits[number]
    chunks = []
    while number:
        number, rem = divmod(number, len(digits))
        chunks.append(digits[rem])
    return ''.join(reversed(chunks))
|
def remove_listener(self, registration_id):
    """Removes the specified item listener. Returns silently if the
    specified listener was not added before.

    :param registration_id: (str), id of the listener to be deleted.
    :return: (bool), ``true`` if the item listener is removed, ``false`` otherwise.
    """
    def encode(reg_id):
        # Build the codec request to deregister this listener by id.
        return queue_remove_listener_codec.encode_request(self.name, reg_id)

    return self._stop_listening(registration_id, encode)
|
def get_comments_by_books(self, book_ids):
    """Gets the list of ``Comments`` corresponding to a list of ``Books``.

    arg:    book_ids (osid.id.IdList): list of book ``Ids``
    return: (osid.commenting.CommentList) - list of comments
    raise:  NullArgument - ``book_ids`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Mirrors osid.resource.ResourceBinSession.get_resources_by_bins.
    all_comments = []
    for book_id in book_ids:
        all_comments.extend(self.get_comments_by_book(book_id))
    return objects.CommentList(all_comments)
|
def libvlc_media_player_set_chapter(p_mi, i_chapter):
    '''Set movie chapter (if applicable).

    @param p_mi: the Media Player.
    @param i_chapter: chapter number to play.
    '''
    # Reuse the cached binding when available, otherwise create it.
    f = _Cfunctions.get('libvlc_media_player_set_chapter', None)
    if f is None:
        f = _Cfunction('libvlc_media_player_set_chapter', ((1,), (1,),),
                       None, None, MediaPlayer, ctypes.c_int)
    return f(p_mi, i_chapter)
|
def default_scan(self, region='mainland', expected_num=20, val_thr_num=4,
                 queue_timeout=3, val_timeout=5, out_file='proxies.json',
                 src_files=None):
    """Default scan method, simplifying the usage of the `scan` method.

    Registers the following scan functions:
      1. scan_file (one per entry of ``src_files``)
      2. scan_cnproxy (if region is mainland)
      3. scan_free_proxy_list (if region is overseas)
      4. scan_ip84
      5. scan_mimiip
    After scanning, all proxy info is saved in ``out_file``.

    Args:
        region: Either 'mainland' or 'overseas'.
        expected_num: Expected number of proxies; large values can make
            the scan take a long time.
        val_thr_num: Number of threads used for validating proxies.
        queue_timeout: Timeout for getting a candidate proxy from the queue.
        val_timeout: Timeout when connecting the test url via a candidate.
        out_file: File name of the output file saving all the proxy info.
        src_files: A list of file names to scan (a single str is accepted).
    """
    if expected_num > 30:
        self.logger.warn('The more proxy you expect, the more time it '
                         'will take. It is highly recommended to limit the'
                         ' expected num under 30.')
    proxy_scanner = ProxyScanner()
    if src_files is None:
        scan_sources = []
    elif isinstance(src_files, str):
        scan_sources = [src_files]
    else:
        scan_sources = src_files
    for filename in scan_sources:
        proxy_scanner.register_func(proxy_scanner.scan_file, {'src_file': filename})
    if region == 'mainland':
        proxy_scanner.register_func(proxy_scanner.scan_cnproxy, {})
    elif region == 'overseas':
        proxy_scanner.register_func(proxy_scanner.scan_free_proxy_list, {})
    proxy_scanner.register_func(proxy_scanner.scan_ip84, {'region': region, 'page': 5})
    proxy_scanner.register_func(proxy_scanner.scan_mimiip, {'region': region, 'page': 5})
    self.scan(proxy_scanner, expected_num, val_thr_num, queue_timeout,
              val_timeout, out_file)
|
def _ReadPaddingDataTypeDefinition(self, definitions_registry, definition_values,
                                   definition_name, is_member=False):
    """Reads a padding data type definition.

    Args:
      definitions_registry (DataTypeDefinitionsRegistry): data type
          definitions registry.
      definition_values (dict[str, object]): definition values.
      definition_name (str): name of the definition.
      is_member (Optional[bool]): True if the data type definition is a
          member data type definition.

    Returns:
      PaddingDefinition: padding definition.

    Raises:
      DefinitionReaderError: if the definitions values are missing or if
          the format is incorrect.
    """
    if not is_member:
        error_message = 'data type only supported as member'
        raise errors.DefinitionReaderError(definition_name, error_message)
    definition_object = self._ReadDataTypeDefinition(
        definitions_registry, definition_values, data_types.PaddingDefinition,
        definition_name, self._SUPPORTED_DEFINITION_VALUES_PADDING)
    alignment_size = definition_values.get('alignment_size', None)
    if not alignment_size:
        error_message = 'missing alignment_size'
        raise errors.DefinitionReaderError(definition_name, error_message)
    try:
        # Bug fix: keep the converted value instead of discarding it, so a
        # string value such as '4' compares correctly against the supported
        # integer sizes below. Also catch TypeError for non-numeric types.
        alignment_size = int(alignment_size)
    except (TypeError, ValueError):
        error_message = 'unsupported alignment size attribute: {0!s}'.format(
            alignment_size)
        raise errors.DefinitionReaderError(definition_name, error_message)
    if alignment_size not in (2, 4, 8, 16):
        error_message = 'unsupported alignment size value: {0!s}'.format(
            alignment_size)
        raise errors.DefinitionReaderError(definition_name, error_message)
    definition_object.alignment_size = alignment_size
    return definition_object
|
def iter_doc_objs(self, **kwargs):
    """Generator over all detected documents (e.g. nexson studies),
    yielding (doc_id, deserialized doc object) pairs.

    Order is by shard, but arbitrary within shards.
    @TEMP not locked to prevent doc creation/deletion
    """
    for shard in self._shards:
        yield from shard.iter_doc_objs(**kwargs)
|
def ftp_url(self):  # type: () -> Text
    """Get the FTP url this filesystem will open."""
    # Omit the port for the FTP default (21).
    if self.port == 21:
        return "ftp://{}".format(self.host)
    return "ftp://{}:{}".format(self.host, self.port)
|
def map(self, data, layout):
    """Assign data points to panels.

    Parameters
    ----------
    data : DataFrame
        Data for a layer
    layout : DataFrame
        As returned by self.compute_layout

    Returns
    -------
    data : DataFrame
        Data with all points mapped to the panels
        on which they will be plotted.

    Raises
    ------
    NotImplementedError
        Always; subclasses must override this method.
    """
    # Bug fix: the original used `self.__class` (a name-mangled, undefined
    # attribute) which raised AttributeError instead of the intended
    # NotImplementedError; use `self.__class__`.
    msg = "{} should implement this method."
    raise NotImplementedError(msg.format(self.__class__.__name__))
|
def _get_connection(self):
    """Make SSH connection to the IOS XE device.

    The external ncclient library is used for creating this connection.
    This method keeps state of any existing connections and reuses them if
    already connected. Also interfaces (except management) are typically
    disabled by default when it is booted. So if connecting for the first
    time, driver will enable all other interfaces and keep that status in
    the `_itfcs_enabled` flag.
    """
    try:
        # Reuse a live NETCONF session when one already exists.
        if self._ncc_connection and self._ncc_connection.connected:
            return self._ncc_connection
        else:  # ncclient needs 'name' to be 'csr' in order to communicate
            # with the device in the correct way.
            self._ncc_connection = manager.connect(
                host=self._host_ip, port=self._host_ssh_port,
                username=self._username, password=self._password,
                device_params={'name': "csr"}, timeout=self._timeout)
            if not self._itfcs_enabled:
                # One-time enablement of the non-management interfaces.
                self._itfcs_enabled = self._enable_itfcs(self._ncc_connection)
        return self._ncc_connection
    except Exception as e:
        # NOTE(review): `e.message` is Python 2 only; on Python 3 this line
        # itself raises AttributeError -- confirm the supported interpreter.
        conn_params = {'host': self._host_ip, 'port': self._host_ssh_port,
                       'user': self._username, 'timeout': self._timeout,
                       'reason': e.message}
        raise cfg_exc.ConnectionException(**conn_params)
|
def set_element_tail(parent_to_parse, element_path=None, element_tail=u''):
    """Assign the text following the parsed parent element and return it.

    If element_path is provided and doesn't exist, it is inserted with
    element_tail.
    :see: get_element(parent_to_parse, element_path)
    """
    updated_tail = _set_element_property(
        parent_to_parse, element_path, _ELEM_TAIL, element_tail)
    return updated_tail
|
def forwarder(frontend, backend):
    """Simple pub/sub forwarder.

    :param int frontend: frontend zeromq port (SUB socket bound locally)
    :param int backend: backend zeromq port (PUB socket bound locally)
    """
    # Bug fix: the original `finally` block closed both sockets
    # unconditionally, which raised NameError/UnboundLocalError when
    # socket or context creation itself failed. Pre-bind to None and
    # guard each cleanup step.
    context = None
    front_sub = None
    back_pub = None
    try:
        context = zmq.Context()
        front_sub = context.socket(zmq.SUB)
        front_sub.bind("tcp://*:%d" % frontend)
        # Subscribe to every topic.
        front_sub.setsockopt_string(zmq.SUBSCRIBE, "")
        back_pub = context.socket(zmq.PUB)
        back_pub.bind("tcp://*:%d" % backend)
        print("forwarder started, backend on port : %d\tfrontend on port: %d" % (backend, frontend))
        # Blocks, shuttling messages until the context terminates.
        zmq.proxy(front_sub, back_pub)
    except Exception as e:
        print(e)
    finally:
        if front_sub is not None:
            front_sub.close()
        if back_pub is not None:
            back_pub.close()
        if context is not None:
            context.term()
|
def _fill_untouched ( idx , ret , fill_value ) :
"""any elements of ret not indexed by idx are set to fill _ value ."""
|
untouched = np . ones_like ( ret , dtype = bool )
untouched [ idx ] = False
ret [ untouched ] = fill_value
|
def get_groups(self, load):
    '''Ask the eauth provider named in ``load`` which groups the user
    belongs to.

    Returns False when the load carries no eauth or the provider exposes
    no ``groups`` function, and None when the provider itself errors out.'''
    if 'eauth' not in load:
        return False
    group_fun = '{0}.groups'.format(load['eauth'])
    if group_fun not in self.auth:
        return False
    fun = self.auth[group_fun]
    fcall = salt.utils.args.format_call(
        fun, load, expected_extra_kws=AUTH_INTERNAL_KEYWORDS)
    try:
        return fun(*fcall['args'], **fcall['kwargs'])
    except IndexError:
        return False
    except Exception:
        return None
|
def turb45(msg):
    """Turbulence.

    Args:
        msg (String): 28 bytes hexadecimal message string

    Returns:
        int: Turbulence level. 0=NIL, 1=Light, 2=Moderate, 3=Severe
    """
    bits = hex2bin(data(msg))
    # First bit is the status flag; '0' means the field is unavailable.
    if bits[0] == '0':
        return None
    return bin2int(bits[1:3])
|
def pack(self, value=None):
    """Pack the value as a binary representation.

    :attr:`data` is packed before calling :meth:`.GenericMessage.pack`.
    After that, :attr:`data`'s value is restored.

    Returns:
        bytes: The binary representation.

    Raises:
        :exc:`~.exceptions.PackException`: If pack fails.
    """
    # Delegate when given another instance of the same message type.
    if isinstance(value, type(self)):
        return value.pack()
    if value is not None:
        msg = "{} is not an instance of {}".format(value, type(self).__name__)
        raise PackException(msg)
    # Temporarily replace non-bytes data with its packed form.
    original_data = None
    if self.data is not None and not isinstance(self.data, bytes):
        original_data = self.data
        self.data = self.data.pack()
    packed = super().pack()
    if original_data is not None:
        self.data = original_data
    return packed
|
def current_rev_reg_id(base_dir: str, cd_id: str) -> str:
    """Return the current revocation registry identifier for the input
    credential definition identifier, in the input directory.

    Raise AbsentTails if no corresponding tails file exists, signifying
    that no such revocation registry is defined.

    :param base_dir: base directory for tails files, thereafter split by cred def id
    :param cd_id: credential definition identifier of interest
    :return: identifier for current revocation registry on input credential definition identifier
    """
    LOGGER.debug('Tails.current_rev_reg_id >>> base_dir: %s, cd_id: %s', base_dir, cd_id)
    if not ok_cred_def_id(cd_id):
        LOGGER.debug('Tails.current_rev_reg_id <!< Bad cred def id %s', cd_id)
        raise BadIdentifier('Bad cred def id {}'.format(cd_id))
    # Compare tags numerically so that 10 > 9, not '9' > '10'.
    tags = [
        int(rev_reg_id2tag(basename(link)))
        for link in Tails.links(base_dir)
        if cd_id in basename(link)
    ]
    if not tags:
        raise AbsentTails('No tails files present for cred def id {}'.format(cd_id))
    rv = rev_reg_id(cd_id, str(max(tags)))
    LOGGER.debug('Tails.current_rev_reg_id <<< %s', rv)
    return rv
|
def _load(self):
    """Load the plugin by invoking its init hook.

    :raises: errors.PluginInitError
    """
    try:
        self.init()
    except Exception as exc:
        log.exception("Load failed!")
        raise errors.PluginInitError('%s' % exc)
    else:
        # Record successful initialization.
        self.__status = self.__LOADED
|
def ConsultarCTG(self, numero_carta_de_porte=None, numero_ctg=None, patente=None,
                 cuit_solicitante=None, cuit_destino=None,
                 fecha_emision_desde=None, fecha_emision_hasta=None):
    """Query CTGs matching the given criteria ('consultarCTG' operation).

    All parameters are optional filters and are forwarded to the web
    service unchanged. On a non-empty result, stores the raw rows in
    ``self.DatosCTG``, calls ``self.LeerDatosCTG(pop=False)`` and returns
    True; otherwise resets ``DatosCTG`` to [] and returns '' (falsy).
    """
    ret = self.client.consultarCTG(request=dict(
        auth={
            'token': self.Token,
            'sign': self.Sign,
            'cuitRepresentado': self.Cuit,
        },
        consultarCTGDatos=dict(
            cartaPorte=numero_carta_de_porte,
            ctg=numero_ctg,
            patente=patente,
            cuitSolicitante=cuit_solicitante,
            cuitDestino=cuit_destino,
            fechaEmisionDesde=fecha_emision_desde,
            fechaEmisionHasta=fecha_emision_hasta,
        )))['response']
    # Surface any service-level errors contained in the response.
    self.__analizar_errores(ret)
    datos = ret.get('arrayDatosConsultarCTG')
    if datos:
        self.DatosCTG = datos
        self.LeerDatosCTG(pop=False)
        return True
    else:
        self.DatosCTG = []
        return ''
|
def dentategyrus(adjusted=True):
    """Dentate Gyrus dataset from Hochgerner et al. (2018).

    Dentate gyrus is part of the hippocampus involved in learning, episodic
    memory formation and spatial coding. It is measured using 10X Genomics
    Chromium and described in Hochgerner et al. (2018). The data consists
    of 25,919 genes across 3,396 cells and provides several interesting
    characteristics.

    Returns
    -------
    Returns `adata` object
    """
    if adjusted:
        # Pre-processed AnnData file (h5ad).
        filename = 'data/DentateGyrus/10X43_1.h5ad'
        url = 'https://github.com/theislab/scvelo_notebooks/raw/master/data/DentateGyrus/10X43_1.h5ad'
        adata = read(filename, backup_url=url, sparse=True, cache=True)
    else:
        # Raw loom file: strip extra layers, then attach cluster labels
        # and UMAP coordinates downloaded as separate .npy files.
        filename = 'data/DentateGyrus/10X43_1.loom'
        url = 'http://pklab.med.harvard.edu/velocyto/DG1/10X43_1.loom'
        adata = read(filename, backup_url=url, cleanup=True, sparse=True, cache=True)
        cleanup(adata, clean='all', keep={'spliced', 'unspliced', 'ambiguous'})
        url_louvain = 'https://github.com/theislab/scvelo_notebooks/raw/master/data/DentateGyrus/DG_clusters.npy'
        url_umap = 'https://github.com/theislab/scvelo_notebooks/raw/master/data/DentateGyrus/DG_umap.npy'
        adata.obs['clusters'] = load('./data/DentateGyrus/DG_clusters.npy', url_louvain)
        adata.obsm['X_umap'] = load('./data/DentateGyrus/DG_umap.npy', url_umap)
        adata.obs['clusters'] = pd.Categorical(adata.obs['clusters'])
    return adata
|
def send_request(self, method, url, headers=None, json_data=None, retry=True):
    """Send requests to Skybell, logging in and retrying once on failure."""
    # Authenticate first for any endpoint other than the login URL.
    if not self.cache(CONST.ACCESS_TOKEN) and url != CONST.LOGIN_URL:
        self.login()
    headers = headers or {}
    # Re-read the token here so a fresh login above is picked up.
    token = self.cache(CONST.ACCESS_TOKEN)
    if token:
        headers['Authorization'] = 'Bearer ' + token
    headers['user-agent'] = (
        'SkyBell/3.4.1 (iPhone9,2; iOS 11.0; loc=en_US; lang=en-US) '
        'com.skybell.doorbell/1')
    headers['content-type'] = 'application/json'
    headers['accepts'] = '*/*'
    headers['x-skybell-app-id'] = self.cache(CONST.APP_ID)
    headers['x-skybell-client-id'] = self.cache(CONST.CLIENT_ID)
    _LOGGER.debug("HTTP %s %s Request with headers: %s", method, url, headers)
    try:
        response = getattr(self._session, method)(
            url, headers=headers, json=json_data)
        _LOGGER.debug("%s %s", response, response.text)
        if response and response.status_code < 400:
            return response
    except RequestException as exc:
        _LOGGER.warning("Skybell request exception: %s", exc)
    # Either the request raised or returned an error status: retry once
    # after re-authenticating, then give up.
    if retry:
        self.login()
        return self.send_request(method, url, headers, json_data, False)
    raise SkybellException(ERROR.REQUEST, "Retry failed")
|
def get(self):
    '''taobao.time.get

    Fetch the current time of the Taobao system.'''
    request = TOPRequest('taobao.time.get')
    response = self.execute(request)
    self.create(response)
    return self.time
|
def check_ns_run_members(run):
    """Check nested sampling run member keys and values.

    Parameters
    ----------
    run: dict
        nested sampling run to check.

    Raises
    ------
    AssertionError
        if run does not have expected properties.
    """
    mandatory = ('logl', 'nlive_array', 'theta', 'thread_labels',
                 'thread_min_max')
    remaining = list(run.keys())
    # Mandatory keys must all be present.
    for key in mandatory:
        assert key in remaining
        remaining.remove(key)
    # Optional keys are allowed but not required.
    for optional_key in ('output',):
        if optional_key in remaining:
            remaining.remove(optional_key)
    # Anything left over is unexpected.
    assert not remaining, 'Unexpected keys in ns_run: ' + str(remaining)
    # All mandatory members must be numpy arrays.
    for key in mandatory:
        assert isinstance(run[key], np.ndarray), (
            key + ' is type ' + type(run[key]).__name__)
    # Shape consistency checks.
    assert run['logl'].ndim == 1
    assert run['logl'].shape == run['nlive_array'].shape
    assert run['logl'].shape == run['thread_labels'].shape
    assert run['theta'].ndim == 2
    assert run['logl'].shape[0] == run['theta'].shape[0]
|
def shouldReportBuild(self, builder, buildnum):
    """Return True if this build should be reported for this contact
    (eliminating duplicates), and record the report for later."""
    # Already reported? Then suppress the duplicate.
    if any(b == builder and n == buildnum
           for _, b, n in self.reported_builds):
        return False
    self.reported_builds.append([util.now(), builder, buildnum])
    # Expire reports older than 60 seconds.
    horizon = util.now() - 60
    while self.reported_builds and self.reported_builds[0][0] < horizon:
        self.reported_builds.pop(0)
    # A new one: report it.
    return True
|
def lfsr_next_one_seed(seed_iter, min_value_shift):
    """High-quality seeding for LFSR generators.

    The LFSR generator components discard a certain number of their lower
    bits when generating each output. The significant bits of their state
    must not all be zero. We must ensure that when seeding the generator.

    In case generators are seeded from an incrementing input (such as a
    system timer), and between increments only the lower bits may change,
    we would also like the lower bits of the input to change the initial
    state, and not just be discarded. So we do basic manipulation of the
    seed input value to ensure that all bits of the seed input affect the
    initial state.

    Returns a 32-bit working seed; a missing/None seed yields 0xFFFFFFFF.
    """
    try:
        # Bug fix: use the builtin next() so this works on Python 2.6+ and
        # Python 3 alike (seed_iter.next() is Python 2 only).
        seed = next(seed_iter)
    except StopIteration:
        return 0xFFFFFFFF
    if seed is None:
        return 0xFFFFFFFF
    seed = int(seed) & 0xFFFFFFFF
    # Fold the low 16 bits into the high half so they always matter.
    working_seed = (seed ^ (seed << 16)) & 0xFFFFFFFF
    min_value = 1 << min_value_shift
    if working_seed < min_value:
        # Significant bits would be all zero; try shifting the low bits up.
        working_seed = (seed << 24) & 0xFFFFFFFF
        if working_seed < min_value:
            working_seed ^= 0xFFFFFFFF
    return working_seed
|
def fromwgs84(lat, lng, pkm=False):
    """Convert coordinates from WGS84 to TWD97.

    pkm true for Penghu, Kinmen and Matsu area.

    The latitude and longitude can be in the following formats:
        [+/-]DDD°MMM'SSS.SSSS" (unicode)
        [+/-]DDD°MMM.MMMM' (unicode)
        [+/-]DDD.DDDDD (string, unicode or float)

    The returned coordinates are in meters.
    """
    # Central meridian differs for the Penghu/Kinmen/Matsu zone.
    _lng0 = lng0pkm if pkm else lng0
    lat = radians(todegdec(lat))
    lng = radians(todegdec(lng))
    # Conformal latitude term of the transverse Mercator projection
    # (n is the module-level third flattening constant).
    t = sinh((atanh(sin(lat)) - 2 * pow(n, 0.5) / (1 + n) * atanh(2 * pow(n, 0.5) / (1 + n) * sin(lat))))
    epsilonp = atan(t / cos(lng - _lng0))
    etap = atan(sin(lng - _lng0) / pow(1 + t * t, 0.5))
    # Easting/northing from the truncated series expansion; alpha1..alpha3,
    # E0, N0, k0 and A are module-level projection constants.
    E = E0 + k0 * A * (etap + alpha1 * cos(2 * 1 * epsilonp) * sinh(2 * 1 * etap) +
                       alpha2 * cos(2 * 2 * epsilonp) * sinh(2 * 2 * etap) +
                       alpha3 * cos(2 * 3 * epsilonp) * sinh(2 * 3 * etap))
    N = N0 + k0 * A * (epsilonp + alpha1 * sin(2 * 1 * epsilonp) * cosh(2 * 1 * etap) +
                       alpha2 * sin(2 * 2 * epsilonp) * cosh(2 * 2 * etap) +
                       alpha3 * sin(2 * 3 * epsilonp) * cosh(2 * 3 * etap))
    # NOTE(review): the factor 1000 suggests E/N above are in kilometers --
    # confirm against the definitions of E0/N0/A.
    return E * 1000, N * 1000
|
def set_data(self, *args):
    """Recompute the day count between the two dates and display it."""
    start = self.begining.get_data() or formats.DATE_DEFAULT
    end = self.end.get_data() or formats.DATE_DEFAULT
    # Inclusive day count, clamped at zero for reversed ranges.
    nb_days = max((end - start).days + 1, 0)
    suffix = " jours" if nb_days >= 2 else " jour"
    self.setText(str(nb_days) + suffix)
|
def move(self, destination, remove_tombstone=True):
    '''Move this resource to another location.

    By default this removes the tombstone left at the resource's original
    URI; pass remove_tombstone=False to keep it on a successful move.
    Other resources' Fedora-managed triples that point to this resource
    *will* point to the new URI after the move.

    Args:
        destination (rdflib.term.URIRef, str): URI location to move resource
        remove_tombstone (bool): defaults to True; set False to keep tombstone

    Returns:
        (rdflib.term.URIRef) URI of the moved resource
    '''
    destination_uri = self.repo.parse_uri(destination)
    response = self.repo.api.http_request(
        'MOVE', self.uri, data=None,
        headers={'Destination': destination_uri.toPython()})
    if response.status_code != 201:
        raise Exception('HTTP %s, could not move resource %s to %s'
                        % (response.status_code, self.uri, destination_uri))
    # The resource no longer exists at its old URI.
    self.exists = False
    if remove_tombstone:
        self.repo.api.http_request('DELETE', "%s/fcr:tombstone" % self.uri)
    # Update uri, refresh, and return.
    self.uri = destination_uri
    self.refresh()
    return destination_uri
|
def _merge(self, key, value):
    """Internal merge logic implementation to allow merging of values when
    setting attributes/items.

    :param key: Attribute name or item key
    :type key: str
    :param value: Value to set attribute/item as.
    :type value: object
    :rtype: None
    """
    method = self._merge_method(key)
    if method is None:
        # No merge strategy: plain assignment.
        super(MergingDict, self).__setitem__(key, value)
        return
    # Strings are special: update-style methods (e.g. set.update) iterate
    # their argument, which would merge a string character by character.
    if method == "update" and is_str(value):
        value = [value]
    # Appending a list onto a list means the caller wants every element
    # appended, so extend instead.
    if (method == "append" and isinstance(self[key], list)
            and isinstance(value, list)):
        method = "extend"
    getattr(self[key], method)(value)
|
def run(self, cmd):
    """Run ``cmd`` in a shell, printing the start time and the command's
    captured standard output.

    Args:
        cmd (str): shell command line to execute.
    """
    # print() with a single argument works on both Python 2 and 3
    # (the original used the Python-2-only print statement).
    print(datetime.datetime.now())
    # Bug fix: without stdout=PIPE, communicate() returns (None, None),
    # so the original always printed None instead of the output.
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output = proc.communicate()[0]
    print(output)
|
def parse(self, data):
    """Parse a raw filter expression.

    Args:
        data (str): Raw filter expression.

    Returns:
        tuple: (parsed result, accumulated list of errors)
    """
    compat_lexer = self.lexer.get_yacc_compat_lexer()
    result = self.yacc.parse(data, lexer=compat_lexer, debug=self.debug)
    # Merge lexer errors in front of any previously recorded errors.
    self.errors = self.lexer.errors + self.errors
    return result, self.errors
|
def wait(self):
    """Block until the request thread finishes, then return the outcome.

    :returns: result or error
    :type: result type or Error
    """
    self.thread.join()
    # An error, when present, takes precedence over the result.
    return self.result if self.error is None else self.error
|
def validate_index(self, rdf_class):
    """Compare the triplestore and elasticsearch index to ensure that the
    elasticsearch and triplestore items match. Elasticsearch records that
    are not in the triplestore will be deleted.

    Args:
        rdf_class: currently unused; kept for interface compatibility.
    """
    es_ids = set(self.get_es_ids())
    tstore_ids = set(item[1] for item in self.get_uri_list(no_status=True))
    # Ids present in elasticsearch but absent from the triplestore.
    orphaned = es_ids - tstore_ids
    if orphaned:
        # Bug fix: removed a stray pdb.set_trace() debugging breakpoint
        # that halted execution here in production.
        action_list = self.es_worker.make_action_list(orphaned,
                                                      action_type="delete")
        self.es_worker.bulk_save(action_list)
|
def on(self, *qubits: raw_types.Qid) -> 'SingleQubitPauliStringGateOperation':
    """Returns an application of this gate to the given qubits.

    Args:
        *qubits: The collection of qubits to potentially apply the gate to.
    """
    if len(qubits) == 1:
        # Imported here to avoid a circular import at module load time.
        from cirq.ops.pauli_string import SingleQubitPauliStringGateOperation
        return SingleQubitPauliStringGateOperation(self, qubits[0])
    raise ValueError('Expected a single qubit, got <{!r}>.'.format(qubits))
|
def get_models():
    """Find all models, returning a list of (model number, model name)
    pairs sorted by increasing model number.

    Returns: [(13, '000013-modelname'), (17, '000017-modelname'), ...]
    """
    meta_paths = gfile.Glob(os.path.join(models_dir(), '*.meta'))
    pairs = []
    for path in meta_paths:
        fname = os.path.basename(path)
        pairs.append((shipname.detect_model_num(fname),
                      shipname.detect_model_name(fname)))
    return sorted(pairs)
|
def LabelValueTable(self, keys=None):
    """Return LabelValue with FSM derived keys."""
    # Fall back to the FSM-derived superkey when no keys are supplied.
    if not keys:
        keys = self.superkey
    # pylint: disable=E1002
    return super(CliTable, self).LabelValueTable(keys)
|
def _prepare_paths(self):
    """Substitute the {{base}} placeholder in configured paths when a base
    path is set in the configuration.

    :raises: ValueError
    """
    if config.BASE in self.paths:
        base_value = self.paths[config.BASE]
        for key in list(self.paths):
            if key == config.BASE:
                continue
            if config.BASE_VARIABLE in self.paths[key]:
                self.paths[key] = self.paths[key].replace(
                    config.BASE_VARIABLE, base_value)
    LOGGER.debug('Prepared paths: %r', self.paths)
|
def mark_bl(self) -> int:
    """Mark unprocessed lines that have no content and no string nodes
    covering them as blank line BL.

    Returns:
        Number of blank lines found with no stringy parent node.
    """
    stringy_lines = find_stringy_lines(self.node, self.first_line_no)
    blank_count = 0
    for rel_no, line in enumerate(self.lines):
        # Skip lines covered by a string node or carrying content.
        if rel_no in stringy_lines or line.strip():
            continue
        self.line_markers[rel_no] = LineType.blank_line
        blank_count += 1
    return blank_count
|
def page(self, number, *args, **kwargs):
    """Return a standard ``Page`` instance with custom, digg-specific
    page ranges (leading, main and trailing) attached."""
    page = super().page(number, *args, **kwargs)
    number = int(number)  # we know this will work
    # easier access
    num_pages, body, tail, padding, margin = \
        self.num_pages, self.body, self.tail, self.padding, self.margin
    # put active page in middle of main range
    main_range = list(map(int, [
        math.floor(number - body / 2.0) + 1,  # +1 = shift odd body to right
        math.floor(number + body / 2.0)]))
    # adjust bounds: slide the whole window back inside [1, num_pages]
    if main_range[0] < 1:
        main_range = list(map(abs(main_range[0] - 1).__add__, main_range))
    if main_range[1] > num_pages:
        main_range = list(map((num_pages - main_range[1]).__add__, main_range))
    # Determine leading and trailing ranges; if possible and appropriate,
    # combine them with the main range, in which case the resulting main
    # block might end up considerable larger than requested. While we
    # can't guarantee the exact size in those cases, we can at least try
    # to come as close as possible: we can reduce the other boundary to
    # max padding, instead of using half the body size, which would
    # otherwise be the case. If the padding is large enough, this will
    # of course have no effect.
    # Example:
    #     total pages = 100, page = 4, body = 5, (default padding = 2)
    #     1 2 3 [4] 5 6 ... 99 100
    #     total pages = 100, page = 4, body = 5, padding = 1
    #     1 2 3 [4] 5 ... 99 100
    # If it were not for this adjustment, both cases would result in the
    # first output, regardless of the padding value.
    if main_range[0] <= tail + margin:
        # Main range touches the leading range: merge them.
        leading = []
        main_range = [1, max(body, min(number + padding, main_range[1]))]
        main_range[0] = 1
    else:
        leading = list(range(1, tail + 1))
    # basically same for trailing range, but not in ``left_align`` mode
    if self.align_left:
        trailing = []
    else:
        if main_range[1] >= num_pages - (tail + margin) + 1:
            # Main range touches the trailing range: merge them.
            trailing = []
            if not leading:
                # ... but handle the special case of neither leading nor
                # trailing ranges; otherwise, we would now modify the
                # main range low bound, which we just set in the previous
                # section, again.
                main_range = [1, num_pages]
            else:
                main_range = [min(num_pages - body + 1,
                                  max(number - padding, main_range[0])),
                              num_pages]
        else:
            trailing = list(range(num_pages - tail + 1, num_pages + 1))
    # finally, normalize values that are out of bound; this basically
    # fixes all the things the above code screwed up in the simple case
    # of few enough pages where one range would suffice.
    main_range = [max(main_range[0], 1), min(main_range[1], num_pages)]
    # make the result of our calculations available as custom ranges
    # on the ``Page`` instance.
    page.main_range = list(range(main_range[0], main_range[1] + 1))
    page.leading_range = leading
    page.trailing_range = trailing
    # Concatenate the three ranges, inserting a False separator (an
    # ellipsis marker) between adjacent non-empty ranges.
    page.page_range = reduce(
        lambda x, y: x + ((x and y) and [False]) + y,
        [page.leading_range, page.main_range, page.trailing_range])
    page.__class__ = DiggPage
    return page
|
def lstltd(x, n, array):
    """Given a number x and an array of non-decreasing floats, find the
    index of the largest array element less than x.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstltd_c.html

    :param x: Value to search against
    :type x: float
    :param n: Number elements in array
    :type n: int
    :param array: Array of possible lower bounds
    :type array: list
    :return: index of the last element of array that is less than x.
    :rtype: int
    """
    # Marshal the Python values into the C types CSPICE expects.
    c_array = stypes.toDoubleVector(array)
    c_x = ctypes.c_double(x)
    c_n = ctypes.c_int(n)
    return libspice.lstltd_c(c_x, c_n, c_array)
|
def action_import_folder(location):
    """Try to import all files from a local folder.

    Returns the result of the last action_import call, or None if the
    location is not a directory or holds no importable files.
    """
    if not os.path.isdir(location):
        printDebug("Not a valid directory", "important")
        return None
    result = None
    for entry in os.listdir(location):
        filepath = os.path.join(location, entry)
        # Skip subdirectories and hidden files.
        if not os.path.isfile(filepath) or entry.startswith("."):
            continue
        click.secho("\n---------\n" + filepath + "\n---------", fg='red')
        # Bug fix: the original returned inside the loop, so only the
        # first file was ever imported despite the docstring's promise
        # to import all files.
        result = action_import(filepath)
    return result
|
def get_minimum_size(self, data):
    """Find the minimum size of the grid."""
    # Pre-compute each element's minimum size so it is calculated once.
    sized_elements = [
        (col, row, cols, rows, element, element.get_minimum_size(data))
        for col, row, cols, rows, element in self.elements
    ]
    # Derive the width of every column and the height of every row.
    self.col_widths = [0] * self.cols
    self.row_heights = [0] * self.rows
    self._compile_dimension_size(0, self.col_widths, 'x', sized_elements)
    self._compile_dimension_size(1, self.row_heights, 'y', sized_elements)
    # Total size = cell sizes + inter-cell margins + outside margins.
    outside = 2 * self.outside_margin
    total_width = sum(self.col_widths) + (self.cols - 1) * self.margin + outside
    total_height = sum(self.row_heights) + (self.rows - 1) * self.margin + outside
    return datatypes.Point(total_width, total_height)
|
def has_parent(self, router):
    '''Check if ``router`` is ``self`` or a parent of ``self``.'''
    # Walk up the parent chain until we reach ``router`` or run out.
    node = self
    while node and node is not router:
        node = node._parent
    return node is not None
|
def default_char(self):
    """An empty character with default foreground and background colors."""
    # Honour the screen-wide reverse-video mode (DECSCNM).
    return Char(data=" ", fg="default", bg="default",
                reverse=mo.DECSCNM in self.mode)
|
def weave(target, aspects, **options):
    """Weave the given aspects into *target*, patching it in place.

    Args:
        target (string, class, instance, function or builtin):
            The object to weave.
        aspects (:py:obj:`aspectlib.Aspect`, function decorator or list of):
            The aspects to apply to the object.
        subclasses (bool):
            If ``True``, subclasses of target are weaved. *Only available for classes*
        aliases (bool):
            If ``True``, aliases of target are replaced.
        lazy (bool):
            If ``True`` only target's ``__init__`` method is patched, the rest of the methods are patched after
            ``__init__`` is called. *Only available for classes*.
        methods (list or regex or string):
            Methods from target to patch. *Only available for classes*

    Returns:
        aspectlib.Rollback: An object that can rollback the patches.

    Raises:
        TypeError: If target is a unacceptable object, or the specified options are not available for that type of
            object.

    .. versionchanged:: 0.4.0
        Replaced `only_methods`, `skip_methods`, `skip_magicmethods` options with `methods`.
        Renamed `on_init` option to `lazy`.
        Added `aliases` option.
        Replaced `skip_subclasses` option with `subclasses`.
    """
    # Validate that `aspects` is a callable or an iterable of callables.
    if not callable(aspects):
        if not hasattr(aspects, '__iter__'):
            raise ExpectedAdvice('%s must be an `Aspect` instance, a callable or an iterable of.' % aspects)
        for obj in aspects:
            if not callable(obj):
                raise ExpectedAdvice('%s must be an `Aspect` instance or a callable.' % obj)
    assert target, "Can't weave falsy value %r." % target
    logdebug("weave (target=%s, aspects=%s, **options=%s)", target, aspects, options)
    # The bag tracks already-patched objects so they are not patched twice.
    bag = options.setdefault('bag', ObjectBag())
    if isinstance(target, (list, tuple)):
        # Weave every element; a single Rollback undoes them all.
        return Rollback([weave(item, aspects, **options) for item in target])
    elif isinstance(target, basestring):
        # Dotted-name target: find the longest importable module prefix,
        # then walk the remaining attributes.
        parts = target.split('.')
        for part in parts:
            _check_name(part)
        if len(parts) == 1:
            return weave_module(_import_module(part), aspects, **options)
        for pos in reversed(range(1, len(parts))):
            owner, name = '.'.join(parts[:pos]), '.'.join(parts[pos:])
            try:
                owner = _import_module(owner)
            except ImportError:
                continue
            else:
                break
        else:
            raise ImportError("Could not import %r. Last try was for %s" % (target, owner))
        if '.' in name:
            # Resolve intermediate attributes (e.g. nested classes).
            path, name = name.rsplit('.', 1)
            path = deque(path.split('.'))
            while path:
                owner = getattr(owner, path.popleft())
        logdebug("@ patching %s from %s ...", name, owner)
        obj = getattr(owner, name)
        if isinstance(obj, (type, ClassType)):
            logdebug(" .. as a class %r.", obj)
            return weave_class(obj, aspects, owner=owner, name=name, **options)
        elif callable(obj):  # or isinstance(obj, FunctionType) ??
            logdebug(" .. as a callable %r.", obj)
            if bag.has(obj):
                return Nothing
            return patch_module_function(owner, obj, aspects, force_name=name, **options)
        else:
            # Not a class or callable: recurse on the resolved object.
            return weave(obj, aspects, **options)
    name = getattr(target, '__name__', None)
    if name and getattr(__builtin__, name, None) is target:
        # Builtins are patched on the builtins module itself.
        if bag.has(target):
            return Nothing
        return patch_module_function(__builtin__, target, aspects, **options)
    elif PY3 and ismethod(target):
        # Bound method on Python 3: patch the instance attribute.
        if bag.has(target):
            return Nothing
        inst = target.__self__
        name = target.__name__
        logdebug("@ patching %r (%s) as instance method.", target, name)
        func = target.__func__
        setattr(inst, name, _checked_apply(aspects, func).__get__(inst, type(inst)))
        return Rollback(lambda: delattr(inst, name))
    elif PY3 and isfunction(target):
        if bag.has(target):
            return Nothing
        # Resolve the owning namespace via __qualname__ so nested and
        # class-level functions are patched in the right place.
        owner = _import_module(target.__module__)
        path = deque(target.__qualname__.split('.')[:-1])
        while path:
            owner = getattr(owner, path.popleft())
        name = target.__name__
        logdebug("@ patching %r (%s) as a property.", target, name)
        func = owner.__dict__[name]
        return patch_module(owner, name, _checked_apply(aspects, func), func, **options)
    elif PY2 and isfunction(target):
        if bag.has(target):
            return Nothing
        return patch_module_function(_import_module(target.__module__), target, aspects, **options)
    elif PY2 and ismethod(target):
        if target.im_self:
            # Python 2 bound method: patch the instance attribute.
            if bag.has(target):
                return Nothing
            inst = target.im_self
            name = target.__name__
            logdebug("@ patching %r (%s) as instance method.", target, name)
            func = target.im_func
            setattr(inst, name, _checked_apply(aspects, func).__get__(inst, type(inst)))
            return Rollback(lambda: delattr(inst, name))
        else:
            # Python 2 unbound method: weave just that method on the class.
            klass = target.im_class
            name = target.__name__
            return weave(klass, aspects, methods='%s$' % name, **options)
    elif isclass(target):
        return weave_class(target, aspects, **options)
    elif ismodule(target):
        return weave_module(target, aspects, **options)
    elif type(target).__module__ not in ('builtins', '__builtin__') or InstanceType and isinstance(target, InstanceType):
        # Arbitrary (old- or new-style) instance.
        return weave_instance(target, aspects, **options)
    else:
        raise UnsupportedType("Can't weave object %s of type %s" % (target, type(target)))
|
def draw_lnm_samples(**kwargs):
    '''Draw masses for the uniform-in-log-mass model.

    Keyword Arguments
    -----------------
    nsamples : int
        Number of binaries to draw (default 1).
    min_mass : float
        Minimum component mass (default 5.).
    max_mass : float
        Maximum component mass (default 95.).

    Returns
    -------
    numpy.ndarray
        Primary (heavier) component masses.
    numpy.ndarray
        Secondary (lighter) component masses.
    '''
    # NOTE(review): carried over from the original -- the sampler does not
    # exactly match the analytic PDF.
    n_req = kwargs.get('nsamples', 1)
    lo = kwargs.get('min_mass', 5.)
    hi = kwargs.get('max_mass', 95.)
    mtotal_cap = lo + hi
    ln_lo, ln_hi = log(lo), log(hi)
    # Oversample so that enough pairs survive the total-mass cut below.
    n_draw = n_req * int(1.5 + log(1 + 100. / n_req))
    draw_a = np.exp(np.random.uniform(ln_lo, ln_hi, n_draw))
    draw_b = np.exp(np.random.uniform(ln_lo, ln_hi, n_draw))
    keep = np.where(draw_a + draw_b < mtotal_cap)
    primary = np.maximum(draw_a, draw_b)[keep]
    secondary = np.minimum(draw_a, draw_b)[keep]
    # Pad/truncate (cyclically, via np.resize) to exactly n_req samples.
    return np.resize(primary, n_req), np.resize(secondary, n_req)
|
def copy_resources(src_container, src_resources, storage_dir, dst_directories=None, apply_chown=None, apply_chmod=None):
    """Copies files and directories from a Docker container. Multiple resources can be
    copied and additional options are available than in :func:`copy_resource`. Unlike in
    :func:`copy_resource`, resources are copied as they are and not compressed to a
    tarball, and they are left on the remote machine.

    :param src_container: Container name or id.
    :type src_container: unicode
    :param src_resources: Resources, as (file or directory) names to copy.
    :type src_resources: iterable
    :param storage_dir: Remote directory to store the copied objects in.
    :type storage_dir: unicode
    :param dst_directories: Optional dictionary of destination directories, in the format
        ``resource: destination``. If not set, resources will be in the same relative
        structure to one another as inside the container. For setting a common default,
        use ``*`` as the resource key.
    :type dst_directories: dict
    :param apply_chown: Owner to set for the copied resources. Can be a user name or id,
        group name or id, both in the notation ``user:group``, or as a tuple
        ``(user, group)``.
    :type apply_chown: unicode or tuple
    :param apply_chmod: File system permissions to set for the copied resources. Can be
        any notation as accepted by `chmod`.
    :type apply_chmod: unicode
    """
    # Closure over `directories`/`generic_path`, which are bound below before
    # the first call -- safe, but keep the definition order in mind.
    def _copy_resource(resource):
        # Destination: explicit per-resource mapping, else the '*' default,
        # else mirror the container-relative path.
        default_dest_path = generic_path if generic_path is not None else resource
        dest_path = directories.get(resource, default_dest_path).strip(posixpath.sep)
        # `tail` (the final path component) is unused; only the directory part
        # is needed to create the remote target directory.
        head, tail = posixpath.split(dest_path)
        rel_path = posixpath.join(storage_dir, head)
        run(mkdir(rel_path, check_if_exists=True))
        # `docker cp` copies the resource out of the container onto the host.
        run('docker cp {0}:{1} {2}'.format(src_container, resource, rel_path), shell=False)
    directories = dst_directories or {}
    generic_path = directories.get('*')
    for res in src_resources:
        _copy_resource(res)
    if apply_chmod:
        run(chmod(apply_chmod, storage_dir))
    if apply_chown:
        # chown typically requires elevated privileges on the remote host.
        sudo(chown(apply_chown, storage_dir))
|
def get_repo_paths(path):
    """Return path's subdirectories which seem to be a repository.

    :param path: directory whose entries are probed with ``get_scm``.
    :return: list of absolute paths recognised as repositories.
    """
    repo_paths = []
    for entry in os.listdir(path):
        # os.listdir yields bare names relative to `path`; join with `path`
        # before resolving, otherwise abspath would resolve the names against
        # the current working directory (the original bug).
        full_path = os.path.abspath(os.path.join(path, entry))
        try:
            get_scm(full_path)
        except VCSError:
            # Not a recognised repository -- skip it.
            continue
        repo_paths.append(full_path)
    return repo_paths
|
def p_def_label(p):
    """line : ID EQU expr NEWLINE
    | ID EQU pexpr NEWLINE"""
    # NOTE: the docstring above is the PLY grammar rule -- its text is
    # functional, not documentation; do not edit it casually.
    # An EQU line defines a symbolic label; the production yields no AST node.
    p[0] = None
    __DEBUG__("Declaring '%s%s' in %i" % (NAMESPACE, p[1], p.lineno(1)))
    # Record the label (with its value expression) in the global symbol table
    # at the line it was declared.
    MEMORY.declare_label(p[1], p.lineno(1), p[3])
|
def list(self, reservation_status=values.unset, limit=None, page_size=None):
    """Lists ReservationInstance records from the API as a list.

    Unlike stream(), this operation is eager and will load `limit` records
    into memory before returning.

    :param ReservationInstance.Status reservation_status: Filter by a worker's reservation status
    :param int limit: Upper limit for the number of records to return. list() guarantees
                      never to return more than limit. Default is no limit.
    :param int page_size: Number of records to fetch per request, when not set will use
                          the default value of 50 records. If no page_size is defined
                          but a limit is defined, list() will attempt to read the limit
                          with the most efficient page size, i.e. min(limit, 1000).
    :returns: Generator that will yield up to limit results
    :rtype: list[twilio.rest.taskrouter.v1.workspace.worker.reservation.ReservationInstance]
    """
    # Materialize the lazy stream eagerly into a list.
    record_stream = self.stream(reservation_status=reservation_status, limit=limit, page_size=page_size)
    return [record for record in record_stream]
|
def unit(u):
    '''unit(u) yields the pimms-library unit object for the given unit object u
    (which may be from a separate pint.UnitRegistry instance).
    unit(uname) yields the unit object for the given unit name uname.
    unit(None) yields None.
    unit(q) yields the unit of the given quantity q.

    :raises ValueError: if u is neither None, a unit, nor a quantity.
    '''
    if u is None:
        return None
    elif is_unit(u):
        # Re-resolve by name so units from foreign registries map into ours.
        return getattr(units, str(u))
    elif is_quantity(u):
        if isinstance(u, tuple):
            # (magnitude, unit-name) tuple form of a quantity.
            return getattr(units, str(u[1]))
        else:
            # pint Quantity: its `.u` attribute is the unit.
            return getattr(units, str(u.u))
    else:
        # Fixed typo in the error message ('unrecotnized' -> 'unrecognized').
        raise ValueError('unrecognized unit argument')
|
def invert_inventory(inventory):
    """Return {item: binding} from {binding: items}.

    Protects against items carrying additional metadata (an item given as a
    one-key dict) and against items whose type is a number (keys are always
    stringified).

    :param inventory: mapping of binding -> iterable of items.
    :returns: Dictionary of inverted inventory.
    """
    inverted = dict()
    # .items() works on both Python 2 and 3; the original used the
    # Py2-only .iteritems() and dict.keys()[0], which break on Python 3.
    for binding, items in inventory.items():
        for item in items:
            if isinstance(item, dict):
                # Item carries extra metadata; the real item is its sole key.
                item = next(iter(item))
            item = str(item)
            # Key may be number
            if item in inverted:
                echo("Warning: Duplicate item found, " "for \"%s: %s\"" % (binding, item))
                continue
            inverted[item] = binding
    return inverted
|
def load_sub_plugins_from_str(cls, plugins_str):
    """Load plugin classes based on a colon-separated list of plugin names.

    Returns a dict with plugin name as key and loaded class as value; an
    empty dict when no names are given.
    """
    if not plugins_str:
        return {}
    # One entry per colon-separated name, resolved via the default module.
    return {name: load_plugin(name, MONITOR_DEFAULT_PLUGIN_MODULE)
            for name in plugins_str.split(":")}
|
def prompt_for_trilateral_choice(self, prompt, option1, option2, option3):
    """Prompt the user until the response matches one of the three choices.

    Matching is case-insensitive, but the returned value is the original
    option string exactly as supplied by the caller.
    """
    options = (option1, option2, option3)
    base = '' if prompt is None else prompt.rstrip()
    # e.g. "Continue? (yes/no/maybe)"
    full_prompt = base + ' (' + option1 + '/' + option2 + '/' + option3 + ')'
    while True:
        answer = str(self.__screen.input(full_prompt)).lower()
        for choice in options:
            if answer == choice.lower():
                return choice
        # No match: loop and re-prompt.
|
def _set_af_ipv4_uc_and_vrf_cmds_call_point_holder(self, v, load=False):
    """Setter method for af_ipv4_uc_and_vrf_cmds_call_point_holder, mapped from YANG
    variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_ipv4_uc_and_vrf_cmds_call_point_holder (container).

    If this variable is read-only (config: false) in the source YANG file, then
    _set_af_ipv4_uc_and_vrf_cmds_call_point_holder is considered as a private method.
    Backends looking to populate this variable should do so via calling
    thisObj._set_af_ipv4_uc_and_vrf_cmds_call_point_holder() directly.

    NOTE(review): `load` is accepted for pyangbind setter-signature
    compatibility but is not used in this generated body.
    """
    # If the value carries an explicit unified type, normalize it first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/coerce v into the generated container class; raises on mismatch.
        t = YANGDynClass(v, base=af_ipv4_uc_and_vrf_cmds_call_point_holder.af_ipv4_uc_and_vrf_cmds_call_point_holder, is_container='container', presence=False, yang_name="af-ipv4-uc-and-vrf-cmds-call-point-holder", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'AfIpv4Ucast'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload pyangbind callers expect.
        raise ValueError({'error-string' : """af_ipv4_uc_and_vrf_cmds_call_point_holder must be of a type compatible with container""", 'defined-type' : "container", 'generated-type' : """YANGDynClass(base=af_ipv4_uc_and_vrf_cmds_call_point_holder.af_ipv4_uc_and_vrf_cmds_call_point_holder, is_container='container', presence=False, yang_name="af-ipv4-uc-and-vrf-cmds-call-point-holder", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'AfIpv4Ucast'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",})
    # Store the validated value (name-mangled private attribute).
    self.__af_ipv4_uc_and_vrf_cmds_call_point_holder = t
    # Notify the parent tree, if this object participates in one.
    if hasattr(self, '_set'):
        self._set()
|
def p_jointype(self, t):
    """jointype : kw_join
    | kw_inner kw_join
    | outerjoin kw_outer kw_join
    | outerjoin kw_join"""
    # NOTE: the docstring above is the PLY grammar rule -- its text is
    # functional; do not edit it casually.
    # Bare JOIN has len(t) == 2; INNER JOIN puts 'inner' at t[1].
    # Both map to a plain (non-outer) join.
    if len(t) <= 2 or t[1] == 'inner':
        t[0] = JoinTypeX(None, False, None)
    else:
        # An outerjoin token (LEFT/RIGHT/FULL) leads -> outer-join variant,
        # carrying the direction token as the first field.
        t[0] = JoinTypeX(t[1], True, None)
|
async def getiter(self, url: str, url_vars: Dict[str, str] = {}, *, accept: str = sansio.accept_format(), jwt: Opt[str] = None, oauth_token: Opt[str] = None) -> AsyncGenerator[Any, None]:
    """Yield every item at the specified endpoint, following pagination.

    NOTE(review): the mutable default for ``url_vars`` is shared between
    calls; it is never mutated here, but confirm callers rely on that.
    """
    payload, next_page = await self._make_request("GET", url, url_vars, b"", accept, jwt=jwt, oauth_token=oauth_token)
    # Search-style endpoints wrap their results in an "items" key.
    if isinstance(payload, dict) and "items" in payload:
        payload = payload["items"]
    for entry in payload:
        yield entry
    # `yield from` is not supported in coroutines / async generators, so
    # recurse into the next page and re-yield its entries.
    if next_page:
        async for entry in self.getiter(next_page, url_vars, accept=accept, jwt=jwt, oauth_token=oauth_token):
            yield entry
|
def _html(title: str, field_names: List[str]) -> str:
    """Returns bare-bones HTML for an input form with the specified fields,
    used to render predictions from the configured model."""
    # One <input> fragment per field, concatenated in order.
    input_markup = ''
    for field_name in field_names:
        input_markup += _SINGLE_INPUT_TEMPLATE.substitute(field_name=field_name)
    # JavaScript array literal of single-quoted field names, e.g. ['a','b'].
    field_list_js = '[' + ','.join("'{}'".format(fn) for fn in field_names) + ']'
    return _PAGE_TEMPLATE.substitute(title=title, css=_CSS, inputs=input_markup, qfl=field_list_js)
|
def lstltc(string, n, lenvals, array):
    """Given a character string and an ordered array of character strings,
    find the index of the largest array element lexically less than the
    given string.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lstltc_c.html

    :param string: Upper bound value to search against.
    :type string: str
    :param n: Number of elements in array.
    :type n: int
    :param lenvals: String length.
    :type lenvals: int
    :param array: Array of possible lower bounds.
    :type array: list
    :return: index of the last element of array that is lexically less
        than string.
    :rtype: int
    """
    # Marshal the Python values into the ctypes forms CSPICE expects.
    arr_ptr = stypes.listToCharArrayPtr(array, xLen=lenvals, yLen=n)
    str_ptr = stypes.stringToCharP(string)
    return libspice.lstltc_c(str_ptr, ctypes.c_int(n), ctypes.c_int(lenvals), arr_ptr)
|
def convert_time_units(t):
    """Convert a time in seconds into a string with reasonable units.

    :param t: time in seconds (non-negative).
    :return: formatted string such as ``'1.500 ms'``; ``'0 s'`` for zero.
    """
    if t == 0:
        return '0 s'
    order = log10(t)
    # Pick the unit by decimal order of magnitude.
    if order < -6:
        # Everything below a microsecond is reported in ns. (The original
        # required order > -9, leaving the unit variables unbound for
        # t < 1e-9 and raising UnboundLocalError.)
        time_units = 'ns'
        factor = 1000000000
    elif order < -3:
        time_units = 'us'
        factor = 1000000
    elif order < -1:
        time_units = 'ms'
        factor = 1000.
    else:
        time_units = 's'
        factor = 1
    return "{:.3f} {}".format(factor * t, time_units)
|
def to_edit(self, postid):
    '''Try to edit the Post.'''
    # Guard clause: only users with a role above '0' may edit.
    if not (self.userinfo.role[0] > '0'):
        return False
    kwd = {}
    self.render('man_info/wiki_man_edit.html', userinfo=self.userinfo, postinfo=MWiki.get_by_uid(postid), kwd=kwd)
|
def process_mavlink_packet(self, m):
    '''handle an incoming mavlink packet'''
    mtype = m.get_type()
    # if you add processing for an mtype here, remember to add it
    # to mavlink_packet, above
    if mtype in ['WAYPOINT_COUNT', 'MISSION_COUNT']:
        if (self.num_wps_expected == 0):
            # I haven't asked for WPs, or these messages are duplicates
            # of msgs I've already received.
            self.console.error("No waypoint load started (from Editor).")
        # I only clear the mission in the Editor if this was a read event
        elif (self.num_wps_expected == -1):
            # Read event: reset the editor table and start expecting m.count items.
            self.gui_event_queue.put(MissionEditorEvent(me_event.MEGE_CLEAR_MISS_TABLE))
            self.num_wps_expected = m.count
            self.wps_received = {}
            if (m.count > 0):
                # Pre-create the table rows for the incoming mission items.
                self.gui_event_queue.put(MissionEditorEvent(me_event.MEGE_ADD_MISS_TABLE_ROWS, num_rows=m.count - 1))
        # write has been sent by the mission editor:
        elif (self.num_wps_expected > 1):
            if (m.count != self.num_wps_expected):
                # NOTE(review): "Unepxected" typo is in the original runtime
                # string; left unchanged here.
                self.console.error("Unepxected waypoint count from APM after write (Editor)")
            # since this is a write operation from the Editor there
            # should be no need to update number of table rows
    elif mtype in ['WAYPOINT', 'MISSION_ITEM']:
        # still expecting wps?
        if (len(self.wps_received) < self.num_wps_expected):
            # if we haven't already received this wp, write it to the GUI:
            if (m.seq not in self.wps_received.keys()):
                self.gui_event_queue.put(MissionEditorEvent(me_event.MEGE_SET_MISS_ITEM, num=m.seq, command=m.command, param1=m.param1, param2=m.param2, param3=m.param3, param4=m.param4, lat=m.x, lon=m.y, alt=m.z, frame=m.frame))
                # Mark this sequence number as received (dedup for retransmits).
                self.wps_received[m.seq] = True
|
def windowed_run_events_1d(arr, window):
    """Return the number of runs of a minimum length.

    Parameters
    ----------
    arr : bool array
        Input array.
    window : int
        Minimum run length.

    Returns
    -------
    int
        Number of distinct runs of at least the minimum length.
    """
    values, run_lengths, _ = rle_1d(arr)
    # Multiplying by `values` zeroes out the False-runs, so only True-runs
    # of sufficient length pass the comparison.
    long_true_runs = values * run_lengths >= window
    return long_true_runs.sum()
|
def tail(self, since, filter_pattern, limit=10000, keep_open=True, colorize=True, http=False, non_http=False, force_colorize=False):
    """Tail this function's logs.

    If keep_open, do so repeatedly, printing any new logs.

    :param since: start time, parsed by ``string_to_timestamp``.
    :param filter_pattern: server-side log filter pattern.
    :param limit: maximum number of events fetched per poll.
    :param keep_open: if True, poll once per second until interrupted.
    :param colorize: colorize output (see ``print_logs``).
    :param http: show only HTTP-style log lines.
    :param non_http: show only non-HTTP log lines.
    :param force_colorize: force colorized output even when not a TTY.
    """
    try:
        since_stamp = string_to_timestamp(since)
        last_since = since_stamp
        while True:
            new_logs = self.zappa.fetch_logs(self.lambda_name, start_time=since_stamp, limit=limit, filter_pattern=filter_pattern, )
            # Drop events already printed on a previous iteration.
            new_logs = [e for e in new_logs if e['timestamp'] > last_since]
            self.print_logs(new_logs, colorize, http, non_http, force_colorize)
            if not keep_open:
                break
            if new_logs:
                # Advance the high-water mark to the newest event seen.
                last_since = new_logs[-1]['timestamp']
            time.sleep(1)
    except KeyboardInterrupt:  # pragma: no cover
        # Die gracefully
        try:
            sys.exit(0)
        except SystemExit:
            # 130 = conventional exit code for termination by SIGINT (Ctrl-C).
            os._exit(130)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.