signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def update(custom_sources=False):
    """Update function to be called from cli.py.

    Clones/refreshes the source, template and scheme repositories. Exits
    with status 1 when no ``git`` executable is available on $PATH.
    """
    if shutil.which('git') is None:
        print('Git executable not found in $PATH.')
        sys.exit(1)

    if not custom_sources:
        # Regenerate the default sources list and clone everything it names.
        print('Creating sources.yaml…')
        write_sources_file()
        print('Cloning sources…')
        source_jobs = yaml_to_job_list(rel_to_cwd('sources.yaml'),
                                       rel_to_cwd('sources'))
        git_clone_job_list(source_jobs)

    print('Cloning templates…')
    repo_jobs = yaml_to_job_list(rel_to_cwd('sources', 'templates', 'list.yaml'),
                                 rel_to_cwd('templates'))
    print('Cloning schemes…')
    repo_jobs.extend(yaml_to_job_list(rel_to_cwd('sources', 'schemes', 'list.yaml'),
                                      rel_to_cwd('schemes')))
    git_clone_job_list(repo_jobs)
    print('Completed updating repositories.')
def extract_minors_from_setup_py(filename_setup_py):
    '''Extract supported python minor versions from setup.py and return them
    as a list of str.

    Return example:
        ['2.6', '2.7', '3.3', '3.4', '3.5', '3.6']
    '''
    # Pull the version numbers out of the "Programming Language :: Python :: X.Y"
    # trove classifiers, e.g. grep output: '2.6\n2.7\n3.3\n3.4\n3.5\n3.6'
    grep_cmd = flo('grep --perl-regexp --only-matching '
                   '"(?<=Programming Language :: Python :: )\\d+\\.\\d+" '
                   '{filename_setup_py}')
    grep_output = fabric.api.local(grep_cmd, capture=True)
    return grep_output.split()
def write(self, symbol, data, metadata=None, prune_previous_version=True, **kwargs):
    """Write 'data' under the specified 'symbol' name to this library.

    Parameters
    ----------
    symbol : `str`
        symbol name for the item
    data :
        to be persisted
    metadata : `dict`
        an optional dictionary of metadata to persist along with the symbol.
        Default: None
    prune_previous_version : `bool`
        Removes previous (non-snapshotted) versions from the database.
        Default: True
    kwargs :
        passed through to the write handler

    Returns
    -------
    VersionedItem named tuple containing the metadata and version number
    of the written symbol in the store.
    """
    self._arctic_lib.check_quota()
    # Build the new version document; the version number is atomically
    # incremented in the version-numbers collection.
    version = {'_id': bson.ObjectId()}
    version['arctic_version'] = ARCTIC_VERSION_NUMERICAL
    version['symbol'] = symbol
    version['version'] = self._version_nums.find_one_and_update({'symbol': symbol},
                                                                {'$inc': {'version': 1}},
                                                                upsert=True, new=True)['version']
    version['metadata'] = metadata
    # Latest existing version strictly below the one we just reserved.
    previous_version = self._versions.find_one({'symbol': symbol, 'version': {'$lt': version['version']}},
                                               sort=[('version', pymongo.DESCENDING)])
    handler = self._write_handler(version, symbol, data, **kwargs)
    handler.write(self._arctic_lib, version, symbol, data, previous_version, **kwargs)
    if prune_previous_version and previous_version:
        self._prune_previous_versions(symbol, new_version_shas=version.get(FW_POINTERS_REFS_KEY))
    # Insert the new version into the version DB
    self._insert_version(version)
    logger.debug('Finished writing versions for %s', symbol)
    return VersionedItem(symbol=symbol, library=self._arctic_lib.get_name(), version=version['version'],
                         metadata=version.pop('metadata', None), data=None,
                         host=self._arctic_lib.arctic.mongo_host)
def to_csv(df, filepath, sep=',', header=True, index=True):
    """Save DataFrame as csv.

    Note data is expected to be evaluated. Currently delegates to Pandas.

    Parameters
    ----------
    df : DataFrame
    filepath : str
    sep : str, optional
        Separator used between values.
    header : bool, optional
        Whether to save the header.
    index : bool, optional
        Whether to save the index columns.

    Returns
    -------
    None

    See Also
    --------
    pandas.DataFrame.to_csv :
        https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html
    """
    # Materialise as a pandas DataFrame, then let pandas do the writing.
    pandas_df = df.to_pandas()
    pandas_df.to_csv(filepath, sep=sep, header=header, index=index)
def get_cloudflare_records(self, *, account):
    """Return a `list` of `dict`s containing the zones and their records,
    obtained from the CloudFlare API.

    Args:
        account (:obj:`CloudFlareAccount`): A CloudFlare Account object

    Returns:
        :obj:`list` of `dict`
    """
    collected_zones = []
    for zobj in self.__cloudflare_list_zones(account=account):
        try:
            self.log.debug('Processing DNS zone CloudFlare/{}'.format(zobj['name']))
            zone = {
                'zone_id': get_resource_id('cfz', zobj['name']),
                'name': zobj['name'],
                'source': 'CloudFlare',
                'comment': None,
                'tags': {},
                'records': [],
            }
            for record in self.__cloudflare_list_zone_records(account=account, zoneID=zobj['id']):
                zone['records'].append({
                    'id': get_resource_id('cfr', zobj['id'],
                                          ['{}={}'.format(k, v) for k, v in record.items()]),
                    'zone_id': zone['zone_id'],
                    'name': record['name'],
                    'value': record['value'],
                    'type': record['type'],
                })
            # Only report zones that actually carry records.
            if len(zone['records']) > 0:
                collected_zones.append(zone)
        except CloudFlareError:
            self.log.exception('Failed getting records for CloudFlare zone {}'.format(zobj['name']))
    return collected_zones
def application_state(context, application):
    """Render current state of application, verbose."""
    # Build a minimal context for the per-type state template.
    state_context = {
        'roles': context['roles'],
        'org_name': context['org_name'],
        'application': application,
    }
    state_template = template.loader.get_template(
        'kgapplications/%s_common_state.html' % application.type)
    return state_template.render(state_context)
def predict_proba(self, X):
    """Predict class probabilities for X.

    The predicted class probabilities of an input sample is computed as
    the mean predicted class probabilities of the base estimators in the
    ensemble. If base estimators do not implement a ``predict_proba``
    method, then it resorts to voting and the predicted class probabilities
    of an input sample represents the proportion of estimators predicting
    each class.

    Parameters
    ----------
    X : {array-like, sparse matrix} of shape = [n_samples, n_features]
        The training input samples. Sparse matrices are accepted only if
        they are supported by the base estimator.

    Returns
    -------
    p : array of shape = [n_samples, n_classes]
        The class probabilities of the input samples. The order of the
        classes corresponds to that in the attribute `classes_`.
    """
    # Check data
    # X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])  # Don't in version 0.15
    if self.n_features_ != X.shape[1]:
        raise ValueError("Number of features of the model must "
                         "match the input. Model n_features is {0} and "
                         "input n_features is {1}."
                         "".format(self.n_features_, X.shape[1]))
    # Parallel loop: split the ensemble into per-job slices and let each
    # worker predict with its own slice of estimators.
    n_jobs, n_estimators, starts = _partition_estimators(self.n_estimators, self.n_jobs)
    all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose)(
        delayed(_parallel_predict_proba)(
            self.estimators_[starts[i]:starts[i + 1]],
            self.estimators_features_[starts[i]:starts[i + 1]],
            X,
            self.n_classes_,
            self.combination,
            self.estimators_weight_[starts[i]:starts[i + 1]])
        for i in range(n_jobs))
    # Reduce: combine the per-worker contributions according to the
    # configured combination strategy.
    if self.combination in ['majority_voting', 'majority_bmr']:
        # Unweighted average over all estimators.
        proba = sum(all_proba) / self.n_estimators
    elif self.combination in ['weighted_voting', 'weighted_bmr']:
        # Weights are already folded into each worker's contribution.
        proba = sum(all_proba)
    elif self.combination in ['stacking', 'stacking_proba', 'stacking_bmr', 'stacking_proba_bmr']:
        # Build the meta-features and delegate to the stacking classifier.
        X_stacking = _create_stacking_set(self.estimators_, self.estimators_features_,
                                          self.estimators_weight_, X, self.combination)
        proba = self.f_staking.predict_proba(X_stacking)
    return proba
def _get_api_urls(self, api_urls=None):
    """Completes a dict with the CRUD urls of the API.

    :param api_urls: A dict with the urls {'<FUNCTION>': '<URL>', ...}
    :return: A dict with the CRUD urls of the base API.
    """
    view_name = self.__class__.__name__
    if not api_urls:
        api_urls = {}
    # Endpoint names follow the "<ViewName>.api_<action>" convention;
    # delete/update additionally take an (empty) pk placeholder.
    crud_actions = (
        ("read", {}),
        ("delete", {"pk": ""}),
        ("create", {}),
        ("update", {"pk": ""}),
    )
    for action, url_kwargs in crud_actions:
        api_urls[action] = url_for("{0}.api_{1}".format(view_name, action), **url_kwargs)
    return api_urls
def cli(env, columns, sortby, volume_id):
    """List ACLs."""
    block_manager = SoftLayer.BlockStorageManager(env.client)
    access_list = block_manager.get_block_volume_access_list(volume_id=volume_id)
    table = formatting.Table(columns.columns)
    table.sortby = sortby
    # Each access-list key maps to a human-readable host type label.
    type_labels = (
        ('allowedVirtualGuests', 'VIRTUAL'),
        ('allowedHardware', 'HARDWARE'),
        ('allowedSubnets', 'SUBNET'),
        ('allowedIpAddresses', 'IP'),
    )
    for key, type_name in type_labels:
        for entry in access_list.get(key, []):
            entry['type'] = type_name
            table.add_row([cell or formatting.blank() for cell in columns.row(entry)])
    env.fout(table)
def reset(self):
    """Process everything all over again."""
    self.indexCount = 0
    # Throw away the on-disk index so it gets rebuilt from scratch.
    index_dir = self.store.newDirectory(self.indexDirectory)
    if index_dir.exists():
        index_dir.remove()
    # Re-register as a listener on every source to replay all items.
    for source in self.getSources():
        source.removeReliableListener(self)
        source.addReliableListener(self, style=iaxiom.REMOTE)
def as_proj4(self):
    """Return the PROJ.4 string which corresponds to the CRS.

    For example::

        >>> print(get(21781).as_proj4())
        +proj=somerc +lat_0=46.95240555556 +lon_0=7.4395833333 +k_0=1
        +x_0=600000 +y_0=200000 +ellps=bessel
        +towgs84=674.4,15.1,405.3,0,0,0,0 +units=m +no_defs
    """
    # epsg.io serves the PROJ.4 definition for an EPSG code directly.
    url = '{prefix}{code}.proj4?download'.format(prefix=EPSG_IO_URL, code=self.id)
    response = requests.get(url)
    return response.text.strip()
def ListFiles(self, ext_attrs=None):
    """A generator of all keys and values.

    Subkeys are reported as directories (S_IFDIR) and values as regular
    files (S_IFREG). When no hive has been selected yet, the available
    HKEY_* registry roots are listed as top-level directories instead.
    """
    del ext_attrs  # Unused.
    if not self.IsDirectory():
        return
    if self.hive is None:
        # No hive selected: enumerate the HKEY_* root constants exposed by
        # the winreg module as pseudo-directories.
        for name in dir(winreg):
            if name.startswith("HKEY_"):
                response = rdf_client_fs.StatEntry(st_mode=stat.S_IFDIR)
                response_pathspec = self.pathspec.Copy()
                response_pathspec.last.path = utils.JoinPath(response_pathspec.last.path, name)
                response.pathspec = response_pathspec
                yield response
        return
    try:
        with OpenKey(self.hive, self.local_path) as key:
            (self.number_of_keys, self.number_of_values, self.last_modified) = QueryInfoKey(key)
            # First keys - These will look like directories.
            for i in range(self.number_of_keys):
                try:
                    name = EnumKey(key, i)
                    key_name = utils.JoinPath(self.local_path, name)
                    try:
                        # Store the default value in the stat response for values.
                        with OpenKey(self.hive, key_name) as subkey:
                            value, value_type = QueryValueEx(subkey, "")
                    except OSError:
                        # Subkey has no readable default value.
                        value, value_type = None, None
                    response = self._Stat(name, value, value_type)
                    # Keys look like Directories in the VFS.
                    response.st_mode = stat.S_IFDIR
                    yield response
                except OSError:
                    # Skip subkeys that vanished or cannot be opened.
                    pass
            # Now Values - These will look like files.
            for i in range(self.number_of_values):
                try:
                    name, value, value_type = EnumValue(key, i)
                    response = self._Stat(name, value, value_type)
                    # Values look like files in the VFS.
                    response.st_mode = stat.S_IFREG
                    yield response
                except OSError:
                    # Skip values that vanished during enumeration.
                    pass
    except OSError as e:
        raise IOError("Unable to list key %s: %s" % (self.key_name, e))
def _normalize_overlap(overlap, window, nfft, samp, method='welch'):
    """Normalise an overlap in physical units to a number of samples.

    Parameters
    ----------
    overlap : `float`, `Quantity`, `None`
        the overlap in some physical unit (seconds)
    window : `str`
        the name of the window function that will be used, only used
        if `overlap=None` is given
    nfft : `int`
        the number of samples that will be used in the fast Fourier
        transform
    samp : `Quantity`
        the sampling rate (Hz) of the data that will be transformed
    method : `str`
        the name of the averaging method, default: `'welch'`, only
        used to return `0` for `'bartlett'` averaging

    Returns
    -------
    noverlap : `int`
        the number of samples to be used for the overlap
    """
    # Bartlett averaging never overlaps segments.
    if method == 'bartlett':
        return 0
    if overlap is None:
        # With no explicit overlap, fall back to the window's recommended
        # overlap when the window is given by name, otherwise none.
        if isinstance(window, string_types):
            return recommended_overlap(window, nfft)
        return 0
    return seconds_to_samples(overlap, samp)
def lnprob(self, theta):
    """Logarithm of the (unnormalised) posterior probability.

    Evaluates the log-prior first and skips the (expensive) likelihood
    call when the prior is not finite.

    :param theta: parameter vector, aligned with ``self.params``
    :return: log-prior + log-likelihood (``-inf`` for excluded priors)
    """
    global niter  # module-level call counter used for periodic progress logging
    params = self.params
    # Avoid extra likelihood calls with bad priors
    _lnprior = self.lnprior(theta)
    if np.isfinite(_lnprior):
        _lnlike = self.lnlike(theta)
    else:
        _lnprior = -np.inf
        _lnlike = -np.inf
    _lnprob = _lnprior + _lnlike
    if (niter % 100 == 0):
        msg = "%i function calls ...\n" % niter
        msg += ', '.join('%s: %.3f' % (k, v) for k, v in zip(params, theta))
        # BUG FIX: the interpolated values were previously swapped relative
        # to their labels (prior was printed as "log(like)" and vice versa).
        msg += '\nlog(like): %.3f, log(prior): %.3f' % (_lnlike, _lnprior)
        logger.debug(msg)
    niter += 1
    return _lnprob
def read_port(self, port):
    """Read the pin state of a whole port (8 pins).

    :Example:

        >>> expander = MCP23017I2C(gw)
        >>> # Read pin A0-A7 as an int (A0 and A1 are high)
        >>> expander.read_port('A')
        >>> # Read pin B0-B7 as an int (B2 is high)
        >>> expander.read_port('B')

    :param port: use 'A' to read port A and 'B' for port B
    :return: An int where every bit represents the input level.
    :raises ValueError: if ``port`` is neither 'A' nor 'B'
    """
    # GPIOA register is 0x12, GPIOB is 0x13 (MCP23017, BANK=0 layout).
    if port == 'A':
        raw = self.i2c_read_register(0x12, 1)
    elif port == 'B':
        raw = self.i2c_read_register(0x13, 1)
    else:
        # BUG FIX: an invalid port previously fell through and raised an
        # opaque NameError on 'raw'; fail fast with a clear message.
        raise ValueError("port must be 'A' or 'B', got {!r}".format(port))
    return struct.unpack('>B', raw)[0]
def extract_args_from_signature(operation, excluded_params=None):
    """Extracts basic argument data from an operation's signature and docstring.

    :param operation: the callable to introspect
    :param excluded_params: List of params to ignore and not extract.
        By default we ignore ['self', 'kwargs'].
    :yields: ``(arg_name, CLICommandArgument)`` tuples, one per parameter
    """
    args = []
    try:
        # only supported in python3 - falling back to argspec if not available
        sig = inspect.signature(operation)
        args = sig.parameters
    except AttributeError:
        # pylint: disable=deprecated-method, useless-suppression
        sig = inspect.getargspec(operation)
        args = sig.args
    arg_docstring_help = option_descriptions(operation)
    excluded_params = excluded_params or ['self', 'kwargs']
    for arg_name in [a for a in args if a not in excluded_params]:
        try:
            # this works in python3: args is a mapping of Parameter objects
            default = args[arg_name].default
            required = default == inspect.Parameter.empty
            # pylint: disable=no-member, useless-suppression
        except TypeError:
            # python2 path: args is a plain list, so recover defaults by
            # zipping the tail of argspec.args with argspec.defaults
            arg_defaults = (dict(zip(sig.args[-len(sig.defaults):], sig.defaults))
                            if sig.defaults else {})
            default = arg_defaults.get(arg_name)
            required = arg_name not in arg_defaults
        # Boolean defaults become store_true/store_false flag actions.
        action = 'store_' + str(not default).lower() if isinstance(default, bool) else None
        try:
            # Normalise the "no default" sentinel to None for the CLI layer.
            default = (default if default != inspect._empty  # pylint: disable=protected-access
                       else None)
        except AttributeError:
            pass
        options_list = ['--' + arg_name.replace('_', '-')]
        help_str = arg_docstring_help.get(arg_name)
        yield (arg_name, CLICommandArgument(arg_name, options_list=options_list,
                                            required=required, default=default,
                                            help=help_str, action=action))
def get_chain_mutations(self, pdb_id_1, chain_1, pdb_id_2, chain_2):
    '''Returns a list of tuples each containing a SEQRES Mutation object and an
    ATOM Mutation object representing the mutations from pdb_id_1, chain_1 to
    pdb_id_2, chain_2.

    SequenceMaps are constructed in this function between the chains based on
    the alignment. PDBMutationPair are returned as they are hashable and
    amenable to Set construction to eliminate duplicates.

    NOTE(review): this code uses dict.iteritems() and so is Python 2 only --
    confirm the supported interpreter range before porting.
    '''
    # Set up the objects
    p1 = self.add_pdb(pdb_id_1)
    p2 = self.add_pdb(pdb_id_2)
    sifts_1, pdb_1 = p1['sifts'], p1['pdb']
    sifts_2, pdb_2 = p2['sifts'], p2['pdb']
    # Set up the sequences.
    # The SEQRES->ATOM map is not guaranteed to exist e.g. 2ZNW chain A.
    seqres_to_atom_sequence_maps_1 = sifts_1.seqres_to_atom_sequence_maps.get(chain_1, {})
    seqres_1, atom_1 = pdb_1.seqres_sequences.get(chain_1), pdb_1.atom_sequences.get(chain_1)
    seqres_2, atom_2 = pdb_2.seqres_sequences.get(chain_2), pdb_2.atom_sequences.get(chain_2)
    if not seqres_1:
        raise Exception('No SEQRES sequence for chain {0} of {1}.'.format(chain_1, pdb_1))
    if not atom_1:
        raise Exception('No ATOM sequence for chain {0} of {1}.'.format(chain_1, pdb_1))
    if not seqres_2:
        raise Exception('No SEQRES sequence for chain {0} of {1}.'.format(chain_2, pdb_2))
    if not atom_2:
        raise Exception('No ATOM sequence for chain {0} of {1}.'.format(chain_2, pdb_2))
    seqres_str_1 = str(seqres_1)
    seqres_str_2 = str(seqres_2)
    # Align the SEQRES sequences
    sa = SequenceAligner()
    sa.add_sequence('{0}_{1}'.format(pdb_id_1, chain_1), seqres_str_1)
    sa.add_sequence('{0}_{1}'.format(pdb_id_2, chain_2), seqres_str_2)
    sa.align()
    seqres_residue_mapping, seqres_match_mapping = sa.get_residue_mapping()
    # Create a SequenceMap from the alignment and cache it on the instance.
    seqres_sequence_map = SequenceMap()
    assert(sorted(seqres_residue_mapping.keys()) == sorted(seqres_match_mapping.keys()))
    for k, v in seqres_residue_mapping.iteritems():
        seqres_sequence_map.add(k, v, seqres_match_mapping[k])
    self.seqres_sequence_maps[(pdb_id_1, chain_1)][(pdb_id_2, chain_2)] = seqres_sequence_map
    # Determine the mutations between the SEQRES sequences and use these to
    # generate a list of ATOM mutations.
    mutations = []
    clustal_symbols = SubstitutionScore.clustal_symbols
    for seqres_res_id, v in seqres_match_mapping.iteritems():
        # Look at all positions which differ. seqres_res_id is 1-indexed,
        # following the SEQRES and UniProt convention, as are our Sequence
        # objects.
        if clustal_symbols[v.clustal] != '*':
            # Get the wildtype Residue objects
            seqres_wt_residue = seqres_1[seqres_res_id]
            seqres_mutant_residue = seqres_2[seqres_residue_mapping[seqres_res_id]]
            # todo: this will probably fail for some cases where there is no corresponding mapping
            # If there is an associated ATOM record for the wildtype residue, get its residue ID
            atom_res_id = None
            atom_chain_res_id = seqres_to_atom_sequence_maps_1.get(seqres_res_id)
            try:
                if atom_chain_res_id:
                    assert(atom_chain_res_id[0] == chain_1)
                    atom_residue = atom_1[atom_chain_res_id]
                    atom_res_id = atom_chain_res_id[1:]
                    assert(atom_residue.ResidueAA == seqres_wt_residue.ResidueAA)
                    assert(atom_residue.ResidueID == atom_res_id)
            except:
                atom_res_id = None
                if seqres_wt_residue.ResidueAA != 'X':
                    # We do not seem to keep ATOM records for unknown/
                    # non-canonicals: see 2BTF chain A -> 2PBD chain A.
                    raise
            # Create two mutations - one for the SEQRES residue and one for the
            # corresponding (if any) ATOM residue. We create both so that the
            # user is informed whether there is a mutation between the
            # structures which is not captured by the coordinates. If there are
            # no ATOM coordinates, there is no point creating an ATOM mutation
            # object so we instead use the None type. This also fits with the
            # approach in the SpiderWeb framework.
            seqres_mutation = ChainMutation(seqres_wt_residue.ResidueAA, seqres_res_id, seqres_mutant_residue.ResidueAA, Chain=chain_1)
            atom_mutation = None
            if atom_res_id:
                atom_mutation = ChainMutation(seqres_wt_residue.ResidueAA, atom_res_id, seqres_mutant_residue.ResidueAA, Chain=chain_1)
            mutations.append(PDBMutationPair(seqres_mutation, atom_mutation))
    return mutations
def Write(self, packet):
    """See base class."""
    report_id = 0
    # Copy the packet into a fixed-size C buffer of the device's
    # maximum output report length.
    report_buffer = (ctypes.c_uint8 * self.internal_max_out_report_len)()
    report_buffer[:] = packet[:]
    status = iokit.IOHIDDeviceSetReport(self.device_handle,
                                        K_IO_HID_REPORT_TYPE_OUTPUT,
                                        report_id,
                                        report_buffer,
                                        self.internal_max_out_report_len)
    # Non-zero status indicates failure
    if status != K_IO_RETURN_SUCCESS:
        raise errors.OsHidError('Failed to write report to device')
def socks_endpoint(self, reactor, port=None):
    """Returns a TorSocksEndpoint configured to use an already-configured
    SOCKSPort from the Tor we're connected to. By default, this will be the
    very first SOCKSPort.

    :param port: a str, the first part of the SOCKSPort line (that is, a
        port like "9151" or a Unix socket config like "unix:/path"). You
        may also specify a port as an int. If you need to use a particular
        port that may or may not already be configured, see the async
        method :meth:`txtorcon.TorConfig.create_socks_endpoint`
    """
    if not self.SocksPort:
        raise RuntimeError("No SOCKS ports configured")

    if port is None:
        socks_config = self.SocksPort[0]
    else:
        port = str(port)  # in case e.g. an int was passed in
        if ' ' in port:
            raise ValueError("Can't specify options; use create_socks_endpoint instead")
        socks_config = None
        # "SOCKSPort" lines can carry trailing options, so compare only the
        # first whitespace-separated token (the port or a 'unix:' path).
        for candidate in self.SocksPort:
            if candidate.split()[0] == port:
                socks_config = candidate
                break
        if socks_config is None:
            raise RuntimeError("No SOCKSPort configured for port {}".format(port))

    return _endpoint_from_socksport_line(reactor, socks_config)
def config(env=DEFAULT_ENV, default='locmem://'):
    """Returns configured CACHES dictionary from CACHE_URL.

    :param env: name of the environment variable holding the cache URL
    :param default: URL used when the variable is unset
    """
    cache_url = os.environ.get(env, default)
    # An empty/unset URL yields an empty config rather than a parse error.
    return parse(cache_url) if cache_url else {}
def verify_tls(self, socket, hostname, context):
    """Verify a TLS connection. Return behaviour is dependent on the
    as_callback parameter:

    - If True, a return value of None means verification succeeded, else
      it failed.
    - If False, a return value of True means verification succeeded, an
      exception or False means it failed.
    """
    cert = socket.getpeercert()
    try:
        # Make sure the hostnames for which this certificate is valid
        # include the one we're connecting to.
        # NOTE(review): ssl.match_hostname is deprecated since Python 3.7
        # and removed in 3.12 -- confirm the supported Python range.
        ssl.match_hostname(cert, hostname)
    except ssl.CertificateError:
        return ssl.ALERT_DESCRIPTION_BAD_CERTIFICATE
    return None
def is_valid_for(self, entry_point, protocol):
    """Check if the current function can be executed from a request to the
    given entry point and with the given protocol."""
    # Short-circuit: the protocol check only runs when the entry point
    # is available (preserving the original `and` semantics).
    entry_point_ok = self.available_for_entry_point(entry_point)
    result = entry_point_ok and self.available_for_protocol(protocol)
    return result
def rotate_eggs(self):
    """Moves newest.egg to last_stable.egg.

    This is used by the upload() function upon a 2XX return.

    returns (bool): if eggs rotated successfully
    raises (IOError): if it can't copy the egg from newest to last_stable
    """
    # Guard: the library directory must exist.
    if not os.path.isdir(constants.insights_core_lib_dir):
        logger.debug("Cannot copy %s to %s because the %s directory does not exist." %
                     (constants.insights_core_newest,
                      constants.insights_core_last_stable,
                      constants.insights_core_lib_dir))
        logger.debug("Try installing the Core first.")
        return False
    # Guard: the newest egg must exist.
    if not os.path.isfile(constants.insights_core_newest):
        message = ("Cannot copy %s to %s because %s does not exist." %
                   (constants.insights_core_newest,
                    constants.insights_core_last_stable,
                    constants.insights_core_newest))
        logger.debug(message)
        return False
    try:
        # Promote the core egg and its GPG signature to "last stable".
        shutil.move(constants.insights_core_newest, constants.insights_core_last_stable)
        shutil.move(constants.insights_core_gpg_sig_newest, constants.insights_core_last_stable_gpg_sig)
    except IOError:
        message = ("There was a problem copying %s to %s." %
                   (constants.insights_core_newest, constants.insights_core_last_stable))
        logger.debug(message)
        raise IOError(message)
    return True
def record_event(self, event: Event) -> None:
    """Record the event async."""
    from polyaxon.celery_api import celery_app
    from polyaxon.settings import EventsCeleryTasks

    if not event.ref_id:
        event.ref_id = self.get_ref_id()
    serialized_event = event.serialize(dumps=False,
                                       include_actor_name=True,
                                       include_instance_info=True)
    # Fan the serialized payload out to the tracking, logging and
    # notification tasks (in that order).
    for task in (EventsCeleryTasks.EVENTS_TRACK,
                 EventsCeleryTasks.EVENTS_LOG,
                 EventsCeleryTasks.EVENTS_NOTIFY):
        celery_app.send_task(task, kwargs={'event': serialized_event})
    # The executor needs the instance itself, so it is attached only after
    # the celery payloads (which must stay serializable) have been sent.
    serialized_event['instance'] = event.instance
    self.executor.record(event_type=event.event_type, event_data=serialized_event)
def _amp3d_to_2d(self, amp, sigma_x, sigma_y):
    """Converts 3d density into 2d density parameter.

    :param amp: 3d amplitude
    :param sigma_x: width in x
    :param sigma_y: width in y
    :return: the corresponding 2d amplitude
    """
    # Scale by sqrt(pi) * sqrt(2 * sigma_x * sigma_y); operation order is
    # kept identical to preserve floating-point results exactly.
    root_pi = np.sqrt(np.pi)
    return amp * root_pi * np.sqrt(sigma_x * sigma_y * 2)
def calculateOptionPrice(self, contract: Contract, volatility: float, underPrice: float, optPrcOptions=None) -> OptionComputation:
    """Calculate the option price given the volatility.

    This method is blocking.

    https://interactivebrokers.github.io/tws-api/option_computations.html

    Args:
        contract: Option contract.
        volatility: Option volatility to use in calculation.
        underPrice: Price of the underlier to use in calculation.
        optPrcOptions: passed through to the async computation request.
    """
    # Delegate to the async variant and block on the event loop.
    pending = self.calculateOptionPriceAsync(contract, volatility, underPrice, optPrcOptions)
    return self._run(pending)
def setInstrumentParameters(self, instrpars):
    """This method overrides the superclass to set default values into
    the parameter dictionary, in case empty entries are provided.
    """
    pri_header = self._image[0].header
    self.proc_unit = instrpars['proc_unit']
    instrpars['gnkeyword'] = 'ATODGAIN'  # hard-code for WFPC2 data
    instrpars['rnkeyword'] = None
    if self._isNotValid(instrpars['exptime'], instrpars['expkeyword']):
        instrpars['expkeyword'] = 'EXPTIME'
    for chip in self.returnAllChips(extname=self.scienceExt):
        chip._headergain = self.getInstrParameter(instrpars['gain'], pri_header, instrpars['gnkeyword'])
        chip._exptime = self.getInstrParameter(instrpars['exptime'], pri_header, instrpars['expkeyword'])
        # We need to treat Read Noise as a special case since it is
        # not populated in the WFPC2 primary header
        if instrpars['rnkeyword'] is None:
            chip._rdnoise = None
        else:
            chip._rdnoise = self.getInstrParameter(instrpars['rdnoise'], pri_header, instrpars['rnkeyword'])
        if chip._headergain is None or chip._exptime is None:
            print('ERROR: invalid instrument task parameter')
            raise ValueError
    # We need to determine if the user has used the default readnoise/gain
    # value since if not, they will need to supply a gain/readnoise value
    # as well.
    usingDefaultGain = instrpars['gnkeyword'] == 'ATODGAIN'
    usingDefaultReadnoise = instrpars['rnkeyword'] in [None, 'None']
    # If the user has specified either the readnoise or the gain, we need
    # to make sure that they have actually specified both values. In the
    # default case, the readnoise of the system depends on what the gain is.
    if usingDefaultReadnoise and usingDefaultGain:
        self._setchippars()
    elif usingDefaultReadnoise and not usingDefaultGain:
        raise ValueError("ERROR: You need to supply readnoise information\n when not using the default gain for WFPC2.")
    elif not usingDefaultReadnoise and usingDefaultGain:
        raise ValueError("ERROR: You need to supply gain information when\n not using the default readnoise for WFPC2.")
    else:
        # In this case, the user has specified both a gain and readnoise
        # values. Just use them as is.
        for chip in self.returnAllChips(extname=self.scienceExt):
            chip._gain = chip._headergain
        print("Using user defined values for gain and readnoise")
    # Convert the science data to electrons
    self.doUnitConversions()
def process_spawn_qty(self, name):
    """Return the number of processes to spawn for the given consumer name.

    :param str name: The consumer name
    :rtype: int
    """
    desired = self.consumers[name].qty
    running = self.process_count(name)
    return desired - running
def _initialize(self, runtime):
    """Common initializer for OsidManager and OsidProxyManager."""
    if runtime is None:
        raise NullArgument()
    if self._my_runtime is not None:
        raise IllegalState('this manager has already been initialized.')
    self._my_runtime = runtime
    config = runtime.get_configuration()
    # Config-map keys paired with their aws_adapter runtime parameter
    # names; resolved in this fixed order.
    parameter_names = (
        ('cloudfront_public_key', 'cloudFrontPublicKey'),
        ('cloudfront_private_key', 'cloudFrontPrivateKey'),
        ('cloudfront_keypair_id', 'cloudFrontSigningKeypairId'),
        ('cloudfront_private_key_file', 'cloudFrontSigningPrivateKeyFile'),
        ('cloudfront_distro', 'cloudFrontDistro'),
        ('cloudfront_distro_id', 'cloudFrontDistroId'),
        ('put_public_key', 'S3PublicKey'),
        ('put_private_key', 'S3PrivateKey'),
        ('s3_bucket', 'S3Bucket'),
    )
    for config_key, param_name in parameter_names:
        param_id = Id('parameter:{0}@aws_adapter'.format(param_name))
        value = config.get_value_by_parameter(param_id).get_string_value()
        self._config_map[config_key] = value
def __pre_check(self, requestedUrl):
    '''Allow the pre-emptive fetching of sites with a full browser if
    they're known to require one (Sucuri/CloudFlare protected hosts).'''
    components = urllib.parse.urlsplit(requestedUrl)
    netloc_lower = components.netloc.lower()
    sucuri_test_paths = ('/sucuri_shit_2', '/sucuri_shit_3')
    cloudflare_test_paths = ('/cloudflare_under_attack_shit_2',
                             '/cloudflare_under_attack_shit_3')
    # Dispatch on known-bad netlocs first, then on the test paths.
    if netloc_lower in Domain_Constants.SUCURI_GARBAGE_SITE_NETLOCS:
        self.__check_suc_cookie(components)
    elif netloc_lower in Domain_Constants.CF_GARBAGE_SITE_NETLOCS:
        self.__check_cf_cookie(components)
    elif components.path in sucuri_test_paths:
        self.__check_suc_cookie(components)
    elif components.path in cloudflare_test_paths:
        self.__check_cf_cookie(components)
def _normalize_files(item, fc_dir=None):
    """Ensure item["files"] is a list of absolute file names.

    Handles BAM, single and paired end fastq, as well as split inputs.
    """
    raw_files = item.get("files")
    if raw_files:
        # A bare string means a single file.
        if isinstance(raw_files, six.string_types):
            raw_files = [raw_files]
        if fc_dir:
            fastq_dir = flowcell.get_fastq_dir(fc_dir)
        else:
            fastq_dir = os.getcwd()
        search_dirs = [os.getcwd(), fc_dir, fastq_dir]
        resolved = [_file_to_abs(f, search_dirs) for f in raw_files]
        # Drop entries that could not be resolved to an existing path.
        resolved = [f for f in resolved if f]
        _sanity_check_files(item, resolved)
        item["files"] = resolved
    return item
def configure_client(
        cls, address: Union[str, Tuple[str, int], Path] = 'localhost',
        port: int = 6379, db: int = 0, password: str = None,
        ssl: Union[bool, str, SSLContext] = False,
        **client_args) -> Dict[str, Any]:
    """Configure a Redis client.

    :param address: IP address, host name or path to a UNIX socket
    :param port: port number to connect to (ignored for UNIX sockets)
    :param db: database number to connect to
    :param password: password used if the server requires authentication
    :param ssl: one of the following:
        * ``False`` to disable SSL
        * ``True`` to enable SSL using the default context
        * an :class:`~ssl.SSLContext` instance
        * a ``module:varname`` reference to an :class:`~ssl.SSLContext` instance
        * name of an :class:`~ssl.SSLContext` resource
    :param client_args: extra keyword arguments passed to
        :func:`~aioredis.create_redis_pool`
    """
    assert check_argument_types()
    if isinstance(address, Path):
        address = str(address)
    elif isinstance(address, str) and not address.startswith('/'):
        # Host name/IP -> (host, port) pair; a leading "/" denotes a
        # UNIX socket path which is passed through as a string.
        address = (address, port)
    client_args.update({
        'address': address,
        'db': db,
        'password': password,
        'ssl': resolve_reference(ssl),
    })
    return client_args
def slug(self):
    """Return this node's URL slug.

    The slug is composed from ancestor slugs separated by "/"; the root
    node yields an empty string.
    """
    # The root node contributes nothing to the URL.
    if self.is_root_node():
        return ""
    if self.slugable and self.parent.parent:
        # Slugable node whose parent is not directly under the root:
        # prefix with the parent's slug (recursive property access).
        # Use the page slug alone when there is no regex, the regex is
        # hidden, or this is a leaf node.
        if not self.page.regex or (self.page.regex and not self.page.show_regex) or self.is_leaf_node():
            return u"{0}/{1}".format(self.parent.slug, self.page.slug)
        elif self.page.regex and self.value_regex and self.page.show_regex:
            # Regex pages that expose their matched value append it as an
            # extra path segment.
            return u'{0}/{1}/{2}'.format(self.parent.slug, self.page.slug, self.value_regex)
        elif not self.hide_in_url:
            # Fall back to the raw node name as the segment.
            return u'{0}/{1}'.format(self.parent.slug, self.name)
    elif self.slugable:
        # Slugable node whose parent has no parent (presumably a direct
        # child of the root -- TODO confirm): no parent prefix.
        if not self.page.regex or (self.page.regex and not self.page.show_regex) or self.is_leaf_node():
            return u"{0}".format(self.page.slug)
        elif self.page.regex and self.value_regex and self.page.show_regex:
            return u'{0}/{1}'.format(self.page.slug, self.value_regex)
        elif not self.hide_in_url:
            return u'{0}'.format(self.name)
    # Non-slugable nodes, and slugable ones marked hide_in_url, produce
    # no URL segment.
    return ""
def to_triples(self, short_pred=True, properties=True):
    """Encode the Eds as triples suitable for PENMAN serialization."""
    node_triples = []
    edge_triples = []
    # Sort nodeids just so the top variable comes first.
    ordered_nodes = sorted(self.nodes(), key=lambda n: n.nodeid != self.top)
    for node in ordered_nodes:
        nid = node.nodeid
        if short_pred:
            pred = node.pred.short_form()
        else:
            pred = node.pred.string
        node_triples.append((nid, 'predicate', pred))
        if node.lnk:
            node_triples.append((nid, 'lnk', '"{}"'.format(str(node.lnk))))
        if node.carg:
            node_triples.append((nid, 'carg', '"{}"'.format(node.carg)))
        if properties:
            if node.cvarsort is not None:
                node_triples.append((nid, 'type', node.cvarsort))
            for prop, val in node.properties.items():
                node_triples.append((nid, prop, val))
        # Edges are emitted in canonical role order.
        sorted_edges = sorted(self.edges(nid).items(),
                              key=lambda pair: rargname_sortkey(pair[0]))
        for rargname, tgt in sorted_edges:
            edge_triples.append((nid, rargname, tgt))
    return node_triples + edge_triples
def calc_spectrum(signal, rate):
    """Return (freq, magnitude) spectrum for a real-valued input signal.

    The signal is zero-padded to the next power of two before the FFT.

    Parameters
    ----------
    signal : sequence of float
        Real-valued time-domain samples (must be non-empty).
    rate : float
        Sampling rate in Hz.

    Returns
    -------
    freq : ndarray
        Frequency bins in Hz, from 0 up to the Nyquist frequency.
    spectrum : ndarray
        Magnitude of the normalized one-sided FFT at each bin.
    """
    npts = len(signal)
    # Zero-pad to the next power of two so the FFT length is efficient.
    padto = 1 << (npts - 1).bit_length()
    sp = np.fft.rfft(signal, n=padto) / padto
    # rfftfreq(n, d) == arange(n//2 + 1) * rate / n, i.e. exactly the
    # hand-rolled arange/(npts/rate) expression this replaces.
    freq = np.fft.rfftfreq(padto, d=1.0 / rate)
    return freq, abs(sp)
def first_prediction(self, singular_value):
    """get the null space term (first term) contribution to prediction
    error variance at a singular value. used to construct error variance
    dataframe

    Parameters
    ----------
    singular_value : int
        singular value to calc first term at

    Returns
    -------
    dict : dict
        dictionary of ("first", prediction_names), error variance pairs
        at singular_value
    """
    if not self.predictions:
        raise Exception("ErrVar.first(): no predictions are set")
    # Past full rank the null-space projection is empty, so the first
    # term contributes nothing.
    if singular_value > self.jco.ncol:
        return {("first", pred.col_names[0]): 0.0
                for pred in self.predictions_iter}
    self.log("calc first term parameter @" + str(singular_value))
    first_term = (self.I_minus_R(singular_value).T * self.parcov *
                  self.I_minus_R(singular_value))
    results = {}
    for prediction in self.predictions_iter:
        label = ("first", prediction.col_names[0])
        results[label] = float((prediction.T * first_term * prediction).x)
    self.log("calc first term parameter @" + str(singular_value))
    return results
def get_service_methods(iface):
    """Get a list of methods defined in the interface for a Thrift service.

    :param iface:
        The Thrift-generated Iface class defining the interface for the
        service.
    :returns:
        A set containing names of the methods defined for the service.
    """
    # Bug fix: on Python 3, methods looked up on a *class* are plain
    # functions, so inspect.ismethod matched nothing and this always
    # returned an empty set.  inspect.isroutine covers both the Python 2
    # unbound-method case and the Python 3 function case.
    methods = inspect.getmembers(iface, predicate=inspect.isroutine)
    # Dunder names (__init__ etc.) are not service methods.
    return set(name for (name, method) in methods
               if not name.startswith('__'))
def removeSessionWithKey(self, key):
    """Remove a persistent session, if it exists.

    @type key: L{bytes}
    @param key: The persistent session identifier.
    """
    # Querying by key yields zero or one session; deleteFromStore on an
    # empty query is a no-op, so no existence check is needed.
    matching = self.store.query(PersistentSession,
                                PersistentSession.sessionKey == key)
    matching.deleteFromStore()
def create_key_file(service, key):
    """Create a file containing key.

    Does nothing (beyond logging a warning) when the keyfile already
    exists.
    """
    path = _keyfile_path(service)
    if not os.path.exists(path):
        with open(path, 'w') as handle:
            handle.write(key)
        log('Created new keyfile at %s.' % path, level=INFO)
    else:
        log('Keyfile exists at %s.' % path, level=WARNING)
def _blas_is_applicable(*args):
    """Whether BLAS routines can be applied or not.

    BLAS routines are available for single and double precision
    float or complex data only. If the arrays are non-contiguous,
    BLAS methods are usually slower, and array-writing routines do
    not work at all. Hence, only contiguous arrays are allowed.

    Parameters
    ----------
    x1, ..., xN : `NumpyTensor`
        The tensors to be tested for BLAS conformity.

    Returns
    -------
    blas_is_applicable : bool
        ``True`` if all mentioned requirements are met, ``False``
        otherwise.
    """
    reference = args[0]
    same_dtype = all(x.dtype == reference.dtype for x in args[1:])
    blas_dtype = all(x.dtype in _BLAS_DTYPES for x in args)
    # All arrays must share one contiguity flavor (all Fortran- or all
    # C-contiguous) for BLAS to be usable.
    contiguous = (all(x.flags.f_contiguous for x in args) or
                  all(x.flags.c_contiguous for x in args))
    # Temporary fix for 32 bit int overflow in BLAS
    # TODO: use chunking instead
    fits_int32 = all(x.size <= np.iinfo('int32').max for x in args)
    return same_dtype and blas_dtype and contiguous and fits_int32
def table_data_client(self):
    """Getter for the gRPC stub used for the Table Admin API.

    For example:

    .. literalinclude:: snippets.py
        :start-after: [START bigtable_table_data_client]
        :end-before: [END bigtable_table_data_client]

    :rtype: :class:`.bigtable_v2.BigtableClient`
    :returns: A BigtableClient object.
    """
    # Lazily create the client on first access and cache it.
    if self._table_data_client is None:
        make_client = _create_gapic_client(bigtable_v2.BigtableClient)
        self._table_data_client = make_client(self)
    return self._table_data_client
def timeout_process_add_queue(self, module, cache_time):
    """Add a module to the timeout_queue if it is scheduled in the future or
    if it is due for an update immediately just trigger that.

    The timeout_queue is a dict with the scheduled time as the key and the
    value is a set of module instance names due to be updated at that
    point. An ordered list of keys (timeout_keys) is kept to allow easy
    checking of when updates are due. timeout_queue_lookup maps each
    module to the key it is filed under, to save searching the queue.
    """
    # If already set to update do nothing
    if module in self.timeout_update_due:
        return
    # remove if already in the queue
    key = self.timeout_queue_lookup.get(module)
    # NOTE(review): a falsy key (None, or a hypothetical key of 0) is
    # treated as "not queued" -- confirm keys can never legitimately be 0.
    if key:
        queue_item = self.timeout_queue[key]
        queue_item.remove(module)
        # Drop the time slot entirely once its last module is removed.
        if not queue_item:
            del self.timeout_queue[key]
            self.timeout_keys.remove(key)
    if cache_time == 0:
        # if cache_time is 0 we can just trigger the module update
        self.timeout_update_due.append(module)
        self.timeout_queue_lookup[module] = None
    else:
        # add the module to the timeout queue
        if cache_time not in self.timeout_keys:
            self.timeout_queue[cache_time] = set([module])
            self.timeout_keys.append(cache_time)
            # sort keys so earliest is first
            self.timeout_keys.sort()
            # when is next timeout due?
            try:
                self.timeout_due = self.timeout_keys[0]
            except IndexError:
                self.timeout_due = None
        else:
            self.timeout_queue[cache_time].add(module)
        # note that the module is in the timeout_queue
        self.timeout_queue_lookup[module] = cache_time
def _initialize(self, **resource_attributes):
    """Initialize the collection.

    :param resource_attributes: API resource parameters
    """
    super(APIResourceCollection, self)._initialize(**resource_attributes)
    # Replace the raw dicts in ``self.data`` with typed resource objects.
    raw_items = self.data
    self.data = []
    self.data.extend(self._expected_api_resource(**item)
                     for item in raw_items)
def _parse_flowcontrol_send ( self , config ) :
"""Scans the config block and returns the flowcontrol send value
Args :
config ( str ) : The interface config block to scan
Returns :
dict : Returns a dict object with the flowcontrol send value
retrieved from the config block . The returned dict object
is intended to be merged into the interface resource dict""" | value = 'off'
match = re . search ( r'flowcontrol send (\w+)$' , config , re . M )
if match :
value = match . group ( 1 )
return dict ( flowcontrol_send = value ) |
def main(req_files, verbose=False, outdated=False, latest=False,
         verbatim=False, repo=None, path='requirements.txt', token=None,
         branch='master', url=None, delay=None, ):
    """Given a list of requirements files reports which requirements are out
    of date.

    Everything is rather somewhat obvious:
    - verbose makes things a little louder
    - outdated forces piprot to only report out of date packages
    - latest outputs the requirements line with the latest version
    - verbatim outputs the requirements file as-is - with comments showing
      the latest versions (can be used with latest to output the latest
      with the old version in the comment)
    - delay specifies a timerange during which an outdated package is
      allowed

    Exits with status 1 when requirements are considered out of date.
    """
    # 1. Collect requirements from a GitHub repo, an arbitrary URL, or
    # the given local files.
    requirements = []
    if repo:
        github_url = build_github_url(repo, branch, path, token)
        req_file = get_requirements_file_from_url(github_url)
        requirements.extend(parse_req_file(req_file))
    elif url:
        req_file = get_requirements_file_from_url(url)
        requirements.extend(parse_req_file(req_file))
    else:
        for req_file in req_files:
            requirements.extend(parse_req_file(req_file, verbatim=verbatim))
            req_file.close()
    total_time_delta = 0
    max_outdated_time = 0
    # 2. Kick off all PyPI lookups concurrently (two per requirement:
    # latest release and the pinned release).
    session = FuturesSession()
    results = []
    for req, version, ignore in requirements:
        if verbatim and not req:
            # Non-requirement line (comment/blank) kept for verbatim output.
            results.append(version)
        elif req:
            results.append({'req': req, 'version': version, 'ignore': ignore,
                            'latest': session.get(get_pypi_url(req)),
                            'specified': session.get(get_pypi_url(req, version))})
    # 3. Report on each requirement as its lookups complete.
    for result in results:
        if isinstance(result, str):
            # Verbatim pass-through line.
            print(result.replace('\n', ''))
            continue
        if result['ignore']:
            if verbatim:
                print('{}=={} # norot'.format(result['req'], result['version']))
            else:
                print('Ignoring updates for {}. '.format(result['req']))
            continue
        req = result['req']
        version = result['version']
        latest_version, latest_release_date = get_version_and_release_date(
            req, verbose=verbose, response=result['latest'].result())
        specified_version, specified_release_date = get_version_and_release_date(
            req, version, response=result['specified'].result())
        if latest_release_date and specified_release_date:
            # How far behind (in days) the pinned release is.
            time_delta = (latest_release_date - specified_release_date).days
            total_time_delta = total_time_delta + time_delta
            max_outdated_time = max(time_delta, max_outdated_time)
            if verbose:
                if time_delta > 0:
                    print('{} ({}) is {} days out of date. '
                          'Latest is {}'.format(req, version, time_delta,
                                                latest_version))
                elif version != latest_version:
                    print('{} ({}) is out of date. '
                          'Latest is {}'.format(req, version, latest_version))
                elif not outdated:
                    print('{} ({}) is up to date'.format(req, version))
            if latest and latest_version != specified_version:
                print('{}=={} # Updated from {}'.format(req, latest_version,
                                                        specified_version))
            elif verbatim and latest_version != specified_version:
                print('{}=={} # Latest {}'.format(req, specified_version,
                                                  latest_version))
            elif verbatim:
                print('{}=={}'.format(req, specified_version))
        elif verbatim:
            print('{}=={} # Error checking latest version'.format(req, version))
    # 4. Summarize and choose the exit status.
    verbatim_str = ""
    if verbatim:
        verbatim_str = "# Generated with piprot {}\n# ".format(VERSION)
    if total_time_delta > 0 and delay is None:
        print("{}Your requirements are {} "
              "days out of date".format(verbatim_str, total_time_delta))
        sys.exit(1)
    elif delay is not None and max_outdated_time > int(delay):
        # NOTE(review): the implicit string concatenation below renders as
        # "...more than the allowed{} days." -- a space is missing before
        # the "{}" placeholder; confirm and fix the message text.
        print("{}At least one of your dependancies is {} "
              "days out of date which is more than the allowed"
              "{} days.".format(verbatim_str, max_outdated_time, delay))
        sys.exit(1)
    elif delay is not None and max_outdated_time <= int(delay):
        # NOTE(review): this branch reports success ("at most {} days out
        # of date") yet still exits with status 1 -- confirm whether a
        # non-zero exit is really intended when the delay is satisfied.
        print("{}All of your dependancies are at most {} "
              "days out of date.".format(verbatim_str, delay))
        sys.exit(1)
    else:
        print("{}Looks like you've been keeping up to date, "
              "time for a delicious beverage!".format(verbatim_str))
def connect():
    """Connect controller to handle token exchange and query Uber API."""
    # Exchange the authorization code for an access token and build an
    # authenticated client.
    session = auth_flow.get_session(request.url)
    client = UberRidesClient(session)
    # Fetch the driver's profile plus their last 50 trips and payments.
    profile = client.get_driver_profile().json
    trips = client.get_driver_trips(0, 50).json
    payments = client.get_driver_payments(0, 50).json
    return render_template(
        'driver_dashboard.html',
        profile=profile,
        trips=trips['trips'],
        payments=payments['payments'],
    )
def validate_subnet(s):
    """Validate a dotted-quad ip address including a netmask.

    The string is considered a valid dotted-quad address with netmask if it
    consists of one to four octets (0-255) seperated by periods (.) followed
    by a forward slash (/) and a subnet bitmask which is expressed in
    dotted-quad format.

    >>> validate_subnet('127.0.0.1/255.255.255.255')
    True
    >>> validate_subnet('127.0/255.0.0.0')
    True
    >>> validate_subnet('127.0/255')
    True
    >>> validate_subnet('127.0.0.256/255.255.255.255')
    False
    >>> validate_subnet('127.0.0.1/255.255.255.256')
    False
    >>> validate_subnet('127.0.0.0')
    False
    >>> validate_subnet(None)
    Traceback (most recent call last):
        ...
    TypeError: expected string or unicode

    :param s: String to validate as a dotted-quad ip address with netmask.
    :type s: str
    :returns: ``True`` if a valid dotted-quad ip address with netmask,
        ``False`` otherwise.
    :raises: TypeError
    """
    # NOTE: ``basestring`` keeps Python 2 str/unicode support, matching
    # the rest of this module.
    if isinstance(s, basestring):
        if '/' in s:
            # Bug fix: split on the first '/' only.  The old maxsplit of 2
            # made inputs like "1.2.3.4/8/9" produce three fields and
            # raise ValueError during unpacking instead of returning
            # False; now the extra text stays in ``mask`` and simply
            # fails netmask validation.
            start, mask = s.split('/', 1)
            return validate_ip(start) and validate_netmask(mask)
        else:
            return False
    raise TypeError("expected string or unicode")
def parse_uniprot_xml_metadata(sr):
    """Load relevant attributes and dbxrefs from a parsed UniProt XML file in a SeqRecord.

    Returns:
        dict: All parsed information
    """
    # TODO: What about "reviewed" status? and EC number
    xref_dbs_to_keep = ['GO', 'KEGG', 'PDB', 'PROSITE', 'Pfam', 'RefSeq']
    annotations = sr.annotations
    infodict = {
        # Accessions other than the primary one.
        'alt_uniprots': list(set(annotations['accessions']).difference([sr.id])),
        'gene_name': annotations.get('gene_name_primary'),
        'description': sr.description,
        'taxonomy': annotations.get('organism'),
        'seq_version': annotations['sequence_version'],
        'seq_date': annotations['sequence_modified'],
        'entry_version': annotations['version'],
        'entry_date': annotations['modified'],
    }
    # Group the cross-references we care about by (lower-cased) database.
    grouped = defaultdict(list)
    for xref in sr.dbxrefs:
        pieces = xref.split(':', 1)
        database = pieces[0]
        accession = pieces[-1]
        if database in xref_dbs_to_keep:
            key = 'pdbs' if database == 'PDB' else database.lower()
            grouped[key].append(accession)
    infodict.update(grouped)
    return infodict
def initialized(name, **kwargs):
    r'''Defines a new VM with specified arguments, but does not start it.

    :param name: the Salt_id node name you wish your VM to have.

    Each machine must be initialized individually using this function
    or the "vagrant.running" function, or the vagrant.init execution
    module call.  This command will not change the state of a running or
    paused machine.

    Possible keyword arguments:

    - cwd: The directory (path) containing the Vagrantfile
    - machine: ('') the name of the machine (in the Vagrantfile) if not default
    - vagrant_runas: ('root') the username who owns the vagrantbox file
    - vagrant_provider: the provider to run the VM (usually 'virtualbox')
    - vm: ({}) a dictionary containing these or other keyword arguments

    .. code-block:: yaml

        node_name1:
          vagrant.initialized
          - cwd: /projects/my_project
          - vagrant_runas: my_username
          - machine: machine1

        node_name2:
          vagrant.initialized
          - cwd: /projects/my_project
          - vagrant_runas: my_username
          - machine: machine2

        start_nodes:
          vagrant.start:
          - name: node_name?
    '''
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': 'The VM is already correctly defined'}
    # define a machine to start later
    ret, kwargs = _find_init_change(name, ret, **kwargs)
    # Nothing to change: the machine is already defined as requested.
    if not ret['changes']:
        return ret
    kwargs['start'] = False
    __salt__['vagrant.init'](name, **kwargs)
    ret['changes'][name] = 'Node initialized'
    ret['comment'] = 'Node {0} defined but not started.'.format(name)
    return ret
def _recv_internal(self, timeout):
    """Read a message from the serial device.

    :param timeout:
        .. warning::
            This parameter will be ignored. The timeout value of the
            channel is used.
    :returns:
        Received message and False (because no filtering has taken
        place), or ``(None, False)`` when no valid frame could be read.
        .. warning::
            Flags like is_extended_id, is_remote_frame and is_error_frame
            will not be set over this function, the flags in the return
            message are the default values.
    :rtype:
        can.Message, bool
    """
    try:
        # ser.read can return an empty string
        # or raise a SerialException
        rx_byte = self.ser.read()
    except serial.SerialException:
        return None, False
    if rx_byte and ord(rx_byte) == 0xAA:  # frame start byte
        s = bytearray(self.ser.read(4))
        timestamp = (struct.unpack('<I', s))[0]
        dlc = ord(self.ser.read())
        s = bytearray(self.ser.read(4))
        arb_id = (struct.unpack('<I', s))[0]
        data = self.ser.read(dlc)
        rxd_byte = ord(self.ser.read())
        if rxd_byte == 0xBB:
            # received message data okay
            msg = Message(timestamp=timestamp / 1000,
                          arbitration_id=arb_id,
                          dlc=dlc,
                          data=data)
            return msg, False
    # Bug fix: previously this fell off the end of the function (returning
    # a bare None) when the start byte was missing or wrong; callers
    # expect a (message, filtered) 2-tuple in every case.
    return None, False
def _init_client():
    """Initialize connection parameters and create the table if needed.

    Populates the module-level ``_mysql_kwargs`` and ``_table_name`` from
    ``__opts__``.  No-op when a client already exists; the MySQL client
    itself is created later on by run_query.
    """
    if client is not None:
        return
    global _mysql_kwargs, _table_name
    _mysql_kwargs = {
        'host': __opts__.get('mysql.host', '127.0.0.1'),
        'user': __opts__.get('mysql.user', None),
        'passwd': __opts__.get('mysql.password', None),
        'db': __opts__.get('mysql.database', _DEFAULT_DATABASE_NAME),
        'port': __opts__.get('mysql.port', 3306),
        'unix_socket': __opts__.get('mysql.unix_socket', None),
        'connect_timeout': __opts__.get('mysql.connect_timeout', None),
        'autocommit': True,
    }
    _table_name = __opts__.get('mysql.table_name', _table_name)
    # TODO: handle SSL connection parameters
    # Bug fix: drop unset options by building a new dict.  The old code
    # popped keys from _mysql_kwargs while iterating over its .items(),
    # which raises "dictionary changed size during iteration" on Python 3.
    _mysql_kwargs = {k: v for k, v in _mysql_kwargs.items() if v is not None}
    kwargs_copy = _mysql_kwargs.copy()
    kwargs_copy['passwd'] = "<hidden>"
    log.info("mysql_cache: Setting up client with params: %r", kwargs_copy)
    # The MySQL client is created later on by run_query
    _create_table()
def compare_mean_curves(calc_ref, calc, nsigma=3):
    """Compare the hazard curves coming from two different calculations.

    Sites are matched between the two calculations by (lon, lat); for
    every common site and IMT the mean curves must agree within ``nsigma``
    combined standard deviations, otherwise the two curves (with their
    +/- 1 std bands) are plotted.

    :param calc_ref: ID of the reference calculation's datastore
    :param calc: ID of the calculation to compare against the reference
    :param nsigma: tolerance in combined standard deviations
    :raises RuntimeError: if the IMTs/levels differ or no sites are shared
    """
    dstore_ref = datastore.read(calc_ref)
    dstore = datastore.read(calc)
    imtls = dstore_ref['oqparam'].imtls
    # Comparing curves only makes sense on identical IMTs and levels.
    if dstore['oqparam'].imtls != imtls:
        raise RuntimeError('The IMTs and levels are different between '
                           'calculation %d and %d' % (calc_ref, calc))
    sitecol_ref = dstore_ref['sitecol']
    sitecol = dstore['sitecol']
    # Map (lon, lat) -> site ID for each calculation so sites can be
    # matched by coordinates rather than by (possibly different) IDs.
    site_id_ref = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol_ref.sids, sitecol_ref.lons, sitecol_ref.lats)}
    site_id = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol.sids, sitecol.lons, sitecol.lats)}
    common = set(site_id_ref) & set(site_id)
    if not common:
        raise RuntimeError('There are no common sites between calculation '
                           '%d and %d' % (calc_ref, calc))
    pmap_ref = PmapGetter(dstore_ref, sids=[
        site_id_ref[lonlat] for lonlat in common]).get_mean()
    pmap = PmapGetter(dstore, sids=[
        site_id[lonlat] for lonlat in common]).get_mean()
    for lonlat in common:
        # shape (2, N)
        mean, std = pmap[site_id[lonlat]].array.T
        mean_ref, std_ref = pmap_ref[site_id_ref[lonlat]].array.T
        # Combined uncertainty of the two independent estimates.
        err = numpy.sqrt(std ** 2 + std_ref ** 2)
        for imt in imtls:
            # Slice selecting this IMT's levels within the flat array.
            sl = imtls(imt)
            ok = (numpy.abs(mean[sl] - mean_ref[sl]) < nsigma * err[sl]).all()
            if not ok:
                # Show the disagreement: both curves with +/- 1 std bands.
                # NOTE: plt.show() blocks until the window is closed.
                md = (numpy.abs(mean[sl] - mean_ref[sl])).max()
                plt.title('point=%s, imt=%s, maxdiff=%.2e' % (lonlat, imt, md))
                plt.loglog(imtls[imt], mean_ref[sl] + std_ref[sl],
                           label=str(calc_ref), color='black')
                plt.loglog(imtls[imt], mean_ref[sl] - std_ref[sl],
                           color='black')
                plt.loglog(imtls[imt], mean[sl] + std[sl],
                           label=str(calc), color='red')
                plt.loglog(imtls[imt], mean[sl] - std[sl], color='red')
                plt.legend()
                plt.show()
def support_difference_count(m, m_hat):
    """Count the number of different elements in the support in one triangle,
    not including the diagonal.
    """
    m_nnz, m_hat_nnz, intersection_nnz = _nonzero_intersection(m, m_hat)
    # Size of the symmetric difference of the two supports...
    symmetric_difference = m_nnz + m_hat_nnz - 2 * intersection_nnz
    # ...halved, because each off-diagonal entry appears in both triangles.
    return int(symmetric_difference / 2.0)
def _add_logging(dsk, ignore=None):
    """Add logging to a Dask graph.

    Each callable task is wrapped in an Eliot child action of the current
    action so the logs reflect Dask's logical execution order.

    @param dsk: The Dask graph.
    @param ignore: Unused.
        # NOTE(review): this parameter is never read -- confirm it is
        # kept only for API compatibility.
    @return: New Dask graph.
    """
    ctx = current_action()
    result = {}
    # Use topological sort to ensure Eliot actions are in logical order of
    # execution in Dask:
    keys = toposort(dsk)

    # Give each key a string name. Some keys are just aliases to other
    # keys, so make sure we have underlying key available. Later on might
    # want to shorten them as well.
    def simplify(k):
        if isinstance(k, str):
            return k
        # Tuple keys (e.g. ("x", 0)) become dash-joined strings.
        return "-".join(str(o) for o in k)
    key_names = {}
    for key in keys:
        value = dsk[key]
        if not callable(value) and value in keys:
            # It's an alias for another key:
            key_names[key] = key_names[value]
        else:
            key_names[key] = simplify(key)
    # 2. Create Eliot child Actions for each key, in topological order:
    key_to_action_id = {key: str(ctx.serialize_task_id(), "utf-8")
                        for key in keys}
    # 3. Replace function with wrapper that logs appropriate Action:
    for key in keys:
        func = dsk[key][0]
        args = dsk[key][1:]
        if not callable(func):
            # This key is just an alias for another key, no need to add
            # logging:
            result[key] = dsk[key]
            continue
        wrapped_func = _RunWithEliotContext(
            task_id=key_to_action_id[key],
            func=func,
            key=key_names[key],
            dependencies=[key_names[k] for k in get_dependencies(dsk, key)],
        )
        result[key] = (wrapped_func,) + tuple(args)
    # Sanity check: the rewritten graph must cover exactly the same keys.
    assert result.keys() == dsk.keys()
    return result
def rmswidth(self, wavelengths=None, threshold=None):
    """Calculate the :ref:`bandpass RMS width <synphot-formula-rmswidth>`.

    Not to be confused with :func:`photbw`.

    Parameters
    ----------
    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, ``self.waveset`` is used.

    threshold : float or `~astropy.units.quantity.Quantity`, optional
        Data points with throughput below this value are not
        included in the calculation. By default, all data points
        are included.

    Returns
    -------
    rms_width : `~astropy.units.quantity.Quantity`
        RMS width of the bandpass.

    Raises
    ------
    synphot.exceptions.SynphotError
        Threshold is invalid.
    """
    x = self._validate_wavelengths(wavelengths).value
    y = self(x).value
    if threshold is None:
        wave, thru = x, y
    else:
        # Accept a plain real number or a Quantity in the internal
        # flux unit; anything else is an invalid threshold.
        threshold_ok = (
            isinstance(threshold, numbers.Real) or
            (isinstance(threshold, u.Quantity) and
             threshold.unit == self._internal_flux_unit))
        if not threshold_ok:
            raise exceptions.SynphotError(
                '{0} is not a valid threshold'.format(threshold))
        keep = y >= threshold
        wave = x[keep]
        thru = y[keep]
    a = self.avgwave(wavelengths=wavelengths).value
    num = np.trapz((wave - a) ** 2 * thru, x=wave)
    den = np.trapz(thru, x=wave)
    if den == 0:  # pragma: no cover
        rms_width = 0.0
    else:
        rms_width = np.sqrt(abs(num / den))
    return rms_width * self._internal_wave_unit
def get_default_config(self):
    """Returns the default collector settings"""
    config = super(S3BucketCollector, self).get_default_config()
    # Collector-specific overrides on top of the base defaults.
    config['path'] = 'aws.s3'
    config['byte_unit'] = 'byte'
    return config
def create_view(self, request):
    """Initiates the organization and user account creation process"""
    # ``is_authenticated`` is a method on older Django versions and a
    # property on newer ones; calling the property raises TypeError.
    try:
        authenticated = request.user.is_authenticated()
    except TypeError:
        authenticated = request.user.is_authenticated
    if authenticated:
        return redirect("organization_add")
    form = org_registration_form(self.org_model)(request.POST or None)
    if not form.is_valid():
        return render(request, self.registration_form_template, {"form": form})
    email = form.cleaned_data["email"]
    try:
        user = self.user_model.objects.get(email=email)
    except self.user_model.DoesNotExist:
        # New account: create it inactive with a throwaway password;
        # activation happens later.
        user = self.user_model.objects.create(
            username=self.get_username(),
            email=email,
            password=self.user_model.objects.make_random_password(),
        )
        user.is_active = False
        user.save()
    else:
        # An account with this email already exists.
        return redirect("organization_add")
    organization = create_organization(
        user,
        form.cleaned_data["name"],
        form.cleaned_data["slug"],
        is_active=False,
    )
    return render(request, self.activation_success_template,
                  {"user": user, "organization": organization})
def exchange_reference(root_url, service, version):
    """Generate URL for a Taskcluster exchange reference."""
    base = root_url.rstrip('/')
    # The legacy root URL uses the old references hostname.
    if base == OLD_ROOT_URL:
        template = 'https://references.taskcluster.net/{}/{}/exchanges.json'
        return template.format(service, version)
    return '{}/references/{}/{}/exchanges.json'.format(base, service, version)
def add_data_to_database_table(self, dictList, createStatement=False):
    """*Import data in the list of dictionaries in the requested database table*

    Also adds HTMIDs and updates the sherlock-catalogue database helper
    table with the time-stamp of when the imported catalogue was last
    updated.

    **Key Arguments:**
        - ``dictList`` -- a list of dictionaries containing all the rows in the catalogue to be imported
        - ``createStatement`` -- the table's mysql create statement (used to generate table if it does not yet exist in database). Default *False*

    **Usage:**

    .. code-block:: python

        self.add_data_to_database_table(
            dictList=dictList,
            createStatement=createStatement
        )

    ..  todo::

        - Write a checklist for creating a new sherlock database importer
    """
    self.log.debug('starting the ``add_data_to_database_table`` method')
    # Nothing to import.
    if len(dictList) == 0:
        return
    # NOTE(review): myPid is read but never used in this method --
    # confirm it can be removed.
    myPid = self.myPid
    dbTableName = self.dbTableName
    # Create the destination table first if a create statement was given.
    if createStatement:
        writequery(
            log=self.log,
            sqlQuery=createStatement,
            dbConn=self.cataloguesDbConn,
        )
    # Bulk insert (replace on duplicates, batches of 10000 rows).
    insert_list_of_dictionaries_into_database_tables(
        dbConn=self.cataloguesDbConn,
        log=self.log,
        dictList=dictList,
        dbTableName=dbTableName,
        uniqueKeyList=[],
        dateModified=True,
        dateCreated=True,
        batchSize=10000,
        replace=True,
        dbSettings=self.settings["database settings"]["static catalogues"]
    )
    # Post-import housekeeping: spatial HTM indexes and helper tables.
    self._add_htmids_to_database_table()
    cleaner = database_cleaner(log=self.log, settings=self.settings)
    cleaner._update_tcs_helper_catalogue_tables_info_with_new_tables()
    self._update_database_helper_table()
    # Python 2 print statement: a manual post-import checklist for the
    # operator.
    print """Now:
- [ ] edit the `%(dbTableName)s` row in the sherlock catalogues database adding relevant column mappings, catalogue version number etc
- [ ] retire any previous version of this catlogue in the database. Renaming the catalogue-table by appending `legacy_` and also change the name in the `tcs_helper_catalogue_tables_info` table
- [ ] dupliate views from the previous catalogue version to point towards the new version and then delete the old views
- [ ] run the command `sherlock clean [-s <pathToSettingsFile>]` to clean up helper tables
- [ ] switch out the old catalogue table/views in your sherlock search algorithms in the yaml settings files
- [ ] run a test batch of transients to make sure catalogue is installed as expected
""" % locals()
    self.log.debug('completed the ``add_data_to_database_table`` method')
    return None
def removeUserData(self, users=None):
    """Remove users' content (items and folders) from the portal.

    Args:
        users (str): A comma delimited list of user names.
            Defaults to ``None``.

    Raises:
        common.ArcRestHelperError: wrapping any error raised while
            enumerating or deleting user content.

    Warning:
        When ``users`` is not provided (``None``), the code path for
        deleting ALL users' data is deliberately disabled — it only
        enumerates the first 100 community users and returns without
        deleting anything; the code must be modified to actually do it.
    """
    # Pre-declare everything so the ``finally`` block can unconditionally
    # reset and delete the names regardless of where an error occurred.
    admin = None
    portal = None
    user = None
    adminusercontent = None
    userFolder = None
    userContent = None
    userItem = None
    folderContent = None
    try:
        admin = arcrest.manageorg.Administration(securityHandler=self._securityHandler)
        if users is None:
            # Safety guard: refuse to wipe the whole org. Only collects the
            # first page (100) of users' content objects, then bails out.
            print("You have selected to remove all users data, you must modify the code to do this")
            usersObj = []
            commUsers = admin.portals.portalSelf.users(start=1, num=100)
            commUsers = commUsers['users']
            for user in commUsers:
                usersObj.append(user.userContent)
            return
        else:
            # Resolve each comma-separated name to a user content object;
            # names that do not resolve are reported and skipped.
            usersObj = []
            userStr = users.split(',')
            for user in userStr:
                try:
                    user = admin.content.users.user(str(user).strip())
                    usersObj.append(user)
                except:
                    # NOTE(review): bare except keeps a bad name from
                    # aborting the whole batch — presumably intentional.
                    print("%s does not exist" % str(user).strip())
        if usersObj:
            for user in usersObj:
                print("Loading content for user: %s" % user.username)
                # Delete items in the user's current (root) folder first.
                itemsToDel = []
                for userItem in user.items:
                    itemsToDel.append(userItem.id)
                if len(itemsToDel) > 0:
                    print(user.deleteItems(items=",".join(itemsToDel)))
                if user.folders:
                    # Then walk every other folder: switch into it, delete
                    # its items, and finally delete the folder itself.
                    for userFolder in user.folders:
                        if (user.currentFolder['title'] != userFolder['title']):
                            user.currentFolder = userFolder['title']
                            itemsToDel = []
                            for userItem in user.items:
                                itemsToDel.append(userItem.id)
                            if len(itemsToDel) > 0:
                                print(user.deleteItems(items=",".join(itemsToDel)))
                            print(user.deleteFolder())
    except:
        # Re-raise any failure as the package's error type with location info.
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({"function": "removeUserData", "line": line, "filename": filename, "synerror": synerror, })
    finally:
        # Explicit teardown of all locals followed by a forced GC pass —
        # matches the resource-release convention used elsewhere in this file.
        admin = None
        portal = None
        user = None
        adminusercontent = None
        userFolder = None
        userContent = None
        userItem = None
        folderContent = None
        del admin
        del portal
        del user
        del adminusercontent
        del userFolder
        del userContent
        del userItem
        del folderContent
        gc.collect()
def augment_send(self, send_func):
    """Wrap a send function so callback origins are captured.

    :param send_func:
        a function that sends messages, such as :meth:`.Bot.send\\*`
    :return:
        a function wrapping ``send_func``; when the outgoing message
        carries an inline keyboard with callback data, future callback
        queries originating from that message will be captured.
    """
    def wrapper(*args, **kwargs):
        result = send_func(*args, **kwargs)
        # Only register the message as a callback origin when chat handling
        # is enabled and the payload actually contains callback data.
        if self._enable_chat and self._contains_callback_data(kwargs):
            self.capture_origin(message_identifier(result))
        return result
    return wrapper
def load(self):
    """Parse the merged config into ``self.yamldocs``; log and raise on error."""
    try:
        merged = self.get_merged_config()
        self.yamldocs = yaml.load(merged, Loader=Loader)
        # Concatenating config files leaves top-level ``None`` documents
        # behind; dropping them is not functionally required but keeps
        # dumps clean.
        self.yamldocs = [doc for doc in self.yamldocs if doc]
        self.logdebug('parsed_rules:\n%s\n' % pretty(self.yamldocs))
    except (yaml.scanner.ScannerError, yaml.parser.ParserError):
        self.raise_and_log_error(ConfigError, 'error parsing config.')
def encode_data(self):
    """Serialise this command into the byte array to be sent.

    The concrete wire format depends on the data previously added:
    block transfers use the block encoding, everything else uses the
    plain transfer encoding. Marks the command as encoded.
    """
    # An empty command has nothing meaningful to encode.
    assert self.get_empty() is False
    self._data_encoded = True
    encoder = (self._encode_transfer_block_data
               if self._block_allowed
               else self._encode_transfer_data)
    return encoder()
def verify_signature(public_key, scheme, signature, data):
    """Verify that ``signature`` over ``data`` was produced by the private
    key paired with ``public_key``.

    <Arguments>
      public_key: the ECDSA public key, in PEM format.
      scheme: the signature scheme used, e.g. 'ecdsa-sha2-nistp256'.
      signature: the signature bytes produced by create_signature().
      data: the byte data that was signed.

    <Exceptions>
      securesystemslib.exceptions.FormatError, if any argument is
      improperly formatted or the PEM does not decode to an ECDSA key.
      securesystemslib.exceptions.UnsupportedAlgorithmError, if 'scheme'
      is not one of the supported signature schemes.

    <Side Effects>
      None.

    <Returns>
      Boolean: True when the signature is valid, False otherwise.
    """
    # Validate argument formats up front; each check raises
    # securesystemslib.exceptions.FormatError on mismatch.
    securesystemslib.formats.PEMECDSA_SCHEMA.check_match(public_key)
    securesystemslib.formats.ECDSA_SCHEME_SCHEMA.check_match(scheme)
    securesystemslib.formats.ECDSASIGNATURE_SCHEMA.check_match(signature)
    ecdsa_key = load_pem_public_key(public_key.encode('utf-8'), backend=default_backend())
    # The PEM may decode to a non-ECDSA key type; reject anything else.
    if not isinstance(ecdsa_key, ec.EllipticCurvePublicKey):
        raise securesystemslib.exceptions.FormatError('Invalid ECDSA public key: ' + repr(public_key))
    logger.debug('Loaded a valid ECDSA public key.')
    # verify() raises InvalidSignature on a bad signature; map both that
    # and malformed-input TypeErrors onto a False result.
    try:
        ecdsa_key.verify(signature, data, ec.ECDSA(hashes.SHA256()))
        return True
    except (TypeError, cryptography.exceptions.InvalidSignature):
        return False
def get_model_index(cls, model, default=True):
    """Return the default index for ``model``, or all of its indices.

    :param model: model name as a string.
    :param default: when True return the single default index; otherwise
        return the full list of indices registered for the model.
    :raise KeyError: if the provided model has no index associated.
    """
    # Pick the mapping first, then do a single lookup.
    mapping = (cls._model_name_to_default_index
               if default
               else cls._model_name_to_model_idx)
    try:
        return mapping[model]
    except KeyError:
        raise KeyError('Could not find any model index defined for model {}.'.format(model))
def data_log_send(self, fl_1, fl_2, fl_3, fl_4, fl_5, fl_6, force_mavlink1=False):
    '''Send the configurable data-log probes used inside Simulink.

    fl_1 .. fl_6     : the six float log values to transmit
    force_mavlink1   : when True, force MAVLink v1 framing
    '''
    message = self.data_log_encode(fl_1, fl_2, fl_3, fl_4, fl_5, fl_6)
    return self.send(message, force_mavlink1=force_mavlink1)
def qos_red_profile_drop_probability(self, **kwargs):
    """Auto Generated Code"""
    # Build the NETCONF payload: config/qos/red-profile carrying the
    # profile-id list key and the requested drop-probability leaf.
    config = ET.Element("config")
    qos_el = ET.SubElement(config, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos")
    red_profile = ET.SubElement(qos_el, "red-profile")
    ET.SubElement(red_profile, "profile-id").text = kwargs.pop('profile_id')
    ET.SubElement(red_profile, "drop-probability").text = kwargs.pop('drop_probability')
    # The caller may supply its own callback; default to the instance's.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def structure_attrs_fromtuple(self, obj, cl):
    # type: (Tuple, Type[T]) -> T
    """Load an attrs class from a sequence (tuple)."""
    # Convert each positional value using the metadata of the attrs
    # attribute it lines up with, then construct the class positionally.
    converted = [
        self._structure_attr_from_tuple(a, a.name, value)
        for a, value in zip(cl.__attrs_attrs__, obj)  # type: ignore
    ]
    return cl(*converted)
def call_remoteckan(self, *args, **kwargs):
    # type: (Any, Any) -> Dict
    """Call the remote CKAN.

    Args:
        *args: Arguments to pass to remote CKAN call_action method
        **kwargs: Keyword arguments to pass to remote CKAN call_action method

    Returns:
        Dict: The response from the remote CKAN call_action method
    """
    # Attach HTTP auth credentials (when configured) to the requests
    # keyword arguments forwarded to CKAN.
    requests_kwargs = kwargs.get('requests_kwargs', dict())
    credentials = self._get_credentials()
    if credentials:
        requests_kwargs['auth'] = credentials
    kwargs['requests_kwargs'] = requests_kwargs
    # Fall back to the configured API key unless one was supplied.
    kwargs['apikey'] = kwargs.get('apikey', self.get_api_key())
    return self.remoteckan().call_action(*args, **kwargs)
def iter_chunks_class(self):
    """Yield a :class:`nbt.chunk.Chunk` instance for every readable chunk
    present in the region.

    Chunks that cannot be read for whatever reason are silently skipped.
    """
    for meta in self.get_metadata():
        try:
            yield self.chunkclass(self.get_chunk(meta.x, meta.z))
        except RegionFileFormatError:
            # Unreadable chunk: skip it and keep iterating.
            continue
def load_classifiers(algo_defs, env):
    """Load algorithm definitions into a module as training algorithms.

    :param algo_defs: algorithm definitions
    :type algo_defs: AlgorithmDef | list[AlgorithmDef]
    :param env: environment (typically a module) to install the
        generated classes into

    :Example:

    >>> import sys
    >>> from odps.ml.algolib.loader import *
    >>> a = XflowAlgorithmDef('SampleAlgorithm')
    >>> a.add_param(ParamDef('param1', 'val1'))
    >>> a.add_port(PortDef('input'))
    >>> a.add_port(PortDef('output', PortDirection.OUTPUT))
    >>> load_classifiers(a, sys.modules[__name__])
    """
    # Accept a single definition by normalizing it to a one-element list.
    if not isinstance(algo_defs, Iterable):
        algo_defs = [algo_defs]
    load_algorithms(algo_defs, 'BaseTrainingAlgorithm', env)
def parse_fn(fn):
    """Parse a GEOTIFF tile file name and return the coordinates it encodes.

    Parameters
    ----------
    fn : str
        Filename of a GEOTIFF, e.g. ``"N40o5_W120o25.tif"`` where ``o``
        stands in for the decimal point of each coordinate.

    Returns
    -------
    list of float
        The coordinates parsed from the name (nominally
        ``[LLC.lat, LLC.lon, URC.lat, URC.lon]``); ``[nan, nan, nan, nan]``
        when the name does not follow the expected pattern.
    """
    try:
        # Strip the directory and extension, restore decimal points, and
        # keep the first two coordinate tokens (e.g. "N40.5", "W120.25").
        parts = os.path.splitext(os.path.split(fn)[-1])[0].replace('o', '.').split('_')[:2]
        # Split on the hemisphere letters and convert what remains.
        coords = [float(crds) for crds in re.split('[NSEW]', parts[0] + parts[1])[1:]]
    except (ValueError, IndexError):
        # BUG FIX: previously a bare ``except:`` swallowed *every* error.
        # Only malformed names (missing tokens / non-numeric pieces) should
        # fall back to NaNs; genuine bugs now surface.
        coords = [np.nan] * 4
    return coords
def RecursiveMultiListChildren(self, urns, limit=None, age=NEWEST_TIME):
    """Recursively list children for a collection of directories.

    Args:
        urns: List of urns to list children.
        limit: Max number of children to list (NOTE: this is per urn).
        age: The age of the items to retrieve. Should be one of ALL_TIMES,
            NEWEST_TIME or a range.

    Yields:
        (subject <-> children urns) tuples: first for the initial urns,
        then for their children, then the children's children, and so on
        until no new urns are discovered. For example, for
        a -> b, b -> c, RecursiveMultiListChildren(['a']) yields
        [('a', ['b']), ('b', ['c', 'd'])].
    """
    seen = set()
    pending = urns
    while True:
        discovered = []
        for subject, children in self.MultiListChildren(pending, limit=limit, age=age):
            discovered.extend(children)
            yield subject, children
        # Only queue urns we have not already expanded, so cycles terminate.
        seen.update(pending)
        pending = set(discovered) - seen
        if not pending:
            return
def extract_metatile(io, fmt, offset=None):
    """Extract a single tile from the metatile zip in the file-like ``io``.

    ``fmt`` supplies the tile extension and ``offset`` the tile coordinate
    (defaults to 0/0/0). Returns the tile bytes, or None when the named
    tile is not present in the archive.
    """
    ext = fmt.extension
    if offset is None:
        member = '0/0/0.%s' % ext
    else:
        member = '%d/%d/%d.%s' % (offset.zoom, offset.column, offset.row, ext)
    with zipfile.ZipFile(io, mode='r') as zf:
        return zf.read(member) if member in zf.namelist() else None
def notify_multiple_devices(self, registration_ids=None, message_body=None, message_title=None, message_icon=None, sound=None, condition=None, collapse_key=None, delay_while_idle=False, time_to_live=None, restricted_package_name=None, low_priority=False, dry_run=False, data_message=None, click_action=None, badge=None, color=None, tag=None, body_loc_key=None, body_loc_args=None, title_loc_key=None, title_loc_args=None, content_available=None, android_channel_id=None, timeout=5, extra_notification_kwargs=None, extra_kwargs=None):
    """Send a push notification to multiple devices (can exceed 1000 devices).

    Args:
        registration_ids (list): FCM device registration IDs.
        message_body (str, optional): message text shown in the notification tray.
        message_title (str, optional): title shown in the notification tray.
        message_icon (str, optional): icon that appears next to the notification.
        sound (str, optional): sound file name to play ("Default" for the
            device default sound).
        condition (str, optional): topic condition to deliver messages to.
        collapse_key (str, optional): identifier for a group of messages that
            can be collapsed so only the last one is sent on delivery resume.
        delay_while_idle (bool, optional): deprecated.
        time_to_live (int, optional): seconds to keep the message in FCM
            storage while the device is offline (max 4 weeks, which is also
            the FCM default).
        restricted_package_name (str, optional): name of package.
        low_priority (bool, optional): send with the low-priority flag.
        dry_run (bool, optional): if True the request is tested but no
            message is sent.
        data_message (dict, optional): custom key-value pairs.
        click_action (str, optional): action bound to a notification click.
        badge (str, optional): badge of notification.
        color (str, optional): color of the icon.
        tag (str, optional): group notifications by tag.
        body_loc_key (str, optional): key to the body string for localization.
        body_loc_args (list, optional): format args for the localized body.
        title_loc_key (str, optional): key to the title string for localization.
        title_loc_args (list, optional): format args for the localized title.
        content_available (bool, optional): wake an inactive client app.
        android_channel_id (str, optional): Android 8.0+ notification channel.
        timeout (int, optional): time limit for the request, in seconds.
        extra_notification_kwargs (dict, optional): more notification
            keyword arguments.
        extra_kwargs (dict, optional): more keyword arguments; defaults to
            an empty dict.

    Returns:
        dict: Response from FCM server (`multicast_id`, `success`,
        `failure`, `canonical_ids`, `results`).

    Raises:
        AuthenticationError: If :attr:`api_key` is not set or provided, or
            there is an error authenticating the sender.
        FCMServerError: Internal server error or timeout error on the
            Firebase cloud messaging server.
        InvalidDataError: Invalid data provided.
        InternalPackageError: JSON parsing error, mostly from changes in
            the response of FCM; create a new github issue to resolve it.
    """
    if not isinstance(registration_ids, list):
        raise InvalidDataError('Invalid registration IDs (should be list)')
    # BUG FIX: ``extra_kwargs`` previously defaulted to a shared mutable
    # ``{}``; normalize a ``None`` default here instead.
    if extra_kwargs is None:
        extra_kwargs = {}
    payloads = []
    # FCM caps the number of ids per request, so build one payload per chunk.
    # (The loop variable no longer shadows the ``registration_ids`` parameter.)
    for id_chunk in self.registration_id_chunks(registration_ids):
        payloads.append(self.parse_payload(
            registration_ids=id_chunk,
            message_body=message_body,
            message_title=message_title,
            message_icon=message_icon,
            sound=sound,
            condition=condition,
            collapse_key=collapse_key,
            delay_while_idle=delay_while_idle,
            time_to_live=time_to_live,
            restricted_package_name=restricted_package_name,
            low_priority=low_priority,
            dry_run=dry_run,
            data_message=data_message,
            click_action=click_action,
            badge=badge,
            color=color,
            tag=tag,
            body_loc_key=body_loc_key,
            body_loc_args=body_loc_args,
            title_loc_key=title_loc_key,
            title_loc_args=title_loc_args,
            content_available=content_available,
            android_channel_id=android_channel_id,
            extra_notification_kwargs=extra_notification_kwargs,
            **extra_kwargs))
    self.send_request(payloads, timeout)
    return self.parse_responses()
def run(self):
    """Load all paintings into the database."""
    df = PaintingsInputData().load()
    # Use the wikidata painting label as the entity name.
    df.rename(columns={'paintingLabel': 'name'}, inplace=True)
    # Resolve artist foreign keys: map each painting's creator wiki id onto
    # the ids of artist entities already stored in the database.
    artists = models.Entity.query_with_attributes('artist', self.client)
    df['artist_id'] = df['creator_wiki_id'].map(artists.set_index('wiki_id')['id'])
    # Persist the entities along with the selected attribute columns.
    attribute_columns = ['name', 'wiki_id', 'area', 'decade', 'artist_id']
    self.store(df, attribute_columns)
    self.done()
def get(consul_url=None, key=None, token=None, recurse=False, decode=False, raw=False):
    '''Get key from Consul

    :param consul_url: The Consul server URL.
    :param key: The key to use as the starting point for the list.
    :param recurse: Return values recursively beginning at the value of key.
    :param decode: By default values are stored as Base64 encoded values,
                   decode will return the whole key with the value decoded.
    :param raw: Simply return the decoded value of the key.
    :return: The keys in Consul.

    CLI Example:

    .. code-block:: bash

        salt '*' consul.get key='web/key1'
        salt '*' consul.get key='web' recurse=True
        salt '*' consul.get key='web' recurse=True decode=True
        salt '*' consul.get key='web' recurse=True decode=True raw=True
    '''
    ret = {}
    # Fall back to the configured Consul URL; bail out with an error
    # payload if none can be found.
    if not consul_url:
        consul_url = _get_config()
    if not consul_url:
        log.error('No Consul URL found.')
        ret['message'] = 'No Consul URL found.'
        ret['res'] = False
        return ret
    if not key:
        raise SaltInvocationError('Required argument "key" is missing.')
    query_params = {}
    if recurse:
        query_params['recurse'] = 'True'
    if raw:
        query_params['raw'] = True
    ret = _query(consul_url=consul_url, function='kv/{0}'.format(key), token=token, query_params=query_params)
    # Values come back Base64 encoded; optionally decode them in place.
    if ret['res'] and decode:
        for item in ret['data']:
            item['Value'] = "" if item['Value'] is None else base64.b64decode(item['Value'])
    return ret
def safe_mkdtemp(cleaner=_mkdtemp_atexit_cleaner, **kw):
    """Create a temporary directory that is cleaned up on process exit.

    Keyword arguments are forwarded to :func:`tempfile.mkdtemp`.

    :API: public
    """
    # NOTE: proper lock sanitation on fork (CPython issue 6721) would be
    # desirable here.
    with _MKDTEMP_LOCK:
        path = tempfile.mkdtemp(**kw)
        return register_rmtree(path, cleaner=cleaner)
def get_all_users(self, path_prefix='/', marker=None, max_items=None):
    """List the users that have the specified path prefix.

    :type path_prefix: string
    :param path_prefix: If provided, only users whose paths match the
        provided prefix will be returned.

    :type marker: string
    :param marker: Pagination marker; set this to the Marker element of a
        previous truncated response when requesting the next page.

    :type max_items: int
    :param max_items: Maximum number of users to include in the response
        when paginating.
    """
    params = {'PathPrefix': path_prefix}
    # Only include the optional paging parameters when they carry a value.
    for param_name, value in (('Marker', marker), ('MaxItems', max_items)):
        if value:
            params[param_name] = value
    return self.get_response('ListUsers', params, list_marker='Users')
def get_node_selectable(node, context):
    """Return the Selectable Union[Table, CTE] associated with the node."""
    path = node.query_path
    try:
        return context.query_path_to_selectable[path]
    except KeyError:
        # A missing selectable indicates an internal inconsistency.
        raise AssertionError(u'Unable to find selectable for query path {} with context {}.'.format(path, context))
def normalize(opts):
    """Normalize the validated AnchorHub options namespace.

    Assumes ``opts`` has already been validated with
    ``anchorhub.validation_opts.validate()``.

    :param opts: a namespace containing options for AnchorHub
    :return: a namespace with the attributes modified
    """
    opts_dict = vars(opts)
    # Each helper mutates ``opts_dict`` in place, layering derived
    # attributes on top of the validated options, in this fixed order.
    for step in (add_is_dir,
                 ensure_directories_end_in_separator,
                 add_abs_path_directories,
                 add_open_close_wrappers,
                 add_wrapper_regex):
        step(opts_dict)
    return Bunch(opts_dict)
def eval_objfn(self):
    r"""Compute components of objective function as well as total
    contribution to objective function. Data fidelity term is
    :math:`(1/2) \| H \mathbf{x} - \mathbf{s} \|_2^2` and
    regularisation term is :math:`\| W_{\mathrm{tv}}
    \sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2} \|_1`.

    Returns
    -------
    tuple
        ``(obj, dfd, reg)``: total objective value, data fidelity term,
        and (unweighted) regularisation term.
    """
    # Residual in the frequency domain: A*x - s.
    Ef = self.Af * self.Xf - self.Sf
    # Data fidelity (1/2)||Hx - s||_2^2, evaluated from the frequency-domain
    # residual; ``sl.rfl2norm2`` presumably applies the Parseval scaling for
    # the real FFT — TODO confirm against the sl module.
    dfd = sl.rfl2norm2(Ef, self.S.shape, axis=self.axes) / 2.0
    # Weighted isotropic TV term: l1 norm of the gradient magnitudes.
    reg = np.sum(self.Wtv * np.sqrt(np.sum(self.obfn_gvar() ** 2, axis=self.saxes)))
    # Total objective combines fidelity and lambda-weighted regularisation.
    obj = dfd + self.lmbda * reg
    return (obj, dfd, reg)
def parametric_function_api(scope_name=None, param_desc=None):
    """Decorator for parametric functions.

    The decorated function is always called under a parameter scope
    ``scope_name``.

    Also, the decorator adds an additional argument ``name`` (:obj:`str`,
    default is ``None``) at the end. If ``name`` is specified, the
    scope ``scope_name`` comes under a scope ``name``. This feature
    could reduce vertical space usage of the source code.

    Any parametric function should be decorated by this.

    Args:
        scope_name (str, optional): The original function will be called
            under a parameter scope named by ``scope_name``. Defaults to
            the decorated function's own name.
        param_desc (list, optional):
            Descriptions of parameters will be automatically included into
            docstring. This must be a list of tuples with 4 elements
            composed of (name (str), description (str), shape info (str),
            need_grad (bool)).

    Returns:
        function: A decorated parametric function.
    """
    def parametric_function_api_inside(func):
        from nnabla.utils.py23_compatible import getargspec
        import inspect
        name = func.__name__
        # BUG FIX: the original ran ``scope_name = name`` at decorator-call
        # time, before ``name`` existed, raising NameError whenever
        # ``scope_name`` was omitted. Resolve the effective scope here.
        effective_scope_name = scope_name if scope_name is not None else name
        doc = func.__doc__
        if param_desc:
            indent = 8
            try:
                # BUG FIX: the original used a lazy ``map`` whose errors
                # surfaced only at join time, outside this try; build the
                # list eagerly so malformed tuples are caught here.
                desc = [' ' * indent
                        + '* {} (``need_grad={}``) : {}. (shape: ``{}``)'.format(
                            d[0], d[3], d[1], d[2])
                        for d in param_desc]
            except (TypeError, IndexError):
                # BUG FIX: the original constructed a ValueError without
                # raising it, silently swallowing the problem.
                raise ValueError(
                    'param_desc argument of parametric_function_api must be '
                    'None or a list of tuple with three elements composed of '
                    '(name(str), description(str), need_grad(bool)).')
            doc += '''
    Parameters to be registered
        The following variables are registered in a parameter scope ``"{}"``;

{}
    '''.format(effective_scope_name, '\n'.join(desc))
        doc += """
    Note:

        If the ``name`` option is passed, the parameters become wrapped inside the parameter scope
        with the specified name, yielding the same results as the following code.
        This can be used to simplify the code.

        .. code-block:: python

            with parametric_scope(name):
                output = {name}(<args>)

    """.format(name=name)
        spec = getargspec(func)
        defaults = spec.defaults
        if defaults is None:
            defaults = tuple()
        # The extra trailing ``name=None`` keyword argument is appended here.
        signature = inspect.formatargspec(
            spec.args + ['name'], spec.varargs, spec.keywords,
            defaults + (None,))
        shortsignature = inspect.formatargspec(
            spec.args, spec.varargs, spec.keywords, None)
        # Every parametric function must accept ``fix_parameters``.
        assert 'fix_parameters' in spec.args, \
            "A parametric function must take `fix_parameters` as an argument." \
            " `{}{}` doesn't have it.".format(name, signature)
        # Generate a wrapper with the extended signature that enters the
        # appropriate parameter scopes before delegating to ``func``.
        code = """
def {name}{signature}:
    if name is None:
        with parameter_scope(scope_name):
            return func{shortsignature}
    with parameter_scope(name):
        with parameter_scope(scope_name):
            return func{shortsignature}
""".format(name=name, signature=signature, shortsignature=shortsignature)
        execdict = dict(
            func=func, parameter_scope=nn.parameter_scope,
            scope_name=effective_scope_name)
        exec_(code, execdict)
        newfunc = execdict[name]
        newfunc.__doc__ = doc
        newfunc.__parametric_function_api_base__ = func
        newfunc.__scope_name__ = effective_scope_name
        newfunc.__module__ = __name__
        return newfunc
    return parametric_function_api_inside
def search(self, pattern="*", mode="both"):
    """Perform a pattern-based search on keyword names and documentation.

    Matching is case-insensitive. Returns a list of
    (library_id, library_name, keyword_name, keyword_synopsis) tuples
    ordered by the SQL query (library id, library name, keyword name),
    with duplicates removed.

    With ``mode="name"`` only keyword names are searched; otherwise both
    the name and the keyword documentation are searched. You can limit the
    search to a single library by specifying "in:" followed by the name of
    the library or resource file, e.g. "screenshot in:Selenium2Library".
    """
    sql_pattern = self._glob_to_sql(pattern)
    # Choose the WHERE clause (and its bound parameters) by search mode.
    if mode == "name":
        COND = "(keyword.name like ?)"
        args = [sql_pattern]
    else:
        COND = "(keyword.name like ? OR keyword.doc like ?)"
        args = [sql_pattern, sql_pattern]
    sql = """SELECT collection.collection_id, collection.name, keyword.name, keyword.doc
    FROM collection_table as collection
    JOIN keyword_table as keyword
    WHERE collection.collection_id == keyword.collection_id
    AND %s
    ORDER by collection.collection_id, collection.name, keyword.name
    """ % COND
    cursor = self._execute(sql, args)
    # Keep only the first line of each doc as the synopsis.
    rows = [(row[0], row[1], row[2], row[3].strip().split("\n")[0])
            for row in cursor.fetchall()]
    return list(set(rows))
def _generate_examples(self, images_dir_path):
    """Generate flower images and labels given the image directory path.

    Args:
        images_dir_path: path to the directory where the images are stored.

    Yields:
        Dicts with the image path and its corresponding label (the
        lower-cased class directory name).
    """
    # The archive extracts into a single top-level directory; descend into it.
    parent_dir = tf.io.gfile.listdir(images_dir_path)[0]
    walk_dir = os.path.join(images_dir_path, parent_dir)
    for class_dir in tf.io.gfile.listdir(walk_dir):
        class_path = os.path.join(walk_dir, class_dir)
        if not tf.io.gfile.isdir(class_path):
            continue
        # Each class directory contains the jpg images for that label.
        for full_path, _, filenames in tf.io.gfile.walk(class_path):
            for image_file in filenames:
                if image_file.endswith(".jpg"):
                    yield {
                        "image": os.path.join(full_path, image_file),
                        "label": class_dir.lower(),
                    }
def get_plot(self, color_set='PuBu', grid_off=True, axis_off=True, show_area=False, alpha=1, off_color='red', direction=None, bar_pos=(0.75, 0.15, 0.05, 0.65), bar_on=False, units_in_JPERM2=True, legend_on=True, aspect_ratio=(8, 8), custom_colors={}):
    """Get the Wulff shape plot.

    Args:
        color_set: matplotlib colormap name, default is 'PuBu'
        grid_off (bool): default is True
        axis_off (bool): default is True
        show_area (bool): default is False
        alpha (float): chosen from 0 to 1 (float), default is 1
        off_color: Default color for facets not present on the Wulff shape.
        direction: viewing direction; default is the facet of maximum area
        bar_pos: colorbar axes position, default is (0.75, 0.15, 0.05, 0.65)
        bar_on (bool): default is False
        units_in_JPERM2 (bool): label the colorbar in J/m^2 (else eV/A^2)
        legend_on (bool): default is True
        aspect_ratio: figure size in inches, default is (8, 8)
        custom_colors ({(h, k, l}: [r, g, b, alpha}): Customize color of each
            facet with a dictionary. The key is the corresponding Miller
            index and value is the color. Undefined facets will use default
            color site. Note: If you decide to set your own colors, it
            probably won't make any sense to have the color bar on.
            NOTE(review): mutable default argument — safe only as long as
            it is never mutated here; it is only passed through.

    Return:
        (matplotlib.pyplot)
    """
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    import mpl_toolkits.mplot3d as mpl3
    # Per-facet colors plus legend proxies and the surface energies that
    # actually appear on the Wulff shape.
    color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff, e_surf_on_wulff = self._get_colors(color_set, alpha, off_color, custom_colors=custom_colors)
    if not direction:
        # If direction is not specified, use the miller indices of
        # maximum area.
        direction = max(self.area_fraction_dict.items(), key=lambda x: x[1])[0]
    fig = plt.figure()
    fig.set_size_inches(aspect_ratio[0], aspect_ratio[1])
    # Camera angles derived from the chosen viewing direction.
    azim, elev = self._get_azimuth_elev([direction[0], direction[1], direction[-1]])
    wulff_pt_list = self.wulff_pt_list
    ax = mpl3.Axes3D(fig, azim=azim, elev=elev)
    for plane in self.facets:
        # check whether [pts] is empty
        if len(plane.points) < 1:
            # empty, plane is not on_wulff.
            continue
        # assign the color for on_wulff facets according to its
        # index and the color_list for on_wulff
        plane_color = color_list[plane.index]
        pt = self.get_line_in_facet(plane)
        # plot from the sorted pts from [simpx]
        tri = mpl3.art3d.Poly3DCollection([pt])
        tri.set_color(plane_color)
        tri.set_edgecolor("#808080")
        ax.add_collection3d(tri)
    # set ranges of x, y, z
    # find the largest distance between on_wulff pts and the origin,
    # to ensure complete and consistent display for all directions
    r_range = max([np.linalg.norm(x) for x in wulff_pt_list])
    ax.set_xlim([-r_range * 1.1, r_range * 1.1])
    ax.set_ylim([-r_range * 1.1, r_range * 1.1])
    ax.set_zlim([-r_range * 1.1, r_range * 1.1])
    # add legend
    if legend_on:
        # NOTE(review): no-op self-assignment kept from the original.
        color_proxy = color_proxy
        if show_area:
            ax.legend(color_proxy, self.miller_area, loc='upper left', bbox_to_anchor=(0, 1), fancybox=True, shadow=False)
        else:
            ax.legend(color_proxy_on_wulff, miller_on_wulff, loc='upper center', bbox_to_anchor=(0.5, 1), ncol=3, fancybox=True, shadow=False)
    ax.set_xlabel('x')
    ax.set_ylabel('y')
    ax.set_zlabel('z')
    # Add colorbar
    if bar_on:
        cmap = plt.get_cmap(color_set)
        cmap.set_over('0.25')
        cmap.set_under('0.75')
        # Colorbar boundaries track the distinct surface energies, padded
        # past the largest value so the top bin is visible.
        bounds = [round(e, 2) for e in e_surf_on_wulff]
        bounds.append(1.2 * bounds[-1])
        norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
        # display surface energies
        ax1 = fig.add_axes(bar_pos)
        cbar = mpl.colorbar.ColorbarBase(ax1, cmap=cmap, norm=norm, boundaries=[0] + bounds + [10], extend='both', ticks=bounds[:-1], spacing='proportional', orientation='vertical')
        units = "$J/m^2$" if units_in_JPERM2 else "$eV/\AA^2$"
        cbar.set_label('Surface Energies (%s)' % (units), fontsize=100)
    if grid_off:
        ax.grid('off')
    if axis_off:
        ax.axis('off')
    return plt
def match_keyword(token, keywords):
    """Return True if *token* is a keyword token whose (upper-cased) value
    is one of *keywords*."""
    # A missing token, or a token that is not a keyword, can never match.
    if token and token.is_keyword:
        return token.value.upper() in keywords
    return False
def to_html(self, wrap_slash=False):
    """Render a Text MessageElement as html.

    :param wrap_slash: Whether to replace slashes with the slash plus the
        html <wbr> tag which will help to e.g. wrap html in small cells if
        it contains a long filename. Disabled by default as it may cause
        side effects if the text contains html markup.
    :type wrap_slash: bool

    :returns: Html representation of the Text MessageElement.
    :rtype: str
    """
    if self.text is None:
        return
    # Render every child element, then collapse all runs of whitespace to
    # single spaces (this also trims leading/trailing whitespace).
    rendered = ' '.join(part.to_html() for part in self.text)
    text = ' '.join(rendered.split())
    if wrap_slash:
        # Hack to make text wrappable with long filenames TS 3.3
        text = text.replace('/', '/<wbr>')
        text = text.replace('\\', '\\<wbr>')
    return text
def get_mapping(self, index=None, doc_type=None, params=None):
    """Retrieve mapping definition of index or index/type.
    `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html>`_

    :arg index: A comma-separated list of index names
    :arg doc_type: A comma-separated list of document types
    :arg allow_no_indices: Whether to ignore if a wildcard indices
        expression resolves into no concrete indices. (This includes `_all`
        string or when no indices have been specified)
    :arg expand_wildcards: Whether to expand wildcard expression to concrete
        indices that are open, closed or both., default 'open', valid
        choices are: 'open', 'closed', 'none', 'all'
    :arg ignore_unavailable: Whether specified concrete indices should be
        ignored when unavailable (missing or closed)
    :arg local: Return local information, do not retrieve the state from
        master node (default: false)
    :arg include_type_name: Specify whether requests and responses should
        include a type name (default: depends on Elasticsearch version).
    """
    # Build the REST endpoint (e.g. /idx/_mapping/doc) and issue the GET.
    endpoint = _make_path(index, "_mapping", doc_type)
    return self.transport.perform_request("GET", endpoint, params=params)
def clean(config):
    """Delete a wily cache.

    :param config: The configuration
    :type config: :class:`wily.config.WilyConfig`
    """
    if exists(config):
        shutil.rmtree(config.cache_path)
        logger.debug("Deleted wily cache")
    else:
        # Nothing on disk: removal would fail, so just report and return.
        logger.debug("Wily cache does not exist, skipping")
def create_channel(self, name, values=None, *, shape=None, units=None, dtype=None, **kwargs) -> Channel:
    """Append a new channel.

    Parameters
    ----------
    name : string
        Unique name for this channel.
    values : array (optional)
        Array. If None, an empty array equaling the data shape is
        created. Default is None.
    shape : tuple of int
        Shape to use. Must broadcast with the full shape.
        Only used if `values` is None.
        Default is the full shape of self.
    units : string (optional)
        Channel units. Default is None.
    dtype : numpy.dtype (optional)
        dtype to use for dataset, default is np.float64.
        Only used if `values` is None.
    kwargs : dict
        Additional keyword arguments passed to Channel instantiation.

    Returns
    -------
    Channel
        Created channel.
    """
    # Re-use an existing channel of the same name instead of recreating it.
    if name in self.channel_names:
        warnings.warn(name, wt_exceptions.ObjectExistsWarning)
        return self[name]
    if name in self.variable_names:
        raise wt_exceptions.NameNotUniqueError(name)
    dataset_kwargs = {"chunks": True}
    if values is not None:
        # Concrete data supplied: shape and dtype come from the array.
        dataset_kwargs["data"] = values
        dataset_kwargs["shape"] = values.shape
        dataset_kwargs["dtype"] = values.dtype
    else:
        dataset_kwargs["shape"] = self.shape if shape is None else shape
        dataset_kwargs["dtype"] = np.dtype(np.float64) if dtype is None else dtype
        # Float/complex/datetime-like kinds can represent NaN; everything
        # else falls back to a zero fill value.
        if dataset_kwargs["dtype"].kind in "fcmM":
            dataset_kwargs["fillvalue"] = np.nan
        else:
            dataset_kwargs["fillvalue"] = 0
    # Scalar datasets cannot be chunked.
    if np.prod(dataset_kwargs["shape"]) == 1:
        dataset_kwargs["chunks"] = None
    # create dataset
    dataset_id = self.require_dataset(name=name, **dataset_kwargs).id
    channel = Channel(self, dataset_id, units=units, **kwargs)
    # finish: record the new channel name in the hdf5 attributes
    self.attrs["channel_names"] = np.append(self.attrs["channel_names"], name.encode())
    return channel
def get_displayable_field_names(self):
    """Return all field names, excluding reverse foreign key relationships."""
    names = []
    for field in self.model._meta.get_fields():
        if field.one_to_many:
            continue  # reverse FK relation: not displayable
        names.append(field.name)
    return names
def _parse_ignores(self):
    """Parse the ignores setting from the pylintrc file if available.

    Extends ``self.ignore_folders`` with the comma-separated ``ignore``
    value from the rcfile's ``[MASTER]`` section.  Exits with status 1
    when the user explicitly supplied an rcfile that is missing or
    malformed; a missing *default* rcfile is silently skipped.
    """
    error_message = (colorama.Fore.RED
                     + "{} does not appear to be a valid pylintrc file".format(self.rcfile)
                     + colorama.Fore.RESET)
    if not os.path.isfile(self.rcfile):
        if not self._is_using_default_rcfile():
            # The user asked for a specific rcfile; its absence is fatal.
            print(error_message)
            sys.exit(1)
        # Default rcfile simply doesn't exist: nothing to parse.
        return
    config = configparser.ConfigParser()
    try:
        config.read(self.rcfile)
    except configparser.MissingSectionHeaderError:
        print(error_message)
        sys.exit(1)
    # BUG FIX: the previous code called config.get("MASTER", "ignore")
    # guarded only by has_section(), so a [MASTER] section *without* an
    # "ignore" option raised configparser.NoOptionError.  Using the
    # fallback keyword makes the lookup safe in every case.
    ignore_value = config.get("MASTER", "ignore", fallback="")
    if ignore_value:
        self.ignore_folders += ignore_value.split(",")
def skipBytes(self, nroBytes):
    """Skips the specified number as parameter to the current value of the L{WriteData} stream.

    @type nroBytes: int
    @param nroBytes: The number of bytes to skip.
    """
    # Advance the stream position relative to where it currently is.
    current = self.data.tell()
    self.data.seek(current + nroBytes)
def writeprettyxml(self, filename_or_file=None, indent=" ", newl=os.linesep, encoding="UTF-8"):
    """Write the manifest as XML to a file or file object.

    :param filename_or_file: target path or writable binary file object;
        falls back to ``self.filename`` when falsy.  NOTE(review): when a
        file object is passed in, this function closes it.
    :param indent: indentation string passed to ``toprettyxml``.
    :param newl: newline string passed to ``toprettyxml``.
    :param encoding: output encoding passed to ``toprettyxml``.
    """
    if not filename_or_file:
        filename_or_file = self.filename
    # BUG FIX: the original tested isinstance(..., (str, unicode)), which
    # raises NameError on Python 3 where ``unicode`` no longer exists.
    try:
        string_types = (str, unicode)  # noqa: F821 - Python 2
    except NameError:
        string_types = (str,)  # Python 3
    if isinstance(filename_or_file, string_types):
        filename_or_file = open(filename_or_file, "wb")
    xmlstr = self.toprettyxml(indent, newl, encoding)
    filename_or_file.write(xmlstr)
    filename_or_file.close()
def _masquerade(origin: str, orig: ServiceDefn, new: ServiceDefn, **map: str) -> str:
    """Build an origin URL such that ``orig`` has all of the mappings to
    ``new`` defined by ``map``."""
    parsed: ParseResult = urlparse(origin)
    # Mappings already carried by the incoming origin URL's query string.
    existing = dict(parse_qsl(parsed.query)) if parsed.query else {}
    query_args = {}
    for new_name, orig_name in map.items():
        assert new_name in new.rpcs, [new_name, new.rpcs]
        assert orig_name in orig.rpcs, [orig_name, orig.rpcs]
        # todo: check if the definitions are the same
        new_rpc = new.rpcs[new_name]
        orig_rpc = orig.rpcs[orig_name]
        # Follow a pre-existing mapping through to its ultimate target.
        orig_name = existing.get(orig_name, orig_name)
        assert new_rpc.res == orig_rpc.res, [new_rpc.res, orig_rpc.res]
        assert new_rpc.req == orig_rpc.req, [new_rpc.req, orig_rpc.req]
        query_args[new_name] = orig_name
    return urlunparse(parsed._replace(query=urlencode(query_args)))
def batch_get_pay_giftcard(self, effective=True, offset=0, count=10):
    """Batch-query the rules for issuing coupons after payment.

    See also:
    https://mp.weixin.qq.com/wiki?id=mp1466494654_K9rNz

    :param effective: Whether to query only rules currently in effect
    :type effective: bool
    :param offset: Starting offset of the query
    :type offset: int
    :param count: Number of rules to query
    :type count: int
    :return: Total number of pay-gift-card rules, plus the queried list
    """
    payload = {
        'type': 'RULE_TYPE_PAY_MEMBER_CARD',
        'effective': effective,
        'offset': offset,
        'count': count,
    }
    return self._post('card/paygiftcard/batchget', data=payload)
def addImagePath(new_path):
    """Convenience function. Adds a path to the list of paths to search for images.
    Can be a URL (but must be accessible)."""
    # Local filesystem path: accept immediately.
    if os.path.exists(new_path):
        Settings.ImagePaths.append(new_path)
        return
    looks_like_url = "http://" in new_path or "https://" in new_path
    if not looks_like_url:
        raise OSError("File not found: " + new_path)
    # URL: probe it and only accept non-error responses (< 400).
    response = requests.get(new_path)
    if response.status_code >= 400:
        raise OSError("Unable to connect to " + new_path)
    Settings.ImagePaths.append(new_path)
def parse_peddy_sexcheck(handle: TextIO):
    """Parse Peddy sexcheck output.

    :param handle: open text handle to a peddy sex-check CSV file
    :returns: mapping of sample id to predicted sex, het ratio and error flag
    """
    results = {}
    for row in csv.DictReader(handle):
        results[row['sample_id']] = {
            'predicted_sex': row['predicted_sex'],
            'het_ratio': float(row['het_ratio']),
            # Peddy writes the error column as the literal strings
            # 'True'/'False'; convert to a real bool.
            'error': row['error'] == 'True',
        }
    return results
def boolean(meshes, operation='difference'):
    """Run an operation on a set of meshes"""
    # Build an OpenSCAD snippet of the form:
    #   difference(){import("$mesh_0");import("$mesh_1");...}
    body = ''.join('import("$mesh_%d");' % i for i in range(len(meshes)))
    script = operation + '(){' + body + '}'
    return interface_scad(meshes, script)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.