| signature (string, lengths 29–44.1k) | implementation (string, lengths 0–85.2k) |
|---|---|
def implany(self):
    """Set the type to <xsd:any/> when implicit.

    An element has an implicit <xsd:any/> type when it has no body and no
    explicitly defined type.

    @return: self
    @rtype: L{Element}
    """
    # Only synthesize a type when nothing is declared: no explicit type,
    # no reference, and an empty element body.
    if self.type is None and self.ref is None and self.root.isempty():
        self.type = self.anytype()
    # Fix: the docstring promises self for chaining, but the original
    # body fell off the end and returned None.
    return self
|
def start_worker_message_handler(self):
    """Spawn and start the worker message handler thread.

    The thread repeatedly drains messages from workers (job progress
    updates, failures, successes, etc.) and updates the job's status.

    Returns:
        The started InfiniteLoopThread instance.
    """
    handler_thread = InfiniteLoopThread(
        func=lambda: self.handle_worker_messages(timeout=2),
        thread_name="WORKERMESSAGEHANDLER",
        wait_between_runs=0.5,
    )
    handler_thread.start()
    return handler_thread
|
def patch(source, sink):
    """Create a direct link between a source and a sink.

    Equivalent to::

        sink = sink()
        for value in source():
            sink.send(value)

    Terminates quietly as soon as the sink raises StopIteration
    (i.e. the consumer has finished).
    """
    consumer = sink()
    for item in source():
        try:
            consumer.send(item)
        except StopIteration:
            break
|
def getSrcDatasetParents(self, url, dataset):
    """Query the source DBS for the parents of *dataset*.

    Calls the ``datasetparents`` DBS service endpoint at *url* and
    returns the decoded JSON response.
    """
    payload = {'dataset': dataset}
    raw = self.callDBSService(url, 'datasetparents', payload, {})
    return cjson.decode(raw)
|
def save_tiles(tiles, prefix='', directory=os.getcwd(), format='png'):
    """Write tile image files to disk and return the tiles.

    Args:
        tiles (list): List, tuple or set of :class:`Tile` objects to save.
        prefix (str): Filename prefix of saved tiles.
        directory (str): Directory in which tiles are saved.
        format (str): Image format forwarded to each tile's ``save``.

    Returns:
        Tuple of :class:`Tile` instances.
    """
    # Directory creation is deliberately left to the caller
    # (os.makedirs here caused problems in the CLI script).
    for current in tiles:
        target = current.generate_filename(
            prefix=prefix, directory=directory, format=format)
        current.save(filename=target, format=format)
    return tuple(tiles)
|
def search(self, name):
    """Return the first climbed node whose name matches the regexp *name*.

    Unlike the basic ``find`` method (which tests equality), this applies
    ``re.search`` against each node's name. Returns None when nothing
    matches.
    """
    matches = (candidate for candidate in self.climb()
               if re.search(name, candidate.name))
    return next(matches, None)
|
def get_context(self):
    """Return the template context for the email subject and body.

    The context contains every validated form value (keyed by field
    name) plus the current ``Site`` object under ``site``. Rendered as
    a ``RequestContext``, so context processors may add more variables.

    Raises:
        ValueError: if the form has not validated successfully.
    """
    if not self.is_valid():
        raise ValueError("Cannot generate Context from invalid contact form")
    return dict(self.cleaned_data, site=get_current_site(self.request))
|
def request(self, method, url, query_params=None, headers=None, body=None, post_params=None):
    """Execute an HTTP request through the urllib3 pool manager.

    :param method: http request method (GET, HEAD, DELETE, POST, PUT,
        PATCH or OPTIONS)
    :param url: http request url
    :param query_params: query parameters in the url
    :param headers: http request headers
    :param body: request json body, for `application/json`
    :param post_params: request post parameters, for
        `application/x-www-form-urlencoded` and `multipart/form-data`;
        mutually exclusive with `body`
    :raises ApiException: status 0 on SSL errors, or wrapping the
        response when the status code is not 2xx
    :return: a RESTResponse wrapping the urllib3 response
    """
    method = method.upper()
    assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS']
    if post_params and body:
        raise ValueError("body parameter cannot be used with post_params parameter.")
    post_params = post_params or {}
    headers = headers or {}
    # Default to JSON when the caller did not specify a content type.
    if 'Content-Type' not in headers:
        headers['Content-Type'] = 'application/json'
    try:
        # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
            if query_params:
                url += '?' + urlencode(query_params)
            if headers['Content-Type'] == 'application/json':
                request_body = None
                if body is not None:
                    request_body = json.dumps(body)
                r = self.pool_manager.request(method, url, body=request_body, headers=headers)
            if headers['Content-Type'] == 'application/x-www-form-urlencoded':
                r = self.pool_manager.request(method, url, fields=post_params, encode_multipart=False, headers=headers)
            if headers['Content-Type'] == 'multipart/form-data':
                # must del headers['Content-Type'], or the correct Content-Type
                # which is generated by urllib3 will be overwritten.
                del headers['Content-Type']
                r = self.pool_manager.request(method, url, fields=post_params, encode_multipart=True, headers=headers)
        # For `GET`, `HEAD`
        else:
            r = self.pool_manager.request(method, url, fields=query_params, headers=headers, preload_content=False)
    except urllib3.exceptions.SSLError as e:
        msg = "{0}\n{1}".format(type(e).__name__, str(e))
        raise ApiException(status=0, reason=msg)
    r = RESTResponse(r, method)
    # log response body
    logger.debug("response body: %s" % r.data)
    # NOTE(review): range(200, 206) accepts 200-205 only; 206 Partial
    # Content is treated as an error here -- confirm this is intended.
    if r.status not in range(200, 206):
        raise ApiException(http_resp=r)
    return r
|
def write_ds9region(self, region, *args, **kwargs):
    """Write a ds9-compatible region file for the ROI.

    Delegates to the `to_ds9` method and writes its output, one entry
    per line, to *region*. All positional and keyword arguments are
    forwarded to `to_ds9`; see that method's documentation for the
    accepted parameters and options.

    Parameters
    ----------
    region : str
        name of the region file to create
    """
    content = "\n".join(self.to_ds9(*args, **kwargs))
    with open(region, 'w') as handle:
        handle.write(content)
|
def provides(*specification):
    """Decorator marking the wrapped :py:class:`Module` method as a
    :term:`provider` for the given :term:`specification`.

    A single argument is stored as-is; multiple arguments are stored
    as a tuple.

    Example::

        class ApplicationModule(Module):
            @provides('db_connection')
            def provide_db_connection(self):
                return DBConnection(host='localhost')
    """
    spec = specification[0] if len(specification) == 1 else tuple(specification)

    def mark(function):
        function.__provides__ = spec
        return function

    return mark
|
def lookup(self, inc_raw=False, retry_count=3, asn_alts=None, extra_org_map=None, asn_methods=None, get_asn_description=True):
    """The wrapper function for retrieving and parsing ASN information for
    an IP address.

    Args:
        inc_raw (:obj:`bool`): Whether to include the raw results in the
            returned dictionary. Defaults to False.
        retry_count (:obj:`int`): The number of times to retry in case
            socket errors, timeouts, connection resets, etc. are
            encountered. Defaults to 3.
        asn_alts (:obj:`list`): Additional lookup types to attempt if the
            ASN dns lookup fails. Allow permutations must be enabled.
            *WARNING* deprecated in favor of asn_methods. Defaults to None.
        extra_org_map (:obj:`dict`): Mapping org handles to RIRs, for
            limited cases where ARIN REST (ASN fallback HTTP lookup) does
            not show an RIR as the org handle, e.g. {'DNIC': 'arin'}.
            Valid RIR values (case-sensitive, matching the REST result):
            'ARIN', 'RIPE', 'apnic', 'lacnic', 'afrinic'. Defaults to None.
        asn_methods (:obj:`list`): ASN lookup types to attempt, in order.
            If None, defaults to all: ['dns', 'whois', 'http'].
        get_asn_description (:obj:`bool`): Whether to run an additional
            query when pulling ASN information via dns, in order to get
            the ASN description. Defaults to True.

    Returns:
        dict: The ASN lookup results with keys 'asn', 'asn_date',
        'asn_registry', 'asn_cidr', 'asn_country_code',
        'asn_description', and 'raw' (only when inc_raw is True).

    Raises:
        ValueError: asn_methods requires one of dns, whois, http.
        ASNRegistryError: ASN registry does not match / all methods failed.
    """
    # Resolve the ordered list of lookup methods, honoring the
    # deprecated asn_alts argument when asn_methods is not given.
    if asn_methods is None:
        if asn_alts is None:
            lookups = ['dns', 'whois', 'http']
        else:
            from warnings import warn
            warn('IPASN.lookup() asn_alts argument has been deprecated '
                 'and will be removed. You should now use the asn_methods '
                 'argument.')
            lookups = ['dns'] + asn_alts
    else:
        if {'dns', 'whois', 'http'}.isdisjoint(asn_methods):
            raise ValueError('methods argument requires at least one of '
                             'dns, whois, http.')
        lookups = asn_methods
    response = None
    asn_data = None
    dns_success = False
    for index, lookup_method in enumerate(lookups):
        # Legacy behavior: falling through to a second method requires
        # allow_permutations unless asn_methods was given explicitly.
        if index > 0 and not asn_methods and not (self._net.allow_permutations):
            raise ASNRegistryError('ASN registry lookup failed. '
                                   'Permutations not allowed.')
        if lookup_method == 'dns':
            try:
                self._net.dns_resolver.lifetime = (
                    self._net.dns_resolver.timeout *
                    (retry_count and retry_count or 1))
                response = self._net.get_asn_dns()
                asn_data_list = []
                for asn_entry in response:
                    asn_data_list.append(self.parse_fields_dns(str(asn_entry)))
                # Iterate through the parsed ASN results to find the
                # smallest CIDR
                asn_data = asn_data_list.pop(0)
                try:
                    prefix_len = ip_network(asn_data['asn_cidr']).prefixlen
                    for asn_parsed in asn_data_list:
                        prefix_len_comp = ip_network(asn_parsed['asn_cidr']).prefixlen
                        if prefix_len_comp > prefix_len:
                            asn_data = asn_parsed
                            prefix_len = prefix_len_comp
                except (KeyError, ValueError):  # pragma: no cover
                    pass
                dns_success = True
                break
            except (ASNLookupError, ASNRegistryError) as e:
                log.debug('ASN DNS lookup failed: {0}'.format(e))
                pass
        elif lookup_method == 'whois':
            try:
                response = self._net.get_asn_whois(retry_count)
                asn_data = self.parse_fields_whois(response)
                # pragma: no cover
                break
            except (ASNLookupError, ASNRegistryError) as e:
                log.debug('ASN WHOIS lookup failed: {0}'.format(e))
                pass
        elif lookup_method == 'http':
            try:
                response = self._net.get_asn_http(retry_count=retry_count)
                asn_data = self.parse_fields_http(response, extra_org_map)
                break
            except (ASNLookupError, ASNRegistryError) as e:
                log.debug('ASN HTTP lookup failed: {0}'.format(e))
                pass
    if asn_data is None:
        raise ASNRegistryError('ASN lookup failed with no more methods to '
                               'try.')
    # The verbose description query is only available via DNS, so only
    # attempt it when the DNS lookup path succeeded.
    if get_asn_description and dns_success:
        try:
            response = self._net.get_asn_verbose_dns('AS{0}'.format(asn_data['asn']))
            asn_verbose_data = self.parse_fields_verbose_dns(response)
            asn_data['asn_description'] = asn_verbose_data['asn_description']
        except (ASNLookupError, ASNRegistryError) as e:  # pragma: no cover
            log.debug('ASN DNS verbose lookup failed: {0}'.format(e))
            pass
    if inc_raw:
        # NOTE: 'raw' holds the response of whichever query ran last
        # (possibly the verbose-description query).
        asn_data['raw'] = response
    return asn_data
|
def parse_args():
    """Parse and return the command line arguments and options."""
    parser = argparse.ArgumentParser(
        prog="geneparse-extractor",
        description="Genotype file extractor. This tool will extract markers "
                    "according to names or to genomic locations.",
        epilog="The parser arguments (PARSER_ARGS) are the same as the one in "
               "the API. For example, the arguments for the Plink parser is "
               "'prefix:PREFIX' (where PREFIX is the prefix of the "
               "BED/BIM/FAM files).",
    )

    # The input file format
    input_group = parser.add_argument_group("Input Options")
    input_group.add_argument(
        "-f", "--format",
        metavar="FORMAT", required=True, type=str, dest="input_format",
        choices=set(parsers.keys()),
        help="The input file format.",
    )
    input_group.add_argument(
        nargs="+", dest="parser_args", type=str, metavar="PARSER_ARGS",
        help="The arguments that will be passed to the genotype parsers.",
    )

    # The extract options
    extract_group = parser.add_argument_group("Extract Options")
    extract_group.add_argument(
        "-e", "--extract",
        metavar="FILE", type=argparse.FileType("r"),
        help="The list of markers to extract (one per line, no header).",
    )
    extract_group.add_argument(
        "-k", "--keep",
        metavar="FILE", type=argparse.FileType("r"),
        help="The list of samples to keep (one per line, no header).",
    )
    extract_group.add_argument(
        "--maf", action="store_true",
        help="Check MAF and flip the allele coding if the MAF is higher "
             "than 50%%.",
    )

    # The output options
    output_group = parser.add_argument_group("Output Options")
    output_group.add_argument(
        "-o", "--output",
        metavar="FILE", type=str, required=True,
        help="The output file (can be '-' for STDOUT when using VCF or CSV as "
             "output format).",
    )
    output_group.add_argument(
        "--output-format",
        metavar="FORMAT", default="vcf", type=str,
        choices={"vcf", "plink", "csv"},
        help="The output file format. Note that the extension will be added "
             "if absent. Note that CSV is a long format (hence it might take "
             "more disk space).",
    )

    return parser.parse_args()
|
def _map_values(self, mapper, na_action=None):
    """An internal function that maps values using the input
    correspondence (which can be a dict, Series, or function).

    Parameters
    ----------
    mapper : function, dict, or Series
        The input correspondence object
    na_action : {None, 'ignore'}
        If 'ignore', propagate NA values, without passing them to the
        mapping function

    Returns
    -------
    Union[Index, MultiIndex], inferred
        The output of the mapping function applied to the index.
        If the function returns a tuple with more than one element
        a MultiIndex will be returned.
    """
    # we can fastpath dict/Series to an efficient map
    # as we know that we are not going to have to yield
    # python types
    if isinstance(mapper, dict):
        if hasattr(mapper, '__missing__'):
            # If a dictionary subclass defines a default value method,
            # convert mapper to a lookup function (GH #15999).
            dict_with_default = mapper
            mapper = lambda x: dict_with_default[x]
        else:
            # Dictionary does not have a default. Thus it's safe to
            # convert to a Series for efficiency.
            # we specify the keys here to handle the
            # possibility that they are tuples
            from pandas import Series
            mapper = Series(mapper)
    if isinstance(mapper, ABCSeries):
        # Since values were input this means we came from either
        # a dict or a series and mapper should be an index
        if is_extension_type(self.dtype):
            values = self._values
        else:
            values = self.values
        # Positional alignment: look values up in the mapper's index
        # and take the matching mapper values (missing -> NaN).
        indexer = mapper.index.get_indexer(values)
        new_values = algorithms.take_1d(mapper._values, indexer)
        return new_values
    # we must convert to python types
    if is_extension_type(self.dtype):
        values = self._values
        if na_action is not None:
            # na_action is not supported for extension arrays here.
            raise NotImplementedError
        map_f = lambda values, f: values.map(f)
    else:
        values = self.astype(object)
        values = getattr(values, 'values', values)
        if na_action == 'ignore':
            # Skip NA positions: the mask tells map_infer_mask which
            # elements to propagate untouched.
            def map_f(values, f):
                return lib.map_infer_mask(values, f, isna(values).view(np.uint8))
        else:
            map_f = lib.map_infer
    # mapper is a function
    new_values = map_f(values, mapper)
    return new_values
|
def respond(self, output):
    """Send a 200 JSON response describing *output*.

    The body carries the command's exit code and captured log under
    the keys ``exit_code`` and ``command_output``.
    """
    payload = json.dumps({
        'exit_code': output.code,
        'command_output': output.log,
    })
    self.send_response(200)
    self.send_header('Content-type', 'application/json')
    self.end_headers()
    self.wfile.write(bytes(payload, "utf8"))
|
def task_done(self) -> None:
    """Indicate that a formerly enqueued task is complete.

    Used by queue consumers: for each `.get` used to fetch a task, a
    subsequent call to `.task_done` tells the queue that processing of
    that task is finished. When the unfinished count drops to zero the
    `_finished` event is set, resuming any blocking `.join`.

    Raises:
        ValueError: if called more times than `.put`.
    """
    remaining = self._unfinished_tasks
    if remaining <= 0:
        raise ValueError("task_done() called too many times")
    remaining -= 1
    self._unfinished_tasks = remaining
    if remaining == 0:
        self._finished.set()
|
def validate_rid(model, rid):
    """Abort with InvalidURL unless *rid* is syntactically valid.

    Only applies when the model's rid field is an ``IntType``; the
    resource id must then be coercible to ``int``.
    """
    field = getattr(model, model.rid_field)
    if not isinstance(field, IntType):
        return
    try:
        int(rid)
    except (TypeError, ValueError):
        detail = ('The resource id {} in your request is not '
                  'syntactically correct. Only numeric type '
                  'resource id\'s are allowed'.format(rid))
        abort(exceptions.InvalidURL(**{'detail': detail}))
|
def runjava(self, classpath, main, jvm_options=None, args=None, workunit_name=None, workunit_labels=None, workunit_log_config=None, dist=None):
    """Run the given java main with the supplied classpath and args.

    If --execution-strategy=subprocess is specified then the java main is
    run in a freshly spawned subprocess, otherwise a persistent nailgun
    server dedicated to this Task subclass is used to speed up amortized
    run times.

    :API: public
    """
    java_executor = self.create_java_executor(dist=dist)
    # Creating a synthetic jar to work around the system arg length limit
    # is unnecessary with `NailgunExecutor`, since args travel over a
    # socket; skip it when nailgun is in use.
    use_synthetic_jar = self.execution_strategy != self.NAILGUN
    try:
        return util.execute_java(
            classpath=classpath,
            main=main,
            jvm_options=jvm_options,
            args=args,
            executor=java_executor,
            workunit_factory=self.context.new_workunit,
            workunit_name=workunit_name,
            workunit_labels=workunit_labels,
            workunit_log_config=workunit_log_config,
            create_synthetic_jar=use_synthetic_jar,
            synthetic_jar_dir=self._executor_workdir)
    except java_executor.Error as e:
        raise TaskError(e)
|
def mark_address(self, addr, size):
    """Mark a contiguous address range as used in the simulator.

    Marks ``size`` consecutive addresses starting at ``addr`` in the
    register map.

    Bug fixed: the original loop wrote ``addr`` itself ``size`` times,
    leaving ``addr+1 .. addr+size-1`` unmarked.
    """
    for offset in range(size):
        self._register_map[addr + offset] = True
|
def maybe_clean(self):
    """Evict expired entries when the cleaning interval has elapsed.

    Compares the current time against ``self.next_cleaning``; when due,
    removes every entry whose ``expiration`` has passed and schedules
    the next sweep ``self.cleaning_interval`` seconds from now.

    Fixed for Python 3 compatibility: ``dict.iteritems()`` no longer
    exists; ``items()`` behaves identically here and also works on
    Python 2.
    """
    now = time.time()
    if self.next_cleaning <= now:
        # Collect first, delete second: never mutate the dict while
        # iterating over it.
        expired = [key for (key, entry) in self.data.items()
                   if entry.expiration <= now]
        for key in expired:
            del self.data[key]
        # Re-read the clock: the sweep itself may have taken time.
        now = time.time()
        self.next_cleaning = now + self.cleaning_interval
|
def inspiral_range_psd(psd, snr=8, mass1=1.4, mass2=1.4, horizon=False):
    """Compute the inspiral sensitive distance PSD from a GW strain PSD.

    Parameters
    ----------
    psd : `~gwpy.frequencyseries.FrequencySeries`
        the instrumental power-spectral-density data
    snr : `float`, optional
        the signal-to-noise ratio for which to calculate range,
        default: `8`
    mass1 : `float`, `~astropy.units.Quantity`, optional
        the mass (`float` assumed in solar masses) of the first binary
        component, default: `1.4`
    mass2 : `float`, `~astropy.units.Quantity`, optional
        the mass (`float` assumed in solar masses) of the second binary
        component, default: `1.4`
    horizon : `bool`, optional
        if `True`, return the maximal 'horizon' sensitive distance,
        otherwise return the angle-averaged range, default: `False`

    Returns
    -------
    rspec : `~gwpy.frequencyseries.FrequencySeries`
        the calculated inspiral sensitivity PSD [Mpc^2 / Hz]
    """
    # compute chirp mass (component masses converted to kg first)
    mass1 = units.Quantity(mass1, 'solMass').to('kg')
    mass2 = units.Quantity(mass2, 'solMass').to('kg')
    mtotal = mass1 + mass2
    mchirp = (mass1 * mass2) ** (3 / 5.) / mtotal ** (1 / 5.)
    # compute ISCO (innermost stable circular orbit) frequency cutoff
    fisco = (constants.c ** 3 / (constants.G * 6 ** 1.5 * pi * mtotal)).to('Hz')
    # calculate integral pre-factor
    prefactor = ((1.77 ** 2 * 5 * constants.c ** (1 / 3.) *
                  (mchirp * constants.G / constants.c ** 2) ** (5 / 3.)) /
                 (96 * pi ** (4 / 3.) * snr ** 2))
    # calculate inspiral range ASD in m^2/Hz
    integrand = 1 / psd * psd.frequencies ** (-7 / 3.) * prefactor
    # restrict to frequencies below ISCO
    integrand = integrand[psd.frequencies.value < fisco.value]
    # zero out the DC bin if present (f^(-7/3) diverges at f=0)
    if integrand.f0.value == 0.0:
        integrand[0] = 0.0
    # scale up to the horizon (optimally-oriented) distance if requested
    if horizon:
        integrand *= 2.26 ** 2
    return integrand.to('Mpc^2 / Hz')
|
def process_pybel_neighborhood(gene_names, network_file=None, network_type='belscript', **kwargs):
    """Return PybelProcessor around neighborhood of given genes in a network.

    This function processes the given network file and filters the returned
    Statements to ones that contain genes in the given list.

    Parameters
    ----------
    gene_names : list
        Gene names used to filter the statements.
    network_file : Optional[str]
        Path to the network file to process. If not given, by default, the
        BEL Large Corpus is used.
    network_type : Optional[str]
        This function allows processing both BEL Script files and JSON
        files. This argument controls which type is assumed, and the value
        can be either 'belscript' or 'json'. Default: 'belscript'

    Returns
    -------
    bp : PybelProcessor
        A PybelProcessor object which contains INDRA Statements in
        bp.statements.

    Raises
    ------
    ValueError
        If network_type is neither 'belscript' nor 'json'. (Previously an
        unrecognized type left the processor unbound and crashed with a
        confusing NameError.)
    """
    if network_file is None:
        # Use the BEL Large Corpus as the default base network.
        network_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            os.path.pardir, os.path.pardir, os.path.pardir,
            'data', 'large_corpus.bel')
    if network_type == 'belscript':
        bp = process_belscript(network_file, **kwargs)
    elif network_type == 'json':
        bp = process_json_file(network_file)
    else:
        raise ValueError('network_type must be either "belscript" or '
                         '"json", got %s' % network_type)
    # Keep only statements with at least one agent in gene_names.
    filtered_stmts = []
    for stmt in bp.statements:
        if any(agent is not None and agent.name in gene_names
               for agent in stmt.agent_list()):
            filtered_stmts.append(stmt)
    bp.statements = filtered_stmts
    return bp
|
def iodp_srm_lore(srm_file, dir_path=".", input_dir_path="", noave=False, comp_depth_key='Depth CSF-B (m)', meas_file="srm_arch_measurements.txt", spec_file="srm_arch_specimens.txt", samp_file='srm_arch_samples.txt', site_file='srm_arch_sites.txt', lat="", lon=""):
    """Convert IODP archive half measurement files into MagIC file(s).

    Parameters
    ----------
    srm_file : str
        input csv file downloaded from the LIMS online repository
    dir_path : str
        output directory, default "."
    input_dir_path : str
        input file directory IF different from dir_path, default ""
    noave : bool
        if False, average replicate measurements
    comp_depth_key : str
        column name of the composite depth, default 'Depth CSF-B (m)'
    meas_file : str
        output measurement file name
    spec_file : str
        specimens file name as output file - these specimens are not
        already in the specimen table created by iodp_samples_csv
    samp_file : str
        samples file name as output file - these are not already in the
        specimen table created by iodp_samples_csv
    site_file : str
        sites file name as output file - these are not already in the
        samples table
    lat, lon : str
        latitude/longitude forwarded to iodp_samples_srm

    Returns
    -------
    bool
        True if the conversion succeeded and the measurement file was
        written, False if the input csv was empty.
    """
    # initialize defaults
    version_num = pmag.get_version()
    # resolve input/output directories
    input_dir_path, output_dir_path = pmag.fix_directories(input_dir_path, dir_path)
    # columns required in the MagIC measurements table
    meas_reqd_columns = ['specimen', 'measurement', 'experiment', 'sequence', 'quality', 'method_codes', 'instrument_codes', 'citations', 'treat_temp', 'treat_ac_field', 'treat_dc_field', 'treat_dc_field_phi', 'treat_dc_field_theta', 'meas_temp', 'dir_dec', 'dir_inc', 'magn_moment', 'magn_volume', 'description', 'timestamp', 'software_packages', 'external_database_ids', 'treat_step_num', 'meas_n_orient']
    srm_file = pmag.resolve_file_name(srm_file, input_dir_path)
    spec_file = pmag.resolve_file_name(spec_file, dir_path)
    in_df = pd.read_csv(srm_file, header=0)
    in_df.drop_duplicates(inplace=True)
    in_df.sort_values(by='Treatment Value', inplace=True)
    if len(in_df) == 0:
        print('you must download a csv file from the LIMS database and place it in your input_dir_path')
        return False
    measurements_df = pd.DataFrame(columns=meas_reqd_columns)
    meas_out = os.path.join(output_dir_path, meas_file)
    # build the specimen/sample/site tables and get the specimen names
    hole, srm_specimens = iodp_samples_srm(in_df, spec_file=spec_file, samp_file=samp_file, site_file=site_file, dir_path=dir_path, input_dir_path=input_dir_path, comp_depth_key=comp_depth_key, lat=lat, lon=lon)
    # assume only one hole
    # set up defaults
    measurements_df['specimen'] = srm_specimens
    measurements_df['quality'] = 'g'
    measurements_df['citations'] = 'This study'
    measurements_df['meas_temp'] = 273
    measurements_df['software_packages'] = version_num
    measurements_df["treat_temp"] = '%8.3e' % (273)
    # room temp in kelvin
    measurements_df["meas_temp"] = '%8.3e' % (273)
    # room temp in kelvin
    measurements_df["treat_ac_field"] = '0'
    measurements_df["treat_dc_field"] = '0'
    measurements_df["treat_dc_field_phi"] = '0'
    measurements_df["treat_dc_field_theta"] = '0'
    measurements_df["standard"] = 'u'
    # assume all data are "good"
    measurements_df["dir_csd"] = '0'
    # assume all data are "good"
    measurements_df["method_codes"] = 'LT-NO'
    # assume all are NRMs
    measurements_df['instrument_codes'] = "IODP-SRM"
    # assume all measurements on shipboard 2G
    measurements_df['treat_step_num'] = '0'
    # assign a number
    measurements_df['meas_n_orient'] = 1
    # at least one orientation
    measurements_df['timestamp'] = pd.to_datetime(in_df['Timestamp (UTC)']).dt.strftime("%Y-%m-%dT%H:%M:%S") + 'Z'
    measurements_df['dir_dec'] = in_df['Declination background & drift corrected (deg)']
    # declination
    measurements_df['dir_inc'] = in_df['Inclination background & drift corrected (deg)']
    # inclination
    measurements_df['magn_volume'] = in_df['Intensity background & drift corrected (A/m)']
    # magnetization
    Xs = in_df['Magnetic moment x (Am²)']
    Ys = in_df['Magnetic moment y (Am²)']
    Zs = in_df['Magnetic moment z (Am²)']
    # total moment from the three orthogonal components
    magn_moment = np.sqrt(Xs ** 2 + Ys ** 2 + Zs ** 2)
    measurements_df['magn_moment'] = magn_moment
    # moment in Am^2
    measurements_df['description'] = in_df['Treatment Type']
    # temporary column
    measurements_df['treat_ac_field'] = in_df['Treatment Value'] * 1e-3
    # assume all treatments are AF; LIMS reports mT, MagIC wants T
    measurements_df.loc[measurements_df['description'] == 'IN-LINE AF DEMAG', 'method_codes'] = 'LT-AF-Z'
    measurements_df.loc[measurements_df['description'] == 'IN-LINE AF DEMAG', 'instrument_codes'] = 'IODP-SRM:IODP-SRM-AF'
    measurements_df['external_database_ids'] = 'LORE[' + in_df['Test No.'].astype('str') + ']'
    measurements_df.fillna("", inplace=True)
    meas_dicts = measurements_df.to_dict('records')
    meas_dicts = pmag.measurements_methods3(meas_dicts, noave=noave)
    pmag.magic_write(meas_out, meas_dicts, 'measurements')
    return True
|
def is_parsable(url):
    """Check if the given URL is parsable (i.e. a valid URL); cache it if so.

    Args:
        url (str): The URL to check.

    Returns:
        bool: True if parsable, False otherwise.
    """
    try:
        parsed = urlparse(url)
    except ValueError:
        # urlparse raises ValueError on malformed input (e.g. invalid
        # IPv6 brackets). Anything else is a programming error and should
        # propagate instead of being hidden by the previous bare `except`.
        return False
    URLHelper.__cache[url] = parsed
    return True
|
def big_dataframe_setup():  # pragma: no cover
    """Configure pandas to display very large data frames."""
    huge = sys.maxsize
    # 'display.height' has been deprecated, so it is not set here.
    options = {
        'display.max_colwidth': huge,
        'max_colwidth': huge,
        'display.max_rows': huge,
        'display.width': huge,
        'display.colheader_justify': 'center',
        'display.column_space': huge,
        'display.max_seq_items': huge,
        'display.expand_frame_repr': True,
    }
    for option_name, value in options.items():
        pd.set_option(option_name, value)
|
def inc_from_lat(lat):
    """Calculate the inclination predicted from latitude using the dipole
    equation: tan(inc) = 2 * tan(lat).

    Parameters
    ----------
    lat : float
        latitude in degrees

    Returns
    -------
    inc : float
        inclination in degrees, calculated using the dipole equation
    """
    # Plain true division replaces the Python-2 compatibility shim
    # `old_div`; for float operands the two are identical.
    rad = np.pi / 180.
    return np.arctan(2 * np.tan(lat * rad)) / rad
|
def file_copy(self, source, destination, flags):
    """Copy a file from one guest location to another.

    Overwrites the destination file unless
    :py:attr:`FileCopyFlag.no_replace` is given in *flags*.

    in source of type str
        The path to the file to copy (in the guest). Guest path style.
    in destination of type str
        The path to the target file (in the guest). This cannot be a
        directory. Guest path style.
    in flags of type :class:`FileCopyFlag`
        Zero or more :py:class:`FileCopyFlag` values.
    return progress of type :class:`IProgress`
        Progress object to track the operation to completion.
    raises :class:`OleErrorNotimpl`
        Not yet implemented.
    """
    if not isinstance(source, basestring):
        raise TypeError("source can only be an instance of type basestring")
    if not isinstance(destination, basestring):
        raise TypeError("destination can only be an instance of type basestring")
    if not isinstance(flags, list):
        raise TypeError("flags can only be an instance of type list")
    # Generated-binding convention: only the first ten flag entries are
    # type-checked before making the call.
    for flag in flags[:10]:
        if not isinstance(flag, FileCopyFlag):
            raise TypeError("array can only contain objects of type FileCopyFlag")
    raw_progress = self._call("fileCopy", in_p=[source, destination, flags])
    return IProgress(raw_progress)
|
def fullName(self):
    """A full name, intended to uniquely identify a parameter."""
    parent, own = self.parentName, self.name
    # Join with '_' when both are set (cannot use '.', because it is
    # used as **kwargs); otherwise fall back to whichever one is set,
    # which allows an empty name for "anonymous nests".
    if parent and own:
        return parent + '_' + own
    return own or parent
|
def is_all(self) -> bool:
    """True if the sequence set starts at ``1`` and ends at the maximum
    value.

    This may be used to optimize cases of checking for a value in the
    set, avoiding the need to provide ``max_value`` in :meth:`.flatten`
    or :meth:`.iter`.
    """
    head = self.sequences[0]
    if not isinstance(head, tuple):
        return False
    return head[0] == 1 and isinstance(head[1], MaxValue)
|
def printex(ex, msg='[!?] Caught exception', prefix=None, key_list=[], locals_=None, iswarning=False, tb=TB, pad_stdout=True, N=0, use_stdout=False, reraise=False, msg_=None, keys=None, colored=None):
    """Prints (and/or logs) an exception with relevant info.

    Args:
        ex (Exception): exception to print
        msg (str): a message to display to the user; if True, the caller's
            locals are used as key_list and msg_ becomes the message
        prefix (None): error prefix; derived from the caller when None
        key_list (list): DEPRECATED, use keys
        locals_ (None): locals mapping; taken from the parent frame when None
        iswarning (bool): print as a warning rather than an error
        tb (bool): if True include the traceback in the error message
        pad_stdout (bool): separate the message from stdout with newlines
        N (int): how many frames up to look for prefix/locals
        use_stdout (bool): write via sys.stdout instead of colorprint
        reraise (bool): re-raise the exception after printing (non-warnings)
        msg_ (None): replacement message used when msg is True
        keys (None): list of variable/expression names of interest

    Returns:
        None

    NOTE(review): key_list=[] is a mutable default argument; it appears
    to be read-only here, but confirm formatex never mutates it.
    """
    import utool as ut
    if isinstance(ex, MemoryError):
        ut.print_resource_usage()
    # `keys` is the supported spelling; key_list is kept for callers
    # that still use the deprecated name.
    if keys is not None:
        key_list = keys
    # Get error prefix and local info
    if prefix is None:
        prefix = get_caller_prefix(aserror=True, N=N)
    if locals_ is None:
        locals_ = get_parent_frame(N=N).f_locals
    # build exception message
    if msg is True:
        key_list = get_parent_frame().f_locals
        msg = msg_
    exstr = formatex(ex, msg, prefix, key_list, locals_, iswarning, tb=tb, colored=colored)
    # get requested print function
    if use_stdout:
        def print_func(*args):
            msg = ', '.join(list(map(six.text_type, args)))
            sys.stdout.write(msg + '\n')
            sys.stdout.flush()
    else:
        print_func = ut.partial(ut.colorprint, color='yellow' if iswarning else 'red')
    if pad_stdout:
        print_func('\n+------\n')
    # print the exception
    print_func(exstr)
    if pad_stdout:
        print_func('\nL______\n')
    # If you dont know where an error is coming from, raise-all
    if (reraise and not iswarning) or RAISE_ALL:
        sys.stdout.flush()
        sys.stderr.flush()
        raise ex
    if ut.get_argflag('--exit-on-error'):
        print('WARNING: dont use this flag. Some errors are meant to be caught')
        ut.print_traceback()
        print('REQUESTED EXIT ON ERROR')
        sys.exit(1)
|
def _Bound_Ps ( P , s ) :
    """Identify the IAPWS-IF97 region for a (P, s) input pair.

    Parameters
    ----------
    P : float
        Pressure, [MPa]
    s : float
        Specific entropy, [kJ/kgK]

    Returns
    -------
    region : int or None
        IAPWS-97 region code (1-5), or None when (P, s) lies outside the
        validity range of the formulation.

    References
    ----------
    Wagner, W; Kretzschmar, H-J: International Steam Tables: Properties of
    Water and Steam Based on the Industrial Formulation IAPWS-IF97; Springer,
    2008; doi: 10.1007/978-3-540-74234-0. Fig. 2.9
    """
    region = None
    # Low-pressure band: below the 623.15 K saturation pressure the
    # two-phase region (4) separates regions 1 and 2 along the isobar.
    if Pmin <= P <= Ps_623 :
        smin = _Region1 ( 273.15 , P ) [ "s" ]
        s14 = _Region1 ( _TSat_P ( P ) , P ) [ "s" ]
        s24 = _Region2 ( _TSat_P ( P ) , P ) [ "s" ]
        s25 = _Region2 ( 1073.15 , P ) [ "s" ]
        smax = _Region5 ( 2273.15 , P ) [ "s" ]
        if smin <= s <= s14 :
            region = 1
        elif s14 < s < s24 :
            region = 4
        elif s24 <= s <= s25 :
            region = 2
        elif s25 < s <= smax :
            region = 5
    # Mid band up to the critical pressure: region 3 appears between the
    # region-1 and region-2 boundaries, sharing an edge with region 4.
    elif Ps_623 < P < Pc :
        smin = _Region1 ( 273.15 , P ) [ "s" ]
        s13 = _Region1 ( 623.15 , P ) [ "s" ]
        s32 = _Region2 ( _t_P ( P ) , P ) [ "s" ]
        s25 = _Region2 ( 1073.15 , P ) [ "s" ]
        smax = _Region5 ( 2273.15 , P ) [ "s" ]
        if smin <= s <= s13 :
            region = 1
        elif s13 < s < s32 :
            # Saturation pressure at this entropy decides wet (4) vs dense (3);
            # fall back to Pc when the backward equation is out of range.
            try :
                p34 = _PSat_s ( s )
            except NotImplementedError :
                p34 = Pc
            if P < p34 :
                region = 4
            else :
                region = 3
        elif s32 <= s <= s25 :
            region = 2
        elif s25 < s <= smax :
            region = 5
    # Supercritical band: no two-phase region; region 5 is only valid to 50 MPa.
    elif Pc <= P <= 100 :
        smin = _Region1 ( 273.15 , P ) [ "s" ]
        s13 = _Region1 ( 623.15 , P ) [ "s" ]
        s32 = _Region2 ( _t_P ( P ) , P ) [ "s" ]
        s25 = _Region2 ( 1073.15 , P ) [ "s" ]
        smax = _Region5 ( 2273.15 , P ) [ "s" ]
        if smin <= s <= s13 :
            region = 1
        elif s13 < s < s32 :
            region = 3
        elif s32 <= s <= s25 :
            region = 2
        elif P <= 50 and s25 <= s <= smax :
            region = 5
    return region
|
def compute_checksum ( self , payload_offset : Optional [ int ] = None ) :
    '''Compute SHA-1 digests over the record block and add them to the fields.

    Sets ``WARC-Block-Digest`` over the whole block, ``WARC-Payload-Digest``
    over the bytes after ``payload_offset`` (when given), and always sets
    ``Content-Length``.
    '''
    # Nothing buffered: just declare a zero-length body.
    if not self . block_file :
        self . fields [ 'Content-Length' ] = '0'
        return
    whole_digest = hashlib . sha1 ( )
    payload_digest = hashlib . sha1 ( )
    with wpull . util . reset_file_offset ( self . block_file ) :
        # Header bytes count toward the block digest only.
        if payload_offset is not None :
            whole_digest . update ( self . block_file . read ( payload_offset ) )
        # Stream the remainder in chunks; it feeds both digests.
        for chunk in iter ( lambda : self . block_file . read ( 4096 ) , b'' ) :
            whole_digest . update ( chunk )
            payload_digest . update ( chunk )
        content_length = self . block_file . tell ( )
    self . fields [ 'WARC-Block-Digest' ] = 'sha1:{0}' . format ( base64 . b32encode ( whole_digest . digest ( ) ) . decode ( ) )
    if payload_offset is not None :
        self . fields [ 'WARC-Payload-Digest' ] = 'sha1:{0}' . format ( base64 . b32encode ( payload_digest . digest ( ) ) . decode ( ) )
    self . fields [ 'Content-Length' ] = str ( content_length )
|
def can_allow_multiple_input_shapes ( spec ) :
    """Examine a model specification and determine whether it can compute
    results for more than one input shape.

    :param spec: MLModel
        The protobuf specification of the model.
    :return: Bool
        True if any input of the model has a flexible shape range, False
        otherwise.
    :raises Exception: if the spec does not contain a neural network, or if
        shapes cannot be computed for it.
    """
    # First, check that the model actually has a neural network in it.
    # NOTE: was a bare `except:`, which also swallowed KeyboardInterrupt /
    # SystemExit; narrowed to Exception and chained for debuggability.
    try :
        _get_nn_layers ( spec )
    except Exception as err :
        raise Exception ( 'Unable to verify that this model contains a neural network.' ) from err
    try :
        shaper = NeuralNetworkShaper ( spec , False )
    except Exception as err :
        raise Exception ( 'Unable to compute shapes for this neural network.' ) from err
    # Flexible shape on any input is enough to allow multiple input shapes.
    for name in _get_input_names ( spec ) :
        shape_range = NeuralNetworkMultiArrayShapeRange ( shaper . shape ( name ) )
        if shape_range . isFlexible ( ) :
            return True
    return False
|
def lnprior ( x ) :
    """Log prior for parameter vector ``x = (per, t0, b)``.

    Uniform (log-prob 0) inside the box per in [7, 10], t0 in [1978, 1979],
    b in [-1, 1]; ``-inf`` outside.
    """
    per , t0 , b = x
    inside = ( - 1 <= b <= 1 ) and ( 7 <= per <= 10 ) and ( 1978 <= t0 <= 1979 )
    return 0. if inside else - np . inf
|
def canon_ref ( did : str , ref : str , delimiter : str = None ) :
    """Given a reference in a DID document, return it in its canonical URI form.

    :param did: DID acting as the identifier of the DID document
    :param ref: reference to canonicalize, either a DID or a fragment pointing
        to a location in the DID doc
    :param delimiter: delimiter character marking fragment (default '#') or
        introducing identifier (';') against DID resource
    :return: canonical URI string (``did:sov:...`` or a pass-through URL)
    :raises BadIdentifier: if ``did`` is not a valid sovrin DID, or ``ref``
        is a ``did:sov:`` URI that does not wrap a valid DID
    """
    if not ok_did ( did ) :
        raise BadIdentifier ( 'Bad DID {} cannot act as DID document identifier' . format ( did ) )
    if ok_did ( ref ) : # e.g., LjgpST2rjsoxYegQDRm7EL
        # NOTE(review): returns the document's own `did`, not `ref` - this
        # presumably assumes a bare-DID ref always names the doc itself; confirm.
        return 'did:sov:{}' . format ( did )
    if ok_did ( resource ( ref , delimiter ) ) : # e.g., LjgpST2rjsoxYegQDRm7EL#keys-1
        return 'did:sov:{}' . format ( ref )
    if ref . startswith ( 'did:sov:' ) : # e.g., did:sov:LjgpST2rjsoxYegQDRm7EL, did:sov:LjgpST2rjsoxYegQDRm7EL#3
        rv = ref [ 8 : ]
        if ok_did ( resource ( rv , delimiter ) ) :
            return ref
        raise BadIdentifier ( 'Bad URI {} does not correspond to a sovrin DID' . format ( ref ) )
    if urlparse ( ref ) . scheme : # e.g., https://example.com/messages/8377464
        # Absolute URLs are already canonical; pass through untouched.
        return ref
    # Bare fragment: anchor it to the document DID with the delimiter.
    return 'did:sov:{}{}{}' . format ( did , delimiter if delimiter else '#' , ref )
|
def format_json ( item , ** kwargs ) :
    """Format a datatype object into a JSON-serializable value.

    Returns ``item.value`` unchanged when it is already JSON-serializable;
    date/time-typed values fall back to their ISO-8601 string. Any other
    non-serializable value re-raises the ``TypeError``.
    """
    value = item . value
    try :
        json . dumps ( value )
    except TypeError :
        type_name = item . class_type . lower ( )
        if 'time' in type_name or 'date' in type_name :
            return value . isoformat ( )
        raise
    return value
|
def filter ( self , record ) :
    """Logging filter hook: attach request context to the log record.

    Adds ``username``, ``remote_addr`` and ``http_user_agent`` to the record
    (each '-' when unavailable) and attaches the request itself. Always
    returns True so no record is suppressed.
    """
    request = get_request ( )
    if request :
        user = getattr ( request , 'user' , None )
        # NOTE(review): calls user.is_anonymous() as a method - correct for
        # older Django; on modern Django is_anonymous is a property. Confirm
        # the targeted Django version.
        if user and not user . is_anonymous ( ) :
            record . username = user . username
        else :
            record . username = '-'
        meta = getattr ( request , 'META' , { } )
        record . remote_addr = meta . get ( 'REMOTE_ADDR' , '-' )
        record . http_user_agent = meta . get ( 'HTTP_USER_AGENT' , '-' )
        # Don't clobber a request someone already attached to the record.
        if not hasattr ( record , 'request' ) :
            record . request = request
    else :
        # No active request (e.g. management command): use placeholders.
        record . username = '-'
        record . remote_addr = '-'
        record . http_user_agent = '-'
    return True
|
def insertPreviousCommand ( self ) :
    """Step backwards through the command history and insert that entry.

    Walking past the oldest entry resets the cursor past the newest entry
    and inserts an empty prompt.
    """
    self . _currentHistoryIndex -= 1
    index = self . _currentHistoryIndex
    if 0 <= index < len ( self . _history ) :
        command = self . _history [ index ]
    else :
        # Ran off the start of history: reset to "one past the end".
        command = '>>> '
        self . _currentHistoryIndex = len ( self . _history )
    self . replaceCommand ( command )
|
def play_state ( self ) :
    """Return the current play state, e.g. playing or paused.

    Extracts the 'caps' value from the 'cmst' block of the cached
    ``self.playstatus`` response and maps it to a play-state constant.
    """
    state = parser . first ( self . playstatus , 'cmst' , 'caps' )
    return convert . playstate ( state )
|
def make_private ( self , client = None ) :
    """Update the blob's ACL, revoking read access for anonymous users.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
        to the ``client`` stored on the blob's bucket.
    """
    # Revoke locally, then persist the modified ACL with a save call.
    self . acl . all ( ) . revoke_read ( )
    self . acl . save ( client = client )
|
def get_key_codes ( keys ) :
    """Translate a key-combination string into a list of key codes.

    Example: 'CTRL+A' produces [17, 65]. Unknown key names are skipped.
    NOTE(review): the truthiness filter also drops a mapped code of 0 -
    presumably no key maps to 0; confirm against ks_settings.KEY_CODES.
    """
    names = ( name . strip ( ) for name in keys . strip ( ) . upper ( ) . split ( '+' ) )
    lookup = ks_settings . KEY_CODES . get
    return [ code for code in map ( lookup , names ) if code ]
|
def renameMenu ( self ) :
    """Prompt the user to supply a new name for the currently selected menu.

    Opens a text input dialog pre-filled with the current item text and,
    if the user accepts, writes the new name back to the tree item.
    """
    item = self . uiMenuTREE . currentItem ( )
    name , accepted = QInputDialog . getText ( self , 'Rename Menu' , 'Name:' , QLineEdit . Normal , item . text ( 0 ) )
    if ( accepted ) :
        item . setText ( 0 , name )
|
def tableexists ( tablename ) :
    """Test if a casacore table exists (i.e. can be opened).

    :param tablename: path/name of the table to probe
    :return: True when the table opens successfully, False otherwise
    """
    # NOTE: was a bare `except:` with an unused local and a flag variable;
    # narrowed to Exception (a failed open is the only expected failure) and
    # simplified to direct returns.
    try :
        table ( tablename , ack = False )
    except Exception :
        return False
    return True
|
def get_variable ( self , variable_name , client = None ) :
    """API call: get a variable via a ``GET`` request.

    This will return None if the variable doesn't exist::

        >>> from google.cloud import runtimeconfig
        >>> client = runtimeconfig.Client()
        >>> config = client.config('my-config')
        >>> print(config.get_variable('variable-name'))
        <Variable: my-config, variable-name>
        >>> print(config.get_variable('does-not-exist'))
        None

    :type variable_name: str
    :param variable_name: The name of the variable to retrieve.
    :type client: :class:`~google.cloud.runtimeconfig.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
        to the ``client`` stored on the current config.
    :rtype: :class:`google.cloud.runtimeconfig.variable.Variable` or None
    :returns: The variable object if it exists, otherwise None.
    """
    client = self . _require_client ( client )
    # Build a local handle, then hit the API; a 404 maps to None rather
    # than propagating, since "absent" is an expected answer here.
    variable = Variable ( config = self , name = variable_name )
    try :
        variable . reload ( client = client )
        return variable
    except NotFound :
        return None
|
def sink ( self , * args , ** kwargs ) :
    """Define URL prefixes / handler matches where everything under the URL
    prefix should be handled.

    Thin wrapper around the module-level ``sink`` that injects this router's
    ``api`` into the keyword arguments before delegating.
    """
    kwargs [ 'api' ] = self . api
    return sink ( * args , ** kwargs )
|
def result ( self , timeout = None , do_raise = True ) :
    """Retrieve the result of the future, waiting for it to complete or at
    max *timeout* seconds.

    :param timeout: The number of maximum seconds to wait for the result.
    :param do_raise: Set to False to prevent any of the exceptions below
        to be raised and return :const:`None` instead.
    :raise Cancelled: If the future has been cancelled.
    :raise Timeout: If the *timeout* has been exceeded.
    :raise BaseException: Anything the worker has raised.
    :return: Whatever the worker bound to the future returned.
    """
    with self . _lock :
        # wait() itself may raise Timeout (unless do_raise=False).
        self . wait ( timeout , do_raise = do_raise )
        # Worker failure takes precedence over cancellation: re-raise the
        # captured exception info first.
        if self . _exc_info :
            if not do_raise :
                return None
            # Its more important to re-raise the exception from the worker.
            self . _exc_retrieved = True
            reraise ( * self . _exc_info )
        if self . _cancelled :
            if not do_raise :
                return None
            raise self . Cancelled ( )
        return self . _result
|
def drawQuad ( self , quad ) :
    """Draw a Quad as a closed polyline.

    The quad's four corners are traced upper-left, lower-left, lower-right,
    upper-right and back to upper-left to close the shape.
    """
    q = Quad ( quad )
    return self . drawPolyline ( [ q . ul , q . ll , q . lr , q . ur , q . ul ] )
|
def ToJsonString ( self ) :
    """Convert this FieldMask to its proto3 JSON string form.

    Each path is converted from snake_case to camelCase and the results are
    comma-joined.
    """
    return ',' . join ( _SnakeCaseToCamelCase ( path ) for path in self . paths )
|
def print_featurelist ( feature_list ) :
    """Print the feature_list in a human-readable (markdown-ish) form.

    Parameters
    ----------
    feature_list : list
        Feature objects; each must provide ``get_dimension()`` and ``__str__``.
    """
    total_dimensions = sum ( feature . get_dimension ( ) for feature in feature_list )
    lines = [ "## Features (%i)" % total_dimensions , "```" ]
    lines . extend ( "* %s" % feature for feature in feature_list )
    lines . append ( "```" )
    # Emit everything in one call; output is identical to line-by-line prints.
    print ( "\n" . join ( lines ) )
|
def namedb_read_version ( path ) :
    """Read the schema version stored in the database at ``path``.

    :param path: filesystem path to the sqlite database
    :return: the version string, or '0.0.0.0' when no version is recorded
        (e.g. the db_version table does not exist yet)
    """
    # Huge timeout so concurrent writers don't make the read fail spuriously.
    con = sqlite3 . connect ( path , isolation_level = None , timeout = 2 ** 30 )
    con . row_factory = namedb_row_factory
    try :
        rowdata = namedb_query_execute ( con , 'SELECT version FROM db_version;' , ( ) , abort = False )
        row = rowdata . fetchone ( )
        return row [ 'version' ]
    except Exception :
        # NOTE: was a bare `except:`; narrowed to Exception. A missing
        # table/row means "no version defined" - report the earliest version.
        return '0.0.0.0'
    finally :
        con . close ( )
|
def query ( data , ** options ) :
    """Filter data with a JMESPath expression taken from the options.

    See also: https://github.com/jmespath/jmespath.py and http://jmespath.org.

    :param data: Target object (a dict or a dict-like object) to query
    :param options: may include 'ac_query', a JMESPath expression string
    :return: the queried result (primitive or dict), or ``data`` unchanged
        when no/invalid expression is given or jmespath is unavailable
    """
    expression = options . get ( "ac_query" )
    # No expression (missing, None or empty string): nothing to do.
    if not expression :
        return data
    try :
        return jmespath . compile ( expression ) . search ( data )
    except ValueError as exc : # jmespath.exceptions.*Error inherit from it.
        LOGGER . warning ( "Failed to compile or search: exp=%s, exc=%r" , expression , exc )
    except ( NameError , AttributeError ) :
        LOGGER . warning ( "Filter module (jmespath) is not available. " "Do nothing." )
    return data
|
def path ( self ) :
    """Return the directory for this target, computing and caching it on
    first access.

    NOTE(review): if ``self.action_`` is falsy on first call, ``self.path_``
    stays None and ``os.path.normpath(None)`` will raise - presumably every
    target reaching here has an action; confirm.
    """
    if not self . path_ :
        if self . action_ :
            p = self . action_ . properties ( )
            ( target_path , relative_to_build_dir ) = p . target_path ( )
            if relative_to_build_dir : # Indicates that the path is relative to
                # build dir.
                target_path = os . path . join ( self . project_ . build_dir ( ) , target_path )
            # Store the computed path, so that it's not recomputed
            # any more
            self . path_ = target_path
    return os . path . normpath ( self . path_ )
|
def dispatch_event ( self , event : "Event" ) -> None :
    """Dispatch the given event to all listeners registered for its type.

    Sets the event's target to this dispatcher unless a target was already
    assigned (which happens when an event is redispatched).

    Args:
        event (Event): The event to dispatch. Must not be `None`.

    Raises:
        TypeError: If the event is `None` or its type is incorrect.
    """
    # Only claim the target if redispatching hasn't set one already.
    if event . target is None :
        event . set_target ( self )
    registered = self . _registered_listeners . get ( event . type )
    if registered is None :
        # Nobody subscribed to this event type.
        return
    for callback in registered :
        callback ( event )
|
def _load_schema_for_record ( data , schema = None ) :
    """Load the schema for a given record.

    Args:
        data (dict): record data.
        schema (Union[dict, str]): schema to validate against; when None the
            record's own ``$schema`` key is used.

    Returns:
        dict: the loaded schema.

    Raises:
        SchemaNotFound: if the given schema was not found.
        SchemaKeyNotFound: if ``schema`` is ``None`` and no ``$schema`` key
            was found in ``data``.
        jsonschema.SchemaError: if the schema is invalid.
    """
    if schema is None :
        # Fall back to the record's own declaration of its schema.
        try :
            schema = data [ '$schema' ]
        except KeyError :
            raise SchemaKeyNotFound ( data = data )
    # A string is a schema name to resolve; a dict is already a schema.
    if isinstance ( schema , six . string_types ) :
        return load_schema ( schema_name = schema )
    return schema
|
def read_parameters ( infile = '../parameters/EQcorrscan_parameters.txt' ) :
    """Read the default parameters from file.

    :type infile: str
    :param infile: Full path to parameter file.
    :returns: parameters read from file.
    :rtype: :class:`eqcorrscan.utils.parameters.EQcorrscanParameters`
    """
    # Python 2/3 compatibility for the config parser module name.
    try :
        import ConfigParser
    except ImportError :
        import configparser as ConfigParser
    import ast
    # NOTE: file was previously opened without a context manager and left
    # open if an exception occurred mid-read; `with` guarantees closure.
    print ( 'Reading parameters with the following header:' )
    with open ( infile , 'r' ) as f :
        for line in f :
            # Echo comment lines so the user sees the file's header block.
            if line [ 0 ] == '#' :
                print ( line . rstrip ( '\n' ) . lstrip ( '\n' ) )
    config = ConfigParser . ConfigParser ( )
    config . read ( infile )
    # Slightly tricky list reading: template_names is stored as a literal list.
    template_names = list ( ast . literal_eval ( config . get ( "eqcorrscan_pars" , "template_names" ) ) )
    parameters = EQcorrscanParameters ( template_names = template_names , lowcut = config . get ( "eqcorrscan_pars" , "lowcut" ) , highcut = config . get ( "eqcorrscan_pars" , "highcut" ) , filt_order = config . get ( "eqcorrscan_pars" , "filt_order" ) , samp_rate = config . get ( "eqcorrscan_pars" , "samp_rate" ) , debug = config . get ( "eqcorrscan_pars" , "debug" ) , startdate = config . get ( "eqcorrscan_pars" , "startdate" ) , enddate = config . get ( "eqcorrscan_pars" , "enddate" ) , archive = config . get ( "eqcorrscan_pars" , "archive" ) , arc_type = config . get ( "eqcorrscan_pars" , "arc_type" ) , cores = config . get ( "eqcorrscan_pars" , "cores" ) , plotvar = config . getboolean ( "eqcorrscan_pars" , "plotvar" ) , plotdir = config . get ( "eqcorrscan_pars" , "plotdir" ) , plot_format = config . get ( "eqcorrscan_pars" , "plot_format" ) , tempdir = ast . literal_eval ( config . get ( "eqcorrscan_pars" , "tempdir" ) ) , threshold = config . get ( "eqcorrscan_pars" , "threshold" ) , threshold_type = config . get ( "eqcorrscan_pars" , "threshold_type" ) , trigger_interval = config . get ( "eqcorrscan_pars" , "trigger_interval" ) )
    return parameters
|
def _intersect ( self , label , xmin , ymin , xmax , ymax ) :
"""Calculate intersect areas , normalized ."""
|
left = np . maximum ( label [ : , 0 ] , xmin )
right = np . minimum ( label [ : , 2 ] , xmax )
top = np . maximum ( label [ : , 1 ] , ymin )
bot = np . minimum ( label [ : , 3 ] , ymax )
invalid = np . where ( np . logical_or ( left >= right , top >= bot ) ) [ 0 ]
out = label . copy ( )
out [ : , 0 ] = left
out [ : , 1 ] = top
out [ : , 2 ] = right
out [ : , 3 ] = bot
out [ invalid , : ] = 0
return out
|
def request_videos ( blink , time = None , page = 0 ) :
    """Perform a request for videos changed since a given time.

    :param blink: Blink instance (provides base URL and auth context).
    :param time: Get videos since this time, in epoch seconds.
    :param page: Page number to get videos from.
    :return: response of the HTTP GET against the videos/changed endpoint.
    """
    timestamp = get_time ( time )
    url = "{}/api/v2/videos/changed?since={}&page={}" . format ( blink . urls . base_url , timestamp , page )
    return http_get ( blink , url )
|
def log_in ( self , username = None , password = None , code = None , redirect_uri = "urn:ietf:wg:oauth:2.0:oob" , refresh_token = None , scopes = __DEFAULT_SCOPES , to_file = None ) :
    """Get the access token for a user.

    The username is the e-mail used to log in into mastodon. Can persist the
    access token to file `to_file`, to be used in the constructor. Handles
    password, authorization-code and refresh-token based authorization.

    Will throw a `MastodonIllegalArgumentError` if the OAuth or the
    username/password credentials given are incorrect, and
    `MastodonAPIError` if all of the requested scopes were not granted.

    For OAuth2, obtain a code via having your user go to the url returned by
    `auth_request_url()`_ and pass it as the code parameter. In this case,
    make sure to also pass the same redirect_uri parameter as you used when
    generating the auth request URL.

    Returns the access token as a string.
    """
    # Pick the grant type from whichever credential set was supplied;
    # __generate_params harvests the remaining locals as request params.
    if username is not None and password is not None :
        params = self . __generate_params ( locals ( ) , [ 'scopes' , 'to_file' , 'code' , 'refresh_token' ] )
        params [ 'grant_type' ] = 'password'
    elif code is not None :
        params = self . __generate_params ( locals ( ) , [ 'scopes' , 'to_file' , 'username' , 'password' , 'refresh_token' ] )
        params [ 'grant_type' ] = 'authorization_code'
    elif refresh_token is not None :
        params = self . __generate_params ( locals ( ) , [ 'scopes' , 'to_file' , 'username' , 'password' , 'code' ] )
        params [ 'grant_type' ] = 'refresh_token'
    else :
        raise MastodonIllegalArgumentError ( 'Invalid arguments given. username and password or code are required.' )
    params [ 'client_id' ] = self . client_id
    params [ 'client_secret' ] = self . client_secret
    params [ 'scope' ] = " " . join ( scopes )
    try :
        # Token endpoint is exempt from client-side rate limiting.
        response = self . __api_request ( 'POST' , '/oauth/token' , params , do_ratelimiting = False )
        self . access_token = response [ 'access_token' ]
        self . __set_refresh_token ( response . get ( 'refresh_token' ) )
        self . __set_token_expired ( int ( response . get ( 'expires_in' , 0 ) ) )
    except Exception as e :
        # Re-raise with a message matching the auth flow that was attempted.
        if username is not None or password is not None :
            raise MastodonIllegalArgumentError ( 'Invalid user name, password, or redirect_uris: %s' % e )
        elif code is not None :
            raise MastodonIllegalArgumentError ( 'Invalid access token or redirect_uris: %s' % e )
        else :
            raise MastodonIllegalArgumentError ( 'Invalid request: %s' % e )
    # Expand umbrella scopes (e.g. "read") into their constituent scopes
    # before verifying everything requested was actually granted.
    received_scopes = response [ "scope" ] . split ( " " )
    for scope_set in self . __SCOPE_SETS . keys ( ) :
        if scope_set in received_scopes :
            received_scopes += self . __SCOPE_SETS [ scope_set ]
    if not set ( scopes ) <= set ( received_scopes ) :
        raise MastodonAPIError ( 'Granted scopes "' + " " . join ( received_scopes ) + '" do not contain all of the requested scopes "' + " " . join ( scopes ) + '".' )
    if to_file is not None :
        with open ( to_file , 'w' ) as token_file :
            token_file . write ( response [ 'access_token' ] + '\n' )
    # Invalidate the cached logged-in account id; it is refetched lazily.
    self . __logged_in_id = None
    return response [ 'access_token' ]
|
def client_id ( self , client ) :
    """Get a client's ID. Uses GET to /clients?name=<client> interface.

    :Args:
        * *client*: (str) Client's name
    :Returns: (str) Client id
    :Raises: whatever ``_check_response`` raises when the status is not 200
    """
    params = { "name" : client }
    response = self . _get ( url . clients , params = params )
    # Fail fast on any non-200 before trying to parse the body.
    self . _check_response ( response , 200 )
    return self . _create_response ( response ) . get ( "client_id" )
|
def defaults ( self ) :
    """Return a mapping of option name to that option's default value."""
    default_values = { }
    for option_name , option in self . options ( ) . items ( ) :
        default_values [ option_name ] = option . default
    return default_values
|
def auto_memoize ( func ) :
    """Memoize an instance method for the lifetime of its object.

    Based on django.util.functional.memoize. Results are cached in a
    per-instance dict keyed by (function, positional args), so the cache
    dies with the instance. Only positional, hashable arguments are
    supported, and the first argument MUST be self - this does not work
    for plain functions or classmethods.
    """
    @ wraps ( func )
    def memoized ( * args ) :
        instance = args [ 0 ]
        # Lazily create the per-instance cache on first use.
        try :
            cache = instance . _memoized_values
        except AttributeError :
            cache = instance . _memoized_values = { }
        key = ( func , args [ 1 : ] )
        try :
            return cache [ key ]
        except KeyError :
            result = cache [ key ] = func ( * args )
            return result
    return memoized
|
def ising_energy ( sample , h , J , offset = 0.0 ) :
    """Calculate the energy of a sample of an Ising model.

    .. math::

        E(\\mathbf{s}) = \\sum_v h_v s_v + \\sum_{u,v} J_{u,v} s_u s_v + c

    where :math:`s_v` is the sample, :math:`h_v` the linear bias,
    :math:`J_{u,v}` the quadratic bias (interaction), and :math:`c` the
    energy offset.

    Args:
        sample (dict[variable, spin]): {v: spin, ...} with spins -1 or 1.
        h (dict[variable, bias]): linear biases {v: bias, ...}.
        J (dict[(variable, variable), bias]): quadratic biases keyed by
            2-tuples of variables.
        offset (numeric, optional, default=0): constant energy offset.

    Returns:
        float: The induced energy.

    Notes:
        No input checking is performed.

    Examples:
        >>> ising_energy({1: -1, 2: -1}, {1: 1, 2: 1}, {(1, 2): 1}, 0.5)
        -0.5

    References:
        `Ising model on Wikipedia <https://en.wikipedia.org/wiki/Ising_model>`_
    """
    energy = offset
    # Linear contribution: each variable's bias times its spin.
    energy += sum ( bias * sample [ v ] for v , bias in h . items ( ) )
    # Quadratic contribution: each interaction times the product of spins.
    energy += sum ( bias * sample [ u ] * sample [ v ] for ( u , v ) , bias in J . items ( ) )
    return energy
|
def get_resources_to_check ( client_site_url , apikey ) :
    """Return a list of resource IDs to check for broken links.

    Calls the client site's API to get a list of resource IDs.

    :param client_site_url: base URL of the client site (trailing slash
        expected, since the endpoint path is appended directly)
    :param apikey: API key sent as the Authorization header
    :raises CouldNotGetResourceIDsError: if getting the resource IDs fails
        for any reason
    """
    url = client_site_url + u"deadoralive/get_resources_to_check"
    response = requests . get ( url , headers = dict ( Authorization = apikey ) )
    if not response . ok :
        raise CouldNotGetResourceIDsError ( u"Couldn't get resource IDs to check: {code} {reason}" . format ( code = response . status_code , reason = response . reason ) )
    return response . json ( )
|
def append_id ( expr , id_col = 'append_id' ) :
    """Append an ID column to the current DataFrame, forming a new one.

    Delegates to the expression's own ``_xflow_append_id`` hook when present,
    otherwise falls back to the module-level ``_append_id``.

    :param str id_col: name of appended ID field.
    :return: DataFrame with ID field
    :rtype: DataFrame
    """
    # Sentinel distinguishes "attribute missing" from "attribute is None",
    # mirroring the semantics of hasattr().
    missing = object ( )
    appender = getattr ( expr , '_xflow_append_id' , missing )
    if appender is missing :
        return _append_id ( expr , id_col )
    return appender ( id_col )
|
def drawItem ( self , item , painter , option ) :
    """Draw the inputed item as a pie/bar graph of its subpaths.

    :param item | <XChartDatasetItem>
           painter | <QPainter>
           option | <QStyleOptionGraphicsItem>
    """
    dataset = item . dataset ( )
    painter . save ( )
    painter . setRenderHint ( painter . Antialiasing )
    center = item . buildData ( 'center' )
    radius = item . buildData ( 'radius' )
    # Hovering brightens the fill and enables the "pop out" scale effect.
    if int ( option . state ) & QStyle . State_MouseOver != 0 :
        alpha = 20
        mouse_over = True
    else :
        alpha = 0
        mouse_over = False
    for value , subpath in item . buildData ( 'subpaths' , [ ] ) :
        clr = dataset . color ( value )
        bg = clr . lighter ( 110 )
        bg . setAlpha ( alpha + 100 )
        painter . setBrush ( bg )
        if mouse_over :
            # Scale the slice slightly about the chart center; the
            # translation keeps the scaled path anchored on the center.
            scale = 1.08
            dx = ( center . x ( ) / scale ) - center . x ( )
            dy = ( center . y ( ) / scale ) - center . y ( )
            painter . save ( )
            painter . scale ( scale , scale )
            painter . translate ( dx , dy )
            painter . setPen ( Qt . NoPen )
            painter . drawPath ( subpath )
            painter . restore ( )
        # Draw the outlined (unscaled) slice on top in all cases.
        pen = QPen ( clr )
        pen . setWidth ( 0.5 )
        painter . setPen ( pen )
        painter . drawPath ( subpath )
    painter . restore ( )
|
def to_ISO8601 ( timeobject ) :
    """Return the ISO8601-formatted string for the given time value.

    Accepts a UNIX time (int), a ``datetime.datetime`` object, or an already
    ISO8601-formatted string in the pattern ``YYYY-MM-DD HH:MM:SS+00``
    (strings are passed through untouched).

    :param timeobject: the object conveying the time value
    :returns: an ISO8601-formatted string with pattern
        ``YYYY-MM-DD HH:MM:SS+00``
    :raises: *TypeError* when bad argument types are provided, *ValueError*
        when negative UNIXtimes are provided
    """
    pattern = '%Y-%m-%d %H:%M:%S+00'
    if isinstance ( timeobject , str ) :
        # Assumed to already be ISO8601; returned as-is.
        return timeobject
    if isinstance ( timeobject , datetime ) :
        return timeobject . strftime ( pattern )
    if isinstance ( timeobject , int ) :
        if timeobject < 0 :
            raise ValueError ( "The time value is a negative number" )
        return datetime . utcfromtimestamp ( timeobject ) . strftime ( pattern )
    raise TypeError ( 'The time value must be expressed either by an int UNIX time, a datetime.datetime object or an ISO8601-formatted string' )
|
def get ( cls , * args , ** kwargs ) :
    """Retrieve one instance from the db according to given kwargs.

    Optionally, one positional arg can be used to retrieve it by pk.

    :raises DoesNotExist: when no object matches the filter
    :raises ValueError: when more than one object matches, or on invalid usage
    """
    if len ( args ) == 1 : # Guess it's a pk
        pk = args [ 0 ]
    elif kwargs : # special case to check for a simple pk
        # A single kwarg naming the pk field is a direct pk lookup,
        # skipping the collection query entirely.
        if len ( kwargs ) == 1 and cls . _field_is_pk ( list ( kwargs . keys ( ) ) [ 0 ] ) :
            pk = list ( kwargs . values ( ) ) [ 0 ]
        else : # case with many filters
            result = cls . collection ( ** kwargs ) . sort ( by = 'nosort' )
            if len ( result ) == 0 :
                raise DoesNotExist ( u"No object matching filter: %s" % kwargs )
            elif len ( result ) > 1 :
                raise ValueError ( u"More than one object matching filter: %s" % kwargs )
            else :
                try :
                    pk = result [ 0 ]
                except IndexError : # object was deleted between the `len` check and now
                    raise DoesNotExist ( u"No object matching filter: %s" % kwargs )
    else :
        raise ValueError ( "Invalid `get` usage with args %s and kwargs %s" % ( args , kwargs ) )
    return cls ( pk )
|
def _element_charfix ( self , element , charcount ) :
"""Updates the start and end attributes by charcount for the element ."""
|
element . start += charcount
element . docstart += charcount
element . end += charcount
element . docend += charcount
|
def experiments_fmri_get ( self , experiment_id ) :
    """Get the fMRI data object associated with the given experiment.

    Parameters
    ----------
    experiment_id : string
        unique experiment identifier

    Returns
    -------
    FMRIDataHandle
        Handle for the fMRI data object, or None if (a) the experiment is
        unknown or (b) it has no fMRI data object associated with it.
    """
    # Look up the experiment; bail out if it is unknown or has no fMRI data.
    experiment = self . experiments_get ( experiment_id )
    if experiment is None or experiment . fmri_data_id is None :
        return None
    # Wrap the raw functional-data object in an fMRI handle.
    func_data = self . funcdata . get_object ( experiment . fmri_data_id )
    return funcdata . FMRIDataHandle ( func_data , experiment_id )
|
def option_from_wire ( otype , wire , current , olen ) :
    """Build an EDNS option object from wire format.

    @param otype: The option type
    @type otype: int
    @param wire: The wire-format message
    @type wire: string
    @param current: The offset in wire of the beginning of the rdata.
    @type current: int
    @param olen: The length of the wire-format option data
    @type olen: int
    @rtype: dns.edns.Option instance
    """
    # Dispatch to the concrete option class registered for this option type.
    cls = get_option_class ( otype )
    return cls . from_wire ( otype , wire , current , olen )
|
def from_string ( cls , resource_name ) :
    """Parse a VISA resource name string and return a ResourceName.

    :type resource_name: str
    :rtype: ResourceName
    :raises InvalidResourceName: if the resource name is invalid.
    """
    # TODO Remote VISA
    # Matching is case-insensitive on the interface prefix, but the
    # original casing is preserved for parts and error messages.
    uname = resource_name . upper ( )
    for interface_type in _INTERFACE_TYPES : # Loop through all known interface types until we found one
        # that matches the beginning of the resource name
        if not uname . startswith ( interface_type ) :
            continue
        if len ( resource_name ) == len ( interface_type ) :
            parts = ( )
        else :
            parts = resource_name [ len ( interface_type ) : ] . split ( '::' )
        # Try to match the last part of the resource name to
        # one of the known resource classes for the given interface type.
        # If not possible, use the default resource class
        # for the given interface type.
        if parts and parts [ - 1 ] in _RESOURCE_CLASSES [ interface_type ] :
            parts , resource_class = parts [ : - 1 ] , parts [ - 1 ]
        else :
            try :
                resource_class = _DEFAULT_RC [ interface_type ]
            except KeyError :
                raise InvalidResourceName . rc_notfound ( interface_type , resource_name )
        # Look for the subclass registered for this (interface, class) pair.
        try :
            subclass = _SUBCLASSES [ ( interface_type , resource_class ) ]
        except KeyError :
            raise InvalidResourceName . subclass_notfound ( ( interface_type , resource_class ) , resource_name )
        # And create the object, remembering the original user string.
        try :
            rn = subclass . from_parts ( * parts )
            rn . user = resource_name
            return rn
        except ValueError as ex :
            raise InvalidResourceName . bad_syntax ( subclass . _visa_syntax , resource_name , ex )
    raise InvalidResourceName ( 'Could not parse %s: unknown interface type' % resource_name )
|
def _post_stats ( self , stats ) :
    '''Fire an event with stat info if the configured interval has elapsed,
    then reset the stats tracker and the interval clock.'''
    end_time = time . time ( )
    if end_time - self . stat_clock > self . opts [ 'master_stats_event_iter' ] : # Fire the event with the stats and wipe the tracker
        self . aes_funcs . event . fire_event ( { 'time' : end_time - self . stat_clock , 'worker' : self . name , 'stats' : stats } , tagify ( self . name , 'stats' ) )
        # Fresh accumulator: every key starts from zeroed mean/latency/runs.
        self . stats = collections . defaultdict ( lambda : { 'mean' : 0 , 'latency' : 0 , 'runs' : 0 } )
        self . stat_clock = end_time
|
def request_analysis(self):
    """Request a (re)analysis from the job runner.

    If the previous analysis is finished (``self._finished``), the request is
    submitted immediately.  Otherwise, as long as an editor is still attached,
    the request is retried after a 500 ms delay.
    """

    if self._finished:
        _logger(self.__class__).log(5, 'running analysis')
        self._job_runner.request_job(self._request)
    elif self.editor:
        # retry later: the previous analysis has not finished yet
        _logger(self.__class__).log(5, 'delaying analysis (previous analysis not finished)')
        QtCore.QTimer.singleShot(500, self.request_analysis)
|
def calculateValues(self):
    """Compute the values for this axis.

    When a chart is attached, the axis values are the sorted dataset names;
    otherwise the base implementation (min/max/step based) is used.

    :return     [<variant>, ..]
    """
    chart = self.chart()
    if not chart:
        # No chart attached: fall back to the numeric axis calculation
        return super(XDatasetAxis, self).calculateValues()
    return sorted(dataset.name() for dataset in chart.datasets())
|
def _update_application_request ( app_metadata , application_id ) :
"""Construct the request body to update application .
: param app _ metadata : Object containing app metadata
: type app _ metadata : ApplicationMetadata
: param application _ id : The Amazon Resource Name ( ARN ) of the application
: type application _ id : str
: return : SAR UpdateApplication request body
: rtype : dict"""
|
request = { 'ApplicationId' : application_id , 'Author' : app_metadata . author , 'Description' : app_metadata . description , 'HomePageUrl' : app_metadata . home_page_url , 'Labels' : app_metadata . labels , 'ReadmeUrl' : app_metadata . readme_url }
return { k : v for k , v in request . items ( ) if v }
|
def replicate_per_farm_dbs(cloud_url=None, local_url=None, farm_name=None):
    """Set up replication of the per-farm databases from the local server to the
    cloud server.

    :param str cloud_url: Used to override the cloud url from the global
        configuration in case the calling function is in the process of
        initializing the cloud server
    :param str local_url: Used to override the local url from the global
        configuration in case the calling function is in the process of
        initializing the local server
    :param str farm_name: Used to override the farm name from the global
        configuration in case the calling function is in the process of
        initializing the farm
    """

    cloud_url = cloud_url or config["cloud_server"]["url"]
    local_url = local_url or config["local_server"]["url"]
    farm_name = farm_name or config["cloud_server"]["farm_name"]
    username = config["cloud_server"]["username"]
    password = config["cloud_server"]["password"]
    # Add credentials to the cloud url, but only when it doesn't already
    # carry a username component
    parsed_cloud_url = urlparse(cloud_url)
    if not parsed_cloud_url.username:
        new_netloc = "{}:{}@{}".format(username, password, parsed_cloud_url.netloc)
        cloud_url = ParseResult(parsed_cloud_url.scheme, new_netloc, parsed_cloud_url.path, parsed_cloud_url.params, parsed_cloud_url.query, parsed_cloud_url.fragment).geturl()
    server = Server(local_url)
    # Remote databases are namespaced as <username>/<farm_name>/<db_name>;
    # replication is continuous (push-style, local -> cloud)
    for db_name in per_farm_dbs:
        remote_db_name = "{}/{}/{}".format(username, farm_name, db_name)
        server.replicate(db_name, db_name, urljoin(cloud_url, remote_db_name), continuous=True)
|
def do_set_hub_connection(self, args):
    """Set Hub connection parameters.

    Usage:
        set_hub_connection username password host [port]

    Arguments:
        username: Hub username
        password: Hub password
        host: host name or IP address
        port: IP port [default 25105]
    """
    fields = args.split()
    if len(fields) >= 3:
        self.tools.username = fields[0]
        self.tools.password = fields[1]
        self.tools.host = fields[2]
        # An explicitly supplied port is kept as given; otherwise fall
        # back to the default Hub port.
        self.tools.port = fields[3] if len(fields) > 3 else 25105
    else:
        _LOGGING.error('username password host are required')
        self.do_help('set_hub_connection')
|
def initialize_pymol(options):
    """Initializes PyMOL and resets it to a clean state.

    :param options: extra command line argument(s) forwarded to PyMOL's
        launcher (inserted between 'pymol' and '-K')
    """

    import pymol
    # Pass standard arguments of function to prevent PyMOL from printing out PDB headers (workaround)
    pymol.finish_launching(args=['pymol', options, '-K'])
    pymol.cmd.reinitialize()
|
def addPlot(self, xdata, ydata, xlabel=None, ylabel=None, title=None, xunits=None, yunits=None):
    """Create a SimplePlotWidget for the given data and add it to the widget stack.

    :param xdata: index values for data, plotted on x-axis
    :type xdata: numpy.ndarray
    :param ydata: value data to plot, dimension must match xdata
    :type ydata: numpy.ndarray
    """
    widget = SimplePlotWidget(xdata, ydata)
    widget.setLabels(xlabel, ylabel, title, xunits, yunits)
    self.stacker.addWidget(widget)
|
def convert(self):
    """Convert the downloaded track to mp3 format.

    The original file is moved into a per-user backups directory and the
    backup copy is then re-encoded to mp3.

    :return: result of ``AudioSegment.export`` on success, ``False`` when the
        file is already an mp3 (no conversion needed)
    :raises serror: if the track has not been downloaded yet
    """

    if self.downloaded is False:
        raise serror("Track not downloaded, can't convert file..")
    # Detect the real container type from the file contents, not the extension
    filetype = magic.from_file(self.filepath, mime=True)
    if filetype == "audio/mpeg":
        print("File is already in mp3 format. Skipping convert.")
        return False
    # backups live two levels above the track, under backups/<username>
    rootpath = os.path.dirname(os.path.dirname(self.filepath))
    backupdir = rootpath + "/backups/" + self.get("username")
    if not os.path.exists(backupdir):
        os.makedirs(backupdir)
    # Keep the original file (with its original extension) as the backup
    backupfile = "%s/%s%s" % (backupdir, self.gen_filename(), self.get_file_extension(self.filepath))
    # NOTE(review): newfile comes from filename_without_extension(); presumably
    # it includes the directory of the original path — confirm target location
    newfile = "%s.mp3" % self.filename_without_extension()
    os.rename(self.filepath, backupfile)
    self.filepath = newfile
    print("Converting to %s.." % newfile)
    song = AudioSegment.from_file(backupfile)
    return song.export(newfile, format="mp3")
|
def get_lldp_neighbor_detail_output_lldp_neighbor_detail_remaining_life(self, **kwargs):
    """Auto Generated Code.

    Builds the XML payload for the ``remaining-life`` leaf of a
    ``get-lldp-neighbor-detail`` RPC output and dispatches it via the callback.
    """
    get_lldp_neighbor_detail = ET.Element("get_lldp_neighbor_detail")
    config = get_lldp_neighbor_detail
    output = ET.SubElement(get_lldp_neighbor_detail, "output")
    detail = ET.SubElement(output, "lldp-neighbor-detail")
    # Key leaves identifying the neighbor entry, then the requested leaf;
    # kwargs are consumed in the same order as the generated original.
    leaf_specs = (
        ("local-interface-name", 'local_interface_name'),
        ("remote-interface-name", 'remote_interface_name'),
        ("remaining-life", 'remaining_life'),
    )
    for tag, kwarg_name in leaf_specs:
        ET.SubElement(detail, tag).text = kwargs.pop(kwarg_name)
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def GaussianCdfInverse(p, mu=0, sigma=1):
    """Evaluates the inverse CDF (quantile function) of the gaussian distribution.

    See http://en.wikipedia.org/wiki/Normal_distribution#Quantile_function

    Args:
        p: float
        mu: mean parameter
        sigma: standard deviation parameter

    Returns:
        float
    """
    # Standard-normal quantile, then shift/scale to N(mu, sigma)
    standard_quantile = ROOT2 * erfinv(2 * p - 1)
    return standard_quantile * sigma + mu
|
def clone(self, **kwargs):
    """Clone a part.

    .. versionadded:: 2.3

    :param kwargs: (optional) additional keyword=value arguments
    :type kwargs: dict
    :return: cloned :class:`models.Part`
    :raises APIError: if the `Part` could not be cloned

    Example
    -------
    >>> bike = client.model('Bike')
    >>> bike2 = bike.clone()
    """
    # The clone is created under this part's own parent
    return self._client._create_clone(self.parent(), self, **kwargs)
|
def exponential_backoff(fn, sleeptime_s_max=30 * 60):
    """Calls `fn` until it returns True, doubling the wait between calls.

    Returns True as soon as `fn()` is truthy; returns False once the next
    wait would exceed `sleeptime_s_max` seconds.
    """
    delay_ms = 500
    while True:
        if fn():
            return True
        print('Sleeping {} ms'.format(delay_ms))
        time.sleep(delay_ms / 1000.0)
        delay_ms *= 2
        # Give up when the next backoff interval crosses the cap
        if delay_ms / 1000.0 > sleeptime_s_max:
            return False
|
def umode(self, nick, modes=''):
    """Sets/gets user modes.

    Required arguments:
    * nick - Nick to set/get user modes for.

    Optional arguments:
    * modes='' - Sets these user modes on a nick.  When empty, the current
      modes are queried (via the 221 numeric reply) and returned instead.
    """

    with self.lock:
        if not modes:
            # No modes given: query the server for the nick's current modes
            self.send('MODE %s' % nick)
            if self.readable():
                msg = self._recv(expected_replies=('221',))
                if msg[0] == '221':
                    # Strip the leading '+' and the first ':' from the reply
                    modes = msg[2].replace('+', '').replace(':', '', 1)
            return modes
        self.send('MODE %s %s' % (nick, modes))
        if self.readable():
            msg = self._recv(expected_replies=('MODE',))
            if msg[0] == 'MODE':
                if not self.hide_called_events:
                    # Push the MODE event back so normal handlers still see it
                    self.stepback()
                return msg[2].replace(':', '', 1)
|
def _get_interpretation_function(interpretation, dtype):
    """Look up the registered interpretation function for (interpretation, dtype).

    :raises ValueError: when no transform is registered for the combination.
    """
    global _interpretations
    dtype_name = dtype.__name__
    # Registered functions follow the "<interpretation>__<type>" naming scheme
    func_name = "%s__%s" % (interpretation, dtype_name)
    if not hasattr(_interpretations, func_name):
        raise ValueError("No transform available for type '%s' with interpretation '%s'." % (dtype_name, interpretation))
    return getattr(_interpretations, func_name)
|
def write(parsed_obj, spec=None, filename=None):
    """Writes an object created by `parse` to either a file or a bytearray.

    If the object doesn't end on a byte boundary, zeroes are appended to it
    until it does.
    """
    if not isinstance(parsed_obj, BreadStruct):
        raise ValueError('Object to write must be a structure created by bread.parse')
    payload = parsed_obj._data_bits[:parsed_obj._length]
    if filename is None:
        # No file given: hand the serialized bits back as a bytearray
        return bytearray(payload.tobytes())
    with open(filename, 'wb') as fp:
        payload.tofile(fp)
|
def paragraph(content):
    """Emit a paragraph, stripping out any leading or following empty paragraphs.

    Content that is already a top-level ``<div>`` is returned bare (with
    surrounding newlines) instead of being wrapped in a ``<p>`` tag.
    """
    # A top-level div must not be nested inside a <p>
    if content.startswith('<div') and content.endswith('</div>'):
        return '\n' + content + '\n'
    wrapped = re.sub(r'<p>\s*</p>', r'', '<p>' + content + '</p>')
    # Guarantee a non-empty return value
    return wrapped or ' '
|
def get_download_url(self, instance, default=None):
    """Calculate the at_download URL for this field on *instance*.

    :param instance: content object providing ``absolute_url()``
    :param default: unused; kept for backward compatibility with existing
        callers that pass a fallback value
    :return: the download URL string
    """
    # NOTE(review): the original assigned `default` to a local and then
    # unconditionally overwrote it, so the fallback was never used; the dead
    # assignment is removed here with behavior unchanged.
    return "{url}/at_download/{fieldname}".format(
        url=instance.absolute_url(), fieldname=self.get_field_name())
|
def _recreate_list_with_indices ( indices1 , values1 , indices2 , values2 ) :
"""Create a list in the right order .
: param list indices1 : contains the list of indices corresponding to
the values in values1.
: param list values1 : contains the first list of values .
: param list indices2 : contains the list of indices corresponding to
the values in values2.
: param list values2 : contains the second list of values .
: return : list of the values in the correct order .
: rtype : list ."""
|
# Test if indices are continuous
list_indices = sorted ( indices1 + indices2 )
for i , index in enumerate ( list_indices ) :
if i != index :
raise CraftAiInternalError ( "The agents's indices are not continuous" )
full_list = [ None ] * ( len ( indices1 ) + len ( indices2 ) )
for i , index in enumerate ( indices1 ) :
full_list [ index ] = values1 [ i ]
for i , index in enumerate ( indices2 ) :
full_list [ index ] = values2 [ i ]
return full_list
|
def gauss_jordan(A, x, b):
    """Linear equation system Ax=b by Gauss-Jordan

    :param A: m by n matrix (m rows / equations, n columns / unknowns;
        note the assert below requires len(A) == m and len(A[0]) == n)
    :param x: table of size n
    :param b: table of size m
    :modifies: x will contain solution if any
    :returns int:
        0 if no solution,
        1 if solution unique,
        2 otherwise
    :complexity: :math:`O(n^2m)`
    """

    n = len(x)
    m = len(b)
    assert len(A) == m and len(A[0]) == n
    S = []
    # put linear system in a single augmented matrix S = (A | b)
    for i in range(m):
        S.append(A[i][:] + [b[i]])
    # extra bookkeeping row: S[m][j] records which unknown occupies column j
    S.append(list(range(n)))
    # indices in x
    k = diagonalize(S, n, m)
    if k < m:
        # rank deficiency: the remaining right-hand sides must all be zero,
        # otherwise the system is inconsistent
        for i in range(k, m):
            if not is_zero(S[i][n]):
                return GJ_ZERO_SOLUTIONS
    for j in range(k):
        x[S[m][j]] = S[j][n]
    if k < n:
        # free variables: arbitrarily fix them to 0
        for j in range(k, n):
            x[S[m][j]] = 0
        return GJ_SEVERAL_SOLUTIONS
    return GJ_SINGLE_SOLUTION
|
def chglog(amend: bool = False, stage: bool = False, next_version: str = None, auto_next_version: bool = False):
    """Writes the changelog.

    Refuses to run when the changelog file itself already has uncommitted
    changes, to avoid clobbering manual edits.

    Args:
        amend: amend last commit with changes
        stage: stage changes
        next_version: indicates next version
        auto_next_version: infer next version from VCS
    """
    changed_files = CTX.repo.changed_files()
    changelog_file_path: Path = config.CHANGELOG_FILE_PATH()
    changelog_file_name = changelog_file_path.name
    if changelog_file_name in changed_files:
        LOGGER.error('changelog has changed; cannot update it')
        # sys.exit instead of the site-provided exit() builtin, which is not
        # guaranteed to exist in frozen/embedded interpreters (sys is already
        # imported at module level)
        sys.exit(-1)
    _chglog(amend, stage, next_version, auto_next_version)
|
def load(self, callables_fname):
    r"""Load traced modules information from a `JSON <http://www.json.org/>`_ file.

    The loaded module information is merged with any existing module information.

    :param callables_fname: File name
    :type  callables_fname: :ref:`FileNameExists`

    :raises:
     * OSError (File *[fname]* could not be found)

     * RuntimeError (Argument \`callables_fname\` is not valid)
    """

    # Validate file name
    _validate_fname(callables_fname)
    if not os.path.exists(callables_fname):
        raise OSError("File {0} could not be found".format(callables_fname))
    with open(callables_fname, "r") as fobj:
        fdict = json.load(fobj)
    if sys.hexversion < 0x03000000:  # pragma: no cover
        # Python 2: JSON strings come back as unicode; normalize to str
        fdict = _unicode_to_ascii(fdict)
    self._callables_db.update(fdict["_callables_db"])
    # Reverse the tuple-to-string conversion that the save method
    # does due to the fact that JSON keys need to be strings and the
    # keys of the reverse callable dictionary are tuples where the first
    # item is a file name and the second item is the starting line of the
    # callable within that file (dictionary value)
    rdict = {}
    for key, value in fdict["_reverse_callables_db"].items():
        # key looks like "('fname', lineno)": strip the parentheses, split
        # on the comma, then strip the quotes around the file name
        tokens = key[1:-1].split(",")
        key = tokens[0].strip()[1:-1]
        if platform.system().lower() == "windows":  # pragma: no cover
            # Collapse doubled backslashes (JSON escaping) until stable
            while True:
                tmp = key
                key = key.replace("\\\\", "\\")
                if tmp == key:
                    break
        rdict[(key, int(tokens[1]))] = value
    self._reverse_callables_db.update(rdict)
    self._modules_dict.update(fdict["_modules_dict"])
    self._fnames.update(fdict["_fnames"])
    self._module_names.extend(fdict["_module_names"])
    self._class_names.extend(fdict["_class_names"])
    # De-duplicate the merged name lists and keep them sorted
    self._module_names = sorted(list(set(self._module_names)))
    self._class_names = sorted(list(set(self._class_names)))
|
def gen_res(args, resource, inflow, outflow):
    """Return an environment-file line defining a standard resource.

    :param args: unused; kept for interface compatibility
    :param resource: resource name (string)
    :param inflow: inflow rate (int)
    :param outflow: outflow rate (int)
    :return: a single "RESOURCE ..." line terminated by a newline
    """
    return ("RESOURCE " + resource + ":inflow=" + str(inflow)
            + ":outflow=" + str(outflow) + "\n")
|
def listReleaseVersions(self, release_version="", dataset='', logical_file_name=''):
    """List release versions, optionally filtered by dataset and/or LFN.

    :param release_version: release version pattern (upper-cased before query)
    :param dataset: dataset name; wildcards are NOT allowed
    :param logical_file_name: logical file name; wildcards are NOT allowed
    :return: a one-element list: ``[{'release_version': [...]}]`` (the inner
        dict is empty when no rows were found)
    """

    if dataset and ('%' in dataset or '*' in dataset):
        dbsExceptionHandler('dbsException-invalid-input', " DBSReleaseVersion/listReleaseVersions. No wildcards are" + " allowed in dataset.\n.")
    if logical_file_name and ('%' in logical_file_name or '*' in logical_file_name):
        dbsExceptionHandler('dbsException-invalid-input', " DBSReleaseVersion/listReleaseVersions. No wildcards are" + " allowed in logical_file_name.\n.")
    conn = self.dbi.connection()
    try:
        plist = self.releaseVersion.execute(conn, release_version.upper(), dataset, logical_file_name)
        result = [{}]
        if plist:
            # Flatten the row dicts into a single list of version values.
            # NOTE(review): iteritems() is Python 2 only — confirm this
            # module is not expected to run under Python 3
            t = []
            for i in plist:
                for k, v in i.iteritems():
                    t.append(v)
            result[0]['release_version'] = t
        return result
    finally:
        # Always release the DB connection, even on error
        if conn:
            conn.close()
|
def copy(self, name=None, prefix=None):
    """A copy of this :class:`Config` container.

    If ``prefix`` is given, it prefixes all non
    :ref:`global settings <setting-section-global-server-settings>`
    with it. Used when multiple applications are loaded.
    """
    cls = self.__class__
    duplicate = cls.__new__(cls)
    # Shallow-copy all attributes, then selectively deep-copy the mutable ones
    duplicate.__dict__.update(self.__dict__)
    if prefix:
        duplicate.prefix = prefix
        originals = duplicate.settings
        duplicate.settings = {}
        for original_setting in originals.values():
            copied_setting = original_setting.copy(name, prefix)
            duplicate.settings[copied_setting.name] = copied_setting
    duplicate.params = duplicate.params.copy()
    return duplicate
|
def set_attr(self):
    """Store this column's kind, metadata and (when present) dtype on ``self.attrs``."""
    setattr(self.attrs, self.kind_attr, self.values)
    setattr(self.attrs, self.meta_attr, self.meta)
    dtype = self.dtype
    # dtype is optional; only persist it when it has been set
    if dtype is not None:
        setattr(self.attrs, self.dtype_attr, dtype)
|
def _Open(self, path_spec=None, mode='rb'):
    """Opens the file-like object defined by path specification.

    Args:
      path_spec (PathSpec): path specification.
      mode (Optional[str]): file access mode.

    Raises:
      AccessError: if the access to open the file was denied.
      IOError: if the file-like object could not be opened.
      NotSupported: if a data stream, like the resource or named fork, is
          requested to be opened.
      OSError: if the file-like object could not be opened.
      PathSpecError: if the path specification is incorrect.
      ValueError: if the path specification is invalid.
    """

    if not path_spec:
        raise ValueError('Missing path specification.')
    # Data streams (e.g. resource/named forks) cannot be opened by this reader
    data_stream = getattr(path_spec, 'data_stream', None)
    if data_stream:
        raise errors.NotSupported('Open data stream: {0:s} not supported.'.format(data_stream))
    self._file_system = resolver.Resolver.OpenFileSystem(path_spec, resolver_context=self._resolver_context)
    file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)
    if not file_entry:
        raise IOError('Unable to open file entry.')
    # Resolve the underlying APFS file entry backing the file entry
    fsapfs_file_entry = file_entry.GetAPFSFileEntry()
    if not fsapfs_file_entry:
        raise IOError('Unable to open APFS file entry.')
    self._fsapfs_file_entry = fsapfs_file_entry
|
def request_headers(self):
    '''Fill request headers from the environ dictionary and
    modify them via the list of :attr:`headers_middleware`.

    The returned headers will be sent to the target uri.
    '''
    headers = CIMultiDict()
    environ = self.environ
    # WSGI exposes request headers as HTTP_* keys with '_' for '-'
    for key, value in environ.items():
        if key.startswith('HTTP_'):
            headers[key[5:].replace('_', '-')] = value
    # Also pick up the special non-HTTP_ prefixed headers
    for header_name in ENVIRON_HEADERS:
        environ_key = header_name.replace('-', '_').upper()
        value = environ.get(environ_key)
        if value:
            headers[header_name] = value
    for middleware in self.wsgi.headers_middleware:
        middleware(environ, headers)
    return headers
|
def upper_band(close_data, high_data, low_data, period):
    """Upper Band.

    Formula:
    UB = CB + BW  (center band plus band width)
    """
    center = center_band(close_data, high_data, low_data, period)
    width = band_width(high_data, low_data, period)
    return center + width
|
def write_type_dumps(self, operations, preserve_order, output_dir):
    """Split the SQL operations by type and dump each non-empty group to its own file."""
    grouped = {SqlType.INDEX: [], SqlType.FUNCTION: [], SqlType.TRIGGER: []}
    for operation in operations:
        grouped[operation.sql_type].append(operation)
    if not preserve_order:
        # Deterministic output: order each group by the object name
        grouped = {sql_type: sorted(ops, key=lambda o: o.obj_name)
                   for sql_type, ops in grouped.items()}
    dump_specs = (
        ('indexes', SqlType.INDEX),
        ('functions', SqlType.FUNCTION),
        ('triggers', SqlType.TRIGGER),
    )
    for label, sql_type in dump_specs:
        if grouped[sql_type]:
            self.write_dump(label, grouped[sql_type], output_dir)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.