signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def get_file_list(path, max_depth=1, cur_depth=0):
    """Recursively yield every file found under ``path``.

    Hidden entries (names starting with ``.``) are skipped.  Directories
    are only descended into while ``cur_depth`` differs from
    ``max_depth``; nothing is yielded when ``path`` does not exist.
    """
    if not os.path.exists(path):
        return
    for entry in os.listdir(path):
        if entry.startswith('.'):
            continue
        entry_path = os.path.join(path, entry)
        if not os.path.isdir(entry_path):
            yield entry_path
        elif cur_depth != max_depth:
            for descendant in get_file_list(entry_path, max_depth, cur_depth + 1):
                yield descendant
|
def get_global_shelf_fpath(appname='default', ensure=False):
    """Return the filepath of the global shelf for *appname*.

    The shelf file lives inside the application's global cache
    directory; ``ensure`` is forwarded so the directory can be created
    on demand.
    """
    cache_dir = get_global_cache_dir(appname, ensure=ensure)
    return join(cache_dir, meta_util_constants.global_cache_fname)
|
def save_config(self, filepath):
    """Save the GUI configuration to a JSON file.

    Args:
        filepath: path of the output file; parent directories are
            created if they do not yet exist.

    On any failure a Qt message box is shown instead of raising.
    """

    def get_hidden_parameter(item):
        # Recursively map a tree item to {name: visible} for leaves, or
        # {name: {nested maps...}} for items that have children.
        num_sub_elements = item.childCount()
        if num_sub_elements == 0:
            dictator = {item.name: item.visible}
        else:
            dictator = {item.name: {}}
            for child_id in range(num_sub_elements):
                dictator[item.name].update(get_hidden_parameter(item.child(child_id)))
        return dictator

    try:
        filepath = str(filepath)
        if not os.path.exists(os.path.dirname(filepath)):
            os.makedirs(os.path.dirname(filepath))
        # build a dictionary for the configuration of the hidden parameters
        dictator = {}
        for index in range(self.tree_scripts.topLevelItemCount()):
            script_item = self.tree_scripts.topLevelItem(index)
            dictator.update(get_hidden_parameter(script_item))
        dictator = {"gui_settings": self.gui_settings, "gui_settings_hidden": self.gui_settings_hidden, "scripts_hidden_parameters": dictator}
        # update the internal dictionaries from the trees in the gui
        for index in range(self.tree_scripts.topLevelItemCount()):
            script_item = self.tree_scripts.topLevelItem(index)
            self.update_script_from_item(script_item)
        dictator.update({'instruments': {}, 'scripts': {}, 'probes': {}})
        for instrument in self.instruments.values():
            dictator['instruments'].update(instrument.to_dict())
        for script in self.scripts.values():
            dictator['scripts'].update(script.to_dict())
        for instrument, probe_dict in self.probes.items():
            # probes are serialized as a comma-separated list of probe
            # names per instrument
            dictator['probes'].update({instrument: ','.join(list(probe_dict.keys()))})
        with open(filepath, 'w') as outfile:
            json.dump(dictator, outfile, indent=4)
        # Remember the last save location (one directory above this
        # module) so it can be restored on the next launch.
        save_config_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'save_config.json'))
        if os.path.isfile(save_config_path) and os.access(save_config_path, os.R_OK):
            with open(save_config_path, 'w') as outfile:
                json.dump({'last_save_path': filepath}, outfile, indent=4)
        else:
            # File missing or not readable: fall back to io.open and a
            # raw write of the JSON string.
            with io.open(save_config_path, 'w') as save_config_file:
                save_config_file.write(json.dumps({'last_save_path': filepath}))
        self.log('Saved GUI configuration (location: {0}'.format(filepath))
    except Exception:
        msg = QtWidgets.QMessageBox()
        msg.setText("Saving failed. Please use 'save as' to define a valid path for the gui.")
        msg.exec_()
|
def set_active_scalar(self, name, preference='cell'):
    """Find the scalar array *name* and make it the active scalar.

    Raises RuntimeError when the array's field is neither point nor
    cell data.
    """
    _, field = get_scalar(self, name, preference=preference, info=True)
    if field == POINT_DATA_FIELD:
        target = self.GetPointData()
    elif field == CELL_DATA_FIELD:
        target = self.GetCellData()
    else:
        raise RuntimeError('Data field ({}) not useable'.format(field))
    target.SetActiveScalars(name)
    self._active_scalar_info = [field, name]
|
def eventsource_connect(url, io_loop=None, callback=None, connect_timeout=None):
    """Client-side eventsource support.

    Takes a url (or a prepared ``HTTPRequest``) and returns a Future
    whose result is an ``EventSourceClient``.
    """
    if io_loop is None:
        io_loop = IOLoop.current()
    if isinstance(url, httpclient.HTTPRequest):
        # A prepared request carries its own timeout.
        assert connect_timeout is None
        request = url
        # Copy and convert the headers dict/object (see comments in
        # AsyncHTTPClient.fetch)
        request.headers = httputil.HTTPHeaders(request.headers)
    else:
        headers = httputil.HTTPHeaders({"Accept-Encoding": "identity"})
        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout, headers=headers)
    request = httpclient._RequestProxy(request, httpclient.HTTPRequest._DEFAULTS)
    client = EventSourceClient(io_loop, request)
    if callback is not None:
        io_loop.add_future(client.connect_future, callback)
    return client.connect_future
|
def getClassInPackageFromName(className, pkg):
    """Get a class from its name within a package."""
    # TODO: more efficiency!
    names = getAvClassNamesInPackage(pkg)
    classes = getAvailableClassesInPackage(pkg)
    return classes[names.index(className)]
|
def _get_validation ( self , method , param ) :
"""Return the correct validations dictionary for this parameter .
First checks the method itself and then its class . If no validation is
defined for this parameter , None is returned .
: param method : A function to get the validation information from .
: type method : Python function
: param param : Name of the parameter to get validation information for .
: type param : str"""
|
if hasattr ( method , '_validations' ) and param in method . _validations :
return method . _validations [ param ]
elif ( hasattr ( method . im_class , '_validations' ) and param in method . im_class . _validations ) :
return method . im_class . _validations [ param ]
else :
return None
|
def csnr(freqs, hc, hn, fmrg, fpeak, prefactor=1.0):
    """Calculate the SNR of a frequency domain waveform.

    SNRCalculation is a function that takes waveforms (frequencies and hcs)
    and a noise curve, and returns SNRs for all binary phases and the whole
    waveform.

    Arguments:
        freqs (1D or 2D array of floats): Frequencies corresponding to the
            waveforms.  Shape is (num binaries, num_points) if 2D, or
            (num_points,) if 1D for one binary.
        hc (1D or 2D array of floats): Characteristic strain of the
            waveforms, same shape convention as ``freqs``.
        hn (1D or 2D array of floats): Characteristic strain of the noise,
            same shape convention as ``freqs``.
        fmrg (scalar float or 1D array of floats): Merger frequency of each
            binary separating inspiral from merger phase (0.014/M).
            Shape is (num binaries,) if more than one binary.
        fpeak (scalar float or 1D array of floats): Peak frequency of each
            binary separating merger from ringdown phase (0.014/M).
            Shape is (num binaries,) if more than one binary.
        prefactor (float, optional): Factor to multiply snr (not snr^2)
            integral values by.  Default is 1.0.

    Returns:
        (dict): Dictionary with SNRs from each phase ('all', 'ins', 'mrg',
        'rd').
    """
    cfd = os.path.dirname(os.path.abspath(__file__))
    # Prefer the prebuilt darwin extension when present, otherwise the
    # generic shared object.  NOTE(review): the names are hard-coded to a
    # cpython-3.5/darwin build -- confirm for other platforms.
    if 'phenomd.cpython-35m-darwin.so' in os.listdir(cfd):
        exec_call = cfd + '/phenomd.cpython-35m-darwin.so'
    else:
        exec_call = cfd + '/phenomd/phenomd.so'
    c_obj = ctypes.CDLL(exec_call)
    # check dimensionality
    remove_axis = False
    try:
        len(fmrg)
    except TypeError:
        # Scalar fmrg means a single binary: promote every input to 2D and
        # remember to strip the leading axis from the outputs.
        remove_axis = True
        freqs, hc = np.array([freqs]), np.array([hc])
        hn, fmrg, fpeak = np.array([hn]), np.array([fmrg]), np.array([fpeak])
    # this implementation in ctypes works with 1D arrays
    freqs_in = freqs.flatten()
    hc_in = hc.flatten()
    hn_in = hn.flatten()
    num_binaries, length_of_signal = hc.shape
    # prepare output arrays (C double arrays, one slot per binary)
    snr_cast = ctypes.c_double * num_binaries
    snr_all = snr_cast()
    snr_ins = snr_cast()
    snr_mrg = snr_cast()
    snr_rd = snr_cast()
    # find SNR values via the compiled phenomd routine
    c_obj.SNR_function(
        ctypes.byref(snr_all), ctypes.byref(snr_ins), ctypes.byref(snr_mrg), ctypes.byref(snr_rd),
        freqs_in.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
        hc_in.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
        hn_in.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
        fmrg.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
        fpeak.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
        ctypes.c_int(length_of_signal), ctypes.c_int(num_binaries))
    # make into numpy arrays
    snr_all, snr_ins, = np.ctypeslib.as_array(snr_all), np.ctypeslib.as_array(snr_ins)
    snr_mrg, snr_rd = np.ctypeslib.as_array(snr_mrg), np.ctypeslib.as_array(snr_rd)
    # remove axis if one binary
    if remove_axis:
        snr_all, snr_ins, snr_mrg, snr_rd = snr_all[0], snr_ins[0], snr_mrg[0], snr_rd[0]
    # prepare output by multiplying by prefactor
    return ({'all': snr_all * prefactor, 'ins': snr_ins * prefactor, 'mrg': snr_mrg * prefactor, 'rd': snr_rd * prefactor})
|
def assign_coords(self, **kwargs):
    """Assign new coordinates to this object.

    Returns a new object with all the original data plus the new
    coordinates.

    Parameters
    ----------
    kwargs : keyword, value pairs
        Keywords are the variable names.  Callable values are computed
        on this object and assigned to new coordinate variables;
        non-callable values (e.g. a DataArray, scalar, or array) are
        assigned as-is.

    Returns
    -------
    assigned : same type as caller
        A new object with the new coordinates in addition to the
        existing data.

    Notes
    -----
    Since ``kwargs`` is a dictionary, the order of your arguments may
    not be preserved, and so the order of the new variables is not well
    defined.  Assigning multiple variables within the same
    ``assign_coords`` is possible, but you cannot reference other
    variables created within the same ``assign_coords`` call.

    See also
    --------
    Dataset.assign
    Dataset.swap_dims
    """
    # Shallow copy: data is shared, only the coordinate mapping changes.
    assigned = self.copy(deep=False)
    computed = self._calc_assign_results(kwargs)
    assigned.coords.update(computed)
    return assigned
|
def FoldValue(self, value):
    """Folds the data type into a value.

    Args:
        value (object): value.

    Returns:
        object: folded value.

    Raises:
        ValueError: if the data type definition cannot be folded into
            the value.
    """
    definition = self._data_type_definition
    if value is False and definition.false_value is not None:
        return definition.false_value
    if value is True and definition.true_value is not None:
        return definition.true_value
    raise ValueError('No matching True and False values')
|
def POST(self):
    """The HTTP POST body parsed into a MultiDict.

    This supports urlencoded and multipart POST requests.  Multipart is
    commonly used for file uploads and may result in some of the values
    being cgi.FieldStorage objects instead of strings.

    Multiple values per key are possible.  See MultiDict for details.
    """
    # Parse lazily and cache the result on the instance.
    if self._POST is None:
        save_env = dict()
        # Build a minimal, safe environment for cgi: only the keys that
        # FieldStorage needs are copied from the real environ.
        for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
            if key in self.environ:
                save_env[key] = self.environ[key]
        # Without this, sys.argv is called!
        save_env['QUERY_STRING'] = ''
        if TextIOWrapper:
            # Python 3: cgi.FieldStorage expects text, so wrap the
            # binary body stream (latin-1 is byte-transparent).
            fb = TextIOWrapper(self.body, encoding='ISO-8859-1')
        else:
            fb = self.body
        data = cgi.FieldStorage(fp=fb, environ=save_env)
        self._POST = MultiDict()
        for item in data.list:
            # File uploads keep the FieldStorage object; plain fields
            # keep just the value.
            self._POST[item.name] = item if item.filename else item.value
    return self._POST
|
def customer_id(self, customer_id):
    """Sets the customer_id of this ChargeRequest.

    The ID of the customer to associate this transaction with.  This
    field is required if you provide a value for `customer_card_id`,
    and optional otherwise.

    :param customer_id: The customer_id of this ChargeRequest.
    :type: str
    """
    error = None
    if customer_id is None:
        error = "Invalid value for `customer_id`, must not be `None`"
    elif len(customer_id) > 50:
        error = "Invalid value for `customer_id`, length must be less than `50`"
    if error is not None:
        raise ValueError(error)
    self._customer_id = customer_id
|
def update_ledger(self, ledger_id, description=None):
    """Update ledger info.

    Arguments:
        ledger_id:
            Ledger id assigned by mCASH
        description:
            Description of the Ledger and its usage
    """
    endpoint = self.merchant_api_base_url + '/ledger/' + ledger_id + '/'
    return self.do_req('PUT', endpoint, {'description': description})
|
def update(self, data):
    """Validate *data* and submit it to the ``change_password`` endpoint.

    The previous docstring claimed this updates "a list", which did not
    match the endpoint actually used; dead commented-out debug code was
    also removed.

    Args:
        data: payload, validated via ``self._check`` before submission.

    Returns:
        The response from ``self.core.create``.
    """
    url = "{base}/change_password".format(base=self.local_base_url)
    self._check(data)
    res = self.core.create(url, data)
    self.log.debug("result: %s", res)
    return res
|
def Sample(self, tasks_status):
    """Takes a sample of the status of queued tasks for profiling.

    Args:
        tasks_status (TasksStatus): status information about tasks.
    """
    # One tab-separated line: timestamp followed by the five counters.
    sample = '{0:f}\t{1:d}\t{2:d}\t{3:d}\t{4:d}\t{5:d}\n'.format(
        time.time(),
        tasks_status.number_of_queued_tasks,
        tasks_status.number_of_tasks_processing,
        tasks_status.number_of_tasks_pending_merge,
        tasks_status.number_of_abandoned_tasks,
        tasks_status.total_number_of_tasks)
    self._WritesString(sample)
|
def nice_join(seq, sep=", ", conjunction="or"):
    '''Join sequences of strings into English-friendly phrases, placing a
    conjunction between the final two items when appropriate.

    Args:
        seq (seq[str]): a sequence of strings to nicely join
        sep (str, optional): a sequence delimiter to use (default: ", ")
        conjunction (str or None, optional): a conjunction to use for the
            last two items, or None to reproduce basic join behavior
            (default: "or")

    Returns:
        a joined string

    Examples:
        >>> nice_join(["a", "b", "c"])
        'a, b or c'
    '''
    items = [str(x) for x in seq]
    if conjunction is None or len(items) <= 1:
        return sep.join(items)
    return "%s %s %s" % (sep.join(items[:-1]), conjunction, items[-1])
|
async def _query(server, method, parameters, timeout=DEFAULT_TIMEOUT, verify_ssl=True, loop: asyncio.AbstractEventLoop = None):
    """Formats and performs the asynchronous query against the API.

    :param server: The server to query.
    :param method: The method name.
    :param parameters: A dict of parameters to send.
    :param timeout: The timeout to make the call, in seconds.  By default,
        this is 300 seconds (or 5 minutes).
    :param verify_ssl: Whether or not to verify SSL connections.
    :param loop: The event loop to run the request on.
    :return: The JSON-decoded result from the server.
    :raise MyGeotabException: Raises when an exception occurs on the
        MyGeotab server.
    :raise TimeoutException: Raises when the request does not respond
        after some time.
    :raise aiohttp.ClientResponseError: Raises when there is an HTTP
        status code that indicates failure.
    """
    api_endpoint = api.get_api_url(server)
    # Build the JSON-RPC style request payload.
    params = dict(id=-1, method=method, params=parameters)
    headers = get_headers()
    ssl_context = None
    verify = verify_ssl
    if verify_ssl:
        # Pin the protocol to TLS 1.2 when verification is enabled.
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
    conn = aiohttp.TCPConnector(verify_ssl=verify, ssl_context=ssl_context, loop=loop)
    try:
        async with aiohttp.ClientSession(connector=conn, loop=loop) as session:
            response = await session.post(api_endpoint, data=json.dumps(params, default=object_serializer), headers=headers, timeout=timeout, allow_redirects=True)
            response.raise_for_status()
            content_type = response.headers.get('Content-Type')
            body = await response.text()
    except TimeoutError:
        raise TimeoutException(server)
    # Non-JSON responses are returned verbatim.
    if content_type and 'application/json' not in content_type.lower():
        return body
    return api._process(json.loads(body, object_hook=object_deserializer))
|
def create_temp_space():
    """Create a new temporary Cloud Foundry space for a project."""
    # Truncating uuid to just take final 12 characters since the space
    # name is used to name services and there is a 50 character limit on
    # instance names.
    # MAINT: hacky with possible collisions
    suffix = str(uuid.uuid4()).split('-')[-1]
    admin = predix.admin.cf.spaces.Space()
    created = admin.create_space(suffix)
    space = predix.admin.cf.spaces.Space(
        guid=created['metadata']['guid'],
        name=created['entity']['name'],
    )
    space.target()
    return space
|
def _sync_folder_to_container(self, folder_path, container, prefix, delete, include_hidden, ignore, ignore_timestamps, object_prefix, verbose):
    """This is the internal method that is called recursively to handle
    nested folder structures.

    Walks *folder_path*, uploading files to *container* under
    ``prefix``/``object_prefix``, and tallies results in
    ``self._sync_summary``.
    """
    fnames = os.listdir(folder_path)
    ignore = utils.coerce_to_list(ignore)
    log = logging.getLogger("pyrax")
    if not include_hidden:
        # Hidden files are excluded via an extra ignore pattern.
        ignore.append(".*")
    for fname in fnames:
        if utils.match_pattern(fname, ignore):
            self._sync_summary["ignored"] += 1
            continue
        pth = os.path.join(folder_path, fname)
        if os.path.isdir(pth):
            # Recurse into subfolders with an extended prefix.
            subprefix = fname
            if prefix:
                subprefix = os.path.join(prefix, subprefix)
            self._sync_folder_to_container(pth, container, prefix=subprefix, delete=delete, include_hidden=include_hidden, ignore=ignore, ignore_timestamps=ignore_timestamps, object_prefix=object_prefix, verbose=verbose)
            continue
        self._local_files.append(os.path.join(object_prefix, prefix, fname))
        local_etag = utils.get_checksum(pth)
        # Fold object_prefix into prefix once, then blank it so it is not
        # applied twice on later iterations or recursive calls.
        if object_prefix:
            prefix = os.path.join(object_prefix, prefix)
            object_prefix = ""
        fullname_with_prefix = os.path.join(prefix, fname)
        try:
            obj = self._remote_files[fullname_with_prefix]
            obj_etag = obj.etag
        except KeyError:
            obj = None
            obj_etag = None
        if local_etag != obj_etag:
            # Checksums differ; optionally compare timestamps before
            # uploading.
            if not ignore_timestamps:
                if obj:
                    obj_time_str = obj.last_modified[:19]
                else:
                    obj_time_str = EARLY_DATE_STR
                local_mod = datetime.datetime.utcfromtimestamp(os.stat(pth).st_mtime)
                local_mod_str = local_mod.isoformat()
                if obj_time_str >= local_mod_str:
                    # Remote object is newer
                    self._sync_summary["older"] += 1
                    if verbose:
                        log.info("%s NOT UPLOADED because remote object is " "newer", fullname_with_prefix)
                        log.info("  Local: %s Remote: %s" % (local_mod_str, obj_time_str))
                    continue
            try:
                container.upload_file(pth, obj_name=fullname_with_prefix, etag=local_etag, return_none=True)
                self._sync_summary["uploaded"] += 1
                if verbose:
                    log.info("%s UPLOADED", fullname_with_prefix)
            except Exception as e:
                # Record the failure, and move on
                self._sync_summary["failed"] += 1
                self._sync_summary["failure_reasons"].append("%s" % e)
                if verbose:
                    log.error("%s UPLOAD FAILED. Exception: %s" % (fullname_with_prefix, e))
        else:
            self._sync_summary["duplicate"] += 1
            if verbose:
                log.info("%s NOT UPLOADED because it already exists", fullname_with_prefix)
    # Only the top-level (prefix-less) invocation performs deletes, after
    # the full recursion has populated self._local_files.
    if delete and not prefix:
        self._delete_objects_not_in_list(container, object_prefix)
|
def get_group_list(user, include_default=True):
    '''Returns a list of all of the system group names of which the user
    is a member.

    Returns an empty list when the platform lacks the ``grp``/``pwd``
    modules (e.g. Windows).
    '''
    if HAS_GRP is False or HAS_PWD is False:
        return []
    group_names = None
    ugroups = set()
    if hasattr(os, 'getgrouplist'):
        # Try os.getgrouplist, available in python >= 3.3
        log.trace('Trying os.getgrouplist for \'%s\'', user)
        try:
            group_names = [grp.getgrgid(grpid).gr_name for grpid in os.getgrouplist(user, pwd.getpwnam(user).pw_gid)]
        except Exception:
            pass
    elif HAS_PYSSS:
        # Try pysss.getgrouplist
        log.trace('Trying pysss.getgrouplist for \'%s\'', user)
        try:
            group_names = list(pysss.getgrouplist(user))
        except Exception:
            pass
    if group_names is None:
        # Fall back to generic code
        # Include the user's default group to match behavior of
        # os.getgrouplist() and pysss.getgrouplist()
        log.trace('Trying generic group list for \'%s\'', user)
        group_names = [g.gr_name for g in grp.getgrall() if user in g.gr_mem]
        try:
            default_group = get_default_group(user)
            if default_group not in group_names:
                group_names.append(default_group)
        except KeyError:
            # If for some reason the user does not have a default group
            pass
    if group_names is not None:
        ugroups.update(group_names)
    if include_default is False:
        # Historically, saltstack code for getting group lists did not
        # include the default group.  Some things may only want
        # supplemental groups, so include_default=False omits the user's
        # default group.
        try:
            default_group = grp.getgrgid(pwd.getpwnam(user).pw_gid).gr_name
            ugroups.remove(default_group)
        except KeyError:
            # If for some reason the user does not have a default group
            pass
    log.trace('Group list for user \'%s\': %s', user, sorted(ugroups))
    return sorted(ugroups)
|
def registerParentFlag(self, optionName, value):
    '''Register a flag of a parent command.

    :Parameters:
        - `optionName`: String. Name of option
        - `value`: Mixed. Value of parsed flag

    :Returns: self, allowing call chaining.
    '''
    self.parentFlags[optionName] = value
    return self
|
def _connected(self, transport, conn):
    """Login and sync the ElkM1 panel to memory."""
    LOG.info("Connected to ElkM1")
    self._conn = conn
    self._transport = transport
    self._connection_retry_timer = 1
    # Secure connections expect the credentials before anything else.
    if url_scheme_is_secure(self._config['url']):
        for credential in ('userid', 'password'):
            self._conn.write_data(self._config[credential], raw=True)
    self.call_sync_handlers()
    # Serial connections get no heartbeat/reconnect timer.
    if not self._config['url'].startswith('serial://'):
        self._heartbeat = self.loop.call_later(120, self._reset_connection)
|
def make_parts_for(self, field_name, field_data):
    """Create the relevant parts for this field.

    Dispatches on ``field_data.field_type``/``field_subtype`` to the
    matching ``_make_*`` helper(s).

    Args:
        field_name (str): Short field name, e.g. VAL
        field_data (FieldData): Field data object

    Raises:
        ValueError: if the type/subtype combination is unknown.
    """
    typ = field_data.field_type
    subtyp = field_data.field_subtype
    # "read" and "xadc" fields are read-only; everything else writeable.
    if typ in ("read", "xadc"):
        writeable = False
    else:
        writeable = True
    # NOTE: `and` binds tighter than `or`, so this condition means:
    # typ == "time"  OR  (typ in ("param", "read") AND subtyp == "time").
    if typ == "time" or typ in ("param", "read") and subtyp == "time":
        self._make_time_parts(field_name, field_data, writeable)
    elif typ == "write" and subtyp == "action":
        self._make_action_part(field_name, field_data)
    elif typ in ("param", "read", "write", "xadc"):
        self._make_param_part(field_name, field_data, writeable)
    elif typ == "bit_out":
        self._make_out(field_name, field_data, "bit")
    elif typ == "pos_out":
        # Position outputs additionally get scale/offset and capture parts.
        self._make_out(field_name, field_data, "pos")
        self._make_scale_offset(field_name)
        self._make_out_capture(field_name, field_data)
    elif typ == "ext_out":
        self._make_out_capture(field_name, field_data)
    elif typ == "bit_mux":
        # Bit muxes additionally get a delay part.
        self._make_mux(field_name, field_data, "bit")
        self._make_mux_delay(field_name)
    elif typ == "pos_mux":
        self._make_mux(field_name, field_data, "pos")
    elif typ == "table":
        self._make_table(field_name, field_data)
    else:
        raise ValueError("Unknown type %r subtype %r" % (typ, subtyp))
|
def log_game_start(self, players, terrain, numbers, ports):
    """Begin a game.

    Erase the log, set the timestamp, set the players, and write the log
    header.  The robber is assumed to start on the desert (or off-board).

    :param players: iterable of catan.game.Player objects
    :param terrain: list of 19 catan.board.Terrain objects.
    :param numbers: list of 19 catan.board.HexNumber objects.
    :param ports: list of catan.board.Port objects.
    """
    self.reset()  # wipe any previous log contents
    self._set_players(players)
    # Header: module/version, timestamp, players, then the board layout.
    self._logln('{} v{}'.format(__name__, __version__))
    self._logln('timestamp: {0}'.format(self.timestamp_str()))
    self._log_players(players)
    self._log_board_terrain(terrain)
    self._log_board_numbers(numbers)
    self._log_board_ports(ports)
    self._logln('...CATAN!')
|
def render(self, **kwargs):
    """Renders the HTML representation of the element."""
    # 500 evenly spaced samples across [vmin, vmax]; dividing by 499
    # makes both endpoints inclusive.
    self.color_domain = [self.vmin + (self.vmax - self.vmin) * k / 499. for k in range(500)]
    self.color_range = [self.__call__(x) for x in self.color_domain]
    self.tick_labels = legend_scaler(self.index)
    super(ColorMap, self).render(**kwargs)
    figure = self.get_root()
    assert isinstance(figure, Figure), ('You cannot render this Element '
                                        'if it is not in a Figure.')
    # d3 is required by the color-scale legend drawing code.
    figure.header.add_child(JavascriptLink("https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"), name='d3')
|
def process_response(self, request, response):
    """Do nothing, if process_request never completed (redirect)."""
    if hasattr(request, 'location'):
        storage = storage_class(request=request, response=response)
        try:
            storage.set(location=request.location)
        except ValueError:
            # bad location_id
            pass
    return response
|
async def servo_config(self, pin, min_pulse=544, max_pulse=2400):
    """Configure a pin as a servo pin and set its pulse range.

    Use this method (not set_pin_mode) to configure a pin for servo
    operation.

    :param pin: Servo Pin.
    :param min_pulse: Min pulse width in ms.
    :param max_pulse: Max pulse width in ms.
    :returns: No return value
    """
    # Each pulse width is split into two 7-bit bytes (LSB then MSB).
    command = [
        pin,
        min_pulse & 0x7f, (min_pulse >> 7) & 0x7f,
        max_pulse & 0x7f, (max_pulse >> 7) & 0x7f,
    ]
    await self._send_sysex(PrivateConstants.SERVO_CONFIG, command)
|
def CKroneckerLMM_optdelta(ldelta_opt, A, X, Y, S_C1, S_R1, S_C2, S_R2, ldeltamin, ldeltamax, numintervals):
    """CKroneckerLMM_optdelta(limix::mfloat_t & ldelta_opt, MatrixXdVec A, MatrixXdVec X, MatrixXd const & Y, VectorXd const & S_C1, VectorXd const & S_R1, VectorXd const & S_C2, VectorXd const & S_R2, limix::mfloat_t ldeltamin, limix::mfloat_t ldeltamax, limix::muint_t numintervals) -> limix::mfloat_t

    Parameters
    ----------
    ldelta_opt: limix::mfloat_t &
    A: limix::MatrixXdVec const &
    X: limix::MatrixXdVec const &
    Y: MatrixXd const &
    S_C1: VectorXd const &
    S_R1: VectorXd const &
    S_C2: VectorXd const &
    S_R2: VectorXd const &
    ldeltamin: limix::mfloat_t
    ldeltamax: limix::mfloat_t
    numintervals: limix::muint_t
    """
    # Thin wrapper: forwards all arguments to the SWIG-generated binding.
    return _core.CKroneckerLMM_optdelta(ldelta_opt, A, X, Y, S_C1, S_R1, S_C2, S_R2, ldeltamin, ldeltamax, numintervals)
|
def predict(self, n_periods=10, exogenous=None, return_conf_int=False, alpha=0.05, **kwargs):
    """Forecast future (transformed) values.

    Generate predictions (forecasts) ``n_periods`` in the future.  Note
    that if ``exogenous`` variables were used in the model fit, they will
    be expected for the predict procedure and will fail otherwise.
    Forecasts may be transformed by the endogenous steps along the way and
    might be on a different scale than raw training/test data.

    Parameters
    ----------
    n_periods : int, optional (default=10)
        The number of periods in the future to forecast.
    exogenous : array-like, shape=[n_obs, n_vars], optional (default=None)
        An optional 2-d array of exogenous variables.  If provided, these
        variables are used as additional features in the regression
        operation.  This should not include a constant or trend.  Note
        that if an ``ARIMA`` is fit on exogenous features, it must be
        provided exogenous features for making predictions.
    return_conf_int : bool, optional (default=False)
        Whether to get the confidence intervals of the forecasts.
    alpha : float, optional (default=0.05)
        The confidence intervals for the forecasts are (1 - alpha)%.
    **kwargs : keyword args
        Extra keyword arguments used for each stage's ``transform`` stage
        and the estimator's ``predict`` stage.  Similar to scikit-learn
        pipeline keyword args, the keys are compound, comprised of the
        stage name and the argument name separated by a "__".  For
        instance, if you have a FourierFeaturizer whose stage is named
        "fourier", your transform kwargs could resemble::

            {"fourier__n_periods": 50}

    Returns
    -------
    forecasts : array-like, shape=(n_periods,)
        The array of transformed, forecasted values.
    conf_int : array-like, shape=(n_periods, 2), optional
        The confidence intervals for the forecasts.  Only returned if
        ``return_conf_int`` is True.
    """
    check_is_fitted(self, "steps_")
    # Push the arrays through the transformer stages, but ONLY the exog
    # transformer stages since we don't have a Y...
    Xt = exogenous
    named_kwargs = self._get_kwargs(**kwargs)
    for step_idx, name, transformer in self._iter(with_final=False):
        if isinstance(transformer, BaseExogTransformer):
            kw = named_kwargs[name]
            # If it's a featurizer, we may also need to add 'n_periods'
            if isinstance(transformer, BaseExogFeaturizer):
                # A user-supplied n_periods kwarg must agree with the
                # forecasting horizon, otherwise it is an error.
                num_p = kw.get("n_periods", None)
                if num_p is not None and num_p != n_periods:
                    raise ValueError("Manually set 'n_periods' kwarg for " "step '%s' differs from forecasting " "n_periods (%r != %r)" % (name, num_p, n_periods))
                kw["n_periods"] = n_periods
            _, Xt = transformer.transform(y=None, exogenous=Xt, **kw)
    # Now we should be able to run the prediction on the final estimator.
    nm, est = self.steps_[-1]
    return est.predict(n_periods=n_periods, exogenous=Xt, return_conf_int=return_conf_int, alpha=alpha, **named_kwargs[nm])
|
def strip_spaces(s):
    """Collapse runs of space characters in *s* into single spaces.

    Only the plain space character is collapsed; other whitespace such as
    tabs and newlines is left untouched.  Leading and trailing spaces are
    removed.
    """
    return u" ".join(part for part in s.split(u' ') if part)
|
def b58decode(v: str) -> bytes:
    '''Decode a Base58 encoded string.'''
    origlen = len(v)
    # Leading "zero" characters of the alphabet map to leading zero
    # bytes in the decoded output.
    v = v.lstrip(alphabet[0])
    pad_len = origlen - len(v)
    # Interpret the remaining characters as a big-endian base-58 number.
    acc = 0
    for char in v:
        acc = acc * 58 + alphabet.index(char)
    # Peel off base-256 digits (least significant first).
    digits = []
    while acc > 0:
        acc, mod = divmod(acc, 256)
        digits.append(mod)
    # Append the zero padding, then reverse into big-endian byte order.
    return (bseq(digits) + b'\0' * pad_len)[::-1]
|
def _create_secret(namespace, name, data, apiserver_url):
    '''Create a Secret object in *namespace* on the defined k8s cluster.'''
    # Prepare URL
    url = "{0}/api/v1/namespaces/{1}/secrets".format(apiserver_url, namespace)
    # Prepare request payload
    request = {
        "apiVersion": "v1",
        "kind": "Secret",
        "metadata": {"name": name, "namespace": namespace},
        "data": data,
    }
    # Make request
    return _kpost(url, request)
|
def remove(self, song):
    """Remove song from playlist.  O(n)

    If song is the current song, remove the song and play next.
    Otherwise, just remove it.
    """
    if song in self._songs:
        if self._current_song is None:
            self._songs.remove(song)
        elif song == self._current_song:
            next_song = self.next_song
            # In random mode, or when only one song remains in the
            # playlist, the next song may be the same as the current one.
            if next_song == self.current_song:
                self.current_song = None
                self._songs.remove(song)
                self.current_song = self.next_song
            else:
                self.current_song = self.next_song
                self._songs.remove(song)
        else:
            self._songs.remove(song)
        logger.debug('Remove {} from player playlist'.format(song))
    else:
        logger.debug('Remove failed: {} not in playlist'.format(song))
    # Also drop the song from the bad-songs list if present.
    if song in self._bad_songs:
        self._bad_songs.remove(song)
|
def post_cleanup(self):
    """Remove any divs that look like non-content: clusters of links,
    bare tables, or nodes scoring below the threshold. Paragraphs are
    always kept."""
    top = self.article.top_node
    node = self.add_siblings(top)
    for child in self.parser.getChildren(node):
        if self.parser.getTag(child) == 'p':
            continue
        looks_bad = (self.is_highlink_density(child)
                     or self.is_table_and_no_para_exist(child)
                     or not self.is_nodescore_threshold_met(node, child))
        if looks_bad:
            self.parser.remove(child)
    return node
|
def sagemaker_auth(overrides=None, path="."):
    """Write a secrets.env file with the W&B ApiKey and any additional secrets passed.

    Args:
        overrides (dict, optional): Additional environment variables to write to secrets.env
        path (str, optional): The path to write the secrets file.

    Raises:
        ValueError: If no W&B API key can be found.
    """
    # Bug fix: the original used a mutable default argument ({}) and mutated
    # it below, leaking the API key across calls and into the caller's dict.
    # Copy into a fresh dict instead.
    overrides = dict(overrides) if overrides else {}
    api_key = overrides.get(env.API_KEY, Api().api_key)
    if api_key is None:
        raise ValueError(
            "Can't find W&B ApiKey, set the WANDB_API_KEY env variable or run `wandb login`")
    overrides[env.API_KEY] = api_key
    with open(os.path.join(path, "secrets.env"), "w") as file:
        for k, v in six.iteritems(overrides):
            file.write("{}={}\n".format(k, v))
|
def fetch_genome(genome_id):
    '''Acquire a genome record from NCBI Entrez.

    :param genome_id: Entrez nucleotide database identifier.
    :return: the genome parsed via coral.seqio.read_dna.
    '''
    # TODO: Can strandedness be found in fetched genome attributes?
    # TODO: skip read/write step?
    # Using a dummy email for now - does this violate NCBI guidelines?
    email = 'loremipsum@gmail.com'
    Entrez.email = email
    # Bug fix: the original used Python 2 print statements, which are a
    # syntax error under Python 3 (this file uses py3-only syntax elsewhere,
    # e.g. function annotations). print() works on both.
    print('Downloading Genome...')
    handle = Entrez.efetch(db='nucleotide', id=str(genome_id), rettype='gb',
                           retmode='text')
    print('Genome Downloaded...')
    # Round-trip through a temp file because coral reads from a path.
    # NOTE(review): the mkdtemp() directory is never cleaned up - confirm
    # whether that is acceptable for callers.
    tmpfile = os.path.join(mkdtemp(), 'tmp.gb')
    with open(tmpfile, 'w') as f:
        f.write(handle.read())
    genome = coral.seqio.read_dna(tmpfile)
    return genome
|
def to_hex_twos_compliment(value, bit_size):
    """Converts integer value to twos compliment hex representation with given bit_size"""
    if value < 0:
        # Wrap the negative value into the unsigned range 0 .. 2**bit_size - 1.
        wrapped = (1 << bit_size) + value
        # Drop the trailing 'L' that Python 2 appended to long literals.
        return hex(wrapped).rstrip("L")
    return to_hex_with_size(value, bit_size)
|
def db_group(self):
    '''str: database system group (defaults to
    :attr:`db_user <tmdeploy.config.AnsibleHostVariableSection.db_user>`)'''
    # Already resolved (explicitly set or computed earlier).
    if self._db_group is not None:
        return self._db_group
    # Lazily fall back to the database user and memoize it.
    self._db_group = self.db_user
    return self._db_group
|
def solve(self, **kwargs):
    """The kwargs required depend upon the script type.

    hash160_lookup:
        dict-like structure that returns a secret exponent for a hash160
    existing_script:
        existing solution to improve upon (optional)
    sign_value:
        the integer value to sign (derived from the transaction hash)
    signature_type:
        usually SIGHASH_ALL (1)
    """
    # we need a hash160 => secret_exponent lookup
    db = kwargs.get("hash160_lookup")
    if db is None:
        raise SolvingError("missing hash160_lookup parameter")
    sign_value = kwargs.get("sign_value")
    signature_type = kwargs.get("signature_type")
    secs_solved = set()
    existing_signatures = []
    existing_script = kwargs.get("existing_script")
    if existing_script:
        # Walk the existing partial solution and keep every signature blob
        # that verifies against one of this script's public keys.
        pc = 0
        opcode, data, pc = tools.get_opcode(existing_script, pc)
        # ignore the first opcode (leading placeholder)
        while pc < len(existing_script):
            opcode, data, pc = tools.get_opcode(existing_script, pc)
            sig_pair, actual_signature_type = parse_signature_blob(data)
            for sec_key in self.sec_keys:
                try:
                    public_pair = encoding.sec_to_public_pair(sec_key)
                    sig_pair, signature_type = parse_signature_blob(data)
                    v = ecdsa.verify(ecdsa.generator_secp256k1, public_pair,
                                     sign_value, sig_pair)
                    if v:
                        existing_signatures.append(data)
                        secs_solved.add(sec_key)
                        break
                except encoding.EncodingError:
                    # if public_pair is invalid, we just ignore it
                    pass
    # Sign with any keys we control that have not already contributed a
    # signature, until the required count self.n is reached.
    for sec_key in self.sec_keys:
        if sec_key in secs_solved:
            continue
        if len(existing_signatures) >= self.n:
            break
        hash160 = encoding.hash160(sec_key)
        result = db.get(hash160)
        if result is None:
            continue
        secret_exponent, public_pair, compressed = result
        binary_signature = self._create_script_signature(
            secret_exponent, sign_value, signature_type)
        existing_signatures.append(b2h(binary_signature))
    # Pad with placeholder OP_0s so the script has exactly n signature slots.
    DUMMY_SIGNATURE = "OP_0"
    while len(existing_signatures) < self.n:
        existing_signatures.append(DUMMY_SIGNATURE)
    # Extra leading OP_0 placeholder (NOTE(review): presumably for the
    # classic OP_CHECKMULTISIG extra-pop quirk - confirm).
    script = "OP_0 %s" % " ".join(s for s in existing_signatures)
    solution = tools.compile(script)
    return solution
|
def ytdl_progress_hook(self, d):
    """Called when youtube-dl updates progress.

    Args:
        d (dict): youtube-dl progress dict; always carries a 'status' key
            and may include 'elapsed', byte counts, 'eta', and 'filename'.
    """
    if d['status'] == 'downloading':
        # Keep the player occupied while the download runs.
        self.play_empty()
        if "elapsed" in d:
            # Throttle progress logging to roughly one update per 4 seconds.
            if d["elapsed"] > self.current_download_elapsed + 4:
                self.current_download_elapsed = d["elapsed"]
                current_download = 0
                current_download_total = 0
                current_download_eta = 0
                # Prefer the exact total; fall back to the estimate.
                if "total_bytes" in d and d["total_bytes"] > 0:
                    current_download_total = d["total_bytes"]
                elif "total_bytes_estimate" in d and d["total_bytes_estimate"] > 0:
                    current_download_total = d["total_bytes_estimate"]
                if "downloaded_bytes" in d and d["downloaded_bytes"] > 0:
                    current_download = d["downloaded_bytes"]
                if "eta" in d and d["eta"] > 0:
                    current_download_eta = d["eta"]
                if current_download_total > 0:
                    percent = round(100 * (current_download / current_download_total))
                    percent = max(0, min(100, percent))
                    if current_download_eta > 0:
                        seconds = round(current_download_eta)
                        # Bug fix: 'seconds' used to be a str compared
                        # against the int 1, so "1 seconds" was always shown.
                        unit = "second" if seconds == 1 else "seconds"
                        eta = " ({} {} remaining)".format(seconds, unit)
                    else:
                        # Bug fix: previously a fragment like
                        # " ( seconds remaining)" was shown with no eta.
                        eta = ""
                    downloading = "Downloading song: {}%{}".format(percent, eta)
                    # Avoid logging duplicate lines.
                    if self.prev_time != downloading:
                        self.timelog.debug(downloading)
                        self.prev_time = downloading
    if d['status'] == 'error':
        self.statuslog.error("Error downloading song")
    elif d['status'] == 'finished':
        self.statuslog.info("Downloaded song")
        downloading = "Downloading song: {}%".format(100)
        if self.prev_time != downloading:
            self.timelog.debug(downloading)
            self.prev_time = downloading
        if "elapsed" in d:
            download_time = "{} {}".format(
                d["elapsed"] if d["elapsed"] > 0 else "<1",
                "seconds" if d["elapsed"] != 1 else "second")
            self.logger.debug("Downloaded song in {}".format(download_time))
        # Create an FFmpeg player for the downloaded file.
        future = asyncio.run_coroutine_threadsafe(
            self.create_ffmpeg_player(d['filename']), client.loop)
        try:
            future.result()
        except Exception as e:
            logger.exception(e)
            return
|
def memory_usage(proc=-1, interval=.1, timeout=None, timestamps=False,
                 include_children=False, max_usage=False, retval=False,
                 stream=None):
    """Return the memory usage of a process or piece of code

    Parameters
    ----------
    proc : {int, string, tuple, subprocess.Popen}, optional
        The process to monitor. Can be given by an integer/string
        representing a PID, by a Popen object or by a tuple
        representing a Python function. The tuple contains three
        values (f, args, kw) and specifies to run the function
        f(*args, **kw).
        Set to -1 (default) for current process.
    interval : float, optional
        Interval at which measurements are collected.
    timeout : float, optional
        Maximum amount of time (in seconds) to wait before returning.
    max_usage : bool, optional
        Only return the maximum memory usage (default False)
    retval : bool, optional
        For profiling python functions. Save the return value of the profiled
        function. Return value of memory_usage becomes a tuple:
        (mem_usage, retval)
    timestamps : bool, optional
        if True, timestamps of memory usage measurement are collected as well.
    stream : File
        if stream is a File opened with write access, then results are written
        to this file instead of stored in memory and returned at the end of
        the subprocess. Useful for long-running processes.
        Implies timestamps=True.

    Returns
    -------
    mem_usage : list of floating-point values
        memory usage, in MiB. Its length is always < timeout / interval
        if max_usage is given, returns the two elements maximum memory and
        number of measurements effectuated
    ret : return value of the profiled function
        Only returned if retval is set to True
    """
    if stream is not None:
        # Streaming implies per-sample timestamps.
        timestamps = True
    if not max_usage:
        ret = []   # accumulates individual samples
    else:
        ret = -1   # running maximum
    if timeout is not None:
        max_iter = int(timeout / interval)
    elif isinstance(proc, int):
        # external process and no timeout
        max_iter = 1
    else:
        # for a Python function wait until it finishes
        max_iter = float('inf')
    # A bare callable means "run f() with no arguments".
    if hasattr(proc, '__call__'):
        proc = (proc, (), {})
    if isinstance(proc, (list, tuple)):
        # --- branch 1: profile a Python callable ---
        if len(proc) == 1:
            f, args, kw = (proc[0], (), {})
        elif len(proc) == 2:
            f, args, kw = (proc[0], proc[1], {})
        elif len(proc) == 3:
            f, args, kw = (proc[0], proc[1], proc[2])
        else:
            raise ValueError
        while True:
            child_conn, parent_conn = Pipe()
            # this will store MemTimer's results
            p = MemTimer(os.getpid(), interval, child_conn,
                         timestamps=timestamps, max_usage=max_usage,
                         include_children=include_children)
            p.start()
            # wait until we start getting memory
            parent_conn.recv()
            returned = f(*args, **kw)
            # finish timing
            parent_conn.send(0)
            ret = parent_conn.recv()
            n_measurements = parent_conn.recv()
            if retval:
                ret = ret, returned
            p.join(5 * interval)
            # If the function finished before ~5 samples were taken, retry
            # with a 10x smaller sampling interval (down to 1e-6 s).
            if n_measurements > 4 or interval < 1e-6:
                break
            interval /= 10.
    elif isinstance(proc, subprocess.Popen):
        # --- branch 2: external process launched from Python ---
        line_count = 0
        while True:
            if not max_usage:
                mem_usage = _get_memory(proc.pid, timestamps=timestamps,
                                        include_children=include_children)
                if stream is not None:
                    stream.write("MEM {0:.6f} {1:.4f}\n".format(*mem_usage))
                else:
                    ret.append(mem_usage)
            else:
                ret = max([ret,
                           _get_memory(proc.pid,
                                       include_children=include_children)])
            time.sleep(interval)
            line_count += 1
            # flush every 50 lines. Make 'tail -f' usable on profile file
            if line_count > 50:
                line_count = 0
                if stream is not None:
                    stream.flush()
            if timeout is not None:
                max_iter -= 1
                if max_iter == 0:
                    break
            # Stop once the child has exited.
            if proc.poll() is not None:
                break
    else:
        # --- branch 3: external process identified by PID (-1 = self) ---
        if max_iter == -1:
            max_iter = 1
        counter = 0
        while counter < max_iter:
            counter += 1
            if not max_usage:
                mem_usage = _get_memory(proc, timestamps=timestamps,
                                        include_children=include_children)
                if stream is not None:
                    stream.write("MEM {0:.6f} {1:.4f}\n".format(*mem_usage))
                else:
                    ret.append(mem_usage)
            else:
                ret = max([ret,
                           _get_memory(proc,
                                       include_children=include_children)])
            time.sleep(interval)
            # Flush every 50 lines.
            if counter % 50 == 0 and stream is not None:
                stream.flush()
    if stream:
        # Results were already written to the stream.
        return None
    return ret
|
def start_tls(self, ssl_context: Union[bool, dict, ssl.SSLContext]=True) -> 'SSLConnection':
    '''Start client TLS on this connection and return SSLConnection.

    Coroutine.

    :param ssl_context: TLS configuration forwarded to the new
        ``SSLConnection``; may be a bool, an options dict, or an
        ``ssl.SSLContext``.
    :return: a connected ``SSLConnection`` that wraps this connection's
        underlying socket.
    '''
    # Reuse the already-connected socket for the TLS handshake.
    sock = self.writer.get_extra_info('socket')
    ssl_conn = SSLConnection(
        self._address, ssl_context=ssl_context, hostname=self._hostname,
        timeout=self._timeout, connect_timeout=self._connect_timeout,
        bind_host=self._bind_host, bandwidth_limiter=self._bandwidth_limiter,
        sock=sock)
    yield from ssl_conn.connect()
    return ssl_conn
|
def poly_energy(sample_like, poly):
    """Calculates energy of a sample from a higher order polynomial.

    Args:
        sample (samples_like):
            A raw sample. `samples_like` is an extension of NumPy's
            array_like structure. See :func:`.as_samples`.
        poly (dict):
            Polynomial as a dict of form {term: bias, ...}, where `term` is a
            tuple of variables and `bias` the associated bias.

    Returns:
        float: The energy of the sample.
    """
    # Bug fix: the two implicitly-concatenated literals were missing a
    # separating space, producing "...dimod 0.9.0.In the future...".
    msg = ("poly_energy is deprecated and will be removed in dimod 0.9.0. "
           "In the future, use BinaryPolynomial.energy")
    warnings.warn(msg, DeprecationWarning)
    # dev note: the vartype is not used in the energy calculation and this
    # will be deprecated in the future
    return BinaryPolynomial(poly, 'SPIN').energy(sample_like)
|
def eliminate(self, node, data):
    """Resolves a source node, passing the message to all associated checks"""
    # Remember the resolved value and detach the node from the check index.
    self.eliminated[node] = data
    pending = self.checks.pop(node)
    # Fold the resolved value into every check that referenced this node.
    for chk in pending:
        chk.check ^= data
        chk.src_nodes.remove(node)
        if len(chk.src_nodes) == 1:
            # Exactly one unknown remains: that node is now solvable.
            yield (next(iter(chk.src_nodes)), chk.check)
|
def add_fragment(self, fragment, as_last=True):
    """Add the given sync map fragment,
    as the first or last child of the root node
    of the sync map tree.

    :param fragment: the sync map fragment to be added
    :type  fragment: :class:`~aeneas.syncmap.fragment.SyncMapFragment`
    :param bool as_last: if ``True``, append fragment; otherwise prepend it
    :raises: TypeError: if ``fragment`` is ``None`` or
             it is not an instance of :class:`~aeneas.syncmap.fragment.SyncMapFragment`
    """
    if not isinstance(fragment, SyncMapFragment):
        # log_exc logs the message and raises the given exception type.
        self.log_exc(u"fragment is not an instance of SyncMapFragment", None, True, TypeError)
    # Wrap the fragment in a Tree node and attach it to the root.
    self.fragments_tree.add_child(Tree(value=fragment), as_last=as_last)
|
def process_jpeg_bytes(bytes_in, quality=DEFAULT_JPEG_QUALITY):
    """Generates an optimized JPEG from JPEG-encoded bytes.

    :param bytes_in: the input image's bytes
    :param quality: the output JPEG quality (default 95)

    :returns: Optimized JPEG bytes
    :rtype: bytes

    :raises ValueError: Guetzli was not able to decode the image (the image is
        probably corrupted or is not a JPEG)

    .. code:: python

        import pyguetzli

        input_jpeg_bytes = open("./test/image.jpg", "rb").read()
        optimized_jpeg = pyguetzli.process_jpeg_bytes(input_jpeg_bytes)
    """
    # Output pointer, garbage-collected through Guetzli's own free routine.
    out_pp = ffi.new("char**")
    out_pp_gc = ffi.gc(out_pp, lib.guetzli_free_bytes)
    out_len = lib.guetzli_process_jpeg_bytes(bytes_in, len(bytes_in), out_pp_gc, quality)
    # A zero output length signals a decode failure inside Guetzli.
    if not out_len:
        raise ValueError("Invalid JPEG: Guetzli was not able to decode the image")
    buf = ffi.cast("char*", out_pp_gc[0])  # noqa
    return ffi.unpack(buf, out_len)
|
def get_description_from_docstring(cls, obj):
    """Returns a pair (description, details) from the obj's docstring.

    description is a single line.
    details is a list of subsequent lines, possibly empty.
    """
    doc = obj.__doc__ or ''
    head, newline, rest = doc.partition('\n')
    if not newline:
        # Single-line (or empty) docstring: no detail lines at all.
        return doc, []
    body = textwrap.dedent(rest).splitlines()
    # Trim blank lines from both ends.
    while body and not body[0].strip():
        del body[0]
    while body and not body[-1].strip():
        del body[-1]
    # Drop ":param"-style blocks: stop copying at a line containing
    # ":param" and resume at the next blank line.
    keep = True
    filtered = []
    for line in body:
        if ":param" in line:
            keep = False
        if not line.strip():
            keep = True
        if keep:
            filtered.append(line)
    return head, filtered
|
def get_jobs(self, name=None):
    """Retrieves jobs running on this resource in its instance.

    Args:
        name (str, optional): Only return jobs containing property **name**
            that matches `name`. `name` can be a regular expression. If
            `name` is not supplied, then all jobs are returned.

    Returns:
        list(Job): A list of jobs matching the given `name`.

    .. note:: If ``applicationResource`` is `False` an empty list is returned.

    .. versionadded:: 1.9
    """
    # Non-application resources never carry jobs.
    if not self.applicationResource:
        return []
    return self._get_elements(self.jobs, 'jobs', Job, None, name)
|
def tick(self):
    """Ticks the environment once. Normally used for multi-agent environments.

    Returns:
        dict: A dictionary from agent name to its full state. The full state is another dictionary
        from :obj:`holodeck.sensors.Sensors` enum to np.ndarray, containing the sensors information
        for each sensor. The sensors always include the reward and terminal sensors.
    """
    # Flush any queued commands to the simulator first.
    self._handle_command_buffer()
    # Release then re-acquire so the engine can advance one tick in between
    # (NOTE(review): presumably a semaphore handshake with the simulator -
    # confirm against the client implementation).
    self._client.release()
    self._client.acquire()
    return self._get_full_state()
|
def _unapply_interception(target, ctx=None):
    """Unapply interception on input target in cleaning it.

    :param routine target: target from where removing an interception
        function. is_joinpoint(target) must be True.
    :param ctx: target ctx.
    :raises JoinpointError: if target is not intercepted, or if a builtin
        target has no reference inside its module.
    """
    # try to get the right ctx
    if ctx is None:
        ctx = find_ctx(elt=target)
    # get previous target
    intercepted, old_ctx = get_intercepted(target)
    # if ctx is None and old_ctx is not None, update ctx with old_ctx
    if ctx is None and old_ctx is not None:
        ctx = old_ctx
    if intercepted is None:
        raise JoinpointError('{0} must be intercepted'.format(target))
    # flag to deleting of joinpoint_function
    del_joinpoint_function = False
    # if old target is a not modifiable resource
    if isbuiltin(intercepted):
        module = getmodule(intercepted)
        found = False
        # update references to target to not modifiable element in module
        for name, member in getmembers(module):
            if member is target:
                setattr(module, name, intercepted)
                found = True
        # if no reference found, raise an Exception
        if not found:
            raise JoinpointError("Impossible to unapply interception on not modifiable element \
{0}. Must be contained in module {1}".format(target, module))
    elif ctx is None:
        # get joinpoint function
        joinpoint_function = _get_function(target)
        # update old code on target: restore the pre-interception bytecode
        joinpoint_function.__code__ = intercepted.__code__
        # ensure to delete joinpoint_function
        del_joinpoint_function = True
    else:
        # flag for joinpoint recovering
        recover = False
        # get interception name in order to update/delete interception from ctx
        intercepted_name = intercepted.__name__
        # should we change of target or is it inherited?
        if isclass(ctx):
            base_interception, _ = super_method(name=intercepted_name, ctx=ctx)
        else:
            base_interception = getattr(ctx.__class__, intercepted_name, None)
        # if base interception does not exist
        if base_interception is None:
            # recover intercepted
            recover = True
        else:
            # get joinpoint_function
            joinpoint_function = _get_function(target)
            # get base function
            if is_intercepted(base_interception):
                base_intercepted, _ = get_intercepted(base_interception)
            else:
                base_intercepted = _get_function(base_interception)
            # is interception inherited?
            if base_intercepted is joinpoint_function:
                # do nothing
                pass
            # is intercepted inherited?
            elif base_intercepted is intercepted:
                # del interception
                delattr(ctx, intercepted_name)
                del_joinpoint_function = True
            else:
                # base function is something else
                recover = True
        if recover:
            # if recover is required, new content to put in ctx
            new_content = intercepted
            if ismethod(target):
                # in creating eventually a new method
                args = [new_content, ctx]
                if PY2:
                    # if py2, specify the ctx class and unbound method type
                    if target.__self__ is None:
                        args = [new_content, None, ctx]
                    else:
                        # or instance method
                        args.append(ctx.__class__)
                # instantiate a new method
                new_content = MethodType(*args)
            # update ctx with intercepted
            setattr(ctx, intercepted_name, new_content)
            joinpoint_function = _get_function(target)
            del_joinpoint_function = True
    if del_joinpoint_function:
        # delete _INTERCEPTED and _INTERCEPTED_CTX from joinpoint_function
        if hasattr(joinpoint_function, _INTERCEPTED):
            delattr(joinpoint_function, _INTERCEPTED)
        if hasattr(joinpoint_function, _INTERCEPTED_CTX):
            delattr(joinpoint_function, _INTERCEPTED_CTX)
        del joinpoint_function
|
def history(self):
    '''Get history/version information for this datastream and
    return as an instance of
    :class:`~eulfedora.xml.DatastreamHistory`.'''
    # Ask the Fedora API for the datastream history as XML and parse it.
    response = self.obj.api.getDatastreamHistory(self.obj.pid, self.id,
                                                 format='xml')
    return parse_xml_object(DatastreamHistory, response.content, response.url)
|
def get_community_trends(self, indicator_type=None, days_back=None):
    """Find indicators that are trending in the community.

    :param indicator_type: A type of indicator to filter by. If ``None``, will get all types of indicators except
        for MALWARE and CVEs (this convention is for parity with the corresponding view on the Dashboard).
    :param days_back: The number of days back to search. Any integer between 1 and 30 is allowed.
    :return: A list of |Indicator| objects.
    """
    query = {'type': indicator_type, 'daysBack': days_back}
    response = self._client.get("indicators/community-trending", params=query)
    # Each item in the JSON body is a serialized indicator.
    return list(map(Indicator.from_dict, response.json()))
|
def children(self, alias, bank_id):
    """URL for getting or setting child relationships for the specified bank.

    :param alias: service alias (sanitized via ``_safe_alias``)
    :param bank_id: identifier of the bank
    :return: the assembled URL string
    """
    suffix = '/child/ids/' + str(bank_id)
    return self._root + self._safe_alias(alias) + suffix
|
def get_group_id(self, user_id):
    """Fetch the ID of the group a user belongs to.

    Details:
    http://mp.weixin.qq.com/wiki/0/56d992c605a97245eb7e617854b169fc.html

    :param user_id: the user's openid
    :return: the ID of the group the user belongs to

    Usage::

        from wechatpy import WeChatClient

        client = WeChatClient('appid', 'secret')
        group_id = client.user.get_group_id('openid')
    """
    # The API wraps the group id in a JSON object; unwrap it in the
    # result processor.
    return self._post(
        'groups/getid',
        data={'openid': user_id},
        result_processor=lambda payload: payload['groupid'],
    )
|
def get_event_canned_questions(self, id, **data):
    """GET /events/:id/canned_questions/

    Returns the canned questions of a single event (examples: first name,
    last name, company, prefix, etc.). This endpoint will return
    :format:`question`.
    """
    endpoint = "/events/{0}/canned_questions/".format(id)
    return self.get(endpoint, data=data)
|
def write_info(self, url_data):
    """Write url_data.info."""
    # Label column, padded to align with the other output fields.
    label = self.part("info") + self.spaces("info")
    self.write(label)
    # Wrap long info text at 65 columns and colorize it.
    body = self.wrap(url_data.info, 65)
    self.writeln(body, color=self.colorinfo)
|
def initialize_environments(self, batch_size=1):
    """Initializes the environments and trajectories.

    Subclasses can override this if they don't want a default implementation
    which initializes `batch_size` environments, but must take care to
    initialize self._trajectories (this is checked in __init__ anyways).

    Args:
        batch_size: (int) Number of `self.base_env_name` envs to initialize.
    """
    assert batch_size >= 1
    self._batch_size = batch_size
    # One independent gym environment per batch element.
    self._envs = [gym.make(self.base_env_name) for _ in range(batch_size)]
    if self._env_wrapper_fn is not None:
        self._envs = list(map(self._env_wrapper_fn, self._envs))
    # If self.observation_space and self.action_space aren't None, then it means
    # that this is a re-initialization of this class, in that case make sure
    # that this matches our previous behaviour.
    if self._observation_space:
        assert str(self._observation_space) == str(
            self._envs[0].observation_space)
    else:
        # This means that we are initializing this class for the first time.
        #
        # We set this equal to the first env's observation space, later on we'll
        # verify that all envs have the same observation space.
        self._observation_space = self._envs[0].observation_space
    # Similarly for action_space
    if self._action_space:
        assert str(self._action_space) == str(self._envs[0].action_space)
    else:
        self._action_space = self._envs[0].action_space
    self._verify_same_spaces()
    # If self.reward_range is None, i.e. this means that we should take the
    # reward range of the env.
    if self.reward_range is None:
        self._reward_range = self._envs[0].reward_range
    # This data structure stores the history of each env.
    #
    # NOTE: Even if the env is a NN and can step in all batches concurrently, it
    # is still valuable to store the trajectories separately.
    self._trajectories = trajectory.BatchTrajectory(batch_size=batch_size)
|
def synthetic_grad(X, theta, sigma1, sigma2, sigmax, rescale_grad=1.0, grad=None):
    """Get synthetic gradient value.

    Computes in NumPy the gradient w.r.t. theta = (theta1, theta2) of a
    two-component Gaussian mixture objective with Gaussian prior terms
    (NOTE(review): model inferred from the formula below - confirm against
    the accompanying example).

    :param X: observed data (NumPy array).
    :param theta: 2-element mxnet NDArray of parameters.
    :param sigma1: prior std for theta1.
    :param sigma2: prior std for theta2.
    :param sigmax: observation noise std.
    :param rescale_grad: multiplier applied to the data (likelihood) term.
    :param grad: optional preallocated NDArray to write the result into.
    :return: NDArray holding the gradient (same object as ``grad`` if given).
    """
    if grad is None:
        grad = nd.empty(theta.shape, theta.context)
    theta1 = theta.asnumpy()[0]
    theta2 = theta.asnumpy()[1]
    # Variances from the given standard deviations.
    v1 = sigma1 ** 2
    v2 = sigma2 ** 2
    vx = sigmax ** 2
    # Shared mixture denominator (sum of the two unnormalized components).
    denominator = numpy.exp(-(X - theta1) ** 2 / (2 * vx)) + numpy.exp(
        -(X - theta1 - theta2) ** 2 / (2 * vx))
    grad_npy = numpy.zeros(theta.shape)
    # d/dtheta1: both components depend on theta1; plus prior term theta1/v1.
    grad_npy[0] = -rescale_grad * ((numpy.exp(-(X - theta1) ** 2 / (2 * vx)) *
                                    (X - theta1) / vx +
                                    numpy.exp(-(X - theta1 - theta2) ** 2 / (2 * vx)) *
                                    (X - theta1 - theta2) / vx) /
                                   denominator).sum() + theta1 / v1
    # d/dtheta2: only the shifted component depends on theta2; prior theta2/v2.
    grad_npy[1] = -rescale_grad * ((numpy.exp(-(X - theta1 - theta2) ** 2 / (2 * vx)) *
                                    (X - theta1 - theta2) / vx) /
                                   denominator).sum() + theta2 / v2
    grad[:] = grad_npy
    return grad
|
def _create_co_virtual_idp(self, context):
    """Create a virtual IdP to represent the CO.

    :type context: The current context
    :rtype: saml.server.Server

    :param context: the current request context
    :return: An idp server
    :raises SATOSAError: if the CO named in the request is not configured
    """
    co_name = self._get_co_name(context)
    # Expose the CO name to downstream micro-services via the context.
    context.decorate(self.KEY_CO_NAME, co_name)
    # Verify that we are configured for this CO. If the CO was not
    # configured most likely the endpoint used was not registered and
    # SATOSA core code threw an exception before getting here, but we
    # include this check in case later the regex used to register the
    # endpoints is relaxed.
    co_names = self._co_names_from_config()
    if co_name not in co_names:
        msg = "CO {} not in configured list of COs {}".format(co_name, co_names)
        satosa_logging(logger, logging.WARN, msg, context.state)
        raise SATOSAError(msg)
    # Make a copy of the general IdP config that we will then overwrite
    # with mappings between SAML bindings and CO specific URL endpoints,
    # and the entityID for the CO virtual IdP.
    backend_name = context.target_backend
    idp_config = copy.deepcopy(self.idp_config)
    idp_config = self._add_endpoints_to_config(idp_config, co_name, backend_name)
    idp_config = self._add_entity_id(idp_config, co_name)
    # Use the overwritten IdP config to generate a pysaml2 config object
    # and from it a server object.
    pysaml2_idp_config = IdPConfig().load(idp_config, metadata_construction=False)
    server = Server(config=pysaml2_idp_config)
    return server
|
def count_indents_length_last_line(self, spacecount, tabs=0, back=5):
    """Finds the last meaningful line and returns its indent level and
    character length as an ``(indents, length)`` tuple.

    Back specifies the amount of lines to look back for a non-whitespace
    line.
    """
    # Bug fix: this early-out previously returned the bare int 0 while
    # every other path returns an (indents, length) tuple.
    if not self.has_space():
        return (0, 0)
    lines = self.get_surrounding_lines(back, 0)
    # Scan backwards for the nearest line that holds any non-whitespace.
    for line in reversed(lines):
        if not line.string.isspace():
            return line.count_indents_length(spacecount, tabs)
    # Only whitespace in the look-back window.
    return (0, 0)
|
def UpdateArpeggiates(self, type="start"):
    '''Method which searches for all arpeggiates and updates the top one of
    each chord to be a start, and the bottom one to be a stop, ready for
    lilypond output.

    :param type: marker to assign to this note's arpeggiate
        ("start"/"stop"/...)
    :return: None
    '''
    result = self.item.Search(Arpeggiate)
    if result is not None:
        if type == "start":
            result.type = type
            # Propagate down the chord: ensure the child note also carries
            # an arpeggiate marker, then recurse with "stop".
            child = self.GetChild(0)
            if child is not None:
                if child.item.Search(Arpeggiate) is None:
                    new_obj = copy.deepcopy(result)
                    new_obj.type = "none"
                    child.GetItem().addNotation(new_obj)
                if child is not None and hasattr(child, "UpdateArpeggiates"):
                    child.UpdateArpeggiates(type="stop")
        else:
            result.type = type
    else:
        # No Arpeggiate found: try the NonArpeggiate variant instead.
        result = self.item.Search(NonArpeggiate)
        if result is not None:
            if type == "start":
                result.type = type
                child = self.GetChild(0)
                if child is not None:
                    search = child.item.Search(NonArpeggiate)
                    if search is None:
                        cpy = copy.deepcopy(result)
                        cpy.type = "none"
                        child.item.addNotation(cpy)
                    # NOTE(review): this branch recurses with "bottom" while
                    # the Arpeggiate branch uses "stop", and uses
                    # child.item.addNotation here vs child.GetItem().addNotation
                    # above - confirm both asymmetries are intentional.
                    if hasattr(child, "UpdateArpeggiates"):
                        child.UpdateArpeggiates(type="bottom")
            else:
                result.type = type
|
def enable(self):
    """Enable contextual logging"""
    # Create the filter lazily; the lock guards against concurrent callers
    # creating it twice.
    with self._lock:
        if self.filter is not None:
            return
        self.filter = self._filter_type(self)
|
def _all_fields(self):
    """Returns the entire serializer field set.

    Does not respect dynamic field inclusions/exclusions.
    """
    # Rebuild the field set when caching is disabled (globally or on this
    # serializer class) or when this class has not been cached yet.
    if (not settings.ENABLE_FIELDS_CACHE or not self.ENABLE_FIELDS_CACHE
            or self.__class__ not in FIELDS_CACHE):
        all_fields = super(WithDynamicSerializerMixin, self).get_fields()
        if (settings.ENABLE_FIELDS_CACHE and self.ENABLE_FIELDS_CACHE):
            FIELDS_CACHE[self.__class__] = all_fields
    else:
        # Cache hit: shallow-copy the mapping and reset each field that
        # supports it, since field objects are shared across instances.
        all_fields = copy.copy(FIELDS_CACHE[self.__class__])
        for k, field in six.iteritems(all_fields):
            if hasattr(field, 'reset'):
                field.reset()
    # (Re)bind every field to this serializer instance.
    for k, field in six.iteritems(all_fields):
        field.field_name = k
        field.parent = self
    return all_fields
|
def assemble_default_config(modules):
    """Build the default configuration from a set of modules.

    `modules` is an iterable of
    :class:`yakonfig.configurable.Configurable` objects, or anything
    equivalently typed. This produces the default configuration from
    that list of modules.

    :param modules: modules or Configurable instances to use
    :type modules: iterable of :class:`~yakonfig.configurable.Configurable`
    :return: configuration dictionary
    """
    def _install_defaults(parent_config, config_name, prefix, module):
        # Copy so later mutation cannot leak back into the module attribute.
        defaults = dict(getattr(module, 'default_config', {}))
        if config_name in parent_config:
            existing = parent_config[config_name]
            raise ProgrammerError(
                'config for {0} already present when about to fetch '
                '{3}.default_config (had {1!r} would have set {2!r})'.format(
                    prefix, existing, defaults, module))
        parent_config[config_name] = defaults
    return _recurse_config(dict(), modules, _install_defaults)
|
def stop(self, timeout=1.0):
    """Stop a running server (from another thread).

    Parameters
    ----------
    timeout : float, optional
        Seconds to wait for server to have *started*.

    Returns
    -------
    stopped : thread-safe Future
        Resolves when the server is stopped
    """
    result = self._server.stop(timeout)
    # Shut down the handler thread too, when one was started.
    if self._handler_thread:
        self._handler_thread.stop(timeout)
    return result
|
def set_configuration(self, message_number=default_message_number, exception_number=default_exception_number, permanent_progressbar_slots=default_permanent_progressbar_slots, redraw_frequency_millis=default_redraw_frequency_millis, console_level=default_level, task_millis_to_removal=default_task_millis_to_removal, console_format_strftime=default_console_format_strftime, console_format=default_console_format, file_handlers=default_file_handlers):
    """Define the current configuration of the logger.

    May be called at any moment during runtime to modify the logger behavior.

    :param message_number: [Optional] Number of simultaneously displayed messages below progress bars.
    :param exception_number: [Optional] Number of simultaneously displayed exceptions below messages.
    :param permanent_progressbar_slots: [Optional] Vertical space (bar slots) kept at all times so the
                                        message logger stops moving once the bar count is at or below it.
    :param redraw_frequency_millis: [Optional] Minimum milliseconds between two redraws (actual rate also
                                    depends on method calls).
    :param console_level: [Optional] The logging level (from the standard logging module).
    :param task_millis_to_removal: [Optional] Minimum milliseconds at maximum completion before a progress
                                   bar is removed from display.
    :param console_format_strftime: [Optional] strftime format for console log timestamps,
                                    e.g. '29 november 2016 21:52:12'.
    :param console_format: [Optional] Console line format with {T} (timestamp) and {L} (level)
                           placeholders; defaults to '{T} [{L}]'.
    :param file_handlers: [Optional] File handlers to use, each with its own formatter and level.
                          StreamHandler on stdout/stderr is reserved for console output.
    """
    # Bundle every setting into one command object so the consumer process
    # applies the whole configuration atomically.
    command = SetConfigurationCommand(
        task_millis_to_removal=task_millis_to_removal,
        console_level=console_level,
        permanent_progressbar_slots=permanent_progressbar_slots,
        message_number=message_number,
        exception_number=exception_number,
        redraw_frequency_millis=redraw_frequency_millis,
        console_format_strftime=console_format_strftime,
        console_format=console_format,
        file_handlers=file_handlers,
    )
    self.queue.put(dill.dumps(command))
|
def from_args(cls, args=None, skip_dir_check=False):
    """Construct a Fetcher from given command line arguments.

    @type args: list(str)
    @param args: Command line arguments (optional). Default is to use args from sys.argv
    @type skip_dir_check: bool
    @param skip_dir_check: Boolean identifying whether to check for existing build directory
    @rtype: tuple(Fetcher, output path)
    @return: Returns a Fetcher object and keyword arguments for extract_build.
    """
    parser = argparse.ArgumentParser()
    # NOTE: 'branch' deliberately has no default here; it is filled in manually
    # after parsing so we can detect (and reject) a branch flag combined with a
    # --build namespace argument.
    parser.set_defaults(target='firefox', build='latest', tests=None)
    # branch default is set after parsing
    target_group = parser.add_argument_group('Target')
    target_group.add_argument('--target', choices=sorted(cls.TARGET_CHOICES), help=('Specify the build target. (default: %(default)s)'))
    target_group.add_argument('--os', choices=sorted(Platform.SUPPORTED), help=('Specify the target system. (default: ' + std_platform.system() + ')'))
    # CPU choices include both canonical names and their aliases.
    cpu_choices = sorted(set(itertools.chain(itertools.chain.from_iterable(Platform.SUPPORTED.values()), Platform.CPU_ALIASES)))
    target_group.add_argument('--cpu', choices=cpu_choices, help=('Specify the target CPU. (default: ' + std_platform.machine() + ')'))
    type_group = parser.add_argument_group('Build')
    type_group.add_argument('--build', metavar='DATE|REV|NS', help='Specify the build to download, (default: %(default)s)' ' Accepts values in format YYYY-MM-DD (2017-01-01)' ' revision (57b37213d81150642f5139764e7044b07b9dccc3)' ' or TaskCluster namespace (gecko.v2....)')
    branch_group = parser.add_argument_group('Branch')
    # Branch flags are mutually exclusive; each stores a fixed branch name.
    branch_args = branch_group.add_mutually_exclusive_group()
    branch_args.add_argument('--inbound', action='store_const', const='inbound', dest='branch', help='Download from mozilla-inbound')
    branch_args.add_argument('--central', action='store_const', const='central', dest='branch', help='Download from mozilla-central (default)')
    branch_args.add_argument('--release', action='store_const', const='release', dest='branch', help='Download from mozilla-release')
    branch_args.add_argument('--beta', action='store_const', const='beta', dest='branch', help='Download from mozilla-beta')
    branch_args.add_argument('--esr52', action='store_const', const='esr52', dest='branch', help='Download from mozilla-esr52')
    branch_args.add_argument('--esr', action='store_const', const='esr60', dest='branch', help='Download from mozilla-esr60')
    build_group = parser.add_argument_group('Build Arguments')
    build_group.add_argument('-d', '--debug', action='store_true', help='Get debug builds w/ symbols (default=optimized).')
    build_group.add_argument('-a', '--asan', action='store_true', help='Download AddressSanitizer builds.')
    build_group.add_argument('--fuzzing', action='store_true', help='Download --enable-fuzzing builds.')
    build_group.add_argument('--coverage', action='store_true', help='Download --coverage builds. This also pulls down the *.gcno files')
    build_group.add_argument('--valgrind', action='store_true', help='Download Valgrind builds.')
    test_group = parser.add_argument_group('Test Arguments')
    test_group.add_argument('--tests', nargs='+', metavar='', choices=cls.TEST_CHOICES, help=('Download tests associated with this build. Acceptable values are: ' + ', '.join(cls.TEST_CHOICES)))
    test_group.add_argument('--full-symbols', action='store_true', help='Download the full crashreport-symbols.zip archive.')
    misc_group = parser.add_argument_group('Misc. Arguments')
    misc_group.add_argument('-n', '--name', help='Specify a name (default=auto)')
    misc_group.add_argument('-o', '--out', default=os.getcwd(), help='Specify output directory (default=.)')
    misc_group.add_argument('--dry-run', action='store_true', help="Search for build and output metadata only, don't download anything.")
    args = parser.parse_args(args=args)
    # A build value that is not a date, a 40-char hex revision, or 'latest'
    # is treated as a raw TaskCluster namespace.
    if re.match(r'(\d{4}-\d{2}-\d{2}|[0-9A-Fa-f]{40}|latest)$', args.build) is None:
        # this is a custom build
        # ensure conflicting options are not set
        if args.branch is not None:
            parser.error('Cannot specify --build namespace and branch argument: %s' % args.branch)
        if args.debug:
            parser.error('Cannot specify --build namespace and --debug')
        if args.asan:
            parser.error('Cannot specify --build namespace and --asan')
        if args.fuzzing:
            parser.error('Cannot specify --build namespace and --fuzzing')
        if args.coverage:
            parser.error('Cannot specify --build namespace and --coverage')
        if args.valgrind:
            parser.error('Cannot specify --build namespace and --valgrind')
    # do this default manually so we can error if combined with --build namespace
    # parser.set_defaults(branch='central')
    elif args.branch is None:
        args.branch = 'central'
    flags = BuildFlags(args.asan, args.debug, args.fuzzing, args.coverage, args.valgrind)
    obj = cls(args.target, args.branch, args.build, flags, Platform(args.os, args.cpu))
    if args.name is None:
        # Derive a folder name from the build metadata when none was given.
        args.name = obj.get_auto_name()
    final_dir = os.path.realpath(os.path.join(args.out, args.name))
    # Refuse to clobber an existing output directory unless explicitly skipped.
    if not skip_dir_check and os.path.exists(final_dir):
        parser.error('Folder exists: %s .. exiting' % final_dir)
    extract_options = {'dry_run': args.dry_run, 'out': final_dir, 'full_symbols': args.full_symbols, 'tests': args.tests}
    return obj, extract_options
|
def _is_daemonset ( self , pod = None ) :
"""Determines if a K8sPod is part of a K8sDaemonSet .
: param pod : The K8sPod we ' re interested in .
: return : a boolean ."""
|
if 'kubernetes.io/created-by' in pod . annotations :
parent = json . loads ( pod . annotations [ 'kubernetes.io/created-by' ] )
if parent [ 'reference' ] [ 'kind' ] == 'DaemonSet' :
return True
return False
|
def fingerprint_correlation(T, obs1, obs2=None, tau=1, k=None, ncv=None):
    r"""Dynamical fingerprint for equilibrium correlation experiment.

    Parameters
    ----------
    T : (M, M) ndarray or scipy.sparse matrix
        Transition matrix
    obs1 : (M,) ndarray
        Observable, represented as vector on state space
    obs2 : (M,) ndarray (optional)
        Second observable, for cross-correlations
    k : int (optional)
        Number of time-scales and amplitudes to compute
    tau : int (optional)
        Lag time of given transition matrix, for correct time-scales
    ncv : int (optional)
        The number of Lanczos vectors generated, `ncv` must be greater than k;
        it is recommended that ncv > 2*k

    Returns
    -------
    timescales : (N,) ndarray
        Time-scales of the transition matrix
    amplitudes : (N,) ndarray
        Amplitudes for the correlation experiment

    See also
    --------
    correlation, fingerprint_relaxation

    References
    ----------
    .. [1] Noe, F, S Doose, I Daidone, M Loellmann, M Sauer, J D
        Chodera and J Smith. 2010. Dynamical fingerprints for probing
        individual relaxation processes in biomolecular dynamics with
        simulations and kinetic experiments. PNAS 108 (12): 4822-4827.

    Notes
    -----
    Fingerprints are a combination of time-scale and amplitude spectrum for
    a equilibrium correlation or a non-equilibrium relaxation experiment.
    The auto-correlation amplitudes are
    :math:`\gamma_i = \langle a, r_i \rangle_{\mu} \langle l_i, a \rangle`
    and the fingerprint time scales are
    :math:`t_i = -\frac{\tau}{\log \lvert \lambda_i \rvert}`.
    For cross-correlations of two observables :math:`a(x)`, :math:`b(x)` the
    amplitudes are
    :math:`\gamma_i = \langle a, r_i \rangle_{\mu} \langle l_i, b \rangle`.

    Examples
    --------
    >>> import numpy as np
    >>> from msmtools.analysis import fingerprint_correlation

    >>> T = np.array([[0.9, 0.1, 0.0], [0.5, 0.0, 0.5], [0.0, 0.1, 0.9]])
    >>> a = np.array([1.0, 0.0, 0.0])
    >>> ts, amp = fingerprint_correlation(T, a)

    >>> ts
    array([        inf, 9.49122158, 0.43429448])
    >>> amp
    array([0.20661157, 0.22727273, 0.02066116])
    """
    # check if square matrix and remember size
    T = _types.ensure_ndarray_or_sparse(T, ndim=2, uniform=True, kind='numeric')
    n = T.shape[0]
    # will not do fingerprint analysis for nonreversible matrices
    if not is_reversible(T):
        raise ValueError('Fingerprint calculation is not supported for nonreversible transition matrices. ')
    obs1 = _types.ensure_ndarray(obs1, ndim=1, size=n, kind='numeric')
    # BUG FIX: the second validation must apply to obs2 (which may be None),
    # not re-validate obs1 -- the original left obs2 unchecked and clobbered
    # obs1 with the Optional-validated copy.
    obs2 = _types.ensure_ndarray_or_None(obs2, ndim=1, size=n, kind='numeric')
    # go
    if _issparse(T):
        return sparse.fingerprints.fingerprint_correlation(T, obs1, obs2=obs2, tau=tau, k=k, ncv=ncv)
    else:
        return dense.fingerprints.fingerprint_correlation(T, obs1, obs2, tau=tau, k=k)
|
def run(create_application, settings=None, log_config=None):
    """Run a Tornado application.

    :param create_application: function to call to create a new
        application instance
    :param dict|None settings: optional configuration dictionary that
        will be passed through to ``create_application`` as kwargs.
    :param dict|None log_config: optional logging configuration
        dictionary passed as-is to :func:`logging.config.dictConfig`.
        By default a reasonable configuration is generated from settings.

    Recognized settings keys (each falls back to an environment
    variable when absent):

    - ``debug``: run in Tornado debug mode; falls back to :envvar:`DEBUG`.
    - ``port``: port to listen on; falls back to :envvar:`PORT`, then 8000.
    - ``number_of_procs``: process count passed to ``HTTPServer.start``
      (ignored in debug mode).
    - ``xheaders``: honor headers like X-Real-IP when running behind a
      load balancer; defaults to False.
    """
    from . import runner

    app_settings = dict(settings) if settings is not None else {}
    # Environment DEBUG is only consulted when settings carry no 'debug' key.
    env_debug = int(os.environ.get('DEBUG', 0)) != 0
    debug_mode = bool(app_settings.get('debug', env_debug))
    app_settings['debug'] = debug_mode
    if log_config is None:
        log_config = _get_logging_config(debug_mode)
    logging.config.dictConfig(log_config)
    # 'port' and 'number_of_procs' are popped so they are not forwarded to
    # create_application().
    port_number = int(app_settings.pop('port', os.environ.get('PORT', 8000)))
    num_procs = int(app_settings.pop('number_of_procs', '0'))
    server = runner.Runner(create_application(**app_settings))
    server.run(port_number, num_procs)
|
def get_tasks(self, list_id, completed=False):
    """Return the tasks of the list identified by ``list_id``.

    :param list_id: ID of the list whose tasks to fetch.
    :param completed: filter flag -- fetch completed tasks when True,
        open tasks when False.
    """
    # Thin delegate: the tasks endpoint module does the actual API call.
    return tasks_endpoint.get_tasks(self, list_id, completed=completed)
|
def set_env(self, arguments):
    """Setup the environment variables for the process.

    :param arguments: mapping of environment variables, convertible to a
        zhash via ``zhash_p.from_param``.
    :return: the return code of the underlying ``zproc_set_env`` call.
    """
    # Convert the Python mapping into the native zhash expected by CZMQ.
    env_hash = zhash_p.from_param(arguments)
    return lib.zproc_set_env(self._as_parameter_, byref(env_hash))
|
def frommembers(cls, members):
    """Series from iterable of member iterables."""
    # Convert each member collection into a bitset lazily, then build the series.
    bitsets = (cls.BitSet.frommembers(m) for m in members)
    return cls.frombitsets(bitsets)
|
def parse_pseudo_class(self, sel, m, has_selector, iselector, is_html):
    """Parse pseudo class.

    Dispatches on the pseudo-class name found in match ``m`` and mutates the
    selector under construction ``sel`` accordingly (flags, sub-selectors, or
    nth entries), then reports whether a selector part was consumed.
    """
    complex_pseudo = False
    pseudo = util.lower(css_unescape(m.group('name')))
    # An opening parenthesis marks a functional (complex) pseudo-class.
    if m.group('open'):
        complex_pseudo = True
    if complex_pseudo and pseudo in PSEUDO_COMPLEX:
        # Functional pseudo-classes carry their own selector list; hand off
        # to the open-parenthesis parser which consumes up to the close.
        has_selector = self.parse_pseudo_open(sel, pseudo, has_selector, iselector, m.end(0))
    elif not complex_pseudo and pseudo in PSEUDO_SIMPLE:
        # Structural flags set bits on the selector.
        if pseudo == ':root':
            sel.flags |= ct.SEL_ROOT
        elif pseudo == ':defined':
            sel.flags |= ct.SEL_DEFINED
            is_html = True
        elif pseudo == ':scope':
            sel.flags |= ct.SEL_SCOPE
        elif pseudo == ':empty':
            sel.flags |= ct.SEL_EMPTY
        # State pseudo-classes append a precompiled sub-selector pattern.
        elif pseudo in (':link', ':any-link'):
            sel.selectors.append(CSS_LINK)
        elif pseudo == ':checked':
            sel.selectors.append(CSS_CHECKED)
        elif pseudo == ':default':
            sel.selectors.append(CSS_DEFAULT)
        elif pseudo == ':indeterminate':
            sel.selectors.append(CSS_INDETERMINATE)
        elif pseudo == ":disabled":
            sel.selectors.append(CSS_DISABLED)
        elif pseudo == ":enabled":
            sel.selectors.append(CSS_ENABLED)
        elif pseudo == ":required":
            sel.selectors.append(CSS_REQUIRED)
        elif pseudo == ":optional":
            sel.selectors.append(CSS_OPTIONAL)
        elif pseudo == ":read-only":
            sel.selectors.append(CSS_READ_ONLY)
        elif pseudo == ":read-write":
            sel.selectors.append(CSS_READ_WRITE)
        elif pseudo == ":in-range":
            sel.selectors.append(CSS_IN_RANGE)
        elif pseudo == ":out-of-range":
            sel.selectors.append(CSS_OUT_OF_RANGE)
        elif pseudo == ":placeholder-shown":
            sel.selectors.append(CSS_PLACEHOLDER_SHOWN)
        # Positional pseudo-classes are expressed as nth entries
        # (a, a_sign, b, of_type, last, selector_list).
        elif pseudo == ':first-child':
            sel.nth.append(ct.SelectorNth(1, False, 0, False, False, ct.SelectorList()))
        elif pseudo == ':last-child':
            sel.nth.append(ct.SelectorNth(1, False, 0, False, True, ct.SelectorList()))
        elif pseudo == ':first-of-type':
            sel.nth.append(ct.SelectorNth(1, False, 0, True, False, ct.SelectorList()))
        elif pseudo == ':last-of-type':
            sel.nth.append(ct.SelectorNth(1, False, 0, True, True, ct.SelectorList()))
        # ':only-*' is 'first' AND 'last' simultaneously, hence two entries.
        elif pseudo == ':only-child':
            sel.nth.extend([ct.SelectorNth(1, False, 0, False, False, ct.SelectorList()), ct.SelectorNth(1, False, 0, False, True, ct.SelectorList())])
        elif pseudo == ':only-of-type':
            sel.nth.extend([ct.SelectorNth(1, False, 0, True, False, ct.SelectorList()), ct.SelectorNth(1, False, 0, True, True, ct.SelectorList())])
        has_selector = True
    elif complex_pseudo and pseudo in PSEUDO_COMPLEX_NO_MATCH:
        # Recognized functional pseudo-class that can never match: still parse
        # its argument for syntax validity, then mark the selector dead.
        self.parse_selectors(iselector, m.end(0), FLG_PSEUDO | FLG_OPEN)
        sel.no_match = True
        has_selector = True
    elif not complex_pseudo and pseudo in PSEUDO_SIMPLE_NO_MATCH:
        # Recognized simple pseudo-class that can never match in this context.
        sel.no_match = True
        has_selector = True
    elif pseudo in PSEUDO_SUPPORTED:
        # Known pseudo-class used with the wrong (non-)functional form.
        raise SelectorSyntaxError("Invalid syntax for pseudo class '{}'".format(pseudo), self.pattern, m.start(0))
    else:
        raise NotImplementedError("'{}' pseudo-class is not implemented at this time".format(pseudo))
    return has_selector, is_html
|
def get_flux_biases_from_cache(cur, chains, system_name, chain_strength, max_age=3600):
    """Determine the flux biases for all of the the given chains, system and chain strength.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement.
        chains (iterable):
            An iterable of chains. Each chain is a collection of nodes. Chains in embedding act as
            one node.
        system_name (str):
            The unique name of a system.
        chain_strength (float):
            The magnitude of the negative quadratic bias that induces the given chain in an Ising
            problem.
        max_age (int, optional, default=3600):
            The maximum age (in seconds) for the flux_bias offsets.

    Returns:
        dict: A dict where the keys are the nodes in the chains and the values are the flux biases.

    Raises:
        MissingFluxBias: If any chain has no cached flux bias row.
    """
    select = """
    SELECT
        flux_bias
    FROM flux_bias_view WHERE
        chain_length = :chain_length AND
        nodes = :nodes AND
        chain_strength = :chain_strength AND
        system_name = :system_name AND
        insert_time >= :time_limit;
    """
    # Parameters shared by every per-chain query; only rows newer than
    # max_age seconds are accepted.
    params = {
        'chain_strength': _encode_real(chain_strength),
        'system_name': system_name,
        'time_limit': datetime.datetime.now() - datetime.timedelta(seconds=max_age),
    }
    flux_biases = {}
    for chain in chains:
        params['chain_length'] = len(chain)
        # Canonical node representation: sorted, compact JSON.
        params['nodes'] = json.dumps(sorted(chain), separators=(',', ':'))
        row = cur.execute(select, params).fetchone()
        if row is None:
            raise MissingFluxBias
        bias = _decode_real(*row)
        # A zero bias contributes nothing, so skip it.
        if bias != 0:
            for node in chain:
                flux_biases[node] = bias
    return flux_biases
|
def from_logits(behaviour_policy_logits, target_policy_logits, actions, discounts, rewards, values, bootstrap_value, clip_rho_threshold=1.0, clip_pg_rho_threshold=1.0, name="vtrace_from_logits"):
    """multi_from_logits wrapper used only for tests"""
    # Wrap the single policy's tensors in lists so the multi-policy
    # implementation can be reused unchanged.
    multi_res = multi_from_logits(
        [behaviour_policy_logits], [target_policy_logits], [actions],
        discounts, rewards, values, bootstrap_value,
        clip_rho_threshold=clip_rho_threshold,
        clip_pg_rho_threshold=clip_pg_rho_threshold,
        name=name)
    # Squeeze away the singleton policy dimension introduced above.
    return VTraceFromLogitsReturns(
        vs=multi_res.vs,
        pg_advantages=multi_res.pg_advantages,
        log_rhos=multi_res.log_rhos,
        behaviour_action_log_probs=tf.squeeze(multi_res.behaviour_action_log_probs, axis=0),
        target_action_log_probs=tf.squeeze(multi_res.target_action_log_probs, axis=0),
    )
|
def archive_path(self, real_path):
    """Return the archive path for file with real_path.

    Mapping is based on removal of self.path_prefix which is determined
    by self.check_files().
    """
    prefix = self.path_prefix
    # No prefix recorded: archive paths are the real paths unchanged.
    if prefix:
        return os.path.relpath(real_path, prefix)
    return real_path
|
def update_model_dict(self):
    """Updates the model dictionary"""
    # Rebuild the name -> id mapping from the currently open Chimera models.
    open_models = self.chimera.openModels
    self.model_dict = {model.name: model.id for model in open_models.list()}
|
def vb_xpcom_to_attribute_dict(xpcom, interface_name=None, attributes=None, excluded_attributes=None, extra_attributes=None):
    '''Attempts to build a dict from an XPCOM object.

    Attributes that don't exist in the object return an empty string.
    attribute_list = list of str or tuple(str, <a class>)
        e.g attributes=[('bad_attribute', list)] --> {'bad_attribute': []}

    @param xpcom:
    @type xpcom:
    @param interface_name: Which interface we will be converting from.
        Without this it's best to specify the list of attributes you want
    @type interface_name: str
    @param attributes: Overrides the attributes used from XPCOM_ATTRIBUTES
    @type attributes: attribute_list
    @param excluded_attributes: Which should be excluded in the returned dict.
        !! These take precedence over extra_attributes !!
    @type excluded_attributes: attribute_list
    @param extra_attributes: Which should be retrieved in addition those already being retrieved
    @type extra_attributes: attribute_list
    @return:
    @rtype: dict'''
    # Check the interface
    if interface_name:
        match = re.search(r'XPCOM.+implementing {0}'.format(interface_name), six.text_type(xpcom))
        if not match:
            # TODO maybe raise error here?
            log.warning('Interface %s is unknown and cannot be converted to dict', interface_name)
            return dict()
    # Start from the explicit attribute list when given, otherwise the
    # registry for this interface; then apply additions before exclusions
    # so that exclusions always win.
    wanted = set(attributes or XPCOM_ATTRIBUTES.get(interface_name, []))
    if extra_attributes:
        wanted = wanted.union(extra_attributes)
    if excluded_attributes:
        wanted = wanted.difference(excluded_attributes)
    result = {}
    for attr in wanted:
        if isinstance(attr, tuple):
            # (name, default-factory) pair: missing attributes get factory().
            name, default_cls = attr[0], attr[1]
            result[name] = getattr(xpcom, name, default_cls())
        else:
            # Plain name: missing attributes default to an empty string.
            result[attr] = getattr(xpcom, attr, '')
    return result
|
async def set_room_temperatures(self, room_id, sleep_temp=None, comfort_temp=None, away_temp=None):
    """Set one or more temperature setpoints for a room.

    :param room_id: identifier of the room to update.
    :param sleep_temp: new sleep setpoint, or None to keep the current one.
    :param comfort_temp: new comfort setpoint, or None to keep the current one.
    :param away_temp: new away setpoint, or None to keep the current one.
    """
    if sleep_temp is None and comfort_temp is None and away_temp is None:
        return
    room = self.rooms.get(room_id)
    if room is None:
        _LOGGER.error("No such device")
        return
    # BUG FIX: use explicit None checks so a legitimate setpoint of 0 is not
    # silently ignored (the previous truthiness test dropped zero values,
    # inconsistent with the `is None` guard above).
    if sleep_temp is not None:
        room.sleep_temp = sleep_temp
    if away_temp is not None:
        room.away_temp = away_temp
    if comfort_temp is not None:
        room.comfort_temp = comfort_temp
    payload = {"roomId": room_id, "sleepTemp": room.sleep_temp, "comfortTemp": room.comfort_temp, "awayTemp": room.away_temp, "homeType": 0}
    await self.request("changeRoomModeTempInfo", payload)
    self.rooms[room_id] = room
|
def get_input_stream(environ, safe_fallback=True):
    """Returns the input stream from the WSGI environment and wraps it
    in the most sensible way possible.  The stream returned is not the
    raw WSGI stream in most cases but one that is safe to read from
    without taking into account the content length.

    .. versionadded:: 0.9

    :param environ: the WSGI environ to fetch the stream from.
    :param safe_fallback: indicates whether the function should use an
                          empty stream as safe fallback or just return
                          the original WSGI input stream if it can't
                          wrap it safely.  The default is to return an
                          empty stream in those cases.
    """
    stream = environ['wsgi.input']
    content_length = get_content_length(environ)

    # A wsgi extension that tells us if the input is terminated.  In
    # that case we return the stream unchanged as we know we can safely
    # read it until the end.
    if environ.get('wsgi.input_terminated'):
        return stream

    # If we don't have a content length we fall back to an empty stream
    # in case of a safe fallback, otherwise we return the stream unchanged.
    # The non-safe fallback is not recommended but might be useful in
    # some situations.
    if content_length is None:
        # Conditional expression replaces the old `and ... or ...` idiom,
        # which would have returned the wrong object if the fallback were
        # ever falsy.
        return _empty_stream if safe_fallback else stream

    # Otherwise limit the stream to the content length
    return LimitedStream(stream, content_length)
|
def __display_stats(self):
    """Display some stats regarding unify process"""
    # Derive the 'unified' count from totals so the three figures stay consistent.
    processed_count = self.total
    matched_count = self.matched
    self.display('unify.tmpl', processed=processed_count, matched=matched_count, unified=processed_count - matched_count)
|
def patch_namespaced_controller_revision(self, name, namespace, body, **kwargs):  # noqa: E501
    """patch_namespaced_controller_revision  # noqa: E501

    partially update the specified ControllerRevision  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_namespaced_controller_revision(name, namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the ControllerRevision (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param UNKNOWN_BASE_TYPE body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1ControllerRevision
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async and sync paths delegate to the *_with_http_info variant;
    # with _return_http_data_only set, its return value (a thread for
    # async_req=True, otherwise the response data) is exactly what we return.
    return self.patch_namespaced_controller_revision_with_http_info(name, namespace, body, **kwargs)  # noqa: E501
|
def transform(self, maps):
    """Transform component masses and cartesian spins to chi_p.

    Parameters
    ----------
    maps : a mapping object
        Mapping of parameter names to values (numpy.array or float).

    Returns
    -------
    out : dict
        A dict with key as parameter name and value as numpy.array or float
        of transformed values.
    """
    # Precession parameter chi_p is computed directly from the component
    # masses and the in-plane spin components.
    chi_p_values = conversions.chi_p(
        maps[parameters.mass1], maps[parameters.mass2],
        maps[parameters.spin1x], maps[parameters.spin1y],
        maps[parameters.spin2x], maps[parameters.spin2y])
    return self.format_output(maps, {"chi_p": chi_p_values})
|
def condense(args):
    """%prog condense OM.bed

    Merge split alignments in OM bed.
    """
    from itertools import groupby
    from jcvi.assembly.patch import merge_ranges

    p = OptionParser(condense.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())

    bedfile, = args
    bed = Bed(bedfile, sorted=False)

    def position_key(x):
        # Group records that share the same location exactly.
        return (x.seqid, x.start, x.end)

    for _, group in groupby(bed, key=position_key):
        group = list(group)
        b = group[0]
        # Renamed locals: the original shadowed the builtins `chr` and `id`.
        seqid, start, end, strand = merge_ranges(group)
        b.accn = "{0}:{1}-{2}".format(seqid, start, end)
        print(b)
|
def add_r(self, text=None):
    """Return a newly appended ``<a:r>`` element, optionally with *text* set."""
    run = self._add_r()
    # Only populate the run's text element when non-empty text was supplied.
    if text:
        run.t.text = text
    return run
|
def main():
    """main"""
    args = MainArguments()
    # Select the tool-specific argument parser; fall through with a message
    # for unknown tools.
    tool_name = args.tool.lower()
    if tool_name == "tool1":
        args = Tool1Arguments()
    elif tool_name == "tool2":
        args = Tool2Arguments()
    else:
        print("Unknown tool", args.tool)
    print(args)
|
def open_reader(self, *args, **kwargs):
    """Open the reader to read records from the result of the instance.

    If ``tunnel`` is ``True``, instance tunnel will be used. Otherwise the
    conventional routine will be used. If instance tunnel is not available
    and ``tunnel`` is not specified, the method will fall back to the
    conventional routine.

    Note that the number of records returned is limited unless
    ``options.tunnel.limit_instance_tunnel`` is set to ``False`` or
    ``limit=False`` is configured under instance tunnel mode; under the
    conventional routine the number of records returned is always limited.

    :param tunnel: if true, use instance tunnel to read from the instance.
        if false, use conventional routine.
        if absent, ``options.tunnel.use_instance_tunnel`` will be used and
        automatic fallback is enabled.
    :param reopen: the reader will reuse last one, reopen is true means open a new reader.
    :type reopen: bool
    :param endpoint: the tunnel service URL
    :param compress_option: compression algorithm, level and strategy
    :type compress_option: :class:`odps.tunnel.CompressOption`
    :param compress_algo: compression algorithm, work when ``compress_option`` is not provided,
        can be ``zlib``, ``snappy``
    :param compress_level: used for ``zlib``, work when ``compress_option`` is not provided
    :param compress_strategy: used for ``zlib``, work when ``compress_option`` is not provided
    :return: reader, ``count`` means the full size, ``status`` means the tunnel status

    :Example:

    >>> with instance.open_reader() as reader:
    >>>     count = reader.count  # How many records of a table or its partition
    >>>     for record in reader[0:count]:
    >>>         # read all data, actually better to split into reading for many times
    """
    # `use_tunnel` takes either spelling; remember whether the caller left it
    # unset, because only then is automatic fallback allowed below.
    use_tunnel = kwargs.get('use_tunnel', kwargs.get('tunnel'))
    auto_fallback_result = use_tunnel is None
    if use_tunnel is None:
        use_tunnel = options.tunnel.use_instance_tunnel
    result_fallback_errors = (errors.InvalidProjectTable, errors.InvalidArgument)
    if use_tunnel:
        # for compatibility: 'limit_enabled' is the legacy name of 'limit'
        if 'limit_enabled' in kwargs:
            kwargs['limit'] = kwargs['limit_enabled']
            del kwargs['limit_enabled']
        if 'limit' not in kwargs:
            kwargs['limit'] = options.tunnel.limit_instance_tunnel
        # an explicit limit disables the protected-project fallback below
        auto_fallback_protection = False
        if kwargs['limit'] is None:
            kwargs['limit'] = False
            auto_fallback_protection = True
        try:
            return self._open_tunnel_reader(**kwargs)
        except result_fallback_errors:
            # service version too low to support instance tunnel.
            if not auto_fallback_result:
                raise
            if not kwargs.get('limit'):
                warnings.warn('Instance tunnel not supported, will fallback to '
                              'conventional ways. 10000 records will be limited.')
        except requests.Timeout:
            # tunnel creation timed out, which might be caused by too many files
            # on the service.
            if not auto_fallback_result:
                raise
            if not kwargs.get('limit'):
                warnings.warn('Instance tunnel timed out, will fallback to '
                              'conventional ways. 10000 records will be limited.')
        except (Instance.DownloadSessionCreationError, errors.InstanceTypeNotSupported):
            # this is for DDL sql instances such as `show partitions` which raises
            # InternalServerError when creating download sessions.
            if not auto_fallback_result:
                raise
        except errors.NoPermission:
            # project is protected; retry the tunnel once in limited mode,
            # which is allowed even on protected projects
            if not auto_fallback_protection:
                raise
            if not kwargs.get('limit'):
                warnings.warn('Project under protection, 10000 records will be limited.')
            kwargs['limit'] = True
            return self._open_tunnel_reader(**kwargs)
    # conventional (non-tunnel) routine; reached on fallback or when
    # tunnels are disabled
    return self._open_result_reader(*args, **kwargs)
|
def show(self):
    """Plot the recorded membrane voltages against time.

    Requires the simulation to have been run first (``self.already_run``);
    otherwise a comment is printed and nothing is plotted.  Fix over the
    previous version: ``plt.show()`` is only called when there is actually
    a figure to display, instead of popping up an empty, blocking window
    when the simulation has not been run.
    """
    from matplotlib import pyplot as plt
    if self.already_run:
        # one labelled trace per recorded cell/segment reference
        for ref in self.volts.keys():
            plt.plot(self.t, self.volts[ref], label=ref)
        plt.title("Simulation voltage vs time")
        plt.legend()
        plt.xlabel("Time [ms]")
        plt.ylabel("Voltage [mV]")
        plt.show()
    else:
        pynml.print_comment("First you have to 'go()' the simulation.", True)
|
def _vax_to_ieee_single_float ( data ) :
"""Converts a float in Vax format to IEEE format .
data should be a single string of chars that have been read in from
a binary file . These will be processed 4 at a time into float values .
Thus the total number of byte / chars in the string should be divisible
by 4.
Based on VAX data organization in a byte file , we need to do a bunch of
bitwise operations to separate out the numbers that correspond to the
sign , the exponent and the fraction portions of this floating point
number
role : S EEEEE FFFFF FFFFF FFFFF
bits : 1 2 9 10 32
bytes : byte2 byte1 byte4 byte3"""
|
f = [ ]
nfloat = int ( len ( data ) / 4 )
for i in range ( nfloat ) :
byte2 = data [ 0 + i * 4 ]
byte1 = data [ 1 + i * 4 ]
byte4 = data [ 2 + i * 4 ]
byte3 = data [ 3 + i * 4 ]
# hex 0x80 = binary mask 100000
# hex 0x7f = binary mask 011111
sign = ( byte1 & 0x80 ) >> 7
expon = ( ( byte1 & 0x7f ) << 1 ) + ( ( byte2 & 0x80 ) >> 7 )
fract = ( ( byte2 & 0x7f ) << 16 ) + ( byte3 << 8 ) + byte4
if sign == 0 :
sign_mult = 1.0
else :
sign_mult = - 1.0
if 0 < expon : # note 16777216.0 = = 2 ^ 24
val = sign_mult * ( 0.5 + ( fract / 16777216.0 ) ) * pow ( 2.0 , expon - 128.0 )
f . append ( val )
elif expon == 0 and sign == 0 :
f . append ( 0 )
else :
f . append ( 0 )
# may want to raise an exception here . . .
return f
|
def number_starts_with(string: str) -> bool:
    """Determine whether a string begins with the digit '5'.

    Replaces the previous regex-based check (``re.compile('^5')`` plus an
    in-function ``import re``) with the equivalent, simpler and faster
    ``str.startswith``.

    Args:
        string: The string being checked.

    Returns:
        A boolean value indicating whether the string starts with '5'.

    Examples:
        >>> number_starts_with('5-2345861')
        True
        >>> number_starts_with('6-2345861')
        False
        >>> number_starts_with('78910')
        False
    """
    return string.startswith('5')
|
def add_name_variant(self, name):
    """Record a name variant for the current author.

    :param name: name variant for the current author.
    :type name: string
    """
    self._ensure_field('name', {})
    variants = self.obj['name'].setdefault('name_variants', [])
    variants.append(name)
|
def init_UI(self):
    """Build the user interface for the interpretation editor.

    Constructs and lays out, on ``self.panel``: the search bar, the fit
    logger list control, the display/name/bounds/buttons box sizers, a
    duplicate equal-area plot canvas, and the mean-statistics panel.
    Initial widget values (level, coordinates, mean type, fit list,
    colors) are read from ``self.parent``.
    """
    # set fonts; Windows renders fonts a point larger, so shrink there
    FONT_WEIGHT = 1
    if sys.platform.startswith('win'):
        FONT_WEIGHT = -1
    font1 = wx.Font(9 + FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type)
    font2 = wx.Font(12 + FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type)
    # if you're on mac do some funny stuff to make it look okay
    is_mac = False
    if sys.platform.startswith("darwin"):
        is_mac = True
    # search bar: filters the logger contents on Enter or search button
    self.search_bar = wx.SearchCtrl(self.panel, size=(350 * self.GUI_RESOLUTION, 25), style=wx.TE_PROCESS_ENTER | wx.TE_PROCESS_TAB | wx.TE_NOHIDESEL)
    self.Bind(wx.EVT_TEXT_ENTER, self.on_enter_search_bar, self.search_bar)
    self.Bind(wx.EVT_SEARCHCTRL_SEARCH_BTN, self.on_enter_search_bar, self.search_bar)
    self.search_bar.SetHelpText(dieh.search_help)
    # self.Bind(wx.EVT_TEXT, self.on_complete_search_bar, self.search_bar)
    # build logger: one row per fit, with its statistics in the columns below
    self.logger = wx.ListCtrl(self.panel, -1, size=(100 * self.GUI_RESOLUTION, 475 * self.GUI_RESOLUTION), style=wx.LC_REPORT)
    self.logger.SetFont(font1)
    self.logger.InsertColumn(0, 'specimen', width=75 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(1, 'fit name', width=65 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(2, 'max', width=55 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(3, 'min', width=55 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(4, 'n', width=25 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(5, 'fit type', width=60 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(6, 'dec', width=45 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(7, 'inc', width=45 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(8, 'mad', width=45 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(9, 'dang', width=45 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(10, 'a95', width=45 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(11, 'K', width=45 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(12, 'R', width=45 * self.GUI_RESOLUTION)
    self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnClick_listctrl, self.logger)
    self.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK, self.OnRightClickListctrl, self.logger)
    self.logger.SetHelpText(dieh.logger_help)
    # set fit attributes boxsizers
    self.display_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "display options"), wx.HORIZONTAL)
    self.name_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "fit name/color"), wx.VERTICAL)
    self.bounds_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "fit bounds"), wx.VERTICAL)
    self.buttons_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY), wx.VERTICAL)
    # logger display selection box; choices depend on the parent's level
    UPPER_LEVEL = self.parent.level_box.GetValue()
    if UPPER_LEVEL == 'sample':
        name_choices = self.parent.samples
    if UPPER_LEVEL == 'site':
        name_choices = self.parent.sites
    if UPPER_LEVEL == 'location':
        name_choices = self.parent.locations
    if UPPER_LEVEL == 'study':
        name_choices = ['this study']
    self.level_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=UPPER_LEVEL, choices=['sample', 'site', 'location', 'study'], style=wx.CB_DROPDOWN | wx.TE_READONLY)
    self.Bind(wx.EVT_COMBOBOX, self.on_select_high_level, self.level_box)
    self.level_box.SetHelpText(dieh.level_box_help)
    self.level_names = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=self.parent.level_names.GetValue(), choices=name_choices, style=wx.CB_DROPDOWN | wx.TE_READONLY)
    self.Bind(wx.EVT_COMBOBOX, self.on_select_level_name, self.level_names)
    self.level_names.SetHelpText(dieh.level_names_help)
    # mean type and plot display boxes
    self.mean_type_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=self.parent.mean_type_box.GetValue(), choices=['Fisher', 'Fisher by polarity', 'None'], style=wx.CB_DROPDOWN | wx.TE_READONLY, name="high_type")
    self.Bind(wx.EVT_COMBOBOX, self.on_select_mean_type_box, self.mean_type_box)
    self.mean_type_box.SetHelpText(dieh.mean_type_help)
    self.mean_fit_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=self.parent.mean_fit, choices=(['None', 'All'] + self.parent.fit_list), style=wx.CB_DROPDOWN | wx.TE_READONLY, name="high_type")
    self.Bind(wx.EVT_COMBOBOX, self.on_select_mean_fit_box, self.mean_fit_box)
    self.mean_fit_box.SetHelpText(dieh.mean_fit_help)
    # show box: which element granularity to display, constrained by level
    if UPPER_LEVEL == "study" or UPPER_LEVEL == "location":
        show_box_choices = ['specimens', 'samples', 'sites']
    if UPPER_LEVEL == "site":
        show_box_choices = ['specimens', 'samples']
    if UPPER_LEVEL == "sample":
        show_box_choices = ['specimens']
    self.show_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value='specimens', choices=show_box_choices, style=wx.CB_DROPDOWN | wx.TE_READONLY, name="high_elements")
    self.Bind(wx.EVT_COMBOBOX, self.on_select_show_box, self.show_box)
    self.show_box.SetHelpText(dieh.show_help)
    # coordinates box
    self.coordinates_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), choices=self.parent.coordinate_list, value=self.parent.coordinates_box.GetValue(), style=wx.CB_DROPDOWN | wx.TE_READONLY, name="coordinates")
    self.Bind(wx.EVT_COMBOBOX, self.on_select_coordinates, self.coordinates_box)
    self.coordinates_box.SetHelpText(dieh.coordinates_box_help)
    # bounds select boxes
    self.tmin_box = wx.ComboBox(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), choices=[''] + self.parent.T_list, style=wx.CB_DROPDOWN | wx.TE_READONLY, name="lower bound")
    self.tmin_box.SetHelpText(dieh.tmin_box_help)
    self.tmax_box = wx.ComboBox(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), choices=[''] + self.parent.T_list, style=wx.CB_DROPDOWN | wx.TE_READONLY, name="upper bound")
    self.tmax_box.SetHelpText(dieh.tmax_box_help)
    # color box; Enter adds a new user-defined color via add_new_color
    self.color_dict = self.parent.color_dict
    self.color_box = wx.ComboBox(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), choices=[''] + sorted(self.color_dict.keys()), style=wx.CB_DROPDOWN | wx.TE_PROCESS_ENTER, name="color")
    self.Bind(wx.EVT_TEXT_ENTER, self.add_new_color, self.color_box)
    self.color_box.SetHelpText(dieh.color_box_help)
    # name box
    self.name_box = wx.TextCtrl(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), name="name")
    self.name_box.SetHelpText(dieh.name_box_help)
    # more mac stuff: buttons need different sizing/spacing on macOS
    h_size_buttons, button_spacing = 25, 5.5
    if is_mac:
        h_size_buttons, button_spacing = 18, 0.
    # buttons
    self.add_all_button = wx.Button(self.panel, id=-1, label='add new fit to all specimens', size=(160 * self.GUI_RESOLUTION, h_size_buttons))
    self.add_all_button.SetFont(font1)
    self.Bind(wx.EVT_BUTTON, self.add_fit_to_all, self.add_all_button)
    self.add_all_button.SetHelpText(dieh.add_all_help)
    self.add_fit_button = wx.Button(self.panel, id=-1, label='add fit to highlighted specimens', size=(160 * self.GUI_RESOLUTION, h_size_buttons))
    self.add_fit_button.SetFont(font1)
    self.Bind(wx.EVT_BUTTON, self.add_highlighted_fits, self.add_fit_button)
    self.add_fit_button.SetHelpText(dieh.add_fit_btn_help)
    self.delete_fit_button = wx.Button(self.panel, id=-1, label='delete highlighted fits', size=(160 * self.GUI_RESOLUTION, h_size_buttons))
    self.delete_fit_button.SetFont(font1)
    self.Bind(wx.EVT_BUTTON, self.delete_highlighted_fits, self.delete_fit_button)
    self.delete_fit_button.SetHelpText(dieh.delete_fit_btn_help)
    self.apply_changes_button = wx.Button(self.panel, id=-1, label='apply changes to highlighted fits', size=(160 * self.GUI_RESOLUTION, h_size_buttons))
    self.apply_changes_button.SetFont(font1)
    self.Bind(wx.EVT_BUTTON, self.apply_changes, self.apply_changes_button)
    self.apply_changes_button.SetHelpText(dieh.apply_changes_help)
    # windows: grids that pair up the widgets inside each static box
    display_window_0 = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION)
    display_window_1 = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION)
    display_window_2 = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION)
    name_window = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION)
    bounds_window = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION)
    buttons1_window = wx.GridSizer(4, 1, 5 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION)
    display_window_0.AddMany([(self.coordinates_box, wx.ALIGN_LEFT), (self.show_box, wx.ALIGN_LEFT)])
    display_window_1.AddMany([(self.level_box, wx.ALIGN_LEFT), (self.level_names, wx.ALIGN_LEFT)])
    display_window_2.AddMany([(self.mean_type_box, wx.ALIGN_LEFT), (self.mean_fit_box, wx.ALIGN_LEFT)])
    name_window.AddMany([(self.name_box, wx.ALIGN_LEFT), (self.color_box, wx.ALIGN_LEFT)])
    bounds_window.AddMany([(self.tmin_box, wx.ALIGN_LEFT), (self.tmax_box, wx.ALIGN_LEFT)])
    buttons1_window.AddMany([(self.add_fit_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0), (self.add_all_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0), (self.delete_fit_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0), (self.apply_changes_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0)])
    self.display_sizer.Add(display_window_0, 1, wx.TOP | wx.EXPAND, 8)
    self.display_sizer.Add(display_window_1, 1, wx.TOP | wx.LEFT | wx.EXPAND, 8)
    self.display_sizer.Add(display_window_2, 1, wx.TOP | wx.LEFT | wx.EXPAND, 8)
    self.name_sizer.Add(name_window, 1, wx.TOP, 5.5)
    self.bounds_sizer.Add(bounds_window, 1, wx.TOP, 5.5)
    self.buttons_sizer.Add(buttons1_window, 1, wx.TOP, 0)
    # duplicate high levels plot: an equal-area net with zoom/pan bindings
    self.fig = Figure((2.5 * self.GUI_RESOLUTION, 2.5 * self.GUI_RESOLUTION), dpi=100)
    self.canvas = FigCanvas(self.panel, -1, self.fig, )
    self.toolbar = NavigationToolbar(self.canvas)
    self.toolbar.Hide()
    self.toolbar.zoom()
    self.high_EA_setting = "Zoom"
    self.canvas.Bind(wx.EVT_LEFT_DCLICK, self.on_equalarea_high_select)
    self.canvas.Bind(wx.EVT_MOTION, self.on_change_high_mouse_cursor)
    self.canvas.Bind(wx.EVT_MIDDLE_DOWN, self.home_high_equalarea)
    self.canvas.Bind(wx.EVT_RIGHT_DOWN, self.pan_zoom_high_equalarea)
    self.canvas.SetHelpText(dieh.eqarea_help)
    self.eqarea = self.fig.add_subplot(111)
    draw_net(self.eqarea)
    # Higher Level Statistics Box: one labelled read-only text window per
    # statistic, generated dynamically via exec on the parameter name
    self.stats_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "mean statistics"), wx.VERTICAL)
    for parameter in ['mean_type', 'dec', 'inc', 'alpha95', 'K', 'R', 'n_lines', 'n_planes']:
        COMMAND = "self.%s_window=wx.TextCtrl(self.panel,style=wx.TE_CENTER|wx.TE_READONLY,size=(100*self.GUI_RESOLUTION,25))" % parameter
        exec(COMMAND)
        COMMAND = "self.%s_window.SetBackgroundColour(wx.WHITE)" % parameter
        exec(COMMAND)
        COMMAND = "self.%s_window.SetFont(font2)" % parameter
        exec(COMMAND)
        COMMAND = "self.%s_outer_window = wx.GridSizer(1,2,5*self.GUI_RESOLUTION,15*self.GUI_RESOLUTION)" % parameter
        exec(COMMAND)
        COMMAND = """self.%s_outer_window.AddMany([
            (wx.StaticText(self.panel,label='%s',style=wx.TE_CENTER),wx.EXPAND),
            (self.%s_window, wx.EXPAND)])""" % (parameter, parameter, parameter)
        exec(COMMAND)
        COMMAND = "self.stats_sizer.Add(self.%s_outer_window, 1, wx.ALIGN_LEFT|wx.EXPAND, 0)" % parameter
        exec(COMMAND)
    self.switch_stats_button = wx.SpinButton(self.panel, id=wx.ID_ANY, style=wx.SP_HORIZONTAL | wx.SP_ARROW_KEYS | wx.SP_WRAP, name="change stats")
    self.Bind(wx.EVT_SPIN, self.on_select_stats_button, self.switch_stats_button)
    self.switch_stats_button.SetHelpText(dieh.switch_stats_btn_help)
    # construct panel: nest the box sizers and fit everything to the frame
    hbox0 = wx.BoxSizer(wx.HORIZONTAL)
    hbox0.Add(self.name_sizer, flag=wx.ALIGN_TOP | wx.EXPAND, border=8)
    hbox0.Add(self.bounds_sizer, flag=wx.ALIGN_TOP | wx.EXPAND, border=8)
    vbox0 = wx.BoxSizer(wx.VERTICAL)
    vbox0.Add(hbox0, flag=wx.ALIGN_TOP, border=8)
    vbox0.Add(self.buttons_sizer, flag=wx.ALIGN_TOP, border=8)
    hbox1 = wx.BoxSizer(wx.HORIZONTAL)
    hbox1.Add(vbox0, flag=wx.ALIGN_TOP, border=8)
    hbox1.Add(self.stats_sizer, flag=wx.ALIGN_TOP, border=8)
    hbox1.Add(self.switch_stats_button, flag=wx.ALIGN_TOP | wx.EXPAND, border=8)
    vbox1 = wx.BoxSizer(wx.VERTICAL)
    vbox1.Add(self.display_sizer, flag=wx.ALIGN_TOP, border=8)
    vbox1.Add(hbox1, flag=wx.ALIGN_TOP, border=8)
    vbox1.Add(self.canvas, proportion=1, flag=wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL | wx.EXPAND, border=8)
    vbox2 = wx.BoxSizer(wx.VERTICAL)
    vbox2.Add(self.search_bar, proportion=.5, flag=wx.ALIGN_LEFT | wx.ALIGN_BOTTOM | wx.EXPAND, border=8)
    vbox2.Add(self.logger, proportion=1, flag=wx.ALIGN_LEFT | wx.EXPAND, border=8)
    hbox2 = wx.BoxSizer(wx.HORIZONTAL)
    hbox2.Add(vbox2, proportion=1, flag=wx.ALIGN_LEFT | wx.EXPAND)
    hbox2.Add(vbox1, flag=wx.ALIGN_TOP | wx.EXPAND)
    self.panel.SetSizerAndFit(hbox2)
    hbox2.Fit(self)
|
def nearest_multiple(number, divisor):
    """Round a number to the closest multiple of a specified number.

    Ties (exactly halfway between two multiples) round down.

    Parameters:
        number: The number to be rounded.
        divisor: The number whose multiple is used for rounding.

    Returns:
        int: The rounded number.

    Examples:
        >>> nearest_multiple(4722, 10)
        4720
        >>> nearest_multiple(1111, 5)
        1110
        >>> nearest_multiple(219, 2)
        218
    """
    below = number - number % divisor
    above = below + divisor
    # prefer the lower multiple unless the upper one is strictly closer
    return above if (number - below) > (above - number) else below
|
def emulate_repeat(self, value, timeval):
    """Emulate the repeat press of a key/mouse button, e.g. a double click.

    :param value: value carried by the repeat event.
    :param timeval: timestamp for the event.
    :return: the constructed "Repeat" event object.
    """
    return self.create_event_object("Repeat", 2, value, timeval)
|
def get_service_from_display_name(displayName):
    """Get the service unique name given its display name.

    @see: L{get_service}

    @type displayName: str
    @param displayName: Service display name. You can get this value from
        the C{DisplayName} member of the service descriptors returned by
        L{get_services} or L{get_active_services}.

    @rtype: str
    @return: Service unique name.
    """
    access = win32.SC_MANAGER_ENUMERATE_SERVICE
    with win32.OpenSCManager(dwDesiredAccess=access) as hSCManager:
        return win32.GetServiceKeyName(hSCManager, displayName)
|
def remove(self, key):
    '''Remove key from the namespace; removing an absent key is a no-op.'''
    # keys are stored JSON-encoded, matching how they were written
    statement = 'DELETE FROM ' + self.table + ' WHERE name = %s'
    with self.connect() as conn:
        with doTransaction(conn):
            return executeSQL(conn, statement, args=[json.dumps(key)])
|
def calculate_imf_steadiness(inst, steady_window=15, min_window_frac=0.75, max_clock_angle_std=90.0 / np.pi, max_bmag_cv=0.5):
    """Calculate IMF steadiness using clock angle standard deviation and
    the coefficient of variation of the IMF magnitude in the GSM Y-Z plane.

    Adds 'BYZ_CV', 'clock_angle_std' and 'IMF_Steady' series to ``inst``.

    Parameters
    ----------
    inst : pysat.Instrument
        Instrument with OMNI HRO data
    steady_window : int
        Window for calculating running statistical moments in min (default=15)
    min_window_frac : float
        Minimum fraction of points in a window for steadiness to be calculated
        (default=0.75)
    max_clock_angle_std : float
        Maximum standard deviation of the clock angle in degrees
        (default=90.0/np.pi, i.e. about 28.6)
    max_bmag_cv : float
        Maximum coefficient of variation of the IMF magnitude in the GSM
        Y-Z plane (default=0.5)
    """
    # We are not going to interpolate through missing values
    sample_rate = int(inst.tag[0])
    max_wnum = np.floor(steady_window / sample_rate)
    if max_wnum != steady_window / sample_rate:
        # shrink the window to a whole number of samples
        steady_window = max_wnum * sample_rate
        print("WARNING: sample rate is not a factor of the statistical window")
        print("new statistical window is {:.1f}".format(steady_window))
    min_wnum = int(np.ceil(max_wnum * min_window_frac))
    # Calculate the running coefficient of variation of the BYZ magnitude
    byz_mean = inst['BYZ_GSM'].rolling(min_periods=min_wnum, center=True, window=steady_window).mean()
    byz_std = inst['BYZ_GSM'].rolling(min_periods=min_wnum, center=True, window=steady_window).std()
    inst['BYZ_CV'] = pds.Series(byz_std / byz_mean, index=inst.data.index)
    # Calculate the running circular standard deviation of the clock angle
    circ_kwargs = {'high': 360.0, 'low': 0.0}
    # NOTE(review): `ca` (the NaN-filtered clock angle) is computed but never
    # used below; the rolling std is taken over the full series — confirm
    # whether the filtered series was meant to be used instead.
    ca = inst['clock_angle'][~np.isnan(inst['clock_angle'])]
    ca_std = inst['clock_angle'].rolling(min_periods=min_wnum, window=steady_window, center=True).apply(pysat.utils.nan_circstd, kwargs=circ_kwargs)
    inst['clock_angle_std'] = pds.Series(ca_std, index=inst.data.index)
    # Determine how long the clock angle and IMF magnitude are steady;
    # IMF_Steady accumulates the steady duration in minutes
    imf_steady = np.zeros(shape=inst.data.index.shape)
    steady = False
    for i, cv in enumerate(inst.data['BYZ_CV']):
        if steady:
            del_min = int((inst.data.index[i] - inst.data.index[i - 1]).total_seconds() / 60.0)
            if np.isnan(cv) or np.isnan(ca_std[i]) or del_min > sample_rate:
                # Reset the steadiness flag if fill values are encountered, or
                # if an entry is missing
                steady = False
        # NOTE(review): `steady` is only cleared on fill/missing data above,
        # never when the thresholds below fail — verify this is intended.
        if cv <= max_bmag_cv and ca_std[i] <= max_clock_angle_std:
            # Steadiness conditions have been met; extend the running total
            if steady:
                imf_steady[i] = imf_steady[i - 1]
            imf_steady[i] += sample_rate
            steady = True
    inst['IMF_Steady'] = pds.Series(imf_steady, index=inst.data.index)
    return
|
def image(self):
    """Generate the image with ``self.genImage()``, rotate it to
    ``self.direction``, cache it on ``self._image`` and return it."""
    rendered = self.genImage()
    self._image = funcs.rotateImage(rendered, self.direction)
    return self._image
|
def sum(self, axis=None, dtype=None, out=None, keepdims=False):
    """Return the sum of ``self``.

    Delegates to the wrapped element's ``__array_ufunc__`` with an
    ``np.add`` reduction.

    See Also
    --------
    numpy.sum
    prod
    """
    # the __array_ufunc__ protocol expects `out` as a tuple
    reduce_kwargs = dict(axis=axis, dtype=dtype, out=(out,), keepdims=keepdims)
    return self.elem.__array_ufunc__(np.add, 'reduce', self.elem, **reduce_kwargs)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.