signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def _set_system_qos(self, v, load=False):
    """Setter method for system_qos, mapped from YANG variable /system_qos (container).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_system_qos is considered a private method. Backends looking to
    populate this variable should do so by calling thisObj._set_system_qos()
    directly.

    NOTE(review): auto-generated pyangbind accessor — keep in sync with the
    YANG model rather than editing by hand.
    """
    # Coerce the value through its declared YANG type first, if it has one.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=system_qos.system_qos, is_container='container', presence=False, yang_name="system-qos", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'70'}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the generated type description so callers can see
        # what constraint was violated.
        raise ValueError({'error-string': """system_qos must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=system_qos.system_qos, is_container='container', presence=False, yang_name="system-qos", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'70'}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='container', is_config=True)""", })
    self.__system_qos = t
    # Notify the parent object (if any) that the configuration changed.
    if hasattr(self, '_set'):
        self._set()
def _set_token_expiration_time ( self , expires_in ) :
"""Saves the token expiration time by adding the ' expires in ' parameter
to the current datetime ( in utc ) .
Parameters
expires _ in : int
number of seconds from the time of the request until expiration
Returns
nothing
saves expiration time in self . token _ expiration _ time as
datetime . datetime""" | self . token_expiration_time = dt . datetime . utcnow ( ) + dt . timedelta ( 0 , expires_in ) |
def _create_controller_info_record(self, controller_module_name):
    """Creates controller info record for a particular controller type.

    Info is retrieved from all the controller objects spawned from the
    specified module, using the controller module's `get_info` function.

    Args:
        controller_module_name: string, the name of the controller module
            to retrieve info from.

    Returns:
        A records.ControllerInfoRecord object.
    """
    module = self._controller_modules[controller_module_name]
    controller_info = None
    # `get_info` is an optional hook on controller modules; a shallow copy
    # of the object list is passed so the hook cannot mutate our bookkeeping.
    try:
        controller_info = module.get_info(copy.copy(self._controller_objects[controller_module_name]))
    except AttributeError:
        logging.warning('No optional debug info found for controller ' '%s. To provide it, implement `get_info`.', controller_module_name)
    # The record is later serialized to YAML; coerce anything YAML cannot
    # represent into a plain string up front.
    try:
        yaml.dump(controller_info)
    except TypeError:
        logging.warning('The info of controller %s in class "%s" is not ' 'YAML serializable! Coercing it to string.', controller_module_name, self._class_name)
        controller_info = str(controller_info)
    return records.ControllerInfoRecord(self._class_name, module.MOBLY_CONTROLLER_CONFIG_NAME, controller_info)
def _meco_frequency(m1, m2, chi1, chi2):
    """Return the frequency of the minimum energy cutoff for 3.5pN (2.5pN spin)."""
    v_meco = meco_velocity(m1, m2, chi1, chi2)
    total_mass = m1 + m2
    return velocity_to_frequency(v_meco, total_mass)
def run_job_async(self, tgt, fun, arg=(), tgt_type='glob', ret='', timeout=None, jid='', kwarg=None, listen=True, io_loop=None, **kwargs):
    '''Asynchronously send a command to connected minions.

    Prep the job directory and publish a command to any targeted minions.

    :return: A dictionary of (validated) ``pub_data`` or an empty
        dictionary on failure. The ``pub_data`` contains the job ID and a
        list of all minions that are expected to return data.

    .. code-block:: python

        >>> local.run_job_async('*', 'test.sleep', [300])
        {'jid': '20131219215650131543', 'minions': ['jerry']}
    '''
    # Normalize positional args plus the kwarg dict into the wire format.
    arg = salt.utils.args.parse_input(arg, kwargs=kwarg)
    try:
        # Tornado coroutine: pub_async yields a future with the publish result.
        pub_data = yield self.pub_async(tgt, fun, arg, tgt_type, ret, jid=jid, timeout=self._get_timeout(timeout), io_loop=io_loop, listen=listen, **kwargs)
    except SaltClientError:
        # Re-raise error with specific message
        raise SaltClientError('The salt master could not be contacted. Is master running?')
    except AuthenticationError as err:
        raise AuthenticationError(err)
    except AuthorizationError as err:
        raise AuthorizationError(err)
    except Exception as general_exception:
        # Convert to generic client error and pass along message
        raise SaltClientError(general_exception)
    # Tornado-style coroutine return (pre-`return value` generator syntax).
    raise tornado.gen.Return(self._check_pub_data(pub_data, listen=listen))
def _build(self, inputs, is_training=True, dropout_keep_prob=0.5):
    """Assembles the `MLP` and connects it to the graph.

    Args:
        inputs: A 2D Tensor of size `[batch_size, input_size]`.
        is_training: A bool or tf.Bool Tensor. Indicates whether we are
            currently training. Defaults to `True`.
        dropout_keep_prob: The probability that each element is kept when
            both `use_dropout` and `is_training` are True. Defaults to 0.5.

    Returns:
        A 2D Tensor of size `[batch_size, output_sizes[-1]]`.
    """
    # Remember the static input shape for later shape inference.
    self._input_shape = tuple(inputs.get_shape().as_list())
    net = inputs
    final_index = self._num_layers - 1
    for layer_id in xrange(self._num_layers):
        net = self._layers[layer_id](net)
        # Only perform dropout whenever we are activating the layer's outputs.
        if final_index != layer_id or self._activate_final:
            if self._use_dropout:
                # Keep probability collapses to 1.0 (no dropout) at eval time.
                keep_prob = utils.smart_cond(is_training, true_fn=lambda: dropout_keep_prob, false_fn=lambda: tf.constant(1.0))
                net = tf.nn.dropout(net, keep_prob=keep_prob)
            net = self._activation(net)
    return net
def attr(self, key):
    """Get attribute string from the Booster.

    Parameters
    ----------
    key : str
        The key to get attribute from.

    Returns
    -------
    value : str
        The attribute value of the key; returns None if the attribute does
        not exist.
    """
    ret = ctypes.c_char_p()
    success = ctypes.c_int()
    # XGBoosterGetAttr writes the value into `ret` and sets `success` to a
    # nonzero value when the attribute exists.
    _check_call(_LIB.XGBoosterGetAttr(self.handle, c_str(key), ctypes.byref(ret), ctypes.byref(success)))
    if success.value != 0:
        return py_str(ret.value)
    return None
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0., 0., 1.]), clf=True, **kwargs):
    """Off-screen save a GIF of one rotation about the scene.

    Parameters
    ----------
    filename : str
        The filename in which to save the output image (should have
        extension .gif).
    framerate : int
        The frame rate at which to animate motion.
    time : float
        The number of seconds for one rotation.
    axis : (3,) float or None
        If present, the animation will rotate about the given axis in world
        coordinates. Otherwise, the animation will rotate in azimuth.
    clf : bool
        If true, the Visualizer is cleared after rendering the figure.
    kwargs : dict
        Other keyword arguments for the SceneViewer instance.
    """
    # Fix: framerate * time is a float (time is fractional seconds); the
    # frame count must be a whole number of frames.
    n_frames = int(round(framerate * time))
    # Azimuth step per frame for exactly one full revolution.
    az = 2.0 * np.pi / n_frames
    # NOTE(review): the mutable default `axis` is passed through unchanged
    # here; kept for interface compatibility.
    Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf, animate_rate=framerate, animate_az=az)
    if clf:
        Visualizer3D.clf()
def euclidean_dissim(a, b, **_):
    """Euclidean (squared) distance dissimilarity function.

    Raises ValueError when either input contains NaNs, since missing values
    make the distance undefined.
    """
    has_missing = np.isnan(a).any() or np.isnan(b).any()
    if has_missing:
        raise ValueError("Missing values detected in numerical columns.")
    diff = a - b
    return np.sum(diff * diff, axis=1)
def _strip_object ( key ) :
"""Strips branch and version info if the given key supports those attributes .""" | if hasattr ( key , 'version_agnostic' ) and hasattr ( key , 'for_branch' ) :
return key . for_branch ( None ) . version_agnostic ( )
else :
return key |
def get_related_indicators(self, indicators=None, enclave_ids=None):
    """Uses the |get_related_indicators_page| method to create a generator
    that returns each successive report.

    :param list(string) indicators: list of indicator values to search for
    :param list(string) enclave_ids: list of GUIDs of enclaves to search in
    :return: The generator.
    """
    # Thin wrapper: Page.get_generator flattens the page generator into a
    # stream of individual items.
    return Page.get_generator(page_generator=self._get_related_indicators_page_generator(indicators, enclave_ids))
def get_epoch_iterator(self, **kwargs):
    """Get an epoch iterator for the data stream."""
    # Advance to the next epoch unless this is the first request after a
    # reset; the "fresh" state is consumed exactly once.
    if not self._fresh_state:
        self.next_epoch()
    else:
        self._fresh_state = False
    return super(DataStream, self).get_epoch_iterator(**kwargs)
def restart(self, restart_data, run_async=False):
    """Conditional and unconditional restart.

    :param restart_data: dict that may constrain the restart via 'role',
        'postgres_version' and 'restart_pending' keys, and may override the
        startup 'timeout'.
    :param run_async: when True, schedule the restart and return immediately.
    :return: (success, message) tuple.
    """
    assert isinstance(restart_data, dict)
    if (not self.restart_matches(restart_data.get('role'), restart_data.get('postgres_version'), ('restart_pending' in restart_data))):
        return (False, "restart conditions are not satisfied")
    with self._async_executor:
        prev = self._async_executor.schedule('restart')
        if prev is not None:
            return (False, prev + ' already in progress')
        # Make the main loop to think that we were recovering dead postgres. If we fail
        # to start postgres after a specified timeout (see below), we need to remove
        # leader key (if it belong to us) rather than trying to start postgres once again.
        self.recovering = True
    # Now that restart is scheduled we can set timeout for startup, it will get reset
    # once async executor runs and main loop notices PostgreSQL as up.
    timeout = restart_data.get('timeout', self.patroni.config['master_start_timeout'])
    self.set_start_timeout(timeout)
    # For non async cases we want to wait for restart to complete or timeout before returning.
    do_restart = functools.partial(self.state_handler.restart, timeout, self._async_executor.critical_task)
    if self.is_synchronous_mode() and not self.has_lock():
        # A synchronous standby must first stop being synchronous before it
        # may restart.
        do_restart = functools.partial(self.while_not_sync_standby, do_restart)
    if run_async:
        self._async_executor.run_async(do_restart)
        return (True, 'restart initiated')
    else:
        res = self._async_executor.run(do_restart)
        if res:
            return (True, 'restarted successfully')
        elif res is None:
            return (False, 'postgres is still starting')
        else:
            return (False, 'restart failed')
def random_filtered_sources(sources, srcfilter, seed):
    """Pick one source (at random) that passes the filter.

    :param sources: a list of sources (NB: consumed in place for rejected
        candidates, as in the original implementation)
    :param srcfilter: a SourceFilter instance
    :param seed: a random seed
    :returns: an empty list or a list with a single filtered source
    """
    random.seed(seed)
    while sources:
        candidate = random.choice(sources)
        close = srcfilter.get_close_sites(candidate)
        if close is not None:
            return [candidate]
        sources.remove(candidate)
    return []
def _match_getters(self, query, getters=None):
    """Tries to match in getters.

    :param query: message tuple
    :type query: Tuple[bytes]
    :return: response if found or None
    :rtype: Tuple[bytes] | None
    """
    if getters is None:
        getters = self._getters
    if query in getters:
        name, response = getters[query]
        logger.debug('Found response in getter of %s' % name)
        # Fill the response template with the property's current value.
        response = response.format(self._properties[name].get_value())
        return response.encode('utf-8')
    # Implicitly returns None when the query matches no getter.
def diffuser_curved(Di1, Di2, l):
    r"""Return the loss coefficient for a curved-wall pipe expansion [1]_.

    .. math::
        K_1 = \phi(1.43 - 1.3\beta^2)(1 - \beta^2)^2

    .. math::
        \phi = 1.01 - 0.624\frac{l}{d_1} + 0.30\left(\frac{l}{d_1}\right)^2
        - 0.074\left(\frac{l}{d_1}\right)^3 + 0.0070\left(\frac{l}{d_1}\right)^4

    Parameters
    ----------
    Di1 : float
        Inside diameter of original pipe (smaller), [m]
    Di2 : float
        Inside diameter of following pipe (larger), [m]
    l : float
        Length of the curve along the pipe axis, [m]

    Returns
    -------
    K : float
        Loss coefficient [-]

    Notes
    -----
    Beta^2 should be between 0.1 and 0.9. A small mismatch between tabulated
    values of this function in table 11.3 is observed with the equation
    presented.

    Examples
    --------
    >>> diffuser_curved(Di1=.25**0.5, Di2=1., l=2.)
    0.229978125000002

    References
    ----------
    .. [1] Rennels, Donald C., and Hobart M. Hudson. Pipe Flow: A Practical
       and Comprehensive Guide. 1st edition. Hoboken, N.J: Wiley, 2012.
    """
    beta_sq = (Di1 / Di2) ** 2
    # Quartic correction factor in l/d1 for the curved wall profile.
    phi = 1.01 - 0.624 * l / Di1 + 0.30 * (l / Di1) ** 2 - 0.074 * (l / Di1) ** 3 + 0.0070 * (l / Di1) ** 4
    return phi * (1.43 - 1.3 * beta_sq) * (1.0 - beta_sq) ** 2
def is_expanded(request, key):
    """Examine the request to decide whether *key* is an expanded field.

    The ``expand`` query parameter is a comma-separated list of dotted
    field paths (e.g. ``"a.b,c"``); every dotted component counts as an
    expanded field, and the special value ``"~all"`` expands everything.

    :param request: request object exposing a dict-like ``query_params``
    :param key: field name to test
    :return: True if *key* (or ``"~all"``) appears in the expand list
    """
    expand = request.query_params.get("expand", "")
    expand_fields = []
    for path in expand.split(","):
        # Fix: the original comprehension shadowed its own loop variable
        # (`[e for e in e.split(".")]`); `split` already returns a list.
        expand_fields.extend(path.split("."))
    return "~all" in expand_fields or key in expand_fields
def baart_criteria(self, X, y):
    """Returns the optimal Fourier series degree as determined by
    Baart's Criteria [JOP]_.

    **Citations**

    .. [JOP] J. O. Petersen, 1986,
        "Studies of Cepheid type variability. IV.
        The uncertainties of Fourier decomposition parameters.",
        A&A, Vol. 170, p. 59-69
    """
    try:
        min_degree, max_degree = self.degree_range
    except ValueError:
        raise ValueError("Degree range must be a length two sequence")
    # Residual-autocorrelation threshold below which a fit is accepted.
    cutoff = self.baart_tolerance(X)
    pipeline = Pipeline([('Fourier', Fourier()), ('Regressor', self.regressor)])
    # Predictions are made on phase-sorted X; keep the permutation so the
    # observations can be compared in the same order.
    sorted_X = numpy.sort(X, axis=0)
    X_sorting = numpy.argsort(rowvec(X))
    for degree in range(min_degree, max_degree):
        pipeline.set_params(Fourier__degree=degree)
        pipeline.fit(X, y)
        lc = pipeline.predict(sorted_X)
        residuals = y[X_sorting] - lc
        p_c = autocorrelation(residuals)
        # Accept the smallest degree whose residuals look uncorrelated.
        if abs(p_c) <= cutoff:
            return degree
    # reached max_degree without reaching cutoff
    return max_degree
def process(self, filename, encoding, **kwargs):
    """Process ``filename`` and encode the result with ``encoding``.

    Called by :func:`textract.parsers.process`; wraps
    :meth:`.BaseParser.extract` in a "unicode sandwich"
    (http://nedbatchelder.com/text/unipain.html): unknown raw bytes in,
    unicode in the middle, a predictable output encoding out.
    """
    raw_bytes = self.extract(filename, **kwargs)
    as_unicode = self.decode(raw_bytes)
    return self.encode(as_unicode, encoding)
def plot_mag_map_basemap(fignum, element, lons, lats, element_type, cmap='RdYlBu', lon_0=0, date=""):
    """Makes a color contour map of a geomagnetic field element.

    Parameters
    ----------
    fignum : matplotlib figure number
    element : field element array from pmag.do_mag_map for plotting
    lons : longitude array from pmag.do_mag_map for plotting
    lats : latitude array from pmag.do_mag_map for plotting
    element_type : [B, Br, I, D] geomagnetic element type
        B : field intensity
        Br : radial field intensity
        I : inclinations
        D : declinations

    Optional
    --------
    cmap : matplotlib color map
    lon_0 : central longitude of the Hammer projection
    date : date used for field evaluation;
        if a custom ghfile was used, supply the filename

    Effects
    -------
    Plots a Hammer projection color contour with the desired field element.
    """
    if not has_basemap:
        print('-W- Basemap must be installed to run plot_mag_map_basemap')
        return
    from matplotlib import cm
    # matplotlib's color map module
    lincr = 1  # contour level increment
    if type(date) != str:
        date = str(date)
    fig = plt.figure(fignum)
    m = Basemap(projection='hammer', lon_0=lon_0)
    # Project the lon/lat grid into map coordinates.
    x, y = m(*meshgrid(lons, lats))
    m.drawcoastlines()
    if element_type == 'B':
        levmax = element.max() + lincr
        levmin = round(element.min() - lincr)
        levels = np.arange(levmin, levmax, lincr)
        cs = m.contourf(x, y, element, levels=levels, cmap=cmap)
        plt.title('Field strength ($\mu$T): ' + date)
    if element_type == 'Br':
        levmax = element.max() + lincr
        levmin = round(element.min() - lincr)
        cs = m.contourf(x, y, element, levels=np.arange(levmin, levmax, lincr), cmap=cmap)
        plt.title('Radial field strength ($\mu$T): ' + date)
    if element_type == 'I':
        levmax = element.max() + lincr
        levmin = round(element.min() - lincr)
        # Inclination uses fixed levels spanning the full -90..90 range.
        cs = m.contourf(x, y, element, levels=np.arange(- 90, 100, 20), cmap=cmap)
        m.contour(x, y, element, levels=np.arange(- 80, 90, 10), colors='black')
        plt.title('Field inclination: ' + date)
    if element_type == 'D':
        # cs = m.contourf(x, y, element, levels=np.arange(-180,180,10), cmap=cmap)
        cs = m.contourf(x, y, element, levels=np.arange(- 180, 180, 10), cmap=cmap)
        m.contour(x, y, element, levels=np.arange(- 180, 180, 10), colors='black')
        plt.title('Field declination: ' + date)
    cbar = m.colorbar(cs, location='bottom')
def _emit_select(self, cluster_ids, **kwargs):
    """Choose spikes from the specified clusters and emit the
    `select` event on the GUI."""
    # Remove non-existing clusters from the selection.
    cluster_ids = self._keep_existing_clusters(cluster_ids)
    logger.debug("Select cluster(s): %s.", ', '.join(map(str, cluster_ids)))
    self.emit('select', cluster_ids, **kwargs)
def hash(self):
    ''':rtype: int
    :return: hash of the field

    Combines the parent Dynamic hash with this field's key and length.
    '''
    hashed = super(Dynamic, self).hash()
    return khash(hashed, self._key, self._length)
def loads(cls, data, store_password, try_decrypt_keys=True):
    """Loads the given keystore file using the supplied password for
    verifying its integrity, and returns a :class:`KeyStore` instance.

    Note that entries in the store that represent some form of
    cryptographic key material are stored in encrypted form, and
    therefore require decryption before becoming accessible.

    For ease of use in the typical case where key entries share the store
    password, this function will attempt to decrypt each key entry it
    encounters with the store password; entries that cannot be decrypted
    that way are returned in encrypted form and require a manual
    decrypt(key_password) call. Set ``try_decrypt_keys`` to ``False`` to
    disable this automatic decryption attempt. Use :meth:`is_decrypted`
    to query whether a returned entry has been decrypted.

    :param bytes data: Byte string representation of the keystore to be
        loaded.
    :param str store_password: Keystore password string.
    :param bool try_decrypt_keys: Whether to automatically try to decrypt
        any encountered key entries using the store password.

    :returns: A loaded :class:`KeyStore` instance, if the keystore could
        be successfully parsed and the supplied store password is correct.

    :raises BadKeystoreFormatException: If the keystore is malformed.
    :raises UnsupportedKeystoreVersionException: If the keystore contains
        an unknown format version number.
    :raises KeystoreSignatureException: If the keystore signature could
        not be verified using the supplied store password.
    :raises DuplicateAliasException: If the keystore contains duplicate
        aliases.
    """
    store_type = ""
    # The 4-byte magic number distinguishes JKS from JCEKS stores.
    magic_number = data[:4]
    if magic_number == MAGIC_NUMBER_JKS:
        store_type = "jks"
    elif magic_number == MAGIC_NUMBER_JCEKS:
        store_type = "jceks"
    else:
        raise BadKeystoreFormatException('Not a JKS or JCEKS keystore' ' (magic number wrong; expected' ' FEEDFEED or CECECECE)')
    try:
        version = b4.unpack_from(data, 4)[0]
        if version != 2:
            tmpl = 'Unsupported keystore version; expected v2, found v%r'
            raise UnsupportedKeystoreVersionException(tmpl % version)
        entries = {}
        entry_count = b4.unpack_from(data, 8)[0]
        pos = 12
        for i in range(entry_count):
            # Each entry starts with a 4-byte tag identifying its kind.
            tag = b4.unpack_from(data, pos)[0];
            pos += 4
            alias, pos = cls._read_utf(data, pos, kind="entry alias")
            timestamp = int(b8.unpack_from(data, pos)[0]);
            pos += 8
            # milliseconds since UNIX epoch
            if tag == 1:
                entry, pos = cls._read_private_key(data, pos, store_type)
            elif tag == 2:
                entry, pos = cls._read_trusted_cert(data, pos, store_type)
            elif tag == 3:
                # Secret key entries only exist in JCEKS stores.
                if store_type != "jceks":
                    raise BadKeystoreFormatException("Unexpected entry tag {0} encountered in JKS keystore; only supported in JCEKS keystores".format(tag))
                entry, pos = cls._read_secret_key(data, pos, store_type)
            else:
                # NOTE(review): the comma passes `tag` as a second exception
                # argument instead of %-formatting the message — confirm
                # whether "%d" was meant to be interpolated.
                raise BadKeystoreFormatException("Unexpected keystore entry tag %d", tag)
            entry.alias = alias
            entry.timestamp = timestamp
            if try_decrypt_keys:
                try:
                    entry.decrypt(store_password)
                except DecryptionFailureException:
                    pass
                # ok, let user call decrypt() manually
            if alias in entries:
                raise DuplicateAliasException("Found duplicate alias '%s'" % alias)
            entries[alias] = entry
    except struct.error as e:
        # Any short read while unpacking means a truncated/corrupt store.
        raise BadKeystoreFormatException(e)
    # check keystore integrity (uses UTF-16BE encoding of the password)
    hash_fn = hashlib.sha1
    hash_digest_size = hash_fn().digest_size
    store_password_utf16 = store_password.encode('utf-16be')
    expected_hash = hash_fn(store_password_utf16 + SIGNATURE_WHITENING + data[:pos]).digest()
    found_hash = data[pos:pos + hash_digest_size]
    if len(found_hash) != hash_digest_size:
        tmpl = "Bad signature size; found %d bytes, expected %d bytes"
        raise BadKeystoreFormatException(tmpl % (len(found_hash), hash_digest_size))
    if expected_hash != found_hash:
        raise KeystoreSignatureException("Hash mismatch; incorrect keystore password?")
    return cls(store_type, entries)
def on_completions_request(self, py_db, request):
    '''Handle a DAP completions request.

    :param CompletionsRequest request:
    '''
    arguments = request.arguments
    #: :type arguments: CompletionsArguments
    seq = request.seq
    text = arguments.text
    frame_id = arguments.frameId
    thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference(frame_id)
    if thread_id is None:
        # The frame is gone (its thread resumed); reply with an empty,
        # failed completions response instead of erroring out.
        body = CompletionsResponseBody([])
        variables_response = pydevd_base_schema.build_response(request, kwargs={'body': body, 'success': False, 'message': 'Thread to get completions seems to have resumed already.'})
        return NetCommand(CMD_RETURN, 0, variables_response, is_json=True)
    # Note: line and column are 1-based (convert to 0-based for pydevd).
    column = arguments.column - 1
    if arguments.line is None:
        # line is optional
        line = - 1
    else:
        line = arguments.line - 1
    self.api.request_completions(py_db, seq, thread_id, frame_id, text, line=line, column=column)
def get_driver_platform(self):
    """Get the driver platform where tests are running.

    :return: platform name, or an empty string when neither the legacy
        'platform' nor the W3C 'platformName' capability is present
    """
    caps = self.driver.desired_capabilities
    # The legacy 'platform' capability wins over the W3C 'platformName'.
    for cap_name in ('platform', 'platformName'):
        if cap_name in caps:
            return caps[cap_name]
    return ''
def delete(self, key, time=0):
    '''Deletes a key from the memcache.

    @return: Nonzero on success.
    @param time: number of seconds any subsequent set / update commands
        should fail. Defaults to None for no delay.
    @rtype: int
    '''
    if self.do_check_key:
        self.check_key(key)
    # Map the key to its server; the key may be rewritten (hash prefix).
    server, key = self._get_server(key)
    if not server:
        return 0
    self._statlog('delete')
    if time != None and time != 0:
        cmd = "delete %s %d" % (key, time)
    else:
        cmd = "delete %s" % key
    try:
        server.send_cmd(cmd)
        line = server.readline()
        # Both DELETED and NOT_FOUND count as success: the key is gone.
        if line and line.strip() in ['DELETED', 'NOT_FOUND']:
            return 1
        self.debuglog('Delete expected DELETED or NOT_FOUND, got: %s' % repr(line))
    except socket.error, msg:  # NOTE(review): Python 2 except syntax
        if isinstance(msg, tuple):
            msg = msg[1]
        # A socket failure marks the server dead so it is skipped later.
        server.mark_dead(msg)
    return 0
def get_handler():
    """Return the handler as a named tuple.

    The named tuple attributes are 'host', 'port', 'signum'.
    Return None when no handler has been registered.
    """
    host, port, signum = _pdbhandler._registered()
    if signum:
        # Fall back to the default address parts when host/port are unset.
        return Handler(host if host else DFLT_ADDRESS[0].encode(), port if port else DFLT_ADDRESS[1], signum)
    # Implicitly returns None when no handler is registered.
def count_between(self, min_score=None, max_score=None):
    """Return the number of members whose score lies between *min_score*
    and *max_score* (inclusive).

    ``None`` bounds default to negative / positive infinity respectively.
    """
    lower = float('-inf') if min_score is None else float(min_score)
    upper = float('inf') if max_score is None else float(max_score)
    return self.redis.zcount(self.key, lower, upper)
def _make_class(cls, **kwargs):
    """Return a custom Visual class with given parameters."""
    # A None value means "inherit the value already defined on the base class".
    kwargs = {k: (v if v is not None else getattr(cls, k, None)) for k, v in kwargs.items()}
    # The class name contains a hash of the custom parameters.
    name = cls.__name__ + '_' + _hash(kwargs)
    # Memoize generated classes so identical parameter sets share one class.
    if name not in _CLASSES:
        logger.log(5, "Create class %s %s.", name, kwargs)
        cls = type(name, (cls,), kwargs)
        _CLASSES[name] = cls
    return _CLASSES[name]
def output_sshkey(gandi, sshkey, output_keys, justify=12):
    """Helper to output an ssh key information.

    Thin wrapper that delegates straight to ``output_generic``.
    """
    output_generic(gandi, sshkey, output_keys, justify)
def reload(*command, ignore_patterns=None):
    """Run *command* and restart it whenever a watched file changes.

    Watches the current directory recursively, skipping paths that match
    ``ignore_patterns`` (falling back to patterns loaded from a
    ``.reloadignore`` file). On Ctrl-C the watcher is stopped and the child
    command terminated.

    :param command: the command (and its arguments) to run and reload
    :param ignore_patterns: optional list of glob patterns to ignore
    """
    path = "."
    sig = signal.SIGTERM
    delay = 0.25
    ignorefile = ".reloadignore"
    # Fix: the original used a mutable default argument ([]); normalize a
    # None/empty value to the patterns from the ignore file instead.
    ignore_patterns = ignore_patterns or load_ignore_patterns(ignorefile)
    event_handler = ReloadEventHandler(ignore_patterns)
    # Fix: the original passed the `signal` module object instead of the
    # intended SIGTERM signal number stored in `sig`.
    reloader = Reloader(command, sig)
    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    reloader.start_command()
    try:
        while True:
            time.sleep(delay)
            sys.stdout.write(reloader.read())
            sys.stdout.flush()
            if event_handler.modified:
                reloader.restart_command()
    except KeyboardInterrupt:
        observer.stop()
    # Drain remaining output and shut the child down cleanly.
    observer.join()
    reloader.stop_command()
    sys.stdout.write(reloader.read())
    sys.stdout.flush()
def init_defaults(self):
    """Sets a model instance variable to the table value and sets the name
    to the table name as determined from the model class."""
    super(ModelTable, self).init_defaults()
    # For a ModelTable the "table" is a model class; derive the SQL table
    # name from its Django-style _meta options.
    self.model = self.table
    self.name = self.model._meta.db_table
def descendants(self, typename):
    """Return the descendant types of *typename*.

    The hierarchy is walked depth-first: each direct child appears
    immediately before its own descendants.
    """
    result = []
    for child in self._hier[typename][1]:
        result.append(child)
        result.extend(self.descendants(child))
    return result
def get_referenced_object(referring_object, fieldname):
    """Get an object referred to by a field in another object.

    For example an object of type Construction has fields for each layer,
    each of which refers to a Material. This function allows the object
    representing a Material to be fetched using the name of the layer.

    Returns the first item found since if there is more than one matching
    item, it is a malformed IDF.

    Parameters
    ----------
    referring_object : EpBunch
        The object which contains a reference to another object.
    fieldname : str
        The name of the field in the referring object which contains the
        reference to another object.

    Returns
    -------
    EpBunch
    """
    idf = referring_object.theidf
    # The IDD "object-list" entry names the reference classes this field
    # may point at.
    object_list = referring_object.getfieldidd_item(fieldname, u'object-list')
    for obj_type in idf.idfobjects:
        for obj in idf.idfobjects[obj_type]:
            # An object is a candidate when its Name field is registered in
            # one of the reference classes this field accepts.
            valid_object_lists = obj.getfieldidd_item("Name", u'reference')
            if set(object_list).intersection(set(valid_object_lists)):
                referenced_obj_name = referring_object[fieldname]
                if obj.Name == referenced_obj_name:
                    return obj
    # Implicitly returns None when no match is found.
def dump(self):
    """Serialize the state of this subsystem into a dict.

    Returns:
        dict: The serialized state
    """
    dumped_walker = None if self.dump_walker is None else self.dump_walker.dump()
    return {
        'storage': self.storage.dump(),
        'dump_walker': dumped_walker,
        'next_id': self.next_id,
    }
def _parse_auth_message(self, auth_message):
    """Parse a message to see if we have ip addresses or users that we care about.

    :param auth_message: The auth message to parse
    :return: Result dict, possibly containing 'username' and/or 'ip' keys
    """
    result = {}
    # Only the first matching regex (across all three groups) populates the
    # result; later matches are ignored.
    has_matched = False
    # Check for the invalid user / ip messages
    for regex in REGEXES_INVALID_USER:
        m = re.search(regex, auth_message)
        if m and not has_matched:
            has_matched = True
            # Save the username and IP
            result['username'] = m.group('user')
            result['ip'] = m.group('ip')
    # Check for the invalid ip messages
    for regex in REGEXES_INVALID_IP:
        m = re.search(regex, auth_message)
        if m and not has_matched:
            has_matched = True
            # Save the IP
            result['ip'] = m.group('ip')
    # Check for messages we want to ignore
    for regex in REGEXES_IGNORE:
        m = re.search(regex, auth_message)
        if m and not has_matched:
            has_matched = True
    # If it's an ssh log and we don't know what it is, handle that
    if not has_matched:
        sys.stderr.write("Unhandled auth message: %s\n" % auth_message)
    return result
def saltmem(human_readable=False):
    '''.. versionadded:: 2015.8.0

    Returns the amount of memory that salt is using

    human_readable : False
        return the value in a nicely formatted number

    CLI Example:

    .. code-block:: bash

        salt '*' status.saltmem
        salt '*' status.saltmem human_readable=True
    '''
    # psutil.Process defaults to current process (`os.getpid()`)
    p = psutil.Process()
    # Use oneshot to get a snapshot
    with p.oneshot():
        # Resident set size, in bytes.
        mem = p.memory_info().rss
    if human_readable:
        return _byte_calc(mem)
    return mem
def sendChatMessage(self, text, id=None, user=None, group=None, channel=None,
                    parse='none', link_names=True, unfurl_links=True,
                    unfurl_media=False, send_with_api=False, icon_emoji=None,
                    icon_url=None, username=None, attachments=None,
                    thread_ts=None, reply_broadcast=False):
    """Sends a chat message to a given id, user, group or channel.

    If the API token is not a bot token (xoxb), ``send_with_api`` may be set
    to True. This will send messages using ``chat.postMessage`` in the Slack
    API, instead of using the WebSockets channel.

    This makes the message sending process a little bit slower, however
    permits writing of messages containing hyperlinks, like what can be done
    with Incoming and Outgoing Webhooks integrations.

    Bots are not permitted by Slack to use ``chat.postMessage`` so this will
    result in an error.

    Note: channel names must **not** be preceded with ``#``.
    """
    # Resolve the destination id: exactly one of id/user/group/channel.
    if id is not None:
        assert user is None, 'id and user cannot both be set.'
        assert group is None, 'id and group cannot both be set.'
        assert channel is None, 'id and channel cannot both be set.'
    elif user is not None:
        assert group is None, 'user and group cannot both be set.'
        assert channel is None, 'user and channel cannot both be set.'
        # Private message to user, get the IM name
        id = self.meta.find_im_by_user_name(user, auto_create=True)[0]
    elif group is not None:
        assert channel is None, 'group and channel cannot both be set.'
        # Message to private group, get the group name.
        id = self.meta.find_group_by_name(group)[0]
    elif channel is not None:
        # Message sent to a channel
        id = self.meta.find_channel_by_name(channel)[0]
    else:
        # BUGFIX: was the Python-2-only `raise Exception, '...'` statement
        # syntax; the call form works on both Python 2 and 3.
        raise Exception('Should not reach here.')
    if send_with_api:
        return self.meta.api.chat.postMessage(
            token=self.meta.token, channel=id, text=text, parse=parse,
            link_names=link_names, unfurl_links=unfurl_links,
            unfurl_media=unfurl_media, icon_url=icon_url,
            icon_emoji=icon_emoji, username=username,
            attachments=attachments, thread_ts=thread_ts,
            reply_broadcast=reply_broadcast,
        )
    else:
        # The websocket path cannot carry the postMessage-only options.
        assert icon_url is None, 'icon_url can only be set if send_with_api is True'
        assert icon_emoji is None, 'icon_emoji can only be set if send_with_api is True'
        assert username is None, 'username can only be set if send_with_api is True'
        return self.sendCommand(
            type='message', channel=id, text=text, parse=parse,
            link_names=link_names, unfurl_links=unfurl_links,
            unfurl_media=unfurl_media, thread_ts=thread_ts,
            reply_broadcast=reply_broadcast,
        )
def resolve_doc(cls, manifest, target_doc_name, target_doc_package,
                current_project, node_package):
    """Resolve the given documentation. This follows the same algorithm as
    resolve_ref except the is_enabled checks are unnecessary as docs are
    always enabled.
    """
    # An explicit package pins the lookup; no fallback search.
    if target_doc_package is not None:
        return manifest.find_docs_by_name(target_doc_name, target_doc_package)
    # Otherwise search the current project, then the node's package, then
    # the unscoped namespace; first hit wins.
    for candidate in (current_project, node_package, None):
        found = manifest.find_docs_by_name(target_doc_name, candidate)
        if found is not None:
            return found
    return None
def convert_result(converter):
    """Decorator that can convert the result of a function call."""
    def decorate(fn):
        @inspection.wraps(fn)
        def wrapped(*args, **kwargs):
            raw = fn(*args, **kwargs)
            return converter(raw)
        return wrapped
    return decorate
def describe(self):
    """Describes the method.

    :return: Description
    :rtype: dict[str, object]
    """
    # Expose the public metadata attributes as a plain dict.
    keys = ('name', 'params', 'returns', 'description')
    return {key: getattr(self, key) for key in keys}
def combine_mean_curves(calc_big, calc_small):
    """Combine the hazard curves coming from two different calculations.

    The result will be the hazard curves of calc_big, updated on the sites
    in common with calc_small with the PoEs of calc_small. For instance:
    calc_big = USA, calc_small = California
    """
    dstore_big = datastore.read(calc_big)
    dstore_small = datastore.read(calc_small)
    sitecol_big = dstore_big['sitecol']
    sitecol_small = dstore_small['sitecol']
    # Map (lon, lat) -> site id in each calculation so sites can be matched.
    site_id_big = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol_big.sids, sitecol_big.lons, sitecol_big.lats)}
    site_id_small = {(lon, lat): sid for sid, lon, lat in zip(
        sitecol_small.sids, sitecol_small.lons, sitecol_small.lats)}
    common = set(site_id_big) & set(site_id_small)
    if not common:
        raise RuntimeError('There are no common sites between calculation '
                           '%d and %d' % (calc_big, calc_small))
    sids_small = [site_id_small[lonlat] for lonlat in common]
    pmap_big = PmapGetter(dstore_big).get_mean()  # USA
    # BUGFIX: the small pmap must be read from dstore_small (it was read
    # from dstore_big, so the "update" re-applied the big calculation's own
    # curves instead of calc_small's).
    pmap_small = PmapGetter(dstore_small, sids=sids_small).get_mean()  # Cal
    # Overwrite the big map's PoEs on the shared sites.
    for lonlat in common:
        pmap_big[site_id_big[lonlat]] |= pmap_small.get(
            site_id_small[lonlat], 0)
    out = 'combine_%d_%d.hdf5' % (calc_big, calc_small)
    with hdf5.File(out, 'w') as h5:
        h5['hcurves/mean'] = pmap_big
        h5['oqparam'] = dstore_big['oqparam']
        h5['sitecol'] = dstore_big['sitecol']
    print('Generated %s' % out)
def get_phrases(text=None, tokens=None, postags=None, tagger='nltk',
                grammar='SimpleNP', regex=None, minlen=2, maxlen=8,
                output='counts'):
    """Given a text (or POS tag sequence), return the phrases matching the
    given grammar. Works on documents or sentences.

    Returns a dict with one or more keys with the phrase information.

    text: the text of the document. If supplied, we will try to POS tag it.

    You can also do your own tokenization and/or tagging and supply them as
    'tokens' and/or 'postags', which are lists of strings (of the same length).
      - Must supply both to get phrase counts back.
      - With only postags, can get phrase token spans back.
      - With only tokens, we will try to POS-tag them if possible.

    output: a string, or list of strings, of information to return. Options:
      - counts: a Counter with phrase frequencies. (default)
      - token_spans: a list of the token spans of each matched phrase, as
        (start, end) pairs of integers referring to token positions.
      - pos, tokens can be returned too.

    tagger: if you're passing in raw text, can supply your own tagger, from
    one of the get_*_tagger() functions. If not supplied, we try to load one.

    grammar: the grammar to use. Only one option right now...

    regex: a custom regex to use, instead of a premade grammar. Currently,
    this must work on the 5-tag system described near the top of this file.
    """
    global SimpleNP
    ## try to get values for both 'postags' and 'tokens', parallel lists of strings
    if postags is None:
        try:
            tagger = TAGGER_NAMES[tagger]()
        except KeyError:
            # BUGFIX: was a bare `except:` which also masked genuine errors
            # raised while *constructing* a known tagger.
            raise Exception("We don't support tagger %s" % tagger)
        # otherwise, assume it's one of our wrapper *Tagger objects
        d = None
        if tokens is not None:
            d = tagger.tag_tokens(tokens)
        elif text is not None:
            d = tagger.tag_text(text)
        else:
            raise Exception("Need to supply text or tokens.")
        postags = d['pos']
        tokens = d['tokens']
    if regex is None:
        if grammar == 'SimpleNP':
            regex = SimpleNP
        else:
            assert False, "Don't know grammar %s" % grammar
    # BUGFIX: the resolved regex was never forwarded to the matcher, so the
    # 'grammar' and 'regex' arguments were silently ignored.
    phrase_tokspans = extract_ngram_filter(postags, regex=regex,
                                           minlen=minlen, maxlen=maxlen)
    ## Handle multiple possible return info outputs
    if isinstance(output, str):
        output = [output]
    our_options = set()
    def retopt(x):
        our_options.add(x)
        return x in output
    ret = {}
    ret['num_tokens'] = len(postags)
    if retopt('token_spans'):
        ret['token_spans'] = phrase_tokspans
    if retopt('counts'):
        counts = Counter()
        for (start, end) in phrase_tokspans:
            # Join the span's tokens and count case-insensitively.
            phrase = safejoin(tokens[start:end])
            counts[phrase.lower()] += 1
        ret['counts'] = counts
    if retopt('pos'):
        ret['pos'] = postags
    if retopt('tokens'):
        ret['tokens'] = tokens
    unknown = set(output) - our_options
    if unknown:
        raise Exception("Don't know how to handle output options: %s"
                        % list(unknown))
    return ret
def _adjust_bin_edges ( datetime_bins , offset , closed , index , labels ) :
"""This is required for determining the bin edges resampling with
daily frequencies greater than one day , month end , and year end
frequencies .
Consider the following example . Let ' s say you want to downsample the
time series with the following coordinates to month end frequency :
CFTimeIndex ( [ 2000-01-01 12:00:00 , 2000-01-31 12:00:00,
2000-02-01 12:00:00 ] , dtype = ' object ' )
Without this adjustment , _ get _ time _ bins with month - end frequency will
return the following index for the bin edges ( default closed = ' right ' and
label = ' right ' in this case ) :
CFTimeIndex ( [ 1999-12-31 00:00:00 , 2000-01-31 00:00:00,
2000-02-29 00:00:00 ] , dtype = ' object ' )
If 2000-01-31 is used as a bound for a bin , the value on
2000-01-31T12:00:00 ( at noon on January 31st ) , will not be included in the
month of January . To account for this , pandas adds a day minus one worth
of microseconds to the bin edges generated by cftime range , so that we do
bin the value at noon on January 31st in the January bin . This results in
an index with bin edges like the following :
CFTimeIndex ( [ 1999-12-31 23:59:59 , 2000-01-31 23:59:59,
2000-02-29 23:59:59 ] , dtype = ' object ' )
The labels are still :
CFTimeIndex ( [ 2000-01-31 00:00:00 , 2000-02-29 00:00:00 ] , dtype = ' object ' )
This is also required for daily frequencies longer than one day and
year - end frequencies .""" | is_super_daily = ( isinstance ( offset , ( MonthEnd , QuarterEnd , YearEnd ) ) or ( isinstance ( offset , Day ) and offset . n > 1 ) )
if is_super_daily :
if closed == 'right' :
datetime_bins = datetime_bins + datetime . timedelta ( days = 1 , microseconds = - 1 )
if datetime_bins [ - 2 ] > index . max ( ) :
datetime_bins = datetime_bins [ : - 1 ]
labels = labels [ : - 1 ]
return datetime_bins , labels |
def normalizeGlyphTopMargin(value):
    """Normalizes glyph top margin.

    * **value** must be a :ref:`type-int-float` or `None`.
    * Returned value is the same type as the input value.
    """
    if value is None or isinstance(value, (int, float)):
        return value
    raise TypeError("Glyph top margin must be an :ref:`type-int-float`, "
                    "not %s." % type(value).__name__)
def write_file(self, filename, **kwargs):
    """Writes POSCAR to a file. The supported kwargs are the same as those for
    the Poscar.get_string method and are passed through directly.
    """
    content = self.get_string(**kwargs)
    with zopen(filename, "wt") as handle:
        handle.write(content)
def get_callable_documentation(the_callable):
    """Return a string with the callable signature and its docstring.

    :param the_callable: the callable to be analyzed.
    :type the_callable: function/callable.
    :return: the signature.
    """
    doc = getattr(the_callable, '__doc__') or 'No documentation'
    return wrap_text_in_a_box(
        title=get_callable_signature_as_string(the_callable),
        body=doc.replace('\n', '\n\n'),
        style='ascii_double')
def _read_para_relay_hmac ( self , code , cbit , clen , * , desc , length , version ) :
"""Read HIP RELAY _ HMAC parameter .
Structure of HIP RELAY _ HMAC parameter [ RFC 5770 ] :
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
| HMAC |
| | Padding |
Octets Bits Name Description
0 0 relay _ hmac . type Parameter Type
1 15 relay _ hmac . critical Critical Bit
2 16 relay _ hmac . length Length of Contents
4 32 relay _ hmac . hmac HMAC
? ? - Padding""" | _hmac = self . _read_fileng ( clen )
relay_hmac = dict ( type = desc , critical = cbit , length = clen , hmac = _hmac , )
_plen = length - clen
if _plen :
self . _read_fileng ( _plen )
return relay_hmac |
def conforms(element, etype, namespace: Dict[str, Any]) -> bool:
    """Determine whether element conforms to etype

    :param element: Element to test for conformance
    :param etype: Type to test against
    :param namespace: Namespace to use to resolve forward references
    :return:
    """
    # Resolve forward references first, then dispatch on union-ness.
    resolved = proc_forward(etype, namespace)
    if is_union(resolved):
        return union_conforms(element, resolved, namespace, conforms)
    return element_conforms(element, resolved)
def setLinkQuality(self, EUIadr, LinkQuality):
    """set custom LinkQualityIn for all receiving messages from the specified EUIadr

    Args:
        EUIadr: a given extended address
        LinkQuality: a given custom link quality
                     link quality/link margin mapping table
                     3: 21 - 255 (dB)
                     2: 11 - 20 (dB)
                     1: 3 - 9 (dB)
                     0: 0 - 2 (dB)

    Returns:
        True: successful to set the link quality
        False: fail to set the link quality
    """
    # BUGFIX: `print x` statements were Python-2-only; the call form works
    # on both Python 2 and 3.
    print('%s call setLinkQuality' % self.port)
    print(EUIadr)
    print(LinkQuality)
    try:
        # process EUIadr: render as hex without the 0x prefix or the
        # trailing 'L' that Python 2 longs produce.
        euiStr = str(hex(EUIadr)).rstrip('L')
        address64 = ''
        if euiStr.startswith('0x'):
            # BUGFIX: lstrip('0x') strips *any* run of '0'/'x' characters,
            # not the literal prefix; slice the two prefix characters off.
            address64 = euiStr[2:]
            # prepend 0 at the beginning
            if len(address64) < 16:
                address64 = address64.zfill(16)
            print(address64)
        cmd = 'macfilter rss add-lqi %s %s' % (address64, str(LinkQuality))
        print(cmd)
        return self.__sendCommand(cmd)[0] == 'Done'
    except Exception as e:
        # BUGFIX: `except Exception, e` is Python-2-only syntax.
        ModuleHelper.WriteIntoDebugLogger("setLinkQuality() Error: " + str(e))
def norm_join(prefix, suffix):
    """Join ``prefix`` and ``suffix`` paths
    and return the resulting path, normalized.

    :param string prefix: the prefix path
    :param string suffix: the suffix path
    :rtype: string
    """
    # With no components at all, the normalized path is the current dir.
    if prefix is None:
        return "." if suffix is None else os.path.normpath(suffix)
    if suffix is None:
        return os.path.normpath(prefix)
    return os.path.normpath(os.path.join(prefix, suffix))
def optimizer_array ( self , p ) :
"""Make sure the optimizer copy does not get touched , thus , we only want to
set the values * inside * not the array itself .
Also we want to update param _ array in here .""" | f = None
# Build a mask 'f' over param_array entries when some parameters are
# fixed (constrained); 'p' then only carries the entries selected by f.
if self . has_parent ( ) and self . constraints [ __fixed__ ] . size != 0 :
f = np . ones ( self . size ) . astype ( bool )
f [ self . constraints [ __fixed__ ] ] = FIXED
elif self . _has_fixes ( ) :
f = self . _fixes_
if f is None :
# No fixes: 'p' covers every parameter. Copy it in, then apply each
# non-fixed constraint's transform c.f() to its indexed slice.
self . param_array . flat = p
[ np . put ( self . param_array , ind , c . f ( self . param_array . flat [ ind ] ) ) # py3 fix
# for c , ind in self . constraints . iteritems ( ) if c ! = _ _ fixed _ _ ]
for c , ind in self . constraints . items ( ) if c != __fixed__ ]
else :
# With fixes: only the entries selected by f are updated from 'p',
# and each transform is applied to the selected subset of its indices.
self . param_array . flat [ f ] = p
[ np . put ( self . param_array , ind [ f [ ind ] ] , c . f ( self . param_array . flat [ ind [ f [ ind ] ] ] ) ) # py3 fix
# for c , ind in self . constraints . iteritems ( ) if c ! = _ _ fixed _ _ ]
for c , ind in self . constraints . items ( ) if c != __fixed__ ]
# self . _ highest _ parent _ . tie . propagate _ val ( )
# Invalidate the cached transformed optimizer copy and notify observers.
self . _optimizer_copy_transformed = False
self . trigger_update ( )
def _get_docargs(self, args_user, prt):
    """Pare down docopt. Return a minimal dictionary and a set containing runtime arg values."""
    def _log(msg):
        # Progress tracing is only emitted when a print target was given.
        if prt is not None:
            print(msg)

    _log("DocOptParse BEFORE docopt: {}".format(args_user))
    docargs = docopt(self.doc, args_user)
    _log("DocOptParse AFTER docopt: {}".format(docargs))
    # Strip the leading '-'/'--' from option names.
    kwargs_doc = {re.sub(r'^-{1,2}', '', key): val
                  for key, val in docargs.items()}
    self._chk_docopt_kws(kwargs_doc, args_user)
    kwargs_usr = get_kwargs(kwargs_doc, self.exp_keys, self.exp_elems)
    _log("DocOptParse AFTER pared: {}".format(kwargs_usr))
    # taxid is numeric; coerce it from the string docopt returns.
    if 'taxid' in kwargs_usr:
        kwargs_usr['taxid'] = int(kwargs_usr['taxid'])
    _log("DocOptParse AFTER edited/checked: {}".format(kwargs_usr))
    return kwargs_usr
def from_pillow(pil_image):
    """Convert from pillow image to opencv"""
    # Force 3-channel RGB, then hand the raw pixel data to numpy.
    rgb = np.array(pil_image.convert('RGB'))
    # OpenCV expects BGR channel order, so reverse the last axis and copy
    # to get a contiguous array.
    return rgb[:, :, ::-1].copy()
def CreateTaskStorage(self, task):
    """Creates a task storage.

    The task storage is used to store attributes created by the task.

    Args:
        task (Task): task.

    Returns:
        StorageWriter: storage writer.

    Raises:
        IOError: if the storage type is not supported.
        OSError: if the storage type is not supported.
    """
    # Task storage only makes sense for session-type storage.
    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
        raise IOError('Unsupported storage type.')
    path = self._GetTaskStorageFilePath(task)
    return self._CreateTaskStorageWriter(path, task)
def static_proxy(request):
    """Serves TinyMCE plugins inside the inline popups and the uploadify
    SWF, as these are normally static files, and will break with
    cross-domain JavaScript errors if ``STATIC_URL`` is an external
    host. URL for the file is passed in via querystring in the inline
    popup plugin template, and we then attempt to pull out the relative
    path to the file, so that we can serve it locally via Django.
    """
    def scheme_relative(u):
        # Turn "http://host/path" into "//host/path"; leave others alone.
        return ("//" + u.split("://")[-1]) if "://" in u else u

    url = scheme_relative(request.GET["u"])
    host = "//" + request.get_host()
    static_url = scheme_relative(settings.STATIC_URL)
    # Strip each matching prefix in turn to obtain a path relative to the
    # static root.
    for prefix in (host, static_url, "/"):
        if url.startswith(prefix):
            url = url.replace(prefix, "", 1)
    response = ""
    (content_type, encoding) = mimetypes.guess_type(url)
    if content_type is None:
        content_type = "application/octet-stream"
    path = finders.find(url)
    if path:
        if isinstance(path, (list, tuple)):
            path = path[0]
        if url.endswith(".htm"):
            # Inject <base href="{{ STATIC_URL }}"> into TinyMCE plugins,
            # since the path static files in these won't be on the same
            # domain.
            static_url = settings.STATIC_URL + os.path.split(url)[0] + "/"
            if not urlparse(static_url).scheme:
                static_url = urljoin(host, static_url)
            base_tag = "<base href='%s'>" % static_url
            with open(path, "r") as f:
                response = f.read().replace("<head>", "<head>" + base_tag)
        else:
            try:
                with open(path, "rb") as f:
                    response = f.read()
            except IOError:
                return HttpResponseNotFound()
    return HttpResponse(response, content_type=content_type)
def statistical_inefficiencies(dtrajs, lag, C=None, truncate_acf=True,
                               mact=2.0, n_jobs=1, callback=None):
    r"""Computes statistical inefficiencies of sliding-window transition
    counts at given lag.

    Consider a discrete trajectory :math:`{x_t}` with :math:`x_t \in {1,...,n}`.
    For each starting state :math:`i`, we collect the time-ordered target
    states at times :math:`t+\tau` whenever we started in state :math:`i` at
    time :math:`t`, and define the indicator sequence
    :math:`a^{(i,j)}_t(\tau) = 1(Y^{(i)}_t = j)`.  The statistical
    inefficiency for transition counts :math:`c_{ij}(\tau)` is computed as
    the statistical inefficiency of that sequence.

    Parameters
    ----------
    dtrajs : list of int-iterables
        discrete trajectories
    lag : int
        lag time
    C : scipy sparse matrix (n, n) or None
        sliding window count matrix, if already available
    truncate_acf : bool, optional, default=True
        When the normalized autocorrelation function passes through 0, it is
        truncated in order to avoid integrating random noise.
    mact : float, default=2.0
        multiplier on the autocorrelation time used by the estimator.
    n_jobs : int, default=1
        If greater one, the function will be evaluated with multiple
        processes.
    callback : callable, default=None
        will be called for every statistical inefficiency computed (number
        of nonzero elements in count matrix). If n_jobs is greater one, the
        callback will be invoked per finished batch.

    Returns
    -------
    I : scipy sparse matrix (n, n)
        Statistical inefficiency matrix with a sparsity pattern identical to
        the sliding-window count matrix at the same lag time. Contains a
        statistical inefficiency :math:`I_{ij} \in (0,1]` wherever
        :math:`c_{ij} > 0`, and 0 where there is no transition count.

    See also
    --------
    msmtools.util.statistics.statistical_inefficiency
        used to compute the statistical inefficiency for conditional
        trajectories
    """
    # count matrix (computed here if not supplied by the caller)
    if C is None:
        C = count_matrix_coo2_mult(dtrajs, lag, sliding=True, sparse=True)
    if callback is not None and not callable(callback):
        raise ValueError('Provided callback is not callable')
    # split sequences
    splitseq = _split_sequences_multitraj(dtrajs, lag)
    # compute inefficiencies for every nonzero (i, j) entry of C
    I, J = C.nonzero()
    if n_jobs > 1:
        from multiprocessing.pool import Pool
        from contextlib import closing
        import tempfile
        # to avoid pickling partial results, we store these in a numpy.memmap
        ntf = tempfile.NamedTemporaryFile(delete=False)
        arr = np.memmap(ntf.name, dtype=np.float64, mode='w+', shape=C.nnz)
        # BUGFIX: 'mact' was passed as 'mact=truncate_acf', silently
        # replacing the autocorrelation-time multiplier with a boolean.
        gen = _arguments_generator(I, J, splitseq, truncate_acf=truncate_acf,
                                   mact=mact, array=ntf.name, njobs=n_jobs)
        if callback:
            x = gen.n_blocks()
            _callback = lambda _: callback(x)
        else:
            _callback = callback
        with closing(Pool(n_jobs)) as pool:
            result_async = [pool.apply_async(_wrapper, (args,),
                                             callback=_callback)
                            for args in gen]
            # Block until every batch has finished writing into the memmap.
            [t.get() for t in result_async]
        data = np.array(arr[:])
        import os
        os.unlink(ntf.name)
    else:
        data = np.empty(C.nnz)
        for index, (i, j) in enumerate(zip(I, J)):
            data[index] = statistical_inefficiency(
                _indicator_multitraj(splitseq, i, j),
                truncate_acf=truncate_acf, mact=mact)
            if callback is not None:
                callback(1)
    res = csr_matrix((data, (I, J)), shape=C.shape)
    return res
async def list_blocks(self, request):
    """Fetches list of blocks from validator, optionally filtered by id.

    Request:
        query:
            - head: The id of the block to use as the head of the chain
            - id: Comma separated list of block ids to include in results

    Response:
        data: JSON array of fully expanded Block objects
        head: The head used for this query (most recent if unspecified)
        link: The link to this exact query, including head block
        paging: Paging info and nav, like total resources and a next link
    """
    paging_controls = self._get_paging_controls(request)
    # Build the protobuf query from the request's filters and paging.
    validator_query = client_block_pb2.ClientBlockListRequest(
        head_id=self._get_head_id(request),
        block_ids=self._get_filter_ids(request),
        sorting=self._get_sorting_message(request, "block_num"),
        paging=self._make_paging_message(paging_controls))
    response = await self._query_validator(
        Message.CLIENT_BLOCK_LIST_REQUEST,
        client_block_pb2.ClientBlockListResponse,
        validator_query)
    blocks = [self._expand_block(b) for b in response['blocks']]
    return self._wrap_paginated_response(
        request=request,
        response=response,
        controls=paging_controls,
        data=blocks)
def fields_from_names(fields, names=None):
    """Given a dictionary of fields and a list of names, will return a
    dictionary consisting of the fields specified by names. Names can be
    either the names of fields, or their aliases.
    """
    if names is None:
        return fields
    # A single name may be given as a bare string.
    if isinstance(names, string_types):
        names = [names]
    aliases_to_names = aliases_from_fields(fields)
    names_to_aliases = {v: k for k, v in aliases_to_names.items()}
    outfields = {}
    for name in names:
        if name in fields:
            outfields[name] = fields[name]
            continue
        # Fall back to alias resolution in either direction.
        if name in aliases_to_names:
            key = (name, aliases_to_names[name])
        elif name in names_to_aliases:
            key = (names_to_aliases[name], name)
        else:
            raise KeyError('default fields has no field %s' % name)
        outfields[key] = fields[key]
    return outfields
def summarize_crud_mutation(method, model, isAsync=False):
    """This function provides the standard form for crud mutations."""
    # the appropriate action type and mutation name for this model+method
    action_type = get_crud_action(method=method, model=model)
    name = crud_mutation_name(model=model, action=method)
    # per-method factories for the mutation's inputs and outputs
    factories = {
        'create': (create_mutation_inputs, create_mutation_outputs),
        'update': (update_mutation_inputs, update_mutation_outputs),
        'delete': (delete_mutation_inputs, delete_mutation_outputs),
    }
    input_factory, output_factory = factories[method]
    # return the appropriate summary
    return summarize_mutation(
        mutation_name=name,
        event=action_type,
        isAsync=isAsync,
        inputs=input_factory(model),
        outputs=output_factory(model),
    )
def sparse_arrays(self, value):
    """Validate and enable sparse arrays."""
    if isinstance(value, bool):
        self._sparse_arrays = value
    else:
        raise TypeError('sparse_arrays attribute must be a logical type.')
def _findSwiplMacOSHome():
    """This function is guessing where SWI-Prolog is
    installed in MacOS via .app.

    :returns:
        A tuple of (path to the swipl so, path to the resource file)
    :returns type:
        ({str, None}, {str, None})
    """
    # Need more help with MacOS: this works, but needs more work.
    lib_names = ['libswipl.dylib', 'libpl.dylib']
    # First environment variable that is set wins.
    path = None
    for env_var in ('SWI_HOME_DIR', 'SWI_LIB_DIR', 'PLBASE'):
        path = os.environ.get(env_var)
        if path is not None:
            break
    if path is None:
        # Fall back to the standard .app install location for this version.
        path = ('/Applications/SWI-Prolog.app/Contents/swipl-'
                + get_swi_ver() + '/lib/')
    for name in lib_names:
        (path_res, back_path) = walk(path, name)
        if path_res is not None:
            os.environ['SWI_LIB_DIR'] = back_path
            return (path_res, None)
    return (None, None)
def printrec(recst):
    """Pretty-printing rtsp strings"""
    # Accept bytes as well as str input.
    if hasattr(recst, 'decode'):
        recst = recst.decode('UTF-8')
    for line in [x for x in recst.split('\r\n') if x]:
        print(line)
    print("\n")
def get_compiled_serving(self, kitchen, recipe_name, variation_name):
    """get the compiled version of a recipe with variables applied for a
    specific variation in a kitchen

    returns a dictionary

    '/v2/servings/compiled/get/<string:kitchenname>/<string:recipename>/<string:variationname>', methods=['GET']

    :param self: DKCloudAPI
    :param kitchen: basestring
    :param recipe_name: basestring -- kitchen name, basestring
    :param variation_name: basestring message -- name of variation, basestring
    :rtype: dict
    """
    rc = DKReturnCode()
    # Validate the three required string arguments up front.
    if kitchen is None or isinstance(kitchen, basestring) is False:
        rc.set(rc.DK_FAIL, 'issue with kitchen')
        return rc
    if recipe_name is None or isinstance(recipe_name, basestring) is False:
        rc.set(rc.DK_FAIL, 'issue with recipe_name')
        return rc
    if variation_name is None or isinstance(variation_name, basestring) is False:
        rc.set(rc.DK_FAIL, 'issue with variation_name')
        return rc
    url = '%s/v2/servings/compiled/get/%s/%s/%s' % (
        self.get_url_for_direct_rest_call(), kitchen, recipe_name,
        variation_name)
    try:
        response = requests.get(url, headers=self._get_common_headers())
        rdict = self._get_json(response)
    # BUGFIX: `except (...), c` is Python-2-only syntax; `as` works on
    # Python 2.6+ and 3.
    except (RequestException, ValueError, TypeError) as c:
        rc.set(rc.DK_FAIL, "get_compiled_serving: exception: %s" % str(c))
        return rc
    if DKCloudAPI._valid_response(response):
        # BUGFIX: rdict.keys()[0] is not indexable on Python 3; list(rdict)
        # works on both and preserves the original (first-key) behavior.
        rc.set(rc.DK_SUCCESS, None, rdict[list(rdict)[0]])
        return rc
    else:
        arc = DKAPIReturnCode(rdict, response)
        rc.set(rc.DK_FAIL, arc.get_message())
        return rc
def encode_max_segments_accepted(arg):
    """Encode the maximum number of segments the device will accept, Section
    20.1.2.4, and if the device says it can only accept one segment it
    shouldn't say that it supports segmentation!
    """
    # zero/None means unspecified
    if not arg:
        return 0
    # anything above 64 gets the top encoding
    if arg > 64:
        return 7
    # pick the largest encoded bucket not greater than arg
    for code in range(6, 0, -1):
        if _max_segments_accepted_encoding[code] <= arg:
            return code
    raise ValueError("invalid max max segments accepted: %r" % (arg,))
def run_validator(pattern):
    """Validates a pattern against the STIX Pattern grammar. Error messages
    are returned in a list. The test passed if the returned list is empty.
    """
    start = ''
    if isinstance(pattern, six.string_types):
        start = pattern[:2]
        pattern = InputStream(pattern)
    if not start:
        # Stream input: peek at the first two characters, then rewind.
        start = pattern.readline()[:2]
        pattern.seek(0)
    parseErrListener = STIXPatternErrorListener()
    lexer = STIXPatternLexer(pattern)
    # ANTLR attaches a console listener by default... remove it.
    lexer.removeErrorListeners()
    stream = CommonTokenStream(lexer)
    parser = STIXPatternParser(stream)
    parser.buildParseTrees = False
    # same for the parser; install our collecting listener instead
    parser.removeErrorListeners()
    parser.addErrorListener(parseErrListener)
    # To improve error messages, replace "<INVALID>" in the literal names
    # with symbolic names. This is a hack, but seemed like the simplest
    # workaround.
    for idx, lit_name in enumerate(parser.literalNames):
        if lit_name == u"<INVALID>":
            parser.literalNames[idx] = parser.symbolicNames[idx]
    parser.pattern()
    # replace with easier-to-understand error message
    if not (start[0] == '[' or start == '(['):
        parseErrListener.err_strings[0] = ("FAIL: Error found at line 1:0. "
                                           "input is missing square brackets")
    return parseErrListener.err_strings
def generate_image_beacon(event_collection, body, timestamp=None):
    """Generates an image beacon URL.

    :param event_collection: the name of the collection to insert the
        event to
    :param body: dict, the body of the event to insert the event to
    :param timestamp: datetime, optional, the timestamp of the event
    """
    # Make sure the module-level client exists before delegating to it.
    _initialize_client_from_environment()
    return _client.generate_image_beacon(
        event_collection, body, timestamp=timestamp)
def on_stepin_request(self, py_db, request):
    """:param StepInRequest request:"""
    arguments = request.arguments  # : :type arguments: StepInArguments
    thread_id = arguments.threadId
    # Honor the "just my code" library filter when picking the step command.
    if py_db.get_use_libraries_filter():
        step_cmd_id = CMD_STEP_INTO_MY_CODE
    else:
        step_cmd_id = CMD_STEP_INTO
    self.api.request_step(py_db, thread_id, step_cmd_id)
    response = pydevd_base_schema.build_response(request)
    return NetCommand(CMD_RETURN, 0, response, is_json=True)
def portable_hash(x):
    """This function returns consistent hash code for builtin types,
    especially for None and tuple with None.

    The algorithm is similar to that one used by CPython 2.7

    >>> portable_hash(None)
    0
    >>> portable_hash((None, 1)) & 0xffffffff
    219750521
    """
    # String hashing must be deterministic across worker processes.
    if sys.version_info >= (3, 2, 3) and 'PYTHONHASHSEED' not in os.environ:
        raise Exception("Randomness of hash of string should be disabled via PYTHONHASHSEED")
    if x is None:
        return 0
    if isinstance(x, tuple):
        # CPython-2.7-style tuple hash, masked to a non-negative word.
        acc = 0x345678
        for item in x:
            acc = ((acc ^ portable_hash(item)) * 1000003) & sys.maxsize
        acc ^= len(x)
        return int(acc if acc != -1 else -2)
    return hash(x)
def _inject_lua_code ( self , lua_code ) :
"""Sends raw lua code and evaluate it wihtout any checking !""" | msg = ( ctypes . c_ubyte * len ( lua_code ) ) . from_buffer_copy ( lua_code . encode ( ) )
self . call_remote_api ( 'simxWriteStringStream' , 'my_lua_code' , msg ) |
def truncateGraph(graph, root_nodes):
    """Create a set of all nodes containing the root_nodes and
    all nodes reachable from them."""
    result = Graph()
    for root in root_nodes:
        reachable = GraphUtils.getReacheableSubgraph(graph, root)
        result = GraphUtils.joinGraphs(result, reachable)
    return result
def read_namespaced_replica_set_status(self, name, namespace, **kwargs):  # noqa: E501
    """read_namespaced_replica_set_status  # noqa: E501

    read status of the specified ReplicaSet  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_namespaced_replica_set_status(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the ReplicaSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1ReplicaSet
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous path: hand back the request thread directly.
        return self.read_namespaced_replica_set_status_with_http_info(
            name, namespace, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and return the response data.
    (data) = self.read_namespaced_replica_set_status_with_http_info(
        name, namespace, **kwargs)  # noqa: E501
    return data
def row_structural_typicality(self, X_L_list, X_D_list, row_id):
    """Return the typicality (opposite of anomalousness) of the given row.

    :param row_id: id of the target row
    :type row_id: int
    :returns: float, the typicality, from 0 to 1
    """
    # Thin delegation to the shared utility implementation.
    return su.row_structural_typicality(X_L_list, X_D_list, row_id)
def _init_topics(self):
    """Set up initial subscription of mysensors topics."""
    _LOGGER.info('Setting up initial MQTT topic subscription')
    # Always listen for presentation (0) and internal (3) messages.
    self._handle_subscription([
        '{}/+/+/0/+/+'.format(self._in_prefix),
        '{}/+/+/3/+/+'.format(self._in_prefix),
    ])
    if not self.persistence:
        return
    # With persistence enabled, also subscribe per known sensor/child for
    # set/req messages, plus per-sensor stream messages.
    topics = []
    for sensor in self.sensors.values():
        for child in sensor.children.values():
            for msg_type in (int(self.const.MessageType.set),
                             int(self.const.MessageType.req)):
                topics.append('{}/{}/{}/{}/+/+'.format(
                    self._in_prefix, str(sensor.sensor_id), str(child.id),
                    msg_type))
    for sensor in self.sensors.values():
        topics.append('{}/{}/+/{}/+/+'.format(
            self._in_prefix, str(sensor.sensor_id),
            int(self.const.MessageType.stream)))
    self._handle_subscription(topics)
def var(inlist):
    """Return the variance of the values in the passed list, using N - 1
    for the denominator (i.e., for estimating population variance).

    Usage:   lvar(inlist)

    Raises ZeroDivisionError when *inlist* has fewer than two elements.
    """
    n = len(inlist)
    mn = mean(inlist)
    # Comprehension instead of preallocating a zero list and index-assigning.
    deviations = [score - mn for score in inlist]
    return ss(deviations) / float(n - 1)
def load_into_repo(self, repository: 'ParameterRepository' = None, sheet_name: str = None):
    """Load parameters from an excel sheet into a repository.

    :param repository: the repository to load into; although it has a
        default of ``None`` for interface compatibility, a repository is
        required and ``None`` raises ``ValueError`` (previously this
        crashed with an opaque ``AttributeError``)
    :param sheet_name: name of the workbook sheet to read
    :return: None
    """
    if repository is None:
        raise ValueError('repository must not be None')
    repository.add_all(self.load_parameters(sheet_name))
def _random_color(h_range=(0., 1.), s_range=(.5, 1.), v_range=(.5, 1.), ):
    """Generate a random RGB color by sampling HSV components uniformly
    from the given ranges and converting to RGB."""
    hsv = [uniform(*bounds) for bounds in (h_range, s_range, v_range)]
    r, g, b = hsv_to_rgb(np.array([[hsv]])).flat
    return r, g, b
def delete(self):
    """Delete the NTP source entry from the node.

    Returns:
        True if the operation succeeds, otherwise False.
    """
    return self.configure(self.command_builder('ntp source', disable=True))
def cancelar_ultima_venda(self, chave_cfe, dados_cancelamento):
    """Overrides :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.

    :return: A SAT response specialized for ``CancelarUltimaVenda``.
    :rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
    """
    resposta = super(ClienteSATLocal, self).cancelar_ultima_venda(
        chave_cfe, dados_cancelamento)
    return RespostaCancelarUltimaVenda.analisar(resposta)
def as_xml_index(self, basename="/tmp/sitemap.xml"):
    """Return a string of the index for a large list that is split.

    All we need to do is determine the number of component sitemaps there
    will be and generate their URIs based on a pattern.

    Q - should there be a flag to select generation of each component sitemap
    in order to calculate the md5sum?
    Q - what timestamp should be used?
    """
    num_parts = self.requires_multifile()
    if not num_parts:
        raise ListBaseIndexError(
            "Request for sitemapindex for list with only %d entries when "
            "max_sitemap_entries is set to %s"
            % (len(self), str(self.max_sitemap_entries)))
    index = ListBase()
    index.sitemapindex = True
    index.capability_name = self.capability_name
    index.default_capability()
    # One Resource per component sitemap, named by a deterministic pattern.
    for part_num in range(num_parts):
        index.add(Resource(uri=self.part_name(basename, part_num)))
    return index.as_xml()
def load_outputs(self):
    """Load all available output modules, in sorted name order.

    Instantiates each module's ``OutputModule`` with the configured output
    arguments and stores it in ``self.output_modules`` keyed by module name.
    """
    import importlib
    for name in sorted(logdissect.output.__formats__):
        # importlib.import_module returns the leaf submodule directly,
        # replacing the fragile __import__(..., fromlist=[module-object]) call.
        module = importlib.import_module('logdissect.output.' + name)
        self.output_modules[name] = module.OutputModule(args=self.output_args)
def urlmap(patterns):
    """Recursively build a map of (group, name) => url patterns.

    Group is either the resolver namespace or app name for the url config.
    The urls are joined with any prefixes, and cleaned up of extraneous
    regex specific syntax.
    """
    for entry in patterns:
        group = getattr(entry, 'namespace', None)
        if group is None:
            group = getattr(entry, 'app_name', None)
        # Strip regex anchors and ensure a single leading slash.
        path = '/' + get_pattern(entry).lstrip('^').rstrip('$')
        if isinstance(entry, PATTERNS):
            yield (group, entry.name), path
        elif isinstance(entry, RESOLVERS):
            for (_, name), subpath in urlmap(entry.url_patterns):
                yield (group, name), path.rstrip('/') + subpath
def animate_ellipsis(self):
    """Animate dots at the end of the splash screen message."""
    # Rotate the queue of ellipsis states and pad the message so its
    # width stays constant while the dots "move".
    dots = self.ellipsis.pop(0)
    message = ' ' * len(dots) + self.splash_text + dots
    self.ellipsis.append(dots)
    self._show_message(message)
def set_remote_config(experiment_config, port, config_file_name):
    '''Call setClusterMetadata to pass trial

    :param experiment_config: dict with at least a 'machineList' entry
    :param port: REST server port
    :param config_file_name: used to locate the stderr log path
    :return: (success_flag, error_message) tuple
    '''
    # set machine_list
    request_data = dict()
    request_data['machine_list'] = experiment_config['machineList']
    if request_data['machine_list']:
        for i in range(len(request_data['machine_list'])):
            # The REST API expects gpuIndices as a string, not an int.
            if isinstance(request_data['machine_list'][i].get('gpuIndices'), int):
                request_data['machine_list'][i]['gpuIndices'] = str(request_data['machine_list'][i].get('gpuIndices'))
    response = rest_put(cluster_metadata_url(port), json.dumps(request_data), REST_TIME_OUT)
    err_message = ''
    if not response or not check_response(response):
        if response is not None:
            err_message = response.text
        _, stderr_full_path = get_log_path(config_file_name)
        with open(stderr_full_path, 'a+') as fout:
            # err_message may be empty (no response at all) or not valid
            # JSON; fall back to writing the raw text instead of crashing
            # with a JSONDecodeError while reporting the original failure.
            try:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
            except ValueError:
                fout.write(err_message)
        return False, err_message
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    # set trial_config
    return set_trial_config(experiment_config, port, config_file_name), err_message
def zero_to_one(table, option):
    """Min-max normalize a table of numbers to the range [0, 1].

    :param table: list of rows (lists) of numbers
    :param option: 'table' to normalize against the global min/max of the
        whole table; any other value normalizes each row independently
    :return: a new table of normalized values; cells are 0 when the relevant
        min equals the max (avoids division by zero)
    """
    if option == 'table':
        # BUG FIX: min(min(table)) / max(max(table)) compared *rows*
        # lexicographically and took the extremes of a single row, not of
        # the whole table (e.g. [[1, 9], [2, 0]] yielded m=1, ma=2 instead
        # of 0, 9). Scan every cell instead.
        all_values = [cell for row in table for cell in row]
        m, ma = min(all_values), max(all_values)
    t = []
    for row in table:
        if option != 'table':
            m, ma = min(row), max(row)
        if ma == m:
            t.append([0 for _ in row])
        else:
            t.append([(cell - m) / (ma - m) for cell in row])
    return t
def find_minimum(numbers):
    """Find and return the smallest number in a given list.

    Examples:
        >>> find_minimum([10, 20, 1, 45, 99])
        1
        >>> find_minimum([1, 2, 3])
        1
        >>> find_minimum([45, 46, 50, 60])
        45

    Parameters:
        numbers (list): a list of numeric values.

    Returns:
        The smallest value found in the given list.

    Raises:
        ValueError: if *numbers* is empty.
    """
    return min(numbers)
def decode_aes256(key, iv_plus_encrypted):
    """Utility method to decode a payload consisting of the hexed IV + the
    hexed ciphertext using the given key. See above for more details.

    :param key: string, 64 hex characters long
    :param iv_plus_encrypted: string, a hexed IV + hexed ciphertext
    """
    # The first AES.block_size bytes (2 * AES.block_size hex characters)
    # are the IV; everything after that is the encrypted message.
    split_at = 2 * AES.block_size
    iv = binascii.unhexlify(iv_plus_encrypted[:split_at])
    ciphertext = binascii.unhexlify(iv_plus_encrypted[split_at:])
    # Build the AES-CBC cipher from the unhexed key and IV, then decrypt
    # and strip the padding.
    cipher = AES.new(binascii.unhexlify(key.encode('ascii')),
                     mode=AES.MODE_CBC, IV=iv)
    return unpad_aes256(cipher.decrypt(ciphertext))
def peak_load(self):
    """Cumulative peak load capacity of generators of this grid.

    Returns
    -------
    float
        Ad-hoc calculated or cached peak load capacity
    """
    # Compute lazily and memoize on the instance.
    if self._peak_load is None:
        load_nodes = self.graph.nodes_by_attribute('load')
        self._peak_load = sum(node.peak_load.sum() for node in load_nodes)
    return self._peak_load
def currentAbove(requestContext, seriesList, n):
    """Takes one metric or a wildcard seriesList followed by an integer N.
    Out of all metrics passed, draws only the metrics whose value is above N
    at the end of the time period specified.

    Example::

        &target=currentAbove(server*.instance*.threads.busy,50)

    Draws the servers with more than 50 busy threads.
    """
    # NOTE(review): despite the name "above", the comparison is >=
    # (at or above N) — confirm against upstream graphite behavior.
    kept = []
    for series in seriesList:
        last_value = safeLast(series)
        if last_value is not None and last_value >= n:
            kept.append(series)
    return kept
def f_cash(x, counts, bkg, model):
    """Wrapper for cash statistics, that defines the model function.

    Parameters
    ----------
    x : float
        Model amplitude.
    counts : `~numpy.ndarray`
        Count map slice, where model is defined.
    bkg : `~numpy.ndarray`
        Background map slice, where model is defined.
    model : `~numpy.ndarray`
        Source template (multiplied with exposure).
    """
    predicted = bkg + x * model
    return 2.0 * poisson_log_like(counts, predicted)
def _prepare_data_dir(self, data):
    """Prepare destination directory where the data will live.

    :param data: The :class:`~resolwe.flow.models.Data` object for
        which to prepare the private execution directory.
    :return: The prepared data directory path.
    :rtype: str
    """
    logger.debug(__("Preparing data directory for Data with id {}.", data.id))
    # NOTE(review): the transaction scope below is assumed to cover only the
    # DataLocation database operations — confirm against upstream.
    with transaction.atomic():
        # Create a temporary random location and then override it with data
        # location id since object has to be created first.
        # TODO Find a better solution, e.g. defer the database constraint.
        temporary_location_string = uuid.uuid4().hex[:10]
        data_location = DataLocation.objects.create(subpath=temporary_location_string)
        # Rename the subpath to the (now known) primary key and link the Data.
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(data)
    output_path = self._get_per_data_dir('DATA_DIR', data_location.subpath)
    # Permission bits come from settings, defaulting to rwxr-xr-x.
    dir_mode = self.settings_actual.get('FLOW_EXECUTOR', {}).get('DATA_DIR_MODE', 0o755)
    os.mkdir(output_path, mode=dir_mode)
    # os.mkdir is not guaranteed to set the given mode
    os.chmod(output_path, dir_mode)
    return output_path
def key_release_event(self, widget, event):
    """Called when a key is released after being pressed.
    Adjust method signature as appropriate for callback.
    """
    # get keyname or keycode and translate to ginga standard
    # keyname =
    # keycode =
    # self.transkey(keyname, keycode)
    keyname = ''
    self.logger.debug("key release event, key=%s" % (keyname))
    return self.make_ui_callback('key-release', keyname)
def _hyphenate ( input , add_prefix = False ) :
"""Change underscores to hyphens so that object attributes can be easily
tranlated to GPG option names .
: param str input : The attribute to hyphenate .
: param bool add _ prefix : If True , add leading hyphens to the input .
: rtype : str
: return : The ` ` input ` ` with underscores changed to hyphens .""" | ret = '--' if add_prefix else ''
ret += input . replace ( '_' , '-' )
return ret |
def portgroups_configured(name, dvs, portgroups):
    '''Configures portgroups on a DVS.

    Creates/updates/removes portgroups in a provided DVS

    dvs
        Name of the DVS

    portgroups
        Portgroup dict representations (see module sysdocs)
    '''
    datacenter = _get_datacenter_name()
    log.info('Running state %s on DVS \'%s\', datacenter \'%s\'',
             name, dvs, datacenter)
    changes_required = False
    ret = {'name': name, 'changes': {}, 'result': None, 'comment': None}
    comments = []
    changes = {}
    changes_required = False
    # BUG FIX: si must be defined before the try block; otherwise, if
    # obtaining the service instance itself raises, the `if si:` check in
    # the except clause fails with an UnboundLocalError that masks the
    # original error.
    si = None
    try:
        # TODO portroups validation
        si = __salt__['vsphere.get_service_instance_via_proxy']()
        current_pgs = __salt__['vsphere.list_dvportgroups'](
            dvs=dvs, service_instance=si)
        expected_pg_names = []
        for pg in portgroups:
            # NOTE(review): pg is mutated in place ('name' is removed) —
            # callers passing shared dicts see the mutation.
            pg_name = pg['name']
            expected_pg_names.append(pg_name)
            del pg['name']
            log.info('Checking pg \'%s\'', pg_name)
            filtered_current_pgs = \
                [p for p in current_pgs if p.get('name') == pg_name]
            if not filtered_current_pgs:
                # Portgroup doesn't exist yet: create it (or just report in
                # test mode).
                changes_required = True
                if __opts__['test']:
                    comments.append('State {0} will create a new portgroup '
                                    '\'{1}\' in DVS \'{2}\', datacenter '
                                    '\'{3}\''.format(name, pg_name, dvs,
                                                     datacenter))
                else:
                    __salt__['vsphere.create_dvportgroup'](
                        portgroup_dict=pg, portgroup_name=pg_name, dvs=dvs,
                        service_instance=si)
                    comments.append('Created a new portgroup \'{0}\' in DVS '
                                    '\'{1}\', datacenter \'{2}\''
                                    ''.format(pg_name, dvs, datacenter))
                    log.info(comments[-1])
                changes.update({pg_name: {'new': pg}})
            else:
                # Porgroup already exists. Checking the config
                log.trace('Portgroup \'%s\' found in DVS \'%s\', datacenter '
                          '\'%s\'. Checking for any updates.',
                          pg_name, dvs, datacenter)
                current_pg = filtered_current_pgs[0]
                diff_dict = _get_diff_dict(current_pg, pg)
                if diff_dict:
                    changes_required = True
                    if __opts__['test']:
                        changes_strings = \
                            _get_changes_from_diff_dict(diff_dict)
                        log.trace('changes_strings = %s', changes_strings)
                        comments.append(
                            'State {0} will update portgroup \'{1}\' in '
                            'DVS \'{2}\', datacenter \'{3}\':\n{4}'
                            ''.format(name, pg_name, dvs, datacenter,
                                      '\n'.join(['\t{0}'.format(c) for c in
                                                 changes_strings])))
                    else:
                        __salt__['vsphere.update_dvportgroup'](
                            portgroup_dict=pg, portgroup=pg_name, dvs=dvs,
                            service_instance=si)
                        comments.append('Updated portgroup \'{0}\' in DVS '
                                        '\'{1}\', datacenter \'{2}\''
                                        ''.format(pg_name, dvs, datacenter))
                        log.info(comments[-1])
                    changes.update(
                        {pg_name:
                         {'new': _get_val2_dict_from_diff_dict(diff_dict),
                          'old': _get_val1_dict_from_diff_dict(diff_dict)}})
        # Add the uplink portgroup to the expected pg names
        uplink_pg = __salt__['vsphere.list_uplink_dvportgroup'](
            dvs=dvs, service_instance=si)
        expected_pg_names.append(uplink_pg['name'])
        # Remove any extra portgroups
        for current_pg in current_pgs:
            if current_pg['name'] not in expected_pg_names:
                changes_required = True
                if __opts__['test']:
                    comments.append('State {0} will remove '
                                    'the portgroup \'{1}\' from DVS \'{2}\', '
                                    'datacenter \'{3}\''
                                    ''.format(name, current_pg['name'], dvs,
                                              datacenter))
                else:
                    __salt__['vsphere.remove_dvportgroup'](
                        portgroup=current_pg['name'], dvs=dvs,
                        service_instance=si)
                    comments.append('Removed the portgroup \'{0}\' from DVS '
                                    '\'{1}\', datacenter \'{2}\''
                                    ''.format(current_pg['name'], dvs,
                                              datacenter))
                    log.info(comments[-1])
                changes.update({current_pg['name']: {'old': current_pg}})
        __salt__['vsphere.disconnect'](si)
    except salt.exceptions.CommandExecutionError as exc:
        log.exception('Encountered error')
        if si:
            __salt__['vsphere.disconnect'](si)
        if not __opts__['test']:
            ret['result'] = False
        ret.update({'comment': exc.strerror,
                    'result': False if not __opts__['test'] else None})
        return ret
    if not changes_required:
        # We have no changes
        ret.update({'comment': ('All portgroups in DVS \'{0}\', datacenter '
                                '\'{1}\' exist and are correctly configured. '
                                'Nothing to be done.'.format(dvs, datacenter)),
                    'result': True})
    else:
        ret.update({'comment': '\n'.join(comments),
                    'changes': changes,
                    'result': None if __opts__['test'] else True, })
    return ret
def compute_regex_CDR3_template_pgen(self, regex_seq, V_usage_mask_in=None, J_usage_mask_in=None, print_warnings=True, raise_overload_warning=True):
    """Compute Pgen summed over all CDR3 sequences matching regex_seq.

    Computes Pgen for a (limited vocabulary) regular expression of CDR3
    amino acid sequences, conditioned on the V genes/alleles indicated in
    V_usage_mask_in and the J genes/alleles in J_usage_mask_in. Note that
    this method enumerates every sequence matching the regular expression
    and computes the Pgen of each in turn — THIS CAN BE SLOW. Consider
    defining a custom alphabet symbol (e.g. ^ = [AGR]) for undetermined
    amino acids to collapse the enumeration into a single sequence.

    Parameters
    ----------
    regex_seq : str
        Regular expression describing the CDR3 sequences whose Pgens are
        computed and summed.
    V_usage_mask_in : str or list, optional
        Which V alleles to consider; None means all productive V alleles.
    J_usage_mask_in : str or list, optional
        Which J alleles to consider; None means all productive J alleles.
    print_warnings : bool
        Whether warnings are printed. Default ON.
    raise_overload_warning : bool
        Warn when more than 10000 sequences correspond to regex_seq.

    Returns
    -------
    pgen : float
        The generation probability (Pgen) summed over matching sequences.

    Examples
    --------
    >>> generation_probability.compute_regex_CDR3_template_pgen('CASS[AGR]SARPEQFF')
    8.1090898050318022e-10
    >>> generation_probability.compute_regex_CDR3_template_pgen('CASSAX{0,5}SARPEQFF')
    6.8468778040965569e-10
    """
    V_usage_mask, J_usage_mask = self.format_usage_masks(
        V_usage_mask_in, J_usage_mask_in, print_warnings)
    CDR3_seqs = self.list_seqs_from_regex(
        regex_seq, print_warnings, raise_overload_warning)
    # Sum the Pgen of every non-empty enumerated sequence.
    return sum(self.compute_CDR3_pgen(seq, V_usage_mask, J_usage_mask)
               for seq in CDR3_seqs if len(seq) > 0)
def _exec_template(callable_, context, args=None, kwargs=None):
    """execute a rendering callable given the callable, a
    Context, and optional explicit arguments

    the contextual Template will be located if it exists, and
    the error handling options specified on that Template will
    be interpreted here.
    """
    template = context._with_template
    # Only intercept exceptions when the owning template asks for it;
    # otherwise let them propagate to the caller untouched.
    if template is not None and (template.format_exceptions or template.error_handler):
        try:
            callable_(context, *args, **kwargs)
        except Exception:
            # compat.exception_as() yields the current exception instance.
            _render_error(template, context, compat.exception_as())
        except:
            # Bare except: catches BaseException subclasses the clause above
            # does not (e.g. KeyboardInterrupt, SystemExit).
            # NOTE(review): sys.exc_info()[0] is the exception *class*, not
            # the instance — presumably deliberate py2/py3 compat behavior;
            # confirm against _render_error's expectations.
            e = sys.exc_info()[0]
            _render_error(template, context, e)
    else:
        callable_(context, *args, **kwargs)
def rename_mapobject_type(self, name, new_name):
    '''Renames a mapobject type.

    Parameters
    ----------
    name: str
        name of the mapobject type that should be renamed
    new_name: str
        name that should be given to the mapobject type

    See also
    --------
    :func:`tmserver.api.mapobject.update_mapobject_type`
    :class:`tmlib.models.mapobject.MapobjectType`
    '''
    logger.info('rename mapobject type "%s" of experiment "%s"',
                name, self.experiment_name)
    mapobject_type_id = self._get_mapobject_type_id(name)
    url = self._build_api_url(
        '/experiments/{experiment_id}/mapobject_types/{mapobject_type_id}'.format(
            experiment_id=self._experiment_id,
            mapobject_type_id=mapobject_type_id))
    response = self._session.put(url, json={'name': new_name})
    # Surface HTTP errors to the caller.
    response.raise_for_status()
def next(self):
    """Get next data batch from iterator.

    Returns
    -------
    DataBatch
        The data of next batch.

    Raises
    ------
    StopIteration
        If the end of the data is reached.
    """
    # Guard clause: exhaustion ends iteration per the iterator protocol.
    if not self.iter_next():
        raise StopIteration
    return DataBatch(data=self.getdata(), label=self.getlabel(),
                     pad=self.getpad(), index=self.getindex())
def super_glob(pattern):
    'glob that understands **/ for all sub-directories recursively.'
    pieces = pattern.split('/')
    if '**' not in pieces:
        expanded = ['/'.join(pieces)]
    else:
        # Expand '**' into one concrete pattern per directory under the
        # prefix, then glob each.
        star_at = pieces.index('**')
        prefix = '/'.join(pieces[:star_at])
        postfix = '/'.join(pieces[star_at + 1:])
        expanded = ['{0}/{1}'.format(walked_dir, postfix)
                    for walked_dir, _, _ in os.walk(prefix)]
    return chain.from_iterable(glob(p) for p in expanded)
def bind(self, name, filterset):
    """attach filter to filterset

    gives a name to use to extract arguments from querydict
    """
    # A name set on the filter itself overrides the one passed in.
    # NOTE(review): filterset is unused here; kept for interface
    # compatibility.
    effective_name = name if self.name is None else self.name
    self.field.bind(effective_name, self)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.