signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def findFileParam(self, comp):
    """Find the filename auto-parameter that component *comp* belongs to
    and return all the filenames for that parameter.

    Assumes *comp* appears in at most one filename auto-parameter.

    :param comp: component to search parameter membership for
    :returns: list<str> -- filenames the found parameter loops through,
        or ``None`` when no filename parameter contains *comp*
    """
    matches = (
        param['names']
        for param in self._parameters
        if param['parameter'] == 'filename' and comp in param['selection']
    )
    return next(matches, None)
def register(parser):
    """Register all machine-related subcommands with the given parser."""
    subcommands = (
        cmd_machines,
        cmd_machine,
        cmd_allocate,
        cmd_deploy,
        cmd_commission,
        cmd_release,
        cmd_abort,
        cmd_mark_fixed,
        cmd_mark_broken,
        cmd_power_off,
        cmd_power_on,
        cmd_ssh,
    )
    # Each subcommand module exposes its own register() hook.
    for subcommand in subcommands:
        subcommand.register(parser)
def insert(self, node, before=None):
    """Insert a new node in the list.

    If *before* is specified, the new node is inserted before this node.
    Otherwise, the node is inserted at the end of the list.

    :returns: the inserted node
    """
    node._list = self
    if self._first is None:
        # Empty list: the node becomes both head and tail.
        self._first = self._last = node
        self._size += 1
        return node
    if before is None:
        # Append after the current tail.
        self._last._next = node
        node._prev = self._last
        self._last = node
    else:
        # Splice in directly ahead of *before*.
        node._next = before
        node._prev = before._prev
        if node._prev:
            node._prev._next = node
        else:
            # *before* was the head, so the new node takes its place.
            self._first = node
        node._next._prev = node
    self._size += 1
    return node
def twoSurfplots(self):
    """Plot a two-panel figure for 2D arrays: mantle-equivalent load
    thickness (top) and deflection (bottom).

    Reads ``self.qs`` (surface load), ``self.w`` (deflection),
    ``self.rho_m``, ``self.g``, ``self.dx``, ``self.dy`` and
    ``self.latlon`` (axis labels/extent in degrees vs. km).
    """
    # Could more elegantly just call surfplot twice
    # And also could include xyzinterp as an option inside surfplot.
    # Noted here in case anyone wants to take that on in the future...
    plt.subplot(211)
    plt.title('Load thickness, mantle equivalent [m]', fontsize=16)
    # qs / (rho_m * g) converts the load into mantle-equivalent thickness.
    if self.latlon:
        # NOTE(review): the extent pairs dx with shape[0] (rows) and dy
        # with shape[1] (columns); for a non-square grid these look
        # swapped -- confirm against the grid convention used elsewhere.
        plt.imshow(self.qs / (self.rho_m * self.g),
                   extent=(0, self.dx * self.qs.shape[0],
                           self.dy * self.qs.shape[1], 0))
        plt.xlabel('longitude [deg E]', fontsize=12, fontweight='bold')
        plt.ylabel('latitude [deg N]', fontsize=12, fontweight='bold')
    else:
        # dx/dy are presumably metres here; /1000. converts extents to km.
        plt.imshow(self.qs / (self.rho_m * self.g),
                   extent=(0, self.dx / 1000. * self.qs.shape[0],
                           self.dy / 1000. * self.qs.shape[1], 0))
        plt.xlabel('x [km]', fontsize=12, fontweight='bold')
        plt.ylabel('y [km]', fontsize=12, fontweight='bold')
    plt.colorbar()
    plt.subplot(212)
    plt.title('Deflection [m]')
    if self.latlon:
        plt.imshow(self.w,
                   extent=(0, self.dx * self.w.shape[0],
                           self.dy * self.w.shape[1], 0))
        plt.xlabel('longitude [deg E]', fontsize=12, fontweight='bold')
        plt.ylabel('latitude [deg N]', fontsize=12, fontweight='bold')
    else:
        plt.imshow(self.w,
                   extent=(0, self.dx / 1000. * self.w.shape[0],
                           self.dy / 1000. * self.w.shape[1], 0))
        plt.xlabel('x [km]', fontsize=12, fontweight='bold')
        plt.ylabel('y [km]', fontsize=12, fontweight='bold')
    plt.colorbar()
def deliver_hook(target, payload, instance_id=None, hook_id=None, **kwargs):
    """POST *payload* as JSON to *target*.

    target: the url to receive the payload.
    payload: a python primitive data structure
    instance_id: a possibly None "trigger" instance ID
    hook_id: the ID of the defining Hook object

    Returns the response body text; raises on an HTTP error status.
    """
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Token %s" % settings.HOOK_AUTH_TOKEN,
    }
    response = requests.post(url=target, data=json.dumps(payload), headers=headers)
    response.raise_for_status()
    return response.text
def login():
    """Enable the user to log in to the remote GMQL service.

    If there is no stored session for the configured remote address the
    user is connected as guest; otherwise the stored authentication token
    is reused (with fallback handled by ``auto_login``).

    Side effects: rebinds the module-level ``__remote_manager`` and stores
    the (possibly refreshed) session in ``__session_manager``.
    """
    from .RemoteConnection.RemoteManager import RemoteManager
    global __remote_manager, __session_manager
    logger = logging.getLogger()
    remote_address = get_remote_address()
    res = __session_manager.get_session(remote_address)
    if res is None:
        # There is no session for this address: log in as guest.
        warnings.warn("There is no active session for address {}. Logging as Guest user".format(remote_address))
        rm = RemoteManager(address=remote_address)
        rm.login()
        session_type = "guest"
    else:
        # There is a previous session for this address: auto-login using
        # the stored access token.  res[1] is presumably the token and
        # res[2] the login mode -- TODO confirm tuple layout.
        logger.info("Logging using stored authentication token")
        rm = RemoteManager(address=remote_address, auth_token=res[1])
        # If the access token is no longer valid (therefore we are in
        # guest mode), auto_login performs a guest login from scratch.
        session_type = rm.auto_login(how=res[2])
    # Store the new session.
    __remote_manager = rm
    access_time = int(time.time())
    auth_token = rm.auth_token
    __session_manager.add_session(remote_address, auth_token, access_time, session_type)
def top(num_processes=5, interval=3):
    '''Return a list of top CPU consuming processes during the interval.

    num_processes = return the top N CPU consuming processes
    interval = the number of seconds to sample CPU usage over

    CLI Examples:

    .. code-block:: bash

        salt '*' ps.top
        salt '*' ps.top 5 10
    '''
    result = []
    start_usage = {}
    # First pass: record the cumulative CPU time (user+system) per process.
    for pid in psutil.pids():
        try:
            process = psutil.Process(pid)
            user, system = process.cpu_times()
        except ValueError:
            # Some platforms return extra cpu_times fields; keep the
            # first two (user, system).
            user, system, _, _ = process.cpu_times()
        except psutil.NoSuchProcess:
            # The process exited between pids() and Process(); skip it.
            continue
        start_usage[process] = user + system
    time.sleep(interval)
    usage = set()
    # Second pass: compute CPU time consumed during the interval.
    for process, start in six.iteritems(start_usage):
        try:
            user, system = process.cpu_times()
        except ValueError:
            user, system, _, _ = process.cpu_times()
        except psutil.NoSuchProcess:
            continue
        now = user + system
        diff = now - start
        usage.add((diff, process))
    # Report the num_processes largest diffs.
    # NOTE(review): sorting (diff, process) tuples falls back to comparing
    # Process objects when diffs tie -- confirm Process is orderable on
    # the supported psutil versions.
    for idx, (diff, process) in enumerate(reversed(sorted(usage))):
        if num_processes and idx >= num_processes:
            break
        # Prefer the full command line; fall back to the process name.
        if not _get_proc_cmdline(process):
            cmdline = _get_proc_name(process)
        else:
            cmdline = _get_proc_cmdline(process)
        info = {'cmd': cmdline,
                'user': _get_proc_username(process),
                'status': _get_proc_status(process),
                'pid': _get_proc_pid(process),
                'create_time': _get_proc_create_time(process),
                'cpu': {},
                'mem': {},
                }
        for key, value in six.iteritems(process.cpu_times()._asdict()):
            info['cpu'][key] = value
        for key, value in six.iteritems(process.memory_info()._asdict()):
            info['mem'][key] = value
        result.append(info)
    return result
def find_config(test_file=None, defaults=None, root=os.curdir):
    """Find the path to the default config file.

    We look at :root: for the config file. If we can't find it there we
    start looking at the parent directory recursively until we find the
    file and return the absolute path to it.
    If we can't find anything, we return None.

    Args:
        test_file: A specific config file name to look for; when None,
            the names in *defaults* are tried in order.
        defaults: Candidate config file names
            (default: ['.benchbuild.yml', '.benchbuild.yaml']).
        root: The directory to start looking from.

    Returns:
        Path to the config file, None if we can't find anything.
    """
    if defaults is None:
        defaults = [".benchbuild.yml", ".benchbuild.yaml"]

    def walk_rec(cur_path, root):
        # NOTE(review): the incoming cur_path is immediately shadowed; the
        # searched name actually comes from the enclosing test_file binding.
        cur_path = local.path(root) / test_file
        if cur_path.exists():
            return cur_path
        # Walk up one directory; stop when the parent equals the current
        # root (presumably at the filesystem root) -- confirm that
        # local.path normalizes '..' so the recursion terminates.
        new_root = local.path(root) / os.pardir
        return walk_rec(cur_path, new_root) if new_root != root else None

    if test_file is not None:
        return walk_rec(test_file, root)
    for test_file in defaults:
        ret = walk_rec(test_file, root)
        if ret is not None:
            return ret
def init(opts, no_install=False, quiet=False):
    """Initialize a purged environment or copied environment directory

    Usage:
      datacats init [-in] [--syslog] [-s NAME] [--address=IP] [--interactive]
                    [--site-url SITE_URL] [ENVIRONMENT_DIR [PORT]] [--no-init-db]

    Options:
      --address=IP         Address to listen on (Linux-only)
      --interactive        Don't detach from the web container
      -i --image-only      Create the environment but don't start containers
      --no-init-db         Don't initialize the database. Useful for importing other CKANs
      -n --no-sysadmin     Don't prompt for an initial sysadmin user account
      -s --site=NAME       Pick a site to initialize [default: primary]
      --site-url SITE_URL  The site_url to use in API responses (e.g. http://example.org:{port}/)
      --syslog             Log to the syslog

    ENVIRONMENT_DIR is an existing datacats environment directory. Defaults to '.'
    """
    # boot2docker cannot bind an arbitrary host address.
    if opts['--address'] and is_boot2docker():
        raise DatacatsError('Cannot specify address on boot2docker.')
    # Unpack the docopt-style options dict.
    environment_dir = opts['ENVIRONMENT_DIR']
    port = opts['PORT']
    address = opts['--address']
    start_web = not opts['--image-only']
    create_sysadmin = not opts['--no-sysadmin']
    site_name = opts['--site']
    site_url = opts['--site-url']
    interactive = opts['--interactive']
    init_db = not opts['--no-init-db']
    environment_dir = abspath(environment_dir or '.')
    log_syslog = opts['--syslog']
    environment = Environment.load(environment_dir, site_name)
    if address:
        environment.address = address
    if port:
        environment.port = int(port)
    if site_url:
        environment.site_url = site_url
    try:
        if environment.sites and site_name in environment.sites:
            raise DatacatsError('Site named {0} already exists.'.format(site_name))
        # There are a couple of steps we can/must skip if we're making a
        # sub-site only.
        making_full_environment = not environment.data_exists()
        if not quiet:
            write('Creating environment {0}/{1} '
                  'from existing environment directory "{0}"'
                  .format(environment.name, environment.site_name))
        # Ordered pipeline of initialization steps; the virtualenv steps
        # only run when creating a brand new environment.
        steps = [
            lambda: environment.create_directories(create_project_dir=False)
        ] + ([
            environment.save,
            environment.create_virtualenv
        ] if making_full_environment else []) + [
            environment.save_site,
            environment.start_supporting_containers,
            environment.fix_storage_permissions,
        ]
        for fn in steps:
            fn()
            if not quiet:
                write('.')
        if not quiet:
            write('\n')
    except:
        # Python 2 bare print statement: emit a newline so the error does
        # not run into the progress dots, then re-raise unchanged.
        if not quiet:
            print
        raise
    return finish_init(environment, start_web, create_sysadmin,
                       log_syslog=log_syslog, do_install=not no_install,
                       quiet=quiet, site_url=site_url,
                       interactive=interactive, init_db=init_db)
def receive(host, timeout):
    """Print all messages in queue.

    Args:
        host (str): Specified --host.
        timeout (int): How long should the script wait for a message.

    NOTE(review): Python 2 only -- uses print statements and a
    tuple-unpacking lambda; confirm the target runtime before porting.
    """
    parameters = settings.get_amqp_settings()[host]
    queues = parameters["queues"]
    # Reverse the mapping: routing key -> queue name.
    queues = dict(map(lambda (x, y): (y, x), queues.items()))
    queue = queues[parameters["out_key"]]
    channel = _get_channel(host, timeout)
    for method_frame, properties, body in channel.consume(queue):
        print json.dumps({
            "method_frame": str(method_frame),
            "properties": str(properties),
            "body": body
        })
        print "-" * 79
        print
        # Acknowledge only after the message has been printed.
        channel.basic_ack(method_frame.delivery_tag)
def named_field_regex(keypat_tups):
    """Build a regex of concatenated (optionally named) capture groups.

    Args:
        keypat_tups (list): tuples of ``(name, pattern)``, or a bare
            string for an unnamed pattern (treated as ``(None, pattern)``).

    Returns:
        str: the combined regex, e.g.
        ``(?P<name>G\\d+)(?P<under>_)(?P<id>\\d+)(\\.)(?P<ext>\\w+)``
    """
    def _normalize(item):
        # Allow for unnamed patterns given as bare strings.
        return (None, item) if isinstance(item, six.string_types) else item

    fields = [named_field(key, pat) for key, pat in map(_normalize, keypat_tups)]
    return ''.join(fields)
def update_config_mode(self, prompt=None):
    """Refresh ``self.mode`` from the driver.

    Uses *prompt* when supplied (truthy), otherwise falls back to
    ``self.prompt``.
    """
    # TODO: Fix the conflict with config mode attribute at connection
    effective_prompt = prompt if prompt else self.prompt
    self.mode = self.driver.update_config_mode(effective_prompt)
def setup_argparse():
    """Set up the argparse argument parser.

    :return: configured parser instance
    :rtype: ArgumentParser
    """
    parser = argparse.ArgumentParser(
        description='Convert old ini-style GNS3 topologies (<=0.8.7) to '
                    'the newer version 1+ JSON format')
    add = parser.add_argument
    add('--version', action='version', version='%(prog)s ' + __version__)
    add('-n', '--name',
        help='Topology name (default uses the name of the old project directory)')
    add('-o', '--output', help='Output directory')
    add('topology', nargs='?', default='topology.net',
        help='GNS3 .net topology file (default: topology.net)')
    add('--debug', action='store_true', help='Enable debugging output')
    add('-q', '--quiet', action='store_true',
        help='Quiet-mode (no output to console)')
    return parser
def AddValue(self, name, number, aliases=None, description=None):
    """Adds an enumeration value.

    Args:
        name (str): name.
        number (int): number.
        aliases (Optional[list[str]]): aliases.
        description (Optional[str]): description.

    Raises:
        KeyError: if the enumeration value already exists.
    """
    alias_list = aliases or []
    # Validate everything up front so a failure leaves no partial state.
    if name in self.values_per_name:
        raise KeyError('Value with name: {0:s} already exists.'.format(name))
    if number in self.values_per_number:
        raise KeyError('Value with number: {0!s} already exists.'.format(number))
    for alias in alias_list:
        if alias in self.values_per_alias:
            raise KeyError('Value with alias: {0:s} already exists.'.format(alias))
    value = EnumerationValue(name, number, aliases=aliases, description=description)
    self.values.append(value)
    self.values_per_name[name] = value
    self.values_per_number[number] = value
    for alias in alias_list:
        self.values_per_alias[alias] = value
def overlay(main_parent_node, overlay_parent_node, eof_action='repeat', **kwargs):
    """Overlay one video on top of another.

    Args:
        x: Expression for the x coordinate of the overlaid video on the
            main video (default 0; invalid expressions place the overlay
            outside the visible area).
        y: Expression for the y coordinate (same semantics as ``x``).
        eof_action: Action on EOF of the secondary input -- ``repeat``
            (default, repeat last frame), ``endall`` (end both streams)
            or ``pass`` (pass the main input through).
        eval: When x/y expressions are evaluated: ``init`` or ``frame``
            (default ``frame``).
        shortest: If 1, terminate output when the shortest input ends
            (default 0).
        format: Output pixel format -- ``yuv420`` (default), ``yuv422``,
            ``yuv444``, ``rgb`` or ``gbrp``.
        rgb (deprecated): If 1, force RGB inputs; use ``format`` instead.
        repeatlast: If 1 (default), keep drawing the last overlay frame
            over the main input until the end of the stream.

    Official documentation: `overlay
    <https://ffmpeg.org/ffmpeg-filters.html#overlay-1>`__
    """
    options = dict(kwargs, eof_action=eof_action)
    node = FilterNode(
        [main_parent_node, overlay_parent_node],
        overlay.__name__,
        kwargs=options,
        max_inputs=2,
    )
    return node.stream()
def beautify(self, string):
    """Parse *string* and stringify its phrases (replace tags with
    formatting codes).

    Arguments:
        string (str): The string to beautify/parse.

    Returns:
        The parsed, stringified and ultimately beautified string.

    Raises:
        errors.ArgumentError: if phrases were found, but not a single
            style (flag combination) was supplied.
    """
    if not string:
        return string
    # The parsed string may differ from the input because of escaped
    # characters.
    parsed, phrases = self.parse(string)
    if not phrases:
        return parsed
    if not (self.positional or self.always):
        raise errors.ArgumentError("Found phrases, but no styles "
                                   "were supplied!")
    return self.stringify(parsed, phrases)
def getScales(self, term_i=None):
    """Return the variance-decomposition scale parameters.

    Args:
        term_i: index of the term we are interested in; when ``None``
            the whole vector of parameters is returned.

    Returns:
        The scales for term *term_i*, or all scales when *term_i* is None.

    Raises:
        AssertionError: if *term_i* is not a valid term index.
    """
    # FIX: compare with None using identity, not equality (PEP 8; ``==``
    # can be hijacked by custom __eq__ implementations).
    if term_i is None:
        return self.vd.getScales()
    # AssertionError kept for backward compatibility with existing callers.
    assert term_i < self.n_terms, 'Term index non valid'
    return self.vd.getScales(term_i)
def points_properties_df(self):
    """Return a DataFrame of point properties (one column per point name)
    in preparation for storage in SQL."""
    # Runtime-only keys that must not be persisted.
    excluded = ("device", "network", "simulated", "overridden")
    pprops = {}
    for point in self.points:
        props = point.properties.asdict.copy()
        for key in excluded:
            props.pop(key, None)
        pprops[point.properties.name] = props
    return pd.DataFrame(pprops)
def makeDirectory(self, full_path, dummy=40841):
    """Make a directory.

    >>> nd.makeDirectory('/test')

    :param full_path: full path of the directory to create; a trailing
        ``'/'`` is appended when missing.
    :param dummy: opaque request token forwarded to the API unchanged.
    :return: ``True`` when the directory was created, ``False`` otherwise.
    """
    # FIX: the original used ``full_path[-1] is not '/'`` -- an identity
    # comparison against a str literal, which is implementation-dependent
    # (and a SyntaxWarning on modern Pythons).  endswith() is also safe
    # for the empty string.
    if not full_path.endswith('/'):
        full_path += '/'
    data = {
        'dstresource': full_path,
        'userid': self.user_id,
        'useridx': self.useridx,
        'dummy': dummy,
    }
    s, metadata = self.POST('makeDirectory', data)
    return s
def key(self, direction, mechanism, purviews=False, _prefix=None):
    """Cache key. This is the call signature of |Subsystem.find_mice()|."""
    parts = (self.subsystem_hash, _prefix, direction, mechanism, purviews)
    return "subsys:{}:{}:{}:{}:{}".format(*parts)
def __stringlist(self, ttype, tvalue):
    """Specific method to parse the 'string-list' type.

    Syntax:
        string-list = "[" string *("," string) "]" / string
                      ; if there is only a single string, the brackets
                      ; are optional

    :param ttype: token type ("string", "comma" or "right_bracket")
    :param tvalue: raw token value (bytes for string tokens)
    :return: True when the token was consumed, False otherwise
    """
    if ttype == "string":
        # Accumulate the decoded string; the next token must be a comma
        # or the closing bracket.
        self.__curstringlist += [tvalue.decode("utf-8")]
        self.__set_expected("comma", "right_bracket")
        return True
    if ttype == "comma":
        self.__set_expected("string")
        return True
    if ttype == "right_bracket":
        # List complete: attach it to the current command as an argument
        # and return to the generic argument-parsing state.
        self.__curcommand.check_next_arg("stringlist", self.__curstringlist)
        self.__cstate = self.__arguments
        return self.__check_command_completion()
    # Any other token type is not part of a string-list.
    return False
def spherical_k_means(X, n_clusters, sample_weight=None, init="k-means++",
                      n_init=10, max_iter=300, verbose=False, tol=1e-4,
                      random_state=None, copy_x=True, n_jobs=1,
                      algorithm="auto", return_n_iter=False):
    """Modified from sklearn.cluster.k_means_.k_means.

    Runs spherical k-means *n_init* times (serially or in parallel) and
    keeps the clustering with the lowest inertia.

    Returns ``(centers, labels, inertia)`` or, when *return_n_iter* is
    True, ``(centers, labels, inertia, n_iter)``.
    """
    if n_init <= 0:
        raise ValueError("Invalid number of initializations."
                         " n_init=%d must be bigger than zero." % n_init)
    random_state = check_random_state(random_state)
    if max_iter <= 0:
        raise ValueError("Number of iterations should be a positive number,"
                         " got %d instead" % max_iter)
    # FIX: ``np.infty`` was removed in NumPy 2.0; use a None sentinel with
    # an explicit check, mirroring upstream scikit-learn.
    best_inertia = None
    # avoid forcing order when copy_x=False
    order = "C" if copy_x else None
    X = check_array(X, accept_sparse="csr", dtype=[np.float64, np.float32],
                    order=order, copy=copy_x)
    # verify that the number of samples given is larger than k
    if _num_samples(X) < n_clusters:
        raise ValueError("n_samples=%d should be >= n_clusters=%d"
                         % (_num_samples(X), n_clusters))
    tol = _tolerance(X, tol)
    if hasattr(init, "__array__"):
        init = check_array(init, dtype=X.dtype.type, order="C", copy=True)
        _validate_center_shape(X, n_clusters, init)
        if n_init != 1:
            warnings.warn(
                "Explicit initial center position passed: "
                "performing only one init in k-means instead of n_init=%d"
                % n_init, RuntimeWarning, stacklevel=2)
            n_init = 1
    # precompute squared norms of data points
    x_squared_norms = row_norms(X, squared=True)
    if n_jobs == 1:
        # For a single thread, less memory is needed if we just store one
        # set of the best results (as opposed to one set per run per thread).
        for it in range(n_init):
            # run a k-means once
            labels, inertia, centers, n_iter_ = _spherical_kmeans_single_lloyd(
                X, n_clusters, sample_weight, max_iter=max_iter, init=init,
                verbose=verbose, tol=tol, x_squared_norms=x_squared_norms,
                random_state=random_state)
            # determine if these results are the best so far
            if best_inertia is None or inertia < best_inertia:
                best_labels = labels.copy()
                best_centers = centers.copy()
                best_inertia = inertia
                best_n_iter = n_iter_
    else:
        # parallelisation of k-means runs
        seeds = random_state.randint(np.iinfo(np.int32).max, size=n_init)
        results = Parallel(n_jobs=n_jobs, verbose=0)(
            delayed(_spherical_kmeans_single_lloyd)(
                X, n_clusters, sample_weight, max_iter=max_iter, init=init,
                verbose=verbose, tol=tol, x_squared_norms=x_squared_norms,
                # Change seed to ensure variety
                random_state=seed)
            for seed in seeds)
        # Get results with the lowest inertia
        labels, inertia, centers, n_iters = zip(*results)
        best = np.argmin(inertia)
        best_labels = labels[best]
        best_inertia = inertia[best]
        best_centers = centers[best]
        best_n_iter = n_iters[best]
    if return_n_iter:
        return best_centers, best_labels, best_inertia, best_n_iter
    else:
        return best_centers, best_labels, best_inertia
def plot_number_observer_with_matplotlib(*args, **kwargs):
    """Generate a plot from NumberObservers and show it on IPython notebook
    with matplotlib.

    Parameters
    ----------
    obs : NumberObserver (e.g. FixedIntervalNumberObserver)
    fmt : str, optional
    opt : dict, optional
        matplotlib plot options.

    Examples
    --------
    >>> plot_number_observer(obs1)
    >>> plot_number_observer(obs1, 'o')
    >>> plot_number_observer(obs1, obs2, obs3, {'linewidth': 2})
    >>> plot_number_observer(obs1, 'k-', obs2, 'k--')
    """
    import matplotlib.pylab as plt
    import numpy
    # Keyword args understood here; everything else goes to plot().
    special_keys = ("xlim", "ylim", "xlabel", "ylabel", "legend",
                    "x", "y", "filename")
    plot_opts = {key: value for key, value in kwargs.items()
                 if key not in special_keys}
    if 'axes.prop_cycle' in plt.rcParams.keys():
        color_cycle = [prop['color'] for prop in plt.rcParams['axes.prop_cycle']]
    else:
        # Older matplotlib versions used axes.color_cycle.
        color_cycle = plt.rcParams['axes.color_cycle']
    if "y" in kwargs.keys() and isinstance(kwargs["y"], str):
        kwargs["y"] = (kwargs["y"], )
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # Pair each observer with its optional matplotlib format string.
    if len(args) > 1 and isinstance(args[1], str):
        if len(args) % 2 == 0:
            observers = [(args[i], args[i + 1]) for i in range(0, len(args), 2)]
        else:
            observers = [(args[i], args[i + 1])
                         for i in range(0, len(args) - 1, 2)]
            # FIX: was ``observers.append(args[-1], None)`` -- list.append
            # takes a single argument; a (observer, fmt) tuple is intended.
            observers.append((args[-1], None))
    else:
        observers = [(obs, None) for obs in args]
    color_map = {}
    data, xidx = None, 0
    for obs, fmt in observers:
        if isinstance(obs, types.FunctionType):
            # A bare function is plotted against the previous observer's
            # x data.
            if data is None:
                raise ValueError("A function must be given after an observer.")
            y = [obs(xi) for xi in data[xidx]]
            opts = plot_opts.copy()
            label = obs.__name__
            if label not in color_map.keys():
                color_map[label] = color_cycle[len(color_map) % len(color_cycle)]
            opts["label"] = label
            opts["color"] = color_map[label]
            if fmt is None:
                ax.plot(data[xidx], y, **opts)
            else:
                ax.plot(data[xidx], y, fmt, **opts)
            continue
        data = numpy.array(obs.data()).T
        try:
            err = obs.error().T
        except AttributeError:
            err = None
        if "x" in kwargs.keys():
            targets = [sp.serial() for sp in obs.targets()]
            if kwargs["x"] not in targets:
                # FIX: was ``.fomrat`` (typo), which raised AttributeError
                # instead of the intended ValueError.
                raise ValueError(
                    "[{0}] given as 'x' was not found.".format(kwargs["x"]))
            xidx = targets.index(kwargs["x"]) + 1
        else:
            xidx = 0
        if "y" in kwargs.keys():
            targets = [sp.serial() for sp in obs.targets()]
            targets = [(targets.index(serial), serial)
                       for serial in kwargs["y"] if serial in targets]
        else:
            targets = [sp.serial() for sp in obs.targets()]
            targets = list(enumerate(targets))
            # targets.sort(key=lambda x: x[1])
        for idx, serial in targets:
            opts = plot_opts.copy()
            label = serial
            if len(label) > 0 and label[0] == '_':
                # XXX: lazy escaping for a special character (matplotlib
                # hides labels starting with '_').
                label = '$\\_$' + label[1:]
            if label not in color_map.keys():
                color_map[label] = color_cycle[len(color_map) % len(color_cycle)]
            opts["label"] = label
            opts["color"] = color_map[label]
            if err is None:
                if fmt is None:
                    ax.plot(data[xidx], data[idx + 1], **opts)
                else:
                    ax.plot(data[xidx], data[idx + 1], fmt, **opts)
            else:
                if fmt is None:
                    ax.errorbar(data[xidx], data[idx + 1],
                                xerr=(None if xidx == 0 else err[xidx]),
                                yerr=err[idx + 1], **opts)
                else:
                    ax.errorbar(data[xidx], data[idx + 1],
                                xerr=(None if xidx == 0 else err[xidx]),
                                yerr=err[idx + 1], fmt=fmt, **opts)
    # if "legend" not in kwargs.keys() or kwargs["legend"]:
    #     ax.legend(*ax.get_legend_handles_labels(), loc="best", shadow=True)
    if "legend" not in kwargs.keys() or (kwargs["legend"] is not None
                                         and kwargs["legend"] is not False):
        legend_opts = {"loc": "best", "shadow": True}
        if "legend" in kwargs and isinstance(kwargs["legend"], dict):
            legend_opts.update(kwargs["legend"])
        ax.legend(*ax.get_legend_handles_labels(), **legend_opts)
    if "xlabel" in kwargs.keys():
        ax.set_xlabel(kwargs["xlabel"])
    elif "x" in kwargs.keys():
        ax.set_xlabel("The Number of Molecules [{0}]".format(kwargs["x"]))
    else:
        ax.set_xlabel("Time")
    if "ylabel" in kwargs.keys():
        ax.set_ylabel(kwargs["ylabel"])
    else:
        ax.set_ylabel("The Number of Molecules")
    if "xlim" in kwargs.keys():
        ax.set_xlim(kwargs["xlim"])
    if "ylim" in kwargs.keys():
        ax.set_ylim(kwargs["ylim"])
    if "filename" in kwargs.keys():
        plt.savefig(kwargs["filename"])
    else:
        plt.show()
def getdict(locale):
    """Generate or get the conversion dict cache for a certain locale.

    Dictionaries are loaded on demand and memoized in module globals.

    :param locale: one of 'zh-cn', 'zh-tw', 'zh-hk'/'zh-mo',
        'zh-sg'/'zh-my', 'zh-hans', 'zh-hant'; any other value yields
        an empty mapping.
    :return: the conversion mapping for *locale*
    """
    global zhcdicts, dict_zhcn, dict_zhsg, dict_zhtw, dict_zhhk, pfsdict
    # Lazy-load the master dictionary on first use.
    if zhcdicts is None:
        loaddict(DICTIONARY)
    if locale == 'zh-cn':
        if dict_zhcn:
            got = dict_zhcn
        else:
            # Region dict = base simplified table + CN-specific overrides.
            dict_zhcn = zhcdicts['zh2Hans'].copy()
            dict_zhcn.update(zhcdicts['zh2CN'])
            got = dict_zhcn
    elif locale == 'zh-tw':
        if dict_zhtw:
            got = dict_zhtw
        else:
            dict_zhtw = zhcdicts['zh2Hant'].copy()
            dict_zhtw.update(zhcdicts['zh2TW'])
            got = dict_zhtw
    elif locale == 'zh-hk' or locale == 'zh-mo':
        if dict_zhhk:
            got = dict_zhhk
        else:
            dict_zhhk = zhcdicts['zh2Hant'].copy()
            dict_zhhk.update(zhcdicts['zh2HK'])
            got = dict_zhhk
    elif locale == 'zh-sg' or locale == 'zh-my':
        if dict_zhsg:
            got = dict_zhsg
        else:
            dict_zhsg = zhcdicts['zh2Hans'].copy()
            dict_zhsg.update(zhcdicts['zh2SG'])
            got = dict_zhsg
    elif locale == 'zh-hans':
        got = zhcdicts['zh2Hans']
    elif locale == 'zh-hant':
        got = zhcdicts['zh2Hant']
    else:
        got = {}
    # Cache the prefix set for this locale alongside the dictionary.
    if locale not in pfsdict:
        pfsdict[locale] = getpfset(got)
    return got
def valid_connection(*outer_args, **outer_kwargs):  # pylint: disable=unused-argument, no-method-argument
    """Decorator factory: check that the daemon connection is established
    and valid before running the decorated function.

    The decorated function's first positional argument must be the link;
    a ``LinkError`` is raised when its connection is missing or not yet
    initialized.
    """
    def decorator(func):  # pylint: disable=missing-docstring
        def wrapper(*args, **kwargs):  # pylint: disable=missing-docstring
            # outer_args and outer_kwargs are the decorator arguments;
            # args and kwargs belong to the decorated function.
            link = args[0]
            if not link.con:
                raise LinkError("The connection is not created for %s" % link.name)
            if not link.running_id:
                raise LinkError("The connection is not initialized for %s" % link.name)
            return func(*args, **kwargs)
        return wrapper
    return decorator
def handle_stream(self, message):
    """Act on reception of a complete server-sent-events message.

    :param message: string of an incoming message

    Parses all fields, builds an Event object and appends it to
    ``self.events``; also tracks the last event id and retry timeout.
    """
    logging.debug("handle_stream(...)")
    event = Event()
    for raw_line in message.strip().splitlines():
        field, value = raw_line.split(":", 1)
        field = field.strip()
        if field == "event":
            event.name = value.lstrip()
        elif field == "data":
            chunk = value.lstrip()
            # Multiple data fields are joined with newlines.
            if event.data is None:
                event.data = chunk
            else:
                event.data = "%s\n%s" % (event.data, chunk)
        elif field == "id":
            event.id = value.lstrip()
            self.last_event_id = event.id
        elif field == "retry":
            try:
                self.retry_timeout = int(value)
            except ValueError:
                # Non-numeric retry values are silently ignored.
                pass
            else:
                event.retry = self.retry_timeout
                logging.info("timeout reset: %s" % (value,))
        elif field == "":
            # An empty field name marks a comment line (": ...").
            logging.debug("received comment: %s" % (value,))
        else:
            raise Exception("Unknown field !")
    self.events.append(event)
def css_class(self, cell):
    """Return the CSS class for this column.

    ``self._css_class`` may be either a plain string (returned as-is) or
    a callable taking the cell and returning the class name.

    NOTE(review): ``basestring`` exists only on Python 2; under Python 3
    this raises NameError -- confirm the intended runtime before porting.
    """
    if isinstance(self._css_class, basestring):
        return self._css_class
    else:
        return self._css_class(cell)
def hourly_solar_radiation(self):
    """Three data collections containing hourly direct normal, diffuse
    horizontal, and global horizontal radiation."""
    dir_norm, diff_horiz, glob_horiz = \
        self._sky_condition.radiation_values(self._location)
    make = self._get_daily_data_collections
    return (
        make(energyintensity.DirectNormalRadiation(), 'Wh/m2', dir_norm),
        make(energyintensity.DiffuseHorizontalRadiation(), 'Wh/m2', diff_horiz),
        make(energyintensity.GlobalHorizontalRadiation(), 'Wh/m2', glob_horiz),
    )
def get_fwhm_tag(expnum, ccd, prefix=None, version='p'):
    """Get the FWHM from the VOSpace annotation, memoizing results in the
    module-level ``fwhm`` cache keyed by URI.

    @param expnum: exposure number
    @param ccd: CCD number
    @param prefix: optional filename prefix
    @param version: processing version tag (default 'p')
    @return: the FWHM value for the exposure/ccd
    """
    uri = get_uri(expnum, ccd, version, ext='fwhm', prefix=prefix)
    if uri not in fwhm:
        tag_key = "fwhm_{:1s}{:02d}".format(version, int(ccd))
        fwhm[uri] = get_tag(expnum, tag_key)
    return fwhm[uri]
def configure_uwsgi ( configurator_func ) :
    """Allows configuring uWSGI using Configuration objects returned
    by the given configuration function.

    .. code-block:: python

        # In configuration module, e.g. ``uwsgicfg.py``
        from uwsgiconf.config import configure_uwsgi

        configure_uwsgi(get_configurations)

    :param callable configurator_func: Function which returns a list of
        configurations (``Section`` or ``Configuration`` objects; a single
        object is also accepted).

    :rtype: list|None

    :returns: A list with detected configurations or
        ``None`` if called from within uWSGI (e.g. when trying to load a WSGI application).

    :raises ConfigurationError: on duplicate configuration aliases, or when
        ``configurator_func`` returned no usable configuration objects.
    """
    from . settings import ENV_CONF_READY , ENV_CONF_ALIAS , CONFIGS_MODULE_ATTR
    if os . environ . get ( ENV_CONF_READY ) :
        # This call is from uWSGI trying to load an application.
        # We prevent unnecessary configuration for setups where the
        # application is located in the same file as the configuration.
        del os . environ [ ENV_CONF_READY ]
        # Drop it to support consecutive reconfiguration.
        return None
    configurations = configurator_func ( )
    registry = OrderedDict ( )
    if not isinstance ( configurations , ( list , tuple ) ) :
        configurations = [ configurations ]
    # Index configurations by alias, skipping anything that is not a
    # Section or Configuration; Sections are promoted to Configurations.
    for conf_candidate in configurations :
        if not isinstance ( conf_candidate , ( Section , Configuration ) ) :
            continue
        if isinstance ( conf_candidate , Section ) :
            conf_candidate = conf_candidate . as_configuration ( )
        alias = conf_candidate . alias
        if alias in registry :
            raise ConfigurationError ( "Configuration alias '%s' clashes with another configuration. " "Please change the alias." % alias )
        registry [ alias ] = conf_candidate
    if not registry :
        raise ConfigurationError ( "Callable passed into 'configure_uwsgi' must return 'Section' or 'Configuration' objects." )
    # Try to get configuration alias from env with fall back
    # to --conf argument (as passed by UwsgiRunner.spawn()).
    target_alias = os . environ . get ( ENV_CONF_ALIAS )
    if not target_alias :
        last = sys . argv [ - 2 : ]
        if len ( last ) == 2 and last [ 0 ] == '--conf' :
            target_alias = last [ 1 ]
    conf_list = list ( registry . values ( ) )
    if target_alias :
        # This call is [presumably] from the uWSGI configuration read procedure.
        config = registry . get ( target_alias )
        if config :
            section = config . sections [ 0 ]  # type: Section
            # Set ready marker which is checked above.
            os . environ [ ENV_CONF_READY ] = '1'
            # Placeholder for runtime introspection.
            section . set_placeholder ( 'config-alias' , target_alias )
            # Print out the composed ini so uWSGI can consume it.
            config . print_ini ( )
    else :
        # This call is from the module containing uWSGI configurations.
        import inspect
        # Set module attribute automatically on the *calling* module's frame
        # so the configurations are discoverable there.
        config_module = inspect . currentframe ( ) . f_back
        config_module . f_locals [ CONFIGS_MODULE_ATTR ] = conf_list
    return conf_list
def get_batch ( self , user_list ) :
    """Fetch basic profile info for up to 100 users in one request.

    ``user_list`` may be either a list of openid strings or a list of
    dicts like ``{'openid': ..., 'lang': ...}``; see
    https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1421140839

    Usage::

        from wechatpy import WeChatClient

        client = WeChatClient('appid', 'secret')
        users = client.user.get_batch(['openid1', 'openid2'])
        users = client.user.get_batch([
            {'openid': 'openid1', 'lang': 'zh-CN'},
            {'openid': 'openid2', 'lang': 'en'},
        ])

    :param user_list: openids, or openid/lang dicts
    :return: list of user-info dicts
    """
    # Normalise a plain list of openid strings into the dict form the
    # API expects.
    if all ( isinstance ( item , six . string_types ) for item in user_list ) :
        user_list = [ { 'openid' : openid } for openid in user_list ]
    return self . _post (
        'user/info/batchget' ,
        data = { 'user_list' : user_list } ,
        result_processor = lambda resp : resp [ 'user_info_list' ] ,
    )
def _android_update_sdk ( self , * sdkmanager_commands ) :
    """Update the tools and package-tools if possible."""
    accept_license = self . buildozer . config . getbooldefault (
        'app' , 'android.accept_sdk_license' , False )
    if accept_license :
        # `SIGPIPE` is not being reported somehow, but `EPIPE` is.
        # This leads to a harmless "Broken pipe" message on stderr,
        # hence the redirect of `yes` to /dev/null.
        license_command = 'yes 2>/dev/null | {} --licenses' . format ( self . sdkmanager_path )
        self . buildozer . cmd ( license_command , cwd = self . android_sdk_dir )
    self . _sdkmanager ( * sdkmanager_commands )
def get_msg_login ( self , username ) :
    """Return the welcome message, recording the user's login time.

    If an account exists for ``username``, its last-login timestamp is
    updated and persisted; unknown users still receive the greeting.
    """
    matched = self . get_account ( username )
    if matched :
        # Record this login before greeting the user.
        matched . update_last_login ( )
        matched . save ( )
    return 'welcome.'
def cli ( ctx , name , all ) :
    """Show example for doing some task in bubble (experimental).

    With ``all`` set, every registered example is printed; otherwise only
    the example matching ``name`` is printed and the rest are listed as
    available.
    """
    ctx . gbc . say ( 'all_example_functions' , stuff = all_examples_functions , verbosity = 1000 )
    bar = "#" * 80
    for example in all_examples_functions :
        selected = all or ( name and example [ 'name' ] == name )
        if not selected :
            click . echo ( "available example: " + example [ 'name' ] )
            continue
        if all :
            ctx . gbc . say ( 'example' , stuff = example , verbosity = 100 )
            name = example [ 'name' ]
        # click.echo_via_pager(example['fun']())
        click . echo ( bar )
        click . echo ( "### start of bubble example: " + name )
        click . echo ( bar )
        click . echo ( example [ 'fun' ] ( ) )
        click . echo ( bar )
        click . echo ( "### end of bubble example: " + name )
        click . echo ( bar )
        click . echo ( )
def get_session ( self , username , password , remote = "127.0.0.1" , proxy = None ) :
    """Create a session for a user on the Crowd server.

    Args:
        username: The account username.
        password: The account password.
        remote: The remote address of the user. This can be used to create
            multiple concurrent sessions for a user. The host you run this
            program on may need to be configured in Crowd as a trusted proxy
            for this to work.
        proxy: Value of the X-Forwarded-For server header.

    Returns:
        dict: user attributes if application authentication succeeded (see
            the Crowd documentation for the authoritative list).
        None: if authentication failed.
    """
    validation_factors = [ { "name" : "remote_address" , "value" : remote , } , ]
    if proxy :
        validation_factors . append ( { "name" : "X-Forwarded-For" , "value" : proxy , } )
    params = {
        "username" : username ,
        "password" : password ,
        "validation-factors" : { "validationFactors" : validation_factors } ,
    }
    response = self . _post ( self . rest_url + "/session" ,
                           data = json . dumps ( params ) ,
                           params = { "expand" : "user" } )
    # If authentication failed for any reason return None.
    if not response . ok :
        return None
    # Otherwise return the user object.
    return response . json ( )
def update ( self , message = None , subject = None , days = None , downloads = None , notify = None ) :
    """Update properties for a transfer.

    Any argument left as ``None`` keeps the value currently stored in
    ``self.transfer_info``.

    :param message: updated message to recipient(s)
    :param subject: updated subject for transfer
    :param days: updated amount of days transfer is available
    :param downloads: updated amount of downloads allowed for transfer
    :param notify: whether to notify on downloads or not
    :type message: ``str`` or ``unicode``
    :type subject: ``str`` or ``unicode``
    :type days: ``int``
    :type downloads: ``int``
    :type notify: ``bool``
    :rtype: ``bool``
    """
    method , url = get_URL ( 'update' )
    payload = {
        'apikey' : self . config . get ( 'apikey' ) ,
        'logintoken' : self . session . cookies . get ( 'logintoken' ) ,
        'transferid' : self . transfer_id ,
    }
    # BUG FIX: the dict below used the key 'message' twice, so the subject
    # silently overwrote the message and no 'subject' key was ever sent.
    data = {
        'message' : message or self . transfer_info . get ( 'message' ) ,
        'subject' : subject or self . transfer_info . get ( 'subject' ) ,
        'days' : days or self . transfer_info . get ( 'days' ) ,
        'downloads' : downloads or self . transfer_info . get ( 'downloads' ) ,
        'notify' : notify or self . transfer_info . get ( 'notify' ) ,
    }
    payload . update ( data )
    res = getattr ( self . session , method ) ( url , params = payload )
    # NOTE(review): every HTTP response has a truthy status_code (even 4xx/5xx),
    # so this condition is effectively always True -- confirm whether
    # ``res.ok`` was intended here.
    if res . status_code :
        self . transfer_info . update ( data )
        return True
    hellraiser ( res )
def iter_chunks ( cls , sock , return_bytes = False , timeout_object = None ) :
    """Generates chunks from a connected socket until an Exit chunk is sent or a timeout occurs.

    :param sock: the socket to read from.
    :param bool return_bytes: If False, decode the payload into a utf-8 string.
    :param cls.TimeoutProvider timeout_object: If provided, will be checked every iteration for a
        possible timeout.
    :raises: :class:`cls.ProcessStreamTimeout`
    """
    assert ( timeout_object is None or isinstance ( timeout_object , cls . TimeoutProvider ) )
    # Wall-clock time at which the timeout was armed, and the allowed
    # interval in seconds; both None until a timeout is requested.
    orig_timeout_time = None
    timeout_interval = None
    while 1 :
        if orig_timeout_time is not None :
            # NOTE(review): despite the name, this is the number of seconds
            # *past* the deadline (positive == timed out). While still within
            # the deadline it is negative and is passed below as the socket
            # timeout -- confirm that a negative socket timeout is intended.
            remaining_time = time . time ( ) - ( orig_timeout_time + timeout_interval )
            if remaining_time > 0 :
                original_timestamp = datetime . datetime . fromtimestamp ( orig_timeout_time ) . isoformat ( )
                raise cls . ProcessStreamTimeout ( "iterating over bytes from nailgun timed out with timeout interval {} starting at {}, " "overtime seconds: {}" . format ( timeout_interval , original_timestamp , remaining_time ) )
        elif timeout_object is not None :
            opts = timeout_object . maybe_timeout_options ( )
            if opts :
                # A timeout was just requested: arm it and re-check on the
                # next loop pass before doing another blocking read.
                orig_timeout_time = opts . start_time
                timeout_interval = opts . interval
                continue
            remaining_time = None
        else :
            remaining_time = None
        # Read one chunk under the (possibly adjusted) socket timeout.
        with cls . _set_socket_timeout ( sock , timeout = remaining_time ) :
            chunk_type , payload = cls . read_chunk ( sock , return_bytes )
            yield chunk_type , payload
        # An Exit chunk terminates the stream.
        if chunk_type == ChunkType . EXIT :
            break
def cfrom ( self ) :
    """The initial character position in the surface string.

    Defaults to -1 if there is no valid cfrom value.
    """
    try :
        if self . lnk . type == Lnk . CHARSPAN :
            return self . lnk . data [ 0 ]
    except AttributeError :
        # No lnk attribute at all -- fall through to the default.
        pass
    return - 1
def get_window_from_xy ( self , xy ) :
    """Return the index of the window containing ``xy`` (raster CRS).

    :raises ValueError: if no window contains the coordinate.
    """
    meta = self . _get_template_for_given_resolution ( res = self . dst_res , return_ = "meta" )
    row , col = transform . rowcol ( meta [ "transform" ] , xy [ 0 ] , xy [ 1 ] )
    for window_index , win in enumerate ( self . windows ) :
        ( row_start , row_end ) , ( col_start , col_end ) = rasterio . windows . toranges ( win )
        # Half-open ranges: start inclusive, end exclusive.
        if row_start <= row < row_end and col_start <= col < col_end :
            return window_index
    raise ValueError ( "The given 'xy' value is not contained in any window." )
def load_from_docinfo ( self , docinfo , delete_missing = False , raise_failure = False ) :
    """Populate this XMP metadata object from a DocumentInfo dictionary.

    Only the handful of deprecated DocumentInfo entries with a rough XMP
    equivalent (per ``DOCINFO_MAPPING``) are copied.

    Arguments:
        docinfo: a DocumentInfo, e.g. ``pdf.docinfo``
        delete_missing: if an entry is absent from DocumentInfo, delete the
            equivalent record from XMP
        raise_failure: if True, raise on any failure to convert docinfo;
            otherwise warn and continue
    """
    for uri , shortkey , docinfo_name , converter in self . DOCINFO_MAPPING :
        qname = QName ( uri , shortkey )
        # docinfo may be a dict or a pikepdf.Dictionary, so look up keys
        # by str(Name).
        value = docinfo . get ( str ( docinfo_name ) )
        if value is None :
            if delete_missing and qname in self :
                del self [ qname ]
            continue
        try :
            value = str ( value )
            if converter :
                value = converter . xmp_from_docinfo ( value )
            if not value :
                continue
            self [ qname ] = value
        except ( ValueError , AttributeError ) as exc :
            msg = "The metadata field {} could not be copied to XMP" . format ( docinfo_name )
            if raise_failure :
                raise ValueError ( msg ) from exc
            warn ( msg )
def limits ( self , clip_negative = True ) :
    """Return intensity limits, i.e. a (min, max) tuple, of the dtype.

    Args:
        clip_negative: bool, optional
            If True, report 0 as the minimum intensity even when the
            image dtype allows negative values.

    Returns:
        tuple: lower and upper intensity limits.
    """
    lower , upper = dtype_range [ self . as_numpy_dtype ]
    if clip_negative :
        lower = 0
    return lower , upper
def ensure_project ( three = None , python = None , validate = True , system = False , warn = True , site_packages = False , deploy = False , skip_requirements = False , pypi_mirror = None , clear = False , ) :
    """Ensures both Pipfile and virtualenv exist for the project.

    Side effects: may create the virtualenv and/or the Pipfile, print
    warnings, and call ``sys.exit`` on fatal conditions.

    Selected parameters (others are forwarded to the helpers below):
    :param system: operate on the system environment; skips virtualenv creation.
    :param warn: warn when the in-use Python does not satisfy the Pipfile.
    :param deploy: fail hard (raise/exit) instead of continuing when the
        project or its Python version is not as expected.
    :param clear: clear the caches (NOTE(review): this path only prints
        "clearing" and exits -- looks like a stub; confirm intent).
    """
    from . environments import PIPENV_USE_SYSTEM
    # Clear the caches, if appropriate.
    if clear :
        print ( "clearing" )
        sys . exit ( 1 )
    # Automatically use an activated virtualenv.
    if PIPENV_USE_SYSTEM :
        system = True
    # In deploy mode a missing Pipfile is fatal rather than created.
    if not project . pipfile_exists and deploy :
        raise exceptions . PipfileNotFound
    # Fail if working under /
    if not project . name :
        click . echo ( "{0}: Pipenv is not intended to work under the root directory, " "please choose another path." . format ( crayons . red ( "ERROR" ) ) , err = True )
        sys . exit ( 1 )
    # Skip virtualenv creation when --system was used.
    if not system :
        ensure_virtualenv ( three = three , python = python , site_packages = site_packages , pypi_mirror = pypi_mirror , )
        if warn :
            # Warn users if they are using the wrong version of Python.
            if project . required_python_version :
                path_to_python = which ( "python" ) or which ( "py" )
                # Substring check: required version (e.g. "3.7") must occur
                # in the reported interpreter version string.
                if path_to_python and project . required_python_version not in ( python_version ( path_to_python ) or "" ) :
                    click . echo ( "{0}: Your Pipfile requires {1} {2}, " "but you are using {3} ({4})." . format ( crayons . red ( "Warning" , bold = True ) , crayons . normal ( "python_version" , bold = True ) , crayons . blue ( project . required_python_version ) , crayons . blue ( python_version ( path_to_python ) ) , crayons . green ( shorten_path ( path_to_python ) ) , ) , err = True , )
                    click . echo ( " {0} and rebuilding the virtual environment " "may resolve the issue." . format ( crayons . green ( "$ pipenv --rm" ) ) , err = True , )
                    if not deploy :
                        click . echo ( " {0} will surely fail." "" . format ( crayons . red ( "$ pipenv check" ) ) , err = True , )
                    else :
                        # Python mismatch is fatal when deploying.
                        raise exceptions . DeployException
    # Ensure the Pipfile exists.
    ensure_pipfile ( validate = validate , skip_requirements = skip_requirements , system = system )
def set_orient ( self ) :
    """Compute and store the orientation from the CD matrix."""
    # Angle of the CD matrix column (cd12, cd22), converted to degrees.
    angle = N . arctan2 ( self . cd12 , self . cd22 )
    self . orient = RADTODEG ( angle )
def _ParseItems ( self , parser_mediator , msiecf_file ) :
    """Parses MSIE Cache File (MSIECF) items.

    Args:
        parser_mediator (ParserMediator): mediates interactions between parsers
            and other components, such as storage and dfvfs.
        msiecf_file (pymsiecf.file): MSIECF file.
    """
    format_version = msiecf_file . format_version
    decode_error = False
    cache_directories = [ ]
    for cache_directory_name in iter ( msiecf_file . cache_directories ) :
        try :
            cache_directory_name = cache_directory_name . decode ( 'ascii' )
        except UnicodeDecodeError :
            decode_error = True
            cache_directory_name = cache_directory_name . decode ( 'ascii' , errors = 'replace' )
        cache_directories . append ( cache_directory_name )
    if decode_error :
        parser_mediator . ProduceExtractionWarning ( (
            'unable to decode cache directory names. Characters that cannot '
            'be decoded will be replaced with "?" or "\\ufffd".' ) )
    # Regular and recovered items are dispatched identically, so the
    # per-item type dispatch is shared via _ParseCacheItem below.
    for item_index in range ( 0 , msiecf_file . number_of_items ) :
        try :
            msiecf_item = msiecf_file . get_item ( item_index )
            self . _ParseCacheItem ( parser_mediator , format_version , cache_directories , msiecf_item , recovered = False )
        except IOError as exception :
            parser_mediator . ProduceExtractionWarning (
                'Unable to parse item: {0:d} with error: {1!s}' . format ( item_index , exception ) )
    for item_index in range ( 0 , msiecf_file . number_of_recovered_items ) :
        try :
            msiecf_item = msiecf_file . get_recovered_item ( item_index )
            self . _ParseCacheItem ( parser_mediator , format_version , cache_directories , msiecf_item , recovered = True )
        except IOError as exception :
            parser_mediator . ProduceExtractionWarning (
                'Unable to parse recovered item: {0:d} with error: {1!s}' . format ( item_index , exception ) )

def _ParseCacheItem ( self , parser_mediator , format_version , cache_directories , msiecf_item , recovered = False ) :
    """Dispatches a single MSIECF item to its type-specific parser.

    Args:
        parser_mediator (ParserMediator): mediates interactions between parsers
            and other components.
        format_version: MSIECF format version.
        cache_directories (list[str]): decoded cache directory names.
        msiecf_item: a pymsiecf leak, redirected or url item.
        recovered (bool): True when the item was recovered (deleted).
    """
    # recovered=False matches the original behavior where the keyword was
    # omitted for regular items (the type parsers' presumed default).
    if isinstance ( msiecf_item , pymsiecf . leak ) :
        self . _ParseLeak ( parser_mediator , cache_directories , msiecf_item , recovered = recovered )
    elif isinstance ( msiecf_item , pymsiecf . redirected ) :
        self . _ParseRedirected ( parser_mediator , msiecf_item , recovered = recovered )
    elif isinstance ( msiecf_item , pymsiecf . url ) :
        self . _ParseUrl ( parser_mediator , format_version , cache_directories , msiecf_item , recovered = recovered )
def _update_internal_states ( self , message ) :
    """Update internal device states from a decoded message.

    :param message: message to update internal states with
    :type message: :py:class:`~alarmdecoder.messages.Message`,
        :py:class:`~alarmdecoder.messages.ExpanderMessage`,
        :py:class:`~alarmdecoder.messages.LRRMessage`, or
        :py:class:`~alarmdecoder.messages.RFMessage`
    """
    if isinstance ( message , Message ) and not self . _ignore_message_states :
        # Keypad messages carry the bulk of the panel state; run every
        # status updater in the original order.
        for update_status in (
                self . _update_armed_ready_status ,
                self . _update_power_status ,
                self . _update_chime_status ,
                self . _update_alarm_status ,
                self . _update_zone_bypass_status ,
                self . _update_battery_status ,
                self . _update_fire_status ,
        ) :
            update_status ( message )
    elif isinstance ( message , ExpanderMessage ) :
        self . _update_expander_status ( message )
    # The zone tracker sees every message type.
    self . _update_zone_tracker ( message )
def commentToJson ( comment ) :
    """Return a serializable dict describing *comment*.

    :param comment: Comment to get info for
    :type comment: Comment
    :returns: dict
    """
    return {
        'id' : comment . id ,
        'comment' : comment . comment ,
        'user' : userToJson ( comment . user ) ,
        'date' : comment . submit_date . isoformat ( ) ,
    }
def get_users ( self , course ) :
    """Return an OrderedDict of the course's registered users, sorted by realname."""
    registered = self . user_manager . get_course_registered_users ( course )
    infos = self . user_manager . get_users_info ( registered )

    def sort_key ( entry ) :
        # entry is (username, info); info[0] is the realname, info may be None.
        username , info = entry
        return info [ 0 ] if info is not None else ""

    return OrderedDict ( sorted ( infos . items ( ) , key = sort_key ) )
async def _reset_protocol ( self , exc = None ) :
    """Shut down and discard the protocol after an error, notifying observers.

    :param exc: the error that triggered the reset; forwarded to every
        registered observation-error callback.
    """
    # Be responsible and clean up the old protocol first.
    protocol = await self . _get_protocol ( )
    await protocol . shutdown ( )
    self . _protocol = None
    # Let any observers know the protocol has been shut down, then drop them.
    callbacks = self . _observations_err_callbacks
    for notify_error in callbacks :
        notify_error ( exc )
    callbacks . clear ( )
def loader ( pattern , dimensions = None , distributed_dim = 'time' , read_only = False ) :
    """Yield a root descriptor for *pattern*, closing it afterwards.

    Generator intended for use in a ``with`` statement (presumably wrapped
    with ``contextlib.contextmanager`` at the decoration site, which is not
    visible here -- TODO confirm).

    :param pattern: file pattern handed to ``tailor``/``open``
    :param dimensions: if given, a tailored root is built over these dimensions
    :param distributed_dim: dimension distributed across files (default 'time')
    :param read_only: open the underlying storage read-only
    """
    if dimensions :
        root = tailor ( pattern , dimensions , distributed_dim , read_only = read_only )
    else :
        # NOTE: this calls a module-level ``open`` helper returning a tuple,
        # not the builtin ``open``.
        root , _ = open ( pattern , read_only = read_only )
    yield root
    # NOTE(review): not wrapped in try/finally, so the root is NOT closed if
    # the with-statement body raises -- confirm whether that is intended.
    root . close ( )
def get_help_commands ( server_prefix ) :
    """Get the help commands for all modules.

    Args:
        server_prefix: The server command prefix

    Returns:
        list: (module_name, help_command, True) datapacks, one per module
    """
    here = os . path . realpath ( os . path . join ( os . getcwd ( ) , os . path . dirname ( __file__ ) ) )
    datapacks = [ ]
    for module_name in os . listdir ( "{}/../" . format ( here ) ) :
        # Names starting with "_" or "!" are not user-facing modules.
        if module_name . startswith ( ( "_" , "!" ) ) :
            continue
        help_command = "`{}help {}`" . format ( server_prefix , module_name )
        datapacks . append ( ( module_name , help_command , True ) )
    return datapacks
def parse_net16string ( self ) :
    """Parse a 16-bit-length-prefixed byte string, yielding only the payload.

    >>> next ( InBuffer ( b"\\0\\x03eggs" ) . parse_net16string ( ) ) == b'egg'
    True
    """
    # Chain: parse the 16-bit length, then that many bytes; keep only the
    # payload (second element of the chained result).
    chained = parse_chain ( self . parse_net16int , self . parse_fixedbuffer )
    return parse_map ( lambda pair : pair [ 1 ] , chained )
def free ( self ) :
    """Release the SDRAM backing this file-like object.

    Any subsequent operation on this file-like, or on slices of it,
    will fail.
    """
    # Hand the region back to the machine, then mark ourselves unusable.
    self . _machine_controller . sdram_free ( self . _start_address , self . _x , self . _y )
    self . _freed = True
def _adjust_merged_values_orm ( env , model_name , record_ids , target_record_id , field_spec ) :
    """This method deals with the values on the records to be merged +
    the target record, performing operations that make sense on the meaning
    of the model.

    :param field_spec: Dictionary with field names as keys and forced operation
        to perform as values. If a field is not present here, the default
        operation will be performed.

    Possible operations by field type:

    * Char, Text and Html fields:
      - 'merge' (default for Text and Html): content is concatenated
        with ' | ' as separator
      - other value (default for Char): content on target record is preserved
    * Integer, Float and Monetary fields:
      - 'sum' (default for Float and Monetary): sum all the values.
      - 'avg': arithmetic average of the values.
      - 'max' / 'min': maximum / minimum of all the values.
      - other value (default for Integer): content on target record is preserved
    * Binary fields:
      - 'merge' (default): first non-null value of the records if the target
        record's value is null; target value preserved otherwise.
      - other value: content on target record is preserved
    * Boolean fields:
      - 'and' / 'or': logical AND / OR over all values.
      - other value (default): content on target record is preserved
    * Date and Datetime fields:
      - 'max' / 'min': maximum / minimum of all the values.
      - other value (default): content on target record is preserved
    * Many2one fields:
      - 'merge' (default): first non-null value if the target's is null.
      - other value: content on target record is preserved
    * Many2many / One2many fields:
      - 'merge' (default): combine all the values
      - other value: content on target record is preserved
    * Reference and Selection fields:
      - any value: content on target record is preserved
    """
    model = env [ model_name ]
    fields = model . _fields . values ( )
    # Browse merged records plus the target together so aggregations
    # (sum/max/min/...) include the target's current value.
    all_records = model . browse ( tuple ( record_ids ) + ( target_record_id , ) )
    target_record = model . browse ( target_record_id )
    vals = { }
    o2m_changes = 0
    for field in fields :
        # Don't do anything on non-stored / computed / related fields.
        if not field . store or field . compute or field . related :
            continue
        op = field_spec . get ( field . name , False )
        # NOTE: under Python 3, filter() below returns a one-shot iterator;
        # each filtered 'l' is consumed exactly once afterwards, so that is
        # safe here.
        l = all_records . mapped ( field . name )
        if field . type in ( 'char' , 'text' , 'html' ) :
            if not op :
                op = 'other' if field . type == 'char' else 'merge'
            if op == 'merge' :
                l = filter ( lambda x : x is not False , l )
                vals [ field . name ] = ' | ' . join ( l )
        elif field . type in ( 'integer' , 'float' , 'monetary' ) :
            if not op :
                op = 'other' if field . type == 'integer' else 'sum'
            if op == 'sum' :
                vals [ field . name ] = sum ( l )
            elif op == 'avg' :
                vals [ field . name ] = sum ( l ) / len ( l )
            elif op == 'max' :
                vals [ field . name ] = max ( l )
            elif op == 'min' :
                vals [ field . name ] = min ( l )
        elif field . type == 'boolean' :
            op = op or 'other'
            if op == 'and' :
                vals [ field . name ] = functools . reduce ( lambda x , y : x & y , l )
            elif op == 'or' :
                vals [ field . name ] = functools . reduce ( lambda x , y : x | y , l )
        elif field . type in ( 'date' , 'datetime' ) :
            # Drop False (unset) values before taking max/min.
            if op :
                l = filter ( lambda x : x is not False , l )
            op = op or 'other'
            if op == 'max' :
                vals [ field . name ] = max ( l )
            elif op == 'min' :
                vals [ field . name ] = min ( l )
        elif field . type == 'many2many' :
            op = op or 'merge'
            if op == 'merge' :
                l = filter ( lambda x : x is not False , l )
                # (4, id) command: link each record, unlink nothing.
                vals [ field . name ] = [ ( 4 , x . id ) for x in l ]
        elif field . type == 'one2many' :
            op = op or 'merge'
            if op == 'merge' :
                # Repoint children of all merged records to the target.
                o2m_changes += 1
                l . write ( { field . inverse_name : target_record_id } )
        elif field . type == 'binary' :
            op = op or 'merge'
            if op == 'merge' :
                l = [ x for x in l if x ]
                # Keep the target's value if set; otherwise first non-null.
                if not getattr ( target_record , field . name ) and l :
                    vals [ field . name ] = l [ 0 ]
        elif field . type == 'many2one' :
            op = op or 'merge'
            if op == 'merge' :
                if not getattr ( target_record , field . name ) and l :
                    vals [ field . name ] = l [ 0 ]
    # Curate values that haven't changed
    new_vals = { }
    for f in vals :
        if model . _fields [ f ] . type != 'many2many' :
            if vals [ f ] != getattr ( target_record , f ) :
                new_vals [ f ] = vals [ f ]
        else :
            # NOTE(review): this compares the whole *list* of linked ids
            # against the target's list of ids, which for a list operand is
            # effectively always True ('not in' a list of ints); presumably
            # it was meant to test whether the linked ids are already all
            # present on the target -- confirm before relying on it.
            if [ x [ 1 ] for x in vals [ f ] ] not in getattr ( target_record , f ) . ids :
                new_vals [ f ] = vals [ f ]
    if new_vals :
        target_record . write ( new_vals )
    logger . debug ( "Write %s value(s) in target record '%s' of model '%s'" , len ( new_vals ) + o2m_changes , target_record_id , model_name , )
def get_sequence ( queries , fap = None , fmt = 'fasta' , organism_taxid = 9606 , test = False ) :
    """Fetch sequences for UniProt accessions via EBI dbfetch.

    Example endpoints:
    http://www.ebi.ac.uk/Tools/dbfetch/dbfetch?db=uniprotkb&id=P14060+P26439&format=fasta&style=raw&Retrieve=Retrieve
    https://www.uniprot.org/uniprot/?format=fasta&organism=9606&query=O75116+O75116+P35548+O14944+O14944

    :param queries: iterable of accession IDs
    :param fap: optional output path; when given the response text is written
        there and the path is returned
    :param fmt: output format (default 'fasta')
    :param organism_taxid: NCBI taxonomy id (default 9606, human)
    :param test: if True, print the request URL
    :returns: ``fap`` (if given), otherwise the response text; ``None`` on an
        HTTP error (the status code is printed)
    """
    url = 'http://www.ebi.ac.uk/Tools/dbfetch/dbfetch'
    params = {
        'id' : ' ' . join ( queries ) ,
        'db' : 'uniprotkb' ,
        'organism' : organism_taxid ,
        'format' : fmt ,
        'style' : 'raw' ,
        'Retrieve' : 'Retrieve' ,
    }
    response = requests . get ( url , params = params )
    if test :
        print ( response . url )
    if not response . ok :
        # Best-effort diagnostics; caller receives None (as before).
        print ( 'Something went wrong ' , response . status_code )
        return None
    # Idiom fix: was `if not fap is None` -- use `is not None`.
    if fap is not None :
        with open ( fap , 'w' ) as f :
            f . write ( response . text )
        return fap
    return response . text
def libvlc_media_new_path ( p_instance , path ) :
    '''Create a media for a certain file path.

    See L{libvlc_media_release}.
    @param p_instance: the instance.
    @param path: local filesystem path.
    @return: the newly created media or NULL on error.
    '''
    # Lazily resolve and cache the ctypes binding for the native symbol:
    # reuse a previously created wrapper if present, otherwise build one
    # that maps (Instance, c_char_p) arguments to a Media result.
    f = _Cfunctions . get ( 'libvlc_media_new_path' , None ) or _Cfunction ( 'libvlc_media_new_path' , ( ( 1 , ) , ( 1 , ) , ) , class_result ( Media ) , ctypes . c_void_p , Instance , ctypes . c_char_p )
    return f ( p_instance , path )
def triple_plot ( cccsum , cccsum_hist , trace , threshold , ** kwargs ) :
    """Plot a seismogram, correlogram and histogram.

    :type cccsum: numpy.ndarray
    :param cccsum: Array of the cross-channel cross-correlation sum
    :type cccsum_hist: numpy.ndarray
    :param cccsum_hist: cccsum for histogram plotting, can be the same as
        cccsum but included if cccsum is just an envelope.
    :type trace: obspy.core.trace.Trace
    :param trace: A sample trace from the same time as cccsum
    :type threshold: float
    :param threshold: Detection threshold within cccsum
    :type save: bool
    :param save: If True will save and not plot to screen, vice-versa if False
    :type savefile: str
    :param savefile: Path to save figure to, only required if save=True
    :returns: :class:`matplotlib.figure.Figure`

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.core.match_filter import normxcorr2
    >>> from eqcorrscan.utils.plotting import triple_plot
    >>> st = read()
    >>> template = st[0].copy().trim(st[0].stats.starttime + 8,
    ...                              st[0].stats.starttime + 12)
    >>> tr = st[0]
    >>> ccc = normxcorr2(template=template.data, image=tr.data)
    >>> tr.data = tr.data[0:len(ccc[0])]
    >>> triple_plot(cccsum=ccc[0], cccsum_hist=ccc[0], trace=tr,
    ...             threshold=0.8)  # doctest: +SKIP

    .. image:: ../../plots/triple_plot.png
    """
    import matplotlib . pyplot as plt
    if len ( cccsum ) != len ( trace . data ) :
        print ( 'cccsum is: ' + str ( len ( cccsum ) ) + ' trace is: ' + str ( len ( trace . data ) ) )
        msg = ' ' . join ( [ 'cccsum and trace must have the' , 'same number of data points' ] )
        raise ValueError ( msg )
    df = trace . stats . sampling_rate
    npts = trace . stats . npts
    # Time axis in hours after the trace start time.
    t = np . arange ( npts , dtype = np . float32 ) / ( df * 3600 )
    # Generate the subplot for the seismic data.
    ax1 = plt . subplot2grid ( ( 2 , 5 ) , ( 0 , 0 ) , colspan = 4 )
    ax1 . plot ( t , trace . data , 'k' )
    ax1 . axis ( 'tight' )
    ax1 . set_ylim ( [ - 15 * np . mean ( np . abs ( trace . data ) ) , 15 * np . mean ( np . abs ( trace . data ) ) ] )
    # Generate the subplot for the correlation sum data, sharing the time axis.
    ax2 = plt . subplot2grid ( ( 2 , 5 ) , ( 1 , 0 ) , colspan = 4 , sharex = ax1 )
    # Plot the threshold values (positive and negative).
    ax2 . plot ( [ min ( t ) , max ( t ) ] , [ threshold , threshold ] , color = 'r' , lw = 1 , label = "Threshold" )
    ax2 . plot ( [ min ( t ) , max ( t ) ] , [ - threshold , - threshold ] , color = 'r' , lw = 1 )
    ax2 . plot ( t , cccsum , 'k' )
    ax2 . axis ( 'tight' )
    ax2 . set_ylim ( [ - 1.7 * threshold , 1.7 * threshold ] )
    ax2 . set_xlabel ( "Time after %s [hr]" % trace . stats . starttime . isoformat ( ) )
    # ax2.legend()
    # Generate a small subplot for the histogram of the cccsum data.
    ax3 = plt . subplot2grid ( ( 2 , 5 ) , ( 1 , 4 ) , sharey = ax2 )
    # BUG FIX: `normed` was removed from matplotlib (3.1+); `density=True`
    # is the supported equivalent (available since matplotlib 2.1).
    ax3 . hist ( cccsum_hist , 200 , density = True , histtype = 'stepfilled' ,
               orientation = 'horizontal' , color = 'black' )
    ax3 . set_ylim ( [ - 5 , 5 ] )
    fig = plt . gcf ( )
    fig . suptitle ( trace . id )
    fig . canvas . draw ( )
    fig = _finalise_figure ( fig = fig , ** kwargs )  # pragma: no cover
    return fig
def _update_chime_status ( self , message = None , status = None ) :
    """Update the cached chime state from a message or an explicit status.

    :param message: message to take the chime bit from; when ``message`` is a
        :py:class:`~alarmdecoder.messages.Message`, its ``chime_on`` bit takes
        precedence over ``status``.
    :param status: chime status used when no Message is supplied.
    :type status: bool
    :returns: the new chime status, or None if nothing could be determined
    """
    new_status = message . chime_on if isinstance ( message , Message ) else status
    if new_status is None :
        return
    if new_status != self . _chime_status :
        previous , self . _chime_status = self . _chime_status , new_status
        # Suppress the change event for the very first observation.
        if previous is not None :
            self . on_chime_changed ( status = self . _chime_status )
    return self . _chime_status
def export_xml_file_no_di ( self , directory , filename ) :
    """Exports diagram inner graph to a BPMN 2.0 XML file (without Diagram Interchange data).

    :param directory: string representing the output directory,
    :param filename: string representing the output file name.
    """
    # Delegate to the exporter, passing this diagram graph as the source.
    bpmn_export . BpmnDiagramGraphExport . export_xml_file_no_di ( directory , filename , self )
def get_success_url ( self ) :
    """Return the redirect URL used after a valid form submission.

    :rtype: str
    """
    if self . success_url :
        return force_text ( self . success_url )
    # Fall back to the index view of this view's URL namespace.
    return reverse ( '{0}:index' . format ( self . url_namespace ) )
def digest_filename(filename, algorithm=DEFAULT_HASH_ALGORITHM, hash_library=DEFAULT_HASH_LIBRARY, normalize_line_endings=False):
    """Return a digest object updated with the contents of *filename*.

    <Arguments>
      filename:
        Path of the file whose contents are hashed.
      algorithm:
        The hash algorithm (e.g., 'md5', 'sha1', 'sha256').
      hash_library:
        The library providing the hash algorithms (e.g., 'hashlib').
      normalize_line_endings:
        Whether or not to normalize line endings for cross-platform support.

    <Exceptions>
      securesystemslib.exceptions.FormatError, if the arguments are
      improperly formatted.
      securesystemslib.exceptions.UnsupportedAlgorithmError, if the given
      'algorithm' is unsupported.
      securesystemslib.exceptions.UnsupportedLibraryError, if the given
      'hash_library' is unsupported.

    <Side Effects>
      None.

    <Returns>
      Digest object (e.g., hashlib.new(algorithm)).
    """
    # Validate argument formats; raises
    # securesystemslib.exceptions.FormatError on mismatch.
    securesystemslib.formats.RELPATH_SCHEMA.check_match(filename)
    securesystemslib.formats.NAME_SCHEMA.check_match(algorithm)
    securesystemslib.formats.NAME_SCHEMA.check_match(hash_library)

    # Open 'filename' in read+binary mode and build the digest from its
    # contents. digest_fileobject() raises UnsupportedAlgorithmError or
    # UnsupportedLibraryError for bad algorithm/library choices.
    with open(filename, 'rb') as file_object:
        return digest_fileobject(file_object, algorithm, hash_library, normalize_line_endings)
def weingarten_image_curvature(image, sigma=1.0, opt='mean'):
    """Uses the weingarten map to estimate image mean or gaussian curvature.

    ANTsR function: `weingartenImageCurvature`

    Arguments
    ---------
    image : ANTsImage
        image from which curvature is calculated
    sigma : scalar
        smoothing parameter
    opt : string
        mean by default, otherwise `gaussian` or `characterize`

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> image = ants.image_read(ants.get_ants_data('mni')).resample_image((3,3,3))
    >>> imagecurv = ants.weingarten_image_curvature(image)
    """
    if image.dimension not in {2, 3}:
        raise ValueError('image must be 2D or 3D')
    if image.dimension == 2:
        # The native routine operates on 3D images only: replicate the 2D
        # image into a thin synthetic 3D stack, run the filter, and pull a
        # middle slice back out below.
        d = image.shape
        temp = np.zeros(list(d) + [10])
        for k in range(1, 7):
            # Only stack slices 1..6 are filled; the rest stay zero —
            # presumably padding for the filter's neighborhood (TODO confirm).
            voxvals = image[:d[0], :d[1]]
            temp[:d[0], :d[1], k] = voxvals
        temp = core.from_numpy(temp)
        myspc = image.spacing
        # Give the synthetic third axis the smallest in-plane spacing.
        myspc = list(myspc) + [min(myspc)]
        temp.set_spacing(myspc)
        temp = temp.clone('float')
    else:
        temp = image.clone('float')
    # Map the option string onto the integer code expected by the library;
    # 0 selects mean curvature (the default).
    optnum = 0
    if opt == 'gaussian':
        optnum = 6
    if opt == 'characterize':
        optnum = 5
    libfn = utils.get_lib_fn('weingartenImageCurvature')
    mykout = libfn(temp.pointer, sigma, optnum)
    mykout = iio.ANTsImage(pixeltype=image.pixeltype, dimension=3, components=image.components, pointer=mykout)
    if image.dimension == 3:
        return mykout
    elif image.dimension == 2:
        # Extract slice 4 (middle of the filled 1..6 range) and restore the
        # original 2D geometry/metadata.
        subarr = core.from_numpy(mykout.numpy()[:, :, 4])
        return core.copy_image_info(image, subarr)
def blend(self, other):
    """Alpha blend *other* on top of the current image.

    :param other: image to blend on top of this one.
    :raises NotImplementedError: always; alpha blending has not been ported
        to the xarray-backed implementation yet.
    """
    # The original kept the whole pre-xarray blending implementation after
    # this unconditional raise; that code was unreachable and has been
    # removed. (Typo in the message, "has not be", also fixed.)
    raise NotImplementedError("This method has not been implemented for "
                              "xarray support.")
def decimal(anon, obj, field, val):
    """Returns a random decimal"""
    # Delegates to the faker attached to the anonymizer; `obj` and `val` are
    # part of the anonymizer callback signature but are unused here.
    return anon.faker.decimal(field=field)
def create_default_links(self):
    """Create the default links between the IM and the device."""
    # Base link pair for group 0x00: the IM (PLM) records the device as a
    # responder (flags 0xe2 = controller record), and the device records the
    # IM as its controller (flags 0xa2 = responder record).
    self._plm.manage_aldb_record(0x40, 0xe2, 0x00, self.address, self.cat, self.subcat, self.product_key)
    self.manage_aldb_record(0x41, 0xa2, 0x00, self._plm.address, self._plm.cat, self._plm.subcat, self._plm.product_key)
    # NOTE(review): the call above uses self.address while the loop below
    # uses self._address — confirm both resolve to the same value.
    for link in self._stateList:
        state = self._stateList[link]
        if state.is_responder:
            # IM is controller
            self._plm.manage_aldb_record(0x40, 0xe2, link, self._address, 0x00, 0x00, 0x00)
            # Device is responder
            self.manage_aldb_record(0x41, 0xa2, link, self._plm.address, state.linkdata1, state.linkdata2, state.linkdata3)
        if state.is_controller:
            # IM is responder
            self._plm.manage_aldb_record(0x41, 0xa2, link, self._address, 0x00, 0x00, 0x00)
            # Device is controller
            self.manage_aldb_record(0x40, 0xe2, link, self._plm.address, 0x00, 0x00, 0x00)
    # Re-read the device's all-link database so local state reflects the new
    # records.
    self.read_aldb()
def _gen_xml(name, cpu, mem, diskp, nicp, hypervisor, os_type, arch, graphics=None, loader=None, **kwargs):
    '''
    Generate the XML string to define a libvirt VM

    :param name: domain name
    :param cpu: number of virtual CPUs
    :param mem: amount of memory, in MB (converted to KiB for libvirt)
    :param diskp: list of disk profile dicts (``model``, ``format``, optional
        ``device`` and ``source_file`` keys)
    :param nicp: network interface profile, passed through to the template
    :param hypervisor: hypervisor name, e.g. ``qemu``/``kvm``/``vmware``
    :param os_type: guest OS type
    :param arch: guest architecture string
    :param graphics: optional graphics definition dict
    :param loader: optional loader definition dict; ``path`` is required
    :param kwargs: extra options: ``boot_dev``, ``serial_type``,
        ``telnet_port``, ``console``
    :return: the rendered XML string, or ``''`` if the template is missing
    '''
    mem = int(mem) * 1024  # MB -> KiB, as expected by libvirt
    context = {'hypervisor': hypervisor, 'name': name, 'cpu': six.text_type(cpu), 'mem': six.text_type(mem), }
    if hypervisor in ['qemu', 'kvm']:
        context['controller_model'] = False
    elif hypervisor == 'vmware':
        # TODO: make bus and model parameterized, this works for 64-bit Linux
        context['controller_model'] = 'lsilogic'
    # By default, set the graphics to listen to all addresses
    if graphics:
        if 'listen' not in graphics:
            graphics['listen'] = {'type': 'address', 'address': '0.0.0.0'}
        elif 'address' not in graphics['listen'] and graphics['listen']['type'] == 'address':
            graphics['listen']['address'] = '0.0.0.0'
        # Graphics of type 'none' means no graphics device at all
        if graphics.get('type', 'none') == 'none':
            graphics = None
    context['graphics'] = graphics
    if loader and 'path' not in loader:
        log.info('`path` is a required property of `loader`, and cannot be found. Skipping loader configuration')
        loader = None
    elif loader:
        # Serialize all non-path loader keys as XML attributes.
        loader_attributes = []
        for key, val in loader.items():
            if key == 'path':
                continue
            loader_attributes.append("{key}='{val}'".format(key=key, val=val))
        loader['_attributes'] = ' '.join(loader_attributes)
    if 'boot_dev' in kwargs:
        context['boot_dev'] = kwargs['boot_dev'].split()
    else:
        context['boot_dev'] = ['hd']
    context['loader'] = loader
    if os_type == 'xen':
        # Compute the Xen PV boot method
        if __grains__['os_family'] == 'Suse':
            context['kernel'] = '/usr/lib/grub2/x86_64-xen/grub.xen'
            context['boot_dev'] = []
    if 'serial_type' in kwargs:
        context['serial_type'] = kwargs['serial_type']
    if 'serial_type' in context and context['serial_type'] == 'tcp':
        if 'telnet_port' in kwargs:
            context['telnet_port'] = kwargs['telnet_port']
        else:
            context['telnet_port'] = 23023  # FIXME: use random unused port
    if 'serial_type' in context:
        if 'console' in kwargs:
            context['console'] = kwargs['console']
        else:
            context['console'] = True
    context['disks'] = []
    disk_bus_map = {'virtio': 'vd', 'xen': 'xvd', 'fdc': 'fd', 'ide': 'hd'}
    for i, disk in enumerate(diskp):
        prefix = disk_bus_map.get(disk['model'], 'sd')
        disk_context = {'device': disk.get('device', 'disk'), 'target_dev': '{0}{1}'.format(prefix, string.ascii_lowercase[i]), 'disk_bus': disk['model'], 'type': disk['format'], 'index': six.text_type(i), }
        # Bug fix: the original tested `if 'source_file' and disk['source_file']`;
        # the string literal is always truthy, so a disk without a
        # 'source_file' key raised KeyError instead of being skipped.
        if disk.get('source_file'):
            disk_context['source_file'] = disk['source_file']
        if hypervisor in ['qemu', 'kvm', 'bhyve', 'xen']:
            disk_context['address'] = False
            disk_context['driver'] = True
        elif hypervisor in ['esxi', 'vmware']:
            disk_context['address'] = True
            disk_context['driver'] = False
        context['disks'].append(disk_context)
    context['nics'] = nicp
    context['os_type'] = os_type
    context['arch'] = arch
    fn_ = 'libvirt_domain.jinja'
    try:
        template = JINJA.get_template(fn_)
    except jinja2.exceptions.TemplateNotFound:
        log.error('Could not load template %s', fn_)
        return ''
    return template.render(**context)
def find(self, **kwargs):
    """Returns List(typeof=<entity>).

    Executes collection's find method based on keyword args and maps results
    (dict to list of entity instances).

    Set max_limit parameter to limit the amount of data sent back through
    the network.

    Example::

        manager = EntityManager(Product)
        products = yield manager.find(age={'$gt': 17}, max_limit=100)
    """
    # Pop the transport-level option before the remaining kwargs are used
    # as the query document.
    max_limit = kwargs.pop('max_limit', None)
    cursor = self.__collection.find(kwargs)
    documents = yield cursor.to_list(max_limit)
    instances = []
    for document in documents:
        entity = self.__entity()
        entity.map_dict(document)
        instances.append(entity)
    return instances
def calculate(self):
    """Calculate the overall counts of best, worst, fastest, slowest, total
    found, total not found and total runtime.

    Results are returned in a dictionary with the above parameters as keys.
    """
    best = worst = fastest = slowest = ()
    found = 0
    notfound = 0
    total_time = 0
    for source, rec in self.source_stats.items():
        # Track the sources with the most / fewest successes.
        if not best or rec.successes > best[1]:
            best = (source, rec.successes, rec.success_rate())
        if not worst or rec.successes < worst[1]:
            worst = (source, rec.successes, rec.success_rate())
        # Track the fastest / slowest non-zero average runtimes.
        avg = self.avg_time(source)
        if not fastest or (avg != 0 and avg < fastest[1]):
            fastest = (source, avg)
        if not slowest or (avg != 0 and avg > slowest[1]):
            slowest = (source, avg)
        found += rec.successes
        notfound += rec.fails
        total_time += sum(rec.runtimes)
    return {
        'best': best,
        'worst': worst,
        'fastest': fastest,
        'slowest': slowest,
        'found': found,
        'notfound': notfound,
        'total_time': total_time,
    }
def prepare_data_maybe_download(directory):
    """Download and unpack dialogs if necessary.

    :param directory: target directory for the archive and the extracted
        ``dialogs`` tree.
    """
    filename = 'ubuntu_dialogs.tgz'
    url = 'http://cs.mcgill.ca/~jpineau/datasets/ubuntu-corpus-1.0/ubuntu_dialogs.tgz'
    dialogs_path = os.path.join(directory, 'dialogs')
    # Test whether there are already some dialogs in the path.
    if not os.path.exists(os.path.join(directory, "10", "1.tst")):
        # dialogs are missing
        archive_path = os.path.join(directory, filename)
        if not os.path.exists(archive_path):
            # archive missing, download it
            print("Downloading %s to %s" % (url, archive_path))
            filepath, _ = urllib.request.urlretrieve(url, archive_path)
            # Bug fix: this line was a Python 2 print *statement*, which is a
            # SyntaxError on Python 3 (the rest of the function already uses
            # the print() function).
            print("Successfully downloaded " + filepath)
        # unpack data
        if not os.path.exists(dialogs_path):
            print("Unpacking dialogs ...")
            # NOTE: extractall() trusts the archive contents (no member path
            # sanitisation) — acceptable only because the source is fixed.
            with tarfile.open(archive_path) as tar:
                tar.extractall(path=directory)
            print("Archive unpacked.")
    return
def flatten(l, types=(list,)):
    """Flatten an arbitrarily nested list/tuple into a single dimension.

    Turns ``[(5, 6, [8, 3]), 2, [2, 1, (3, 4)]]`` into
    ``[5, 6, 8, 3, 2, 2, 1, 3, 4]``.

    Safe to call on something that is not an instance of *types* — the
    original input is returned unchanged. For backwards compatibility a
    concrete list (not a lazy iterable) is returned.
    """
    if isinstance(l, types):
        return list(flattened_iterator(l, types))
    return l
def validate_registry_uri_authority(auth: str) -> None:
    """Raise an exception if the authority is not a valid ENS domain
    or a valid checksummed contract address.
    """
    # Keep the original `is False` semantics: only an explicit False from
    # is_ens_domain() counts as "not an ENS domain".
    looks_like_ens = is_ens_domain(auth) is not False
    looks_like_address = is_checksum_address(auth)
    if not (looks_like_ens or looks_like_address):
        raise ValidationError(f"{auth} is not a valid registry URI authority.")
def admin_url_params(request, params=None):
    """Given a request, looks at GET and POST values to determine which
    params should be added. Is used to keep the context of popup and
    picker mode.
    """
    # Note: a caller-supplied non-empty dict is mutated in place, matching
    # the original `params or {}` behaviour.
    result = params or {}
    if popup_status(request):
        result[IS_POPUP_VAR] = '1'
        pick = popup_pick_type(request)
        if pick:
            result['_pick'] = pick
    return result
def get_side_length_of_resize_handle(view, item):
    """Calculate the side length of a resize handle.

    :param rafcon.gui.mygaphas.view.ExtendedGtkView view: View
    :param rafcon.gui.mygaphas.items.state.StateView item: StateView
    :return: side length (0 for unsupported item types)
    :rtype: float
    """
    # Imported locally to avoid a circular import at module load time.
    from rafcon.gui.mygaphas.items.state import StateView, NameView
    if isinstance(item, StateView):
        return item.border_width * view.get_zoom_factor() / 1.5
    if isinstance(item, NameView):
        return item.parent.border_width * view.get_zoom_factor() / 2.5
    return 0
def _get_version_output ( self ) :
"""Ignoring errors , call ` ceph - - version ` and return only the version
portion of the output . For example , output like : :
ceph version 9.0.1-1234kjd ( asdflkj2k3jh234jhg )
Would return : :
9.0.1-1234kjd""" | if not self . executable :
return ''
command = [ self . executable , '--version' ]
out , _ , _ = self . _check ( self . conn , command )
try :
return out . decode ( 'utf-8' ) . split ( ) [ 2 ]
except IndexError :
return '' |
def queue_delete(self, queue, if_unused=False, if_empty=False):
    """Delete queue by name.

    :param queue: name of the queue to delete.
    :param if_unused: forwarded to the channel's queue_delete.
    :param if_empty: forwarded to the channel's queue_delete.
    :returns: whatever the underlying channel call returns.
    """
    # Thin wrapper: delegate straight to the underlying channel.
    return self.channel.queue_delete(queue, if_unused, if_empty)
def group(text, size):
    """Group ``text`` into blocks of ``size``.

    Example:
        >>> group("test", 2)
        ['te', 'st']

    Args:
        text (str): text to separate
        size (int): size of groups to split the text into

    Returns:
        List of size-sized groups of text (last group may be shorter)

    Raises:
        ValueError: If size is non-positive
    """
    if size <= 0:
        raise ValueError("n must be a positive integer")
    blocks = []
    for start in range(0, len(text), size):
        blocks.append(text[start:start + size])
    return blocks
def ones_matrix_band_part(rows, cols, num_lower, num_upper, out_shape=None):
    """Matrix band part of ones.

    Args:
        rows: int determining number of rows in output
        cols: int
        num_lower: int, maximum distance backward. Negative values indicate
            unlimited.
        num_upper: int, maximum distance forward. Negative values indicate
            unlimited.
        out_shape: shape to reshape output by.

    Returns:
        Tensor of size rows * cols reshaped into shape out_shape.
    """
    all_static = all(isinstance(dim, int) for dim in (rows, cols, num_lower, num_upper))
    if all_static:
        # All sizes are known constants, so build the band in numpy and
        # embed it as a TF constant.
        lower = rows - 1 if num_lower < 0 else num_lower
        upper = cols - 1 if num_upper < 0 else num_upper
        lower_mask = np.tri(cols, rows, lower).T
        upper_mask = np.tri(rows, cols, upper)
        band = np.ones((rows, cols)) * lower_mask * upper_mask
        if out_shape:
            band = band.reshape(out_shape)
        return tf.constant(band, tf.float32)
    # Dynamic sizes: fall back to the TF graph op.
    band = tf.matrix_band_part(tf.ones([rows, cols]), tf.cast(num_lower, tf.int64), tf.cast(num_upper, tf.int64))
    if out_shape:
        band = tf.reshape(band, out_shape)
    return band
def shift_coordinate_grid(self, x_shift, y_shift, pixel_unit=False):
    """Shifts the coordinate system.

    :param x_shift: shift in x (or RA)
    :param y_shift: shift in y (or DEC)
    :param pixel_unit: bool, if True, units of pixels in input, otherwise RA/DEC
    :return: updated data class with change in coordinate system
    """
    # Delegate the shift to the coordinate-system object, then rebuild the
    # cached pixel grids so they reflect the new origin.
    self._coords.shift_coordinate_grid(x_shift, y_shift, pixel_unit=pixel_unit)
    self._x_grid, self._y_grid = self._coords.coordinate_grid(self.nx)
def get_hash(self, msg):
    """Return the SHA-1 hex digest of *msg*.

    :rtype: string
    """
    # Undecodable characters are substituted rather than raising, so any
    # str input produces a digest.
    encoded = msg.encode('utf-8', 'replace')
    return hashlib.sha1(encoded).hexdigest()
def sonos_uri_from_id(self, item_id):
    """Get a uri which can be sent for playing.

    Args:
        item_id (str): The unique id of a playable item for this music
            service, such as that returned in the metadata from
            `get_metadata`, eg ``spotify:track:2qs5ZcLByNTctJKbhAZ9JE``

    Returns:
        str: A URI of the form:
        ``soco://spotify%3Atrack%3A2qs5ZcLByNTctJKbhAZ9JE?sid=2311&sn=1``
        which encodes the ``item_id``, and relevant data from the account
        for the music service. This URI can be sent to a Sonos device for
        playing, and the device itself will retrieve all the necessary
        metadata such as title, album etc.
    """
    # Real Sonos URIs look like:
    #   x-sonos-http:tr%3a92352286.mp3?sid=2&flags=8224&sn=4
    # The extension presumably comes from the mime-type in the
    # MusicService.get_metadata() result, the scheme indicates how the
    # player obtains the stream, and the flags param is of unclear purpose.
    # None of these turns out to be necessary: a dummy 'soco' scheme with
    # the quoted id and the sid/sn query params is enough for the player.
    #
    # quote_url breaks on unicode in early Py2, so encode first.
    encoded_id = quote_url(item_id.encode('utf-8'))
    return "soco://{}?sid={}&sn={}".format(encoded_id, self.service_id, self.account.serial_number)
def config(self, show_row_hdrs=True, show_col_hdrs=True, show_col_hdr_in_cell=False, auto_resize=True):
    """Override the in-class params:

    @param show_row_hdrs: show row headers
    @param show_col_hdrs: show column headers
    @param show_col_hdr_in_cell: embed column header in each cell
    @param auto_resize: auto resize according to the size of terminal
    """
    self.show_row_hdrs = show_row_hdrs
    self.show_col_hdrs = show_col_hdrs
    self.show_col_hdr_in_cell = show_col_hdr_in_cell
    # Bug fix: auto_resize was accepted and documented but never stored, so
    # the override silently had no effect.
    self.auto_resize = auto_resize
def fcpinfo(rh):
    """Get fcp info and filter by the status.

    Input:
        Request Handle with the following properties:
            function    - 'GETVM'
            subfunction - 'FCPINFO'
            userid      - userid of the virtual machine
            parms['status'] - The status for filter results.

    Output:
        Request Handle updated with the results.
        Return code - 0: ok, non-zero: error
    """
    # Bug fix: the entry log previously said "Enter changeVM.dedicate",
    # a copy/paste from another function.
    rh.printSysLog("Enter getVM.fcpinfo")
    parms = ["-T", rh.userid]
    results = invokeSMCLI(rh, "System_WWPN_Query", parms, hideInLog=[])
    if results['overallRC'] == 0:
        # Extract data from the smcli return, filtered by requested status,
        # and write it into the response.
        ret = extract_fcp_data(results['response'], rh.parms['status'])
        rh.printLn("N", ret)
    else:
        # SMAPI API failed. (Bug fix: the original reported this failure
        # twice — once before and once after the success check.)
        rh.printLn("ES", results['response'])
        rh.updateResults(results)  # Use results from invokeSMCLI
    return rh.results['overallRC']
def _generate_main_files_header(notebook_object, notebook_title="Notebook Title", notebook_description="Notebook Description"):
    """Internal function that is used for generation of the 'MainFiles'
    notebooks header.

    Parameters
    ----------
    notebook_object : notebook object
        Object of "notebook" class where the header will be created.
    notebook_title : None or str
        Title of the Notebook.
    notebook_description : str
        An introductory text to present the Notebook and involve the reader.
    """
    cells = notebook_object["cells"]
    # ===== Creation of Header =====
    header = HEADER_MAIN_FILES.replace("Notebook Title", notebook_title)
    cells.append(nb.v4.new_markdown_cell(header))
    # ===== Insertion of the div reserved to the Notebook Description =====
    cells.append(nb.v4.new_markdown_cell(notebook_description, **{"metadata": {"tags": ["test"]}}))
def readLipd(usr_path=""):
    """Read LiPD file(s).

    Enter a file path, directory path, or leave args blank to trigger gui.

    :param str usr_path: Path to file/directory (optional)
    :return dict _d: Metadata
    """
    global cwd, settings, files
    if settings["verbose"]:
        __disclaimer(opt="update")
    start = clock()  # benchmark start
    # Reset the list of discovered .lpd files for this run, then locate
    # files under usr_path (or prompt via gui when the path is empty).
    files[".lpd"] = []
    __read(usr_path, ".lpd")
    # Parse the located files into the metadata dictionary.
    _d = __read_lipd_contents()
    end = clock()
    logger_benchmark.info(log_benchmark("readLipd", start, end))
    return _d
def show_fibrechannel_interface_info_output_show_fibrechannel_interface_show_fibrechannel_info_port_index(self, **kwargs):
    """Auto Generated Code"""
    # Build the RPC payload element tree. Note: the initial "config"
    # element is immediately replaced, matching the generated original.
    config = ET.Element("config")
    root = ET.Element("show_fibrechannel_interface_info")
    config = root
    output = ET.SubElement(root, "output")
    interface = ET.SubElement(output, "show-fibrechannel-interface")
    rbridge_key = ET.SubElement(interface, "portsgroup-rbridgeid")
    rbridge_key.text = kwargs.pop('portsgroup_rbridgeid')
    info = ET.SubElement(interface, "show-fibrechannel-info")
    port_index = ET.SubElement(info, "port-index")
    port_index.text = kwargs.pop('port_index')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def t_pragma_ID(self, t):
    r'[_a-zA-Z][_a-zA-Z0-9]*'
    # PLY token rule: the raw docstring above IS the token regex. It had
    # been whitespace-mangled ('[ _ a - zA - Z ] ...'), which changes what
    # the lexer matches; restored to the identifier pattern.
    # Pragma directives PUSH/POP get their own token type; any other
    # identifier passes through unchanged.
    if t.value.upper() in ('PUSH', 'POP'):
        t.type = t.value.upper()
    return t
def GetAllUserSummaries():
    """Returns a string containing summary info for all GRR users."""
    grr_api = maintenance_utils.InitGRRRootAPI()
    users = sorted(grr_api.ListGrrUsers(), key=lambda user: user.username)
    # One summary per user, blank-line separated.
    return "\n\n".join(_Summarize(user.data) for user in users)
def add_filter(self, ftype, func):
    '''Register a new output filter. Whenever bottle hits a handler output
    matching `ftype`, `func` is applied to it.
    '''
    if not isinstance(ftype, type):
        raise TypeError("Expected type object, got %s" % type(ftype))
    # Replace any existing filter registered for the same type.
    self.castfilter = [(t, f) for (t, f) in self.castfilter if t != ftype]
    self.castfilter.append((ftype, func))
    # Bug fix: the bare tuple sort compared type objects, which raises
    # TypeError on Python 3 as soon as two filters are registered; sort by
    # type name instead for a stable, py3-safe ordering.
    self.castfilter.sort(key=lambda pair: pair[0].__name__)
def filter(self, endpoint, params):
    """Makes a get request by constructing the path from an endpoint and a
    dict with filter query params.

    e.g.
        params = {'category__in': [1, 2]}
        response = self.client.filter('/experiences/', params)
    """
    # Normalise the params through the client's parser, then URL-encode
    # them into the query string.
    query = urlencode(self.parse_params(params))
    return self.get('{0}?{1}'.format(endpoint, query))
def long_click(self, duration=2.0):
    """Perform the long click action on the UI element(s) represented by the
    UI proxy. If this UI proxy represents a set of UI elements, the first one
    in the set is clicked and the anchor point of the UI element is used as
    the default one. Similar to click but press the screen for the given time
    interval and then release.

    Args:
        duration (:py:obj:`float`): whole action duration.

    Return:
        the same as :py:meth:`poco.pocofw.Poco.long_click`, depending on
        poco agent implementation.
    """
    try:
        duration = float(duration)
    except ValueError:
        raise ValueError('Argument `duration` should be <float>. Got {}'.format(repr(duration)))
    # Resolve the click position (in percentage coordinates) from the
    # focused point, defaulting to the anchor.
    position = self.get_position(self._focus or 'anchor')
    self.poco.pre_action('long_click', self, position)
    result = self.poco.long_click(position, duration)
    self.poco.post_action('long_click', self, position)
    return result
def requirements(fname):
    """Utility function to create a list of requirements from the output of
    the pip freeze command saved in a text file.
    """
    lines = Setup.read(fname, fail_silently=True).split('\n')
    stripped = (line.strip() for line in lines)
    # Drop blank lines and comments.
    return [line for line in stripped if line and not line.startswith('#')]
def _isnan ( self ) :
"""Return if each value is NaN .""" | if self . _can_hold_na :
return isna ( self )
else : # shouldn ' t reach to this condition by checking hasnans beforehand
values = np . empty ( len ( self ) , dtype = np . bool_ )
values . fill ( False )
return values |
def deactivate():
    """Deactivate a state in this thread."""
    # Drop the thread-local state and schema, if present.
    for attr in ("current_state", "schema"):
        if hasattr(_mode, attr):
            delattr(_mode, attr)
    # Ask every connection that supports it to reset its schema.
    for key in connections:
        conn = connections[key]
        if hasattr(conn, 'reset_schema'):
            conn.reset_schema()
def get_enabled():
    '''Return which jails are set to be run

    CLI Example:

    .. code-block:: bash

        salt '*' jail.get_enabled
    '''
    ret = []
    for rconf in ('/etc/rc.conf', '/etc/rc.conf.local'):
        if not os.access(rconf, os.R_OK):
            continue
        with salt.utils.files.fopen(rconf, 'r') as _fp:
            for line in _fp:
                line = salt.utils.stringutils.to_unicode(line)
                # Only the jail_list="..." assignment is of interest.
                if not line.strip() or not line.startswith('jail_list='):
                    continue
                ret.extend(line.split('"')[1].split())
    return ret
async def _discard(self, path, *, recurse=None, separator=None, cas=None):
    """Deletes the Key.

    :param path: key path, appended to the /v1/kv/ endpoint.
    :param recurse: forwarded as a query param — presumably deletes all
        keys under the prefix when set; confirm against the KV API docs.
    :param separator: forwarded as a query param.
    :param cas: check-and-set index forwarded as a query param.
    :returns: the response of the DELETE request.
    """
    path = "/v1/kv/%s" % path
    response = await self._api.delete(path, params={"cas": cas, "recurse": recurse, "separator": separator})
    return response
def getkey(ctx, pubkey):
    """Obtain private key in WIF format"""
    # Look up the private key matching *pubkey* in the wallet and print it.
    click.echo(ctx.bitshares.wallet.getPrivateKeyForPublicKey(pubkey))
def get_node_network_state(self, node_address: Address):
    """Returns the current network status of `node_address`."""
    # Snapshot the chain state from the running raiden node, then query the
    # view helper for the peer's reachability status.
    return views.get_node_network_status(chain_state=views.state_from_raiden(self.raiden), node_address=node_address, )
def update_keyboard_mapping(conn, e):
    """Whenever the keyboard mapping is changed, this function needs to be
    called to update xpybutil's internal representation of the current keysym
    table. Indeed, xpybutil will do this for you automatically.

    Moreover, if something is changed that affects the current keygrabs,
    xpybutil will initiate a regrab with the changed keycode.

    :param conn: X connection used to fetch the new mapping.
    :param e: The MappingNotify event, or None to force a full refresh.
    :type e: xcb.xproto.MappingNotifyEvent
    :rtype: void
    """
    global __kbmap, __keysmods
    newmap = get_keyboard_mapping(conn).reply()
    if e is None:
        # No event: unconditionally replace the cached keysym map and the
        # keysym->modifier table.
        __kbmap = newmap
        __keysmods = get_keys_to_mods(conn)
        return
    if e.request == xproto.Mapping.Keyboard:
        # Keycode->keysym table changed: find keycodes whose keysym moved
        # and regrab them so existing key bindings keep working.
        changes = {}
        for kc in range(*get_min_max_keycode(conn)):
            knew = get_keysym(kc, kbmap=newmap)
            oldkc = get_keycode(conn, knew)
            if oldkc != kc:
                changes[oldkc] = kc
        __kbmap = newmap
        __regrab(changes)
    elif e.request == xproto.Mapping.Modifier:
        # Modifier mapping changed: refresh the keysym->modifier table.
        # NOTE(review): unlike the refresh above, this call omits `conn` —
        # confirm get_keys_to_mods has a default connection argument.
        __keysmods = get_keys_to_mods()
def cable_from_file(filename):
    """Returns a cable from the provided file.

    `filename`
        An absolute path to the cable file.
    """
    # Bug fix: the file handle was previously opened and never closed; use a
    # context manager so it is released promptly.
    with codecs.open(filename, 'rb', 'utf-8') as fileobj:
        html = fileobj.read()
    return cable_from_html(html, reader.reference_id_from_filename(filename))
def lin_moma2(self, objective, wt_obj):
    """Find the smallest redistribution vector using a linear objective.

    The change in flux distribution is minimized by minimizing the sum
    of the absolute values of the differences of wild type FBA solution
    and the knockout strain flux solution.

    Creates the constraint that we select the optimal flux vector that
    is closest to the wildtype. This might still return an arbitrary flux
    vector that maximizes the objective function.

    Args:
        objective: Objective reaction for the model.
        wt_obj: The flux value for your wild type objective reactions.
            Can either use an experimental value or one determined by FBA
            by using :meth:`.get_fba_obj_flux(objective)`.
    """
    reactions = set(self._adjustment_reactions())
    z_diff = self._z_diff
    v = self._v
    v_wt = self._v_wt
    with self.constraints() as constr:
        for f_reaction in reactions:
            # Linearize |v_wt - v| per reaction: add both inequalities so
            # z_diff bounds the absolute flux difference from above.
            constr.add(z_diff[f_reaction] >= v_wt[f_reaction] - v[f_reaction], v_wt[f_reaction] - v[f_reaction] >= -z_diff[f_reaction])
        # Minimizing the sum of z_diff then minimizes sum |v_wt - v|.
        self._prob.set_objective(z_diff.sum(reactions))
        # Require the wild type flux to achieve at least wt_obj.
        constr.add(self._v_wt[objective] >= wt_obj)
        self._solve(lp.ObjectiveSense.Minimize)
def contains(self, location):
    """Checks that the provided point is on the sphere."""
    # A point lies on the sphere when its squared distance from the origin
    # (approximately) equals the squared radius.
    squared_distance = sum(coord ** 2 for coord in location)
    return self.almostEqual(squared_distance, self.radius ** 2)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.