signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def startMenu(translator, navigation, tag):
    """Drop-down menu-style navigation view.

    For each primary navigation element, a copy of the I{tab} pattern is
    loaded from the tag, with its I{href} slot filled with the item's URL,
    its I{name} slot filled with the user-visible name, and its I{kids}
    slot filled with secondary navigation.  Secondary elements are rendered
    recursively via the I{subtabs} pattern.

    @type translator: L{IWebTranslator} provider
    @type navigation: L{list} of L{Tab}
    @rtype: {nevow.stan.Tag}
    """
    setTabURLs(navigation, translator)
    one_pattern = IQ(tag).onePattern

    def render_tabs(tab_list):
        # Lazily yield one filled 'tab' pattern per navigation entry.
        for entry in tab_list:
            if entry.children:
                sub = one_pattern('subtabs').fillSlots(
                    'kids', render_tabs(entry.children))
            else:
                sub = ''
            yield dictFillSlots(
                one_pattern('tab'),
                {'href': entry.linkURL, 'name': entry.name, 'kids': sub},
            )

    return tag.fillSlots('tabs', render_tabs(navigation))
|
def _update_plotting_params ( self , ** kwargs ) :
"""Some plotting parameters can be changed through the tool ; this
updataes those plotting parameters ."""
|
scalars = kwargs . get ( 'scalars' , None )
if scalars is not None :
old = self . display_params [ 'scalars' ]
self . display_params [ 'scalars' ] = scalars
if old != scalars :
self . plotter . subplot ( * self . loc )
self . plotter . remove_actor ( self . _data_to_update , reset_camera = False )
self . _need_to_update = True
self . valid_range = self . input_dataset . get_data_range ( scalars )
# self . display _ params [ ' rng ' ] = self . valid _ range
cmap = kwargs . get ( 'cmap' , None )
if cmap is not None :
self . display_params [ 'cmap' ] = cmap
|
def get_mean_values(self, C, sites, rup, dists, a1100):
    """Return the mean values for a specific IMT.

    When ``a1100`` is an ndarray a site model is defined, so the actual
    site parameters are used; otherwise a reference rock site
    (vs30 = 1100 m/s) with the default basin model is assumed.
    """
    if isinstance(a1100, np.ndarray):
        # Site model defined
        vs30 = sites.vs30
        z2pt5 = sites.z2pt5
    else:
        # Default site and basin model
        vs30 = 1100.0 * np.ones(len(sites.vs30))
        z2pt5 = self._select_basin_model(1100.0) * np.ones_like(vs30)
    # Sum the individual model terms; addition order matches the original
    # left-to-right chain.
    terms = [
        self._get_magnitude_term(C, rup.mag),
        self._get_geometric_attenuation_term(C, rup.mag, dists.rrup),
        self._get_style_of_faulting_term(C, rup),
        self._get_hanging_wall_term(C, rup, dists),
        self._get_shallow_site_response_term(C, vs30, a1100),
        self._get_basin_response_term(C, z2pt5),
        self._get_hypocentral_depth_term(C, rup),
        self._get_fault_dip_term(C, rup),
        self._get_anelastic_attenuation_term(C, dists.rrup),
    ]
    total = terms[0]
    for term in terms[1:]:
        total = total + term
    return total
|
def diff(name):
    '''Get information on changes made to a container's filesystem since it
    was created.  Equivalent to running the ``docker diff`` Docker CLI
    command.

    name
        Container name or ID

    **RETURN DATA**

    A dictionary with any of the keys ``Added``, ``Changed`` and
    ``Deleted``, each mapping to a list of paths.  A key is only present
    when there were changes of that kind, so an unchanged container yields
    an empty dict.

    CLI Example:

    .. code-block:: bash

        salt myminion docker.diff mycontainer
    '''
    changes = _client_wrapper('diff', name)
    kind_names = {0: 'Changed', 1: 'Added', 2: 'Deleted'}
    result = {}
    for entry in changes:
        label = kind_names.get(entry['Kind'], 'Unknown')
        result.setdefault(label, []).append(entry['Path'])
    if 'Unknown' in result:
        # An unrecognized Kind value usually means the Docker API changed.
        log.error(
            'Unknown changes detected in docker.diff of container %s. '
            'This is probably due to a change in the Docker API. Please '
            'report this to the SaltStack developers',
            name
        )
    return result
|
def generateRevision(self):
    """Generate the revision file for this builder.

    Looks up the current source revision (SVN first, then git as a
    fallback) and writes ``__revision__ = <rev>`` to the builder's
    revision file.  Silently does nothing if the source path does not
    exist, no VCS tool is available, or the file cannot be written.
    """
    revpath = self.sourcePath()
    if not os.path.exists(revpath):
        return
    # determine the revision file location
    revfile = os.path.join(revpath, self.revisionFilename())
    mode = ''
    # Popen raises OSError when the executable is missing; the original
    # caught WindowsError, which is undefined on non-Windows platforms
    # (NameError) -- OSError covers all platforms, WindowsError included.
    try:
        proc = subprocess.Popen(['svn', 'info', revpath],
                                stdout=subprocess.PIPE)
        mode = 'svn'
    except OSError:
        try:
            proc = subprocess.Popen(['git', 'rev-parse', 'HEAD', revpath],
                                    stdout=subprocess.PIPE)
            mode = 'git'
        except OSError:
            return
    # process SVN revision output
    rev = None
    if mode == 'svn':
        for line in proc.stdout:
            # proc.stdout yields bytes on Python 3; decode before matching.
            if isinstance(line, bytes):
                line = line.decode('utf-8', 'ignore')
            data = re.match(r'^Revision: (\d+)', line)
            if data:
                rev = int(data.group(1))
                break
    if rev is not None:
        try:
            # `with` guarantees the handle is closed even on write errors.
            with open(revfile, 'w') as f:
                f.write('__revision__ = {0}\n'.format(rev))
        except IOError:
            pass
|
def send_mail_worker(config, mail, event):
    """Worker task that sends *mail* via the configured SMTP server.

    Blocks the calling process unless run in a thread.  Returns a tuple
    ``(success, log, event)`` where *log* is a human-readable transcript.
    """
    log = ""
    try:
        # NOTE(review): the TLS branch is assumed to apply after either
        # connection type -- confirm against the original nesting.
        smtp_cls = SMTP_SSL if config.mail_ssl else SMTP
        server = smtp_cls(config.mail_server,
                          port=config.mail_server_port, timeout=30)
        if config.mail_tls:
            log += 'Starting TLS\n'
            server.starttls()
        if config.mail_username != '':
            log += 'Logging in with ' + str(config.mail_username) + "\n"
            server.login(config.mail_username, config.mail_password)
        else:
            log += 'No username, trying anonymous access\n'
        log += 'Sending Mail\n'
        response_send = server.send_message(mail)
        server.quit()
    except timeout as e:
        log += 'Could not send email to enrollee, mailserver timeout: ' + str(e) + "\n"
        return False, log, event
    log += 'Server response:' + str(response_send)
    return True, log, event
|
def remove_if_same(self, key, value):
    """Atomically remove the entry for *key* only if it currently maps to *value*.

    Equivalent to::

        if map.contains_key(key) and map.get(key).equals(value):
            map.remove(key)
            return True
        else:
            return False

    except that the action is performed atomically.

    **Warning: this method uses the __hash__ and __eq__ of the binary form
    of the key, not the implementations defined in the key's class.**

    :param key: (object), the specified key.
    :param value: (object), remove the key if it has this value.
    :return: (bool), ``true`` if the value was removed.
    """
    check_not_none(key, "key can't be None")
    check_not_none(value, "value can't be None")
    # Serialize both sides once, then delegate to the internal protocol call.
    return self._remove_if_same_internal_(self._to_data(key),
                                          self._to_data(value))
|
def _GenOpenApiSpec(service_class_names, output_path, hostname=None,
                    application_path=None, x_google_api_name=False):
    """Write OpenAPI documents generated from the service classes to file.

    Args:
      service_class_names: A list of fully qualified ProtoRPC service names.
      output_path: The directory to which to output the OpenAPI specs.
      hostname: A string hostname which will be used as the default version
        hostname.  If no hostname is specified in the @endpoints.api
        decorator, this value is the fallback.  Defaults to None.
      application_path: A string containing the path to the AppEngine app.
      x_google_api_name: Whether to add the x-google-api-name extension.

    Returns:
      A list of OpenAPI spec filenames.
    """
    output_files = []
    service_configs = GenApiConfig(
        service_class_names, hostname=hostname,
        config_string_generator=openapi_generator.OpenApiGenerator(),
        application_path=application_path,
        x_google_api_name=x_google_api_name)
    # dict.items() works on both Python 2 and 3; iteritems() is Py2-only
    # and breaks under Python 3.
    for api_name_version, config in service_configs.items():
        openapi_name = api_name_version.replace('-', '') + 'openapi.json'
        output_files.append(_WriteFile(output_path, openapi_name, config))
    return output_files
|
def to_array(self, variables):
    """Convert the clamping to a 1-D array aligned with the given variables.

    Parameters
    ----------
    variables : list[str]
        List of variable names.

    Returns
    -------
    `numpy.ndarray`_
        1-D int8 array where position ``i`` holds the sign of the clamped
        variable at position ``i`` in *variables* (0 if unclamped).

    .. _numpy.ndarray: http://docs.scipy.org/doc/numpy/reference/generated/numpy.ndarray.html#numpy.ndarray
    """
    signs = dict(self)
    # Unclamped variables default to 0, matching the zero-initialized array.
    return np.fromiter((signs.get(name, 0) for name in variables),
                       dtype=np.int8, count=len(variables))
|
def orbitproject(x, y, inc, phi=0, psi=0):
    """Transform x, y planar coordinates into the observer's coordinate frame.

    x, y are coordinates in the z=0 plane (plane of the orbit).  The
    observer is at (inc, phi) on the celestial sphere (angles in radians);
    psi is the orientation of the final x-y axes about the (inc, phi)
    vector.

    Parameters
    ----------
    x, y : float or array-like
        Coordinates to transform.
    inc : float or array-like
        Polar angle(s) of observer (inc=0 is the north pole of the
        original x-y plane); same as standard "inclination".
    phi : float or array-like, optional
        Azimuthal angle of observer around the z-axis.
    psi : float or array-like, optional
        Orientation of the final observer coordinate frame (azimuthal
        around the (inc, phi) vector).

    Returns
    -------
    x, y, z : ``ndarray``
        Coordinates in the observer's frame: x, y in the "plane of sky"
        and z along the line of sight.
    """
    # Rotate about the z-axis by phi.
    cos_phi, sin_phi = np.cos(phi), np.sin(phi)
    x_rot = x * cos_phi + y * sin_phi
    y_rot = y * cos_phi - x * sin_phi
    # Tilt by the inclination: split y into line-of-sight and sky components.
    z_los = y_rot * np.sin(inc)
    y_tilt = y_rot * np.cos(inc)
    # Final rotation about the viewing axis by psi.
    cos_psi, sin_psi = np.cos(psi), np.sin(psi)
    x_sky = x_rot * cos_psi - y_tilt * sin_psi
    y_sky = x_rot * sin_psi + y_tilt * cos_psi
    return (x_sky, y_sky, z_los)
|
def _get_info ( self ) :
"""Reads the tool ' s output from its stderr stream , extracts the
relevant information , and parses it ."""
|
out_parts = [ ]
while True :
line = self . proc . stderr . readline ( )
if not line : # EOF and data not found .
raise CommunicationError ( "stream info not found" )
# In Python 3 , result of reading from stderr is bytes .
if isinstance ( line , bytes ) :
line = line . decode ( 'utf8' , 'ignore' )
line = line . strip ( ) . lower ( )
if 'no such file' in line :
raise IOError ( 'file not found' )
elif 'invalid data found' in line :
raise UnsupportedError ( )
elif 'duration:' in line :
out_parts . append ( line )
elif 'audio:' in line :
out_parts . append ( line )
self . _parse_info ( '' . join ( out_parts ) )
break
|
def get_layout(name, *args, **kwargs):
    """Retrieve a graph layout.

    Some graph layouts accept extra options; refer to their documentation
    for details.

    Parameters
    ----------
    name : string
        The name of the layout.  The variable ``AVAILABLE_LAYOUTS``
        contains all available layouts.
    *args
        Positional arguments passed to the layout.
    **kwargs
        Keyword arguments passed to the layout.

    Returns
    -------
    layout : callable
        The callable generator which will calculate the graph layout.
    """
    try:
        layout = _layout_map[name]
    except KeyError:
        raise KeyError("Graph layout '%s' not found. Should be one of %s"
                       % (name, AVAILABLE_LAYOUTS))
    # Layout classes are instantiated with the extra options; plain
    # callables are returned as-is.
    if inspect.isclass(layout):
        layout = layout(*args, **kwargs)
    return layout
|
def sh(cmd, grid=False, infile=None, outfile=None, errfile=None, append=False, background=False, threaded=None, log=True, grid_opts=None, silent=False, shell="/bin/bash", check=False):
    """Simple wrapper for system calls.

    Builds a shell pipeline string from the redirection options and runs it
    either on the grid (returning the job id) or locally (returning the
    subprocess exit status).  An empty command returns 1 immediately.
    """
    if not cmd:
        return 1
    if silent:
        outfile = errfile = "/dev/null"
    if grid:
        from jcvi.apps.grid import GridProcess
        job = GridProcess(cmd, infile=infile, outfile=outfile,
                          errfile=errfile, threaded=threaded,
                          grid_opts=grid_opts)
        job.start()
        return job.jobid
    # Local execution: assemble the shell pipeline string piece by piece.
    if infile:
        reader = "zcat" if infile.endswith(".gz") else "cat"
        cmd = "{0} {1} |".format(reader, infile) + cmd
    if outfile and outfile != "stdout":
        if outfile.endswith(".gz"):
            cmd += " | gzip"
        redirect = ">>" if append else ">"
        cmd += " {0}{1}".format(redirect, outfile)
    if errfile:
        if errfile == outfile:
            # Merge stderr into the already-redirected stdout.
            errfile = "&1"
        cmd += " 2>{0}".format(errfile)
    if background:
        cmd += " &"
    if log:
        logging.debug(cmd)
    runner = check_call if check else call
    return runner(cmd, shell=True, executable=shell)
|
def del_repo(repo, **kwargs):
    '''Remove an XBPS repository from the system.

    repo
        url of repo to remove (persistent).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.del_repo <repo url>
    '''
    # _locate_repo_files rewrites the config files; an IOError means the
    # repository could not be removed.
    try:
        _locate_repo_files(repo, rewrite=True)
    except IOError:
        return False
    return True
|
def get_cf_distribution_class():
    """Return the correct troposphere CF distribution class.

    troposphere 2.4.0 shipped without the ``DistributionConfig`` prop on
    ``cloudfront.Distribution``; patch it onto the class for that exact
    version before returning it.
    """
    dist_cls = cloudfront.Distribution
    if LooseVersion(troposphere.__version__) == LooseVersion('2.4.0'):
        dist_cls.props['DistributionConfig'] = (DistributionConfig, True)
    return dist_cls
|
def export_jwks_as_json(self, private=False, issuer=""):
    """Export a JWKS as a JSON document.

    :param private: Whether it should be the private keys or the public.
    :param issuer: The entity ID.
    :return: A JSON representation of a JWKS.
    """
    jwks = self.export_jwks(private, issuer)
    return json.dumps(jwks)
|
def mark_offer_as_unclear(self, offer_id):
    """Mark an offer as unclear.

    :param offer_id: the offer id
    :return: Response
    """
    return self._create_put_request(
        resource=OFFERS,
        billomat_id=offer_id,
        command=UNCLEAR,
    )
|
def focusOutEvent(self, event):
    """Reimplemented Qt method: close the widget when it loses focus.

    Inspired by CompletionWidget.focusOutEvent() in
    widgets/sourcecode/base.py.
    """
    event.ignore()
    if sys.platform != "darwin":
        self.close()
    elif event.reason() != Qt.ActiveWindowFocusReason:
        # On macOS, keep the widget open when focus is lost only because
        # the whole window was deactivated.
        self.close()
|
def is_point(nc, variable):
    '''Return True if the variable is a point feature type.

    A point has x(o), y(o), z(o), t(o): every coordinate shares the single
    observation dimension X(o).

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    dims = nc.variables[variable].dimensions
    cmatrix = coordinate_dimension_matrix(nc)
    shared = None
    if 't' in cmatrix:
        shared = cmatrix['t']
        if len(shared) > 1:
            return False
    # Every spatial coordinate must be 1-D and share the same dimension.
    for axis in ('x', 'y', 'z'):
        if axis not in cmatrix:
            continue
        coord = cmatrix[axis]
        if shared is None:
            shared = coord
        if shared != coord:
            return False
        if len(coord) > 1:
            return False
    # The variable itself must live on that shared dimension.
    if shared and dims != shared:
        return False
    # Point is indistinguishable from trajectories where the instance
    # dimension is implied (scalar).
    if nc.get_variables_by_attributes(cf_role="trajectory_id"):
        return False
    return True
|
def manage_view(request, semester, profile=None):
    """View all members' preferences.

    This view also includes forms to create an entire semester's worth of
    weekly workshifts, and (for full managers) forms to edit, close, or
    reopen the semester.

    :param request: The incoming HTTP request.
    :param semester: The semester being managed.
    :param profile: Unused here; presumably kept for URL-dispatch signature
        compatibility -- TODO confirm against the URL conf.
    :return: A redirect after a successful form submission, otherwise the
        rendered ``manage.html`` page.
    """
    page_name = "Manage Workshift"
    pools = WorkshiftPool.objects.filter(semester=semester)
    full_management = utils.can_manage(request.user, semester=semester)
    edit_semester_form = None
    close_semester_form = None
    open_semester_form = None
    if not full_management:
        # Restrict to the pools this user manages; reject if there are none.
        pools = pools.filter(managers__incumbent__user=request.user)
        if not pools.count():
            messages.add_message(request, messages.ERROR, MESSAGES["ADMINS_ONLY"])
            return HttpResponseRedirect(semester.get_view_url())
    else:
        # Full managers may edit the semester and close or reopen it.
        # Each form is only bound (data is not None) when its submit
        # button's name appears in the POST payload.
        edit_semester_form = FullSemesterForm(
            data=request.POST if "edit_semester" in request.POST else None,
            instance=semester,
        )
        if semester.current:
            close_semester_form = CloseSemesterForm(
                data=request.POST if "close_semester" in request.POST else None,
                semester=semester,
            )
        else:
            open_semester_form = OpenSemesterForm(
                data=request.POST if "open_semester" in request.POST else None,
                semester=semester
            )
    if edit_semester_form and edit_semester_form.is_valid():
        semester = edit_semester_form.save()
        messages.add_message(request, messages.INFO, "Semester successfully updated.",)
        return HttpResponseRedirect(wurl("workshift:manage", sem_url=semester.sem_url,))
    if close_semester_form and close_semester_form.is_valid():
        close_semester_form.save()
        messages.add_message(request, messages.INFO, "Semester closed.")
        return HttpResponseRedirect(wurl("workshift:manage", sem_url=semester.sem_url,))
    if open_semester_form and open_semester_form.is_valid():
        open_semester_form.save()
        messages.add_message(request, messages.INFO, "Semester reopened.")
        return HttpResponseRedirect(wurl("workshift:manage", sem_url=semester.sem_url,))
    pools = pools.order_by("-is_primary", "title")
    workshifters = WorkshiftProfile.objects.filter(semester=semester)
    # One row of per-pool hours per workshifter, aligned with `workshifters`
    # for the template-side zip below.
    pool_hours = [
        [workshifter.pool_hours.get(pool=pool) for pool in pools]
        for workshifter in workshifters
    ]
    return render_to_response("manage.html", {
        "page_name": page_name,
        "pools": pools,
        "full_management": full_management,
        "edit_semester_form": edit_semester_form,
        "close_semester_form": close_semester_form,
        "open_semester_form": open_semester_form,
        "workshifters": zip(workshifters, pool_hours),
    }, context_instance=RequestContext(request))
|
def rm(self, fname=None):
    """Remove a file; do not raise if the file does not exist."""
    if fname is not None:
        # Delegate to the child path object.
        return (self / fname).rm()
    try:
        self.remove()
    except OSError:
        # Missing (or otherwise unremovable) file is fine per the contract.
        pass
|
def setup_coverage(config, kernel, floc, output_loc=None):
    """Start coverage reporting in the kernel.

    Currently supported kernel languages are:
     - Python

    :param config: pytest config object; ``cov_source``/``cov_config``
        options and warnings are read from/issued through it.
    :param kernel: The running kernel; its ``kc.execute``/``await_idle``
        are used to run the coverage setup code remotely.
    :param floc: Location passed to ``config.warn`` for unsupported
        languages.
    :param output_loc: Optional directory for the ``.coverage`` data file
        when pytest-cov is not active.
    """
    language = kernel.language
    if language.startswith('python'):
        # Get the pytest-cov coverage object
        cov = get_cov(config)
        if cov:
            # If present, copy the data file location used by pytest-cov
            data_file = os.path.abspath(cov.config.data_file)
        else:
            # Fall back on output_loc and current dir if not
            data_file = os.path.abspath(os.path.join(output_loc or os.getcwd(), '.coverage'))
        # Get options from pytest-cov's command line arguments:
        source = config.option.cov_source
        config_file = config.option.cov_config
        if isinstance(config_file, str) and os.path.isfile(config_file):
            config_file = os.path.abspath(config_file)
        # Copy the suffix of plugin if available
        suffix = _make_suffix(cov)
        if suffix is True:
            # Cannot merge data with autogen suffix, so turn off warning
            # for missing data in pytest-cov collector.
            # NOTE(review): assumes `cov` is not None whenever suffix is
            # True -- presumably _make_suffix(None) never returns True;
            # confirm.
            cov._warn_no_data = False
        # Build setup command and execute in kernel:
        cmd = _python_setup % (data_file, source, config_file, suffix)
        msg_id = kernel.kc.execute(cmd, stop_on_error=False)
        kernel.await_idle(msg_id, 60)
        # A minute should be plenty to enable coverage
    else:
        config.warn('C1', 'Coverage currently not supported for language "%s".' % language, floc)
        return
|
def _rdsignal(fp, file_size, header_size, n_sig, bit_width, is_signed, cut_end):
    """Read the signal.

    Parameters
    ----------
    fp : file object
        Open file pointer to the signal file; this function seeks within it.
    header_size : int
        Number of leading bytes to skip before the sample data.
    n_sig : int
        Number of signal channels; every frame holds one sample per channel.
    bit_width : int
        Bits per stored sample (assumed to be a multiple of 8).
    is_signed : bool
        Whether the stored samples are signed integers.
    cut_end : bool, optional
        If True, enables reading the end of files which appear to terminate
        with the incorrect number of samples (ie. sample not present for
        all channels), by checking and skipping the reading of the end of
        such files.  Checking this option makes reading slower.

    Returns
    -------
    signal, markers, triggers : sample array reshaped to (frames, n_sig),
        plus int arrays of the marker and trigger positions collected by
        ``_get_sample``.
    """
    # Cannot initially figure out signal length because there
    # are escape sequences.
    fp.seek(header_size)
    signal_size = file_size - header_size
    byte_width = int(bit_width / 8)
    # Build the numpy dtype string: signedness code + byte width,
    # big endian.
    dtype = str(byte_width)
    if is_signed:
        dtype = 'i' + dtype
    else:
        dtype = 'u' + dtype
    # big endian
    dtype = '>' + dtype
    # The maximum possible samples given the file size.
    # All channels must be present.
    max_samples = int(signal_size / byte_width)
    max_samples = max_samples - max_samples % n_sig
    # Output information
    signal = np.empty(max_samples, dtype=dtype)
    markers = []
    triggers = []
    # Number of (total) samples read
    sample_num = 0
    # Read one sample for all channels at a time
    if cut_end:
        # Stop before a trailing partial frame (sample missing for some
        # channels at the end of the file).
        stop_byte = file_size - n_sig * byte_width + 1
        while fp.tell() < stop_byte:
            chunk = fp.read(2)
            sample_num = _get_sample(fp, chunk, n_sig, dtype, signal, markers, triggers, sample_num)
    else:
        while True:
            chunk = fp.read(2)
            if not chunk:
                break
            sample_num = _get_sample(fp, chunk, n_sig, dtype, signal, markers, triggers, sample_num)
    # No more bytes to read. Reshape output arguments.
    signal = signal[:sample_num]
    signal = signal.reshape((-1, n_sig))
    markers = np.array(markers, dtype='int')
    triggers = np.array(triggers, dtype='int')
    return signal, markers, triggers
|
def couple(f, g):
    r"""Compose a function that returns two arguments.

    Given a pair of functions that take the same arguments, return a
    single function returning a pair of each function's return value.

    Notes
    -----
    Equivalent to::

        lambda f, g: lambda *args, **kwargs: (f(*args, **kwargs),
                                              g(*args, **kwargs))

    Examples
    --------
    >>> f = lambda x: 2*x**3
    >>> df = lambda x: 6*x**2
    >>> f_new = couple(f, df)
    >>> f_new(5)
    (250, 150)
    """
    def paired(*args, **kwargs):
        # Evaluate f first, then g, matching the tuple-literal order.
        first = f(*args, **kwargs)
        second = g(*args, **kwargs)
        return first, second
    return paired
|
def immediateAssignmentExtended(StartingTime_presence=0):
    """IMMEDIATE ASSIGNMENT EXTENDED, Section 9.1.19.

    Builds the message as a chain of information elements; the optional
    Starting Time IE is appended only when ``StartingTime_presence`` is 1.
    """
    a = L2PseudoLength()
    b = TpPd(pd=0x6)
    c = MessageType(mesType=0x39)  # 00111001
    d = PageModeAndSpareHalfOctets()
    f = ChannelDescription()
    g = RequestReference()
    h = TimingAdvance()
    i = MobileAllocation()
    packet = a / b / c / d / f / g / h / i
    # `== 1` instead of `is 1`: identity comparison against an int literal
    # is implementation-dependent and warns on modern CPython.
    if StartingTime_presence == 1:
        j = StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)
        packet = packet / j
    k = IaxRestOctets()
    packet = packet / k
    return packet
|
def _parse_contract_wages ( self , table ) :
"""Parse the wages on the contract .
The wages are listed as the data points in the contract table . Any
values that don ' t have a value which starts with a ' $ ' sign are likely
not valid and should be dropped .
Parameters
table : PyQuery object
A PyQuery object containing the contract table .
Returns
list
Returns a list of all wages where each element is a string denoting
the dollar amount , such as ' $ 40,000,000 ' ."""
|
wages = [ i . text ( ) if i . text ( ) . startswith ( '$' ) else '' for i in table ( 'td' ) . items ( ) ]
wages . remove ( '' )
return wages
|
def labelHealpix(pixels, values, nside, threshold=0, xsize=1000):
    """Label contiguous regions of a (sparse) HEALPix map.

    Works by mapping the HEALPix array to a Mollweide projection and
    applying scipy.ndimage.label.  Assumes a non-nested HEALPix map.

    Parameters:
    pixels    : Pixel values associated to (sparse) HEALPix array
    values    : (Sparse) HEALPix array of data values
    nside     : HEALPix dimensionality
    threshold : Threshold value for object detection
    xsize     : Size of Mollweide projection

    Returns:
    labels, nlabels
    """
    proj = healpy.projector.MollweideProj(xsize=xsize)
    vec = healpy.pix2vec(nside, pixels)
    xy = proj.vec2xy(vec)
    ij = proj.xy2ij(xy)
    xx, yy = proj.ij2xy()
    # Convert each slice of values into a boolean Mollweide image.
    searchims = []
    if values.ndim < 2:
        iterate = [values]
    else:
        iterate = values.T
    for i, value in enumerate(iterate):
        # Pass `i` as a lazy logging argument; the original call omitted
        # it, which logged the literal "%i" instead of the slice number.
        logger.debug("Labeling slice %i...", i)
        searchim = numpy.zeros(xx.shape, dtype=bool)
        select = (value > threshold)
        yidx = ij[0][select]
        xidx = ij[1][select]
        searchim[yidx, xidx] |= True
        searchims.append(searchim)
    searchims = numpy.array(searchims)
    # Full binary structure (diagonal connectivity in every dimension)
    s = ndimage.generate_binary_structure(searchims.ndim, searchims.ndim)
    #### Dilate in the z-direction
    logger.info(" Dilating image...")
    searchims = ndimage.binary_dilation(searchims, s, 1)
    # Do the labeling
    logger.info(" Labeling image...")
    labels, nlabels = ndimage.label(searchims, structure=s)
    # Convert back to healpix
    pix_labels = labels[:, ij[0], ij[1]].T
    pix_labels = pix_labels.reshape(values.shape)
    # re-trim: zero out labels where the data is below threshold
    pix_labels *= (values > threshold)
    return pix_labels, nlabels
|
def quit(self):
    """Remove this user from all joined channels and reset the join list."""
    for channel in self.channels:
        channel.users.remove(self.nick)
    # Fresh list: the user is no longer in any channel.
    self.channels = []
|
def confidential_login(credentials=None, client_id=None, client_secret=None, services=None, make_clients=True, token_dir=DEFAULT_CRED_PATH):
    """Log in to Globus services as a confidential client
    (a client with its own login information).

    Arguments:
        credentials (str or dict): A string filename, string JSON, or
            dictionary with credential and config information.  By default,
            uses the ``DEFAULT_CRED_FILENAME`` and token_dir.  Contains
            ``client_id``, ``client_secret``, and ``services`` as defined
            below.
        client_id (str): The ID of the client.
        client_secret (str): The client's secret for authentication.
        services (list of str): Services to authenticate with.
        make_clients (bool): If ``True``, will make and return appropriate
            clients with generated tokens.  If ``False``, will only return
            authorizers.  **Default**: ``True``.
        token_dir (str): The path to the directory to save tokens in and
            look for credentials by default.
            **Default**: ``DEFAULT_CRED_PATH``.

    Returns:
        dict: The clients and authorizers requested, indexed by service
        name.
    """
    DEFAULT_CRED_FILENAME = "confidential_globus_login.json"
    # Read credentials if supplied
    if credentials:
        if type(credentials) is str:
            # Try as a filename first, then as raw JSON text.
            try:
                with open(credentials) as cred_file:
                    creds = json.load(cred_file)
            except IOError:
                try:
                    creds = json.loads(credentials)
                except ValueError:
                    raise ValueError("Credentials unreadable or missing")
        elif type(credentials) is dict:
            creds = credentials
    else:
        # No credentials argument: look for the default credential file in
        # the working directory, then in token_dir.
        try:
            with open(os.path.join(os.getcwd(), DEFAULT_CRED_FILENAME)) as cred_file:
                creds = json.load(cred_file)
        except IOError:
            try:
                with open(os.path.join(token_dir, DEFAULT_CRED_FILENAME)) as cred_file:
                    creds = json.load(cred_file)
            except IOError:
                raise ValueError("Credentials/configuration must be passed as a " "filename string, JSON string, or dictionary, or provided " "in '{}' or '{}'.".format(DEFAULT_CRED_FILENAME, token_dir))
    # NOTE(review): these lookups overwrite any client_id/client_secret
    # passed directly as arguments with the values from creds -- confirm
    # this is intended.
    client_id = creds.get("client_id")
    client_secret = creds.get("client_secret")
    services = creds.get("services", services)
    if not client_id or not client_secret:
        raise ValueError("A client_id and client_secret are required.")
    if not services:
        services = []
    elif isinstance(services, str):
        services = [services]
    conf_client = globus_sdk.ConfidentialAppAuthClient(client_id, client_secret)
    # Normalize each service entry: lowercase, strip, split on spaces.
    servs = []
    for serv in services:
        serv = serv.lower().strip()
        if type(serv) is str:
            servs += serv.split(" ")
        else:
            servs += list(serv)
    # Translate services into scopes as possible
    scopes = [KNOWN_SCOPES.get(sc, sc) for sc in servs]
    # Make authorizers with every returned token
    all_authorizers = {}
    for scope in scopes:
        # TODO: Allow non-CC authorizers?
        try:
            all_authorizers[scope] = globus_sdk.ClientCredentialsAuthorizer(conf_client, scope)
        except Exception as e:
            print("Error: Cannot create authorizer for scope '{}' ({})".format(scope, str(e)))
    returnables = {}
    # Populate clients and named services
    # Only translate back services - if user provides scope directly, don't translate back
    # ex. transfer => urn:transfer.globus.org:all => transfer,
    # but urn:transfer.globus.org:all !=> transfer
    for service in servs:
        token_key = KNOWN_SCOPES.get(service)
        # If the .by_resource_server key (token key) for the service was returned
        if token_key in all_authorizers.keys():
            # If there is an applicable client (all clients have known token key)
            # Pop from all_authorizers to remove from final return value
            if make_clients and KNOWN_CLIENTS.get(service):
                try:
                    returnables[service] = KNOWN_CLIENTS[service](authorizer=all_authorizers.pop(token_key), http_timeout=STD_TIMEOUT)
                except globus_sdk.GlobusAPIError as e:
                    print("Error: Unable to create {} client: {}".format(service, e.message))
            # If no applicable client, just translate the key
            else:
                returnables[service] = all_authorizers.pop(token_key)
    # Add authorizers not associated with service to returnables
    returnables.update(all_authorizers)
    return returnables
|
def _get_rnn_layer(mode, num_layers, input_size, hidden_size, dropout, weight_dropout):
    """Create an RNN layer from the given specs.

    *mode* selects the cell type ('rnn_relu', 'rnn_tanh', 'lstm', 'gru');
    optional weight dropout is applied to the hidden-to-hidden weights.
    """
    if mode in ('rnn_relu', 'rnn_tanh'):
        # 'rnn_relu' -> activation 'relu'; 'rnn_tanh' -> activation 'tanh'.
        constructor = functools.partial(rnn.RNN, activation=mode[4:])
    elif mode == 'lstm':
        constructor = rnn.LSTM
    elif mode == 'gru':
        constructor = rnn.GRU
    block = constructor(hidden_size, num_layers, dropout=dropout,
                        input_size=input_size)
    if weight_dropout:
        apply_weight_drop(block, '.*h2h_weight', rate=weight_dropout)
    return block
|
def register_actions(self, shortcut_manager):
    """Register callback methods for triggered actions.

    :param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager:
    """
    for action, handler in (("undo", self.undo), ("redo", self.redo)):
        shortcut_manager.add_callback_for_action(action, handler)
|
def _check_cmd ( call ) :
'''Check the output of the cmd . run _ all function call .'''
|
if call [ 'retcode' ] != 0 :
comment = ''
std_err = call . get ( 'stderr' )
std_out = call . get ( 'stdout' )
if std_err :
comment += std_err
if std_out :
comment += std_out
raise CommandExecutionError ( 'Error running command: {0}' . format ( comment ) )
return call
|
def _prepare_output ( topics_with_wrong_rf , verbose ) :
"""Returns dict with ' raw ' and ' message ' keys filled ."""
|
out = { }
topics_count = len ( topics_with_wrong_rf )
out [ 'raw' ] = { 'topics_with_wrong_replication_factor_count' : topics_count , }
if topics_count == 0 :
out [ 'message' ] = 'All topics have proper replication factor.'
else :
out [ 'message' ] = ( "{0} topic(s) have replication factor lower than specified min ISR + 1." ) . format ( topics_count )
if verbose :
lines = ( "replication_factor={replication_factor} is lower than min_isr={min_isr} + 1 for {topic}" . format ( min_isr = topic [ 'min_isr' ] , topic = topic [ 'topic' ] , replication_factor = topic [ 'replication_factor' ] , ) for topic in topics_with_wrong_rf )
out [ 'verbose' ] = "Topics:\n" + "\n" . join ( lines )
if verbose :
out [ 'raw' ] [ 'topics' ] = topics_with_wrong_rf
return out
|
def get_ssh_gateway_config(vm_):
    '''Return the ssh_gateway configuration.'''
    ssh_gateway = config.get_cloud_config_value(
        'ssh_gateway', vm_, __opts__, default=None, search_global=False)
    # Check to see if a SSH Gateway will be used.
    if not isinstance(ssh_gateway, six.string_types):
        return None
    # Create dictionary of configuration items, mapping each result key
    # to the cloud-config option it is read from.
    ssh_gateway_config = {'ssh_gateway': ssh_gateway}
    option_map = (
        ('ssh_gateway_port', 'ssh_gateway_port'),
        ('ssh_gateway_user', 'ssh_gateway_username'),
        ('ssh_gateway_key', 'ssh_gateway_private_key'),
        ('ssh_gateway_password', 'ssh_gateway_password'),
        ('ssh_gateway_command', 'ssh_gateway_command'),
    )
    for target_key, option in option_map:
        ssh_gateway_config[target_key] = config.get_cloud_config_value(
            option, vm_, __opts__, default=None, search_global=False)
    # Check if private key exists
    key_filename = ssh_gateway_config['ssh_gateway_key']
    if key_filename is not None and not os.path.isfile(key_filename):
        raise SaltCloudConfigError(
            'The defined ssh_gateway_private_key \'{0}\' does not exist'.format(key_filename))
    elif key_filename is None and not ssh_gateway_config['ssh_gateway_password']:
        raise SaltCloudConfigError(
            'No authentication method. Please define: ' ' ssh_gateway_password or ssh_gateway_private_key')
    return ssh_gateway_config
|
def list_huisnummers_by_straat(self, straat, sort=1):
    '''List all `huisnummers` in a `Straat`.

    :param straat: The :class:`Straat` for which the `huisnummers` are
        wanted, or its bare numeric id.
    :param integer sort: Field to sort the results on.
    :rtype: A :class:`list` of :class:`Huisnummer`
    '''
    # Accept either a Straat object or a bare id; use a dedicated name
    # instead of shadowing the builtin ``id``.
    try:
        straat_id = straat.id
    except AttributeError:
        straat_id = straat

    def creator():
        res = crab_gateway_request(
            self.client, 'ListHuisnummersWithStatusByStraatnaamId', straat_id, sort)
        try:
            return [
                Huisnummer(r.HuisnummerId, r.StatusHuisnummer, r.Huisnummer, straat_id)
                for r in res.HuisnummerWithStatusItem
            ]
        except AttributeError:
            # The service omits the item list when there are no results.
            return []

    if self.caches['short'].is_configured:
        key = 'ListHuisnummersWithStatusByStraatnaamId#%s%s' % (straat_id, sort)
        huisnummers = self.caches['short'].get_or_create(key, creator)
    else:
        huisnummers = creator()
    for h in huisnummers:
        h.set_gateway(self)
    return huisnummers
|
def install_handler(self, event_type, handler, user_handle=None):
    """Install a handler for event callbacks in this resource.

    :param event_type: Logical event identifier.
    :param handler: Valid reference to a handler to be installed by a
        client application.
    :param user_handle: A value specified by an application that can be
        used for identifying handlers uniquely for an event type.
    :returns: user handle (a ctypes object)
    """
    visa_library = self.visalib
    return visa_library.install_visa_handler(
        self.session, event_type, handler, user_handle)
|
def initialize_outlineexplorer(self):
    """Refresh the outline explorer for every stack except the current one.

    Called separately from 'set_outlineexplorer' to avoid unnecessary
    updates when there are multiple editor windows.
    """
    for index in range(self.get_stack_count()):
        if index == self.get_stack_index():
            continue
        self._refresh_outlineexplorer(index=index)
|
def loop ( self , timeout = 1 ) :
    """Run a single iteration of the network main loop .

    Waits up to ` timeout ` seconds for the socket to become readable
    ( and writable , when outgoing packets are queued ) , dispatches the
    matching read / write handlers and finally runs periodic housekeeping .

    : param timeout : maximum number of seconds to block in select ( ) .
    : return : ` NC . ERR_SUCCESS ` on success , otherwise the first error
        code reported by ` loop_read ( ) ` or ` loop_write ( ) ` .
    """

    # Always watch the socket for incoming data .
    rlist = [ self . sock ]
    wlist = [ ]
    # Only ask for writability when there is something queued to send .
    if len ( self . out_packet ) > 0 :
        wlist . append ( self . sock )
    to_read , to_write , _ = select . select ( rlist , wlist , [ ] , timeout )
    # Handle reads before writes ; bail out on the first error .
    if len ( to_read ) > 0 :
        ret , _ = self . loop_read ( )
        if ret != NC . ERR_SUCCESS :
            return ret
    if len ( to_write ) > 0 :
        ret , _ = self . loop_write ( )
        if ret != NC . ERR_SUCCESS :
            return ret
    # Periodic housekeeping ; see loop_misc ( ) for what it covers .
    self . loop_misc ( )
    return NC . ERR_SUCCESS
|
def _ingest_response ( self , response ) :
    '''Takes a response object and ingests state , links , embedded
    documents and updates the self link of this navigator to
    correspond . This will only work if the response is valid JSON .

    : param response : a requests - style response object with ` headers ` ,
        ` text ` and ` status_code ` attributes .
    : raises exc . HALNavigatorError : when the response content type is
        not one this navigator can parse .
    '''

    self . response = response
    if self . _can_parse ( response . headers [ 'Content-Type' ] ) :
        hal_json = self . _parse_content ( response . text )
    else :
        raise exc . HALNavigatorError ( message = "Unexpected content type! Wanted {0}, got {1}" . format ( self . headers . get ( 'Accept' , self . DEFAULT_CONTENT_TYPE ) , self . response . headers [ 'content-type' ] ) , nav = self , status = self . response . status_code , response = self . response , )
    # Rebuild link and embedded - document maps from the new document .
    self . _links = self . _make_links_from ( hal_json )
    self . _embedded = self . _make_embedded_from ( hal_json )
    # Set properties from new document ' s self link
    self . _update_self_link ( hal_json . get ( '_links' , { } ) . get ( 'self' , { } ) , response . headers , )
    # Set curies if available
    self . curies = dict ( ( curie [ 'name' ] , curie [ 'href' ] ) for curie in hal_json . get ( '_links' , { } ) . get ( 'curies' , [ ] ) )
    # Set state by removing HAL attributes
    self . state = utils . getstate ( hal_json )
|
def set_color_temp ( self , color_temp ) :
    """Set the device color temperature .

    : param color_temp : desired color temperature ; coerced to int before
        being sent to the panel .
    : return : True when the request was sent , False when the device has
        no control url .
    : raises AbodeException : when the response is for another device .
    """

    if self . _json_state [ 'control_url' ] :
        url = CONST . INTEGRATIONS_URL + self . _device_uuid
        color_data = { 'action' : 'setcolortemperature' , 'colorTemperature' : int ( color_temp ) }
        response = self . _abode . send_request ( "post" , url , data = color_data )
        response_object = json . loads ( response . text )
        _LOGGER . debug ( "Set Color Temp Response: %s" , response . text )
        # Sanity - check that the panel answered for this device .
        if response_object [ 'idForPanel' ] != self . device_id :
            raise AbodeException ( ( ERROR . SET_STATUS_DEV_ID ) )
        # A value mismatch is only logged as a warning , not an error .
        if response_object [ 'colorTemperature' ] != int ( color_temp ) :
            _LOGGER . warning ( ( "Set color temp mismatch for device %s. " "Request val: %s, Response val: %s " ) , self . device_id , color_temp , response_object [ 'colorTemperature' ] )
        self . update ( response_object )
        _LOGGER . info ( "Set device %s color_temp to: %s" , self . device_id , color_temp )
        return True
    return False
|
def get_site_pattern ( agent ) :
    """Construct a dictionary of Monomer site states from an Agent .

    This creates the mapping to the associated PySB monomer from an
    INDRA Agent object , covering bound conditions , modifications ,
    mutations , location and activity .

    : param agent : the INDRA Agent to translate ; anything that is not an
        ` ist . Agent ` yields an empty pattern .
    : return : dict mapping site names to site states .
    """

    if not isinstance ( agent , ist . Agent ) :
        return { }
    pattern = { }
    # Handle bound conditions
    for bc in agent . bound_conditions : # Here we make the assumption that the binding site
        # is simply named after the binding partner
        if bc . is_bound :
            pattern [ get_binding_site_name ( bc . agent ) ] = ANY
        else :
            pattern [ get_binding_site_name ( bc . agent ) ] = None
    # Handle modifications : site name is residue + position when a residue
    # is given , otherwise the abbreviation for the modification type .
    for mod in agent . mods :
        mod_site_str = abbrevs [ mod . mod_type ]
        if mod . residue is not None :
            mod_site_str = mod . residue
        mod_pos_str = mod . position if mod . position is not None else ''
        mod_site = ( '%s%s' % ( mod_site_str , mod_pos_str ) )
        site_states = states [ mod . mod_type ]
        if mod . is_modified :
            pattern [ mod_site ] = ( site_states [ 1 ] , WILD )
        else :
            pattern [ mod_site ] = ( site_states [ 0 ] , WILD )
    # Handle mutations : fall back to ' mut ' / ' X ' when residues are missing .
    for mc in agent . mutations :
        res_from = mc . residue_from if mc . residue_from else 'mut'
        res_to = mc . residue_to if mc . residue_to else 'X'
        if mc . position is None :
            mut_site_name = res_from
        else :
            mut_site_name = res_from + mc . position
        pattern [ mut_site_name ] = res_to
    # Handle location
    if agent . location is not None :
        pattern [ 'loc' ] = _n ( agent . location )
    # Handle activity
    if agent . activity is not None :
        active_site_name = agent . activity . activity_type
        if agent . activity . is_active :
            active_site_state = 'active'
        else :
            active_site_state = 'inactive'
        pattern [ active_site_name ] = active_site_state
    return pattern
|
def run_command(command, args):
    """Run all tasks registered in a command."""
    # Walk every category; run each registered command whose matcher
    # accepts the given command name.
    for registered_commands in command_categories.values():
        for registered in registered_commands:
            if registered.match(command):
                registered.run(args)
|
def channels_remove_moderator(self, room_id, user_id, **kwargs):
    """Removes the role of moderator from a user in the current channel."""
    payload = dict(roomId=room_id, userId=user_id, kwargs=kwargs)
    return self.__call_api_post('channels.removeModerator', **payload)
|
def _multi_take ( self , tup ) :
"""Create the indexers for the passed tuple of keys , and execute the take
operation . This allows the take operation to be executed all at once -
rather than once for each dimension - improving efficiency .
Parameters
tup : tuple
Tuple of indexers , one per axis
Returns
values : same type as the object being indexed"""
|
# GH 836
o = self . obj
d = { axis : self . _get_listlike_indexer ( key , axis ) for ( key , axis ) in zip ( tup , o . _AXIS_ORDERS ) }
return o . _reindex_with_indexers ( d , copy = True , allow_dups = True )
|
def _warmup ( self , num_updates ) :
"""Returns linearly increasing fraction of base _ lr ."""
|
assert self . base_lr is not None
if not self . warmup :
return self . base_lr
fraction = ( num_updates + 1 ) * self . base_lr / ( self . warmup + 1 )
if num_updates > self . last_warmup_log and num_updates % self . log_warmup_every_t == 0 :
self . last_warmup_log = num_updates
logger . info ( "Learning rate warmup: %3.0f%%" , fraction / self . base_lr * 100.0 )
return fraction
|
def short_text__str(self, column_name, output_column_prefix):
    """Transforms short text into a dictionary of TFIDF-weighted 3-gram
    character counts."""
    from ._ngram_counter import NGramCounter
    from ._tfidf import TFIDF
    counter = NGramCounter(
        features=[column_name], n=3, method="character",
        output_column_prefix=output_column_prefix)
    tfidf = TFIDF(
        features=[column_name], min_document_frequency=0.01,
        max_document_frequency=0.5, output_column_prefix=output_column_prefix)
    return [counter, tfidf]
|
def bind(*args):
    """Bind a list of clients and servers together, top down.

    Called with no arguments, every unbound client in ``client_map`` is
    bound to the server registered under the same id, and every unbound
    application service element in ``element_map`` is bound to the
    service access point registered under the same id.

    Called with arguments, consecutive pairs are bound together: each
    argument becomes the client (or element) of the argument following it.

    :raises RuntimeError: when an id has no matching peer or the peer is
        already bound.
    :raises TypeError: when a pair is neither (Client, Server) nor
        (ApplicationServiceElement, ServiceAccessPoint).
    """
    if _debug:
        bind._debug("bind %r", args)
    # generic bind is pairs of names
    if not args:
        # find unbound clients and bind them
        for cid, client in client_map.items():
            # skip those that are already bound
            if client.clientPeer:
                continue
            if cid not in server_map:
                raise RuntimeError("unmatched server {!r}".format(cid))
            server = server_map[cid]
            if server.serverPeer:
                # fix: was "%r".format(cid), which left a literal %r
                raise RuntimeError("server already bound {!r}".format(cid))
            bind(client, server)
        # see if there are any unbound servers
        for sid, server in server_map.items():
            if server.serverPeer:
                continue
            if sid not in client_map:
                raise RuntimeError("unmatched client {!r}".format(sid))
            else:
                raise RuntimeError("mystery unbound server {!r}".format(sid))
        # find unbound application service elements and bind them
        for eid, element in element_map.items():
            # skip those that are already bound
            if element.elementService:
                continue
            if eid not in service_map:
                # fix: was format(cid), a leftover variable from the client loop
                raise RuntimeError("unmatched element {!r}".format(eid))
            service = service_map[eid]
            if service.serviceElement:
                # fix: was server.serverPeer, a leftover from the client loop
                raise RuntimeError("service already bound {!r}".format(eid))
            bind(element, service)
        # see if there are any unbound services
        for sid, service in service_map.items():
            if service.serviceElement:
                continue
            if sid not in element_map:
                raise RuntimeError("unmatched service {!r}".format(sid))
            else:
                raise RuntimeError("mystery unbound service {!r}".format(sid))
    # go through the argument pairs
    for i in range(len(args) - 1):
        client = args[i]
        if _debug:
            bind._debug("    - client: %r", client)
        server = args[i + 1]
        if _debug:
            bind._debug("    - server: %r", server)
        # make sure we're binding clients and servers
        if isinstance(client, Client) and isinstance(server, Server):
            client.clientPeer = server
            server.serverPeer = client
        # we could be binding application clients and servers
        elif isinstance(client, ApplicationServiceElement) and isinstance(server, ServiceAccessPoint):
            client.elementService = server
            server.serviceElement = client
        # error
        else:
            raise TypeError("bind() requires a client and server")
        if _debug:
            bind._debug("    - bound")
|
def _IndexedScan ( self , i , max_records = None ) :
    """Scan records starting with index i .

    Yields ( idx , ts , value ) tuples and opportunistically writes index
    entries for records it passes over .

    : param i : first record index to yield .
    : param max_records : maximum number of records to yield , or None for
        no limit .
    """

    self . _ReadIndex ( )
    # The record number that we will read next .
    idx = 0
    # The timestamp that we will start reading from .
    start_ts = 0
    if i >= self . _max_indexed :
        # Requested index is beyond the indexed range : resume the scan
        # from the last indexed position .
        start_ts = max ( ( 0 , 0 ) , ( self . _index [ self . _max_indexed ] [ 0 ] , self . _index [ self . _max_indexed ] [ 1 ] - 1 ) )
        idx = self . _max_indexed
    else :
        try :
            # Jump to the nearest index entry at or below i .
            possible_idx = i - i % self . INDEX_SPACING
            start_ts = ( max ( 0 , self . _index [ possible_idx ] [ 0 ] ) , self . _index [ possible_idx ] [ 1 ] - 1 )
            idx = possible_idx
        except KeyError :
            # No index entry for that position yet ; scan from the start .
            pass
    if max_records is not None :
        # Account for the records that must be skipped before reaching i .
        max_records += i - idx
    with data_store . DB . GetMutationPool ( ) as mutation_pool :
        for ( ts , value ) in self . Scan ( after_timestamp = start_ts , max_records = max_records , include_suffix = True ) :
            self . _MaybeWriteIndex ( idx , ts , mutation_pool )
            if idx >= i :
                yield ( idx , ts , value )
            idx += 1
|
def parse_vmware_file(path):
    """Parses a VMware file (VMX, preferences or inventory).

    :param path: path to the VMware file
    :returns: dict mapping lower-cased keys to their unquoted values
    """
    encoding = "utf-8"
    # Peek at the first meaningful line to honour a declared .encoding.
    with open(path, "rb") as f:
        first_line = f.readline().decode(encoding, errors="ignore")
        if first_line.startswith("#!"):
            # skip the shebang
            first_line = f.readline().decode(encoding, errors="ignore")
        try:
            key, value = first_line.split('=', 1)
            if key.strip().lower() == ".encoding":
                file_encoding = value.strip('" ')
                try:
                    codecs.lookup(file_encoding)
                    encoding = file_encoding
                except LookupError:
                    log.warning("Invalid file encoding detected in '{}': {}".format(path, file_encoding))
        except ValueError:
            log.warning("Couldn't find file encoding in {}, using {}...".format(path, encoding))
    # Re-read the whole file with the detected encoding.
    pairs = OrderedDict()
    with open(path, encoding=encoding, errors="ignore") as f:
        for line in f.read().splitlines():
            try:
                key, value = line.split('=', 1)
            except ValueError:
                continue
            pairs[key.strip().lower()] = value.strip('" ')
    return pairs
|
def render ( self , ctx = None ) :
    '''Render only the mutated field ( or the first one if not in mutation )

    : param ctx : rendering context in which the method was called
    : rtype : ` Bits `
    : return : rendered value of the container
    '''

    if ctx is None :
        ctx = RenderContext ( )
    # Track this container on the rendering stack for the duration of
    # the render .
    ctx . push ( self )
    self . _initialize ( )
    # Propagate our offset ( default 0 ) to the field being rendered .
    offset = self . offset if self . offset else 0
    self . _fields [ self . _field_idx ] . set_offset ( offset )
    rendered = self . _fields [ self . _field_idx ] . render ( ctx )
    # NOTE ( review ) : set_current_value ( ) presumably refreshes
    # self . _current_rendered , which is what is returned below -- confirm
    # against its definition .
    self . set_current_value ( rendered )
    ctx . pop ( )
    return self . _current_rendered
|
def initialize_means(data, clusters, k):
    """Initializes the M matrix given the data and a set of cluster labels.
    Cluster centers are set to the mean of each cluster.

    Args:
        data (array): genes x cells
        clusters (array): 1d array of ints (0...k-1)
        k (int): number of clusters
    """
    is_sparse = sparse.issparse(data)
    init_w = np.zeros((data.shape[0], k))
    for cluster_id in range(k):
        members = data[:, clusters == cluster_id]
        if members.shape[1] == 0:
            # Empty cluster: fall back to a randomly chosen cell.
            point = np.random.randint(0, data.shape[1])
            column = data[:, point]
            if is_sparse:
                column = column.toarray()
            init_w[:, cluster_id] = np.asarray(column).flatten()
        else:
            # memory usage might be a problem here?
            init_w[:, cluster_id] = np.array(members.mean(1)).flatten() + eps
    return init_w
|
def sinLdot_fc ( tfdata , pfdata ) :
    """Apply sin of theta times the L operator to the data in the Fourier
    domain .

    NOTE ( review ) : dphi_fc / sin_fc / dtheta_fc appear to modify their
    arguments in place ( their return values are ignored ) -- confirm
    against their definitions .

    : param tfdata : theta - component data in the Fourier domain .
    : param pfdata : phi - component data in the Fourier domain .
    : return : 1j * ( tfdata - pfdata ) after the transforms above .
    """

    dphi_fc ( tfdata )
    sin_fc ( pfdata )
    dtheta_fc ( pfdata )
    return 1j * ( tfdata - pfdata )
|
def curve_reduce_approx ( curve , reduced ) :
    """Image for : meth : ` . curve . Curve . reduce ` docstring .

    Plots ` curve ` and its reduced counterpart ` reduced ` on the same axes
    and saves the figure as ` ` curve_reduce_approx . png ` ` . Does nothing
    when image generation is disabled via ` ` NO_IMAGES ` ` .
    """

    if NO_IMAGES :
        return
    ax = curve . plot ( 256 )
    # Reuse the color matplotlib picked for the curve for its patch .
    color = ax . lines [ - 1 ] . get_color ( )
    add_patch ( ax , curve . _nodes , color , alpha = 0.25 , node_color = color )
    reduced . plot ( 256 , ax = ax )
    color = ax . lines [ - 1 ] . get_color ( )
    add_patch ( ax , reduced . _nodes , color , alpha = 0.25 , node_color = color )
    ax . axis ( "scaled" )
    _plot_helpers . add_plot_boundary ( ax )
    save_image ( ax . figure , "curve_reduce_approx.png" )
|
def _parse_bands ( lines , n_start ) :
"""Parse band structure from cp2k output"""
|
kpoints = [ ]
labels = [ ]
bands_s1 = [ ]
bands_s2 = [ ]
known_kpoints = { }
pattern = re . compile ( ".*?Nr.*?Spin.*?K-Point.*?" , re . DOTALL )
selected_lines = lines [ n_start : ]
for current_line , line in enumerate ( selected_lines ) :
splitted = line . split ( )
if "KPOINTS| Special K-Point" in line :
kpoint = tuple ( map ( float , splitted [ - 3 : ] ) )
if " " . join ( splitted [ - 5 : - 3 ] ) != "not specified" :
label = splitted [ - 4 ]
known_kpoints [ kpoint ] = label
elif pattern . match ( line ) :
spin = int ( splitted [ 3 ] )
kpoint = tuple ( map ( float , splitted [ - 3 : ] ) )
kpoint_n_lines = int ( math . ceil ( int ( selected_lines [ current_line + 1 ] ) / 4. ) )
band = list ( map ( float , ' ' . join ( selected_lines [ current_line + 2 : current_line + 2 + kpoint_n_lines ] ) . split ( ) ) )
if spin == 1 :
if kpoint in known_kpoints :
labels . append ( ( len ( kpoints ) , known_kpoints [ kpoint ] ) )
kpoints . append ( kpoint )
bands_s1 . append ( band )
elif spin == 2 :
bands_s2 . append ( band )
if bands_s2 :
bands = [ bands_s1 , bands_s2 ]
else :
bands = bands_s1
return np . array ( kpoints ) , labels , np . array ( bands )
|
def post_events(self, events):
    """Posts events to the Keen IO API. The write key must be set first.

    :param events: the event payload to upload
    """
    url = "{0}/{1}/projects/{2}/events".format(
        self.base_url, self.api_version, self.project_id)
    request_headers = utilities.headers(self.write_key)
    body = json.dumps(events)
    response = self.fulfill(
        HTTPMethods.POST, url, data=body,
        headers=request_headers, timeout=self.post_timeout)
    self._error_handling(response)
    return self._get_response_json(response)
|
def VxLANTunnelState_originator_switch_info_switchVcsId(self, **kwargs):
    """Auto Generated Code"""
    # Build config/VxLANTunnelState/originator-switch-info/switchVcsId.
    config = ET.Element("config")
    tunnel_state = ET.SubElement(
        config, "VxLANTunnelState",
        xmlns="http://brocade.com/ns/brocade-notification-stream")
    switch_info = ET.SubElement(tunnel_state, "originator-switch-info")
    vcs_id_node = ET.SubElement(switch_info, "switchVcsId")
    vcs_id_node.text = kwargs.pop('switchVcsId')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def _assert_relation_does_not_exists ( self ) :
"""Check if a relation with the current related _ name doesn ' t already exists
for the related model"""
|
relations = self . database . _relations [ self . related_to ]
existing = [ r for r in relations if r [ 2 ] == self . related_name ]
if existing :
error = ( "The related name defined for the field '%s.%s', named '%s', already exists " "on the model '%s' (tied to the field '%s.%s')" )
raise ImplementationError ( error % ( self . _model . __name__ , self . name , self . related_name , self . related_to , existing [ 0 ] [ 1 ] , existing [ 0 ] [ 0 ] ) )
|
def _upload ( self , archive , region ) :
    """Upload function source and return source url .

    : param archive : archive object with ` size ` and ` path ` attributes
        containing the zipped function source .
    : param region : region used when generating the upload url .
    : return : the upload url .
    : raises RuntimeError : when the upload does not return HTTP 200 .
    """

    # Generate source upload url
    url = self . client . execute_command ( 'generateUploadUrl' , { 'parent' : 'projects/{}/locations/{}' . format ( self . session . get_default_project ( ) , region ) } ) . get ( 'uploadUrl' )
    log . debug ( "uploading function code %s" , url )
    http = self . _get_http_client ( self . client )
    # 104857600 bytes = 100 MiB upper bound on the uploaded archive .
    headers , response = http . request ( url , method = 'PUT' , headers = { 'content-type' : 'application/zip' , 'Content-Length' : '%d' % archive . size , 'x-goog-content-length-range' : '0,104857600' } , body = open ( archive . path , 'rb' ) )
    log . info ( "function code uploaded" )
    if headers [ 'status' ] != '200' :
        raise RuntimeError ( "%s\n%s" % ( headers , response ) )
    return url
|
def _set_number_of_plots ( self , n ) :
    """Adjusts the number of plots & curves in the gui to the desired value .

    : param n : desired number of curves ; in multi - plot mode each curve
        gets its own plot widget , otherwise all curves share one plot .
    """

    # multi plot , right number of plots and curves = great !
    if self . button_multi . is_checked ( ) and len ( self . _curves ) == len ( self . plot_widgets ) and len ( self . _curves ) == n :
        return
    # single plot , right number of curves = great !
    if not self . button_multi . is_checked ( ) and len ( self . plot_widgets ) == 1 and len ( self . _curves ) == n :
        return
    # time to rebuild !
    # don ' t show the plots as they are built
    self . grid_plot . block_events ( )
    # make sure the number of curves is on target
    while len ( self . _curves ) > n :
        self . _curves . pop ( - 1 )
    while len ( self . _curves ) < n :
        self . _curves . append ( _g . PlotCurveItem ( pen = ( len ( self . _curves ) , n ) ) )
    # figure out the target number of plots
    if self . button_multi . is_checked ( ) :
        n_plots = n
    else :
        n_plots = min ( n , 1 )
    # clear the plots
    while len ( self . plot_widgets ) : # pop the last plot widget and remove all items
        p = self . plot_widgets . pop ( - 1 )
        p . clear ( )
        # remove it from the grid
        self . grid_plot . remove_object ( p )
    # add new plots
    for i in range ( n_plots ) :
        self . plot_widgets . append ( self . grid_plot . place_object ( _g . PlotWidget ( ) , 0 , i , alignment = 0 ) )
    # loop over the curves and add them to the plots ; in single - plot mode
    # every curve lands on the one existing widget
    for i in range ( n ) :
        self . plot_widgets [ min ( i , len ( self . plot_widgets ) - 1 ) ] . addItem ( self . _curves [ i ] )
    # loop over the ROI ' s and add them
    if self . ROIs is not None :
        for i in range ( len ( self . ROIs ) ) : # get the ROIs for this plot
            ROIs = self . ROIs [ i ]
            if not _spinmob . fun . is_iterable ( ROIs ) :
                ROIs = [ ROIs ]
            # loop over the ROIs for this plot
            for ROI in ROIs : # determine which plot to add the ROI to
                m = min ( i , len ( self . plot_widgets ) - 1 )
                # add the ROI to the appropriate plot
                if m >= 0 :
                    self . plot_widgets [ m ] . addItem ( ROI )
    # show the plots
    self . grid_plot . unblock_events ( )
|
def get_code(self):
    """Returns code representation of value of widget"""
    # Fall back to the first entry when nothing is selected.
    selection = self.GetSelection()
    index = 0 if selection == wx.NOT_FOUND else selection
    # Each style entry is a pair; the second item is the code string.
    return self.styles[index][1]
|
def rgbmap_cb ( self , rgbmap , channel ) :
    """This method is called when the RGBMap is changed . We update
    the ColorBar to match .

    : param rgbmap : the changed RGB map ( unused here ; the colorbar is
        rebuilt from the channel ) .
    : param channel : channel whose map changed .
    : return : False when the channel ' s image is not the focused one ,
        otherwise None .
    """

    if not self . gui_up :
        return
    fitsimage = channel . fitsimage
    # Only react when the changed channel is the focused one .
    if fitsimage != self . fv . getfocus_fitsimage ( ) :
        return False
    self . change_cbar ( self . fv , channel )
|
def get_trg_ids(self, src_ids: np.ndarray) -> np.ndarray:
    """Lookup possible target ids for input sequence of source ids.

    :param src_ids: Sequence(s) of source ids (any shape).
    :return: Possible target ids for source (unique sorted, always includes
        special symbols).
    """
    # TODO: When MXNet adds support for set operations, we can migrate to
    # avoid conversions to/from NumPy.
    # Use the public np.unique / np.union1d entry points: access through
    # np.lib.arraysetops was removed from NumPy's public API in NumPy 2.0.
    unique_src_ids = np.unique(src_ids)
    trg_ids = np.union1d(self.always_allow, self.lex[unique_src_ids, :].reshape(-1))
    return trg_ids
|
def _add_logger_by_name(self, name):
    '''Handles POST requests for adding a new logger.

    Expects logger configuration to be passed in the request's query string.
    The logger name is included in the URL and the address components and
    connection type should be included as well. The loc attribute is
    defaulted to "localhost" when making the socket connection if not
    defined.

        loc = IP / interface
        port = port / protocol
        conn_type = udp or ethernet

    Raises:
        ValueError:
            if the port or connection type are not supplied.
    '''
    data = dict(request.forms)
    loc = data.pop('loc', '')
    port = data.pop('port', None)
    conn_type = data.pop('conn_type', None)
    if not port or not conn_type:
        e = 'Port and/or conn_type not set'
        raise ValueError(e)
    address = [loc, int(port)]
    if 'rotate_log' in data:
        # Bug fix: previously compared the whole ``data`` dict against
        # 'true' (always False); compare the submitted value instead.
        data['rotate_log'] = data['rotate_log'] == 'true'
    if 'rotate_log_delta' in data:
        data['rotate_log_delta'] = int(data['rotate_log_delta'])
    self._logger_manager.add_logger(name, address, conn_type, **data)
|
def controller ( url_prefix_or_controller_cls : Union [ str , Type [ Controller ] ] , controller_cls : Optional [ Type [ Controller ] ] = None , * , rules : Optional [ Iterable [ Union [ Route , RouteGenerator ] ] ] = None , ) -> RouteGenerator :
    """This function is used to register a controller class ' s routes .

    Example usage : :

        routes = lambda : [
            controller ( SiteController ) ,
        ]

    Or with the optional prefix argument : :

        routes = lambda : [
            controller ( '/products' , ProductController ) ,
        ]

    Specify ` ` rules ` ` to only include those routes from the controller : :

        routes = lambda : [
            controller ( SecurityController , rules = [
                rule ( '/login' , SecurityController . login ) ,
                rule ( '/logout' , SecurityController . logout ) ,
                rule ( '/sign-up' , SecurityController . register ) ,
            ] ) ,
        ]

    : param url_prefix_or_controller_cls : The controller class , or a url prefix for
                                          all of the rules from the controller class
                                          passed as the second argument
    : param controller_cls : If a url prefix was given as the first argument , then
                            the controller class must be passed as the second argument
    : param rules : An optional list of rules to limit / customize the routes included
                   from the controller
    """

    # Resolve the two calling conventions ( with / without url prefix ) .
    url_prefix , controller_cls = _normalize_args ( url_prefix_or_controller_cls , controller_cls , _is_controller_cls )
    url_prefix = url_prefix or controller_cls . Meta . url_prefix
    routes = [ ]
    controller_routes = getattr ( controller_cls , CONTROLLER_ROUTES_ATTR )
    if rules is None :
        # No filter : include every route declared on the controller .
        routes = controller_routes . values ( )
    else :
        # Only include the given rules , inheriting options from matching
        # routes already declared on the controller .
        for route in _reduce_routes ( rules ) :
            existing = controller_routes . get ( route . method_name )
            if existing :
                routes . append ( _inherit_route_options ( route , existing [ 0 ] ) )
            else :
                routes . append ( route )
    yield from _normalize_controller_routes ( routes , controller_cls , url_prefix = url_prefix )
|
def _collect_dirty_tabs ( self , exept = None ) :
"""Collects the list of dirty tabs"""
|
widgets = [ ]
filenames = [ ]
for i in range ( self . count ( ) ) :
widget = self . widget ( i )
try :
if widget . dirty and widget != exept :
widgets . append ( widget )
filenames . append ( widget . file . path )
except AttributeError :
pass
return widgets , filenames
|
def concatenate_variables ( scope , variables , container ) :
    '''This function allocates operators to form a float tensor by concatenating
    all input variables . Notice that all integer inputs are converted to floats
    before concatenation .

    : param scope : scope used to generate unique operator / variable names .
    : param variables : input variables to concatenate ; shapes are assumed
        to be [ 1 , C_1 ] , . . . , [ 1 , C_n ] for n inputs .
    : param container : container the conversion / concatenation nodes are
        added to .
    : return : the name of the concatenated variable , or of the single
        input when there is only one .
    : raises RuntimeError : when string and numerical inputs are mixed .
    '''

    # Check if it ' s possible to concatenate those inputs .
    type_set = set ( type ( variable . type ) for variable in variables )
    number_type_set = { FloatType , FloatTensorType , Int64Type , Int64TensorType }
    if StringType in type_set and any ( number_type in type_set for number_type in number_type_set ) :
        raise RuntimeError ( 'We are not able to concatenate numerical tensor(s) and string tensor(s)' )
    input_names = [ ]
    # input variables ' names we want to concatenate
    input_dims = [ ]
    # dimensions of the variables that is going to be concatenated
    # Collect input variable names and do cast if needed
    for variable in variables :
        if isinstance ( variable . type , ( Int64TensorType , Int64Type ) ) :
            input_names . append ( convert_integer_to_float ( scope , variable , container ) )
        else :
            input_names . append ( variable . full_name )
        # We assume input variables ' shape are [ 1 , C _ 1 ] , . . . , [ 1 , C _ n ] if there are n inputs .
        input_dims . append ( variable . type . shape [ 1 ] )
    if len ( input_names ) == 1 : # No need to concatenate tensors if there is only one input
        return input_names [ 0 ]
    else : # To combine all inputs , we need a FeatureVectorizer
        op_type = 'FeatureVectorizer'
        attrs = { 'name' : scope . get_unique_operator_name ( op_type ) , 'inputdimensions' : input_dims }
        # Create a variable name to capture feature vectorizer ' s output
        concatenated_name = scope . get_unique_variable_name ( 'concatenated' )
        # Set up our FeatureVectorizer
        container . add_node ( op_type , input_names , concatenated_name , op_domain = 'ai.onnx.ml' , ** attrs )
        return concatenated_name
|
def p_speed_information_duration ( self , p ) :
    # NOTE : the docstring below is the PLY grammar rule for this
    # production -- PLY reads it at parser build time , so it must not
    # be reworded .
    'speed : information IN duration'

    logger . debug ( 'speed = information %s in duration %s' , p [ 1 ] , p [ 3 ] )
    # Reduce : build the speed value from the information object ' s
    # in_duration ( ) helper .
    p [ 0 ] = p [ 1 ] . in_duration ( p [ 3 ] )
|
def _run_cnvkit_cancer ( items , background ) :
    """Run CNVkit on a tumor / normal pair .

    : param items : list of sample dictionaries making up the batch .
    : param background : unreferenced here ; kept for call - signature
        symmetry with the other CNVkit runners .
    : return : items with CNVkit output attached to the tumor sample , or
        the original items when CNVkit produced no output .
    """

    paired = vcfutils . get_paired_bams ( [ x [ "align_bam" ] for x in items ] , items )
    # Everything that is not the tumor sample counts as normal .
    normal_data = [ x for x in items if dd . get_sample_name ( x ) != paired . tumor_name ]
    tumor_ready , normal_ready = _match_batches ( paired . tumor_data , normal_data [ 0 ] if normal_data else None )
    ckouts = _run_cnvkit_shared ( [ tumor_ready ] , [ normal_ready ] if normal_ready else [ ] )
    if not ckouts :
        return items
    assert len ( ckouts ) == 1
    tumor_data = _associate_cnvkit_out ( ckouts , [ paired . tumor_data ] , is_somatic = True )
    return tumor_data + normal_data
|
def is_symbol_wildcard(term: Any) -> bool:
    """Return True iff the given term is a subclass of :class:`.Symbol`."""
    # Non-types can never be Symbol subclasses; bail out before the
    # issubclass check, which requires a class argument.
    if not isinstance(term, type):
        return False
    return issubclass(term, Symbol)
|
def parse(self, xml_data):
    """Parse XML data"""
    # Parse the document tree, translating stdlib errors to our own type.
    try:
        root = ET.fromstring(xml_data)
    except StdlibParseError as err:
        raise ParseError(str(err))
    self.origin = root.attrib['origin']
    # One Component per child element, keyed by its id.
    for child_node in root:
        component = Component()
        component.parse(child_node)
        self.components[component.id] = component
|
def setHalfLife(self, halfLife, timeUnit):
    """Set number of batches after which the centroids of that
    particular batch have half the weightage."""
    self._timeUnit = timeUnit
    # Chosen so that decayFactor ** halfLife == 0.5.
    self._decayFactor = exp(log(0.5) / halfLife)
    return self
|
def settimeout(self, timeout):
    '''Set the timeout value for this socket.'''
    if timeout is None:
        # No deadline: plain blocking mode.
        self._block = True
        return
    seconds = float(timeout)
    if seconds == 0.0:
        # Zero timeout means non-blocking.
        self._block = False
    else:
        self._timeout = seconds
        self._block = True
|
def role_exists(name, region=None, key=None, keyid=None, profile=None):
    '''Check to see if an IAM role exists.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_iam.role_exists myirole
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    # A server error from get_role means the role could not be found.
    try:
        conn.get_role(name)
    except boto.exception.BotoServerError:
        return False
    return True
|
def check_as_b_send_completion(self):
    """Checks whether the current asynchronous send job was completed,
    returning immediately."""
    op_result = ctypes.c_int32()
    result = self.library.Par_CheckAsBSendCompletion(self.pointer, ctypes.byref(op_result))
    if result == -2:
        raise Snap7Exception("The Client parameter was invalid")
    status_by_code = {
        0: "job complete",
        1: "job in progress",
        -2: "invalid handled supplied",
    }
    return status_by_code[result], op_result
|
def containers(self):
    """The containers that are connected to the network, as a list of
    :py:class:`~docker.models.containers.Container` objects."""
    # 'Containers' may be absent or None; treat both as "no containers".
    attached = self.attrs.get('Containers') or {}
    return [self.client.containers.get(container_id) for container_id in attached]
|
def grouping(f=None, interaction=['call', 'text'], summary='default', user_kwd=False):
    """``grouping`` is a decorator for indicator functions, used to simplify the
    source code.

    Parameters
    ----------
    f : function
        The function to decorate
    user_kwd : boolean
        If user_kwd is True, the user object will be passed to the decorated
        function
    interaction : 'call', 'text', 'location', or a list
        By default, all indicators use only 'call' and 'text' records, but the
        interaction keyword filters the records passed to the function.
    summary : 'default', 'extended', None
        An indicator returns data statistics, either *mean* and *std* by
        default, more with 'extended', or the inner distribution with None.
        See :meth:`~bandicoot.helper.group.statistics` for more details.

    See :ref:`new-indicator-label` to learn how to write an indicator with
    this decorator.
    """
    # Used as ``@grouping(...)`` with options only: return a decorator
    # pre-loaded with those options (f will be supplied on the next call).
    if f is None:
        return partial(grouping, user_kwd=user_kwd, interaction=interaction, summary=summary)

    def wrapper(user, groupby='week', interaction=interaction, summary=summary, split_week=False, split_day=False, filter_empty=True, datatype=None, **kwargs):
        # An explicit ``interaction=None`` at call time falls back to the
        # standard call/text records.
        if interaction is None:
            interaction = ['call', 'text']
        parameters = divide_parameters(split_week, split_day, interaction)
        operations = {'grouping': {'using': 'records', 'binning': False, 'groupby': groupby, 'filter_empty': filter_empty, 'divide_by': parameters}, 'apply': {'user_kwd': user_kwd, 'summary': summary, 'kwargs': kwargs}}
        # Reject unknown interaction values before any real work is done.
        for i in parameters['interaction']:
            if i not in ['callandtext', 'call', 'text', 'location']:
                raise ValueError("%s is not a valid interaction value. Only " "'call', 'text', and 'location' are accepted." % i)
        return _generic_wrapper(f, user, operations, datatype)
    return advanced_wrap(f, wrapper)
|
def _radius_auth ( self , server , username , password ) :
"""Authenticate the given username / password against the RADIUS server
described by ` server ` ."""
|
client = self . _get_client ( server )
packet = self . _get_auth_packet ( username , password , client )
return self . _perform_radius_auth ( client , packet )
|
def get_ip_prefixes_from_config(config, services, ip_version):
    """Build a set of IP prefixes found in service configuration files.

    Arguments:
        config (obj): A configparser object which holds our configuration.
        services (list): A list of section names which are the name of the
            service checks.
        ip_version (int): IP protocol version

    Returns:
        A set of IP prefixes (as ``with_prefixlen`` strings).
    """
    networks = (ipaddress.ip_network(config.get(service, 'ip_prefix'))
                for service in services)
    return {net.with_prefixlen for net in networks if net.version == ip_version}
|
def worker_workerfinished(self, node):
    """Emitted when node executes its pytest_sessionfinish hook.

    Removes the node from the scheduler.

    The node might not be in the scheduler if it had not emitted
    workerready before shutdown was triggered.
    """
    self.config.hook.pytest_testnodedown(node=node, error=None)
    if node.workeroutput["exitstatus"] == 2:  # keyboard-interrupt
        # Propagate the interrupt: stop the whole session and treat the
        # node as having gone down with an error.
        self.shouldstop = "%s received keyboard-interrupt" % (node,)
        self.worker_errordown(node, "keyboard-interrupt")
        return
    if node in self.sched.nodes:
        # remove_node returns the item the node crashed on, if any; a clean
        # sessionfinish implies there is none.
        crashitem = self.sched.remove_node(node)
        assert not crashitem, (crashitem, node)
    self._active_nodes.remove(node)
|
def _list_locators(self):
    """Lists locators.

    Returns:
        generator of tuple: locator name str, locator header dict
    """
    # Translate client errors into this library's exception types.
    with _handle_client_error():
        response = self.client.list_buckets()
    # NOTE(review): the original indentation was lost; the loop is assumed
    # to sit outside the error handler so that consumer-side iteration is
    # not wrapped by it -- confirm against upstream.
    # Each bucket dict is mutated in place: 'Name' is popped out to serve
    # as the locator name, the remainder is the header.
    for bucket in response['Buckets']:
        yield bucket.pop('Name'), bucket
|
def get_info_of_object(self, obj, selector=None):
    """Return the info dictionary of *obj*, or a single entry of it.

    When ``selector`` is given, only that key of the info dict is returned
    (``None`` if absent); otherwise the whole info dict is returned.

    Example info keys include: contentDescription, checked, scrollable,
    text, packageName, selected, enabled, bounds (top/left/right/bottom),
    className, focusable, focused, clickable, checkable, childCount,
    longClickable, visibleBounds (top/left/right/bottom).
    """
    info = obj.info
    return info.get(selector) if selector else info
|
def remove_elements(parent_to_parse, element_paths, clear_empty=False):
    """Removes all elements named after each element path. If clear_empty is True,
    for each XPATH, empty parents are removed if all their children are removed.

    :param parent_to_parse: anything ``get_element`` accepts as a parent
    :param element_paths: a single XPATH string, or an iterable of them
    :param clear_empty: forwarded to ``remove_element``
    :return: list of removed elements (possibly empty)

    :see: remove_element(parent_to_parse, element_path)
    """
    element = get_element(parent_to_parse)
    removed = []
    if element is None or not element_paths:
        return removed
    # Normalize a single path to a one-element list so both the string and
    # the iterable cases share one loop (the original duplicated the body).
    if isinstance(element_paths, string_types):
        element_paths = [element_paths]
    for xpath in element_paths:
        # remove_element may return a single element or a list of them.
        rem = remove_element(element, xpath, clear_empty)
        removed.extend(rem if isinstance(rem, list) else [rem])
    return removed
|
def do_directives(self, line):
    """List all directives supported by the bot."""
    # NOTE(review): the ``line`` parameter (the cmd-style argument string)
    # is unused and gets shadowed by the loop variable below.
    for name, cmd in self.adapter.directives.items():
        # NOTE(review): original indentation was lost; the colorize context
        # is assumed to wrap only the header print -- confirm upstream.
        with colorize('blue'):
            print('bot %s:' % name)
        if cmd.__doc__:
            for line in cmd.__doc__.split('\n'):
                print(' %s' % line)
        else:
            print()
|
def ellipsis(source, max_length):
    """Truncates a string to be at most max_length long.

    A ``max_length`` of 0 disables truncation. When truncation happens the
    last three characters are replaced with ``"..."``; for max_length in
    1..3 the result is just ``"..."``, which may exceed max_length
    (preserved quirk of the original behavior).
    """
    if max_length and len(source) > max_length:
        return source[:max(0, max_length - 3)] + "..."
    return source
|
def update_user(cls, username, email, password):
    """Edit user info.

    Pushes the email (only when it changed) and the password to SeAT.
    Returns the username on success, or None when the password update
    fails.  A failed email update is only logged.
    """
    if cls._check_email_changed(username, email):  # if we try to set the email to whatever it is already on SeAT, we get a HTTP422 error
        logger.debug("Updating SeAT username %s with email %s and password" % (username, email))
        ret = cls.exec_request('user/{}'.format(username), 'put', email=email)
        logger.debug(ret)
        if not cls._response_ok(ret):
            logger.warn("Failed to update email for username {}".format(username))
    # NOTE(review): original indentation was lost; the password update is
    # assumed to run unconditionally (not only when the email changed) --
    # confirm against upstream.
    ret = cls.exec_request('user/{}'.format(username), 'put', password=password)
    logger.debug(ret)
    if not cls._response_ok(ret):
        logger.warn("Failed to update password for username {}".format(username))
        return None
    logger.info("Updated SeAT user with username %s" % username)
    return username
|
def get_urgent(self, sensors):
    """Determine if any sensors should set the urgent flag.

    :param sensors: iterable of sensor objects exposing ``is_warning()``
        and ``is_critical()``
    :return: True as soon as one sensor matches the configured
        ``urgent_on`` level, else False
    :raises ValueError: when ``self.urgent_on`` is misconfigured
        (ValueError subclasses Exception, so existing callers still work)
    """
    if self.urgent_on not in ('warning', 'critical'):
        raise ValueError("urgent_on must be one of (warning, critical)")
    for sensor in sensors:
        if self.urgent_on == 'warning' and sensor.is_warning():
            return True
        elif self.urgent_on == 'critical' and sensor.is_critical():
            return True
    return False
|
def rename(self):
    """rename_expr ::= rename param_start rename_parameters param_stop
                       expression
    rename_parameters ::= relation_name |
                          paren_left attribute_list paren_right |
                          relation_name paren_left attribute_list paren_right
    """
    # A bare relation name, or an optional relation name followed by a
    # parenthesized attribute list.
    # NOTE(review): '^' looks like pyparsing's longest-match Or -- confirm
    # the grammar library in use.
    params = self.relation_name ^ (Optional(self.relation_name) + self.parenthesize(self.attribute_list))
    return self.parametrize(self.syntax.rename_op, params)
|
def write_genotypes(self, genotypes):
    """Write genotypes to binary file.

    Args:
        genotypes (numpy.ndarray): The genotypes to write in the BED file.

    Raises:
        UnsupportedOperation: when the file was not opened for writing.
        ValueError: when the number of samples differs from earlier writes.
    """
    if self._mode != "w":
        raise UnsupportedOperation("not available in 'r' mode")
    # Initializing the number of samples if required (first write fixes it)
    if self._nb_values is None:
        self._nb_values = len(genotypes)
    # Checking the expected number of samples
    if self._nb_values != len(genotypes):
        raise ValueError("{:,d} samples expected, got {:,d}".format(self._nb_values, len(genotypes), ))
    # Writing to file: each output byte packs four 2-bit genotype codes,
    # the first genotype in the lowest-order bits.
    # NOTE(review): _byte_recode and _grouper (incl. its padding value) are
    # defined elsewhere -- the exact on-disk encoding depends on them.
    byte_array = [g[0] | (g[1] << 2) | (g[2] << 4) | (g[3] << 6) for g in self._grouper((_byte_recode[geno] for geno in genotypes), 4)]
    self._bed.write(bytearray(byte_array))
|
def get_endtime(jid):
    '''Retrieve the stored endtime for a given job

    Returns False if no endtime is present
    '''
    jid_dir = salt.utils.jid.jid_dir(jid, _job_dir(), __opts__['hash_type'])
    endtime_path = os.path.join(jid_dir, ENDTIME)
    if not os.path.exists(endtime_path):
        return False
    # Read the raw endtime marker and normalize it to unicode without the
    # trailing newline.
    with salt.utils.files.fopen(endtime_path, 'r') as handle:
        raw = handle.read()
    return salt.utils.stringutils.to_unicode(raw).strip('\n')
|
def write(self, vals):
    """Overrides orm write method.

    @param self: The object pointer
    @param vals: dictionary of fields value.
    """
    if 'isroom' in vals:
        # Keep the kanban colour and the status field in sync with the
        # room's availability flag.
        if vals['isroom'] is False:
            vals.update({'color': 2, 'status': 'occupied'})
        elif vals['isroom'] is True:
            vals.update({'color': 5, 'status': 'available'})
    return super(HotelRoom, self).write(vals)
|
def extract_spectra_from_file(log, pathToSpectrum, convertLumToFlux=False):
    """*Given a spectrum file this function shall convert the two columns
    (wavelength and luminosity) to a wavelength (wavelengthArray) and flux
    (fluxArray) array*

    **Key Arguments:**
        - ``log`` -- logger
        - ``pathToSpectrum`` -- absolute path to the spectrum file
        - ``convertLumToFlux`` -- convert the second column from luminosity
          to flux

    **Return:**
        - (wavelengthArray, fluxArray) tuple of numpy arrays
    """
    ## STANDARD LIB ##
    import os
    ## THIRD PARTY ##
    import numpy as np
    ## LOCAL APPLICATION ##
    import dryxPython.astrotools as at
    # USE numpy TO EXTRACT THE DATA FROM FILE
    pwd = os.getcwd()
    log.debug('pwd %s' % (pwd,))
    log.debug('pathToSpectrum %s' % (pathToSpectrum,))
    data = np.genfromtxt(pathToSpectrum, skip_header=0, usecols=(0, 1))
    wavelengthArray = data[:, 0]
    luminosityArray = data[:, 1]
    # CONVERT TO FLUX: F = L / 4*pi*(r**2)
    if convertLumToFlux:
        # NOTE(review): 1e-5 looks like a hard-coded distance/scale constant
        # handed to luminosity_to_flux -- confirm its units and meaning.
        fluxArray = at.luminosity_to_flux(luminosityArray, 1e-5)
    else:
        fluxArray = luminosityArray
    log.debug('pathToSpectrum: %s' % (pathToSpectrum,))
    return wavelengthArray, fluxArray
|
def update(self, **kwargs):
    """Update the list with the current object attributes.

    :param kwargs: Extra request options
    :type kwargs: :class:`~python:dict`

    :return: Boolean to indicate if the request was successful
    :rtype: :class:`~python:bool`
    """
    endpoint = self._client['users/*/lists/*']
    item = endpoint.update(self.username, self.id, return_type='data', **kwargs)
    if item:
        # Refresh local state from the returned representation.
        self._update(item)
        return True
    return False
|
def strip_dimensions(self, text_lines, location, pid):
    """Calculate the dimensions of a facet strip.

    Returns
    -------
    out : types.SimpleNamespace
        A structure with all the coordinates required
        to draw the strip text and the background box.
    """
    dpi = 72
    num_lines = len(text_lines)
    get_property = self.theme.themeables.property
    ax = self.axs[pid]
    # Axes bounding box converted from display units to figure inches.
    bbox = ax.get_window_extent().transformed(self.figure.dpi_scale_trans.inverted())
    ax_width, ax_height = bbox.width, bbox.height
    # in inches
    strip_size = self.strip_size(location, num_lines)
    m1, m2 = self.inner_strip_margins(location)
    m1, m2 = m1 / dpi, m2 / dpi
    margin = 0
    # default
    if location == 'right':
        # Strip runs along the right edge, spanning the full axes height.
        box_x = 1
        box_y = 0
        box_width = strip_size / ax_width
        box_height = 1
        # y & height properties of the background slide and
        # shrink the strip vertically. The y margin slides
        # it horizontally.
        with suppress(KeyError):
            box_y = get_property('strip_background_y', 'y')
        with suppress(KeyError):
            box_height = get_property('strip_background_y', 'height')
        with suppress(KeyError):
            margin = get_property('strip_margin_y')
        # Text centre: half the strip width beyond the axes edge, adjusted
        # by the asymmetric inner margins.
        x = 1 + (strip_size - m2 + m1) / (2 * ax_width)
        y = (2 * box_y + box_height) / 2
        # margin adjustment
        hslide = 1 + margin * strip_size / ax_width
        x *= hslide
        box_x *= hslide
    else:
        # Strip runs along the top edge, spanning the full axes width.
        box_x = 0
        box_y = 1
        box_width = 1
        box_height = strip_size / ax_height
        # x & width properties of the background slide and
        # shrink the strip horizontally. The y margin slides
        # it vertically.
        with suppress(KeyError):
            box_x = get_property('strip_background_x', 'x')
        with suppress(KeyError):
            box_width = get_property('strip_background_x', 'width')
        with suppress(KeyError):
            margin = get_property('strip_margin_x')
        x = (2 * box_x + box_width) / 2
        y = 1 + (strip_size - m1 + m2) / (2 * ax_height)
        # margin adjustment
        vslide = 1 + margin * strip_size / ax_height
        y *= vslide
        box_y *= vslide
    dimensions = types.SimpleNamespace(x=x, y=y, box_x=box_x, box_y=box_y, box_width=box_width, box_height=box_height)
    return dimensions
|
def main():
    """Define the CLI interface/commands."""
    arguments = docopt(__doc__)
    cfg_filename = pkg_resources.resource_filename('knowledge_base', 'config/virtuoso.ini')
    kb = KnowledgeBase(cfg_filename)
    # the user has issued a `find` command
    if arguments["find"]:
        search_string = arguments["<search_string>"]
        # First try to interpret the search string as a CTS URN for an
        # exact lookup; on bad URN syntax fall through to full-text search.
        try:
            urn = CTS_URN(search_string)
            match = kb.get_resource_by_urn(str(urn))
            show_result(match, verbose=True)
            return
        except BadCtsUrnSyntax as e:
            pass
        except IndexError as e:
            raise e
            # NOTE(review): the two statements below are unreachable after
            # `raise`; original indentation was lost, so their intended
            # placement (possibly replacing the raise) needs confirming.
            print("\nNo records with this CTS URN!\n")
            return
        try:
            matches = kb.search(search_string)
            print("\nSearching for \"%s\" yielded %s results" % (search_string, len(matches)))
            print_results(matches)
            return
        except SparqlReaderException as e:
            print("\nWildcard word needs at least 4 leading characters")
    # the user has issued an `add` command
    elif arguments["add"]:
        input_urn = arguments["--to"]
        # first let's check if it's a valid URN
        try:
            urn = CTS_URN(input_urn)
        except Exception as e:
            print("The provided URN ({}) is invalid!".format(input_urn))
            return
        try:
            resource = kb.get_resource_by_urn(urn)
            assert resource is not None
        except ResourceNotFound:
            print("The KB does not contain a resource identified by {}".format(urn))
            return
        print(arguments)
        # NOTE(review): the `add` branch is incomplete upstream.
        pass
|
def check_validity(self, checks=None, report=True):
    """Runs a Symbol's validity checks.

    Parameters
    ----------
    checks : str, [str,], optional
        Only run certain checks.
    report : bool, optional
        If set to False, the method will return only the result of the
        checks (True/False). Set to True, to have a SymbolReport
        returned as well.

    Returns
    -------
    Bool, or a Tuple of the form (Bool, SymbolReport)

    NOTE(review): this code is Python-2-only (`unicode`, `func_code`), and
    `reportpoints` is appended to unconditionally even though it is only
    defined when report=True -- calling with report=False would raise
    NameError. Flagged, not fixed, to preserve behavior.
    """
    if report:
        reportpoints = []
    allchecks = []
    checks_specified = False
    # Accept a single check name or a list/tuple of them; anything else
    # means "run every check".
    if isinstance(checks, (str, unicode)):
        checks = [checks]
        checks_specified = True
    elif isinstance(checks, (list, tuple)):
        checks_specified = True
    else:
        checks = []
    for val in self.validity:
        if (val.validator in checks) or (not checks_specified):
            ValCheck = validitychecks[val.validator]
            # Number of positional args the check's __init__ accepts
            # beyond (self, dataframe).
            anum = ValCheck.__init__.func_code.co_argcount - 2
            args = []
            for arg in SymbolValidity.argnames:
                args.append(getattr(val, arg))
            valid = ValCheck(self.datatable_df, *args[:anum])
            res = valid.result
            allchecks.append(res)
            rp = ReportPoint('validation', val.validator, res, str(args[:anum]))
            reportpoints.append(rp)
    if report:
        return all(allchecks), reportpoints
    else:
        return all(allchecks)
|
def get_file_paths_for_program(program, dir_to_search):
    """Return an array of full paths matching the given program. If no
    directory is present, returns an empty list.

    Paths are not guaranteed to exist -- this just says where they would
    be. ``dir_to_search`` must be fully expanded before being passed in
    (i.e. no ~ or variables).
    """
    if dir_to_search is None:
        return []
    wanted = program + EXAMPLE_FILE_SUFFIX
    return [
        os.path.join(basedir, file_name)
        for basedir, _dirs, file_names in os.walk(dir_to_search)
        for file_name in file_names
        if file_name == wanted
    ]
|
def getPath(self, suffix=None, extension="jar", separator=os.sep):
    """Returns the full path, relative to the root of a Maven repository,
    of the current artifact, using Maven's conventions.

    In particular, it will be:
    <groupId with "." replaced by <separator>>[<separator><artifactId>
    <separator>[<version><separator>]<basename via getFileName()>]

    By default, <separator> = os.sep
    """
    assert self._groupId is not None
    parts = [self._groupId.replace(".", separator)]
    if self._artifactId is not None:
        parts.append(self._artifactId)
        if self._version is not None:
            parts.append(self._version.getRawString())
        parts.append(self.getFileName(suffix, extension))
    return separator.join(parts)
|
def get_order_container(self, quote_id):
    """Generate an order container from a quote object.

    :param quote_id: ID number of target quote
    """
    quote_service = self.client['Billing_Order_Quote']
    return quote_service.getRecalculatedOrderContainer(id=quote_id)
|
def describe_policy_version(policyName, policyVersionId, region=None, key=None, keyid=None, profile=None):
    '''Given a policy name and version describe its properties.

    Returns a dictionary of interesting properties.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_iot.describe_policy_version mypolicy version
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        policy = conn.get_policy_version(policyName=policyName, policyVersionId=policyVersionId)
        if policy:
            # Project the response down to the fields callers care about.
            keys = ('policyName', 'policyArn', 'policyDocument', 'policyVersionId', 'isDefaultVersion')
            return {'policy': dict([(k, policy.get(k)) for k in keys])}
        else:
            return {'policy': None}
    except ClientError as e:
        # NOTE(review): `err` is computed but unused -- left as-is to keep
        # this a documentation-only change.
        err = __utils__['boto3.get_error'](e)
        # A missing policy/version is reported as None, not as an error.
        if e.response.get('Error', {}).get('Code') == 'ResourceNotFoundException':
            return {'policy': None}
        return {'error': __utils__['boto3.get_error'](e)}
|
def _rows_date2int ( self , rows ) :
"""Replaces start and end dates in a row set with their integer representation
: param list [ dict [ str , T ] ] rows : The list of rows ."""
|
for row in rows : # Determine the type of dates based on the first start date .
if not self . _date_type :
self . _date_type = self . _get_date_type ( row [ self . _key_start_date ] )
# Convert dates to integers .
row [ self . _key_start_date ] = self . _date2int ( row [ self . _key_start_date ] )
row [ self . _key_end_date ] = self . _date2int ( row [ self . _key_end_date ] )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.