signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def cumulative_before_during_after(self, start: datetime.datetime, when: datetime.datetime) -> Tuple[datetime.timedelta, datetime.timedelta, datetime.timedelta]:
    """Split the span from ``start`` to ``when`` into three cumulative durations.

    - ``before``: time after ``start`` but before ``self`` begins, prior to ``when``;
    - ``during``: time after ``start`` and inside intervals of ``self``, prior to ``when``;
    - ``after``: time after ``start``, after at least one interval has finished,
      and not within any interval of ``self``, prior to ``when``.

    Args:
        start: the start time of interest (e.g. before ``self`` begins)
        when: the time of interest

    Returns:
        tuple: ``before, during, after``
    """
    assert self.no_overlap, (
        "Only implemented for IntervalList objects with no_overlap == True"
    )
    zero = datetime.timedelta()
    first_start = self.start_datetime()
    # Easy special cases: nothing elapsed yet, or nothing has begun.
    if when <= start:
        return zero, zero, zero
    if self.is_empty() or when <= first_start:
        return when - start, zero, zero
    # Here: self is a non-empty list, start < when, first_start < when.
    before = zero if first_start < start else first_start - start
    during = self.cumulative_time_to(when)
    after = self.cumulative_gaps_to(when) + self.time_afterwards_preceding(when)
    return before, during, after
|
def set_joystick(self, x, y, n):
    """Receives joystick values from the SnakeBoard.

    x, y    Coordinates
    n       Robot number to give it to
    """
    # Forward the coordinates to the selected robot.
    self.robots[n].set_joystick(x, y)
|
def get_auth_string(self):
    """Create auth string from credentials."""
    # Base64-encode "username:access_key" as required for HTTP basic auth.
    credentials = '{}:{}'.format(self.sauce_username, self.sauce_access_key)
    return base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
|
def merge_bins(self, bin_ranges, axis=0):
    """Merge bins in bin ranges.

    Parameters
    ----------
    bin_ranges : list of tuples
        A list of tuples of bin indices for each bin range to be merged
        into one bin.
    axis : int (default=0)
        The integer identifying the axis to merge bins along.

    Returns
    -------
    hist : TH1
        The rebinned histogram.

    Examples
    --------
    Merge the overflow bins into the first and last real bins::

        newhist = hist.merge_bins([(0, 1), (-2, -1)])
    """
    ndim = self.GetDimension()
    if axis > ndim - 1:
        raise ValueError("axis is out of range")
    axis_bins = self.nbins(axis=axis, overflow=True)
    # Collect the indices along this axis to be merged; negative indices
    # are supported via slicing.
    windows = []
    for window in bin_ranges:
        if len(window) != 2:
            raise ValueError("bin range tuples must contain "
                             "two elements: {0!r}".format(window))
        left, right = window
        if left == right:
            raise ValueError("bin indices must not be equal "
                             "in a merging window: {0!r}".format(window))
        if (left < 0 and right >= 0) or (left > 0 and right > 0 and left > right):
            raise ValueError("invalid bin range: {0!r}".format(window))
        if right == -1:
            right = axis_bins
        else:
            right += 1
        bin_idx = range(*slice(left, right).indices(axis_bins))
        if bin_idx:  # skip empty windows
            windows.append(list(bin_idx))
    if not windows:
        # No merging will take place, so return a clone.
        return self.Clone()
    # Check that windows do not overlap.
    if len(windows) > 1:
        flattened = [idx for window in windows for idx in window]
        if len(flattened) != len(set(flattened)):
            raise ValueError(
                "bin index windows overlap: {0!r}".format(bin_ranges))
    # Construct a mapping from old to new bin index along this axis.
    windows.sort()
    mapping = {}
    left_idx = {}
    offset = 0
    for window in windows:
        # A window starting at the underflow bin is folded into bin 1.
        new_idx = window[0] - offset or 1
        left_idx[window[0] or 1] = None
        for idx in window:
            mapping[idx] = new_idx
        offset += len(window) - 1
        if window[0] == 0:
            offset -= 1
    new_axis_bins = axis_bins - offset
    # Construct the new bin edges.
    new_edges = []
    for i, edge in enumerate(self._edges(axis)):
        if (i != axis_bins - 2 and i + 1 in mapping
                and i + 1 not in left_idx):
            continue
        new_edges.append(edge)
    # Construct the new histogram and fill it.
    new_hist = self.empty_clone(binning=new_edges, axis=axis)
    this_axis = self.axis(axis)
    new_axis = new_hist.axis(axis)

    def translate(idx):
        if idx in mapping:
            return mapping[idx]
        if idx == 0:
            return 0
        # Use TH1.FindBin to determine where the bins should be merged.
        return new_axis.FindBin(this_axis.GetBinCenter(idx))

    for bin in self.bins(overflow=True):
        xyz = bin.xyz
        new_xyz = list(xyz)
        new_xyz[axis] = translate(int(xyz[axis]))
        x, y, z = new_xyz
        new_v = new_hist.GetBinContent(x, y, z)
        new_hist.SetBinContent(x, y, z, new_v + bin.value)
        sum_w2 = self.get_sum_w2(*xyz)
        new_sum_w2 = new_hist.get_sum_w2(x, y, z)
        new_hist.set_sum_w2(sum_w2 + new_sum_w2, x, y, z)
    # Transfer the stats info.
    stat_array = array('d', [0.] * 10)
    self.GetStats(stat_array)
    new_hist.PutStats(stat_array)
    new_hist.SetEntries(self.GetEntries())
    return new_hist
|
def add(from_user, from_id, to_user, to_id, type):
    """Adds a relation to the graph.

    Depending on options.users, nodes are either user screen names (with
    edge weights counting repeat relations) or tweet/user ids.
    """
    if options.users and to_user:
        G.add_node(from_user, screen_name=from_user)
        G.add_node(to_user, screen_name=to_user)
        # Increment the edge weight on repeat relations.
        if G.has_edge(from_user, to_user):
            weight = G[from_user][to_user]['weight'] + 1
        else:
            weight = 1
        G.add_edge(from_user, to_user, type=type, weight=weight)
    elif not options.users and to_id:
        G.add_node(from_id, screen_name=from_user, type=type)
        if to_user:
            G.add_node(to_id, screen_name=to_user)
        else:
            G.add_node(to_id)
        G.add_edge(from_id, to_id, type=type)
|
def absent(name, version=-1, recursive=False, profile=None, hosts=None,
           scheme=None, username=None, password=None, default_acl=None):
    '''
    Make sure znode is absent

    name
        path to znode

    version
        Specify the version which should be deleted
        Default: -1 (always match)

    recursive
        Boolean to indicate if children should be recursively deleted
        Default: False

    profile
        Configured Zookeeper profile to authenticate with (Default: None)

    hosts
        Lists of Zookeeper Hosts (Default: '127.0.0.1:2181')

    scheme
        Scheme to authenticate with (Default: 'digest')

    username
        Username to authenticate (Default: None)

    password
        Password to authenticate (Default: None)

    default_acl
        Default acls to assign if a node is created in this connection (Default: None)

    .. code-block:: yaml

        delete znode:
          zookeeper.absent:
            - name: /test
            - recursive: True
    '''
    ret = {'name': name,
           'result': False,
           'comment': 'Failed to delete znode {0}'.format(name),
           'changes': {}}
    connkwargs = {'profile': profile, 'hosts': hosts, 'scheme': scheme,
                  'username': username, 'password': password,
                  'default_acl': default_acl}
    # Already gone: nothing to do.
    if __salt__['zookeeper.exists'](name, **connkwargs) is False:
        ret['result'] = True
        ret['comment'] = 'Znode {0} does not exist'.format(name)
        return ret
    # Capture the current state for the change report.
    changes = {'value': __salt__['zookeeper.get'](name, **connkwargs),
               'acls': __salt__['zookeeper.get_acls'](name, **connkwargs)}
    if recursive is True:
        changes['children'] = __salt__['zookeeper.get_children'](name, **connkwargs)
    # Dry run: report what would be removed.
    if __opts__['test'] is True:
        ret['result'] = None
        ret['comment'] = 'Znode {0} will be removed'.format(name)
        ret['changes']['old'] = changes
        return ret
    __salt__['zookeeper.delete'](name, version, recursive, **connkwargs)
    if __salt__['zookeeper.exists'](name, **connkwargs) is False:
        ret['result'] = True
        ret['comment'] = 'Znode {0} has been removed'.format(name)
        ret['changes']['old'] = changes
    return ret
|
def getNextSample ( self , V ) :
"""Generate the next sample for the condorcet model . This algorithm is described in " Computing
Optimal Bayesian Decisions for Rank Aggregation via MCMC Sampling , " and is adapted from
code written by Lirong Xia .
: ivar list < list < int > V : A two - dimensional list that for every pair of candidates cand1 and
cand2 , V [ cand1 ] [ cand2 ] contains 1 if cand1 is ranked above cand2 more times than cand2
is ranked above cand1 and 0 otherwise ."""
|
cands = range ( len ( self . wmg ) )
W = copy . deepcopy ( V )
allPairs = itertools . combinations ( cands , 2 )
for pair in allPairs :
a = pair [ 0 ]
b = pair [ 1 ]
if random . random ( ) < 1.0 / ( 1.0 + pow ( self . phi , self . wmg [ a + 1 ] [ b + 1 ] ) ) :
W [ a ] [ b ] = 1
W [ b ] [ a ] = 0
else :
W [ a ] [ b ] = 0
W [ b ] [ a ] = 1
prMW = 1
prMV = 1
prob = min ( 1.0 , prMW / prMV )
if random . random ( ) <= prob :
V = W
return V
|
def cli(ctx, pattern, arguments, safe):
    """Executes a saved command."""
    matches = utils.grep_commands(pattern)
    if matches:
        selected = utils.select_command(matches)
        if selected >= 0:
            cmd, desc = matches[selected]
            pcmd = utils.create_pcmd(cmd)
            raw_params, params, defaults = utils.get_params_in_pcmd(pcmd)
            arguments = list(arguments)
            kargs = {}
            # Fill parameters positionally first, then fall back to the
            # stored defaults (safe mode) or to interactive prompts.
            for r, p, d in zip(raw_params, params, defaults):
                if arguments:
                    val = arguments.pop(0)
                    click.echo("{}: {}".format(p, val))
                    kargs[r] = val
                elif safe:
                    if d:
                        kargs[r] = d
                else:
                    val = click.prompt("Enter value for '{}'".format(p),
                                       default=d if d else None)
                    kargs[r] = val
            click.echo("\n")
            final_cmd = utils.substitute_pcmd(pcmd, kargs, safe)
            command = "$ {} :: {}".format(final_cmd, desc)
            if click.confirm("Execute\n\t{}\n\n?".format(command), default=True):
                os.system(final_cmd)
    elif matches == []:
        click.echo('No saved commands matches the pattern {}'.format(pattern))
    else:
        click.echo("No commands to run, Add one by 'keep new'. ")
|
def setCurrentIndex(self, index):
    """Sets the current index on self and on the tab bar to keep the two
    in sync.

    :param index | <int>
    """
    # Update the panel first, then mirror the index onto the tab bar.
    super(XViewPanel, self).setCurrentIndex(index)
    self.tabBar().setCurrentIndex(index)
|
def _validate_filters ( cls , filters ) :
"""Raise a TypeError if ` ` filters ` ` contains any keys inappropriate to
this event class ."""
|
for k in iterkeys ( filters ) :
if k not in cls . filters : # Mirror " unexpected keyword argument " message :
raise TypeError ( "%s got an unsupported filter type '%s'" % ( cls . __name__ , k ) )
|
def format_json_api_response(self, data, many):
    """Post-dump hook that formats serialized data as a top-level JSON API
    object.

    See: http://jsonapi.org/format/#document-top-level
    """
    # Pipeline: items -> wrapped response -> included data -> meta document.
    ret = self.format_items(data, many)
    ret = self.wrap_response(ret, many)
    ret = self.render_included_data(ret)
    return self.render_meta_document(ret)
|
def _FormatSocketInet128Token(self, token_data):
    """Formats an Internet socket token as a dictionary of values.

    Args:
      token_data (bsm_token_data_sockinet64): AUT_SOCKINET128 token data.

    Returns:
      dict[str, str]: token values.
    """
    protocol = bsmtoken.BSM_PROTOCOLS.get(token_data.socket_family, 'UNKNOWN')
    # NOTE(review): "ip_addresss" (triple s) appears to mirror the token
    # structure's attribute name — confirm against the struct definition
    # before renaming.
    ip_address = self._FormatPackedIPv6Address(token_data.ip_addresss)
    return {
        'protocols': protocol,
        'family': token_data.socket_family,
        'port': token_data.port_number,
        'address': ip_address,
    }
|
def splitext(path):
    # type: (str) -> Tuple[str, str]
    """Like os.path.splitext, but take off .tar too."""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        # Fold ".tar" back into the extension: "x.tar.gz" -> ("x", ".tar.gz").
        return base[:-4], base[-4:] + ext
    return base, ext
|
def solveBinPacking(s, B):
    """solveBinPacking: use an IP model to solve the Bin Packing Problem.

    Parameters:
        - s: list with item widths
        - B: bin capacity

    Returns a solution: list of lists, each of which with the items in a roll.
    """
    n = len(s)
    # First-fit decreasing gives an upper bound on the number of bins.
    U = len(FFD(s, B))
    model = bpp(s, B)
    x, y = model.data
    model.optimize()
    bins = [[] for _ in range(U)]
    # x[i, j] == 1 means item i was assigned to bin j.
    for (i, j) in x:
        if model.getVal(x[i, j]) > .5:
            bins[j].append(s[i])
    # Drop unused bins, then normalise ordering for a deterministic result.
    for _ in range(bins.count([])):
        bins.remove([])
    for b in bins:
        b.sort()
    bins.sort()
    return bins
|
async def set_power(self, value: bool):
    """Toggle the device on and off."""
    status = "active" if value else "off"
    # TODO WoL works when quickboot is not enabled
    return await self.services["system"]["setPowerStatus"](status=status)
|
def record(self, tags=None):
    """Records all the measures at the same time with a tag_map.

    tag_map could either be explicitly passed to the method, or implicitly
    read from current runtime context.
    """
    if tags is None:
        tags = TagContext.get()
    if self._invalid:
        logger.warning("Measurement map has included negative value "
                       "measurements, refusing to record")
        return
    for measure, value in self.measurement_map.items():
        if value < 0:
            # A single negative value poisons the whole map permanently.
            self._invalid = True
            logger.warning("Dropping values, value to record must be "
                           "non-negative")
            logger.info("Measure '{}' has negative value ({}), refusing "
                        "to record measurements from {}".format(
                            measure.name, value, self))
            return
    self.measure_to_view_map.record(
        tags=tags,
        measurement_map=self.measurement_map,
        timestamp=utils.to_iso_str(),
        attachments=self.attachments,
    )
|
def extract_suffix(self, name):
    """Returns a tuple of (name, suffix), or (name, None) if no suffix could
    be found.

    As the method name indicates, the name is returned without the suffix.
    Suffixes deemed to be degrees are discarded.
    """
    # Only look for a suffix when the name has enough parts for one to
    # plausibly exist.
    if len(name.strip().split()) > 2:
        name, suffix = self.extract_matching_portion(
            r'\b(?P<suffix>{})(?=\b|\s|\Z|\W)'.format(SUFFIX_RE), name)
        # Degrees are extracted from the suffix and thrown away.
        suffix, degree = self.extract_matching_portion(DEGREE_RE, suffix or '')
        return name, suffix or None
    return name, None
|
def merge_instances(cls, inst1, inst2):
    """Merges the two datasets (side-by-side).

    :param inst1: the first dataset
    :type inst1: Instances or str
    :param inst2: the second dataset
    :type inst2: Instances
    :return: the combined dataset
    :rtype: Instances
    """
    # Delegate to weka.core.Instances.mergeInstances via the JVM bridge.
    merged = javabridge.static_call(
        "weka/core/Instances", "mergeInstances",
        "(Lweka/core/Instances;Lweka/core/Instances;)Lweka/core/Instances;",
        inst1.jobject, inst2.jobject)
    return Instances(merged)
|
def on_cloud_download_item_activated(self, menu_item):
    '''Create an offline (cloud) download task for the selected BT torrent.'''
    tree_paths = self.iconview.get_selected_items()
    if not tree_paths:
        return
    # Only the first selected item is submitted as a cloud BT task.
    self.app.cloud_page.add_cloud_bt_task(self.liststore[tree_paths[0]][PATH_COL])
|
def traverse_frozen_data(data_structure):
    """Yields the leaves of the frozen data-structure pre-order.

    It will produce the same order as one would write the data-structure.
    """
    parent_stack = [data_structure]
    while parent_stack:
        node = parent_stack.pop(0)
        # Strings are sized but are treated as leaves: we don't iterate them.
        tlen = -1
        if not isinstance(node, _string_types):
            try:
                tlen = len(node)
            except TypeError:
                # Was a bare `except:`; only "object has no len()" is
                # expected here, so catch just TypeError.
                pass
        if tlen == -1:
            yield node
        else:
            # Prepend the children to keep the traversal pre-order.
            parent_stack = list(node) + parent_stack
|
def search_for_port(port_glob, req, expected_res):
    '''Find the serial port the arm is connected to.'''
    # Check that the USB device actually exists, based on the known FTDI
    # vendor and product ID.
    if usb.core.find(idVendor=0x0403, idProduct=0x6001) is None:
        return None
    # Find candidate ports matching the supplied glob.
    ports = glob.glob(port_glob)
    if len(ports) == 0:
        return None
    for port in ports:
        with r12_serial_port(port) as ser:
            if not ser.isOpen():
                ser.open()
            # Write a request out.
            if sys.version_info[0] == 2:
                ser.write(str(req).encode('utf-8'))
            else:
                ser.write(bytes(req, 'utf-8'))
            # Wait a short period to allow the connection to generate output.
            time.sleep(0.1)
            # Read output from the serial connection and check if it's
            # what we want.
            res = ser.read(ser.in_waiting).decode(OUTPUT_ENCODING)
            if expected_res in res:
                return port
    raise ArmException('ST Robotics connection found, but is not responsive.'
                       + ' Is the arm powered on?')
    return None  # NOTE(review): unreachable; kept from the original.
|
def animation_control(object, sequence_length=None, add=True, interval=200):
    """Animate scatter, quiver or mesh by adding a slider and play button.

    :param object: :any:`Scatter` or :any:`Mesh` object (having a
        sequence_index property), or a list of these to control multiple.
    :param sequence_length: If sequence_length is None we try our best to
        figure it out; in case we do it badly, you can tell us what it should
        be. Should be equal to the S in the shape of the numpy arrays as for
        instance documented in :any:`scatter` or :any:`plot_mesh`.
    :param add: if True, add the widgets to the container, else return a HBox
        with the slider and play button. Useful when you want to customise
        the layout of the widgets yourself.
    :param interval: interval in msec between each frame
    :return: If add is False, it returns the ipywidgets.HBox object
        containing the controls
    """
    if isinstance(object, (list, tuple)):
        objects = object
    else:
        objects = [object]
    del object
    if sequence_length is None:
        # Infer the frame count from all non-None coordinate arrays.
        sequence_lengths = []
        for obj in objects:
            sequence_lengths_previous = list(sequence_lengths)
            values = [getattr(obj, name) for name in "x y z vx vy vz".split()
                      if hasattr(obj, name)]
            values = [k for k in values if k is not None]
            # Sort them such that the highest dim is first.
            values.sort(key=lambda key: -len(key.shape))
            try:
                # Assume the first axis defines the sequence length.
                sequence_length = values[0].shape[0]
                if isinstance(obj, ipv.Mesh):
                    # For a mesh, a plain 1d array is most likely not an
                    # animation, so require at least 2 dimensions.
                    if len(values[0].shape) >= 2:
                        sequence_lengths.append(sequence_length)
                else:
                    sequence_lengths.append(sequence_length)
            except IndexError:
                # Scalars get ignored.
                pass
            if hasattr(obj, 'color'):
                color = obj.color
                if color is not None:
                    shape = color.shape
                    if len(shape) == 3:
                        # Would be the case for (frame, point_index, color_index).
                        sequence_lengths.append(shape[0])
                    # TODO: maybe support arrays of string type of form
                    # (frame, point_index)
            if len(sequence_lengths) == len(sequence_lengths_previous):
                raise ValueError('no frame dimension found for object: {}'.format(obj))
        sequence_length = max(sequence_lengths)
    fig = gcf()
    fig.animation = interval
    fig.animation_exponent = 1.0
    play = ipywidgets.Play(min=0, max=sequence_length - 1,
                           interval=interval, value=0, step=1)
    slider = ipywidgets.FloatSlider(min=0, max=play.max, step=1)
    # Keep play button, slider and every object's sequence_index in sync
    # on the browser side.
    ipywidgets.jslink((play, 'value'), (slider, 'value'))
    for obj in objects:
        ipywidgets.jslink((slider, 'value'), (obj, 'sequence_index'))
    control = ipywidgets.HBox([play, slider])
    if add:
        current.container.children += (control,)
    else:
        return control
|
def gap_index_map(sequence, gap_chars='-'):
    """Opposite of ungap_index_map: returns mapping from gapped index to
    ungapped index.

    >>> gap_index_map('AC-TG-')
    {0: 0, 1: 1, 3: 2, 4: 3}
    """
    # Invert the ungapped->gapped mapping produced by ungap_index_map.
    return {v: k for k, v in ungap_index_map(sequence, gap_chars).items()}
|
def istrue(self, *args):
    """Strict test for 'true' value test. If multiple args are provided it
    will test them all.

    ISTRUE: true
    %{ISTRUE: true} -> 'True'
    """
    def is_true(val):
        # Accept the boolean True itself, or a case-insensitive string form.
        return val is True or str(val).lower().strip() in ('true', 'yes', '1')
    return all(self._arg_factory(is_true, args))
|
def spher_harms(l, m, inclination):
    """Return spherical harmonic polarizations (Y_plus, Y_cross)."""
    # FIXME: we are using spin -2 weighted spherical harmonics for now,
    # when possible switch to spheroidal harmonics.
    Y_lm = lal.SpinWeightedSphericalHarmonic(inclination, 0., -2, l, m).real
    Y_lminusm = lal.SpinWeightedSphericalHarmonic(inclination, 0., -2, l, -m).real
    # The (-1)^l factor combines the +m and -m modes into the two
    # polarizations.
    sign = (-1) ** l
    return Y_lm + sign * Y_lminusm, Y_lm - sign * Y_lminusm
|
def disable_tracing(self):
    """Disable tracing if it is enabled and the debugged program is running,
    else do nothing.

    :return: False if tracing has been disabled, True else.
    """
    _logger.x_debug("disable_tracing()")
    # self.dump_tracing_state("before disable_tracing()")
    if self.tracing_enabled and self.execution_started:
        # Don't trace threads to come.
        threading.settrace(None)
        iksettrace3._set_trace_off()
        self.tracing_enabled = False
    # self.dump_tracing_state("after disable_tracing()")
    return self.tracing_enabled
|
def from_string(cls, string, exists=False, asynchronous=False, verbose=False):
    """Build an instance for the scheme parsed out of ``string``.

    if exists is bool, then check it either exists or it doesn't.
    if exists is None, we don't care.
    """
    result = cls.parse(string)
    # Dispatch on the parsed scheme; unknown schemes are an error.
    if result.scheme not in cls.TYPES:
        raise CopyError("Invalid scheme: %s" % (result.scheme))
    return cls.TYPES[result.scheme](result, exists, asynchronous, verbose)
|
def nextStationJD(ID, jd):
    """Finds the approximate julian date of the next station of a planet."""
    speed = swe.sweObject(ID, jd)['lonspeed']
    # Scan forward in half-day steps (up to 1000 days) looking for a sign
    # change (or zero) in longitudinal speed, which marks a station.
    for step in range(2000):
        candidate = jd + step / 2
        if speed * swe.sweObject(ID, candidate)['lonspeed'] <= 0:
            return candidate
    return None
|
def _exec_check ( self , check : FontbakeryCallable , args : Dict [ str , Any ] ) :
"""Yields check sub results .
Each check result is a tuple of : ( < Status > , mixed message )
` status ` : must be an instance of Status .
If one of the ` status ` entries in one of the results
is FAIL , the whole check is considered failed .
WARN is most likely a PASS in a non strict mode and a
FAIL in a strict mode .
` message ` :
* If it is an ` Exception ` type we expect ` status `
not to be PASS
* If it is a ` string ` it ' s a description of what passed
or failed .
* we ' ll think of an AdvancedMessageType as well , so that
we can connect the check result with more in depth
knowledge from the check definition ."""
|
try : # A check can be either a normal function that returns one Status or a
# generator that yields one or more . The latter will return a generator
# object that we can detect with types . GeneratorType .
result = check ( ** args )
# Might raise .
if isinstance ( result , types . GeneratorType ) : # Iterate over sub - results one - by - one , list ( result ) would abort on
# encountering the first exception .
for sub_result in result : # Might raise .
yield self . _check_result ( sub_result )
return
# Do not fall through to rest of method .
except Exception as e :
error = FailedCheckError ( e )
result = ( ERROR , error )
yield self . _check_result ( result )
|
def put(self, instance, errors):
    """Update a model instance.

    :param instance: The model instance.
    :param errors: Any errors.
    :return: The updated model instance, or a dictionary of errors.
    """
    # Errors take precedence over a successful update.
    return self.errors(errors) if errors else self.updated(instance)
|
def set_color(self, ipaddr, hue, sat, bri, kel, fade):
    """Send SETCOLOR message."""
    # Assemble the command payload and hand it to the transport layer.
    self._send_command({
        "payloadtype": PayloadType.SETCOLOR,
        "target": ipaddr,
        "hue": hue,
        "sat": sat,
        "bri": bri,
        "kel": kel,
        "fade": fade,
    })
|
def _CheckAttribute(self, attribute, value):
    """Check that the value is of the expected type.

    Args:
      attribute: An instance of Attribute().
      value: An instance of RDFValue.

    Raises:
      ValueError: when the value is not of the expected type.
      AttributeError: when the attribute is not of type Attribute().
    """
    if not isinstance(attribute, Attribute):
        raise AttributeError(
            "Attribute %s must be of type aff4.Attribute()" % attribute)
    if not isinstance(value, attribute.attribute_type):
        raise ValueError(
            "Value for attribute %s must be of type %s()"
            % (attribute, attribute.attribute_type.__name__))
|
def infer_transition_matrix_coefficient_from_data(
    self,
    source: str,
    target: str,
    state: Optional[str] = None,
    crop: Optional[str] = None,
):
    """Infer the distribution of a particular transition matrix coefficient
    from data.

    Args:
        source: The source of the edge corresponding to the matrix element
            to infer.
        target: The target of the edge corresponding to the matrix element
            to infer.
        state: The state in South Sudan for which the transition matrix
            coefficient should be calculated.
        crop: The crop for which the transition matrix coefficient should
            be calculated.
    """
    # SECURITY NOTE(review): `state` and `crop` are interpolated directly
    # into the SQL string; if they can ever come from untrusted input this
    # is an injection vector — prefer parameterized queries.
    rows = engine.execute(
        f"select * from dssat where `Crop` like '{crop}'"
        f" and `State` like '{state}'"
    )
    xs, ys = lzip(*[(r["Rainfall"], r["Production"]) for r in rows])
    # Normalise both series by their means before fitting.
    xs_scaled, ys_scaled = xs / np.mean(xs), ys / np.mean(ys)
    # Linear fit with covariance; the slope's variance drives the sampling.
    p, V = np.polyfit(xs_scaled, ys_scaled, 1, cov=True)
    self.edges[source, target]["βs"] = np.random.normal(
        p[0], np.sqrt(V[0][0]), self.res)
    self.sample_from_prior()
|
def shell_call(command, **kwargs):
    """Calls shell command with parameter substitution.

    Args:
        command: command to run as a list of tokens
        **kwargs: dictionary with substitutions

    Returns:
        whether command was successful, i.e. returned 0 status code

    Example of usage:
        shell_call(['cp', '${A}', '${B}'], A='src_file', B='dst_file')
    will call shell command:
        cp src_file dst_file
    """
    command = list(command)
    # Replace each "${NAME}" token with its value from kwargs, if given.
    for i, token in enumerate(command):
        m = CMD_VARIABLE_RE.match(token)
        if m and m.group(1) in kwargs:
            command[i] = kwargs[m.group(1)]
    # Success == zero exit status.
    return subprocess.call(command) == 0
|
def envCheckFlag(self, name, default=False):
    """Check graph flag for enabling/disabling attributes through the use
    of <name> environment variable.

    @param name: Name of flag. (Also determines the environment variable
        name.)
    @param default: Boolean (True or False). Default value for flag.
    @return: Return True if the flag is enabled.
    """
    # Cached value takes precedence.  (dict.has_key() was removed in
    # Python 3; the `in` operator is equivalent on both versions.)
    if name in self._flags:
        return self._flags[name]
    val = self._env.get(name)
    if val is None:
        return default
    if val.lower() in ['yes', 'on']:
        self._flags[name] = True
        return True
    if val.lower() in ['no', 'off']:
        self._flags[name] = False
        return False
    raise AttributeError("Value for flag %s, must be yes, no, on or off" % name)
|
def access_token(self):
    """Get access_token."""
    # With caching enabled, fall back to the stored credential when no
    # token is held in memory yet.
    if self.cache_token:
        return self.access_token_ or self._resolve_credential('access_token')
    return self.access_token_
|
def service_info(self, name):
    """Pull descriptive info of a service by name.

    Information returned includes the service's user friendly name and
    whether it was preregistered or added dynamically.

    Returns:
        dict: A dictionary of service information with the following keys
        set:
            long_name (string): The user friendly name of the service
            preregistered (bool): Whether the service was explicitly
                called out as a preregistered service.
    """
    # Run the async client call synchronously on the shared loop.
    return self._loop.run_coroutine(self._client.service_info(name))
|
def getPinProperties(cardConnection, featureList=None, controlCode=None):
    """return the PIN_PROPERTIES structure

    @param cardConnection: L{CardConnection} object
    @param featureList: feature list as returned by L{getFeatureRequest()}
    @param controlCode: control code for L{FEATURE_IFD_PIN_PROPERTIES}
    @rtype: dict
    @return: a dict
    """
    if controlCode is None:
        if featureList is None:
            featureList = getFeatureRequest(cardConnection)
        controlCode = hasFeature(featureList, FEATURE_IFD_PIN_PROPERTIES)
    if controlCode is None:
        # Feature not supported by the reader.
        return {'raw': []}
    response = cardConnection.control(controlCode, [])
    return {
        'raw': response,
        'LcdLayoutX': response[0],
        'LcdLayoutY': response[1],
        'EntryValidationCondition': response[2],
        'TimeOut2': response[3],
    }
|
def openfile(filename, mode="rt", *args,
             expanduser=False, expandvars=False, makedirs=False, **kwargs):
    """Open filename and return a corresponding file object."""
    # "-" / None mean standard input or output, depending on the mode.
    if filename in ("-", None):
        return sys.stdin if "r" in mode else sys.stdout
    if expanduser:
        filename = os.path.expanduser(filename)
    if expandvars:
        filename = os.path.expandvars(filename)
    # Optionally create the parent directory when writing/appending.
    if makedirs and ("a" in mode or "w" in mode):
        parentdir = os.path.dirname(filename)
        if not os.path.isdir(parentdir):
            os.makedirs(parentdir)
    # Choose an opener based on the compression suffix; the compression
    # modules may be None if unavailable at import time.
    if filename.endswith(".gz"):
        if gzip is None:
            raise NotImplementedError
        _open = gzip.open
    elif filename.endswith(".bz2"):
        if bz2 is None:
            raise NotImplementedError
        _open = bz2.open
    elif filename.endswith((".xz", ".lzma")):
        if lzma is None:
            raise NotImplementedError
        _open = lzma.open
    else:
        _open = open
    return _open(filename, mode, *args, **kwargs)
|
def fastqIterate(infile):
    '''iterate over contents of fastq file.'''
    def _as_text(b):
        # Accept both text- and byte-mode file handles.
        return b if type(b) == str else b.decode("utf-8")
    while True:
        line1 = _as_text(infile.readline())
        if not line1:
            break
        if not line1.startswith('@'):
            U.error("parsing error: expected '@' in line %s" % line1)
        line2 = _as_text(infile.readline())
        line3 = _as_text(infile.readline())
        if not line3.startswith('+'):
            U.error("parsing error: expected '+' in line %s" % line3)
        line4 = _as_text(infile.readline())
        if not line4:
            # Incomplete entry at end of file.
            U.error("incomplete entry for %s" % line1)
        # Strip the '@' marker and trailing newlines.
        yield Record(line1[1:-1], line2[:-1], line4[:-1])
|
def offset_overlays(self, text, offset=0, **kw):
    """Generate overlays after offset.

    :param text: The text to be searched.
    :param offset: Match starting that index. If none just search.
    :returns: An overlay or None
    """
    # OverlayedText takes care of unicode issues (may be a bit slower).
    if not isinstance(text, OverlayedText):
        text = OverlayedText(text)
    # NOTE(review): `unicode` is Python-2-only; presumably this module
    # targets Python 2 or aliases the name elsewhere — confirm.
    for m in self.regex.finditer(unicode(text)[offset:]):
        yield Overlay(
            text,
            (offset + m.start(), offset + m.end()),
            props=self.props,
            value=self.value(rxmatch=m),
        )
|
def list_space_systems(self, page_size=None):
    """Lists the space systems visible to this client.

    Space systems are returned in lexicographical order.

    :rtype: :class:`.SpaceSystem` iterator
    """
    params = {}
    if page_size is not None:
        params['limit'] = page_size
    # Lazily paginate over the MDB space-systems endpoint.
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/space-systems'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListSpaceSystemsResponse,
        items_key='spaceSystem',
        item_mapper=SpaceSystem,
    )
|
def set_iscsi_initiator_info(self, initiator_iqn):
    """Set iSCSI initiator information in iLO.

    :param initiator_iqn: Initiator IQN for iLO.
    :raises: IloError, on an error from iLO.
    :raises: IloCommandNotSupportedInBiosError, if the system is
        in the BIOS boot mode (iSCSI settings require UEFI).
    """
    sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    # iSCSI initiator settings can only be written in UEFI boot mode.
    if (self._is_boot_mode_uefi()):
        iscsi_data = {'iSCSIInitiatorName': initiator_iqn}
        try:
            # Push the setting through the BIOS iSCSI Redfish resource.
            (sushy_system.bios_settings.iscsi_resource.iscsi_settings.update_iscsi_settings(iscsi_data))
        except sushy.exceptions.SushyError as e:
            # Wrap the transport-level error in the driver's exception type.
            msg = (self._("The Redfish controller has failed to update " "iSCSI settings. Error %(error)s") % {'error': str(e)})
            LOG.debug(msg)
            raise exception.IloError(msg)
    else:
        msg = 'iSCSI initiator cannot be updated in BIOS boot mode'
        raise exception.IloCommandNotSupportedInBiosError(msg)
|
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
    """Return the decoded fragment identifier, or `default` if the
    original URI did not contain a fragment component."""
    frag = self.fragment
    # Guard clause: no fragment component at all.
    if frag is None:
        return default
    return uridecode(frag, encoding, errors)
|
def from_payload(self, payload):
    """Initialize this frame from binary data.

    Layout: byte 0 is the scene count, then one 65-byte record per
    scene (1-byte number + 64-byte name), and the final byte is the
    number of scenes still to come in later frames.
    """
    count = payload[0]
    self.remaining_scenes = payload[-1]
    expected_len = count * 65 + 2
    if len(payload) != expected_len:
        raise PyVLXException('scene_list_notification_wrong_length')
    self.scenes = []
    for idx in range(count):
        record = payload[idx * 65 + 1:idx * 65 + 66]
        self.scenes.append((record[0], bytes_to_string(record[1:])))
|
def login(self, username=None, password=None):
    """Log in to the GMQL service before any remote operation.

    Two modes are supported:

    * Guest mode: call with no arguments to get a temporary guest account.
    * Authenticated mode: pass both username and password for a stable
      remote account.

    :param username: (optional)
    :param password: (optional)
    :raises ValueError: if exactly one of username/password is given
    :raises ConnectionError: if no authentication token is returned
    :return: None
    """
    if username is None and password is None:
        token = self.__login_guest()
    elif username is not None and password is not None:
        token, full_name = self.__login_credentials(username, password)
        self.logger.info("You are logged as {}".format(full_name))
    else:
        raise ValueError("you have to specify both username and password or nothing")
    if token is None:
        raise ConnectionError("Impossible to retrieve the authentication token")
    self.auth_token = token
|
def URem(a: BitVec, b: BitVec) -> BitVec:
    """Create an unsigned remainder expression.

    :param a: dividend bit vector
    :param b: divisor bit vector
    :return: a BitVec wrapping z3's unsigned remainder of ``a`` by ``b``
    """

    # Delegate to the shared helper that lifts a z3 binary op onto BitVec.
    return _arithmetic_helper(a, b, z3.URem)
|
def _assert_explicit_vr(dicom_input):
    """Assert that explicit VR is used; raise on the implicit-VR
    transfer syntax UID ('1.2.840.10008.1.2')."""
    # Validation can be switched off globally.
    if not settings.validate_multiframe_implicit:
        return
    transfer_syntax = dicom_input[0].file_meta[0x0002, 0x0010].value
    if transfer_syntax == '1.2.840.10008.1.2':
        raise ConversionError('IMPLICIT_VR_ENHANCED_DICOM')
|
def handle_new_user(self, provider, access, info):
    "Create a shell auth.User, link it to the access record, and redirect."
    user = self.get_or_create_user(provider, access, info)
    access.user = user
    # Persist the user link with a queryset update (no model save() side effects).
    AccountAccess.objects.filter(pk=access.pk).update(user=user)
    # Re-fetch through the auth backend so login() receives a
    # backend-annotated user instance.
    user = authenticate(provider=access.provider, identifier=access.identifier)
    login(self.request, user)
    return redirect(self.get_login_redirect(provider, user, access, True))
|
def _get_from_cache(self, expr):
    """Obtain the cached result for ``expr`` from the parent class and,
    on a hit, indent it for the current print level.

    :returns: (is_cached, rendered_or_None)
    """
    hit, cached = super()._get_from_cache(expr)
    if not hit:
        return False, None
    # Indent the cached rendering to match the current nesting depth.
    return True, indent(cached, " " * self._print_level)
|
def minver_error(pkg_name):
    """Report error about missing minimum version constraint and exit."""
    message = ('ERROR: specify minimal version of "{}" using '
               '">=" or "=="'.format(pkg_name))
    print(message, file=sys.stderr)
    sys.exit(1)
|
def run(self):
    """Send the contents of a local file to a remote data service by
    uploading it in parallel chunks."""
    processes = []
    progress_queue = ProgressQueue(Queue())
    # Split the file into chunks, then group chunks into one parcel per worker.
    num_chunks = ParallelChunkProcessor.determine_num_chunks(self.config.upload_bytes_per_chunk, self.local_file.size)
    work_parcels = ParallelChunkProcessor.make_work_parcels(self.config.upload_workers, num_chunks)
    # Start one upload process per parcel; each reports via progress_queue.
    for (index, num_items) in work_parcels:
        processes.append(self.make_and_start_process(index, num_items, progress_queue))
    # Block until all chunks are done, relaying progress to the watcher.
    wait_for_processes(processes, num_chunks, progress_queue, self.watcher, self.local_file)
|
def broadcast(self, clients, msg):
    """Optimized broadcast implementation. Depending on the type of the
    session, the message is json-encoded at most once and delivered via
    either send_jsonified or send_message.

    @param clients: Clients iterable
    @param msg: Message to send
    """
    encoded = None  # lazily-encoded JSON, shared by all JSON sessions
    delivered = 0
    for client in clients:
        session = client.session
        if session.is_closed:
            continue
        if session.send_expects_json:
            if encoded is None:
                encoded = proto.json_encode(msg)
            session.send_jsonified(encoded, stats=False)
        else:
            session.send_message(msg, stats=False)
        delivered += 1
    self.stats.packSent(delivered)
|
def lambda_tilde(mass1, mass2, lambda1, lambda2):
    """The effective lambda parameter.

    The mass-weighted dominant effective lambda parameter defined in
    https://journals.aps.org/prd/pdf/10.1103/PhysRevD.91.043002
    """
    m1, m2, lambda1, lambda2, input_is_array = ensurearray(mass1, mass2, lambda1, lambda2)
    lsum = lambda1 + lambda2
    ldiff, _ = ensurearray(lambda1 - lambda2)
    # Flip the sign where m1 < m2 so the difference is always
    # lambda(primary) - lambda(secondary).
    mask = m1 < m2
    ldiff[mask] = -ldiff[mask]
    eta = eta_from_mass1_mass2(m1, m2)
    # Symmetric and antisymmetric contributions of the PhysRevD.91.043002 formula.
    p1 = (lsum) * (1 + 7. * eta - 31 * eta ** 2.0)
    p2 = (1 - 4 * eta) ** 0.5 * (1 + 9 * eta - 11 * eta ** 2.0) * (ldiff)
    # formatreturn unwraps scalars when the inputs were scalars.
    return formatreturn(8.0 / 13.0 * (p1 + p2), input_is_array)
|
def register(self, service, name=''):
    """Expose a given service to this API.

    ``service`` may be: a sub-:class:`ApiFactory`, a module, a class, an
    object carrying an ``endpoint`` attribute, a dict of services, or a
    list of services.  Dict/list entries that fail to register are
    silently skipped.

    :param service: the object to expose
    :param name: optional name to register under (defaults per kind)
    :raises RuntimeError: if ``service`` matches none of the known kinds
    """
    # expose a sub-factory
    if isinstance(service, ApiFactory):
        self.services[name] = (service.factory, None)
    # expose a module dynamically as a service
    elif inspect.ismodule(service):
        name = name or service.__name__.split('.')[-1]
        # exclude endpoints with patterns: pattern-bearing endpoints get
        # their own route instead of being served through the module.
        for obj in vars(service).values():
            endpoint = getattr(obj, 'endpoint', None)
            if isinstance(endpoint, Endpoint) and endpoint.pattern:
                route = Route('', endpoint.pattern)
                self.routes.append((route, endpoint))
        self.services[name] = (ModuleService, service)
    # expose a class dynamically as a service
    elif inspect.isclass(service):
        name = name or service.__name__
        self.services[name] = (ClassService, service)
    # expose an endpoint directly
    elif isinstance(getattr(service, 'endpoint', None), Endpoint):
        if service.endpoint.pattern:
            route = Route('', service.endpoint.pattern)
            self.routes.append((route, service.endpoint))
        else:
            self.services[service.endpoint.name] = (service.endpoint, None)
    # expose a scope (dict of services); unregistrable entries are skipped
    elif isinstance(service, dict):
        for srv in service.values():
            try:
                self.register(srv)
            except RuntimeError:
                pass
    # expose a list of services; unregistrable entries are skipped
    elif isinstance(service, list):
        for srv in service:
            try:
                self.register(srv)
            except RuntimeError:
                pass
    # expose a service directly
    else:
        raise RuntimeError('Invalid service provide: {0} ({1}).'.format(service, type(service)))
|
def custom(self, payload):
    """Store custom scimeta key/value pairs for this resource.

    :param payload: a key/value object containing the scimeta to store,
        e.g. {"weather": "sunny", "temperature": "80C"}
    :return: empty (200 status code)
    """
    url = "{url_base}/resource/{pid}/scimeta/custom/".format(
        url_base=self.hs.url_base, pid=self.pid)
    return self.hs._request('POST', url, data=payload)
|
def plot(self, channel_names, kind='histogram', gates=None, gate_colors=None, gate_lw=1, **kwargs):
    """Plot the flow cytometry data associated with the sample on the current axis.

    To produce the plot, follow up with a call to matplotlib's show() function.

    Parameters
    {graph_plotFCM_pars}
    {FCMeasurement_plot_pars}
    {common_plot_ax}
    gates : [None, Gate, list of Gate]
        Gate must be of type {_gate_available_classes}.
    gate_lw : float | iterable
        line width to use when drawing gates
        if float, uses the same line width for all gates
        if iterable, then cycles between the values
    kwargs : dict
        Additional keyword arguments to be passed to graph.plotFCM

    Returns
    None : if no data is present
    plot_output : output of plot command used to draw (e.g., output of hist)

    Examples
    >>> sample.plot('Y2-A', bins=100, alpha=0.7, color='green', normed=1)  # 1d histogram
    >>> sample.plot(['B1-A', 'Y2-A'], cmap=cm.Oranges, colorbar=False)  # 2d histogram
    """
    ax = kwargs.get('ax')
    channel_names = to_list(channel_names)
    gates = to_list(gates)
    plot_output = graph.plotFCM(self.data, channel_names, kind=kind, **kwargs)
    if gates is not None:
        if gate_colors is None:
            gate_colors = cycle(('b', 'g', 'r', 'm', 'c', 'y'))
        # collections.Iterable was removed in Python 3.10; the ABC lives
        # in collections.abc.
        if not isinstance(gate_lw, collections.abc.Iterable):
            gate_lw = [gate_lw]
        gate_lw = cycle(gate_lw)
        for (g, c, lw) in zip(gates, gate_colors, gate_lw):
            g.plot(ax=ax, ax_channels=channel_names, color=c, lw=lw)
    return plot_output
|
def conforms_to_template_filter(self, template_filter):
    """Check that this AttributeFilter conforms to the rules set by the template.

    - If self has attributes that template_filter does not contain, throw Exception
    - If a sub list is found, perform the first check recursively
    - If self has a value for an attribute, assign it to the final AttributeFilter
    - If not found, assign the value from the template

    :param template_filter: the AttributeFilter acting as the standard
    :returns: a new, merged AttributeFilter
    :raises TypeError: if template_filter is not the same class as self
    :raises exception.AttributeFilterDiffers: on keys absent from the template

    todo: rename as current name is mis-leading
    """
    if not isinstance(template_filter, self.__class__):
        raise TypeError("AttributeFilter can only check conformance against \
another template filter, %s provided" % template_filter.__class__.__name__)
    # keys from the template
    template_filter_keys = template_filter.keys()
    # Keys from the object itself
    this_filter_keys = self.keys()
    # 1. Check to see if the client has provided unwanted keys
    unwanted_keys = set(this_filter_keys) - set(template_filter_keys)
    if len(unwanted_keys) > 0:
        raise exception.AttributeFilterDiffers(list(unwanted_keys))
    # 2. Make a attribute_filter that we send back
    evaluated_attribute_filter = AttributeFilter()
    # 3. Evaluate the differences between the two, with template_filter as the standard
    for template_key in template_filter_keys:
        if template_key in this_filter_keys:
            value = getattr(self, template_key)
            # if sub filter and boolean provided with value of true, adopt
            # the template's whole default sub-filter
            if isinstance(value, bool) and value is True and isinstance(getattr(template_filter, template_key), AttributeFilter):
                setattr(evaluated_attribute_filter, template_key, getattr(template_filter, template_key))
            elif isinstance(value, bool):
                setattr(evaluated_attribute_filter, template_key, value)
            elif isinstance(value, self.__class__):  # Attribute lists sort themselves out, to produce sub Attribute Filters
                template_sub_list = getattr(template_filter, template_key)
                this_sub_list = getattr(self, template_key)
                setattr(evaluated_attribute_filter, template_key, this_sub_list.conforms_to_template_filter(template_sub_list))
            else:
                setattr(evaluated_attribute_filter, template_key, getattr(template_filter, template_key))
    return evaluated_attribute_filter
|
def show(self):
    """Pretty-print every instruction of this object, one per line, with
    its index, byte offset, opcode and disassembled output."""
    offset = 0
    for idx, ins in enumerate(self.get_instructions()):
        print("{:8d} (0x{:08x}) {:04x} {:30} {}".format(
            idx, offset, ins.get_op_value(), ins.get_name(), ins.get_output(self.idx)))
        offset += ins.get_length()
|
def __postCallAction_hwbp(self, event):
    """Handles hardware breakpoint events on return from the function.

    @type event: L{ExceptionEvent}
    @param event: Single step event.
    """
    # Remove the one shot hardware breakpoint
    # at the return address location in the stack.
    tid = event.get_tid()
    address = event.breakpoint.get_address()
    event.debug.erase_hardware_breakpoint(tid, address)
    # Call the "post" callback.
    try:
        self.__postCallAction(event)
    # Forget the parameters even if the callback raised.
    finally:
        self.__pop_params(tid)
|
def fixed_values(self):
    """A flat tuple of all values corresponding to ``fixed_parameters``
    and ``constants`` after applying any prefixes.

    The values mimic the order of those lists.
    """
    params = (prefix_factor(p) * p['value'] for p in self.fixed_parameters)
    consts = (prefix_factor(c) * get_constant(c['value']) for c in self.constants)
    return tuple(params) + tuple(consts)
|
def _get_api_sig(args):
    """Build the signature hash the Flickr API requires over post arguments.

    :param args: Arguments of the flickr request
    :type args: list of (name, value) pairs
    :return: api_sig, e.g. ('api_sig', 'abcdefg')
    :rtype: tuple
    """
    # Concatenate secret + name/value pairs, then md5 the UTF-8 bytes.
    raw = api_secret + ''.join(name + value for name, value in args)
    return 'api_sig', hashlib.md5(raw.encode('utf-8')).hexdigest()
|
def resize(img, width, height):
    """Resize an image to the given pixel dimensions (changes only the
    stored size, aspect ratio is not preserved).

    :param img: a PIL Image
    :param width: target width in pixels
    :param height: target height in pixels
    :return: the resized image
    """
    # Image.ANTIALIAS was removed in Pillow 10; Resampling.LANCZOS is its
    # replacement (available since Pillow 9.1). Fall back for old Pillow.
    try:
        resample = Image.Resampling.LANCZOS
    except AttributeError:
        resample = Image.ANTIALIAS
    return img.resize((width, height), resample)
|
def __x_product_aux(property_sets, seen_features):
    """Returns non-conflicting combinations of property sets.

    property_sets is a list of PropertySet instances. seen_features is a set of Property
    instances.

    Returns a tuple of:
    - list of lists of Property instances, such that within each list, no two Property instance
      have the same feature, and no Property is for feature in seen_features.
    - set of features we saw in property_sets
    """
    assert is_iterable_typed(property_sets, property_set.PropertySet)
    assert isinstance(seen_features, set)
    # Base case of the recursion: nothing left to combine.
    if not property_sets:
        return ([], set())
    properties = property_sets[0].all()
    # Features claimed by the head property set (free features excluded).
    these_features = set()
    for p in property_sets[0].non_free():
        these_features.add(p.feature)
    # Note: the algorithm as implemented here, as in original Jam code, appears to
    # detect conflicts based on features, not properties. For example, if command
    # line build request say:
    # <a>1/<b>1 c<1>/<b>1
    # It will decide that those two property sets conflict, because they both specify
    # a value for 'b' and will not try building "<a>1 <c1> <b1>", but rather two
    # different property sets. This is a topic for future fixing, maybe.
    if these_features & seen_features:
        # Head conflicts with what we have already seen: drop it entirely
        # and recurse on the tail.
        (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
        return (inner_result, inner_seen | these_features)
    else:
        result = []
        # Keep the head: recurse with its features marked as seen.
        (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
        if inner_result:
            for inner in inner_result:
                result.append(properties + inner)
        else:
            result.append(properties)
        if inner_seen & these_features:  # Some of elements in property_sets[1:] conflict with elements of property_sets[0],
            # Try again, this time omitting elements of property_sets[0]
            (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
            result.extend(inner_result2)
        return (result, inner_seen | these_features)
|
def diffmap(adata, **kwargs) -> Union[Axes, List[Axes], None]:
    """Scatter plot in Diffusion Map basis.

    Parameters
    {adata_color_etc}
    {scatter_bulk}
    {show_save_ax}

    Returns
    If ``show==False`` a :class:`~matplotlib.axes.Axes` or a list of it.
    """
    # Thin wrapper: delegate to the generic embedding scatter with the
    # 'diffmap' basis selected.
    return plot_scatter(adata, 'diffmap', **kwargs)
|
def wait_for_initial_conf(self, timeout=1.0):
    """Wait for the initial configuration from the arbiter.

    Sleeps in ``timeout``-long pauses, checking whether ``new_conf``
    has arrived, until it does or we are interrupted.

    :param timeout: pause duration between checks, in seconds
    :type timeout: int
    :return: None
    """
    logger.info("Waiting for initial configuration")
    started = time.time()
    # The arbiter has not yet pushed a configuration to us.
    while not (self.new_conf or self.interrupted):
        # Pause, also watching for a system clock change.
        self.make_a_pause(timeout, check_time_change=True)
    if self.interrupted:
        logger.info("Interrupted before getting the initial configuration")
    else:
        elapsed = time.time() - started
        logger.info("Got initial configuration, waited for: %.2f seconds", elapsed)
        statsmgr.timer('configuration.initial', elapsed)
|
def setup():
    """Walk the user though the Wallace setup."""
    # Create the Wallace config file if it does not already exist.
    config_name = ".wallaceconfig"
    config_path = os.path.join(os.path.expanduser("~"), config_name)
    if os.path.isfile(config_path):
        log("Wallace config file already exists.", chevrons=False)
        return
    # Copy the template config shipped alongside this module.
    log("Creating Wallace config file at ~/.wallaceconfig...", chevrons=False)
    module_dir = os.path.dirname(os.path.realpath(__file__))
    shutil.copyfile(os.path.join(module_dir, "config", config_name), config_path)
|
def serialize(element, strip=False):
    """A handy way to serialize an element to text.

    :param element: the XML element to serialize
    :param strip: when True, strip surrounding whitespace from the result
    :return: the element's text content as a str
    """
    raw = etree.tostring(element, method='text', encoding='utf-8')
    if strip:
        raw = raw.strip()
    return str(raw, encoding='utf-8')
|
def canonicalize(self, include_nodes=True, sorted=False):
    """Generates a canonical :class:`etc.Node` object from this mock node.

    :param include_nodes: include child nodes for directory nodes
    :param sorted: sort child nodes by key (note: shadows the builtin)
    """
    node_class = Directory if self.dir else Value
    # Copy only the attributes the target node class actually declares.
    kwargs = {attr: getattr(self, attr) for attr in node_class.__slots__}
    if self.dir:
        if include_nodes:
            # Recursively canonicalize children.
            nodes = [node.canonicalize() for node in six.viewvalues(kwargs['nodes'])]
            if sorted:
                nodes.sort(key=lambda n: n.key)
            kwargs['nodes'] = nodes
        else:
            kwargs['nodes'] = []
    return node_class(**kwargs)
|
def luhn_calc(number, chars=DIGITS):
    '''Calculate the Luhn check digit for ``number``.

    :param number: string
    :param chars: alphabet string; its length defines the base
    >>> luhn_calc('42')
    '''
    # Append a zero digit, checksum the result, then pick the digit
    # that makes the overall checksum zero.
    padded = str(number) + chars[0]
    return chars[-luhn_checksum(padded, chars)]
|
def register(self, name, option):
    """Register a new option with the namespace.

    Args:
        name (str): The name to register the option under.
        option (option.Option): The option object to register.

    Raises:
        TypeError: If the option is not an option.Option object.
        ValueError: If the name is already registered.
    """
    # Duplicate names are rejected before the type is validated.
    if name in self._options:
        raise ValueError("Option {0} already exists.".format(name))
    if not isinstance(option, opt.Option):
        raise TypeError("Options must be of type Option.")
    self._options[name] = option
|
def create_or_edit(self, id, seq, resource):  # pylint: disable=invalid-name,redefined-builtin
    """Create or edit a highlight.

    :param id: Result ID as an int.
    :param seq: TestResult sequence ID as an int.
    :param resource: :class:`highlights.Highlight <highlights.Highlight>` object
    :return: :class:`highlights.Highlight <highlights.Highlight>` object
    :rtype: highlights.Highlight
    """
    # Encode without the server-assigned fields, send, then decode the
    # full reply with an unrestricted schema.
    payload = self.service.encode(HighlightSchema(exclude=('id', 'seq')), resource)
    reply = self.service.edit(self._base(id, seq), resource.line, payload)
    return self.service.decode(HighlightSchema(), reply)
|
def get_read_buffers(self, size):
    """Get buffer(s) from which we can read data.

    When done reading, use :meth:`advance_read_index` to make the
    memory available for writing again.

    :param size: The number of elements desired.
    :type size: int
    :returns:
        * The number of elements available for reading (which might
          be less than the requested *size*).
        * The first buffer.
        * The second buffer.
    :rtype: (int, buffer, buffer)
    """
    # Out-parameters for PaUtil_GetRingBufferReadRegions: the ring may
    # wrap around, so the readable data can span up to two regions.
    ptr1 = self._ffi.new('void**')
    ptr2 = self._ffi.new('void**')
    size1 = self._ffi.new('ring_buffer_size_t*')
    size2 = self._ffi.new('ring_buffer_size_t*')
    # Wrap each region in a cffi buffer sized in bytes (elements * elementsize).
    return (self._lib.PaUtil_GetRingBufferReadRegions(self._ptr, size, ptr1, size1, ptr2, size2), self._ffi.buffer(ptr1[0], size1[0] * self.elementsize), self._ffi.buffer(ptr2[0], size2[0] * self.elementsize))
|
def add_observer(self, signal, observer):
    """Add an observer to the object.

    Raise an exception if the signal is not allowed.

    Parameters
    signal : str
        a valid signal.
    observer : @func
        a function that will be called when the signal is emitted.
    """
    # Validate first; registration only happens for allowed signals.
    self._is_allowed_signal(signal)
    self._add_observer(signal, observer)
|
def get_multi_q(self, sentinel='STOP'):
    '''Create a JoinableQueue that feeds this IndexQ from multiple processes.

    This helps indexq operate in a multiprocessing environment without each
    process needing its own IndexQ, and is a handy way to deal with
    thread/process safety. A background process is started that monitors the
    queue, de-queues items and adds them to this indexq. Pass the returned
    queue to worker processes; write the ``sentinel`` value ('STOP' by
    default) when done, and call join_indexer() afterwards to close out the
    queue and join the worker.
    '''
    queue = JoinableQueue()
    worker = Process(target=self._indexer_process, args=(queue, sentinel))
    worker.daemon = False
    worker.start()
    self.in_q = queue
    self.indexer_process = worker
    return queue
|
def local_filename(self, url=None, filename=None, decompress=False):
    """What local filename will we use within the cache directory
    for the given URL/filename/decompress options."""
    # Delegate the naming scheme entirely to the shared helper.
    return common.build_local_filename(url, filename, decompress)
|
def _value_format(self, value, serie, index):
    """Display value and cumulation."""
    # Cumulated y-value at this index (series are stacked).
    sum_ = serie.points[index][1]
    # Only the outermost serie shows the plain value: the last serie when
    # stacking from the top, otherwise the first one. Note that `and`
    # binds tighter than `or` in this condition.
    if serie in self.series and (self.stack_from_top and self.series.index(serie) == self._order - 1 or not self.stack_from_top and self.series.index(serie) == 0):
        return super(StackedLine, self)._value_format(value)
    # Inner series show "cumulated (+own contribution)".
    return '%s (+%s)' % (self._y_format(sum_), self._y_format(value))
|
def remove_notification_listener(self, notification_id):
    """Remove a previously added notification callback.

    Args:
        notification_id: The numeric id passed back from add_notification_listener

    Returns:
        True if the listener was found and removed, False otherwise.
    """
    # Search every notification type; ids are unique so the first match
    # is the only one.
    for listeners in self.notifications.values():
        for entry in listeners:
            if entry[0] == notification_id:
                listeners.remove(entry)
                return True
    return False
|
def get_error_message(exception, context=None, suggestion=None):
    """Convert exception into an ErrorMessage containing a stack trace.

    :param exception: Exception object.
    :type exception: Exception
    :param context: Optional context message.
    :type context: str
    :param suggestion: Optional suggestion; falls back to the exception's
        own ``suggestion`` attribute when present.
    :type suggestion: str
    .. see also:: https://github.com/inasafe/inasafe/issues/577
    :returns: An error message with stack trace info suitable for display.
    :rtype: ErrorMessage
    """
    # Human-readable exception name plus formatted traceback.
    name, trace = humanise_exception(exception)
    problem = m.Message(name)
    if exception is None or exception == '':
        problem.append = m.Text(tr('No details provided'))
    else:
        # Some project exceptions carry a rich Message payload.
        if hasattr(exception, 'message') and isinstance(exception.message, Message):
            problem.append = m.Text(str(exception.message.message))
        else:
            problem.append = m.Text(str(exception))
    # Explicit suggestion wins over the exception's own one.
    suggestion = suggestion
    if suggestion is None and hasattr(exception, 'suggestion'):
        suggestion = exception.suggestion
    error_message = ErrorMessage(problem, detail=context, suggestion=suggestion, traceback=trace)
    # Append each raw exception argument as a detail line.
    args = exception.args
    for arg in args:
        error_message.details.append(arg)
    return error_message
|
def FILER_STORAGES(self):
    """Filer config to set custom private media path.

    http://django-filer.readthedocs.org/en/0.9.4/settings.html#filer-storages
    """
    # Only meaningful when the custom nginx server is configured.
    if not self.FILER_CUSTOM_NGINX_SERVER:
        return {}
    public = {
        'main': {
            'ENGINE': self.default_file_storage,
            'OPTIONS': {},
            'UPLOAD_TO': 'filer.utils.generate_filename.by_date',
            'UPLOAD_TO_PREFIX': 'filer_public',
        },
        'thumbnails': {
            'ENGINE': self.default_file_storage,
            'OPTIONS': {},
            'THUMBNAIL_OPTIONS': {
                'base_dir': 'filer_public_thumbnails',
            },
        },
    }
    private = {
        'main': {
            'ENGINE': 'filer.storage.PrivateFileSystemStorage',
            'OPTIONS': {
                'location': self.filer_private_files_path,
                'base_url': '/smedia/filer_private/',
            },
            'UPLOAD_TO': 'filer.utils.generate_filename.by_date',
            'UPLOAD_TO_PREFIX': '',
        },
        'thumbnails': {
            'ENGINE': 'filer.storage.PrivateFileSystemStorage',
            'OPTIONS': {
                'location': self.filer_private_thumbnails_path,
                'base_url': '/smedia/filer_private_thumbnails/',
            },
            'THUMBNAIL_OPTIONS': {},
        },
    }
    return {'public': public, 'private': private}
|
def build_permuted_index(self, lshash, buckets, num_permutation, beam_size, num_neighbour):
    """Build a PermutedIndex and store it into the dict self.permutedIndexs,
    keyed by the hash's name.

    lshash: the binary lshash object (nearpy.hashes.lshash).
    buckets: the buckets object corresponding to lshash; obtainable from
        nearpy.storage.buckets[lshash.hash_name].
    num_permutation: the number of sorted randomly-permuted bucket key lists (SRPBKL).
    beam_size: beam size; see __init__() in nearpy.hashes.permutation.PermutedIndex.
    num_neighbour: number of neighbour bucket keys returned by get_neighbour_keys().
    """
    self.permutedIndexs[lshash.hash_name] = PermutedIndex(
        lshash, buckets, num_permutation, beam_size, num_neighbour)
|
def format(self, record):
    """Format a log record, temporarily swapping in a level-specific
    format string.

    :param record: the logging.LogRecord to format
    :return: the formatted message string
    """
    # Save the configured format and substitute a per-level one.
    format_orig = self._fmt
    self._fmt = self.get_level_fmt(record.levelno)
    # Inject extra fields the format strings may reference.
    record.prefix = self.prefix
    record.plugin_id = self.plugin_id
    # NOTE(review): mutating self._fmt is the Python 2 Formatter idiom;
    # on Python 3 the active format lives in self._style._fmt — confirm
    # which runtime this targets.
    result = logging.Formatter.format(self, record)
    # Restore the original format so other records are unaffected.
    self._fmt = format_orig
    return result
|
def uptime():
    """Uptime of the host machine (Linux only: reads /proc/uptime)."""
    from datetime import timedelta
    with open('/proc/uptime', 'r') as handle:
        seconds = float(handle.readline().split()[0])
    bob.says(str(timedelta(seconds=seconds)))
|
def __load_settings_from_dict(self, settings):
    """Loads settings info from a settings dict.

    :param settings: SAML Toolkit Settings
    :type settings: dict
    :returns: True if the settings info is valid
    :rtype: boolean
    """
    errors = self.check_settings(settings)
    if errors:
        # Keep the validation errors for later inspection.
        self.__errors = errors
        return False
    self.__errors = []
    self.__sp = settings['sp']
    self.__idp = settings.get('idp', {})
    self.__strict = settings.get('strict', False)
    self.__debug = settings.get('debug', False)
    self.__security = settings.get('security', {})
    self.__contacts = settings.get('contactPerson', {})
    self.__organization = settings.get('organization', {})
    self.__add_default_values()
    return True
|
def write_properties_from_env(cls, path):
    '''Write a *.properties file for KCL's MultiLangDaemon, mapping
    environment variables to property names via ENV_TO_PROPERTY.'''
    lines = ["# Autogenerated by kclboot v%s on %s\n\n" % (PACKAGE_VERSION, datetime.now())]
    for env_var, prop_var in ENV_TO_PROPERTY.items():
        value = os.environ.get(env_var)
        # Unset or empty variables are simply omitted.
        if value:
            lines.append("%s=%s\n" % (prop_var, value))
    with open(path, 'w') as handle:
        handle.writelines(lines)
|
def _operate(self, other, operation, inplace=True):
    """Combine this CanonicalDistribution with ``other`` (product or divide).

    Both operands are converted to canonical factors, combined there,
    and the result converted back to a joint Gaussian.

    Parameters
    ----------
    other : CanonicalDistribution
        The CanonicalDistribution to be multiplied.
    operation : str
        'product' for multiplication and 'divide' for division.

    Returns
    -------
    CanonicalDistribution or None:
        if inplace=True (default) returns None
        if inplace=False returns a new CanonicalDistribution instance.

    Examples
    --------
    >>> import numpy as np
    >>> from pgmpy.factors.distributions import GaussianDistribution as GD
    >>> dis1 = GD(['x1', 'x2', 'x3'], np.array([[1], [-3], [4]]),
    ...           np.array([[4, 2, -2], [2, 5, -5], [-2, -5, 8]]))
    >>> dis2 = GD(['x3', 'x4'], [1, 2], [[2, 3], [5, 6]])
    >>> dis3 = dis1 * dis2
    >>> dis3.covariance
    array([[ 3.6,  1. , -0.4, -0.6],
           [ 1. ,  2.5, -1. , -1.5],
           [-0.4, -1. ,  1.6,  2.4],
           [-1. , -2.5,  4. ,  4.5]])
    >>> dis3.mean
    array([[ 1.6],
           [-1.5],
           [ 1.6],
           [ 3.5]])
    """
    # NOTE(review): when inplace=True this computes phi but neither
    # returns it nor visibly mutates self — confirm whether callers rely
    # only on the inplace=False path.
    phi = self.to_canonical_factor()._operate(other.to_canonical_factor(), operation, inplace=False).to_joint_gaussian()
    if not inplace:
        return phi
|
def _default_key_normalizer ( key_class , request_context ) :
"""Create a pool key out of a request context dictionary .
According to RFC 3986 , both the scheme and host are case - insensitive .
Therefore , this function normalizes both before constructing the pool
key for an HTTPS request . If you wish to change this behaviour , provide
alternate callables to ` ` key _ fn _ by _ scheme ` ` .
: param key _ class :
The class to use when constructing the key . This should be a namedtuple
with the ` ` scheme ` ` and ` ` host ` ` keys at a minimum .
: type key _ class : namedtuple
: param request _ context :
A dictionary - like object that contain the context for a request .
: type request _ context : dict
: return : A namedtuple that can be used as a connection pool key .
: rtype : PoolKey"""
|
# Since we mutate the dictionary , make a copy first
context = request_context . copy ( )
context [ 'scheme' ] = context [ 'scheme' ] . lower ( )
context [ 'host' ] = context [ 'host' ] . lower ( )
# These are both dictionaries and need to be transformed into frozensets
for key in ( 'headers' , '_proxy_headers' , '_socks_options' ) :
if key in context and context [ key ] is not None :
context [ key ] = frozenset ( context [ key ] . items ( ) )
# The socket _ options key may be a list and needs to be transformed into a
# tuple .
socket_opts = context . get ( 'socket_options' )
if socket_opts is not None :
context [ 'socket_options' ] = tuple ( socket_opts )
# Map the kwargs to the names in the namedtuple - this is necessary since
# namedtuples can ' t have fields starting with ' _ ' .
for key in list ( context . keys ( ) ) :
context [ 'key_' + key ] = context . pop ( key )
# Default to ` ` None ` ` for keys missing from the context
for field in key_class . _fields :
if field not in context :
context [ field ] = None
return key_class ( ** context )
|
def _convert ( x , factor1 , factor2 ) :
"""Converts mixing ratio x in comp1 - comp2 tie line to that in
c1 - c2 tie line .
Args :
x ( float ) : Mixing ratio x in comp1 - comp2 tie line , a float
between 0 and 1.
factor1 ( float ) : Compositional ratio between composition c1 and
processed composition comp1 . E . g . , factor for
Composition ( ' SiO2 ' ) and Composition ( ' O ' ) is 2.0.
factor2 ( float ) : Compositional ratio between composition c2 and
processed composition comp2.
Returns :
Mixing ratio in c1 - c2 tie line , a float between 0 and 1."""
|
return x * factor2 / ( ( 1 - x ) * factor1 + x * factor2 )
|
def read_mash_output(result_file):
    """Parse a tab-delimited result file generated by ``mash dist``.

    :param result_file: Tab-delimited result file generated by mash dist.
    :return: list of MashResult objects, one per input line, each with
        attributes reference, query, distance, pvalue, and matching_hash.
    """
    # Each line of the file is one comparison; MashResult does the
    # per-line parsing.
    with open(result_file) as handle:
        return [MashResult(line) for line in handle.readlines()]
|
def install(self, paths, maker, **kwargs):
    """Install a wheel to the specified paths.

    If kwarg ``warner`` is specified, it should be a callable, which will
    be called with two tuples indicating the wheel version of this software
    and the wheel version in the file, if there is a discrepancy in the
    versions. This can be used to issue any warnings or raise any
    exceptions.

    If kwarg ``lib_only`` is True, only the purelib/platlib files are
    installed, and the headers, scripts, data and dist-info metadata are
    not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
    bytecode will try to use file-hash based invalidation (PEP-552) on
    supported interpreter versions (CPython 2.7+).

    The return value is a :class:`InstalledDistribution` instance unless
    ``options.lib_only`` is True, in which case the return value is ``None``.
    """
    dry_run = maker.dry_run
    warner = kwargs.get('warner')
    lib_only = kwargs.get('lib_only', False)
    bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    data_dir = '%s.data' % name_ver
    info_dir = '%s.dist-info' % name_ver
    # Well-known member names inside the wheel archive.
    metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
    wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
    record_name = posixpath.join(info_dir, 'RECORD')
    # Zip members are bytes streams; decode them as UTF-8 text.
    wrapper = codecs.getreader('utf-8')
    with ZipFile(pathname, 'r') as zf:
        # Parse the WHEEL metadata (RFC 822 style) to learn the wheel
        # format version and whether this is a pure-Python wheel.
        with zf.open(wheel_metadata_name) as bwf:
            wf = wrapper(bwf)
            message = message_from_file(wf)
        wv = message['Wheel-Version'].split('.', 1)
        file_version = tuple([int(i) for i in wv])
        # Let the caller know about a wheel-version mismatch, if asked.
        if (file_version != self.wheel_version) and warner:
            warner(self.wheel_version, file_version)
        if message['Root-Is-Purelib'] == 'true':
            libdir = paths['purelib']
        else:
            libdir = paths['platlib']
        # Index RECORD rows by archive path; rows carry (path, hash, size).
        records = {}
        with zf.open(record_name) as bf:
            with CSVReader(stream=bf) as reader:
                for row in reader:
                    p = row[0]
                    records[p] = row
        # Prefixes (with trailing separator) used to classify members.
        data_pfx = posixpath.join(data_dir, '')
        info_pfx = posixpath.join(info_dir, '')
        script_pfx = posixpath.join(data_dir, 'scripts', '')
        # make a new instance rather than a copy of maker's,
        # as we mutate it
        fileop = FileOperator(dry_run=dry_run)
        fileop.record = True
        # so we can rollback if needed
        bc = not sys.dont_write_bytecode
        # Double negatives. Lovely!
        outfiles = []
        # for RECORD writing
        # for script copying / shebang processing
        workdir = tempfile.mkdtemp()
        # set target dir later
        # we default add_launchers to False, as the
        # Python Launcher should be used instead
        maker.source_dir = workdir
        maker.target_dir = None
        try:
            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # The signature file won't be in RECORD,
                # and we don't currently don't do anything with it
                if u_arcname.endswith('/RECORD.jws'):
                    continue
                # NOTE: a member missing from RECORD raises KeyError here.
                row = records[u_arcname]
                # Verify the recorded size, when one is present.
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                # Verify the recorded digest ("kind=value"), when present.
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)
                # In lib_only mode, dist-info and data members are skipped.
                if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                    logger.debug('lib_only: skipping %s', u_arcname)
                    continue
                # Scripts get shebang processing; .exe launchers do not.
                is_script = (u_arcname.startswith(script_pfx)
                             and not u_arcname.endswith('.exe'))
                if u_arcname.startswith(data_pfx):
                    # data members map onto the named scheme path,
                    # e.g. "<dist>.data/headers/foo.h" -> paths['headers'].
                    _, where, rp = u_arcname.split('/', 2)
                    outfile = os.path.join(paths[where], convert_path(rp))
                else:
                    # meant for site-packages.
                    if u_arcname in (wheel_metadata_name, record_name):
                        continue
                    outfile = os.path.join(libdir, convert_path(u_arcname))
                if not is_script:
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, outfile)
                    outfiles.append(outfile)
                    # Double check the digest of the written file
                    if not dry_run and row[1]:
                        with open(outfile, 'rb') as bf:
                            data = bf.read()
                            _, newdigest = self.get_hash(data, kind)
                            if newdigest != digest:
                                raise DistlibException('digest mismatch '
                                                       'on write for '
                                                       '%s' % outfile)
                    # Byte-compile installed .py files unless the
                    # interpreter was started with -B.
                    if bc and outfile.endswith('.py'):
                        try:
                            pyc = fileop.byte_compile(outfile,
                                                      hashed_invalidation=bc_hashed_invalidation)
                            outfiles.append(pyc)
                        except Exception:
                            # Don't give up if byte-compilation fails,
                            # but log it and perhaps warn the user
                            logger.warning('Byte-compilation failed',
                                           exc_info=True)
                else:
                    # Copy the script to the work dir, then let the maker
                    # rewrite its shebang into the final location.
                    fn = os.path.basename(convert_path(arcname))
                    workname = os.path.join(workdir, fn)
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, workname)
                    dn, fn = os.path.split(outfile)
                    maker.target_dir = dn
                    filenames = maker.make(fn)
                    fileop.set_executable_mode(filenames)
                    outfiles.extend(filenames)
            if lib_only:
                logger.debug('lib_only: returning None')
                dist = None
            else:
                # Generate scripts
                # Try to get pydist.json so we can see if there are
                # any commands to generate. If this fails (e.g. because
                # of a legacy wheel), log a warning but don't give up.
                commands = None
                # NOTE: rebinds file_version from the tuple above to the
                # metadata version string.
                file_version = self.info['Wheel-Version']
                if file_version == '1.0':
                    # Use legacy info
                    ep = posixpath.join(info_dir, 'entry_points.txt')
                    try:
                        with zf.open(ep) as bwf:
                            epdata = read_exports(bwf)
                        commands = {}
                        for key in ('console', 'gui'):
                            k = '%s_scripts' % key
                            if k in epdata:
                                # Re-encode entry points as "prefix:suffix
                                # [flags]" specifications for the maker.
                                commands['wrap_%s' % key] = d = {}
                                for v in epdata[k].values():
                                    s = '%s:%s' % (v.prefix, v.suffix)
                                    if v.flags:
                                        s += ' %s' % v.flags
                                    d[v.name] = s
                    except Exception:
                        logger.warning('Unable to read legacy script '
                                       'metadata, so cannot generate '
                                       'scripts')
                else:
                    try:
                        with zf.open(metadata_name) as bwf:
                            wf = wrapper(bwf)
                            commands = json.load(wf).get('extensions')
                            if commands:
                                commands = commands.get('python.commands')
                    except Exception:
                        logger.warning('Unable to read JSON metadata, so '
                                       'cannot generate scripts')
                if commands:
                    console_scripts = commands.get('wrap_console', {})
                    gui_scripts = commands.get('wrap_gui', {})
                    if console_scripts or gui_scripts:
                        script_dir = paths.get('scripts', '')
                        if not os.path.isdir(script_dir):
                            raise ValueError('Valid script path not '
                                             'specified')
                        maker.target_dir = script_dir
                        for k, v in console_scripts.items():
                            script = '%s = %s' % (k, v)
                            filenames = maker.make(script)
                            fileop.set_executable_mode(filenames)
                        if gui_scripts:
                            options = {'gui': True}
                            for k, v in gui_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script, options)
                                fileop.set_executable_mode(filenames)
                p = os.path.join(libdir, info_dir)
                dist = InstalledDistribution(p)
                # Write SHARED
                paths = dict(paths)
                # don't change passed in dict
                del paths['purelib']
                del paths['platlib']
                paths['lib'] = libdir
                p = dist.write_shared_locations(paths, dry_run)
                if p:
                    outfiles.append(p)
                # Write RECORD
                dist.write_installed_files(outfiles, paths['prefix'],
                                           dry_run)
            return dist
        except Exception:  # pragma: no cover
            # Undo every recorded file operation, then re-raise.
            logger.exception('installation failed.')
            fileop.rollback()
            raise
        finally:
            shutil.rmtree(workdir)
|
def context_serve(context, configfile, listen_addr, listen_port, logfile, debug, daemon, uid, gid, pidfile, umask, rundir):
    """Takes a context object, which implements the __enter__/__exit__ "with" interface
    and starts a server within that context.

    This method is a refactored single-place for handling the server-run code whether
    running in daemon or non-daemon mode. It is invoked with a dummy (passthrough)
    context object for the non-daemon use case.

    @param context: The context object that implements __enter__/__exit__ "with" methods.
    @type context: C{object}
    @param configfile: Path to the configuration file (passed through to logging setup).
    @param debug: When true, log at DEBUG level and start the stats heartbeat thread.
    @raise Exception: Any underlying exception will be logged but then re-raised.
    @see: server_from_config()
    """
    # NOTE(review): listen_addr, listen_port, daemon, uid, gid, pidfile,
    # umask and rundir are not read here — presumably consumed by the
    # context / server_from_config() via global config; confirm at caller.
    global global_config
    server = None
    try:
        with context:
            # There's a possibility here that init_logging() will throw an exception. If it does,
            # AND we're in a daemon context, then we're not going to be able to do anything with it.
            # We've got no stderr/stdout here; and so (to my knowledge) no reliable (& cross-platform),
            # way to display errors.
            level = logging.DEBUG if debug else logging.INFO
            init_logging(logfile=logfile, loglevel=level, configfile=configfile)
            server = server_from_config()
            logger.info("Stomp server listening on %s:%s" % server.server_address)
            if debug:
                poll_interval = float(global_config.get('coilmq', 'debug.stats_poll_interval'))
                if poll_interval:
                    # Setting poll_interval to 0 effectively disables it.
                    def diagnostic_loop(server):
                        # Periodically log queue sizes and subscriber counts.
                        log = logger
                        while True:
                            log.debug("Stats heartbeat -------------------------------")
                            store = server.queue_manager.store
                            for dest in store.destinations():
                                log.debug("Queue %s: size=%s, subscribers=%s" % (dest, store.size(dest), server.queue_manager.subscriber_count(dest)))
                            # TODO: Add number of subscribers?
                            time.sleep(poll_interval)
                    # Daemon thread: dies with the process, never blocks shutdown.
                    diagnostic_thread = threading.Thread(target=diagnostic_loop, name='DiagnosticThread', args=(server,))
                    diagnostic_thread.daemon = True
                    diagnostic_thread.start()
            # Blocks until interrupted or an error occurs.
            server.serve_forever()
    except (KeyboardInterrupt, SystemExit):
        logger.info("Stomp server stopped by user interrupt.")
        raise SystemExit()
    except Exception as e:
        logger.error("Stomp server stopped due to error: %s" % e)
        logger.exception(e)
        raise SystemExit()
    finally:
        # Always release the listening socket, even on error paths.
        if server:
            server.server_close()
|
def reset(self):
    '''Reset Stan model and all tracked distributions and parameters.'''
    # All tracked collections start out as fresh, empty lists.
    for attr in ('parameters', 'transformed_parameters', 'expressions',
                 'data', 'transformed_data', 'model', 'mu_cont', 'mu_cat'):
        setattr(self, attr, [])
    self.X = {}
    # variables to suppress in output. Stan uses limited set for variable
    # names, so track variable names we may need to simplify for the model
    # code and then sub back later.
    self._original_names = {}
    self._suppress_vars = ['yhat', 'lp__']
|
def get_graph_by_ids(self, network_ids: List[int]) -> BELGraph:
    """Get a combine BEL Graph from a list of network identifiers."""
    # Single identifier: no union needed, fetch the graph directly.
    if len(network_ids) == 1:
        return self.get_graph_by_id(network_ids[0])
    log.debug('getting graph by identifiers: %s', network_ids)
    component_graphs = self.get_graphs_by_ids(network_ids)
    log.debug('getting union of graphs: %s', network_ids)
    return union(component_graphs)
|
def _ScopesFromMetadataServer(self, scopes):
    """Returns instance scopes based on GCE metadata server."""
    # Guard clauses: must be on GCE and the service account must exist.
    if not util.DetectGce():
        raise exceptions.ResourceUnavailableError(
            'GCE credentials requested outside a GCE instance')
    if not self.GetServiceAccount(self.__service_account_name):
        raise exceptions.ResourceUnavailableError(
            'GCE credentials requested but service account '
            '%s does not exist.' % self.__service_account_name)
    # No explicit request: fall back to whatever the instance grants.
    if not scopes:
        return self.GetInstanceScopes()
    requested_scopes = util.NormalizeScopes(scopes)
    instance_scopes = self.GetInstanceScopes()
    # Strict-superset check: any requested scope outside the instance's
    # grants is an error.
    if requested_scopes > instance_scopes:
        raise exceptions.CredentialsError(
            'Instance did not have access to scopes %s' % (
                sorted(list(requested_scopes - instance_scopes)),))
    return scopes
|
def service(ctx, opts):
    """Check the status of the Cloudsmith service."""
    click.echo("Retrieving service status ... ", nl=False)
    with handle_api_exceptions(ctx, opts=opts, context_msg="Failed to retrieve status!"):
        with maybe_spinner(opts):
            status, version = get_status(with_version=True)
    click.secho("OK", fg="green")

    config = cloudsmith_api.Configuration()
    styled_host = click.style(config.host, bold=True)
    styled_status = click.style(status, bold=True)
    styled_version = click.style(version, bold=True)
    click.echo()
    click.echo("The service endpoint is: %(endpoint)s" % {"endpoint": styled_host})
    click.echo("The service status is: %(status)s" % {"status": styled_status})
    # Leave the line open so the freshness marker lands on the same line.
    click.echo(
        "The service version is: %(version)s " % {"version": styled_version},
        nl=False,
    )

    api_version = get_api_version_info()
    service_is_newer = semver.compare(version, api_version) > 0
    if service_is_newer:
        click.secho("(maybe out-of-date)", fg="yellow")
        click.echo()
        click.secho(
            "The API library used by this CLI tool is built against "
            "service version: %(version)s"
            % {"version": click.style(api_version, bold=True)},
            fg="yellow",
        )
    else:
        click.secho("(up-to-date)", fg="green")
        click.echo()
        click.secho(
            "The API library used by this CLI tool seems to be up-to-date.",
            fg="green",
        )
|
def stop_pipeline(url, pipeline_id, auth, verify_ssl):
    """Stop a running pipeline. The API waits for the pipeline to be 'STOPPED' before returning.

    Args:
        url (str): the host url in the form 'http://host:port/'.
        pipeline_id (str): the ID of of the exported pipeline.
        auth (tuple): a tuple of username, and password.
        verify_ssl (bool): whether to verify ssl certificates

    Returns:
        dict: the response json
    """
    stop_endpoint = url + '/' + pipeline_id + '/stop'
    response = requests.post(stop_endpoint, headers=X_REQ_BY, auth=auth, verify=verify_ssl)
    # Surface HTTP-level failures immediately.
    response.raise_for_status()
    logging.info("Pipeline stop requested.")
    # Block until the pipeline actually reports the stopped state.
    poll_pipeline_status(STATUS_STOPPED, url, pipeline_id, auth, verify_ssl)
    logging.info('Pipeline stopped.')
    return response.json()
|
def _sum_by_samples ( seqs_freq , samples_order ) :
"""Sum sequences of a metacluster by samples ."""
|
n = len ( seqs_freq [ seqs_freq . keys ( ) [ 0 ] ] . freq . keys ( ) )
y = np . array ( [ 0 ] * n )
for s in seqs_freq :
x = seqs_freq [ s ] . freq
exp = [ seqs_freq [ s ] . freq [ sam ] for sam in samples_order ]
y = list ( np . array ( exp ) + y )
return y
|
def _set_cid_card(self, v, load=False):
    """Setter method for cid_card, mapped from YANG variable /system_monitor/cid_card (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_cid_card is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_cid_card() directly.
    """
    # Union-typed values carry a _utype coercion hook; apply it first so
    # the YANGDynClass constructor sees the underlying type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the generated container type, preserving the
        # YANG metadata (names, extensions, namespace) for this node.
        t = YANGDynClass(v, base=cid_card.cid_card, is_container='container', presence=False, yang_name="cid-card", rest_name="cid-card", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure threshold and alert setting for\ncomponent:CID-CARD', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Incompatible value: raise with the machine-readable error dict
        # used throughout these generated bindings.
        raise ValueError({'error-string': """cid_card must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=cid_card.cid_card, is_container='container', presence=False, yang_name="cid-card", rest_name="cid-card", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure threshold and alert setting for\ncomponent:CID-CARD', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True)""", })
    self.__cid_card = t
    # Notify the parent tree of the change, when supported.
    if hasattr(self, '_set'):
        self._set()
|
def demo ( nums = [ ] ) :
"Print a few usage examples on stdout ."
|
nums = nums or [ 3 , 1 , 4 , 1 , 5 , 9 , 2 , 6 ]
fmt = lambda num : '{0:g}' . format ( num ) if isinstance ( num , ( float , int ) ) else 'None'
nums1 = list ( map ( fmt , nums ) )
if __name__ == '__main__' :
prog = sys . argv [ 0 ]
else :
prog = 'sparklines'
result = [ ]
result . append ( 'Usage examples (command-line and programmatic use):' )
result . append ( '' )
result . append ( '- Standard one-line sparkline' )
result . append ( '{0!s} {1!s}' . format ( prog , ' ' . join ( nums1 ) ) )
result . append ( '>>> print(sparklines([{0!s}])[0])' . format ( ', ' . join ( nums1 ) ) )
result . append ( sparklines ( nums ) [ 0 ] )
result . append ( '' )
result . append ( '- Multi-line sparkline (n=2)' )
result . append ( '{0!s} -n 2 {1!s}' . format ( prog , ' ' . join ( nums1 ) ) )
result . append ( '>>> for line in sparklines([{0!s}], num_lines=2): print(line)' . format ( ', ' . join ( nums1 ) ) )
for line in sparklines ( nums , num_lines = 2 ) :
result . append ( line )
result . append ( '' )
result . append ( '- Multi-line sparkline (n=3)' )
result . append ( '{0!s} -n 3 {1!s}' . format ( prog , ' ' . join ( nums1 ) ) )
result . append ( '>>> for line in sparklines([{0!s}], num_lines=3): print(line)' . format ( ', ' . join ( nums1 ) ) )
for line in sparklines ( nums , num_lines = 3 ) :
result . append ( line )
result . append ( '' )
nums = nums + [ None ] + list ( reversed ( nums [ : ] ) )
result . append ( '- Standard one-line sparkline with gap' )
result . append ( '{0!s} {1!s}' . format ( prog , ' ' . join ( map ( str , nums ) ) ) )
result . append ( '>>> print(sparklines([{0!s}])[0])' . format ( ', ' . join ( map ( str , nums ) ) ) )
result . append ( sparklines ( nums ) [ 0 ] )
return '\n' . join ( result ) + '\n'
|
def prob_classify(self, text):
    """Return the label probability distribution for classifying a string
    of text.

    Example:
        >>> classifier = MaxEntClassifier(train_data)
        >>> prob_dist = classifier.prob_classify("I feel happy this morning.")
        >>> prob_dist.max()
        'positive'
        >>> prob_dist.prob("positive")
        0.7

    :rtype: nltk.probability.DictionaryProbDist
    """
    # Featurize the text, then delegate to the underlying classifier.
    return self.classifier.prob_classify(self.extract_features(text))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.