signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
async def catch_up(self):
    """"Catches up" on the missed updates while the client was offline.

    You should call this method after registering the event handlers
    so that the updates it loads can be processed by your script.
    This can also be used to forcibly fetch new updates if there are any.
    """
    # Start from the last known update state (pts, date); None is the
    # cache's global-state key.
    pts, date = self._state_cache[None]
    self.session.catching_up = True
    try:
        while True:
            # Ask Telegram for the difference since (pts, date).
            d = await self(functions.updates.GetDifferenceRequest(pts, date, 0))
            if isinstance(d, (types.updates.DifferenceSlice, types.updates.Difference)):
                # Difference carries the final state; a slice carries an
                # intermediate state and means more data is pending.
                if isinstance(d, types.updates.Difference):
                    state = d.state
                else:
                    state = d.intermediate_state
                pts, date = state.pts, state.date
                # Re-dispatch the fetched updates through the normal
                # update-handling path, wrapping new messages as updates.
                self._handle_update(types.Updates(
                    users=d.users,
                    chats=d.chats,
                    date=state.date,
                    seq=state.seq,
                    updates=d.other_updates + [
                        types.UpdateNewMessage(m, 0, 0) for m in d.new_messages
                    ],
                ))
                # TODO Implement upper limit (max_pts).
                # We don't want to fetch updates we already know about.
                # We may still get duplicates because the Difference
                # contains a lot of updates and presumably only has the
                # state for the last one, but at least we don't
                # unnecessarily fetch too many.
                # updates.getDifference's pts_total_limit seems to mean
                # "how many pts is the request allowed to return", and if
                # there is more than that, it returns "too long" (so there
                # would be duplicate updates since we know about some).
                # This can be used to detect collisions (i.e. it would
                # return an update we have already seen).
            else:
                # Empty: only the date advances. TooLong: jump pts forward
                # and stop; the gap is too large to replay.
                if isinstance(d, types.updates.DifferenceEmpty):
                    date = d.date
                elif isinstance(d, types.updates.DifferenceTooLong):
                    pts = d.pts
                break
    except (ConnectionError, asyncio.CancelledError):
        # Best-effort: disconnects/cancellation simply end the catch-up.
        pass
    finally:
        # TODO Save new pts to session
        self._state_cache._pts_date = (pts, date)
        self.session.catching_up = False
def rgb_to_cmy(r, g=None, b=None):
    """Convert a color from RGB coordinates to CMY.

    Parameters:
        r: the Red component value [0...1], or a (r, g, b) list/tuple
        g: the Green component value [0...1]
        b: the Blue component value [0...1]

    Returns:
        The color as a (c, m, y) tuple, with each component in [0...1].

    >>> rgb_to_cmy(1, 0.5, 0)
    (0, 0.5, 1)
    """
    # Accept a single (r, g, b) sequence as the first argument.
    # isinstance is the idiomatic type check (covers subclasses too).
    if isinstance(r, (list, tuple)):
        r, g, b = r
    # CMY is simply the complement of RGB.
    return (1 - r, 1 - g, 1 - b)
def generate_sample(self, initial_pos, num_samples, trajectory_length, stepsize=None):
    """Yield samples one at a time using Hamiltonian Monte Carlo.

    Parameters
    ----------
    initial_pos : 1d array-like
        Starting state of the Markov chain (parameter position vector).
    num_samples : int
        Number of samples to generate.
    trajectory_length : int or float
        Target trajectory length, stepsize * number of leapfrog steps.
    stepsize : float, optional
        Step size for proposing new position/momentum values in
        simulate_dynamics. Chosen automatically when None.

    Returns
    -------
    generator
        Yields a 1d numpy.array per sample. After exhaustion,
        ``self.acceptance_rate`` holds the fraction of accepted proposals.
    """
    self.accepted_proposals = 0
    initial_pos = _check_1d_array_object(initial_pos, 'initial_pos')
    _check_length_equal(initial_pos, self.model.variables,
                        'initial_pos', 'model.variables')
    if stepsize is None:
        stepsize = self._find_reasonable_stepsize(initial_pos)
    # Number of leapfrog steps per HMC iteration (at least one).
    n_steps = int(max(1, round(trajectory_length / stepsize, 0)))
    current_pos = initial_pos.copy()
    for _iteration in range(num_samples):
        current_pos, _ = self._sample(current_pos, trajectory_length,
                                      stepsize, n_steps)
        yield current_pos
    self.acceptance_rate = self.accepted_proposals / num_samples
def compose_matrix(scale=None, shear=None, angles=None, translate=None, perspective=None):
    """Return a 4x4 transformation matrix from a sequence of transformations.

    This is the inverse of the decompose_matrix function.

    Sequence of transformations:
        scale : vector of 3 scaling factors
        shear : list of shear factors for x-y, x-z, y-z axes
        angles : list of Euler angles about static x, y, z axes
        translate : translation vector along x, y, z axes
        perspective : perspective partition of the matrix

    >>> M0 = compose_matrix(scale, shear, angles, trans, persp)
    >>> is_same_transform(M0, compose_matrix(*decompose_matrix(M0)))
    True
    """
    # Collect the individual factor matrices in application order:
    # perspective, then translation, rotation, shear, scale.
    factors = []
    if perspective is not None:
        P = np.identity(4)
        P[3, :] = perspective[:4]
        factors.append(P)
    if translate is not None:
        T = np.identity(4)
        T[:3, 3] = translate[:3]
        factors.append(T)
    if angles is not None:
        factors.append(euler_matrix(angles[0], angles[1], angles[2], 'sxyz'))
    if shear is not None:
        Z = np.identity(4)
        Z[0, 1], Z[0, 2], Z[1, 2] = shear[0], shear[1], shear[2]
        factors.append(Z)
    if scale is not None:
        S = np.identity(4)
        S[0, 0], S[1, 1], S[2, 2] = scale[0], scale[1], scale[2]
        factors.append(S)
    M = np.identity(4)
    for factor in factors:
        M = np.dot(M, factor)
    # Normalize so the homogeneous coordinate is exactly 1.
    M /= M[3, 3]
    return M
def create(self, request, *args, **kwargs):
    """Create a new push hook.

    Issue a **POST** against */api/hooks-push/* as an authenticated user,
    specifying a list of ``event_types`` or ``event_groups``.

    Example request body::

        {
            "event_types": ["resource_start_succeeded"],
            "event_groups": ["users"],
            "type": "Android"
        }

    A hook may be temporarily disabled without deleting it by issuing a
    **PATCH** against the hook URL with ``{"is_active": "false"}``.
    """
    # No extra behaviour here; delegate to the framework's default create.
    return super(PushHookViewSet, self).create(request, *args, **kwargs)
def _estimate_bkg_rms(self, xmin, xmax, ymin, ymax):
    """Estimate the background noise mean and RMS over a sub-region.

    The mean is estimated as the median of the finite data, and the RMS
    as the inter-quartile range of the data / 1.34896 (the IQR of a
    Gaussian equals 1.34896 sigma).

    Parameters
    ----------
    xmin, xmax, ymin, ymax : int
        The bounding region over which the bkg/rms will be calculated.

    Returns
    -------
    ymin, ymax, xmin, xmax : int
        A copy of the input parameters.
    bkg, rms : float
        The calculated background and noise (NaN when fewer than 4
        finite pixels are available).
    """
    data = self.global_data.data_pix[ymin:ymax, xmin:xmax]
    # Keep only finite pixels; NaN/inf would corrupt the percentiles.
    pixels = np.extract(np.isfinite(data), data).ravel()
    if len(pixels) < 4:
        # Too few pixels for a meaningful estimate.
        # np.nan, not np.NaN: the NaN alias was removed in NumPy 2.0.
        bkg, rms = np.nan, np.nan
    else:
        pixels.sort()
        p25 = pixels[int(pixels.size / 4)]
        p50 = pixels[int(pixels.size / 2)]
        p75 = pixels[int(pixels.size / 4 * 3)]
        iqr = p75 - p25
        bkg, rms = p50, iqr / 1.34896
    # Return the inputs along with the outputs so callers compiling the
    # results of multiple processes know which region these refer to.
    return ymin, ymax, xmin, xmax, bkg, rms
def attachment(self, attachment):
    """Add one or more attachments to this email.

    :param attachment: the attachment(s) to add
    :type attachment: Attachment, list(Attachment)
    """
    # Normalise to a list so both call styles share one code path.
    items = attachment if isinstance(attachment, list) else [attachment]
    for item in items:
        self.add_attachment(item)
def _parse_errorbars(self, label, err):
    """Look for error keyword arguments and return the actual errorbar data
    or return the error DataFrame/dict.

    Error bars can be specified in several ways:
        Series: the user provides a pandas.Series object of the same
            length as the data
        ndarray: provides a np.ndarray of the same length as the data
        DataFrame/dict: error values are paired with keys matching the
            key in the plotted DataFrame
        str: the name of a column within the plotted DataFrame
    """
    if err is None:
        return None

    def match_labels(data, e):
        # Align the error values with the plotted data's index.
        e = e.reindex(data.index)
        return e

    # key-matched DataFrame
    if isinstance(err, ABCDataFrame):
        err = match_labels(self.data, err)
    # key-matched dict: passed through untouched
    elif isinstance(err, dict):
        pass
    # Series of error values
    elif isinstance(err, ABCSeries):
        # broadcast error series across all plotted series
        err = match_labels(self.data, err)
        err = np.atleast_2d(err)
        err = np.tile(err, (self.nseries, 1))
    # errors are a column in the dataframe
    elif isinstance(err, str):
        evalues = self.data[err].values
        # Remove the error column from the data so it is not plotted.
        self.data = self.data[self.data.columns.drop(err)]
        err = np.atleast_2d(evalues)
        err = np.tile(err, (self.nseries, 1))
    elif is_list_like(err):
        if is_iterator(err):
            # Materialise iterators before shaping.
            err = np.atleast_2d(list(err))
        else:
            # raw error values
            err = np.atleast_2d(err)
        err_shape = err.shape
        # asymmetrical error bars: must be (nseries, 2, n_points)
        if err.ndim == 3:
            if (err_shape[0] != self.nseries) or (err_shape[1] != 2) or (err_shape[2] != len(self.data)):
                msg = "Asymmetrical error bars should be provided " + "with the shape (%u, 2, %u)" % (self.nseries, len(self.data))
                raise ValueError(msg)
        # broadcast a single row of errors to each data series
        if len(err) == 1:
            err = np.tile(err, (self.nseries, 1))
    elif is_number(err):
        # A scalar error applies uniformly to every point of every series.
        err = np.tile([err], (self.nseries, len(self.data)))
    else:
        msg = "No valid {label} detected".format(label=label)
        raise ValueError(msg)
    return err
def show_edit_form(obj, attrs=None, title="", toolTips=None):
    """Show a modal parameters-editor form.

    Arguments:
        obj: object to extract attribute values from, or a dict-like
        attrs: list of attribute names; if None, determined from obj.keys()
        title: title for the editor form
        toolTips: optional list of tooltip strings, parallel to attrs

    Returns:
        (r, form): the dialog's exec_() result and the form object itself
    """
    if attrs is None:
        if hasattr(obj, "keys"):
            attrs = list(obj.keys())
        else:
            raise RuntimeError("attrs is None and cannot determine it from obj")
    specs = []
    for i, name in enumerate(attrs):
        # Try as attribute first, then fall back to mapping-key access.
        # getattr() is the idiomatic spelling (was obj.__getattribute__).
        try:
            value = getattr(obj, name)
        except AttributeError:
            value = obj[name]
        # None becomes an empty string so the editor shows a blank field.
        if value is None:
            value = ""
        dict_ = {"value": value}
        if toolTips is not None:
            # Both capitalisations are set; presumably different widget
            # versions read different keys — TODO confirm and keep one.
            dict_["toolTip"] = toolTips[i]
            dict_["tooltip"] = toolTips[i]
        specs.append((name, dict_))
    form = XParametersEditor(specs=specs, title=title)
    r = form.exec_()
    return r, form
def all_subs(bounds):
    """Given a list of tuples specifying the (lo, hi) bounds of each array
    dimension, return a list of all the subscript tuples for that array."""
    # One inclusive index range per dimension; idx2subs expands the
    # cartesian product into subscript tuples.
    idx_list = [range(dim[0], dim[1] + 1) for dim in bounds]
    return idx2subs(idx_list)
def tag(self, sentence, tokenize=True):
    """Tag a string `sentence` with part-of-speech tags.

    :param str or list sentence: A string or a list of sentence strings.
    :param tokenize: (optional) If ``False`` the string has to be tokenized
        before (space separated string).
    :returns: list of (token, tag) tuples; empty list for empty input.
    """
    # Do not process empty strings (Issue #3)
    if sentence.strip() == "":
        return []
    # Do not process strings consisting of a single punctuation mark (Issue #4)
    elif sentence.strip() in PUNCTUATION:
        if self.include_punc:
            _sym = sentence.strip()
            # Sentence-ending punctuation gets the conventional "." tag;
            # any other mark is tagged as itself.
            if _sym in tuple('.?!'):
                _tag = "."
            else:
                _tag = _sym
            return [(_sym, _tag)]
        else:
            return []
    if tokenize:
        _tokenized = " ".join(self.tokenizer.tokenize(sentence))
        sentence = _tokenized
    # Sentence is tokenized before it is passed on to pattern.de.tag
    # (i.e. it is either submitted pre-tokenized or tokenized above).
    _tagged = pattern_tag(sentence, tokenize=False, encoding=self.encoding, tagset=self.tagset)
    if self.include_punc:
        return _tagged
    else:
        # Strip tokens whose tag is itself punctuation.
        # NOTE(review): `unicode` implies this code targets Python 2.
        _tagged = [(word, t) for word, t in _tagged if not PUNCTUATION_REGEX.match(unicode(t))]
        return _tagged
def set_categories(self):
    """Parse the feed's <category> elements and store their text values."""
    # Collect the text of every <category> tag, preserving document order.
    self.categories = [node.string for node in self.soup.findAll('category')]
def _prepare_colors(color, values, limits_c, colormap, alpha, chan=None):
    """Return colors for all the channels based on the various inputs.

    Parameters
    ----------
    color : tuple
        3- or 4-element tuple (RGB[A]), components between 0 and 1.
    values : ndarray
        Array with one value per channel, used to color by value.
    limits_c : tuple of 2 floats, optional
        Min and max values used to normalize the colors.
    colormap : str
        Name of one of the vispy colormaps.
    alpha : float
        Transparency (0 = transparent, 1 = opaque).
    chan : instance of Channels
        Used to build channel groups from labels when no color/values given.

    Returns
    -------
    colors
        Colors for all channels (single color or one per channel).
    limits
        The (possibly computed) value limits, or None.
    """
    if values is not None:
        # Color by value: symmetric limits around zero unless given.
        if limits_c is None:
            limits_c = array([-1, 1]) * nanmax(abs(values))
        colors = get_colormap(colormap)[normalize(values, *limits_c)]
    elif color is not None:
        # A single explicit color for every channel.
        colors = ColorArray(color)
    else:
        # Fall back to coloring channels by their group membership.
        colors = get_colormap('hsl')[_chan_groups_to_index(chan)]
    if alpha is not None:
        colors.alpha = alpha
    return colors, limits_c
def get_port_channel_detail_output_lacp_aggr_member_interface_type(self, **kwargs):
    """Auto Generated Code.

    Build the XML payload for the get-port-channel-detail RPC's
    output/lacp/aggr-member/interface-type leaf and dispatch it through
    the callback (``self._callback`` unless overridden via kwargs).
    """
    config = ET.Element("config")
    detail = ET.Element("get_port_channel_detail")
    # The RPC element itself is what gets dispatched.
    config = detail
    output = ET.SubElement(detail, "output")
    lacp = ET.SubElement(output, "lacp")
    member = ET.SubElement(lacp, "aggr-member")
    iface_type = ET.SubElement(member, "interface-type")
    iface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
async def executor(func, *args, **kwargs):
    """Execute ``func(*args, **kwargs)`` in an executor thread and return
    its result.

    Args:
        func: the callable to run.
        *args: positional arguments for ``func``.
        **kwargs: keyword arguments for ``func``.
    """
    loop = asyncio.get_running_loop()
    # run_in_executor takes no kwargs, so close over them.
    return await loop.run_in_executor(None, lambda: func(*args, **kwargs))
def set_abort_pending(self, newstate):
    """Set the Abort state if something goes wrong during provisioning.

    Also used to finish the provisioning process when all is completed.
    Method: POST
    """
    self.logger.debug("set_abort_pending(" + "{})".format(newstate))
    # POST the new state to the IM "abortPending" command action.
    endpoint = "{}/{}".format(
        self._im_api_url, "types/Command/instances/actions/abortPending")
    r1 = self._im_session.post(
        endpoint,
        headers={'Content-type': 'application/json', 'Version': '1.0'},
        verify=self._im_verify_ssl,
        data=newstate,
        stream=True,
    )
    if not r1.ok:
        # Something went wrong; log and fall through to return the body.
        self.logger.error("Error set_abort_pending(" + "{})".format(newstate))
    # NOTE(review): the response is returned as raw text — callers must
    # not assume it is valid JSON.
    return r1.text
def validate(self):
    """Validate that the OutputContextVertex is correctly representable."""
    super(OutputContextVertex, self).validate()
    # A vertex location must not point into a field.
    if self.location.field is not None:
        raise ValueError(u'Expected location at a vertex, but got: {}'.format(self.location))
def _make_bz_instance(opt):
    """Build the bugzilla.Bugzilla instance we will use.

    :param opt: parsed command-line options.
    """
    if opt.bztype != 'auto':
        log.info("Explicit --bztype is no longer supported, ignoring")
    # Credential caching is opt-in; -1 asks python-bugzilla to use its
    # default cookie/token file locations.
    cookiefile = tokenfile = None
    use_creds = False
    if opt.cache_credentials:
        cookiefile = opt.cookiefile or -1
        tokenfile = opt.tokenfile or -1
        use_creds = True
    return bugzilla.Bugzilla(
        url=opt.bugzilla,
        cookiefile=cookiefile,
        tokenfile=tokenfile,
        sslverify=opt.sslverify,
        use_creds=use_creds,
        cert=opt.cert,
    )
def src_new(converter_type, channels):
    """Initialise a new sample rate converter.

    Parameters
    ----------
    converter_type : int
        Converter to be used.
    channels : int
        Number of channels.

    Returns
    -------
    state
        An anonymous pointer to the internal state of the converter.
    error : int
        Error code (0 on success).
    """
    # The C API reports errors through an out-parameter.
    error_out = ffi.new('int*')
    handle = _lib.src_new(converter_type, channels, error_out)
    return handle, error_out[0]
def get_instance(self, payload):
    """Build an instance of VerificationCheckInstance.

    :param dict payload: Payload response from the API
    :returns: twilio.rest.preview.acc_security.service.verification_check.VerificationCheckInstance
    :rtype: twilio.rest.preview.acc_security.service.verification_check.VerificationCheckInstance
    """
    service_sid = self._solution['service_sid']
    return VerificationCheckInstance(self._version, payload, service_sid=service_sid)
def acquire(self, timeout=None):
    """Acquire a connection from the pool.

    :param timeout: If provided, seconds to wait for a connection before
        raising Queue.Empty. If not provided, fail immediately when the
        pool is empty (non-blocking get).
    :returns: Returns a RethinkDB connection
    :raises Empty: No resources are available before timeout.
    """
    # Use the lock as a context manager so it is released even when the
    # pool is empty and Queue.Empty propagates — the previous
    # implementation leaked the lock in that case, deadlocking the pool.
    with self._pool_lock:
        if timeout is None:
            conn_wrapper = self._pool.get_nowait()
        else:
            conn_wrapper = self._pool.get(True, timeout)
        self._current_acquired += 1
    return conn_wrapper.connection
def _in(field, value, document):
    """Return True if document[field] is contained in the iterable *value*.

    If the supplied value is not an iterable, a MalformedQueryException
    is raised.
    """
    try:
        candidates = iter(value)
    except TypeError:
        raise MalformedQueryException("'$in' must accept an iterable")
    # Missing fields are treated as None for the membership test.
    return document.get(field, None) in candidates
def lc_score(value):
    """Evaluate the accuracy of a predictive measure (e.g. r-squared).

    :param value: float, between 0.0 and 1.0.
    :return: a log-scaled score, symmetric about value == 0.5 (which
        scores 0); values above 0.5 score positive, below score negative.
    """
    # Rebase to [-1, 1] around the 0.5 midpoint.
    rebased = 2 * (value - 0.5)
    if rebased == 0:
        return 0
    if rebased > 0:
        return -np.log2(1.0 - rebased)
    return np.log2(1.0 + rebased)
def _post_subject(self, body):
    """Create new subjects and associated attributes.

    Example:
        acs.post_subject([{
            "subjectIdentifier": "/role/evangelist",
            "parents": [],
            "attributes": [{
                "issuer": "default",
                "name": "role",
                "value": "developer evangelist",
            }],
        }])

    The issuer is effectively a namespace, and in policy evaluations you
    identify an attribute by a specific namespace. Many examples provide
    a URL but it could be any arbitrary string.

    The body is a list, so many subjects can be added at the same time.

    :raises TypeError: if *body* is not a list.
    """
    # Raise instead of assert: assertions are stripped under ``python -O``,
    # which would silently skip this validation.
    if not isinstance(body, list):
        raise TypeError("POST requires body to be a list")
    uri = self._get_subject_uri()
    return self.service._post(uri, body)
def uploadItem(self, filePath, description):
    """Upload an item to the server.

    Each uploaded item is identified by a unique itemID. Since this
    request uploads a file, it must be a multi-part request as per
    IETF RFC 1867.

    Inputs:
        filePath - the file to be uploaded.
        description - optional description for the uploaded item.
    """
    # Removed the dead Python-2-only ``import urlparse`` (never used, and
    # it raises ImportError on Python 3).
    url = self._url + "/upload"
    params = {"f": "json"}
    if description:
        # Forward the (previously ignored) description to the endpoint.
        # NOTE(review): parameter name assumed from the REST API — confirm.
        params["description"] = description
    files = {'itemFile': filePath}
    return self._post(url=url,
                      param_dict=params,
                      files=files,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
def call_nowait(self, cb_name):
    """Pick a callback command and call it without waiting for it to finish."""
    # Never fire callbacks while the cluster is still bootstrapping.
    if self.bootstrapping:
        return
    # Remember that a lifecycle callback was triggered.
    if cb_name in (ACTION_ON_START, ACTION_ON_STOP, ACTION_ON_RESTART, ACTION_ON_ROLE_CHANGE):
        self.__cb_called = True
    if self.callback and cb_name in self.callback:
        cmd = self.callback[cb_name]
        try:
            # Append the event name, role and scope as extra arguments.
            cmd = shlex.split(self.callback[cb_name]) + [cb_name, self.role, self.scope]
            # Fire-and-forget: the executor runs the command asynchronously.
            self._callback_executor.call(cmd)
        except Exception:
            logger.exception('callback %s %s %s %s failed', cmd, cb_name, self.role, self.scope)
def paginate(self, request, offset=0, limit=None):
    """Paginate the queryset.

    Returns the sliced collection together with its total count.
    """
    page = self.collection.offset(offset).limit(limit)
    total = self.collection.count()
    return page, total
def send_message(self, message):
    """Send a chat message to this steam user.

    :param message: message to send
    :type message: str
    """
    body = {
        'steamid': self.steam_id,
        'chat_entry_type': EChatEntryType.ChatMsg,
        'message': message.encode('utf8'),
    }
    self._steam.send(MsgProto(EMsg.ClientFriendMsg), body)
def clone(self, spawn_mapping=None):
    """Return an exact copy of this generator.

    The copy behaves the same way (produces the same elements in the same
    order) and is automatically reset whenever the original generator is
    reset.
    """
    duplicate = self.spawn(spawn_mapping)
    # Wire up the parent/clone relationship in both directions so resets
    # propagate from the original to the copy.
    self.register_clone(duplicate)
    duplicate.register_parent(self)
    return duplicate
def add_citations(voevent, event_ivorns):
    """Add citations to other VOEvents.

    The schema mandates that the 'Citations' section must either be
    entirely absent, or non-empty — hence this wrapper function creates
    the section just before listing the first citation.

    Args:
        voevent (:class:`Voevent`): Root node of a VOEvent etree.
        event_ivorns (:class:`voeventparse.misc.EventIvorn`): List of
            EventIvorn elements to add to the citation list.
    """
    # Create the Citations element on first use only.
    existing = voevent.xpath('Citations')
    if len(existing) == 0:
        etree.SubElement(voevent, 'Citations')
    voevent.Citations.extend(_listify(event_ivorns))
def render(self, **kwargs):
    """Render the HTML representation of the element."""
    # Render every child first so their output is available to the template.
    for child in self._children.values():
        child.render(**kwargs)
    return self._template.render(this=self, kwargs=kwargs)
def _pdist(x):
    """Calculate the pair-wise point distances of a matrix.

    Parameters
    ----------
    x : 2d-array
        An m-by-n array of scalars, where there are m points in n dimensions.

    Returns
    -------
    d : array
        A 1-by-b array of scalars, where b = m*(m-1)/2. This array contains
        all the pair-wise point distances, arranged in the order (1, 0),
        (2, 0), ..., (m-1, 0), (2, 1), ..., (m-1, 1), ..., (m-1, m-2).

    Examples
    --------
    >>> x = np.array([[0.0, 0.0], [3.0, 4.0]])
    >>> _pdist(x)
    array([5.])
    """
    x = np.atleast_2d(x)
    assert len(x.shape) == 2, 'Input array must be 2d-dimensional'
    m = x.shape[0]
    if m < 2:
        # No pairs; return an empty float array (the original returned a
        # bare list here, giving an inconsistent return type).
        return np.empty(0)
    chunks = []
    for i in range(m - 1):
        # Vectorized inner loop: distances from point i to all later points,
        # preserving the documented (j, i) ordering.
        diff = x[i + 1:] - x[i]
        chunks.append(np.sqrt(np.sum(diff * diff, axis=1)))
    return np.concatenate(chunks)
def _create_symlink_cygwin(self, initial_path, final_path):
    """Use cygwin's ln.exe to generate a symbolic link."""
    ln_exe = os.path.join(self._cygwin_bin_location, "ln.exe")
    cmd = [
        ln_exe,
        "-s",
        self._get_cygwin_path(initial_path),
        self._get_cygwin_path(final_path),
    ]
    process = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=False)
    out, err = process.communicate()
    if err:
        # Surface the failure both on stdout and as an exception.
        print(err)
        raise Exception(err)
    return out.strip()
def MI_get_item(self, key, index=0):
    """Return the full item row for *key* looked up under index *index*.

    The result is a list: [primary_key] followed by the stored value(s).
    The key must exist; a missing key raises KeyError.
    """
    # Resolve the requested index (name or position) to its position.
    index = _key_to_index_single(force_list(self.indices.keys()), index)
    if index != 0:
        # Map the secondary-index key back to the primary-index key.
        key = self.indices[index][key]
    # always use first index key
    # key must exist
    value = super(MIMapping, self).__getitem__(key)
    N = len(self.indices)
    if N == 1:
        # Only the primary index: the row is just the key itself.
        return [key]
    if N == 2:
        # Single stored value: wrap it so the row is a flat list.
        value = [value]
    return [key] + value
def fxy(
    z="sin(3*x)*log(x-y)/3",
    x=(0, 3),
    y=(0, 3),
    zlimits=(None, None),
    showNan=True,
    zlevels=10,
    wire=False,
    c="b",
    bc="aqua",
    alpha=1,
    texture="paper",
    res=100,
):
    """Build a surface representing the function f(x, y), specified as a
    string or as a reference to an external function.

    :param float x: x range of values.
    :param float y: y range of values.
    :param float zlimits: limit the z range of the independent variable.
    :param int zlevels: will draw the specified number of z-level contour lines.
    :param bool showNan: show where the function does not exist as red points.
    :param bool wire: show surface as wireframe.
    """
    if isinstance(z, str):
        # Compile the string expression into a callable zfunc(x, y).
        try:
            z = z.replace("math.", "").replace("np.", "")
            namespace = locals()
            code = "from math import*\ndef zfunc(x,y): return " + z
            exec(code, namespace)
            z = namespace["zfunc"]
        except:
            vc.printc("Syntax Error in fxy()", c=1)
            return None
    # Unit plane sampled at res x res, later warped to the (x, y) domain.
    ps = vtk.vtkPlaneSource()
    ps.SetResolution(res, res)
    ps.SetNormal([0, 0, 1])
    ps.Update()
    poly = ps.GetOutput()
    dx = x[1] - x[0]
    dy = y[1] - y[0]
    todel, nans = [], []
    if zlevels:
        # Triangulate so the banded contour filter can operate later.
        tf = vtk.vtkTriangleFilter()
        tf.SetInputData(poly)
        tf.Update()
        poly = tf.GetOutput()
    for i in range(poly.GetNumberOfPoints()):
        # Map the plane's [-0.5, 0.5] coordinates into the requested domain
        # and lift each point to z = f(x, y).
        px, py, _ = poly.GetPoint(i)
        xv = (px + 0.5) * dx + x[0]
        yv = (py + 0.5) * dy + y[0]
        try:
            zv = z(xv, yv)
            poly.GetPoints().SetPoint(i, [xv, yv, zv])
        except:
            # f is undefined here (e.g. log of a negative number):
            # remember the point for deletion and NaN display.
            todel.append(i)
            nans.append([xv, yv, 0])
    if len(todel):
        # Remove all cells touching undefined points.
        cellIds = vtk.vtkIdList()
        poly.BuildLinks()
        for i in todel:
            poly.GetPointCells(i, cellIds)
            for j in range(cellIds.GetNumberOfIds()):
                poly.DeleteCell(cellIds.GetId(j))  # flag cell
        poly.RemoveDeletedCells()
        cl = vtk.vtkCleanPolyData()
        cl.SetInputData(poly)
        cl.Update()
        poly = cl.GetOutput()
    if not poly.GetNumberOfPoints():
        vc.printc("Function is not real in the domain", c=1)
        return None
    # Optionally clip the surface at the requested z limits.
    if zlimits[0]:
        tmpact1 = Actor(poly)
        a = tmpact1.cutWithPlane((0, 0, zlimits[0]), (0, 0, 1))
        poly = a.polydata()
    if zlimits[1]:
        tmpact2 = Actor(poly)
        a = tmpact2.cutWithPlane((0, 0, zlimits[1]), (0, 0, -1))
        poly = a.polydata()
    if c is None:
        # No explicit color: color by elevation instead.
        elev = vtk.vtkElevationFilter()
        elev.SetInputData(poly)
        elev.Update()
        poly = elev.GetOutput()
    actor = Actor(poly, c=c, bc=bc, alpha=alpha, wire=wire, texture=texture)
    acts = [actor]
    if zlevels:
        # Overlay iso-z contour lines using a banded contour filter.
        elevation = vtk.vtkElevationFilter()
        elevation.SetInputData(poly)
        bounds = poly.GetBounds()
        elevation.SetLowPoint(0, 0, bounds[4])
        elevation.SetHighPoint(0, 0, bounds[5])
        elevation.Update()
        bcf = vtk.vtkBandedPolyDataContourFilter()
        bcf.SetInputData(elevation.GetOutput())
        bcf.SetScalarModeToValue()
        bcf.GenerateContourEdgesOn()
        bcf.GenerateValues(zlevels, elevation.GetScalarRange())
        bcf.Update()
        zpoly = bcf.GetContourEdgesOutput()
        zbandsact = Actor(zpoly, c="k", alpha=alpha)
        zbandsact.GetProperty().SetLineWidth(1.5)
        acts.append(zbandsact)
    if showNan and len(todel):
        # Show undefined sample points as red dots at mid-elevation.
        bb = actor.GetBounds()
        zm = (bb[4] + bb[5]) / 2
        nans = np.array(nans) + [0, 0, zm]
        nansact = vs.Points(nans, c="red", alpha=alpha / 2)
        acts.append(nansact)
    if len(acts) > 1:
        asse = Assembly(acts)
        return asse
    else:
        return actor
def get_bytes(num_bytes):
    """Return a random byte string of ``num_bytes`` length.

    Uses the Windows CryptoAPI (``CryptGenRandom``) as the entropy source.

    Args:
        num_bytes (int): Number of random bytes to generate.

    Returns:
        bytes: ``num_bytes`` cryptographically random bytes.

    Raises:
        OSError: If the crypto context cannot be acquired or random
            generation fails.
    """
    s = create_string_buffer(num_bytes)
    # Provider handle filled in by CryptAcquireContextA.
    hProv = c_ulong()
    # The original code ignored both return codes and never released the
    # provider handle, leaking it on every call.
    if not windll.Advapi32.CryptAcquireContextA(byref(hProv), None, None, PROV_RSA_FULL, 0):
        raise OSError("CryptAcquireContextA failed")
    try:
        if not windll.Advapi32.CryptGenRandom(hProv, wintypes.DWORD(num_bytes), cast(byref(s), POINTER(c_byte))):
            raise OSError("CryptGenRandom failed")
    finally:
        # Always release the provider handle to avoid a resource leak.
        windll.Advapi32.CryptReleaseContext(hProv, 0)
    return s.raw
def get_all_objects ( self ) :
    """Yield pointers to every GC-tracked object.

    Walks the circular doubly-linked list of ``PyGC_Head`` structures of
    each generation in ``self.gc_generations`` and yields, for each entry,
    a pointer to the PyObject that follows the GC header.

    NOTE(review): as the original comments state, this walk is racy -- a
    ``PyGC_Head`` may be freed and overwritten just before we read it;
    callers must tolerate torn reads.
    """
    for i , generation in enumerate ( self . gc_generations ) :
        # The sentinel head of this generation's ring; we stop when the
        # traversal wraps back around to its address.
        generation_head_ptr = pygc_head_ptr = generation . head . get_pointer ( )
        generation_head_addr = generation_head_ptr . _value
        # The _PyObjectBase_GC_UNTRACK macro guarantees gc_prev always
        # points to some value, so following links is safe while the list
        # is intact; there is still a race condition if a PyGC_Head gets
        # free'd and overwritten just before we look at it.
        while True :
            # Advance to the next GC header in the ring.
            pygc_head_ptr = pygc_head_ptr . deref ( ) . gc_next
            if pygc_head_ptr . _value == generation_head_addr :
                # Wrapped around to the sentinel: this generation is done.
                break
            yield pygc_head_ptr . deref ( ) . get_object_ptr ( )
def change_nick(self, nick):
    """Rename this user, propagating the change to every joined channel.

    Removes the old nick from each channel's user set and adds the new
    one in its place.
    """
    previous = self.nick
    self.nick = IRCstr(nick)
    for channel in self.channels:
        channel.users.remove(previous)
        channel.users.add(self.nick)
def wait_for_click(self, button, timeOut=10.0):
    """Block until the given mouse button is clicked.

    Usage: C{mouse.wait_for_click(self, button, timeOut=10.0)}

    @param button: the mouse button click to wait for, as a button number (1-9)
    @param timeOut: maximum time, in seconds, to wait for the click to occur
    """
    waiter = iomediator.Waiter(None, None, int(button), timeOut)
    waiter.wait()
def formpivot(self):
    '''Parse a pivot clause. Accepted forms:

    -> #tag.match
    -> form:prop
    -> form
    '''
    self.ignore(whitespace)
    self.nextmust('->')
    self.ignore(whitespace)
    # "-> #tag..." pivots to tags.
    if self.nextchar() == '#':
        return s_ast.PivotToTags(kids=(self.tagmatch(),))
    # "-> *" is the pivot-out syntax; consume the star.
    if self.nextchar() == '*':
        self.offs += 1
        return s_ast.PivotOut()
    # Otherwise pivot to an absolute property (form or form:prop).
    return s_ast.FormPivot(kids=(self.absprop(),))
def inverse_transform(self, y, exogenous=None):
    """Inverse transform a transformed array.

    Invert the Box-Cox transformation on the transformed array. Note that
    if truncation happened in the ``transform`` method, invertibility will
    not be preserved, and the transformed array may not be perfectly
    inverse-transformed.

    Parameters
    ----------
    y : array-like or None, shape=(n_samples,)
        The transformed endogenous (time-series) array.
    exogenous : array-like or None, shape=(n_samples, n_features), optional
        The exogenous array of additional covariates. Not used for
        endogenous transformers. Default is None, and non-None values will
        serve as pass-through arrays.

    Returns
    -------
    y : array-like or None
        The inverse-transformed y array.
    exogenous : array-like or None
        The inverse-transformed exogenous array.
    """
    check_is_fitted(self, "lam1_")
    lam1, lam2 = self.lam1_, self.lam2_
    y, exog = self._check_y_exog(y, exogenous)
    # Degenerate case: lambda1 == 0 means the forward transform was a log.
    if lam1 == 0:
        return np.exp(y) - lam2, exog
    # Invert (x + lam2)^lam1 terms: x = (y * lam1 + 1)^(1/lam1) - lam2.
    return (y * lam1 + 1.) ** (1. / lam1) - lam2, exog
def list_instances(self, machine_state):
    """Return the list of the instances in the Cloud.

    in machine_state of type :class:`CloudMachineState`
    out return_names of type str -- VM names.
    return return_ids of type str -- VM ids.
    """
    if not isinstance(machine_state, CloudMachineState):
        raise TypeError("machine_state can only be an instance of type CloudMachineState")
    ids, names = self._call("listInstances", in_p=[machine_state])
    return (ids, names)
def deploy_to(self, displays=None, exclude=None, lock=None):
    """Deploy this page to the listed displays.

    Args:
        displays: Iterable of display identifiers to deploy to. If None,
            deploy to all displays.
        exclude: Iterable of display identifiers to skip; this overrides
            ``displays``. Defaults to no exclusions.
        lock: Unused by this method; kept for backward compatibility with
            existing callers.
    """
    # Use None sentinels instead of mutable default arguments ([]), which
    # are shared across calls.
    if exclude is None:
        exclude = []
    if lock is None:
        lock = []
    if displays is None:
        signs = Sign.objects.all()
    else:
        signs = Sign.objects.filter(display__in=displays)
    for sign in signs.exclude(display__in=exclude):
        sign.pages.add(self)
        sign.save()
def new_issuer(self, issuer_idx, info=None):
    """Add a new issuer to the dataset with the given data.

    Parameters:
        issuer_idx (str): The id to associate the issuer with. If it
            already exists, a derived unique id is generated.
        info (dict, list): Additional info of the issuer.

    Returns:
        Issuer: The newly added issuer.
    """
    idx = issuer_idx
    # De-duplicate the id if it is already taken.
    if idx in self._issuers.keys():
        idx = naming.index_name_if_in_list(idx, self._issuers.keys())
    issuer = issuers.Issuer(idx, info=info)
    self._issuers[idx] = issuer
    return issuer
def list_sdbs(self):
    """Return the names of all SDBs."""
    return [entry['name'] for entry in self.get_sdbs()]
def get_pixbeam_pixel(self, x, y):
    """Determine the beam, in pixels, at the given pixel coordinates.

    Parameters
    ----------
    x, y : float
        The pixel coordinates at which the beam is determined.

    Returns
    -------
    beam : :class:`AegeanTools.fits_image.Beam`
        A beam object, with a/b/pa in pixel coordinates.
    """
    # Convert the pixel position to sky coordinates and delegate.
    return self.get_pixbeam(*self.pix2sky((x, y)))
def next_unwrittable_on_col ( view , coords ) :
    """Return the row offset of the next unwrittable letter below ``coords``.

    Scans column ``x`` downward starting just below row ``y`` and returns
    the first offset whose letter is not in ``REWRITABLE_LETTERS``, or
    ``None`` if every scanned cell is rewritable.

    NOTE(review): ``range(y + 1, maxy)`` stops one short of the largest y
    coordinate present in ``view`` -- possibly an off-by-one; confirm
    whether the bottom row is intentionally excluded.
    """
    x , y = coords
    # Largest y coordinate present among the view's (x, y) keys.
    maxy = max ( view . keys ( ) , key = itemgetter ( 1 ) ) [ 1 ]
    for offset in range ( y + 1 , maxy ) :
        letter = view [ x , offset ]
        if letter not in REWRITABLE_LETTERS :
            return offset
    return None
def cumprod(vari, axis=None):
    """Perform the cumulative product of a shapeable quantity over a given axis.

    Args:
        vari (chaospy.poly.base.Poly, numpy.ndarray):
            Input data.
        axis (int):
            Axis over which the product is taken. By default ``axis`` is
            None, and the product runs over a flattened array.

    Returns:
        (chaospy.poly.base.Poly):
            An array shaped as ``vari`` but with the specified axis removed.

    Examples:
        >>> vari = cp.prange(4)
        >>> print(vari)
        [1, q0, q0^2, q0^3]
        >>> print(cp.cumprod(vari))
        [1, q0, q0^3, q0^6]
    """
    # Plain arrays go straight to numpy.
    if not isinstance(vari, Poly):
        return np.cumprod(vari, axis)
    # A single-element polynomial is its own cumulative product.
    if np.prod(vari.shape) == 1:
        return vari.copy()
    if axis is None:
        vari = chaospy.poly.shaping.flatten(vari)
        axis = 0
    # Bring the target axis to the front, then fold products left to right.
    vari = chaospy.poly.shaping.rollaxis(vari, axis)
    partials = [vari[0]]
    for poly in vari[1:]:
        partials.append(partials[-1] * poly)
    return Poly(partials, vari.dim, vari.shape, vari.dtype)
def _build_command ( self , cmd , ** kwargs ) :
    """Construct a binary command packet for the named API command.

    Looks up ``cmd`` in ``self.api_commands`` and writes each field, in
    definition order, into the packet:

    * a field value may be supplied as a keyword argument (``str`` values
      are converted with ``stringToBytes``);
    * a missing fixed-length field falls back to its ``default`` value, or
      raises ``KeyError`` if no (truthy) default exists;
    * a missing variable-length field (``len`` is None) is simply skipped;
    * a supplied value whose length does not match the field's fixed
      ``len`` raises ``ValueError``.

    Returns:
        bytes: the assembled command packet.

    Raises:
        NotImplementedError: if the class defines no ``api_commands``.
        KeyError: if a required fixed-length field is not provided.
        ValueError: if a supplied field has the wrong length.
    """
    try :
        cmd_spec = self . api_commands [ cmd ]
    except AttributeError :
        raise NotImplementedError ( "API command specifications could not be " "found; use a derived class which defines" " 'api_commands'." )
    packet = b''
    for field in cmd_spec :
        try : # Read this field's value from the keyword arguments.
            data = kwargs [ field [ 'name' ] ]
            if isinstance ( data , str ) :
                data = stringToBytes ( data )
        except KeyError : # The caller did not supply this field.
            # Only a problem if the field has a specific length.
            if field [ 'len' ] is not None : # Was a default value specified?
                default_value = field [ 'default' ]
                if default_value : # If so, use it.
                    data = default_value
                else : # Otherwise, fail.
                    raise KeyError ( "The expected field {} of length {} " "was not provided" . format ( field [ 'name' ] , field [ 'len' ] ) )
            else : # No specific length; skip this field.
                data = None
        # Ensure that the proper number of elements will be written.
        if field [ 'len' ] and len ( data ) != field [ 'len' ] :
            raise ValueError ( "The data provided for '{}' was not {} " "bytes long" . format ( field [ 'name' ] , field [ 'len' ] ) )
        # Append the data if it was specified; an unsupplied variable
        # length field contributes nothing to the packet.
        if data :
            packet += data
    return packet
def _redistribute_builder ( self , afi = 'ipv4' , source = None ) :
"""Build BGP redistribute method .
Do not use this method directly . You probably want ` ` redistribute ` ` .
Args :
source ( str ) : Source for redistributing . ( connected )
afi ( str ) : Address family to configure . ( ipv4 , ipv6)
Returns :
Method to redistribute desired source .
Raises :
KeyError : if ` source ` is not specified .
Examples :
> > > import pynos . device
> > > conn = ( ' 10.24.39.203 ' , ' 22 ' )
> > > auth = ( ' admin ' , ' password ' )
> > > with pynos . device . Device ( conn = conn , auth = auth ) as dev :
. . . output = dev . bgp . _ redistribute _ builder ( source = ' connected ' ,
. . . afi = ' ipv4 ' )
. . . dev . bgp . _ redistribute _ builder ( source = ' hodor ' ,
. . . afi = ' ipv4 ' ) # doctest : + IGNORE _ EXCEPTION _ DETAIL
Traceback ( most recent call last ) :
AttributeError""" | if source == 'connected' :
return getattr ( self . _rbridge , 'rbridge_id_router_router_bgp_address_family_{0}_' '{0}_unicast_default_vrf_af_{0}_uc_and_vrf_cmds_' 'call_point_holder_redistribute_connected_' 'redistribute_connected' . format ( afi ) )
# TODO : Add support for ' static ' and ' ospf '
else :
raise AttributeError ( 'Invalid source.' ) |
def stop_instance ( self , instance_id ) :
    """Stop the given instance gracefully.

    Restores provider state from storage, locates the VM by its qualified
    name, stops it via its cloud service, and -- when this was the last
    instance -- tears down the global cloud resources and provider
    storage.

    :param str instance_id: instance identifier
    :return: None
    :raises Exception: if an earlier start failed, if the instance cannot
        be found, or if stopping it fails.
    """
    self . _restore_from_storage ( instance_id )
    # Refuse to proceed when a previous start already failed.
    if self . _start_failed :
        raise Exception ( 'stop_instance for node %s: failing due to' ' previous errors.' % instance_id )
    with self . _resource_lock :
        try :
            v_m = self . _qualified_name_to_vm ( instance_id )
            if not v_m :
                err = "stop_instance: can't find instance %s" % instance_id
                log . error ( err )
                raise Exception ( err )
            v_m . _cloud_service . _stop_vm ( v_m )
            # note: self._n_instances is a derived property, doesn't need
            # to be updated
            if self . _n_instances == 0 :
                log . debug ( 'last instance deleted, destroying ' 'global resources' )
                self . _delete_global_reqs ( )
                self . _delete_cloud_provider_storage ( )
        except Exception as exc :
            # Log the full traceback before re-raising to the caller.
            log . error ( traceback . format_exc ( ) )
            log . error ( "error stopping instance %s: %s" , instance_id , exc )
            raise
    log . debug ( 'stopped instance %s' , instance_id )
def sip(self, sip_url, username=None, password=None, url=None, method=None, status_callback_event=None, status_callback=None, status_callback_method=None, **kwargs):
    """Create a <Sip> element and nest it under this element.

    :param sip_url: SIP URL
    :param username: SIP Username
    :param password: SIP Password
    :param url: Action URL
    :param method: Action URL method
    :param status_callback_event: Status callback events
    :param status_callback: Status callback URL
    :param status_callback_method: Status callback URL method
    :param kwargs: additional attributes
    :returns: <Sip> element
    """
    element = Sip(
        sip_url,
        username=username,
        password=password,
        url=url,
        method=method,
        status_callback_event=status_callback_event,
        status_callback=status_callback,
        status_callback_method=status_callback_method,
        **kwargs
    )
    return self.nest(element)
def flip(self, axis=0, preserve_centroid=False):
    '''Flip the mesh across the given axis: 0 for x, 1 for y, 2 for z.

    When `preserve_centroid` is True, translate after flipping to
    preserve the location of the centroid.

    :param axis: index of the coordinate axis to mirror across.
    :param preserve_centroid: when True, translate along ``axis`` after
        flipping so the centroid keeps its original position.
    '''
    self.v[:, axis] *= -1
    if preserve_centroid:
        # After negation the recomputed centroid coordinate on `axis` is
        # the negative of the original, so subtracting twice that value
        # restores the original centroid. The original code indexed
        # centroid[0] regardless of `axis`, which was only correct for
        # axis=0.
        # NOTE(review): assumes self.centroid is recomputed from self.v
        # (a property) -- TODO confirm.
        self.v[:, axis] -= 2 * self.centroid[axis]
    self.flip_faces()
def g(x, a, c):
    """Residuals for sphere-intersection fitting (Christophe's suggestion).

    G[i] = sqrt(sum_j (x[j] - a[i, j])**2) - c[i]
    """
    # Row-wise Euclidean distance from x to each row of a, offset by c.
    return np.linalg.norm(x - a, axis=1) - c
def mode(data):
    """Return the most common data point from discrete or nominal data.

    ``mode`` assumes discrete data and returns a single value. This is the
    standard treatment of the mode as commonly taught in schools:

    >>> mode([1, 1, 2, 3, 3, 3, 3, 4])
    3

    This also works with nominal (non-numeric) data:

    >>> mode(["red", "blue", "blue", "red", "green", "red", "red"])
    'red'

    Raises ``StatisticsError`` for empty data or when no single value is
    strictly most common.
    """
    # Frequency table; we only need the top two entries to detect ties.
    frequencies = collections.Counter(data)
    top_two = frequencies.most_common(2)
    if not top_two:
        raise StatisticsError('no mode for empty data')
    if len(top_two) == 1 or top_two[0][1] != top_two[1][1]:
        return top_two[0][0]
    raise StatisticsError('no unique mode; found %d equally common values' % len(frequencies))
def children_rest_names(self):
    """Get the list of all possible children ReST names.

    Returns:
        list: list containing all possible rest names as string

    Example:
        >>> entity = NUEntity()
        >>> entity.children_rest_names
        ["foo", "bar"]
    """
    return [fetcher.__class__.managed_object_rest_name() for fetcher in self.fetchers]
def _get_elements ( self , source ) :
"""Returns the list of HtmlElements for the source
: param source : The source list to parse
: type source : list
: returns : A list of HtmlElements
: rtype : list""" | return list ( chain ( * [ self . tree . xpath ( xpath ) for xpath in source ] ) ) |
def _fix_insert ( self , sql , params ) :
    """Wrap the passed SQL with IDENTITY_INSERT statements and apply
    other necessary fixes for SQL Server inserts.

    Handles three cases when the model has an auto (identity) field:

    * inserting only the primary key with no value -> rewritten to
      ``INSERT INTO ... DEFAULT VALUES``;
    * an explicit identity value -> wrapped in
      ``SET IDENTITY_INSERT ... ON/OFF``;
    * otherwise the SQL is left untouched.

    When the backend can return the inserted id, the SQL is additionally
    mangled to capture it via an OUTPUT table variable.

    Returns the (possibly rewritten) ``(sql, params)`` pair.
    """
    meta = self . query . get_meta ( )
    if meta . has_auto_field :
        if hasattr ( self . query , 'fields' ) : # django 1.4 replaced columns with fields
            fields = self . query . fields
            auto_field = meta . auto_field
        else : # < django 1.4
            fields = self . query . columns
            auto_field = meta . auto_field . db_column or meta . auto_field . column
        # Whether the identity column is explicitly part of the insert.
        auto_in_fields = auto_field in fields
        quoted_table = self . connection . ops . quote_name ( meta . db_table )
        if not fields or ( auto_in_fields and len ( fields ) == 1 and not params ) :
            # Convert format when inserting only the primary key without
            # specifying a value.
            sql = 'INSERT INTO {0} DEFAULT VALUES' . format ( quoted_table )
            params = [ ]
        elif auto_in_fields :
            # An explicit identity value was given: wrap with
            # IDENTITY_INSERT so SQL Server accepts it.
            sql = 'SET IDENTITY_INSERT {table} ON;{sql};SET IDENTITY_INSERT {table} OFF' . format ( table = quoted_table , sql = sql , )
    # Mangle SQL to return the inserted ID from the insert.
    # http://msdn.microsoft.com/en-us/library/ms177564.aspx
    if self . return_id and self . connection . features . can_return_id_from_insert :
        col = self . connection . ops . quote_name ( meta . pk . db_column or meta . pk . get_attname ( ) )
        # Determine datatype for use with the table variable that will
        # return the inserted ID.
        pk_db_type = _re_data_type_terminator . split ( meta . pk . db_type ( self . connection ) ) [ 0 ]
        # NOCOUNT ON to prevent additional trigger/stored proc related
        # resultsets from interfering with the returned id.
        sql = 'SET NOCOUNT ON;{declare_table_var};{sql};{select_return_id}' . format ( sql = sql , declare_table_var = "DECLARE @sqlserver_ado_return_id table ({col_name} {pk_type})" . format ( col_name = col , pk_type = pk_db_type , ) , select_return_id = "SELECT * FROM @sqlserver_ado_return_id" , )
        output = self . _values_repl . format ( col = col )
        sql = self . _re_values_sub . sub ( output , sql )
    return sql , params
def update(self, key=values.unset, value=values.unset):
    """Update the VariableInstance.

    :param unicode key: The key
    :param unicode value: The value
    :returns: Updated VariableInstance
    :rtype: twilio.rest.serverless.v1.service.environment.variable.VariableInstance
    """
    payload = self._version.update(
        'POST',
        self._uri,
        data=values.of({'Key': key, 'Value': value}),
    )
    return VariableInstance(
        self._version,
        payload,
        service_sid=self._solution['service_sid'],
        environment_sid=self._solution['environment_sid'],
        sid=self._solution['sid'],
    )
def form_invalid(self, post_form, attachment_formset, **kwargs):
    """Process invalid forms.

    Called if one of the forms is invalid. Re-renders the context data
    with the data-filled forms and errors, flashing a general error
    message when the attachment formset itself failed validation.
    """
    formset_failed = (
        attachment_formset
        and not attachment_formset.is_valid()
        and len(attachment_formset.errors)
    )
    if formset_failed:
        messages.error(self.request, self.attachment_formset_general_error_message)
    context = self.get_context_data(post_form=post_form, attachment_formset=attachment_formset, **kwargs)
    return self.render_to_response(context)
def train(self, net_sizes, epochs, batchsize):
    """Initialize the base trainer, run training, and return its evaluation."""
    trainer = ClassificationTrainer(self.data, self.targets, net_sizes)
    self.trainer = trainer
    trainer.learn(epochs, batchsize)
    return trainer.evaluate(batchsize)
def invalid_pixel_mask(self):
    """Return a binary mask flagging the NaN- and zero-valued pixels.

    Serves as a mask for invalid pixels.

    Returns
    -------
    :obj:`BinaryImage`
        Binary image where a pixel value greater than zero indicates an
        invalid pixel.
    """
    # Allocate the mask buffer directly in uint8.
    mask = np.zeros((self.height, self.width, 1), dtype=np.uint8)
    # Flag both zero-valued and NaN-valued pixel locations as invalid.
    for bad_pixels in (self.zero_pixels(), self.nan_pixels()):
        mask[bad_pixels[:, 0], bad_pixels[:, 1]] = BINARY_IM_MAX_VAL
    return BinaryImage(mask, frame=self.frame)
def _read_body_by_length ( self , response , file ) :
    '''Read a response body whose size is given by Content-Length.

    Falls back to reading until close when the Content-Length header is
    missing or invalid. Read data is dispatched to the data event
    dispatcher, decompressed, and written to ``file`` (draining when the
    file is async). Raises ``NetworkError`` if the connection closes
    before the full body arrives.

    Coroutine.
    '''
    _logger . debug ( 'Reading body by length.' )
    # Async sinks expose drain(); we must yield to it after each write.
    file_is_async = hasattr ( file , 'drain' )
    try :
        body_size = int ( response . fields [ 'Content-Length' ] )
        if body_size < 0 :
            raise ValueError ( 'Content length cannot be negative.' )
    except ValueError as error :
        # Bad or missing Content-Length: degrade to read-until-close.
        _logger . warning ( __ ( _ ( 'Invalid content length: {error}' ) , error = error ) )
        yield from self . _read_body_until_close ( response , file )
        return
    bytes_left = body_size
    while bytes_left > 0 :
        data = yield from self . _connection . read ( self . _read_size )
        if not data :
            # Peer closed early; handled after the loop.
            break
        bytes_left -= len ( data )
        if bytes_left < 0 :
            # Server sent more than advertised: truncate the surplus and
            # drop the connection.
            data = data [ : bytes_left ]
            _logger . warning ( _ ( 'Content overrun.' ) )
            self . close ( )
        self . _data_event_dispatcher . notify_read ( data )
        content_data = self . _decompress_data ( data )
        if file :
            file . write ( content_data )
            if file_is_async :
                yield from file . drain ( )
    if bytes_left > 0 :
        raise NetworkError ( 'Connection closed.' )
    # Flush any data still buffered in the decompressor.
    content_data = self . _flush_decompressor ( )
    if file and content_data :
        file . write ( content_data )
        if file_is_async :
            yield from file . drain ( )
def load_fasta_file_as_dict_of_seqrecords(filename):
    """Load a FASTA file and return the sequences as a dict of {ID: SeqRecord}.

    Args:
        filename (str): Path to the FASTA file to load

    Returns:
        dict: Dictionary of IDs to their SeqRecords
    """
    # Later records with a duplicate id overwrite earlier ones.
    return {record.id: record for record in load_fasta_file(filename)}
def p_load_code ( p ) :
    """statement : load _ or _ verify expr ID
    | load _ or _ verify expr CODE
    | load _ or _ verify expr CODE expr
    | load _ or _ verify expr CODE expr COMMA expr"""
    # NOTE: the docstring above is the PLY grammar rule for this production
    # and must not be edited as prose.
    # The filename expression must be a string.
    if p [ 2 ] . type_ != TYPE . string :
        api . errmsg . syntax_error_expected_string ( p . lineno ( 3 ) , p [ 2 ] . type_ )
    if len ( p ) == 4 :
        # Bare ID/CODE form: only SCREEN, SCREEN$ or CODE are accepted.
        if p [ 3 ] . upper ( ) not in ( 'SCREEN' , 'SCREEN$' , 'CODE' ) :
            syntax_error ( p . lineno ( 3 ) , 'Unexpected "%s" ID. Expected "SCREEN$" instead' % p [ 3 ] )
            return None
        else :
            if p [ 3 ] . upper ( ) == 'CODE' : # LOAD "..." CODE
                # No explicit address/length: use 0/0 placeholders.
                start = make_number ( 0 , lineno = p . lineno ( 3 ) )
                length = make_number ( 0 , lineno = p . lineno ( 3 ) )
            else : # SCREEN$
                # ZX Spectrum screen memory: address 16384, length 6912.
                start = make_number ( 16384 , lineno = p . lineno ( 3 ) )
                length = make_number ( 6912 , lineno = p . lineno ( 3 ) )
    else :
        # CODE with an explicit start address (and optional length).
        start = make_typecast ( TYPE . uinteger , p [ 4 ] , p . lineno ( 3 ) )
        if len ( p ) == 5 :
            length = make_number ( 0 , lineno = p . lineno ( 3 ) )
        else :
            length = make_typecast ( TYPE . uinteger , p [ 6 ] , p . lineno ( 5 ) )
    p [ 0 ] = make_sentence ( p [ 1 ] , p [ 2 ] , start , length )
def default():
    """Retrieve the default Context object, creating it if necessary.

    The default Context is a global shared instance used every time the
    default context is retrieved.

    Attempting to use a Context with no project_id will raise an
    exception, so on first use set_project_id must be called.

    Returns:
        An initialized and shared instance of a Context object.
    """
    credentials = _utils.get_credentials()
    if Context._global_context is not None:
        # Refresh credentials in case the access token was revoked or
        # expired since the context was created.
        Context._global_context.set_credentials(credentials)
    else:
        project = _project.Projects.get_default_id(credentials)
        Context._global_context = Context(project, credentials)
    return Context._global_context
def getLayer(self, name):
    """Get the :class:`BaseLayer` with **name**.

    >>> layer = font.getLayer("My Layer 2")
    """
    name = normalizers.normalizeLayerName(name)
    if name in self.layerOrder:
        layer = self._getLayer(name)
        self._setFontInLayer(layer)
        return layer
    raise ValueError("No layer with the name '%s' exists." % name)
def check_has_docstring(self, api):
    '''An API class must have a docstring.

    Returns a one-element list of error messages when the docstring is
    missing, or None otherwise.
    '''
    if api.__doc__:
        return None
    return ['The Api class "{}" lacks a docstring.'.format(api.__name__)]
def grant_token(self):
    """Fetch an Access Token from the WeChat API.

    :return: the parsed JSON payload returned by the endpoint
    """
    params = {
        "grant_type": "client_credential",
        "appid": self.appid,
        "secret": self.appsecret,
    }
    return self.get(url="https://api.weixin.qq.com/cgi-bin/token", params=params)
def get_content(self, url):
    """Return the content of a cached resource.

    Args:
        url: The url of the resource

    Returns:
        The content of the cached resource, or None if not in the cache
    """
    try:
        with open(self._url_to_path(url), 'rb') as handle:
            return handle.read()
    except IOError:
        # Missing/unreadable cache entry is a normal miss, not an error.
        return None
def train(self):
    '''Run the 'train' subcommand: parse CLI args and launch a Trainer.'''
    # Build the subcommand's argument parser.
    parser = argparse.ArgumentParser(description='Train a dialogue model on a dialogue corpus or a dsrt dataset')
    self.init_train_args(parser)
    # Skip the program name and subcommand when parsing.
    parsed = parser.parse_args(sys.argv[2:])
    parsed.config = ConfigurationLoader(parsed.config).load().model_config
    print(CLI_DIVIDER + '\n')
    Trainer(**vars(parsed)).run()
def prt_hier_down(self, goid, prt=sys.stdout):
    """Write the hierarchy of all GO IDs below the given GO ID."""
    cfg = self._get_wrhiercfg()
    writer = WrHierPrt(self.gosubdag.go2obj, self.gosubdag.go2nt, cfg, prt)
    writer.prt_hier_rec(goid)
    return writer.items_list
def _get_attrs ( self ) :
"""An internal helper for the representation methods""" | attrs = [ ]
attrs . append ( ( "N Blocks" , self . n_blocks , "{}" ) )
bds = self . bounds
attrs . append ( ( "X Bounds" , ( bds [ 0 ] , bds [ 1 ] ) , "{:.3f}, {:.3f}" ) )
attrs . append ( ( "Y Bounds" , ( bds [ 2 ] , bds [ 3 ] ) , "{:.3f}, {:.3f}" ) )
attrs . append ( ( "Z Bounds" , ( bds [ 4 ] , bds [ 5 ] ) , "{:.3f}, {:.3f}" ) )
return attrs |
def gadf ( y , method = "Quantiles" , maxk = 15 , pct = 0.8 ) :
    """Evaluate the Goodness of Absolute Deviation Fit (GADF) of a classifier.

    Finds the minimum number of classes ``k`` for which ``gadf > pct``.

    Parameters
    ----------
    y : array
        (n, 1) values to be classified.
    method : {'Quantiles', 'Fisher_Jenks', 'Maximum_Breaks', 'Natural_Breaks'}
    maxk : int
        Maximum value of k to evaluate.
    pct : float
        The percentage of GADF to exceed.

    Returns
    -------
    k : int
        Number of classes.
    cl : object
        Instance of the classifier at k.
    gadf : float
        Goodness of absolute deviation fit.

    Notes
    -----
    The GADF is defined as:

    .. math::

        GADF = 1 - \\sum_c \\sum_{i \\in c} |y_i - y_{c,med}|
               / \\sum_i |y_i - y_{med}|

    where :math:`y_{med}` is the global median and :math:`y_{c,med}` is
    the median for class :math:`c`.

    Examples
    --------
    >>> import mapclassify as mc
    >>> cal = mc.load_example()
    >>> qgadf = mc.classifiers.gadf(cal)
    >>> qgadf[0]
    15
    >>> qgadf[-1]
    0.3740257590909283

    Quantiles fail to exceed 0.80 before 15 classes. If we lower the bar
    to 0.2 we see quintiles as a result:

    >>> qgadf2 = mc.classifiers.gadf(cal, pct=0.2)
    >>> qgadf2[0]
    5
    >>> qgadf2[-1]
    0.21710231966462412

    See Also
    --------
    K_classifiers
    """
    y = np . array ( y )
    # Total absolute deviation from the global median (the denominator).
    adam = ( np . abs ( y - np . median ( y ) ) ) . sum ( )
    # Increase k until the fit exceeds the requested percentage.
    # NOTE(review): if maxk < 2 the loop never runs and the return below
    # raises NameError; callers are expected to pass maxk >= 2.
    for k in range ( 2 , maxk + 1 ) :
        cl = kmethods [ method ] ( y , k )
        gadf = 1 - cl . adcm / adam
        if gadf > pct :
            break
    return ( k , cl , gadf )
def get_basis_family(basis_name, data_dir=None):
    '''Look up the family of a basis set by name.'''
    metadata = _get_basis_metadata(basis_name, fix_data_dir(data_dir))
    return metadata['family']
def _assert_correct_model ( model_to_check , model_reference , obj_name ) :
"""Helper that asserts the model _ to _ check is the model _ reference or one of
its subclasses . If not , raise an ImplementationError , using " obj _ name "
to describe the name of the argument .""" | if not issubclass ( model_to_check , model_reference ) :
raise ConfigurationException ( 'The %s model must be a subclass of %s' % ( obj_name , model_reference . __name__ ) ) |
def read_machine_header(data):
    """Parse a binary machine header.

    @data - bytes/bytearray containing the binary header, or a file opened
            in 'rb' mode
    @return - dict with the parsed header fields
    """
    if isinstance(data, (bytes, bytearray)):
        stream = io.BytesIO(data)
    elif isinstance(data, io.BufferedReader):
        stream = data
    else:
        raise ValueError("data should be either bytearray or file 'rb' mode.")

    def read_u32():
        # Big-endian unsigned 32-bit field.
        return struct.unpack('>I', stream.read(4))[0]

    header = {}
    header_type = stream.read(6)
    if header_type == b"#!\x00\x01@\x00":
        header['type'] = header_type[2:6]
        header['time'] = read_u32()
        header['meta_type'] = read_u32()
        header['meta_len'] = read_u32()
        header['data_type'] = read_u32()
        header['data_len'] = read_u32()
        stream.read(4)  # skip trailing padding
    elif header_type == b"#~DF02":
        header['type'] = header_type[2:6]
        header['meta_type'] = stream.read(2)
        header['meta_len'] = read_u32()
        header['data_len'] = read_u32()
        stream.read(4)  # skip trailing padding
    else:
        raise NotImplementedError("Parser for machine header %s not implemented" % (header_type.decode()))
    return header
def strip_and_uniq(tab):
    """Strip every element of a list and keep ordered unique values.

    :param tab: list of strings to strip
    :type tab: list
    :return: stripped list with unique, non-empty values in first-seen order
    :rtype: list
    """
    result = []
    # A parallel set gives O(1) membership tests; the original scanned the
    # growing result list, making the function O(n^2).
    seen = set()
    for elt in tab:
        val = elt.strip()
        if val and val not in seen:
            seen.add(val)
            result.append(val)
    return result
def list_assigned_licenses ( entity , entity_display_name , license_keys = None , service_instance = None ) :
    '''List the licenses assigned to an entity.

    entity
        Dictionary representation of an entity.
        See ``_get_entity`` docstrings for format.

    entity_display_name
        Entity name used in logging.

    license_keys:
        List of license keys to be retrieved. Default is None (return all
        assigned licenses).

    service_instance
        Service instance (vim.ServiceInstance) of the vCenter/ESXi host.
        Default is None.

    .. code-block:: bash

        salt '*' vsphere.list_assigned_licenses
            entity={type:cluster,datacenter:dc,cluster:cl}
            entity_display_name=cl
    '''
    log . trace ( 'Listing assigned licenses of entity %s' , entity )
    _validate_entity ( entity )
    assigned_licenses = salt . utils . vmware . get_assigned_licenses ( service_instance , entity_ref = _get_entity ( service_instance , entity ) , entity_name = entity_display_name )
    return [ { 'key' : l . licenseKey , 'name' : l . name , 'description' : l . labels [ 0 ] . value if l . labels else None , # VMware reports unlimited capacity as total == 0; map it to sys.maxsize
    'capacity' : l . total if l . total > 0 else sys . maxsize } for l in assigned_licenses if ( license_keys is None ) or ( l . licenseKey in license_keys ) ]
def vectorial_decomp ( self , symbols ) :
    '''Compute the vectorial decomposition of the expression according to the given symbols.

    ``symbols`` is a list that represents the input of the resulting
    application. They are considered as a flattened vector of bits; a
    non-iterable argument is passed through unchanged (the TypeError
    fallback below).

    Args:
        symbols: list of expression objects (each exposing ``.vec``), or
            an already-flattened vector.

    Returns:
        An :class:`pytanque.App` object.

    Example:
        >>> mba = MBA(4)
        >>> x = mba.var('x')
        >>> y = mba.var('y')
        >>> e = x ^ y ^ 6
        >>> e.vectorial_decomp([x, y])
        App NL = Vec([
        AffApp matrix = Mat([
        [1, 0, 0, 0, 1, 0, 0, 0]
        [0, 1, 0, 0, 0, 1, 0, 0]
        [0, 0, 1, 0, 0, 0, 1, 0]
        [0, 0, 0, 1, 0, 0, 0, 1]
        AffApp cst = Vec([
    '''
    try :
        # Flatten the per-symbol bit vectors into one contiguous Vector.
        symbols = [ s . vec for s in symbols ]
        N = sum ( map ( lambda s : len ( s ) , symbols ) )
        symbols_ = Vector ( N )
        i = 0
        for v in symbols :
            for s in v :
                symbols_ [ i ] = s
                i += 1
        symbols = symbols_
    except TypeError :
        # `symbols` was not an iterable of expressions; assume it is
        # already in vector form and pass it through unchanged.
        pass
    return self . mba . vectorial_decomp ( symbols , self . vec )
def errors ( source , model , wcshelper ) :
    """Convert pixel-based fit errors into sky-coordinate errors.

    Propagates the lmfit parameter uncertainties (position, widths, angle,
    amplitude) through the pixel->sky conversion and writes the resulting
    error attributes onto ``source``.

    Parameters
    ----------
    source : :class:`AegeanTools.models.SimpleSource`
        The source which was fit.
    model : lmfit.Parameters
        The model which was fit.
    wcshelper : :class:`AegeanTools.wcs_helpers.WCSHelper`
        WCS information.

    Returns
    -------
    source : :class:`AegeanTools.models.SimpleSource`
        The modified source object.
    """
    # If the source wasn't fit then all errors are masked out.
    if source . flags & ( flags . NOTFIT | flags . FITERR ) :
        source . err_peak_flux = source . err_a = source . err_b = source . err_pa = ERR_MASK
        source . err_ra = source . err_dec = source . err_int_flux = ERR_MASK
        return source
    # Copy the per-component errors from the fitted model parameters.
    prefix = "c{0}_" . format ( source . source )
    err_amp = model [ prefix + 'amp' ] . stderr
    xo , yo = model [ prefix + 'xo' ] . value , model [ prefix + 'yo' ] . value
    err_xo = model [ prefix + 'xo' ] . stderr
    err_yo = model [ prefix + 'yo' ] . stderr
    sx , sy = model [ prefix + 'sx' ] . value , model [ prefix + 'sy' ] . value
    err_sx = model [ prefix + 'sx' ] . stderr
    err_sy = model [ prefix + 'sy' ] . stderr
    theta = model [ prefix + 'theta' ] . value
    err_theta = model [ prefix + 'theta' ] . stderr
    source . err_peak_flux = err_amp
    pix_errs = [ err_xo , err_yo , err_sx , err_sy , err_theta ]
    log . debug ( "Pix errs: {0}" . format ( pix_errs ) )
    ref = wcshelper . pix2sky ( [ xo , yo ] )
    # Check to see if the reference position has a valid WCS coordinate.
    # It is possible for this to fail even if the ra/dec conversion works
    # elsewhere.
    if not all ( np . isfinite ( ref ) ) :
        source . flags |= flags . WCSERR
        source . err_peak_flux = source . err_a = source . err_b = source . err_pa = ERR_MASK
        source . err_ra = source . err_dec = source . err_int_flux = ERR_MASK
        return source
    # Position errors: offset the pixel position by its 1-sigma errors and
    # measure the resulting great-circle distances.
    if model [ prefix + 'xo' ] . vary and model [ prefix + 'yo' ] . vary and all ( np . isfinite ( [ err_xo , err_yo ] ) ) :
        offset = wcshelper . pix2sky ( [ xo + err_xo , yo + err_yo ] )
        source . err_ra = gcd ( ref [ 0 ] , ref [ 1 ] , offset [ 0 ] , ref [ 1 ] )
        source . err_dec = gcd ( ref [ 0 ] , ref [ 1 ] , ref [ 0 ] , offset [ 1 ] )
    else :
        # NOTE(review): hard-coded -1 here vs ERR_MASK elsewhere -- confirm
        # whether ERR_MASK == -1 or this is an inconsistency.
        source . err_ra = source . err_dec = - 1
    if model [ prefix + 'theta' ] . vary and np . isfinite ( err_theta ) : # pa error
        # Compare bearings of the major axis endpoint at theta and at
        # theta + err_theta.
        off1 = wcshelper . pix2sky ( [ xo + sx * np . cos ( np . radians ( theta ) ) , yo + sy * np . sin ( np . radians ( theta ) ) ] )
        off2 = wcshelper . pix2sky ( [ xo + sx * np . cos ( np . radians ( theta + err_theta ) ) , yo + sy * np . sin ( np . radians ( theta + err_theta ) ) ] )
        source . err_pa = abs ( bear ( ref [ 0 ] , ref [ 1 ] , off1 [ 0 ] , off1 [ 1 ] ) - bear ( ref [ 0 ] , ref [ 1 ] , off2 [ 0 ] , off2 [ 1 ] ) )
    else :
        source . err_pa = ERR_MASK
    if model [ prefix + 'sx' ] . vary and model [ prefix + 'sy' ] . vary and all ( np . isfinite ( [ err_sx , err_sy ] ) ) : # major axis error
        # Great-circle distance (arcsec) between the axis endpoint and the
        # endpoint shifted by the width error.
        ref = wcshelper . pix2sky ( [ xo + sx * np . cos ( np . radians ( theta ) ) , yo + sy * np . sin ( np . radians ( theta ) ) ] )
        offset = wcshelper . pix2sky ( [ xo + ( sx + err_sx ) * np . cos ( np . radians ( theta ) ) , yo + sy * np . sin ( np . radians ( theta ) ) ] )
        source . err_a = gcd ( ref [ 0 ] , ref [ 1 ] , offset [ 0 ] , offset [ 1 ] ) * 3600
        # Minor axis error: same approach, perpendicular direction.
        ref = wcshelper . pix2sky ( [ xo + sx * np . cos ( np . radians ( theta + 90 ) ) , yo + sy * np . sin ( np . radians ( theta + 90 ) ) ] )
        offset = wcshelper . pix2sky ( [ xo + sx * np . cos ( np . radians ( theta + 90 ) ) , yo + ( sy + err_sy ) * np . sin ( np . radians ( theta + 90 ) ) ] )
        source . err_b = gcd ( ref [ 0 ] , ref [ 1 ] , offset [ 0 ] , offset [ 1 ] ) * 3600
    else :
        source . err_a = source . err_b = ERR_MASK
    # Integrated-flux error: combine the relative errors in quadrature,
    # skipping any component whose error is masked (<= 0).
    sqerr = 0
    sqerr += ( source . err_peak_flux / source . peak_flux ) ** 2 if source . err_peak_flux > 0 else 0
    sqerr += ( source . err_a / source . a ) ** 2 if source . err_a > 0 else 0
    sqerr += ( source . err_b / source . b ) ** 2 if source . err_b > 0 else 0
    if sqerr == 0 :
        source . err_int_flux = ERR_MASK
    else :
        source . err_int_flux = abs ( source . int_flux * np . sqrt ( sqerr ) )
    return source
def loadDHCPOptions(self, address_family, options):
    """Build a high-level :class:`ns1.ipam.DHCPOptions` object.

    :param str address_family: address family of the options; either
        ``dhcpv4`` or ``dhcpv6``
    :param dict options: option set to apply for this address family;
        only the options given are applied.  Allowed keys are listed in
        :attr:`ns1.ipam.DHCPOptions.OPTIONS`
    :returns: a new :class:`ns1.ipam.DHCPOptions` instance
    """
    # Imported lazily -- presumably to avoid a circular import at module
    # load time (TODO confirm).
    from ns1.ipam import DHCPOptions
    return DHCPOptions(address_family, options)
def info(package, long_description, classifiers, license):
    """Get info about a package or packages.

    For each name or URL in *package*, fetch the package and print its
    summary, latest release, download summary, author/maintainer contact
    details and URLs; optionally the long description, trove classifiers
    and license.

    :param package: iterable of package names or URLs to inspect
    :param long_description: if truthy, also print the long description
    :param classifiers: if truthy, also print the trove classifiers
    :param license: if truthy, also print the license name or text
        (NOTE(review): this parameter shadows the ``license`` builtin)
    """
    # One HTTP session shared across all lookups (connection reuse).
    client = requests.Session()
    for name_or_url in package:
        # NOTE(review): rebinds the ``package`` parameter to the fetched
        # package object each iteration; the loop itself is unaffected
        # because the for-loop already holds its iterator.
        package = get_package(name_or_url, client)
        if not package:
            secho(u'Invalid name or URL: "{name}"'.format(name=name_or_url), fg='red', file=sys.stderr)
            continue
        # Name and summary
        try:
            info = package.data['info']
        except NotFoundError:
            secho(u'No versions found for "{0}". ' u'Skipping. . .'.format(package.name), fg='red', file=sys.stderr)
            continue
        echo_header(name_or_url)
        if package.summary:
            echo(package.summary)
        # Version info
        echo()
        echo('Latest release: {version:12}'.format(version=info['version']))
        # Long description (opt-in)
        if long_description:
            echo()
            echo(package.description)
        # Download info
        echo()
        echo_download_summary(package)
        # Author info
        echo()
        author, author_email = package.author, package.author_email
        if author:
            echo(u'Author: {author:12}'.format(**locals()))
        if author_email:
            echo(u'Author email: {author_email:12}'.format(**locals()))
        # Maintainer info
        maintainer, maintainer_email = (package.maintainer, package.maintainer_email)
        if maintainer or maintainer_email:
            echo()
        if maintainer:
            echo(u'Maintainer: {maintainer:12}'.format(**locals()))
        if maintainer_email:
            echo(u'Maintainer email: {maintainer_email:12}'.format(**locals()))
        # URLS
        echo()
        echo(u'PyPI URL: {pypi_url:12}'.format(pypi_url=package.package_url))
        if package.home_page:
            echo(u'Home Page: {home_page:12}'.format(home_page=package.home_page))
        if package.docs_url:
            echo(u'Documentation: {docs_url:12}'.format(docs_url=package.docs_url))
        # Classifiers (opt-in)
        if classifiers:
            echo()
            echo(u'Classifiers: ')
            for each in info.get('classifiers', []):
                echo('\t' + each)
        if license and package.license:
            echo()
            echo(u'License: ', nl=False)
            # license may be just a name, e.g. 'BSD' or the full license text
            # If a new line is found in the text, print a new line
            if package.license.find('\n') >= 0 or len(package.license) > 80:
                echo()
            echo(package.license)
        # Blank separator line after each package.
        echo()
def closeEvent(self, event):
    """Emit the last selected file just before the window closes.

    :param event: the close event (unused)
    :type event:
    :returns: None
    :rtype: None
    :raises: None
    """
    selection = self.browser.get_current_selection()
    if selection:
        self.last_file.emit(selection)
    # NOTE(review): this delegates to close() rather than the parent's
    # closeEvent(event) -- presumably intentional; confirm against Qt
    # event-handling semantics.
    return super(GenesisWin, self).close()
def make_db_data_fetcher(postgresql_conn_info, template_path, reload_templates, query_cfg, io_pool):
    """Build a DataFetcher callable.

    The returned object is callable with the zoom and unpadded bounds and
    returns a list of rows.

    :param postgresql_conn_info: connection settings for PostgreSQL
    :param template_path: path to the query templates
    :param reload_templates: whether templates are re-read on each use
    :param query_cfg: configuration describing the query sources
    :param io_pool: pool used for I/O-bound work
    """
    parsed_sources = parse_source_data(query_cfg)
    generator = make_queries_generator(parsed_sources, template_path, reload_templates)
    return DataFetcher(postgresql_conn_info, generator, io_pool)
def _get_var_array(self, k, use_raw=False, layer='X'):
    """Resolve *k* against ``var.keys`` first and then ``obs.index``, and
    return the matching array from *layer* (default ``'X'``) along the
    variables dimension.

    :param k: key to resolve
    :param use_raw: resolve against ``.raw`` instead of the annotated data
    :param layer: layer to pull the array from; ``'X'`` is the main matrix
    :raises ValueError: when ``.raw`` is requested but absent, when a
        non-``'X'`` layer is requested from ``.raw``, or when *k*/*layer*
        cannot be found
    """
    if use_raw and self.raw is None:
        raise ValueError(".raw doesn't exist")
    found_in_raw = self.raw is not None and k in self.raw.obs_names
    # Annotation column wins over observation lookup.
    if k in self.var.keys():
        return self._var[k]
    if found_in_raw and use_raw and layer == 'X':
        return self.raw[k].X
    if k in self.obs_names and not use_raw and (layer == 'X' or layer in self.layers.keys()):
        return self[k].X if layer == 'X' else self[k].layers[layer]
    # Nothing matched: report the most specific failure we can identify.
    if use_raw and layer != 'X':
        raise ValueError('No layers in .raw')
    if layer != 'X' and layer not in self.layers.keys():
        raise ValueError('Did not find {} in layers.keys.'.format(layer))
    raise ValueError('Did not find {} in var.keys or obs_names.'.format(k))
def update(self, friendly_name=values.unset, chat_service_sid=values.unset, channel_type=values.unset, contact_identity=values.unset, enabled=values.unset, integration_type=values.unset, integration_flow_sid=values.unset, integration_url=values.unset, integration_workspace_sid=values.unset, integration_workflow_sid=values.unset, integration_channel=values.unset, integration_timeout=values.unset, integration_priority=values.unset, integration_creation_on_message=values.unset, long_lived=values.unset):
    """Update the FlexFlowInstance.

    :param unicode friendly_name: Human readable description of this FlexFlow
    :param unicode chat_service_sid: Service Sid.
    :param FlexFlowInstance.ChannelType channel_type: Channel type
    :param unicode contact_identity: Channel contact Identity
    :param bool enabled: Boolean flag for enabling or disabling the FlexFlow
    :param FlexFlowInstance.IntegrationType integration_type: Integration type
    :param unicode integration_flow_sid: Flow Sid.
    :param unicode integration_url: External Webhook Url
    :param unicode integration_workspace_sid: Workspace Sid for a new task
    :param unicode integration_workflow_sid: Workflow Sid for a new task
    :param unicode integration_channel: Task Channel for a new task
    :param unicode integration_timeout: Task timeout in seconds for a new task
    :param unicode integration_priority: Task priority for a new task
    :param bool integration_creation_on_message: Flag for task creation
    :param bool long_lived: Long Lived flag for new Channel

    :returns: Updated FlexFlowInstance
    :rtype: twilio.rest.flex_api.v1.flex_flow.FlexFlowInstance
    """
    # Collect the keyword arguments once, then delegate to the proxy.
    fields = dict(
        friendly_name=friendly_name,
        chat_service_sid=chat_service_sid,
        channel_type=channel_type,
        contact_identity=contact_identity,
        enabled=enabled,
        integration_type=integration_type,
        integration_flow_sid=integration_flow_sid,
        integration_url=integration_url,
        integration_workspace_sid=integration_workspace_sid,
        integration_workflow_sid=integration_workflow_sid,
        integration_channel=integration_channel,
        integration_timeout=integration_timeout,
        integration_priority=integration_priority,
        integration_creation_on_message=integration_creation_on_message,
        long_lived=long_lived,
    )
    return self._proxy.update(**fields)
def change_column(self, table, column_name, field):
    """Return the migration operations that change a column's definition.

    :param table: table holding the column
    :param column_name: name of the column to change
    :param field: new field definition; its ``null`` attribute decides
        whether a NOT NULL constraint must be (re)applied
    :returns: list of operations, in execution order
    """
    ops = [self.alter_change_column(table, column_name, field)]
    if not field.null:
        # ALTER alone does not restore the constraint -- add it explicitly.
        ops.append(self.add_not_null(table, column_name))
    return ops
def update(self, app_id, data):
    """Update app identified by app_id with data.

    :params:
        * app_id (int) id in the marketplace received with :method:`create`
        * data (dict) some keys are required:
            * *name*: the title of the app. Maximum length 127 characters.
            * *summary*: the summary of the app. Maximum length 255
              characters.
            * *categories*: a list of the categories, at least two of the
              category ids provided from the category api (see below).
            * *support_email*: the email address for support.
            * *device_types*: a list of the device types at least one of:
              'desktop', 'phone', 'tablet'.
            * *payment_type*: only choice at this time is 'free'.
    :returns: HttResponse:
        * status_code (int) 202 if successful
        * content (dict) or empty if successful
    :raises ValueError: if a required key is missing or empty.
    """
    # Validation was previously done with ``assert``, which is silently
    # stripped under ``python -O``; raise an explicit error instead.
    problems = []
    if 'summary' not in data:  # summary must be present but may be empty
        problems.append('summary')
    for key in ('name', 'categories', 'support_email', 'device_types',
                'payment_type', 'privacy_policy'):
        if not data.get(key):
            problems.append(key)
    if problems:
        raise ValueError('Missing or empty required fields: %s'
                         % ', '.join(problems))
    return self.conn.fetch('PUT', self.url('app') % app_id, data)
def resolve_variable(var_name, var_def, provided_variable, blueprint_name):
    """Resolve a provided variable value against the variable definition.

    Args:
        var_name (str): The name of the defined variable on a blueprint.
        var_def (dict): A dictionary representing the defined variable's
            attributes.
        provided_variable (:class:`stacker.variables.Variable`): The variable
            value provided to the blueprint.
        blueprint_name (str): The name of the blueprint that the variable is
            being applied to.

    Returns:
        object: The resolved variable value, could be any python object.

    Raises:
        MissingVariable: Raised when a variable with no default is not
            provided a value.
        UnresolvedVariable: Raised when the provided variable is not already
            resolved.
        ValueError: Raised when the value is not the right type and cannot be
            cast as the correct type. Raised by
            :func:`stacker.blueprints.base.validate_variable_type`
        ValidatorError: Raised when a validator raises an exception. Wraps the
            original exception.
    """
    if "type" not in var_def:
        raise VariableTypeRequired(blueprint_name, var_name)
    var_type = var_def["type"]

    # Prefer the provided value; fall back to the definition's default.
    if provided_variable:
        if not provided_variable.resolved:
            raise UnresolvedVariable(blueprint_name, provided_variable)
        value = provided_variable.value
    elif "default" in var_def:
        value = var_def["default"]
    else:
        raise MissingVariable(blueprint_name, var_name)

    # If no validator, pass the value through unchanged.
    validator = var_def.get("validator", lambda v: v)
    try:
        value = validator(value)
    except Exception as exc:
        raise ValidatorError(var_name, validator.__name__, value, exc)

    # Ensure the resulting value is the correct type.
    value = validate_variable_type(var_name, var_type, value)

    allowed_values = var_def.get("allowed_values")
    if not validate_allowed_values(allowed_values, value):
        message = ("Invalid value passed to '%s' in blueprint: %s. Got: '%s', "
                   "expected one of %s") % (var_name, blueprint_name, value,
                                            allowed_values)
        raise ValueError(message)
    return value
def append(self, obj):
    """Append an object to the end of the collection.

    A plain string is wrapped in a :class:`Word <Word>` first; any other
    object is appended unchanged.
    """
    item = Word(obj) if isinstance(obj, basestring) else obj
    return self._collection.append(item)
def write_to_path(self, path, suffix='', format='png', overwrite=False):
    """Write the dataframe's 'image' column to a directory tree structured
    project -> sample and named by frame.

    Args:
        path (str): Where to write the directory of images
        suffix (str): optional label appended to each image file name
        format (str): default 'png'; format to write the file
        overwrite (bool): default False; if True, files in *path* may be
            overwritten

    Modifies:
        Creates *path* (and per-sample subfolders) if necessary and writes
        one image per row.

    Raises:
        ValueError: if *path* exists and overwrite is False
    """
    # Typo fixed in the message below ("ovewrite" -> "overwrite").
    if os.path.exists(path) and overwrite is False:
        raise ValueError("Error: use overwrite=True to overwrite images")
    if not os.path.exists(path):
        os.makedirs(path)
    for _, row in self.iterrows():
        sample_dir = os.path.join(path, row['project_name'], row['sample_name'])
        if not os.path.exists(sample_dir):
            os.makedirs(sample_dir)
        # File name: <frame_name>[_<suffix>].<format>
        if suffix == '':
            base = row['frame_name']
        else:
            base = row['frame_name'] + '_' + suffix
        fname = os.path.join(sample_dir, base + '.' + format)
        imageio.imwrite(fname, row['image'], format=format)
def param(name, value_info, is_required=True, label=None, desc=None):
    """Annotate a parameter of the action being defined.

    @param name: name of the parameter defined.
    @type name: unicode or str
    @param value_info: the parameter value information.
    @type value_info: value.IValueInfo
    @param is_required: if the parameter is required or optional.
    @type is_required: bool
    @param label: the parameter label or None.
    @type label: str or unicode or None
    @param desc: the parameter description or None.
    @type desc: str or unicode or None
    """
    extras = dict(is_required=is_required, label=label, desc=desc)
    _annotate("param", name, value_info, **extras)
def wrapinstance(ptr, base=None):
    """Convert a pointer to a Qt class instance (PySide/PyQt compatible).

    @param ptr: pointer value of the underlying C++ object, or None
    @param base: optional Qt class to wrap the pointer as; when None the
        class is inferred from the meta-object (PySide) or defaults to
        QWidget (PyQt4)
    @return: the wrapped Qt instance, or None when ptr is None or the
        active binding is neither PySide nor PyQt4
    """
    if ptr is None:
        return None
    # Ensure type (Python 2 long -- this module targets py2 bindings)
    ptr = long(ptr)
    from wishlib.qt import active, QtCore, QtGui
    if active == "PySide":
        import shiboken
        if base is None:
            # Infer the most specific QtGui class from the meta-object.
            qObj = shiboken.wrapInstance(ptr, QtCore.QObject)
            metaObj = qObj.metaObject()
            cls = metaObj.className()
            superCls = metaObj.superClass().className()
            if hasattr(QtGui, cls):
                base = getattr(QtGui, cls)
            elif hasattr(QtGui, superCls):
                base = getattr(QtGui, superCls)
            else:
                base = QtGui.QWidget
        return shiboken.wrapInstance(ptr, base)
    elif active == "PyQt4":
        import sip
        # Fix: honor an explicitly supplied ``base`` instead of always
        # wrapping as QWidget, for parity with the PySide branch.
        return sip.wrapinstance(ptr, base if base is not None else QtGui.QWidget)
    return None
def socket_closed(self, sock):
    """Return True if we know socket has been closed, False otherwise.

    Performs a zero-timeout readability check: a closed socket reports
    readable (EOF), so any pending read-event is treated as "closed".
    Poll/select errors that indicate an invalid descriptor also count as
    closed.
    """
    while True:
        try:
            if self._poller:
                # Temporarily register the socket with the shared poller;
                # the lock serializes access so concurrent checks don't
                # clobber each other's registrations.
                with self._lock:
                    self._poller.register(sock, _EVENT_MASK)
                    try:
                        rd = self._poller.poll(0)
                    finally:
                        self._poller.unregister(sock)
            else:
                # No poller available: fall back to a zero-timeout select.
                rd, _, _ = select.select([sock], [], [], 0)
        except (RuntimeError, KeyError):
            # RuntimeError is raised during a concurrent poll. KeyError
            # is raised by unregister if the socket is not in the poller.
            # These errors should not be possible since we protect the
            # poller with a mutex.
            raise
        except ValueError:
            # ValueError is raised by register/unregister/select if the
            # socket file descriptor is negative or outside the range for
            # select (> 1023).
            return True
        except (_SELECT_ERROR, IOError) as exc:
            # Transient interruptions are retried; anything else means
            # the descriptor is unusable, i.e. effectively closed.
            if _errno_from_exception(exc) in (errno.EINTR, errno.EAGAIN):
                continue
            return True
        except Exception:
            # Any other exceptions should be attributed to a closed
            # or invalid socket.
            return True
        # Readable with nothing expected -> EOF/close notification.
        return len(rd) > 0
def head_title(request):
    """{% head_title request %}

    Return the page title for the current request: an explicitly set
    ``_head_title`` fragment wins; otherwise the leonardo page's title,
    with the configured site name appended when LEONARDO_SITE_NAME is set.
    """
    try:
        fragments = request._feincms_fragments
    except AttributeError:
        # Narrowed from a bare ``except``: only a missing attribute is
        # expected here; anything else should propagate.
        fragments = {}
    title = fragments.get('_head_title')
    if title:
        return title
    # Fall back to the page title, appending the site name if configured.
    page_title = getattr(request.leonardo_page, "page_title", request.leonardo_page.title)
    site_name = getattr(settings, 'LEONARDO_SITE_NAME', '')
    if site_name != '':
        return page_title + ' | ' + site_name
    return page_title
def GetAdaptersAddresses(AF=AF_UNSPEC):
    """Return all Windows Adapters addresses from iphlpapi.

    :param AF: address family filter passed to the Win32 API
        (default AF_UNSPEC, i.e. all families)
    :returns: the adapter list produced by ``_resolve_list``
    :raises RuntimeError: if either iphlpapi call fails
    """
    # First call with a NULL buffer: the API fills ``size`` with the
    # required buffer length and returns ERROR_BUFFER_OVERFLOW (0x6f).
    size = ULONG()
    flags = GAA_FLAG_INCLUDE_PREFIX
    res = _GetAdaptersAddresses(AF, flags, None, None, byref(size))
    if res != 0x6f:  # 0x6f == ERROR_BUFFER_OVERFLOW -> size populated
        raise RuntimeError("Error getting structure length (%d)" % res)
    # Now allocate a buffer of the reported size and reinterpret it as a
    # linked list of IP_ADAPTER_ADDRESSES structures.
    pointer_type = POINTER(IP_ADAPTER_ADDRESSES)
    buffer = create_string_buffer(size.value)
    AdapterAddresses = ctypes.cast(buffer, pointer_type)
    # Second call actually fills the buffer.
    res = _GetAdaptersAddresses(AF, flags, None, AdapterAddresses, byref(size))
    if res != NO_ERROR:
        raise RuntimeError("Error retrieving table (%d)" % res)
    # Walk the native linked list into Python objects before the buffer
    # goes away.
    results = _resolve_list(AdapterAddresses)
    del (AdapterAddresses)
    return results
def kendall_tau(query_dic, mark):
    """Calculate the Kendall tau metric result of a method.

    :param query_dic: dict; key is qid, value is a list of (item, bleu)
        tuples which is ranked by 'item' (descending) before scoring
    :param mark: string indicating which method is evaluated; also used
        as the output file name under ``kendall_tau_path``
    :return: ['Kendall', <result string>] where the result string carries
        the average Kendall tau over all queries
    :raises ValueError: if *query_dic* is empty (the average is undefined)
    """
    if not query_dic:
        raise ValueError('query_dic must not be empty')
    total = 0.0
    with open(kendall_tau_path + mark, 'w') as writer:
        for qid in query_dic:
            candidates = query_dic[qid]
            # Rank by the method's own score (first tuple element), then
            # measure how well the BLEU ordering agrees with it.
            ordered = sorted(candidates, key=lambda pair: pair[0], reverse=True)
            bleu_ranks = [pair[1] for pair in ordered]
            tau_value = calculate_lst_kendall(bleu_ranks)
            writer.write('%s %f\n' % (qid, tau_value))
            total += tau_value
    result_string = '%s\tkendall_tau:\t%f' % (mark, total / len(query_dic))
    # Parenthesized print works on both Python 2 and Python 3.
    print(result_string)
    return ['Kendall', result_string]
def _init_prior_posterior(self, rank, R, n_local_subj):
    """Set the prior/posterior buffers for this process.

    Parameters
    ----------
    rank : integer
        The rank of this process.
    R : list of 2D arrays, element i has shape=[n_voxel, n_dim]
        Each element contains the scanner coordinate matrix of one
        subject's fMRI data.
    n_local_subj : integer
        The number of subjects allocated to this process.

    Returns
    -------
    HTFA
        Returns the instance itself.
    """
    if rank != 0:
        # Non-root ranks only allocate a receive buffer for the broadcast.
        self.global_prior_ = np.zeros(self.prior_bcast_size)
        self.global_posterior_ = None
        self.gather_posterior = None
        return self
    # Root rank seeds the template from one randomly chosen local subject.
    chosen = np.random.choice(n_local_subj, 1)
    (self.global_prior_,
     self.global_centers_cov,
     self.global_widths_var) = self.get_template(R[chosen[0]])
    subj_count = float(self.n_subj)
    self.global_centers_cov_scaled = self.global_centers_cov / subj_count
    self.global_widths_var_scaled = self.global_widths_var / subj_count
    self.gather_posterior = np.zeros(self.n_subj * self.prior_size)
    self.global_posterior_ = np.zeros(self.prior_size)
    return self
def OpenFileSystem(cls, path_spec_object, resolver_context=None):
    """Opens a file system object defined by path specification.

    Args:
      path_spec_object (PathSpec): path specification.
      resolver_context (Optional[Context]): resolver context, where None
          represents the built in context which is not multi process safe.

    Returns:
      FileSystem: file system or None if the path specification could not
          be resolved or has no file system object.

    Raises:
      AccessError: if the access to open the file system was denied.
      BackEndError: if the file system cannot be opened.
      MountPointError: if the mount point specified in the path specification
          does not exist.
      PathSpecError: if the path specification is incorrect.
      TypeError: if the path specification type is unsupported.
    """
    if not isinstance(path_spec_object, path_spec.PathSpec):
        raise TypeError('Unsupported path specification type.')
    if resolver_context is None:
        # Fall back to the built-in (not multi-process safe) context.
        resolver_context = cls._resolver_context
    if path_spec_object.type_indicator == definitions.TYPE_INDICATOR_MOUNT:
        # Mount specs are aliases: dereference into the path specification
        # registered for the mount point.
        if path_spec_object.HasParent():
            raise errors.PathSpecError('Unsupported mount path specification with parent.')
        mount_point = getattr(path_spec_object, 'identifier', None)
        if not mount_point:
            raise errors.PathSpecError('Unsupported path specification without mount point identifier.')
        path_spec_object = mount_manager.MountPointManager.GetMountPoint(mount_point)
        if not path_spec_object:
            raise errors.MountPointError('No such mount point: {0:s}'.format(mount_point))
    # Reuse a cached file system from the context when available; otherwise
    # build one via the type-specific resolver helper.
    file_system = resolver_context.GetFileSystem(path_spec_object)
    if not file_system:
        resolver_helper = cls._GetResolverHelper(path_spec_object.type_indicator)
        file_system = resolver_helper.NewFileSystem(resolver_context)
    try:
        file_system.Open(path_spec_object)
    except (IOError, ValueError) as exception:
        raise errors.BackEndError('Unable to open file system with error: {0!s}'.format(exception))
    return file_system
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.