signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _send_unsigned_int ( self , value ) :
"""Convert a numerical value into an integer , then to a bytes object . Check
bounds for unsigned int ."""
|
# Coerce to int . This will throw a ValueError if the value can ' t
# actually be converted .
if type ( value ) != int :
new_value = int ( value )
if self . give_warnings :
w = "Coercing {} into int ({})" . format ( value , new_value )
warnings . warn ( w , Warning )
value = new_value
# Range check
if value > self . board . unsigned_int_max or value < self . board . unsigned_int_min :
err = "Value {} exceeds the size of the board's unsigned int." . format ( value )
raise OverflowError ( err )
return struct . pack ( self . board . unsigned_int_type , value )
|
def run_algorithm(start, end, initialize, capital_base, handle_data=None, before_trading_start=None, analyze=None, data_frequency='daily', bundle='quantopian-quandl', bundle_timestamp=None, trading_calendar=None, metrics_set='default', benchmark_returns=None, default_extension=True, extensions=(), strict_extensions=True, environ=os.environ, blotter='default'):
    """Run a trading algorithm.

    Parameters
    ----------
    start : datetime
        The start date of the backtest.
    end : datetime
        The end date of the backtest.
    initialize : callable[context -> None]
        Called once at the very beginning of the backtest; use it to set
        up any state needed by the algorithm.
    capital_base : float
        The starting capital for the backtest.
    handle_data : callable[(context, BarData) -> None], optional
        Called every minute when ``data_frequency == 'minute'`` or every
        day when ``data_frequency == 'daily'``.
    before_trading_start : callable[(context, BarData) -> None], optional
        Called once before each trading day (after initialize on the
        first day).
    analyze : callable[(context, pd.DataFrame) -> None], optional
        Called once at the end of the backtest with the context and the
        performance data.
    data_frequency : {'daily', 'minute'}, optional
        The data frequency to run the algorithm at.
    bundle : str, optional
        Name of the data bundle to use. Defaults to 'quantopian-quandl'.
    bundle_timestamp : datetime, optional
        The datetime to look up the bundle data for. Defaults to the
        current time.
    trading_calendar : TradingCalendar, optional
        The trading calendar to use for the backtest.
    metrics_set : iterable[Metric] or str, optional
        Metrics to compute in the simulation. A string is resolved with
        :func:`zipline.finance.metrics.load`.
    default_extension : bool, optional
        Whether to load the default zipline extension found at
        ``$ZIPLINE_ROOT/extension.py``.
    extensions : iterable[str], optional
        Names of other extensions to load; either dotted module paths
        like ``a.b.c`` or paths to python files like ``a/b/c.py``.
    strict_extensions : bool, optional
        If True, fail the run when an extension fails to load; otherwise
        only warn.
    environ : mapping[str -> str], optional
        The os environment to use; many extensions read parameters from
        it. Defaults to ``os.environ``.
    blotter : str or zipline.finance.blotter.Blotter, optional
        Blotter to use. A string is resolved via
        ``zipline.extensions.register`` and called with no parameters.
        Default is a :class:`zipline.finance.blotter.SimulationBlotter`
        that never cancels orders.

    Returns
    -------
    perf : pd.DataFrame
        The daily performance of the algorithm.

    See Also
    --------
    zipline.data.bundles.bundles : The available data bundles.
    """
    load_extensions(default_extension, extensions, strict_extensions, environ)

    # Delegate to the internal runner with the library-mode defaults
    # (no algo file/text, output discarded, no local namespace).
    run_kwargs = dict(
        handle_data=handle_data,
        initialize=initialize,
        before_trading_start=before_trading_start,
        analyze=analyze,
        algofile=None,
        algotext=None,
        defines=(),
        data_frequency=data_frequency,
        capital_base=capital_base,
        bundle=bundle,
        bundle_timestamp=bundle_timestamp,
        start=start,
        end=end,
        output=os.devnull,
        trading_calendar=trading_calendar,
        print_algo=False,
        metrics_set=metrics_set,
        local_namespace=False,
        environ=environ,
        blotter=blotter,
        benchmark_returns=benchmark_returns,
    )
    return _run(**run_kwargs)
|
def parse_known_args(self, args=None, namespace=None):
    """Hijack the normal argparse Namespace generation, shimming configman
    into the process.

    :returns: a configman DotDict rather than an argparse Namespace.
    """
    # Import inside the method so we avoid circular references in the
    # outer (module) scope.
    from configman.config_manager import ConfigurationManager

    manager = ConfigurationManager(
        definition_source=[self.get_required_config()],
        values_source_list=self.value_source_list,
        argv_source=args,
        app_name=self.prog,
        app_version=self.version,
        app_description=self.description,
        use_auto_help=False,
    )
    # Keys are exposed with '-' translated to '_' so they are valid
    # attribute names.
    mapping_class = create_key_translating_dot_dict(
        "HyphenUnderscoreDict", (('-', '_'),))
    return manager.get_config(mapping_class=mapping_class)
|
def webui_url(args):
    '''show the url of web ui'''
    nni_config = Config(get_config_filename(args))
    urls = ' '.join(nni_config.get_config('webuiUrl'))
    print_normal('{0} {1}'.format('Web UI url:', urls))
|
def content_negotiation(ids=None, format="bibtex", style='apa', locale="en-US", url=None, **kwargs):
    '''Get citations in various formats from CrossRef.

    :param ids: [str] A single DOI or many DOIs; pass many as a list.
    :param format: [str] Name of the format. One of "rdf-xml", "turtle",
        "citeproc-json", "citeproc-json-ish", "text", "ris", "bibtex"
        (default), "crossref-xml", "datacite-xml", "bibentry", or
        "crossref-tdm".
    :param style: [str] A CSL style (for text format only). See
        :func:`~habanero.cn.csl_styles` for options. Default: "apa".
        A style CrossRef doesn't support yields a
        `(500) Internal Server Error`.
    :param locale: [str] Language locale. See `locale.locale_alias`.
    :param url: [str] Base URL for the content negotiation request.
        Default: `https://doi.org`.
    :param kwargs: any additional arguments will be passed on to
        `requests.get`.
    :return: string, which can be parsed to various formats depending on
        the requested format (e.g., JSON vs. XML vs. bibtex).

    Usage::

        from habanero import cn
        cn.content_negotiation(ids='10.1126/science.169.3946.635')
        # citeproc-json
        cn.content_negotiation(ids='10.1126/science.169.3946.635',
                               format="citeproc-json")
        # a styled text citation
        cn.content_negotiation(ids='10.1126/science.169.3946.635',
                               format="text", style="apa")
        # many DOIs at once
        cn.content_negotiation(ids=['10.5167/UZH-30455',
                                    '10.5167/UZH-49216'])
        # a different base url
        cn.content_negotiation(ids='10.5284/1011335',
                               url="http://dx.doi.org")
    '''
    # Fall back to the module default base url when none is supplied.
    base = cn_base_url if url is None else url
    return CNRequest(base, ids, format, style, locale, **kwargs)
|
def _transientSchedule(self, when, now):
    """If the service is currently running, schedule a tick to happen no
    later than C{when}.

    @param when: The time at which to tick.
    @type when: L{epsilon.extime.Time}
    @param now: The current time.
    @type now: L{epsilon.extime.Time}
    """
    if not self.running:
        return

    target = when.asPOSIXTimestamp()
    if self.timer is not None:
        # A pending timer that fires before `when` already covers us.
        if self.timer.getTime() < target:
            return
        # Otherwise drop it; we reschedule below.
        self.timer.cancel()

    # reactor.callLater allows only positive delay values.  The scheduler
    # may legitimately hold things scheduled in the past, and since
    # callers deal in Time() instances they can't predict what those are
    # relative to "now" anyway -- so clamp to a small positive minimum.
    delay = max(_EPSILON, target - now.asPOSIXTimestamp())
    self.timer = self.callLater(delay, self.tick)
    self.nextEventAt = when
|
def div(txt, *args, **kwargs):
    """Create and return an HTML <div> element wrapping the given text.

    @param txt (basestring): the text buffer to use
    @param *args (list): if present, \\c txt is treated as a Python format
        string and the arguments are formatted into it
    @param kwargs (dict): the \\c css field can contain the CSS class for
        the <div> element
    """
    formatted = txt.format(*args) if args else txt
    css_class = kwargs.get('css', HTML_DIV_CLASS)
    return u'<div class="{}">{!s}</div>'.format(css_class, formatted)
|
def insertTopLevelGroup(self, groupName, position=None):
    """Insert a top level group tree item.

    Used to group all config nodes of (for instance) the current
    inspector.  Returns the newly created CTI.
    """
    newGroup = GroupCti(groupName)
    return self._invisibleRootItem.insertChild(newGroup, position=position)
|
def cee_map_priority_table_map_cos3_pgid(self, **kwargs):
    """Auto Generated Code.

    Builds the NETCONF ``config`` element for the cee-map
    priority-table/map-cos3-pgid leaf and hands it to the callback.
    """
    config = ET.Element("config")
    cee_map = ET.SubElement(config, "cee-map",
                            xmlns="urn:brocade.com:mgmt:brocade-cee-map")
    # The cee-map list key.
    name_key = ET.SubElement(cee_map, "name")
    name_key.text = kwargs.pop('name')
    # The leaf value being configured.
    priority_table = ET.SubElement(cee_map, "priority-table")
    leaf = ET.SubElement(priority_table, "map-cos3-pgid")
    leaf.text = kwargs.pop('map_cos3_pgid')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def has(self, key):
    """Check whether a key is in the cache (and not expired).

    :param key: key to search for.
    :returns: True/False, or CACHE_DISABLED if the cache is disabled.
    """
    if not self.options.enabled:
        return CACHE_DISABLED
    # Membership test directly on the dict -- `key in d.keys()` builds a
    # needless view and is slower than `key in d`.
    ret = key in self._dict and not self._dict[key].is_expired()
    logger.debug('has({}) == {}'.format(repr(key), ret))
    return ret
|
def vtquery(apikey, checksums):
    """Perform the VirusTotal query, dealing with errors and throttling.

    :param apikey: VirusTotal API key.
    :param checksums: a single checksum string, or an iterable of them.
    :returns: the decoded JSON report.
    :raises RuntimeError: on an unexpected HTTP status code.
    """
    # A single string is passed as-is; multiple checksums are joined.
    # (Replaces the fragile `cond and a or b` idiom with a real
    # conditional expression.)
    resource = checksums if isinstance(checksums, str) else ', '.join(checksums)
    data = {'apikey': apikey, 'resource': resource}
    while True:
        response = requests.post(VT_REPORT_URL, data=data)
        response.raise_for_status()
        if response.status_code == 200:
            return response.json()
        elif response.status_code == 204:
            # Rate limit reached: wait and retry.
            logging.debug("API key request rate limit reached, throttling.")
            time.sleep(VT_THROTTLE)
        else:
            raise RuntimeError("Response status code %s" % response.status_code)
|
def doDailies(usr, dailyList):
    """Do a list of dailies and yield each result.

    Takes a list of valid dailies, initiates each one, then plays each
    one and yields its resulting message.  The names given in the list
    must match the daily's class name.

    Parameters:
        usr (User): User to do the dailies with.
        dailyList (list): Names of all dailies to perform.

    Raises:
        invalidDaily: if a name in ``dailyList`` is not a known daily.
    """
    # Names of all supported dailies (every subclass of Daily).
    supported = [daily.__name__ for daily in Daily.__subclasses__()]

    # Verify the requested list only contains known dailies.
    for name in dailyList:
        if name not in supported:
            raise invalidDaily

    for daily in Daily.__subclasses__():
        if daily.__name__ not in dailyList:
            continue
        inst = daily(usr)
        try:
            inst.play()
            yield daily.__name__ + ": " + inst.getMessage()
        except dailyAlreadyDone:
            yield daily.__name__ + ": This daily is already done!"
        except parseException:
            yield daily.__name__ + ": A serious error has occurred. Please refer to the logs."
        except marrowNotAvailable:
            yield daily.__name__ + ": Not available at this time!"
        except tombolaClosed:
            yield daily.__name__ + ": Tombola is currently closed!"
|
def _logprob_obs ( self , data , mean_pat , var ) :
"""Log probability of observing each timepoint under each event model
Computes the log probability of each observed timepoint being
generated by the Gaussian distribution for each event pattern
Parameters
data : voxel by time ndarray
fMRI data on which to compute log probabilities
mean _ pat : voxel by event ndarray
Centers of the Gaussians for each event
var : float or 1D array of length equal to the number of events
Variance of the event Gaussians . If scalar , all events are
assumed to have the same variance
Returns
logprob : time by event ndarray
Log probability of each timepoint under each event Gaussian"""
|
n_vox = data . shape [ 0 ]
t = data . shape [ 1 ]
# z - score both data and mean patterns in space , so that Gaussians
# are measuring Pearson correlations and are insensitive to overall
# activity changes
data_z = stats . zscore ( data , axis = 0 , ddof = 1 )
mean_pat_z = stats . zscore ( mean_pat , axis = 0 , ddof = 1 )
logprob = np . empty ( ( t , self . n_events ) )
if type ( var ) is not np . ndarray :
var = var * np . ones ( self . n_events )
for k in range ( self . n_events ) :
logprob [ : , k ] = - 0.5 * n_vox * np . log ( 2 * np . pi * var [ k ] ) - 0.5 * np . sum ( ( data_z . T - mean_pat_z [ : , k ] ) . T ** 2 , axis = 0 ) / var [ k ]
logprob /= n_vox
return logprob
|
def forward(self, x):
    """Transform from the packed to unpacked representation (numpy).

    :param x: packed numpy array of shape
        ``self.num_matrices x triangular_number``.
    :return: reconstructed numpy array of shape
        ``self.num_matrices x N x N`` (the leading axis is squeezed away
        when ``self.squeeze`` is set).
    """
    unpacked = np.zeros((self.num_matrices, self.N, self.N), settings.float_type)
    # Row/column indices of the lower triangle (including the diagonal).
    rows, cols = np.tril_indices(self.N, 0)
    batch = np.zeros(rows.size, dtype=int)
    for m in range(self.num_matrices):
        # Scatter the packed values of matrix `m` into its lower triangle.
        unpacked[batch + m, rows, cols] = x[m, :]
    return unpacked.squeeze(axis=0) if self.squeeze else unpacked
|
def _remove_node ( self , node_name ) :
"""Remove the given node from the continuum / ring .
: param node _ name : the node name ."""
|
try :
node_conf = self . _nodes . pop ( node_name )
except Exception :
raise KeyError ( 'node \'{}\' not found, available nodes: {}' . format ( node_name , self . _nodes . keys ( ) ) )
else :
self . _distribution . pop ( node_name )
for w in range ( 0 , node_conf [ 'vnodes' ] * node_conf [ 'weight' ] ) :
del self . _ring [ self . hashi ( '%s-%s' % ( node_name , w ) ) ]
self . _keys = sorted ( self . _ring . keys ( ) )
|
def add_handler(cls, level, fmt, colorful, **kwargs):
    """Add a configured handler to the global logger.

    :param cls: handler class to instantiate.
    :param level: logging level, as an int or a level-name string
        (unknown names fall back to DEBUG).
    :param fmt: log record format string.
    :param colorful: whether to use the colored formatter.
    :param kwargs: extra arguments forwarded to the handler constructor.
    :returns: the handler that was added.
    """
    # No `global g_logger` is needed: the name is only read (addHandler
    # mutates the logger in place), never rebound.
    if isinstance(level, str):
        level = getattr(logging, level.upper(), logging.DEBUG)
    handler = cls(**kwargs)
    handler.setLevel(level)
    if colorful:
        formatter = ColoredFormatter(fmt, datefmt='%Y-%m-%d %H:%M:%S')
    else:
        formatter = logging.Formatter(fmt, datefmt='%Y-%m-%d %H:%M:%S')
    handler.setFormatter(formatter)
    g_logger.addHandler(handler)
    return handler
|
def add_to_item_list_by_name(self, item_urls, item_list_name):
    """Instruct the server to add the given items to the specified Item
    List (which will be created if it does not already exist).

    :type item_urls: List or ItemGroup
    :param item_urls: List of URLs for the items to add, or an ItemGroup
        object.
    :type item_list_name: String
    :param item_list_name: name of the item list to retrieve.
    :rtype: String
    :returns: the server success message, if successful.
    :raises: APIError if the request was not successful.
    """
    query = urlencode((('name', item_list_name),))
    endpoint = '/item_lists?' + query
    payload = json.dumps({'items': list(item_urls)})
    response = self.api_request(endpoint, method='POST', data=payload)
    return self.__check_success(response)
|
def _is_string(thing):
    """Check that **thing** is a string; what counts as a string depends
    on the Python version.

    :param thing: the thing to check if it's a string.
    :rtype: bool
    :returns: ``True`` if **thing** is a string (or unicode in Python2).
    """
    # `basestring` is only evaluated on Python 2, where it exists.
    string_type = str if _py3k else basestring
    return isinstance(thing, string_type)
|
def get_tracks(self):
    """Retrieve all the tracks of the album, fetching them lazily on the
    first call.

    :return: List. Tracks of the current album.
    """
    if not self._track_list:
        # The first lookup result is the album itself; skip it.
        found = itunespy.lookup(id=self.collection_id,
                                entity=itunespy.entities['song'])[1:]
        self._track_list.extend(found)
    return self._track_list
|
def send_message(self, body, to, quiet=False, html_body=None):
    """Send a message to a single member.

    Muted members have the message queued instead of delivered.
    """
    if to.get('MUTED'):
        # Don't deliver now; replay later when the member is unmuted.
        to['QUEUED_MESSAGES'].append(body)
        return

    if not quiet:
        logger.info('message on %s to %s: %s' % (self.name, to['JID'], body))
    message = xmpp.protocol.Message(to=to['JID'], body=body, typ='chat')
    if html_body:
        # Attach an XHTML-IM alternative rendering of the body.
        html = xmpp.Node('html', {'xmlns': 'http://jabber.org/protocol/xhtml-im'})
        html.addChild(node=xmpp.simplexml.XML2Node(
            "<body xmlns='http://www.w3.org/1999/xhtml'>"
            + html_body.encode('utf-8') + "</body>"))
        message.addChild(node=html)
    self.client.send(message)
|
def init(cls, *args, **kwargs):
    """Initialize the config just as you would a regular dict."""
    config = cls()
    values = dict(*args, **kwargs)
    config._values.update(values)
    return config
|
def parse_expression(self, expr):
    """Split an expression into a prefix and the trailing expression.

    Tested with::

        operator==
        std::rel_ops::operator!=
        std::atomic::operator=
        std::array::operator[]
        std::function::operator()
        std::vector::at
        std::relational operators
        std::vector::begin
        std::abs(float)
        std::fabs()
    """
    # Group 1 lazily eats the qualification (and an optional 'operator'
    # keyword); group 2 captures the final component.
    match = re.match(r'^(.*?(?:::)?(?:operator)?)((?:::[^:]*|[^:]*)?)$', expr)
    return [match.group(1), match.group(2)]
|
def repeater(pipe, how_many=2):
    '''this function repeats each value in the pipeline however many times you need'''
    for value in pipe:
        emitted = 0
        while emitted < how_many:
            yield value
            emitted += 1
|
def inertia_tensor(self):
    """The inertia tensor of the molecule (3x3 array)."""
    tensor = np.zeros((3, 3), float)
    for atom in range(self.size):
        # Position relative to the center of mass.
        rel = self.coordinates[atom] - self.com
        mass = self.masses[atom]
        # Diagonal term: m * |r|^2 on each diagonal entry
        # (ravel()[::4] addresses exactly the diagonal of a 3x3).
        tensor.ravel()[::4] += mass * (rel ** 2).sum()
        # Outer product term.
        tensor -= mass * np.outer(rel, rel)
    return tensor
|
def load_sound_font(self, sf2):
    """Load a sound font.

    This should be called before any audio can be played, since the
    instruments are kept in the sf2 file.

    :param sf2: path to the SoundFont (.sf2) file.
    :returns: True on success, False on failure.
    """
    # fs.sfload returns -1 on failure, otherwise the soundfont id.
    self.sfid = self.fs.sfload(sf2)
    # Direct comparison instead of the awkward `not x == -1`.
    return self.sfid != -1
|
def ellipsoid_phantom(space, ellipsoids, min_pt=None, max_pt=None):
    """Return a phantom given by ellipsoids.

    Parameters
    ----------
    space : `DiscreteLp`
        Space in which the phantom should be created; must be 2- or
        3-dimensional. If ``space.shape`` is 1 in an axis, a
        corresponding slice of the phantom is created (instead of
        squashing the whole phantom into the slice).
    ellipsoids : sequence of sequences
        If ``space`` is 2-dimensional, each row should contain::

            'value', 'axis_1', 'axis_2', 'center_x', 'center_y', 'rotation'

        If ``space`` is 3-dimensional, each row should contain::

            'value', 'axis_1', 'axis_2', 'axis_3',
            'center_x', 'center_y', 'center_z',
            'rotation_phi', 'rotation_theta', 'rotation_psi'

        Ellipsoids are specified relative to the reference rectangle
        ``[-1, -1] x [1, 1]`` (analogously in 3D); angles are in radians.
    min_pt, max_pt : array-like, optional
        If provided, use these vectors to determine the bounding box of
        the phantom instead of ``space.min_pt`` and ``space.max_pt``.
        Currently requires ``min_pt >= space.min_pt`` and
        ``max_pt <= space.max_pt``, i.e., shifting or scaling outside
        the original space is not allowed. Providing one of them results
        in a shift; providing both in a scaled version of the phantom.

    Notes
    -----
    The phantom is created by adding the values of each ellipsoid. This
    implementation is heavily optimized: for each ellipsoid it only
    updates a subset of points that could possibly be inside it, and it
    works on the meshgrid wherever possible. This is most effective for
    "spherical" ellipsoids, less so for elongated or rotated ones.

    Examples
    --------
    Create a circle with a smaller circle inside:

    >>> space = odl.uniform_discr([-1, -1], [1, 1], [5, 5])
    >>> ellipses = [[1.0, 1.0, 1.0, 0.0, 0.0, 0.0],
    ...             [1.0, 0.6, 0.6, 0.0, 0.0, 0.0]]
    >>> print(ellipsoid_phantom(space, ellipses))
    [[ 0.,  0.,  1.,  0.,  0.],
     [ 0.,  1.,  2.,  1.,  0.],
     [ 1.,  2.,  2.,  2.,  1.],
     [ 0.,  1.,  2.,  1.,  0.],
     [ 0.,  0.,  1.,  0.,  0.]]

    See Also
    --------
    odl.phantom.transmission.shepp_logan : Classical Shepp-Logan phantom.
    odl.phantom.transmission.shepp_logan_ellipsoids : Ellipses for the
        Shepp-Logan phantom.
    odl.phantom.geometric.defrise_ellipses : Ellipses for the Defrise
        phantom.
    """
    if space.ndim == 2:
        _phantom = _ellipse_phantom_2d
    elif space.ndim == 3:
        _phantom = _ellipsoid_phantom_3d
    else:
        raise ValueError('dimension not 2 or 3, no phantom available')

    if min_pt is None and max_pt is None:
        return _phantom(space, ellipsoids)

    # Generate a temporary space with the given `min_pt` and `max_pt`
    # (snapped to the cell grid), create the phantom in that space and
    # resize to the target size for `space`.
    # The snapped points are constructed by finding the index of
    # `min/max_pt` in the space partition, indexing the partition with
    # that index (yielding a single-cell partition), and then taking the
    # lower-left / upper-right corner of that cell.
    if min_pt is None:
        snapped_min_pt = space.min_pt
    else:
        min_pt_cell = space.partition[space.partition.index(min_pt)]
        snapped_min_pt = min_pt_cell.min_pt

    if max_pt is None:
        snapped_max_pt = space.max_pt
    else:
        max_pt_cell = space.partition[space.partition.index(max_pt)]
        snapped_max_pt = max_pt_cell.max_pt
        # Avoid snapping to the next cell where max_pt falls exactly on
        # a cell boundary.
        # BUG FIX: this loop used to run even when `max_pt` was None
        # (only `min_pt` given), raising TypeError on `max_pt[i]`.
        for i in range(space.ndim):
            if max_pt[i] in space.partition.cell_boundary_vecs[i]:
                snapped_max_pt[i] = max_pt[i]

    tmp_space = uniform_discr_fromdiscr(
        space, min_pt=snapped_min_pt, max_pt=snapped_max_pt,
        cell_sides=space.cell_sides)
    tmp_phantom = _phantom(tmp_space, ellipsoids)
    offset = space.partition.index(tmp_space.min_pt)
    return space.element(resize_array(tmp_phantom, space.shape, offset))
|
def has_ssd(self):
    """Return True if any of the drives is an SSD."""
    # any() short-circuits on the first SSD found, exactly like the
    # original explicit loop.
    return any(member.media_type == constants.MEDIA_TYPE_SSD
               for member in self._drives_list())
|
def apply_signal(signal_function, volume_signal):
    """Combine the signal volume with its timecourse.

    Apply the convolution of the HRF and stimulus time course to the
    volume.

    Parameters
    ----------
    signal_function : timepoint by timecourse array, float
        The timecourse of the signal over time. If there is only one
        column then the same timecourse is applied to all non-zero voxels
        in volume_signal. If there is more than one column then each
        column is paired with a non-zero voxel in volume_signal (a 3d
        numpy array generated in generate_signal).
    volume_signal : multi dimensional array, float
        The volume containing the signal to be convolved, with the same
        dimensions as the output volume. Its elements indicate how
        strongly each signal in signal_function is modulated in the
        output volume.

    Returns
    -------
    signal : multidimensional array, float
        The convolved signal volume with the same 3d shape as
        volume_signal and the same 4th dimension as signal_function.

    Raises
    ------
    IndexError
        If the number of timecourses is neither 1 nor the number of
        non-zero voxels.
    """
    # How many timepoints / timecourses are in signal_function.
    timepoints = signal_function.shape[0]
    timecourses = signal_function.shape[1]

    # Preset the output volume: 3d of volume_signal plus a time axis.
    signal = np.zeros([volume_signal.shape[0], volume_signal.shape[1],
                       volume_signal.shape[2], timepoints])

    # Find all the non-zero voxels in the brain.
    idxs = np.where(volume_signal != 0)
    if timecourses == 1:
        # Only one timecourse supplied: duplicate it for every voxel.
        # np.tile replaces the deprecated np.matlib.repmat with
        # identical semantics for 2-D input.
        signal_function = np.tile(signal_function, (1, len(idxs[0])))
    elif len(idxs[0]) != timecourses:
        raise IndexError('The number of non-zero voxels in the volume and '
                         'the number of timecourses does not match. Aborting')

    # For each coordinate with a non-zero voxel, fill in the timecourse
    # for that voxel, scaled by the voxel's value.
    for idx_counter in range(len(idxs[0])):
        x = idxs[0][idx_counter]
        y = idxs[1][idx_counter]
        z = idxs[2][idx_counter]
        signal_function_temp = signal_function[:, idx_counter]
        signal[x, y, z, :] = volume_signal[x, y, z] * signal_function_temp

    return signal
|
def write(self, file_path, hoys=None, write_hours=False):
    """Write the wea file.

    WEA carries irradiance values from epw and is what gendaymtx uses to
    generate the sky.

    Args:
        file_path: Target path; a ``.wea`` extension is appended if
            missing.
        hoys: Optional list of hours of the year to write; defaults to
            all hours available in this Wea.
        write_hours: If True, also write a ``.hrs`` file listing the
            hoys that were written.

    Returns:
        The path of the written wea file.
    """
    if not file_path.lower().endswith('.wea'):
        file_path += '.wea'

    # With no user input for hoys, write the full wea for all hours.
    full_wea = False
    if not hoys:
        hoys = self.hoys
        full_wea = True

    lines = [self.header]
    if full_wea:
        # No user input for hoys; write all the hours.
        for dir_rad, dif_rad, dt in zip(self.direct_normal_irradiance,
                                        self.diffuse_horizontal_irradiance,
                                        self.datetimes):
            lines.append("%d %d %.3f %d %d\n"
                         % (dt.month, dt.day, dt.float_hour, dir_rad, dif_rad))
    else:
        # Output wea based on user request.
        for hoy in hoys:
            try:
                dir_rad, dif_rad = self.get_irradiance_value_for_hoy(hoy)
            except IndexError:
                # BUG FIX: this message used to reference `dt`, which is
                # only assigned *after* the try block -- the first failing
                # hoy raised NameError (or reported a stale hour).
                print('Warn: Wea data for {} is not available!'.format(hoy))
                continue
            dt = DateTime.from_hoy(hoy)
            # Center hourly values on the half hour.
            dt = dt.add_minute(30) if self.timestep == 1 else dt
            lines.append("%d %d %.3f %d %d\n"
                         % (dt.month, dt.day, dt.float_hour, dir_rad, dif_rad))

    write_to_file(file_path, ''.join(lines), True)

    if write_hours:
        hrs_file_path = file_path[:-4] + '.hrs'
        hrs_data = ','.join(str(h) for h in hoys) + '\n'
        write_to_file(hrs_file_path, hrs_data, True)
    return file_path
|
def get_target_state():
    """SDP target state.

    Returns the target state together with the allowed target states and
    the time it was last updated.
    """
    sdp_state = SDPState()
    errval, errdict = _check_status(sdp_state)
    if errval == "error":
        # Status check failed; report "unknown" with the reason.
        LOG.debug(errdict['reason'])
        return dict(current_target_state="unknown",
                    last_updated="unknown",
                    reason=errdict['reason'])

    LOG.debug('Getting target state')
    target_state = sdp_state.target_state
    LOG.debug('Target state = %s', target_state)
    allowed = sdp_state.allowed_target_states[sdp_state.current_state]
    return dict(current_target_state=target_state,
                allowed_target_states=allowed,
                last_updated=sdp_state.target_timestamp.isoformat())
|
def update_release_resource(self, release_update_metadata, project, release_id):
    """UpdateReleaseResource.

    [Preview API] Update a few properties of a release.

    :param :class:`<ReleaseUpdateMetadata> <azure.devops.v5_1.release.models.ReleaseUpdateMetadata>` release_update_metadata:
        Properties of the release to update.
    :param str project: Project ID or project name.
    :param int release_id: Id of the release to update.
    :rtype: :class:`<Release> <azure.devops.v5_1.release.models.Release>`
    """
    content = self._serialize.body(release_update_metadata,
                                   'ReleaseUpdateMetadata')
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if release_id is not None:
        route_values['releaseId'] = self._serialize.url('release_id', release_id, 'int')
    response = self._send(http_method='PATCH',
                          location_id='a166fde7-27ad-408e-ba75-703c2cc9d500',
                          version='5.1-preview.8',
                          route_values=route_values,
                          content=content)
    return self._deserialize('Release', response)
|
def assortativity_attributes(user):
    """Computes the assortativity of the nominal attributes.

    This indicator measures the homophily of the current user with his
    correspondents, for each attribute.  It returns a value between 0
    (no assortativity) and 1 (all the contacts share the same value):
    the percentage of contacts sharing the same value.
    """
    adjacency = matrix_undirected_unweighted(user)
    contacts = [name for name in user.network.keys() if name != user.name]
    # Collect attributes of every connected correspondent (row 0 of the
    # adjacency matrix is the current user's connections).
    attrs_by_contact = {}
    for idx, node_name in enumerate(matrix_index(user)):
        peer = user.network.get(node_name, None)
        if peer is None or node_name == user.name or adjacency[0][idx] == 0:
            continue
        if peer.has_attributes:
            attrs_by_contact[peer.name] = peer.attributes
    assortativity = {}
    for attr in user.attributes:
        matching = sum(1 for c in contacts
                       if c in attrs_by_contact
                       and user.attributes[attr] == attrs_by_contact[c][attr])
        known = sum(1 for c in contacts if c in attrs_by_contact)
        assortativity[attr] = matching / known if known != 0 else None
    return assortativity
|
def p_ParamList ( p ) :
    '''ParamList :
    | Param
    | ParamList COMMA Param'''
    # PLY (yacc) grammar action: the docstring above *is* the grammar rule
    # and must not be edited.  Build a ParamList node depending on which
    # alternative matched:
    #   len(p) == 1 -> empty production (no parameters at all)
    #   len(p) == 2 -> a single Param
    #   otherwise   -> recursive case "ParamList COMMA Param" (len(p) == 4)
    if len ( p ) == 1 :
        p [ 0 ] = ParamList ( None , None )
    elif len ( p ) == 2 :
        p [ 0 ] = ParamList ( None , p [ 1 ] )
    else :
        p [ 0 ] = ParamList ( p [ 1 ] , p [ 3 ] )
|
def replace_pattern(name,
                    pattern,
                    repl,
                    count=0,
                    flags=8,
                    bufsize=1,
                    append_if_not_found=False,
                    prepend_if_not_found=False,
                    not_found_content=None,
                    search_only=False,
                    show_changes=True,
                    backslash_literal=False,
                    source='running',
                    path=None,
                    test=False,
                    replace=True,
                    debug=False,
                    commit=True):
    '''
    .. versionadded:: 2019.2.0

    Replace occurrences of a pattern in the configuration source. If
    ``show_changes`` is ``True``, then a diff of what changed will be returned,
    otherwise a ``True`` will be returned when changes are made, and ``False``
    when no changes are made.
    This is a pure Python implementation that wraps Python's :py:func:`~re.sub`.

    pattern
        A regular expression, to be matched using Python's
        :py:func:`~re.search`.

    repl
        The replacement text.

    count: ``0``
        Maximum number of pattern occurrences to be replaced. If count is a
        positive integer ``n``, only ``n`` occurrences will be replaced,
        otherwise all occurrences will be replaced.

    flags (list or int): ``8``
        A list of flags defined in the ``re`` module documentation from the
        Python standard library. Each list item should be a string that will
        correlate to the human-friendly flag name. E.g., ``['IGNORECASE',
        'MULTILINE']``. Optionally, ``flags`` may be an int, with a value
        corresponding to the XOR (``|``) of all the desired flags. Defaults to
        8 (which supports 'MULTILINE').

    bufsize (int or str): ``1``
        How much of the configuration to buffer into memory at once. The
        default value ``1`` processes one line at a time. The special value
        ``file`` may be specified which will read the entire file into memory
        before processing.

    append_if_not_found: ``False``
        If set to ``True``, and pattern is not found, then the content will be
        appended to the file.

    prepend_if_not_found: ``False``
        If set to ``True`` and pattern is not found, then the content will be
        prepended to the file.

    not_found_content
        Content to use for append/prepend if not found. If None (default), uses
        ``repl``. Useful when ``repl`` uses references to group in pattern.

    search_only: ``False``
        If set to true, no changes will be performed on the file, and this
        function will simply return ``True`` if the pattern was matched, and
        ``False`` if not.

    show_changes: ``True``
        If ``True``, return a diff of changes made. Otherwise, return ``True``
        if changes were made, and ``False`` if not.

    backslash_literal: ``False``
        Interpret backslashes as literal backslashes for the repl and not
        escape characters. This will help when using append/prepend so that
        the backslashes are not interpreted for the repl on the second run of
        the state.

    source: ``running``
        The configuration source. Choose from: ``running``, ``candidate``, or
        ``startup``. Default: ``running``.

    path
        Save the temporary configuration to a specific path, then read from
        there.

    test: ``False``
        Dry run? If set as ``True``, will apply the config, discard and return
        the changes. Default: ``False`` and will commit the changes on the
        device.

    commit: ``True``
        Commit the configuration changes? Default: ``True``.

    debug: ``False``
        Debug mode. Will insert a new key in the output dictionary, as
        ``loaded_config`` containing the raw configuration loaded on the device.

    replace: ``True``
        Load and replace the configuration. Default: ``True``.

    If an equal sign (``=``) appears in an argument to a Salt command it is
    interpreted as a keyword argument in the format ``key=val``. That
    processing can be bypassed in order to pass an equal sign through to the
    remote shell command by manually specifying the kwarg.

    State SLS Example:

    .. code-block:: yaml

        update_policy_name:
          netconfig.replace_pattern:
            - pattern: OLD-POLICY-NAME
            - repl: new-policy-name
            - debug: true
    '''
    ret = salt.utils.napalm.default_ret(name)
    # Options coming from the minion config / CLI equivalents take
    # precedence over the arguments passed to this state.
    merge_opt = __salt__['config.merge']
    test = merge_opt('test', test)
    debug = merge_opt('debug', debug)
    commit = merge_opt('commit', commit)
    replace = merge_opt('replace', replace)
    # Delegate the actual pattern replacement to the execution module.
    replace_ret = __salt__['net.replace_pattern'](
        pattern,
        repl,
        count=count,
        flags=flags,
        bufsize=bufsize,
        append_if_not_found=append_if_not_found,
        prepend_if_not_found=prepend_if_not_found,
        not_found_content=not_found_content,
        search_only=search_only,
        show_changes=show_changes,
        backslash_literal=backslash_literal,
        source=source,
        path=path,
        test=test,
        replace=replace,
        debug=debug,
        commit=commit)
    return salt.utils.napalm.loaded_ret(ret, replace_ret, test, debug)
|
def from_file(cls, db_file=ALL_SETS_PATH):
    """Reads card data from a JSON-file.

    :param db_file: A file-like object or a path.
    :return: A new :class:`~mtgjson.CardDb` instance.
    """
    # A file-like object exposes a callable ``read``; load from it directly.
    if callable(getattr(db_file, 'read', None)):
        return cls(json.load(db_file))
    # Otherwise treat ``db_file`` as a filesystem path.
    with io.open(db_file, encoding='utf8') as handle:
        return cls(json.load(handle))
|
def refresh_metrics(self):
    """Refresh metrics based on the column metadata.

    Existing DB rows with a matching metric name are updated in place;
    metrics not yet in the DB are added to the session.
    """
    metrics = self.get_metrics()
    existing_rows = (
        db.session.query(DruidMetric)
        .filter(DruidMetric.datasource_id == self.datasource_id)
        .filter(DruidMetric.metric_name.in_(metrics.keys()))
    )
    rows_by_name = {row.metric_name: row for row in existing_rows}
    for metric in metrics.values():
        row = rows_by_name.get(metric.metric_name)
        if row is None:
            # New metric: attach it to this datasource without triggering
            # an autoflush mid-iteration.
            with db.session.no_autoflush:
                metric.datasource_id = self.datasource_id
                db.session.add(metric)
        else:
            for attr in ('json', 'metric_type'):
                setattr(row, attr, getattr(metric, attr))
|
def use_comparative_assessment_part_bank_view(self):
    """Pass through to provider AssessmentPartBankSession.use_comparative_assessment_part_bank_view"""
    self._bank_view = COMPARATIVE
    # Forward the view preference to every underlying provider session;
    # sessions that do not support it are simply skipped.
    for provider_session in self._get_provider_sessions():
        try:
            provider_session.use_comparative_bank_view()
        except AttributeError:
            pass
|
def rename(self, new_name, range=None):
    """Request a rename to the server.

    Prompts for a name when ``new_name`` is falsy, saves the buffer, and
    sends a ``RenameRefactorDesc`` request covering the word under the
    cursor.
    """
    self.log.debug('rename: in')
    if not new_name:
        new_name = self.editor.ask_input("Rename to:")
    self.editor.write(noautocmd=True)
    begin, end = self.editor.word_under_cursor_pos()
    current_file = self.editor.path()
    self.editor.raw_message(current_file)
    payload = {
        "typehint": "RenameRefactorDesc",
        "newName": new_name,
        "start": self.get_position(begin[0], begin[1]),
        # end is exclusive, hence the +1 past the last character
        "end": self.get_position(end[0], end[1]) + 1,
        "file": current_file,
    }
    self.send_refactor_request("RefactorReq", payload, {"interactive": False})
|
def pkg_name_to_path(pkgname):
    """Resolve a package name to its directory path.

    Looks first for a directory named ``pkgname`` under the configured
    destination directory, then falls back to scanning the repo dir list
    for an entry whose base name (minus extension) equals ``pkgname``.

    :param pkgname: the package name to resolve
    :return: the matching path, or ``""`` when nothing matches
    """
    logger.debug("'%s'", pkgname)
    candidate = os.path.join(settings.upkg_destdir, pkgname)
    if os.path.isdir(candidate):
        logger.debug("found %s", candidate)
        return candidate
    # The repo entry may carry an extension; match on the stem instead.
    for entry in repo_dirlist():
        logger.debug("trying %s", entry)
        if entry['base'].startswith(pkgname):
            stem, _ext = os.path.splitext(entry['base'])
            if pkgname == stem:
                logger.debug("found %s", entry)
                return entry['path']
    logger.debug("found nothing")
    return ""
|
def clear_caches():  # suppress(unused-function)
    """Clear all caches, closing each cached spellchecker reader first."""
    for _unused, reader in _spellchecker_cache.values():
        reader.close()
    _spellchecker_cache.clear()
    _valid_words_cache.clear()
    _user_dictionary_cache.clear()
|
def require_admin(func):
    """Requires an admin user to access this resource.

    Wraps ``func`` so that a logged-in non-admin (or missing) user gets a
    403 response instead of the view result.
    """
    @wraps(func)
    @require_login
    def decorated(*args, **kwargs):
        user = current_user()
        # Guard clause: reject anything that is not an admin user.
        if not (user and user.is_admin):
            return Response('Forbidden', 403)
        return func(*args, **kwargs)
    return decorated
|
def class_param_names(cls, hidden=True):
    """Return the names of all class parameters.

    :param hidden: if ``False``, excludes parameters with a ``_`` prefix.
    :type hidden: :class:`bool`
    :return: set of parameter names
    :rtype: :class:`set`
    """
    names = {attr_name for attr_name, attr_value in cls.__dict__.items()
             if isinstance(attr_value, Parameter)}
    # Merge parameters declared on any base class supporting this API.
    for base in cls.__bases__:
        if hasattr(base, 'class_param_names'):
            names |= base.class_param_names(hidden=hidden)
    if not hidden:
        names = {name for name in names if not name.startswith('_')}
    return names
|
def version(self, calver: bool = False, pre_release: bool = False) -> str:
    """Generate version number.

    :param calver: Calendar versioning (year as the major component).
    :param pre_release: Append a pre-release suffix such as ``-rc.3``.
    :return: Version.

    :Example:
        0.2.1
    """
    major, minor, patch = self.random.randints(3, 0, 10)
    if calver:
        # Calendar versioning: minor/patch must be non-zero, major is a year.
        minor = minor or 1
        patch = patch or 1
        year = self.random.randint(2016, 2018)
        return '{}.{}.{}'.format(year, minor, patch)
    base = '{}.{}.{}'.format(major, minor, patch)
    if pre_release:
        suffix = self.random.choice(('alpha', 'beta', 'rc'))
        return '{}-{}.{}'.format(base, suffix, self.random.randint(1, 11))
    return base
|
def _fetch ( queryset , model_objs , unique_fields , update_fields , returning , sync , ignore_duplicate_updates = True , return_untouched = False ) :
    """Perform the upsert and do an optional sync operation.

    Executes a single upsert SQL statement for ``model_objs`` against the
    model behind ``queryset``; when ``sync`` is truthy, rows of the
    queryset that were not part of the upsert result are deleted.

    :param queryset: queryset whose model is the upsert target
    :param model_objs: model instances to upsert
    :param unique_fields: fields used for conflict detection
    :param update_fields: fields to update on conflict
    :param returning: ``True`` for all columns, or an iterable of column
        names to return (the primary key is added automatically when
        rows must be identified)
    :param sync: delete rows absent from the upsert result
    :param ignore_duplicate_updates: skip updates that would not change
        the row
    :param return_untouched: also return rows left unmodified
    :return: an ``UpsertResult`` of upserted rows plus, when syncing,
        pseudo-rows marking deletions with status ``'d'``
    """
    model = queryset . model
    # Returning untouched rows (or syncing, which implies it below) requires
    # the primary key in the RETURNING clause so rows can be identified.
    if ( return_untouched or sync ) and returning is not True :
        returning = set ( returning ) if returning else set ( )
        returning . add ( model . _meta . pk . name )
    upserted = [ ]
    deleted = [ ]
    # We must return untouched rows when doing a sync operation
    return_untouched = True if sync else return_untouched
    if model_objs :
        sql , sql_args = _get_upsert_sql ( queryset , model_objs , unique_fields , update_fields , returning , ignore_duplicate_updates = ignore_duplicate_updates , return_untouched = return_untouched )
        with connection . cursor ( ) as cursor :
            cursor . execute ( sql , sql_args )
            # cursor.description is only set when a RETURNING clause was used;
            # wrap each returned row in a namedtuple keyed by column name.
            if cursor . description :
                nt_result = namedtuple ( 'Result' , [ col [ 0 ] for col in cursor . description ] )
                upserted = [ nt_result ( * row ) for row in cursor . fetchall ( ) ]
    pk_field = model . _meta . pk . name
    if sync :
        # Any pre-existing pk not present in the upsert result is stale.
        orig_ids = queryset . values_list ( pk_field , flat = True )
        deleted = set ( orig_ids ) - { getattr ( r , pk_field ) for r in upserted }
        model . objects . filter ( pk__in = deleted ) . delete ( )
    nt_deleted_result = namedtuple ( 'DeletedResult' , [ model . _meta . pk . name , 'status_' ] )
    # Deleted rows are reported alongside upserted ones with status 'd'.
    return UpsertResult ( upserted + [ nt_deleted_result ( ** { pk_field : d , 'status_' : 'd' } ) for d in deleted ] )
|
def clip(self, x1, y1, x2, y2):
    """Activate a rectangular clip region, (X1,Y1)-(X2,Y2).

    You must call endclip() after you completed drawing.

        canvas.clip(x, y, x2, y2)
        draw something...
        canvas.endclip()
    """
    # Push the current clip box and replace it with its intersection
    # with the requested rectangle.
    self.__clip_stack.append(self.__clip_box)
    self.__clip_box = _intersect_box(self.__clip_box, (x1, y1, x2, y2))
    self.gsave()
    self.newpath()
    # Trace the rectangle and install it as the clip path.
    corners = ((x1, y1), (x1, y2), (x2, y2), (x2, y1))
    self.moveto(xscale(corners[0][0]), yscale(corners[0][1]))
    for cx, cy in corners[1:]:
        self.lineto(xscale(cx), yscale(cy))
    self.closepath()
    self.clip_sub()
|
def runlist_remove(name, **kwargs):
    """Remove runlist from the storage."""
    ctx = Context(**kwargs)
    action_kwargs = {
        'storage': ctx.repo.create_secure_service('storage'),
        'name': name,
    }
    ctx.execute_action('runlist:remove', **action_kwargs)
|
def ystep(self):
    r"""Minimise Augmented Lagrangian with respect to
    :math:`\mathbf{y}`."""
    residual = self.AX + self.U
    # First block: closed-form update from rho, S, and the weights W.
    block0 = (self.rho * (self.block_sep0(residual) - self.S)) / \
        (self.W ** 2 + self.rho)
    # Second block: apply the Pcn operator to the remaining component.
    block1 = self.Pcn(self.block_sep1(residual))
    self.Y = self.block_cat(block0, block1)
|
def set(self, logicalId, resource):
    """Adds the resource to dictionary with given logical Id. It will overwrite, if the logicalId is already used.

    :param string logicalId: Logical Id to set to
    :param SamResource or dict resource: The actual resource data
    """
    # Normalize SamResource instances to their plain-dict representation.
    if isinstance(resource, SamResource):
        self.resources[logicalId] = resource.to_dict()
    else:
        self.resources[logicalId] = resource
|
def addSettingsMenu(menuName, parentMenuFunction=None):
    '''Adds a 'open settings...' menu to the plugin menu.

    This method should be called from the initGui() method of the plugin.

    :param menuName: The name of the plugin menu in which the settings menu is to be added
    :param parentMenuFunction: a function from QgisInterface to indicate where to put the container plugin menu.
        If not passed, it uses addPluginToMenu
    '''
    parentMenuFunction = parentMenuFunction or iface.addPluginToMenu
    # The settings are namespaced by the calling plugin's top-level package.
    namespace = _callerName().split(".")[0]
    icon = QgsApplication.getThemeIcon('/mActionOptions.svg')
    settingsAction = QAction(icon, "Plugin Settings...", iface.mainWindow())
    settingsAction.setObjectName(namespace + "settings")
    settingsAction.triggered.connect(lambda: openSettingsDialog(namespace))
    parentMenuFunction(menuName, settingsAction)
    # Keep a module-level reference so the action is not garbage collected
    # and can be removed later.
    global _settingActions
    _settingActions[menuName] = settingsAction
|
def get_functions_by_search(self, function_query, function_search):
    """Pass through to provider FunctionSearchSession.get_functions_by_search"""
    # Authorization gate first, then delegate to the wrapped provider session.
    if not self._can('search'):
        raise PermissionDenied()
    return self._provider_session.get_functions_by_search(
        function_query, function_search)
|
def os_details():
    """Returns a dictionary containing details about the operating system."""
    # Compute architecture and linkage
    arch_bits, arch_linkage = platform.architecture()
    results = {
        # Machine details
        "platform.arch.bits": arch_bits,
        "platform.arch.linkage": arch_linkage,
        "platform.machine": platform.machine(),
        "platform.process": platform.processor(),
        "sys.byteorder": sys.byteorder,
        # OS details
        "os.name": os.name,
        "host.name": socket.gethostname(),
        "sys.platform": sys.platform,
        "platform.system": platform.system(),
        "platform.release": platform.release(),
        "platform.version": platform.version(),
        "encoding.filesystem": sys.getfilesystemencoding(),
    }
    # Paths and line separators
    for attr in ("sep", "altsep", "pathsep", "linesep"):
        results["os.{0}".format(attr)] = getattr(os, attr, None)
    # os.cpu_count() is available since Python 3.4; None on older versions.
    results["os.cpu_count"] = getattr(os, "cpu_count", lambda: None)()
    # sys.getdlopenflags() only exists on Unix.
    try:
        # pylint: disable=E1101
        results["sys.dlopenflags"] = sys.getdlopenflags()
    except AttributeError:
        results["sys.dlopenflags"] = None
    return results
|
def deploy_ray_func(func, partition, kwargs):  # pragma: no cover
    """Deploy a function to a partition in Ray.

    Note: Ray functions are not detected by codecov (thus pragma: no cover)

    Args:
        func: The function to apply.
        partition: The partition to apply the function to.
        kwargs: A dictionary of keyword arguments for the function.

    Returns:
        The result of the function.
    """
    try:
        return func(partition, **kwargs)
    except ValueError:
        # Sometimes Arrow forces us to make a copy of an object before we
        # operate on it.  We don't want the error to propagate to the user,
        # and we want to avoid copying unless we absolutely have to, so the
        # copy is only made on this retry path.
        return func(partition.copy(), **kwargs)
|
def _write_mosaic(self, key, outfile):
    """Write out mosaic data (or any new data generated within Ginga)
    to single-extension FITS."""
    # Default limit is 1e8 pixels (10k x 10k).
    size_limit = self.settings.get('max_mosaic_size', 1e8)
    channel = self.fv.get_channel(self.chname)
    image = channel.datasrc[key]
    # Refuse to write a very large mosaic.
    if (image.width * image.height) > size_limit:
        msg = 'Mosaic too large to be written {0}'.format(image.shape)
        self.w.status.set_text(msg)
        self.logger.error(msg)
        return
    # Build a single-extension HDU from the mosaic data and header,
    # recording history in PRIMARY.
    hdu = fits.PrimaryHDU(image.get_data())
    self._write_header(image, hdu)
    self._write_history(key, hdu)
    # astropy >= 1.3 renamed the ``clobber`` keyword to ``overwrite``.
    if minversion(astropy, '1.3'):
        hdu.writeto(outfile, overwrite=True)
    else:
        hdu.writeto(outfile, clobber=True)
|
def normalize_conf(conf):
    '''Check, convert and adjust user passed config.

    Given a user configuration it returns a verified configuration with
    all parameters converted to the types that are needed at runtime.
    '''
    normalized = conf.copy()
    # Raises on type errors before any conversion happens.
    check_config(normalized)
    # Convert some fields from their JSON representation in place.
    from_json_format(normalized)
    # Derive the directory mode from the file mode when not supplied.
    if 'dmode' not in normalized:
        normalized['dmode'] = calc_dir_mode(normalized['fmode'])
    return normalized
|
def fit(self, X, y):
    """Coerce the training data to float32 arrays and validate shapes.

    NOTE(review): the original docstring claimed this returns ``self``,
    but the implementation returns the converted ``(X, y)`` pair; that
    behavior is preserved here.

    Parameters:
        X : matrix, shape (n_samples, n_features) — the train data.
        y : vector, shape (n_samples,) — the target labels.

    Returns:
        tuple — ``(X, y)`` as ``numpy.float32`` arrays.
    """
    features = np.array(X, dtype=np.float32)
    targets = np.array(y, dtype=np.float32)
    # Both arrays must describe the same number of samples.
    assert features.shape[0] == targets.shape[0]
    return features, targets
|
def list_permissions(self, group_name=None, resource=None):
    """List permission sets associated filtering by group and/or resource.

    Args:
        group_name (string): Name of group.
        resource (intern.resource.boss.Resource): Identifies which data
            model object to operate on.

    Returns:
        (list): List of permissions.

    Raises:
        requests.HTTPError on failure.
    """
    # Authenticate against the project service, then delegate the call.
    self.project_service.set_auth(self._token_project)
    return self.project_service.list_permissions(group_name, resource)
|
def choicebox(msg="Pick something.", title=" ", choices=()):
    """Present the user with a list of choices.

    return the choice that he selects.
    return None if he cancels the selection.

    @arg msg: the msg to be displayed.
    @arg title: the window title
    @arg choices: a list or tuple of the choices to be displayed
    """
    # Never show an empty list; surface the programming error instead.
    if not choices:
        choices = ["Program logic error - no choices were specified."]
    global __choiceboxMultipleSelect
    __choiceboxMultipleSelect = 0
    return __choicebox(msg, title, choices)
|
async def add_credential ( self , name = None , credential = None , cloud = None , owner = None , force = False ) :
    """Add or update a credential to the controller.

    :param str name: Name of new credential. If None, the default
        local credential is used. Name must be provided if a credential
        is given.
    :param CloudCredential credential: Credential to add. If not given,
        it will attempt to read from local data, if available.
    :param str cloud: Name of cloud to associate the credential with.
        Defaults to the same cloud as the controller.
    :param str owner: Username that will own the credential. Defaults to
        the current user.
    :param bool force: Force indicates whether the update should be forced.
        It's only supported for facade v3 or later.
        Defaults to false.
    :returns: Name of credential that was uploaded.
    """
    # Fill in defaults from the controller connection.
    if not cloud :
        cloud = await self . get_cloud ( )
    if not owner :
        owner = self . connection ( ) . info [ 'user-info' ] [ 'identity' ]
    if credential and not name :
        raise errors . JujuError ( 'Name must be provided for credential' )
    if not credential :
        # No credential supplied: fall back to locally stored data.
        name , credential = self . _connector . jujudata . load_credential ( cloud , name )
        if credential is None :
            raise errors . JujuError ( 'Unable to find credential: {}' . format ( name ) )
    if credential . auth_type == 'jsonfile' and 'file' in credential . attrs : # file creds have to be loaded before being sent to the controller
        try : # it might already be JSON
            json . loads ( credential . attrs [ 'file' ] )
        except json . JSONDecodeError : # not valid JSON , so maybe it ' s a file
            cred_path = Path ( credential . attrs [ 'file' ] )
            if cred_path . exists ( ) : # make a copy
                cred_json = credential . to_json ( )
                credential = client . CloudCredential . from_json ( cred_json )
                # inline the cred
                credential . attrs [ 'file' ] = cred_path . read_text ( )
    log . debug ( 'Uploading credential %s' , name )
    cloud_facade = client . CloudFacade . from_connection ( self . connection ( ) )
    # The credential tag encodes cloud, owner (without the 'user-' prefix)
    # and credential name.
    tagged_credentials = [ client . UpdateCloudCredential ( tag = tag . credential ( cloud , tag . untag ( 'user-' , owner ) , name ) , credential = credential , ) ]
    if cloud_facade . version >= 3 : # UpdateCredentials was renamed to UpdateCredentialsCheckModels
        # in facade version 3.
        await cloud_facade . UpdateCredentialsCheckModels ( credentials = tagged_credentials , force = force , )
    else :
        await cloud_facade . UpdateCredentials ( tagged_credentials )
    return name
|
def parametrize_peaks ( self , intervals , max_peakwidth = 50 , min_peakwidth = 25 , symmetric_bounds = True ) :
    """Computes and stores the intonation profile of an audio recording.

    :param intervals: these will be the reference set of intervals to which peak positions
        correspond to. For each interval, the properties of corresponding peak, if exists,
        will be computed and stored as intonation profile.
    :param max_peakwidth: the maximum allowed width of the peak at the base for computing
        parameters of the distribution.
    :param min_peakwidth: the minimum allowed width of the peak at the base for computing
        parameters of the distribution.
    :param symmetric_bounds: when True, the left/right bounds around each peak are made
        symmetric about the peak position before extracting the distribution.
    """
    # NOTE(review): uses ``xrange`` — this block is Python-2 code.
    assert isinstance ( self . pitch_obj . pitch , np . ndarray )
    # Drop sentinel/invalid pitch values (anything <= -10000).
    valid_pitch = self . pitch_obj . pitch
    valid_pitch = [ i for i in valid_pitch if i > - 10000 ]
    valid_pitch = np . array ( valid_pitch )
    parameters = { }
    for i in xrange ( len ( self . histogram . peaks [ "peaks" ] [ 0 ] ) ) :
        peak_pos = self . histogram . peaks [ "peaks" ] [ 0 ] [ i ]
        # Set left and right bounds of the distribution .
        max_leftbound = peak_pos - max_peakwidth
        max_rightbound = peak_pos + max_peakwidth
        leftbound = max_leftbound
        rightbound = max_rightbound
        # Tighten the bounds to the nearest valleys on each side of the peak,
        # when such valleys exist in the histogram.
        nearest_valleyindex = utils . find_nearest_index ( self . histogram . peaks [ "valleys" ] [ 0 ] , peak_pos )
        if peak_pos > self . histogram . peaks [ "valleys" ] [ 0 ] [ nearest_valleyindex ] :
            # Nearest valley lies to the left of the peak.
            leftbound = self . histogram . peaks [ "valleys" ] [ 0 ] [ nearest_valleyindex ]
            if len ( self . histogram . peaks [ "valleys" ] [ 0 ] [ nearest_valleyindex + 1 : ] ) == 0 :
                # No valley to the right: fall back to the maximum width.
                rightbound = peak_pos + max_peakwidth
            else :
                offset = nearest_valleyindex + 1
                nearest_valleyindex = utils . find_nearest_index ( self . histogram . peaks [ "valleys" ] [ 0 ] [ offset : ] , peak_pos )
                rightbound = self . histogram . peaks [ "valleys" ] [ 0 ] [ offset + nearest_valleyindex ]
        else :
            # Nearest valley lies to the right of the peak.
            rightbound = self . histogram . peaks [ "valleys" ] [ 0 ] [ nearest_valleyindex ]
            if len ( self . histogram . peaks [ "valleys" ] [ 0 ] [ : nearest_valleyindex ] ) == 0 :
                # No valley to the left: fall back to the maximum width.
                leftbound = peak_pos - max_peakwidth
            else :
                nearest_valleyindex = utils . find_nearest_index ( self . histogram . peaks [ "valleys" ] [ 0 ] [ : nearest_valleyindex ] , peak_pos )
                leftbound = self . histogram . peaks [ "valleys" ] [ 0 ] [ nearest_valleyindex ]
        # In terms of x - axis , leftbound should be at least min _ peakwidth
        # less than peak _ pos , and at max max _ peakwidth less than peak _ pos ,
        # and viceversa for the rightbound .
        if leftbound < max_leftbound :
            leftbound = max_leftbound
        elif leftbound > peak_pos - min_peakwidth :
            leftbound = peak_pos - min_peakwidth
        if rightbound > max_rightbound :
            rightbound = max_rightbound
        elif rightbound < peak_pos + min_peakwidth :
            rightbound = peak_pos + min_peakwidth
        # If symmetric bounds are asked for , then make the bounds symmetric
        # by shrinking the wider side to match the narrower one.
        if symmetric_bounds :
            if peak_pos - leftbound < rightbound - peak_pos :
                imbalance = ( rightbound - peak_pos ) - ( peak_pos - leftbound )
                rightbound -= imbalance
            else :
                imbalance = ( peak_pos - leftbound ) - ( rightbound - peak_pos )
                leftbound += imbalance
        # extract the distribution and estimate the parameters
        distribution = valid_pitch [ valid_pitch >= leftbound ]
        distribution = distribution [ distribution <= rightbound ]
        interval_index = utils . find_nearest_index ( intervals , peak_pos )
        interval = intervals [ interval_index ]
        _mean = float ( np . mean ( distribution ) )
        # NOTE(review): ``variation`` computes the coefficient of variation,
        # not the variance, yet the result is stored under the "variance"
        # key and fed into the Pearson-skew formula — confirm downstream use.
        _variance = float ( variation ( distribution ) )
        _skew = float ( skew ( distribution ) )
        _kurtosis = float ( kurtosis ( distribution ) )
        pearson_skew = float ( 3.0 * ( _mean - peak_pos ) / np . sqrt ( abs ( _variance ) ) )
        parameters [ interval ] = { "position" : float ( peak_pos ) , "mean" : _mean , "amplitude" : float ( self . histogram . peaks [ "peaks" ] [ 1 ] [ i ] ) , "variance" : _variance , "skew1" : _skew , "skew2" : pearson_skew , "kurtosis" : _kurtosis }
    self . intonation_profile = parameters
|
def remove_child_log(self, log_id, child_id):
    """Removes a child from a log.

    arg:    log_id (osid.id.Id): the ``Id`` of a log
    arg:    child_id (osid.id.Id): the ``Id`` of the new child
    raise:  NotFound - ``log_id`` not a parent of ``child_id``
    raise:  NullArgument - ``log_id`` or ``child_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Prefer the catalog session when one is configured; otherwise fall
    # back to the hierarchy session.
    if self._catalog_session is not None:
        return self._catalog_session.remove_child_catalog(
            catalog_id=log_id, child_id=child_id)
    return self._hierarchy_session.remove_child(id_=log_id, child_id=child_id)
|
def as_data_frame(self, **kwargs):
    """Return information for all Files tracked in the Layout as a pandas
    DataFrame.

    Args:
        kwargs: Optional keyword arguments passed on to get(). This allows
            one to easily select only a subset of files for export.

    Returns:
        A pandas DataFrame, where each row is a file, and each column is
        a tracked entity. NaNs are injected whenever a file has no
        value for a given attribute.
    """
    try:
        import pandas as pd
    except ImportError:
        raise ImportError("What are you doing trying to export a Layout "
                          "as a pandas DataFrame when you don't have "
                          "pandas installed? Eh? Eh?")
    # Any filter kwargs go through get(); otherwise take every tracked file.
    if kwargs:
        selected = self.get(return_type='obj', **kwargs)
    else:
        selected = self.files.values()
    frame = pd.DataFrame.from_records([f.entities for f in selected])
    frame.insert(0, 'path', [f.path for f in selected])
    return frame
|
def _encode(self, data: mx.sym.Symbol, data_length: mx.sym.Symbol, seq_len: int) -> mx.sym.Symbol:
    """Bidirectionally encodes time-major data."""
    # Reverse the input (respecting per-example lengths) for the backward
    # pass: (seq_len, batch_size, num_embed)
    reversed_data = mx.sym.SequenceReverse(data=data,
                                           sequence_length=data_length,
                                           use_sequence_length=True)
    # (seq_length, batch, cell_num_hidden) each
    hidden_fwd, _, _ = self.forward_rnn.encode(data, data_length, seq_len)
    hidden_bwd, _, _ = self.reverse_rnn.encode(reversed_data, data_length, seq_len)
    # Undo the reversal so backward states align with forward time steps.
    hidden_bwd = mx.sym.SequenceReverse(data=hidden_bwd,
                                        sequence_length=data_length,
                                        use_sequence_length=True)
    # (seq_length, batch, 2 * cell_num_hidden)
    return mx.sym.concat(hidden_fwd, hidden_bwd, dim=2,
                         name="%s_rnn" % self.prefix)
|
def _restripe ( mountpoint , direction , * devices , ** kwargs ) :
    '''Restripe BTRFS: add or remove devices from the particular mounted filesystem.

    mountpoint: mount point of the BTRFS filesystem (must NOT be a device path)
    direction: "add" or "remove" (becomes the ``btrfs device <direction>`` subcommand)
    devices: one or more device paths; each must be known to ``btrfs.devices``
    kwargs: for "add": ``nodiscard`` (-K), ``force`` (-f), and optional
        ``dc``/``mc`` data/metadata conversion profiles for the rebalance
    '''
    fs_log = [ ]
    # Sanity check: the first argument must be a mount point, not a device.
    if salt . utils . fsutils . _is_device ( mountpoint ) :
        raise CommandExecutionError ( "Mountpount expected, while device \"{0}\" specified" . format ( mountpoint ) )
    # Verify a BTRFS filesystem is actually mounted at the mount point.
    mounted = False
    for device , mntpoints in six . iteritems ( salt . utils . fsutils . _get_mounts ( "btrfs" ) ) :
        for mntdata in mntpoints :
            if mntdata [ 'mount_point' ] == mountpoint :
                mounted = True
                break
    if not mounted :
        raise CommandExecutionError ( "No BTRFS device mounted on \"{0}\" mountpoint" . format ( mountpoint ) )
    if not devices :
        raise CommandExecutionError ( "No devices specified." )
    # Every requested device must be known to btrfs.
    available_devices = __salt__ [ 'btrfs.devices' ] ( )
    for device in devices :
        if device not in six . iterkeys ( available_devices ) :
            raise CommandExecutionError ( "Device \"{0}\" is not recognized" . format ( device ) )
    # Assemble the "btrfs device add|remove [...] <devices> <mountpoint>" call.
    cmd = [ 'btrfs device {0}' . format ( direction ) ]
    for device in devices :
        cmd . append ( device )
    if direction == 'add' :
        if kwargs . get ( "nodiscard" ) :
            cmd . append ( "-K" )
        if kwargs . get ( "force" ) :
            cmd . append ( "-f" )
    cmd . append ( mountpoint )
    out = __salt__ [ 'cmd.run_all' ] ( ' ' . join ( cmd ) )
    salt . utils . fsutils . _verify_run ( out )
    if out [ 'stdout' ] :
        fs_log . append ( out [ 'stdout' ] )
    # After adding devices, rebalance the filesystem (optionally converting
    # data/metadata profiles when both "dc" and "mc" are supplied).
    if direction == 'add' :
        out = None
        data_conversion = kwargs . get ( "dc" )
        meta_conversion = kwargs . get ( "mc" )
        if data_conversion and meta_conversion :
            out = __salt__ [ 'cmd.run_all' ] ( "btrfs balance start -dconvert={0} -mconvert={1} {2}" . format ( data_conversion , meta_conversion , mountpoint ) )
        else :
            out = __salt__ [ 'cmd.run_all' ] ( "btrfs filesystem balance {0}" . format ( mountpoint ) )
        salt . utils . fsutils . _verify_run ( out )
        if out [ 'stdout' ] :
            fs_log . append ( out [ 'stdout' ] )
    # Summarize the result
    ret = { }
    if fs_log :
        ret . update ( { 'log' : '\n' . join ( fs_log ) } )
    ret . update ( __salt__ [ 'btrfs.info' ] ( mountpoint ) )
    return ret
|
def log(ltype, method, page, user_agent):
    """Append a message to the djangospam log file in the format::

        "<datetime>: <exception> method <HTTP method> page <path> user agent <user_agent>"

    Args:
        ltype: log/exception type tag written at the start of the line.
        method: HTTP method of the request.
        page: request path.
        user_agent: the client's User-Agent string.

    Raises:
        LogError: when writing fails and ``settings.DJANGOSPAM_FAIL_ON_LOG``
            is set; otherwise write failures are silently ignored.
    """
    try:
        # `with` guarantees the handle is closed even when write() fails
        # (the original leaked it in that case).
        with open(settings.DJANGOSPAM_LOG, "a") as f:
            f.write("%s: %s method %s page %s user agent %s\n"
                    % (datetime.datetime.now(), ltype, method, page, user_agent))
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; catch only genuine errors.
        if settings.DJANGOSPAM_FAIL_ON_LOG:
            exc_type, exc_value = sys.exc_info()[:2]
            raise LogError(exc_type, exc_value)
|
def get_card_prices(ctx, currency):
    """Prints out lowest card prices for an application.

    A comma-separated list of application IDs is also supported; in that
    case the per-app output is not detailed.
    """
    raw = ctx.obj['appid']
    multiple = ',' in raw
    app_ids = [part.strip() for part in raw.split(',')] if multiple else [raw]
    for app_id in app_ids:
        # Detailed output only when a single application was requested.
        print_card_prices(app_id, currency, detailed=not multiple)
        click.echo('')
|
def create_alignment(self, x_align=0, y_align=0, x_scale=0, y_scale=0):
    """Return a new Gtk.Alignment configured with the given alignment
    fractions and scale factors."""
    widget = Gtk.Alignment()
    widget.set(x_align, y_align, x_scale, y_scale)
    return widget
|
def mysql_batch_and_fetch(mysql_config, *sql_queries):
    """Execute a series of SQL statements before the final Select query.

    Parameters
    ----------
    mysql_config : dict
        The user credentials as defined in MySQLdb.connect, e.g.
        mysql_config = {'user': 'myname', 'passwd': 'supersecret',
                        'host': '<ip adress or domain>', 'db': '<myschema>'}
    sql_queries : list or tuple
        A list or tuple of SQL queries whereas the last SQL command
        has to be the final Select query.
        (If a string is provided the semicolon ";" is used to split
        the string into a list of strings)

    Returns
    -------
    result_table : tuple
        The result table as tuple of tuples.

    Sources
    -------
    * http://mysqlclient.readthedocs.io/user_guide.html
    """
    # load modules locally so importing this module does not require MySQLdb
    import MySQLdb as mydb
    import sys
    import gc
    # ensure that `sql_queries` is a list/tuple; a single string argument
    # is split on ";" into individual statements
    if len(sql_queries) == 1:
        if isinstance(sql_queries[0], str):
            sql_queries = sql_queries[0].split(";")
        if isinstance(sql_queries[0], (list, tuple)):
            sql_queries = sql_queries[0]
    # connect and execute queries
    try:
        conn = mydb.connect(**mysql_config)
        curs = conn.cursor()
        for sql_query in sql_queries:
            # skip empty fragments produced by trailing semicolons
            if len(sql_query) > 0:
                curs.execute(sql_query)
        # only the last executed statement's result set is fetched
        result_table = curs.fetchall()
    except mydb.Error as err:
        # NOTE(review): terminates the whole process on a DB error; callers
        # cannot recover -- confirm this is the intended contract.
        print(err)
        gc.collect()
        sys.exit(1)
    else:
        if conn:
            conn.close()
    gc.collect()
    return result_table
|
def disassemble(bytecode):
    """Generator. Disassembles Java bytecode into a sequence of
    (offset, code, args) tuples.

    :type bytecode: bytes
    """
    pos = 0
    length = len(bytecode)
    while pos < length:
        start = pos
        opcode = bytecode[pos]
        # On Python 2 indexing bytes yields a 1-char str; normalize to int.
        if not isinstance(opcode, int):
            opcode = ord(opcode)
        pos += 1
        operands = ()
        arg_parser = get_arg_format(opcode)
        if arg_parser:
            # The per-opcode parser consumes its operands and returns the
            # new read position.
            operands, pos = arg_parser(bytecode, pos)
        yield (start, opcode, operands)
|
def _is_simple_query(cls, query):
    """Inspect the internals of the Query and say if we think its WHERE
    clause can be used in a HANDLER statement."""
    # HANDLER supports only a single table with a plain WHERE clause, so
    # slicing, selected columns, grouping, DISTINCT, ordering, or joins
    # all rule the query out.
    if query.low_mark or query.high_mark:
        return False
    if query.select or query.group_by or query.distinct or query.order_by:
        return False
    return len(query.alias_map) <= 1
|
def register_course(self, course_id):
    """Submit a course registration (履修申請) on TWINS.

    :param course_id: timetable code of the course to register.
    :raises RequestError: if the course's opening module cannot be
        determined, or if TWINS reports an error for the registration.
    """
    # Look up in which module (term) the course is offered.
    kdb = twins.kdb.Kdb()
    first_module = kdb.get_course_info(course_id)["modules"][:2]
    # Only spring ("春") and autumn ("秋") modules are supported.
    if not first_module.startswith("春") and not first_module.startswith("秋"):
        raise RequestError()
    # Map the module name to TWINS' internal module code and semester
    # classification code ("A" for spring, "B" for autumn modules).
    module_code, gakkiKbnCode = {"春A": (1, "A"), "春B": (2, "A"), "春C": (3, "A"), "秋A": (4, "B"), "秋B": (5, "B"), "秋C": (6, "B")}.get(first_module)
    # Walk the registration flow: search -> input -> insert.
    self.req("RSW0001000-flow")
    self.get({"_eventId": "search", "moduleCode": module_code, "gakkiKbnCode": gakkiKbnCode})
    self.post({"_eventId": "input", "yobi": "1", "jigen": "1"}, True)
    r = self.post({"_eventId": "insert", "nendo": get_nendo(), "jikanwariShozokuCode": "", "jikanwariCode": course_id, "dummy": ""}, True)
    # TWINS reports failures inline in the page; surface them as an exception.
    errmsg = pq(r.text)(".error").text()
    if errmsg != "":
        raise RequestError()
|
def create_sys_dsn(driver: str, **kw) -> bool:
    """(Windows only.)
    Create a system ODBC data source name (DSN).

    Args:
        driver: ODBC driver name
        kw: driver attributes

    Returns:
        bool: was the DSN created?
    """
    # SQLConfigDataSource expects the attributes as NUL-joined
    # "key=value" pairs.
    attributes = ["%s=%s" % (key, value) for key, value in kw.items()]
    return bool(ctypes.windll.ODBCCP32.SQLConfigDataSource(
        0, ODBC_ADD_SYS_DSN, driver, nul.join(attributes)))
|
def inspect_select_calculation(self):
    """Inspect the result of the CifSelectCalculation, verifying that it produced a CifData output node.

    On success stores the output node in ``self.ctx.cif``; on failure
    reports and returns the corresponding exit code.
    """
    try:
        node = self.ctx.cif_select
        # Accessing `outputs.cif` raises NotExistent when the calculation
        # did not create the expected output node.
        self.ctx.cif = node.outputs.cif
    except exceptions.NotExistent:
        self.report('aborting: CifSelectCalculation<{}> did not return the required cif output'.format(node.uuid))
        return self.exit_codes.ERROR_CIF_SELECT_FAILED
|
def find_handler(self, request: httputil.HTTPServerRequest, **kwargs: Any) -> Optional[httputil.HTTPMessageDelegate]:
    """Must be implemented to return an appropriate instance of `~.httputil.HTTPMessageDelegate`
    that can serve the request.
    Routing implementations may pass additional kwargs to extend the routing logic.

    :arg httputil.HTTPServerRequest request: current HTTP request.
    :arg kwargs: additional keyword arguments passed by routing implementation.
    :returns: an instance of `~.httputil.HTTPMessageDelegate` that will be used to
        process the request.
    """
    # Abstract hook: concrete routers must override this.
    raise NotImplementedError()
|
def rule_generator(*funcs):
    """Construct a multivariate quadrature generator.

    Args:
        funcs (:py:data:`typing.Callable`):
            One-dimensional integration rules; each takes a single
            positional ``order`` argument and returns one-dimensional
            ``abscissas`` and ``weights`` arrays.

    Returns:
        (:py:data:`typing.Callable`):
            Multidimensional quadrature function taking ``order`` and
            ``sparse`` plus an optional ``part``. ``sparse`` selects a
            Smolyak sparse grid; ``part`` generates only a subset of the
            rule (for parallelization).
    """
    dimension = len(funcs)
    tensor_rule = create_tensorprod_function(funcs)
    assert callable(tensor_rule)
    multivariate_rule = create_mv_rule(tensor_rule, dimension)
    assert callable(multivariate_rule)
    return multivariate_rule
|
def wait_for_read_result(self):
    """Block until callback data arrives, then return and clear it.

    @return: the resultant data delivered by the callback
    """
    # Poll until the callback has populated `callback_data`.
    while not self.callback_data:
        self.board.sleep(.001)
    result, self.callback_data = self.callback_data, []
    return result
|
def create_binary_security_token(key_file):
    """Create the BinarySecurityToken node containing the x509 certificate.

    :param key_file: path to a PEM file holding the certificate.
    :return: an ``etree`` element carrying the base64-encoded DER
        certificate, tagged with a unique wsu:Id for later referencing.
    """
    node = etree.Element(ns_id('BinarySecurityToken', ns.wssens), nsmap={ns.wssens[0]: ns.wssens[1]})
    # Unique wsu:Id so a signature can reference this token.
    node.set(ns_id('Id', ns.wsuns), get_unique_id())
    node.set('EncodingType', ns.wssns[1] + 'Base64Binary')
    node.set('ValueType', BINARY_TOKEN_TYPE)
    # Re-encode the PEM certificate as base64 of its DER (ASN.1) form.
    with open(key_file) as fh:
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, fh.read())
    node.text = base64.b64encode(crypto.dump_certificate(crypto.FILETYPE_ASN1, cert))
    return node
|
def subscribe(self, *args, **kwargs):
    """Subscribe to a publish port. Example:
    `vexbot: !subscribe tcp://127.0.0.1:3000`

    :param args: one or more zmq addresses to connect the subscription
        socket to.
    :raises RuntimeError: when connecting to an address fails (bad format,
        unreachable endpoint, ...); the original error is chained.
    """
    for address in args:
        try:
            self.messaging.subscription_socket.connect(address)
        except Exception as exc:
            # The original implicit string concatenation ran the message
            # parts together ("...:80address tried"); add the separating
            # spaces and chain the underlying error for diagnosis.
            raise RuntimeError('addresses need to be in the form of: tcp://address_here:port_number '
                               'example: tcp://10.2.3.4:80 '
                               'address tried {}'.format(address)) from exc
|
def process_path_part(part, parameters):
    """Convert one path segment into its regex form.

    A ``{name}``-style segment becomes a named regex group when a matching
    path parameter declaration exists; any other segment (or an unknown
    parameter name) is returned with regex metacharacters escaped.
    """
    if PARAMETER_REGEX.match(part):
        name = part.strip('{}')
        try:
            declared = find_parameter(parameters, name=name, in_=PATH)
        except ValueError:
            # No declaration for this name: fall through and treat the
            # segment literally.
            pass
        else:
            return construct_parameter_pattern(declared)
    return escape_regex_special_chars(part)
|
def is_tradetime_now():
    """Return True if the Shanghai exchange ('china.sse') is currently in
    a trading session, taking holidays into account.

    :return: bool
    """
    calendar = Calendar('china.sse')
    local = time.localtime()
    # Holidays and weekends are not business days.
    if not calendar.isBizDay(Date(local[0], local[1], local[2])):
        return False
    clock = (local.tm_hour, local.tm_min, local.tm_sec)
    morning = (9, 15, 0) <= clock <= (11, 30, 0)
    afternoon = (13, 0, 0) <= clock <= (15, 0, 0)
    return morning or afternoon
|
def conll_ner2json(input_data, **kwargs):
    """Convert files in the CoNLL-2003 NER format into JSON format for use
    with the train cli.

    :param input_data: raw CoNLL-2003 text: documents separated by the
        "-DOCSTART- -X- O O" line, sentences by blank lines, one token per
        line ("word pos chunk iob-entity").
    :return: list of JSON-serializable document dicts.
    """
    delimit_docs = "-DOCSTART- -X- O O"
    output_docs = []
    for doc in input_data.strip().split(delimit_docs):
        doc = doc.strip()
        if not doc:
            continue
        output_doc = []
        for sent in doc.split("\n\n"):
            sent = sent.strip()
            if not sent:
                continue
            lines = [line.strip() for line in sent.split("\n") if line.strip()]
            words, tags, chunks, iob_ents = zip(*[line.split() for line in lines])
            # Convert IOB entity tags to the BILUO scheme expected downstream.
            biluo_ents = iob_to_biluo(iob_ents)
            output_doc.append({"tokens": [{"orth": w, "tag": tag, "ner": ent}
                                          for (w, tag, ent) in zip(words, tags, biluo_ents)]})
        output_docs.append({"id": len(output_docs), "paragraphs": [{"sentences": output_doc}]})
        # (Removed the original trailing `output_doc = []`: it was a dead
        # store, since `output_doc` is rebound at the top of each iteration.)
    return output_docs
|
def load_template(name=None):
    """Loads a template of the specified name.

    Templates are placed in the <template_dir> directory in YAML format
    with a .yaml extension. If no name is specified the default template
    (<template_dir>/default.yaml) is returned, if it exists. A template
    with an "extends" key is recursively merged onto its parent.

    :param name: The name of the template to load.
    :type name: str or None (default)
    :raises TemplateNotFoundError: if no template file with that name exists.
    """
    if name is None:
        name = "default"
    logger.info("Loading template with name %s", name)
    try:
        # `with` ensures the handle is closed even if YAML parsing fails
        # (the original leaked the handle on a parse error).
        with open("%s/%s.yaml" % (template_path, name)) as template_file:
            template = yaml.safe_load(template_file)
    except IOError:
        raise TemplateNotFoundError
    if "extends" in template:
        # Recursively load and merge the parent template under this one.
        logger.debug("Merging %s with %s", name, template["extends"])
        template = _merge(load_template(template["extends"]), template)
    return template
|
def get_page(self, url, *args, **kwds):
    """Define our own get_page method so that we can easily override the
    factory when we need to. This was copied from the following:
      * twisted.web.client.getPage
      * twisted.web.client._makeGetterFactory

    :param url: the URL to request.
    :keyword postdata: optional request body.
    :keyword method: HTTP method, defaults to 'GET'.
    :keyword headers: dict of request headers.
    :return: a Deferred firing with the handled response.
    """
    contextFactory = None
    scheme, host, port, path = parse(url)
    data = kwds.get('postdata', None)
    self._method = method = kwds.get('method', 'GET')
    self.request_headers = self._headers(kwds.get('headers', {}))
    # Wrap a request body in an IBodyProducer unless one was already set.
    if (self.body_producer is None) and (data is not None):
        self.body_producer = FileBodyProducer(StringIO(data))
    # With hostname verification enabled the agent supplies its own TLS
    # context; otherwise fall back to the permissive default factory.
    if self.endpoint.ssl_hostname_verification:
        contextFactory = None
    else:
        contextFactory = WebClientContextFactory()
    agent = _get_agent(scheme, host, self.reactor, contextFactory)
    if scheme == "https":
        self.client.url = url
    d = agent.request(method, url, self.request_headers, self.body_producer)
    d.addCallback(self._handle_response)
    return d
|
def get_all_key_pairs(self, keynames=None, filters=None):
    """Get all key pairs associated with your account.

    :type keynames: list
    :param keynames: names of the keypairs to retrieve; all key pairs
        are returned when omitted.
    :type filters: dict
    :param filters: optional mapping of filter name to filter value used
        to limit the results (see the EC2 API guide for the allowable
        names/values per request).
    :rtype: list
    :return: a list of :class:`boto.ec2.keypair.KeyPair`
    """
    request_params = {}
    if keynames:
        self.build_list_params(request_params, keynames, 'KeyName')
    if filters:
        self.build_filter_params(request_params, filters)
    return self.get_list('DescribeKeyPairs', request_params,
                         [('item', KeyPair)], verb='POST')
|
def __stopOpenThreadWpan(self):
    """stop OpenThreadWpan

    Returns:
        True: successfully stop OpenThreadWpan
        False: failed to stop OpenThreadWpan
        NOTE(review): when an exception occurs it is only logged and the
        method implicitly returns None, not False -- confirm callers
        treat that as a failure.
    """
    # NOTE: Python 2 syntax (print statement, `except Exception, e`).
    print 'call stopOpenThreadWpan'
    try:
        # Both leaving the network and erasing the dataset must succeed.
        if self.__sendCommand(WPANCTL_CMD + 'leave')[0] != 'Fail' and self.__sendCommand(WPANCTL_CMD + 'dataset erase')[0] != 'Fail':
            return True
        else:
            return False
    except Exception, e:
        ModuleHelper.WriteIntoDebugLogger('stopOpenThreadWpan() Error: ' + str(e))
|
def update_models(ctx, f=False):
    """Updates local django db projects models using salic database from
    MinC.

    :param f: when truthy, pass ``--force True`` to the management command.
    """
    command = 'create_models_from_sql --force True' if f else 'create_models_from_sql'
    manage(ctx, command, env={})
|
def _create_patterns(self, properties=None):
    """Return a list (of length patterns_per_label) of PatternGenerator
    instances, each built from pattern_type and pattern_parameters.

    ``properties`` (e.g. {'pattern_label': pattern_label}) is accepted so
    subclasses can vary the generators per label; it is unused here.
    """
    patterns = []
    for _ in range(self.patterns_per_label):
        patterns.append(self.pattern_type(**self.pattern_parameters))
    return patterns
|
def imshow(self, *args, show_crosshair=True, show_mask=True, show_qscale=True, axes=None, invalid_color='black', mask_opacity=0.8, show_colorbar=True, **kwargs):
    """Plot the matrix (imshow)

    Keyword arguments [and their default values]:
        show_crosshair [True]: if a cross-hair marking the beam position is
            to be plotted.
        show_mask [True]: if the mask is to be plotted.
        show_qscale [True]: if the horizontal and vertical axes are to be
            scaled into q
        axes [None]: the axes into which the image should be plotted. If
            None, defaults to the currently active axes (returned by plt.gca())
        invalid_color ['black']: the color for invalid (NaN or infinite) pixels
        mask_opacity [0.8]: the opacity of the overlaid mask (1 is fully
            opaque, 0 is fully transparent)
        show_colorbar [True]: if a colorbar is to be added. Can be a boolean
            value (True or False) or an instance of matplotlib.axes.Axes, into
            which the color bar should be drawn.

    All other keywords are forwarded to plt.imshow() or
    matplotlib.Axes.imshow()

    Returns: the image instance returned by imshow()
    """
    if 'aspect' not in kwargs:
        kwargs['aspect'] = 'equal'
    if 'interpolation' not in kwargs:
        kwargs['interpolation'] = 'nearest'
    if 'origin' not in kwargs:
        kwargs['origin'] = 'upper'
    if show_qscale:
        # Scale the axes into q-space; with 'upper' origin the vertical
        # axis is mirrored.
        ymin, xmin = self.pixel_to_q(0, 0)
        ymax, xmax = self.pixel_to_q(*self.shape)
        if kwargs['origin'].upper() == 'UPPER':
            kwargs['extent'] = [xmin, xmax, -ymax, -ymin]
        else:
            kwargs['extent'] = [xmin, xmax, ymin, ymax]
        bcx = 0
        bcy = 0
    else:
        bcx = self.header.beamcenterx
        bcy = self.header.beamcentery
        xmin = 0
        xmax = self.shape[1]
        ymin = 0
        ymax = self.shape[0]
        if kwargs['origin'].upper() == 'UPPER':
            kwargs['extent'] = [0, self.shape[1], self.shape[0], 0]
        else:
            kwargs['extent'] = [0, self.shape[1], 0, self.shape[0]]
    if axes is None:
        axes = plt.gca()
    ret = axes.imshow(self.intensity, **kwargs)
    if show_mask:
        # workaround: because of the colour-scaling we do here, full one and
        # full zero masks look the SAME, i.e. all the image is shaded.
        # Thus if we have a fully unmasked matrix, skip this section.
        # This also conserves memory.
        if (self.mask == 0).sum():  # there are some masked pixels
            # we construct another representation of the mask, where the
            # masked pixels are 1.0, and the unmasked ones will be np.nan.
            # They will thus be not rendered.
            # (`np.float` was removed in NumPy 1.24; use the builtin float.)
            mf = np.ones(self.mask.shape, float)
            mf[self.mask != 0] = np.nan
            kwargs['cmap'] = matplotlib.cm.gray_r
            kwargs['alpha'] = mask_opacity
            kwargs['norm'] = matplotlib.colors.Normalize()
            axes.imshow(mf, **kwargs)
    if show_crosshair:
        ax = axes.axis()  # save zoom state
        axes.plot([xmin, xmax], [bcy] * 2, 'w-')
        axes.plot([bcx] * 2, [ymin, ymax], 'w-')
        axes.axis(ax)  # restore zoom state
    axes.set_facecolor(invalid_color)
    if show_colorbar:
        if isinstance(show_colorbar, matplotlib.axes.Axes):
            axes.figure.colorbar(ret, cax=show_colorbar)
        else:
            # try to find a suitable colorbar axes: check if the plot target
            # axes already contains some images, then check if their
            # colorbars exist as axes.
            cax = [i.colorbar[1] for i in axes.images if i.colorbar is not None]
            cax = [c for c in cax if c in c.figure.axes]
            if cax:
                cax = cax[0]
            else:
                cax = None
            axes.figure.colorbar(ret, cax=cax, ax=axes)
    axes.figure.canvas.draw()
    return ret
|
def queue_instances(instances):
    '''Queue a set of instances to be provisioned later. Expects a list.

    Currently this only queries node data, and then places it in the cloud
    cache (if configured). If the salt-cloud-reactor is being used, these
    instances will be automatically provisioned using that.

    For more information about the salt-cloud-reactor, see:
    https://github.com/saltstack-formulas/salt-cloud-reactor
    '''
    for instance_id in instances:
        # Fetch the node's data and push it into the salt-cloud cache so
        # a reactor (if any) can pick it up.
        node = _get_node(instance_id=instance_id)
        __utils__['cloud.cache_node'](node, __active_provider_name__, __opts__)
|
def update(self):
    """Update processes stats using the input method."""
    # Start from a fresh/default stats structure.
    stats = self.get_init_value()
    if self.input_method == 'local':
        # Standard system library: one call refreshes both processcount
        # and processlist; keep only the counters here.
        glances_processes.update()
        stats = glances_processes.getcount()
    elif self.input_method == 'snmp':
        # Not available over SNMP.
        pass
    # Publish and return the refreshed stats.
    self.stats = stats
    return self.stats
|
def _smb3kdf(self, ki, label, context):
    """See SMB 3.x key derivation function
    https://blogs.msdn.microsoft.com/openspecification/2017/05/26/smb-2-and-smb-3-security-in-windows-10-the-anatomy-of-signing-and-cryptographic-keys/

    :param ki: The session key is the KDK used as an input to the KDF
    :param label: The purpose of this derived key as bytes string
    :param context: The context information of this derived key as bytes
        string
    :return: Key derived by the KDF as specified by [SP800-108] 5.1
    """
    # SP800-108 counter-mode KDF over HMAC-SHA256: 4-byte counter and
    # 4-byte length fields placed before the fixed input data, yielding
    # a 16-byte (128-bit) derived key.
    kdf = KBKDFHMAC(algorithm=hashes.SHA256(), mode=Mode.CounterMode, length=16, rlen=4, llen=4, location=CounterLocation.BeforeFixed, label=label, context=context, fixed=None, backend=default_backend())
    return kdf.derive(ki)
|
def stylesheet_url(path, only_path=False, cache_buster=True):
    """Generates a path to an asset found relative to the project's css directory.
    Passing a true value as the second argument will cause the only the path to
    be returned instead of a `url()` function

    :param path: asset path (a Sass string value).
    :param only_path: return a bare path string instead of ``url(...)``.
    :param cache_buster: append the file's mtime to the URL.
    """
    filepath = String.unquoted(path).value
    if callable(config.STATIC_ROOT):
        # STATIC_ROOT may be a finder callable yielding (file, storage) pairs.
        try:
            _file, _storage = list(config.STATIC_ROOT(filepath))[0]
        except IndexError:
            filetime = None
        else:
            filetime = getmtime(_file, _storage)
        if filetime is None:
            # Unknown mtime: use a constant marker so the URL stays stable.
            filetime = 'NA'
    else:
        _path = os.path.join(config.STATIC_ROOT, filepath.strip('/'))
        filetime = getmtime(_path)
        if filetime is None:
            filetime = 'NA'
    BASE_URL = config.STATIC_URL
    url = '%s%s' % (BASE_URL, filepath)
    if cache_buster:
        # Append the mtime as a query parameter to bust stale caches.
        url = add_cache_buster(url, filetime)
    if only_path:
        return String.unquoted(url)
    else:
        return Url.unquoted(url)
|
def find_by_filter(self, filters, all_items):
    """Find items by filters

    :param filters: list of filters
    :type filters: list
    :param all_items: monitoring items
    :type: dict
    :return: list of items
    :rtype: list
    """
    matched = []
    for item in self:
        # Expose the current item to the filters under the right key:
        # anything with a "host" attribute is a service, otherwise a host.
        if hasattr(item, "host"):
            all_items["service"] = item
        else:
            all_items["host"] = item
        # Keep the item only if every filter accepts it (short-circuits
        # on the first rejection, like the original flag-and-break loop).
        if all(filt(all_items) for filt in filters):
            matched.append(item)
    return matched
|
def get_key_from_url(self, url):
    """Parse a REST URL into a dot-separated database key.

    Turns e.g.
    http://rest.ensembl.org/sequence/id/ENST00000538324?type=genomic;expand_3prime=10;expand_5prime=10
    into 'sequence.id.ENST00000538324.genomic'.

    Args:
        url: URL for the Ensembl REST service

    Returns:
        a parsed unique database key for the URL's data
    """
    parts = url.split("/")[3:]
    # The final component may carry "?k=v;..." arguments; only the first
    # one helps to uniquely identify the data.
    tail = parts.pop().split(";")[0]
    # "LONG_ID?feature=transcript" -> ["LONG_ID", "feature=transcript"]
    pieces = tail.split("?", 1)
    last = pieces.pop()
    if "=" in last:
        # keep only the value, e.g. "type=genomic" -> "genomic"
        _, last = last.split("=")
    parts += pieces + [last]
    # Replace characters not tolerated in keys and drop blank entries.
    cleaned = (piece.replace(':', '_') for piece in parts if piece != '')
    return ".".join(cleaned)
|
def merge_webhooks_runset(runset):
    """Make some statistics on the run set.

    Returns a dict with:
      - ellapse: time between the earliest start and the latest end
      - errors_count: number of runs carrying an 'error' key
      - total_count: number of runs in the set
    """
    started = min(w['started_at'] for w in runset)
    ended = max(w['ended_at'] for w in runset)
    return {
        'ellapse': ended - started,
        'errors_count': sum(1 for w in runset if 'error' in w),
        'total_count': len(runset),
    }
|
def extract_db_info(self, obj, keys):
    """Extract metadata from serialized file"""
    converted = self.convert(obj)
    result = super(LinesCatalog, self).extract_db_info(converted, keys)
    # Stamp catalog-specific metadata on top of the base info.
    result.update({
        'tags': {},
        'type': 'LinesCatalog',
        'uuid': str(uuid.uuid1()),
        'observation_date': datetime.datetime.utcnow(),
        'quality_control': QC.GOOD,
        'origin': {},
    })
    return result
|
def report(mount):
    '''Report on quotas for a specific volume

    CLI Example:

    .. code-block:: bash

        salt '*' quota.report /media/data
    '''
    # Collect both user (-u) and group (-g) quota tables for the mount.
    quotas = {
        'User Quotas': _parse_quota(mount, '-u'),
        'Group Quotas': _parse_quota(mount, '-g'),
    }
    return {mount: quotas}
|
def find_two_letter_edits(word_string):
    '''Finds all possible two letter edits of word_string:
        - Splitting word_string into two words at all character locations
        - Deleting one letter at all character locations
        - Switching neighbouring characters
        - Replacing a character with every alphabetical letter
        - Inserting all possible alphabetical characters between each character location including boundaries

    This is a reapplication of find_one_letter_edits to every word produced
    by a first application of find_one_letter_edits on word_string.

    Returns a generator over the two-letter edits, or an empty set for
    None input.
    '''
    if word_string is None:
        # Was `{}` -- an empty *dict*, not the documented set; return an
        # empty set so the None case matches the documented contract.
        return set()
    elif isinstance(word_string, str):
        return (e2 for e1 in find_one_letter_edits(word_string) for e2 in find_one_letter_edits(e1))
    else:
        raise InputError("string or none type variable not passed as argument to find_two_letter_edits")
|
def main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra):
    """This is the way to invoke a script with all the bells and
    whistles as a command line application. This will always terminate
    the application after a call. If this is not wanted, ``SystemExit``
    needs to be caught.

    This method is also available by directly calling the instance of
    a :class:`Command`.

    .. versionadded:: 3.0
       Added the `standalone_mode` flag to control the standalone mode.

    :param args: the arguments that should be used for parsing. If not
                 provided, ``sys.argv[1:]`` is used.
    :param prog_name: the program name that should be used. By default
                      the program name is constructed by taking the file
                      name from ``sys.argv[0]``.
    :param complete_var: the environment variable that controls the
                         bash completion support. The default is
                         ``"_<prog_name>_COMPLETE"`` with prog_name in
                         uppercase.
    :param standalone_mode: the default behavior is to invoke the script
                            in standalone mode. Click will then
                            handle exceptions and convert them into
                            error messages and the function will never
                            return but shut down the interpreter. If
                            this is set to `False` they will be
                            propagated to the caller and the return
                            value of this function is the return value
                            of :meth:`invoke`.
    :param extra: extra keyword arguments are forwarded to the context
                  constructor. See :class:`Context` for more information.
    """
    # If we are in Python 3, we will verify that the environment is
    # sane at this point or reject further execution to avoid a
    # broken script.
    if not PY2:
        _verify_python3_env()
    else:
        _check_for_unicode_literals()
    if args is None:
        args = get_os_args()
    else:
        args = list(args)
    if prog_name is None:
        prog_name = make_str(os.path.basename(sys.argv and sys.argv[0] or __file__))
    # Hook for the Bash completion. This only activates if the Bash
    # completion is actually enabled, otherwise this is quite a fast
    # noop.
    _bashcomplete(self, prog_name, complete_var)
    # Outer try handles Exit/Abort; inner try handles user-facing errors.
    try:
        try:
            with self.make_context(prog_name, args, **extra) as ctx:
                rv = self.invoke(ctx)
                if not standalone_mode:
                    return rv
                # it's not safe to `ctx.exit(rv)` here!
                # note that `rv` may actually contain data like "1" which
                # has obvious effects
                # more subtle case: `rv=[None, None]` can come out of
                # chained commands which all returned `None` -- so it's not
                # even always obvious that `rv` indicates success/failure
                # by its truthiness/falsiness
                ctx.exit()
        except (EOFError, KeyboardInterrupt):
            echo(file=sys.stderr)
            raise Abort()
        except ClickException as e:
            if not standalone_mode:
                raise
            e.show()
            sys.exit(e.exit_code)
        except IOError as e:
            if e.errno == errno.EPIPE:
                # Downstream closed the pipe (e.g. `prog | head`); swap in
                # wrappers that swallow the resulting flush errors and
                # exit quietly.
                sys.stdout = PacifyFlushWrapper(sys.stdout)
                sys.stderr = PacifyFlushWrapper(sys.stderr)
                sys.exit(1)
            else:
                raise
    except Exit as e:
        if standalone_mode:
            sys.exit(e.exit_code)
        else:
            # in non-standalone mode, return the exit code
            # note that this is only reached if `self.invoke` above raises
            # an Exit explicitly -- thus bypassing the check there which
            # would return its result
            # the results of non-standalone execution may therefore be
            # somewhat ambiguous: if there are codepaths which lead to
            # `ctx.exit(1)` and to `return 1`, the caller won't be able to
            # tell the difference between the two
            return e.exit_code
    except Abort:
        if not standalone_mode:
            raise
        echo('Aborted!', file=sys.stderr)
        sys.exit(1)
|
def lineWidth(requestContext, seriesList, width):
    """Takes one metric or a wildcard seriesList, followed by a float F.

    Draw the selected metrics with a line width of F, overriding the default
    value of 1, or the &lineWidth=X.X parameter.

    Useful for highlighting a single metric out of many, or having multiple
    line widths in one graph.

    Example::

        &target=lineWidth(server01.instance01.memory.free,5)
    """
    # Tag every series with the requested render option, in place.
    for metric_series in seriesList:
        metric_series.options['lineWidth'] = width
    return seriesList
|
def decode(binary):
    """Decode (gunzip) binary data and return the decompressed bytes."""
    # Wrap the compressed payload in a file-like object for GzipFile.
    buffer_ = io.BytesIO(binary)
    with gzip.GzipFile(mode='rb', fileobj=buffer_) as stream:
        return stream.read()
|
# (Non-code residue from a web page, commented out so it does not break the file.)
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.