signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def group(self, *args, **kwargs):
    """A group allows a command to have subcommands attached. This is the
    most common way to implement nesting in Click.

    :param name: the name of the group (optional)
    :param commands: a dictionary of commands.
    """
    # Default the group class to AppGroup; an explicit falsy ``cls`` also
    # falls back to AppGroup (same as the original `or` chain).
    group_cls = kwargs.pop('cls', AppGroup) or AppGroup
    return super().group(*args, cls=group_cls, **kwargs)
|
def get_datastream_info(self, dsinfo):
    '''Use regular expressions to pull datastream [version]
    details (id, mimetype, size, and checksum) for binary content,
    in order to sanity check the decoded data.

    :param dsinfo: text content just before a binaryContent tag
    :returns: dict with keys for id, mimetype, size, type and digest,
        or None if no match is found
    '''
    # Only the tail of this section of content is relevant.
    tail = dsinfo[-750:]
    # If not enough content is present, include the end of the last
    # read chunk, if available.
    if self.end_of_last_chunk is not None and len(tail) < 750:
        tail = self.end_of_last_chunk + tail
    # Force text, needed for python 3 compatibility (in python 3
    # the content is bytes instead of a string).
    try:
        text = force_text(tail)
    except UnicodeDecodeError as err:
        # A unicode character can be split across read blocks; an
        # "invalid start byte" decode error suggests the first byte is
        # such a partial character, which is not needed for datastream
        # context -- retry without it.
        if 'invalid start byte' in force_text(err):
            text = force_text(tail[1:])
        else:
            raise err
    # The text may contain multiple datastream ids; use the last match,
    # i.e. the one immediately preceding the datastream content.
    matches = list(self.dsinfo_regex.finditer(text))
    if matches:
        return matches[-1].groupdict()
|
def detachChildren(self):
    """Detach and return this element's children.

    @return: The element's children (detached).
    @rtype: [L{Element},...]
    """
    # Swap out the child list first, then sever each child's parent link.
    detached, self.children = self.children, []
    for node in detached:
        node.parent = None
    return detached
|
def _handle_mouse(self, ev):
    """Handle mouse events. Return a list of KeyPress instances."""
    FROM_LEFT_1ST_BUTTON_PRESSED = 0x1
    result = []
    # Only react to a left-button press; for it, emit both the mouse-down
    # and the mouse-up event.
    if ev.ButtonState == FROM_LEFT_1ST_BUTTON_PRESSED:
        x = str(ev.MousePosition.X)
        y = str(ev.MousePosition.Y)
        for event_type in (MouseEventType.MOUSE_DOWN, MouseEventType.MOUSE_UP):
            data = ';'.join([event_type, x, y])
            result.append(KeyPress(Keys.WindowsMouseEvent, data))
    return result
|
def indentby(parser, token):
    """Add indentation to text between the tags by the given indentation level.

    {% indentby <indent_level> [if <statement>] %}
    {% endindentby %}

    Arguments:
    indent_level - Number of spaces to indent text with.
    statement - Only apply indent_level if the boolean statement evalutates to True.
    """
    args = token.split_contents()
    # Valid forms have 2 tokens (tag + level) or 4 (tag + level + "if" + stmt).
    if len(args) not in (2, 4):
        raise template.TemplateSyntaxError("indentby tag requires 1 or 3 arguments")
    indent_level = args[1]
    if_statement = args[3] if len(args) == 4 else None
    # Consume everything up to the matching end tag.
    nodelist = parser.parse(('endindentby',))
    parser.delete_first_token()
    return IndentByNode(nodelist, indent_level, if_statement)
|
def _only_if_file_not_exist(func_, *args, **kwargs):
    """horribly non-atomic

    :param func_: callable to run when no record with this primary key exists
    :param args: positional args; args[1] is the object dict, args[-1] the connection
    :param kwargs: forwarded to ``func_``
    :return: an error dict on duplicate primary key, else ``func_``'s result
    """
    obj_dict = args[1]
    conn = args[-1]
    try:
        # Probe for an existing record with the same primary key.
        RBF.get(obj_dict[PRIMARY_FIELD]).pluck(PRIMARY_FIELD).run(conn)
        # The probe succeeded, so a record already exists: report a duplicate.
        err_str = "Duplicate primary key `Name`: {}".format(obj_dict[PRIMARY_FIELD])
        return {'errors': 1, 'first_error': err_str}
    except r.errors.ReqlNonExistenceError:
        # No record found -- safe (though racy) to proceed.
        return func_(*args, **kwargs)
|
def fromBrdict(cls, master, brdict):
    """Construct a new L{BuildRequest} from a dictionary as returned by
    L{BuildRequestsConnectorComponent.getBuildRequest}.

    This method uses a cache, which may result in return of stale objects;
    for the most up-to-date information, use the database connector
    methods.

    @param master: current build master
    @param brdict: build request dictionary
    @returns: L{BuildRequest}, via Deferred
    """
    # Entries are keyed by build request id; cache misses call cls._make_br.
    br_cache = master.caches.get_cache("BuildRequests", cls._make_br)
    return br_cache.get(brdict['buildrequestid'], brdict=brdict, master=master)
|
def _glob_escape(pathname):
    """Escape all special characters."""
    # The drive/UNC part cannot contain glob magic, so only escape the rest
    # by wrapping each magic character in brackets.
    drive, rest = os.path.splitdrive(pathname)
    return drive + _magic_check.sub(r'[\1]', rest)
|
def _setup_bar(self):
    """Setup the process bar."""
    item_count = len(PROGRESS_BAR_ITEMS)
    # Number of bar cells (possibly fractional) that should be filled.
    remaining = float(self._time_left) / self._section_time * self.num_progress_bars
    bar = u""
    while remaining > 0:
        # Pick a fill glyph; fractions select a partial-fill character,
        # clamped to the fullest glyph for whole cells.
        idx = min(int(remaining * item_count), item_count - 1)
        bar += PROGRESS_BAR_ITEMS[idx]
        remaining -= 1
    # Pad with spaces up to the full bar width.
    return bar.ljust(self.num_progress_bars)
|
def plot_dos(self, sigma=0.05):
    """plot dos

    Args:
        sigma: a smearing

    Returns:
        a matplotlib object
    """
    dos_plotter = DosPlotter(sigma=sigma)
    # Register the total DOS under the label "t" before plotting.
    dos_plotter.add_dos("t", self._bz.dos)
    return dos_plotter.get_plot()
|
def import_tasks(self, path):
    """Attempts to load tasks from a given path.

    Walks ``path`` recursively and imports every ``*.py`` file found as a
    submodule of the ``tasks`` package (e.g. ``message_popup.py`` becomes
    ``tasks.message_popup``).

    :param path: Path to tasks.
    :return: None.
    """
    # Local import kept deliberately so the module has no hard top-level
    # dependency; hoisted out of the loop (the original re-imported it
    # per file).
    import importlib
    for _dirpath, _dirnames, filenames in os.walk(path):
        for module in filenames:
            if module.endswith('.py'):
                importlib.import_module('tasks.' + module[:-3])
                # BUGFIX: py2 print statement -> py3 print() function
                # (the surrounding code base uses py3-only super()).
                print('imported', module)
|
def surface_tessellate(v1, v2, v3, v4, vidx, tidx, trim_curves, tessellate_args):
    """Triangular tessellation algorithm for surfaces with no trims.

    This function can be directly used as an input to :func:`.make_triangle_mesh`
    using the ``tessellate_func`` keyword argument.

    :param v1: vertex 1
    :type v1: Vertex
    :param v2: vertex 2
    :type v2: Vertex
    :param v3: vertex 3
    :type v3: Vertex
    :param v4: vertex 4
    :type v4: Vertex
    :param vidx: vertex numbering start value
    :type vidx: int
    :param tidx: triangle numbering start value
    :type tidx: int
    :param trim_curves: trim curves (unused for untrimmed surfaces)
    :type trim_curves: list, tuple
    :param tessellate_args: tessellation arguments (unused here)
    :type tessellate_args: dict
    :return: lists of vertex and triangle objects in (vertex_list, triangle_list) format
    :rtype: tuple
    """
    # Triangulate the four corner vertices; the vertex list stays empty
    # because no new vertices are generated for an untrimmed surface.
    triangles = polygon_triangulate(tidx, v1, v2, v3, v4)
    return [], triangles
|
def commit_token_operation(self, token_op, current_block_number):
    """Commit a token operation that debits one account and credits another.

    Returns the new canonicalized record (with all compatibility quirks preserved).
    DO NOT CALL THIS DIRECTLY

    :param token_op: the token operation record to commit
    :param current_block_number: block height at which the operation applies
    :return: the sanitized (canonical) token operation record
    """
    # have to have read-write disposition
    if self.disposition != DISPOSITION_RW:
        log.error("FATAL: borrowing violation: not a read-write connection")
        traceback.print_stack()
        os.abort()

    cur = self.db.cursor()
    opcode = token_op.get('opcode', None)
    clean_token_op = self.sanitize_op(token_op)

    # Validate the operation; any failure is fatal and rolls back.
    try:
        assert token_operation_is_valid(token_op), 'Invalid token operation'
        assert opcode is not None, 'No opcode given'
        assert 'txid' in token_op, 'No txid'
        assert 'vtxindex' in token_op, 'No vtxindex'
    except Exception as e:
        log.exception(e)
        log.error('FATAL: failed to commit token operation')
        self.db.rollback()
        os.abort()

    table = token_operation_get_table(token_op)
    account_payment_info = token_operation_get_account_payment_info(token_op)
    account_credit_info = token_operation_get_account_credit_info(token_op)

    # fields must be set
    try:
        for key in account_payment_info:
            assert account_payment_info[key] is not None, 'BUG: payment info key {} is None'.format(key)

        for key in account_credit_info:
            # BUGFIX: the failure message used to read "is not None",
            # inverting the meaning of the failed assertion.
            assert account_credit_info[key] is not None, 'BUG: credit info key {} is None'.format(key)

        # NOTE: do not check token amount and type, since in the future we want to support converting
        # between tokens
    except Exception as e:
        log.exception(e)
        log.error("FATAL: invalid token debit or credit info")
        os.abort()

    self.log_accept(current_block_number, token_op['vtxindex'], token_op['op'], token_op)

    # NOTE: this code is single-threaded, but this code must be atomic
    self.commit_account_debit(token_op, account_payment_info, current_block_number, token_op['vtxindex'], token_op['txid'])
    self.commit_account_credit(token_op, account_credit_info, current_block_number, token_op['vtxindex'], token_op['txid'])

    namedb_history_save(cur, opcode, token_op['address'], None, None, current_block_number, token_op['vtxindex'], token_op['txid'], clean_token_op)
    return clean_token_op
|
def to_dict(self):
    """Transforms the object to a Python dictionary.

    Note:
        If an Input hasn't been signed yet, this method returns a
        dictionary representation.

    Returns:
        dict: The Input as an alternative serialization format.
    """
    # A signed fulfillment serializes to a URI; unsigned ones raise and
    # fall back to a details dict instead.
    try:
        fulfillment = self.fulfillment.serialize_uri()
    except (TypeError, AttributeError, ASN1EncodeError, ASN1DecodeError):
        fulfillment = _fulfillment_to_details(self.fulfillment)

    # NOTE: `self.fulfills` can be `None` and that's fine.
    try:
        fulfills = self.fulfills.to_dict()
    except AttributeError:
        fulfills = None

    return {
        'owners_before': self.owners_before,
        'fulfills': fulfills,
        'fulfillment': fulfillment,
    }
|
def get_summary_string(self):
    """Get a string summarising the state of Rez as a whole.

    Returns:
        String.
    """
    from rez.plugin_managers import plugin_manager
    # Version header, a blank line, then the per-plugin summary.
    lines = ["Rez %s" % __version__, "", plugin_manager.get_summary_string()]
    return "\n".join(lines)
|
def table(self, name: str):
    """Display info about a table: number of rows
    and columns

    :param name: name of the table
    :type name: str
    :example: ``tables = ds.table("mytable")``
    """
    # Deliberately a loose equality check: only an explicit False return
    # aborts (None does not compare equal to False).
    if self._check_db() == False:
        return
    try:
        records = self.getall(name)
    except Exception as e:
        self.err(e, self.table, "Can not get records from database")
        return
    if records is None:
        self.warning("Table", name, "does not contain any record")
        return
    self.info(len(records), "rows")
    self.info("Fields:", ", ".join(list(records)))
|
def log_event(event, logger=root_logger, **log_dict):
    """Utility function for logging an event (e.g. for metric analysis).

    If no logger is given, fallback to the root logger.
    """
    message = add_items_to_message("event={}".format(event), log_dict)
    # Also carry the event name in the structured `extra` payload.
    log_dict.update({'event': event})
    logger.info(message, extra=log_dict)
|
def imp_print(self, text, end):
    """Use win_unicode_console"""
    # Route output through the transcoded stdout stream so unicode text
    # prints correctly on Windows consoles.
    stream = win_unicode_console.streams.stdout_text_transcoded
    PRINT(text, end=end, file=stream)
|
def _marker_line ( self ) : # type : ( ) - > str
"""Generate a correctly sized marker line .
e . g .
: return : str"""
|
output = ''
for col in sorted ( self . col_widths ) :
line = self . COLUMN_MARK + ( self . DASH * ( self . col_widths [ col ] + self . PADDING * 2 ) )
output += line
output += self . COLUMN_MARK + '\n'
return output
|
def heading2table(soup, table, row):
    """add heading row to table"""
    tr = Tag(soup, name="tr")
    table.append(tr)
    # One <th> cell per heading value, appended in order.
    for heading in row:
        th = Tag(soup, name="th")
        tr.append(th)
        th.append(heading)
|
def get_coreml_model(self, mode='classifier'):
    """Convert the underlying MXNet model to a Core ML model.

    Parameters
    ----------
    mode : str ('classifier', 'regressor' or None)
        Mode of the converted coreml model.
        When mode='classifier', a NeuralNetworkClassifier spec will be constructed.
        When mode='regressor', a NeuralNetworkRegressor spec will be constructed.

    Returns
    -------
    model : MLModel
        Return the underlying model.
    """
    import mxnet as _mx
    from ._mxnet import _mxnet_utils
    from ._mxnet._mxnet_to_coreml import _mxnet_converter

    (sym, arg_params, aux_params) = self.ptModel.mxmodel
    fe_mxmodel = self.ptModel.mxmodel

    if self.ptModel.is_feature_layer_final:
        # The feature layer is the network's last layer; graft a dummy
        # fully-connected + softmax head on top so the graph is convertible.
        feature_layer_size = self.ptModel.feature_layer_size
        num_dummy_classes = 10
        feature_layer_sym = sym.get_children()[0]
        fc_symbol = _mx.symbol.FullyConnected(feature_layer_sym, num_hidden=num_dummy_classes)
        prob = _mx.symbol.SoftmaxOutput(fc_symbol, name=sym.name, attr=sym.attr_dict()[sym.name])
        # Zero-initialize the dummy head's parameters.
        arg_params['%s_weight' % fc_symbol.name] = _mx.ndarray.zeros((num_dummy_classes, feature_layer_size))
        arg_params['%s_bias' % fc_symbol.name] = _mx.ndarray.zeros((num_dummy_classes))
        fe_mxmodel = (prob, arg_params, aux_params)

    model = MXFeatureExtractor._get_mx_module(
        fe_mxmodel, self.data_layer, self.ptModel.output_layer,
        _mxnet_utils.get_mxnet_context(max_devices=1), self.image_shape,
        label_layer=self.ptModel.label_layer)

    preprocessor_args = {'image_input_names': [self.data_layer]}
    # BUGFIX: honor the caller-supplied `mode` -- the original hard-coded
    # mode='classifier', silently ignoring the documented parameter.
    return _mxnet_converter.convert(
        model, mode=mode,
        input_shape=[(self.data_layer, (1,) + self.image_shape)],
        class_labels=list(map(str, range(self.ptModel.num_classes))),
        preprocessor_args=preprocessor_args, verbose=False)
|
def worker_thread(context):
    """The worker thread routines."""
    task_queue = context.task_queue
    params = context.worker_parameters
    # Run the optional initializer first; abort this worker if it fails.
    if params.initializer is not None and not run_initializer(params.initializer, params.initargs):
        context.state = ERROR
        return
    # Process tasks until max_tasks is reached or the queue is exhausted.
    for task in get_next_task(context, params.max_tasks):
        execute_next_task(task)
        task_queue.task_done()
|
def get_object_or_None(klass, *args, **kwargs):
    """Uses get() to return an object or None if the object does not exist.

    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), a MultipleObjectsReturned will be raised if more than one
    object is found.
    """
    queryset = _get_queryset(klass)
    try:
        return queryset.get(*args, **kwargs)
    except queryset.model.DoesNotExist:
        # Swallow only the "no match" case; MultipleObjectsReturned propagates.
        return None
|
def CalculateForecastStats(matched, available, possible=None):
    """Calculate forecast percentage stats.

    Args:
        matched: The number of matched impressions.
        available: The number of available impressions.
        possible: The optional number of possible impressions.

    Returns:
        The percentage of impressions that are available and possible.
    """
    # Guard against division by zero when nothing matched.
    available_percent = (float(available) / matched) * 100. if matched > 0 else 0
    if possible is None:
        possible_percent = None
    else:
        possible_percent = (possible / float(matched)) * 100. if matched > 0 else 0
    return available_percent, possible_percent
|
def stack(cls, areas):
    """Stacks an (Nd)Overlay of Area or Curve Elements by offsetting
    their baselines. To stack a HoloMap or DynamicMap use the map
    method.
    """
    if not len(areas):
        return areas
    first = areas.values()[0]
    # Running baseline, accumulated element by element.
    baseline = np.zeros(len(first))
    stacked = areas.clone(shared_data=False)
    # Stacked elements carry the original value dimension plus a baseline.
    vdims = [first.vdims[0], 'Baseline']
    for key, area in areas.items():
        x, y = (area.dimension_values(i) for i in range(2))
        stacked[key] = area.clone((x, y + baseline, baseline), vdims=vdims, new_type=Area)
        baseline = baseline + y
    return stacked
|
def add_stylesheets(self, *css_files):
    """add stylesheet files in HTML head"""
    # Inline each file's contents into the soup's <style> element.
    for path in css_files:
        self.main_soup.style.append(self._text_file(path))
|
def vis_splitting(Verts, splitting, output='vtk', fname='output.vtu'):
    """Coarse grid visualization for C/F splittings.

    Parameters
    ----------
    Verts : array
        coordinate array (N x D)
    splitting : array
        coarse (1) / fine (0) flags
    fname : string, file object
        file to be written, e.g. 'output.vtu'
    output : string
        'vtk' or 'matplotlib'

    Returns
    -------
    Displays on screen or writes data to a .vtu file for use in paraview
    (xml 0.1 format).

    Notes
    -----
    - Simply colors different points with different colors.  This works
      best with classical AMG.
    - Writes a file (or opens a window) for each dof.
    - For Ndof > 1, values are assumed ordered [...dof1..., ...dof2..., etc].

    Examples
    --------
    >>> import numpy as np
    >>> from pyamg.vis.vis_coarse import vis_splitting
    >>> Verts = np.array([[0.0, 0.0],
    ...                   [1.0, 0.0],
    ...                   [0.0, 1.0],
    ...                   [1.0, 1.0]])
    >>> splitting = np.array([0, 1, 0, 1, 1, 0, 1, 0])    # two variables
    >>> vis_splitting(Verts, splitting, output='vtk', fname='output.vtu')

    >>> from pyamg.classical import RS
    >>> from pyamg.vis.vis_coarse import vis_splitting
    >>> from pyamg.gallery import load_example
    >>> data = load_example('unit_square')
    >>> A = data['A'].tocsr()
    >>> V = data['vertices']
    >>> E2V = data['elements']
    >>> splitting = RS(A)
    >>> vis_splitting(Verts=V, splitting=splitting, output='vtk',
    ...               fname='output.vtu')
    """
    check_input(Verts, splitting)

    N = Verts.shape[0]
    # number of degrees of freedom per vertex
    Ndof = int(len(splitting) / N)
    # each vertex is its own "element" for a vertex mesh
    E2V = np.arange(0, N, dtype=int)

    # adjust name in case of multiple variables
    a = fname.split('.')
    if len(a) < 2:
        # no extension given: default to .vtu
        fname1 = a[0]
        fname2 = '.vtu'
    elif len(a) >= 2:
        fname1 = "".join(a[:-1])
        fname2 = a[-1]
    else:
        # unreachable: str.split always returns at least one element
        raise ValueError('problem with fname')

    new_fname = fname
    # for each variable, write a file or open a figure
    for d in range(0, Ndof):
        if Ndof > 1:
            new_fname = fname1 + '_%d.' % (d + 1) + fname2
        # slice out this dof's C/F flags
        cdata = splitting[(d * N):((d + 1) * N)]
        if output == 'vtk':
            write_basic_mesh(Verts=Verts, E2V=E2V, mesh_type='vertex',
                             cdata=cdata, fname=new_fname)
        elif output == 'matplotlib':
            from pylab import figure, show, plot, xlabel, ylabel, title, axis
            # fine points (0) in blue, coarse points (1) in red
            cdataF = np.where(cdata == 0)[0]
            cdataC = np.where(cdata == 1)[0]
            xC = Verts[cdataC, 0]
            yC = Verts[cdataC, 1]
            xF = Verts[cdataF, 0]
            yF = Verts[cdataF, 1]
            figure()
            plot(xC, yC, 'r.', xF, yF, 'b.', clip_on=True)
            title('C/F splitting (red=coarse, blue=fine)')
            xlabel('x')
            ylabel('y')
            axis('off')
            show()
        else:
            raise ValueError('problem with outputtype')
|
def ScanForStorageMediaImage(self, source_path_spec):
    """Scans the path specification for a supported storage media image format.

    Args:
        source_path_spec (PathSpec): source path specification.

    Returns:
        PathSpec: storage media image path specification or None if no supported
            storage media image type was found.

    Raises:
        BackEndError: if the source cannot be scanned or more than one storage
            media image type is found.
    """
    try:
        type_indicators = analyzer.Analyzer.GetStorageMediaImageTypeIndicators(
            source_path_spec, resolver_context=self._resolver_context)
    except RuntimeError as exception:
        raise errors.BackEndError((
            'Unable to process source path specification with error: '
            '{0!s}').format(exception))

    if not type_indicators:
        # The RAW storage media image type cannot be detected based on
        # a signature so we try to detect it based on common file naming schemas.
        file_system = resolver.Resolver.OpenFileSystem(
            source_path_spec, resolver_context=self._resolver_context)
        raw_path_spec = path_spec_factory.Factory.NewPathSpec(
            definitions.TYPE_INDICATOR_RAW, parent=source_path_spec)

        try:
            # The RAW glob function will raise a PathSpecError if the path
            # specification is unsuitable for globbing.
            glob_results = raw.RawGlobPathSpec(file_system, raw_path_spec)
        except errors.PathSpecError:
            glob_results = None

        file_system.Close()

        # No RAW-style file naming matched either: not a storage media image.
        if not glob_results:
            return None

        return raw_path_spec

    if len(type_indicators) > 1:
        raise errors.BackEndError(
            'Unsupported source found more than one storage media image types.')

    return path_spec_factory.Factory.NewPathSpec(
        type_indicators[0], parent=source_path_spec)
|
def to_frame(self, slot=1):
    """Return the current configuration as a YubiKeyFrame object.

    :param slot: the YubiKey configuration slot to write (1 or 2)
    :return: a YubiKeyFrame wrapping the chosen command and payload
    :raises AssertionError: if ``slot`` is neither 1 nor 2
    """
    data = self.to_string()
    # Pad the config block to the fixed 64-byte frame payload size.
    payload = data.ljust(64, yubico_util.chr_byte(0x0))
    # BUGFIX: compare slot numbers with `==`, not `is` -- identity checks
    # on ints only work by accident of CPython's small-int caching.
    if slot == 1:
        command = SLOT.UPDATE1 if self._update_config else SLOT.CONFIG
    elif slot == 2:
        command = SLOT.UPDATE2 if self._update_config else SLOT.CONFIG2
    else:
        # The original used a bare `assert ()`; keep the exception type
        # but give it a usable message.
        raise AssertionError('Invalid slot: %r' % (slot,))
    if self._swap_slots:
        command = SLOT.SWAP
    if self._zap:
        # Zapping a slot sends an empty payload.
        payload = b''
    return yubikey_frame.YubiKeyFrame(command=command, payload=payload)
|
def check_stoplimit_prices(price, label):
    """Check to make sure the stop/limit prices are reasonable and raise
    a BadOrderParameters exception if not.
    """
    try:
        if not isfinite(price):
            raise BadOrderParameters(
                msg="Attempted to place an order with a {} price "
                    "of {}.".format(label, price))
    # This catches arbitrary objects
    except TypeError:
        raise BadOrderParameters(
            msg="Attempted to place an order with a {} price "
                "of {}.".format(label, type(price)))

    if price < 0:
        raise BadOrderParameters(
            msg="Can't place a {} order with a negative price.".format(label))
|
def write_boundaries(self, filename):
    """Write boundary lines X1 Y1 X2 Y2 TYPE to file.

    Each entry of ``self.Boundaries`` is expected to look like
    ``((X1, Y1), (X2, Y2), TYPE)``.

    :param filename: path of the file to (over)write
    """
    # `with` guarantees the file is closed even if a write fails; the
    # original also leaked a debug print of every boundary to stdout.
    with open(filename, 'w') as fid:
        for bound in self.Boundaries:
            fid.write('{0} {1} {2} {3} {4}\n'.format(
                bound[0][0], bound[0][1], bound[1][0], bound[1][1], bound[2]))
|
def set_my_info(self, message, contact_info=""):
    """set my contact info to ____: Set your emergency contact info."""
    # Entries are keyed by the sender's handle in the persisted mapping.
    contacts = self.load("contact_info", {})
    contacts[message.sender.handle] = {
        "info": contact_info,
        "name": message.sender.name,
    }
    self.save("contact_info", contacts)
    self.say("Got it.", message=message)
|
def start(self):
    '''Start listening for messages.'''
    log.debug('Creating the TCP server')
    # A colon in the address implies an IPv6 endpoint.
    if ':' in self.address:
        self.skt = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    else:
        self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if self.reuse_port:
        # Allow quick rebinding after a restart.
        self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # SO_REUSEPORT is not available on every platform.
        if hasattr(socket, 'SO_REUSEPORT'):
            self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        else:
            log.error('SO_REUSEPORT not supported')
    try:
        self.skt.bind((self.address, int(self.port)))
    except socket.error as msg:
        error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg)
        log.error(error_string, exc_info=True)
        raise BindException(error_string)
    log.debug('Accepting max %d parallel connections', self.max_clients)
    self.skt.listen(self.max_clients)
    # Serve clients on a background thread so start() returns immediately.
    self.thread_serve = threading.Thread(target=self._serve_clients)
    self.thread_serve.start()
|
def gen_req_sfc(lat_x, lon_x, start, end, grid=[0.125, 0.125], scale=0):
    '''generate a dict of reqs kwargs for (lat_x, lon_x) spanning [start, end]

    Parameters
    ----------
    lat_x : [type]
        latitude of the point of interest
    lon_x : [type]
        longitude of the point of interest
    start : [type]
        start timestamp of the request period
    end : [type]
        end timestamp of the request period
    grid : list, optional
        grid resolution (the default is [0.125, 0.125])
    scale : int, optional
        factor to rescale grid size (the default is 0)

    Returns
    -------
    dict
        mapping of generated file names to processed request kwargs

    Examples
    --------
    >>> gen_req_sfc(28, 116, '2015-01', '2015-01-31 23', grid=[0.125, 0.125], scale=0)
    '''
    # scale is a factor to rescale grid size
    half_span = grid[0] * scale
    # hourly timestamps covering the requested period
    ser_datetime = pd.date_range(start, end, freq='1h').to_series()
    # snap the point of interest onto the grid
    lat_c, lon_c = (roundPartial(v, grid[0]) for v in (lat_x, lon_x))
    # bounding box around the point: [north, west, south, east]
    area = [lat_c + half_span, lon_c - half_span, lat_c - half_span, lon_c + half_span]
    base_req = {
        'variable': list_var_sfc,
        'product_type': 'reanalysis',
        'area': area,
        'grid': grid,
        'format': 'netcdf',
    }
    # one request per sub-period, each merged with the shared settings
    sub_reqs = [{**base_req, **dict_dt} for dict_dt in list(gen_dict_dt_sub(ser_datetime).values())]
    return {gen_fn(req): gen_dict_proc(req) for req in sub_reqs}
|
def checkMarkovInputs(self):
    '''Many parameters used by MarkovConsumerType are arrays.  Make sure those arrays are the
    right shape.

    Parameters
    ----------
    None

    Returns
    -------
    None
    '''
    StateCount = self.MrkvArray[0].shape[0]

    # Check that arrays are the right shape
    assert self.Rfree.shape == (StateCount,), 'Rfree not the right shape!'

    # Check that arrays in lists are the right shape
    for MrkvArray_t in self.MrkvArray:
        assert MrkvArray_t.shape == (StateCount, StateCount), 'MrkvArray not the right shape!'
    for LivPrb_t in self.LivPrb:
        assert LivPrb_t.shape == (StateCount,), 'Array in LivPrb is not the right shape!'
    # BUGFIX: iterate self.PermGroFac here -- the original looped over
    # self.LivPrb a second time, so PermGroFac was never validated.
    for PermGroFac_t in self.PermGroFac:
        assert PermGroFac_t.shape == (StateCount,), 'Array in PermGroFac is not the right shape!'

    # Now check the income distribution.
    # Note IncomeDstn is (potentially) time-varying, so it is in time_vary.
    # Therefore it is a list, and each element of that list responds to the income distribution
    # at a particular point in time.  Each income distribution at a point in time should itself
    # be a list, with each element corresponding to the income distribution
    # conditional on a particular Markov state.
    for IncomeDstn_t in self.IncomeDstn:
        assert len(IncomeDstn_t) == StateCount, 'List in IncomeDstn is not the right length!'
|
def load_data(self, data_np):
    """Load raw numpy data into the viewer."""
    # Wrap the raw array in an AstroImage and hand it to the viewer.
    img = AstroImage.AstroImage(logger=self.logger)
    img.set_data(data_np)
    self.set_image(img)
|
async def get_partition_ids_async(self):
    """Returns a list of all the event hub partition IDs.

    The IDs are fetched from the Event Hub on first call and cached on
    ``self.partition_ids``; subsequent calls return the cached list.

    :rtype: list[str]
    """
    if not self.partition_ids:
        try:
            eh_client = EventHubClientAsync(
                self.host.eh_config.client_address,
                debug=self.host.eph_options.debug_trace,
                http_proxy=self.host.eph_options.http_proxy)
            try:
                eh_info = await eh_client.get_eventhub_info_async()
                self.partition_ids = eh_info['partition_ids']
            except Exception as err:  # pylint: disable=broad-except
                raise Exception("Failed to get partition ids", repr(err))
        finally:
            # Always shut the client down, even when the lookup failed.
            await eh_client.stop_async()
    return self.partition_ids
|
def init_weights(self, m):
    """Initialize the weights.

    Dispatches on the class name of ``m`` and initializes its parameters
    in place via ``self.init_weight`` / ``self.init_bias`` /
    ``nn.init.normal_``.

    :param m: a module whose parameters should be initialized
    """
    classname = m.__class__.__name__
    # NOTE: branch order matters -- 'AdaptiveEmbedding' must be tested
    # before 'Embedding' because `find` does substring matching.
    if classname.find('Linear') != -1:
        if hasattr(m, 'weight') and m.weight is not None:
            self.init_weight(m.weight)
        if hasattr(m, 'bias') and m.bias is not None:
            self.init_bias(m.bias)
    elif classname.find('AdaptiveEmbedding') != -1:
        if hasattr(m, 'emb_projs'):
            for i in range(len(m.emb_projs)):
                if m.emb_projs[i] is not None:
                    nn.init.normal_(m.emb_projs[i], 0.0, self.config.proj_init_std)
    elif classname.find('Embedding') != -1:
        if hasattr(m, 'weight'):
            self.init_weight(m.weight)
    elif classname.find('ProjectedAdaptiveLogSoftmax') != -1:
        if hasattr(m, 'cluster_weight') and m.cluster_weight is not None:
            self.init_weight(m.cluster_weight)
        if hasattr(m, 'cluster_bias') and m.cluster_bias is not None:
            self.init_bias(m.cluster_bias)
        if hasattr(m, 'out_projs'):
            for i in range(len(m.out_projs)):
                if m.out_projs[i] is not None:
                    nn.init.normal_(m.out_projs[i], 0.0, self.config.proj_init_std)
    elif classname.find('LayerNorm') != -1:
        # LayerNorm weights are initialized around 1.0, not 0.0.
        if hasattr(m, 'weight'):
            nn.init.normal_(m.weight, 1.0, self.config.init_std)
        if hasattr(m, 'bias') and m.bias is not None:
            self.init_bias(m.bias)
    elif classname.find('TransformerLM') != -1:
        # Top-level model: initialize the relative-attention embeddings/biases.
        if hasattr(m, 'r_emb'):
            self.init_weight(m.r_emb)
        if hasattr(m, 'r_w_bias'):
            self.init_weight(m.r_w_bias)
        if hasattr(m, 'r_r_bias'):
            self.init_weight(m.r_r_bias)
        if hasattr(m, 'r_bias'):
            self.init_bias(m.r_bias)
|
def on_menu_save_interpretation(self, event):
    '''save interpretations to a redo file'''
    thellier_gui_redo_file = open(os.path.join(self.WD, "thellier_GUI.redo"), 'w')

    # write interpretations to thellier_GUI.redo
    spec_list = list(self.Data.keys())
    spec_list.sort()
    redo_specimens_list = []
    for sp in spec_list:
        # only specimens whose interpretation was explicitly saved
        if 'saved' not in self.Data[sp]['pars']:
            continue
        if not self.Data[sp]['pars']['saved']:
            continue
        redo_specimens_list.append(sp)
        # one line per specimen: name, step min, step max
        thellier_gui_redo_file.write("%s %.0f %.0f\n" % (
            sp,
            self.Data[sp]['pars']['measurement_step_min'],
            self.Data[sp]['pars']['measurement_step_max']))

    # notify the user that the redo file was written
    dlg1 = wx.MessageDialog(
        self, caption="Saved:",
        message="File thellier_GUI.redo is saved in MagIC working folder",
        style=wx.OK)
    result = self.show_dlg(dlg1)
    if result == wx.ID_OK:
        dlg1.Destroy()
        thellier_gui_redo_file.close()
        return
    # Dialog dismissed some other way: still close the file and clear the
    # unsaved-changes warning flag.
    thellier_gui_redo_file.close()
    self.close_warning = False
|
def get_column_definition_all(self, table):
    """Retrieve the column definition statement for all columns in a table."""
    # Full CREATE TABLE statement, one clause per line.
    lines = self.get_table_definition(table).split('\n')
    # Column definition lines start with a backtick-quoted column name;
    # drop the trailing character (comma) and normalize comma spacing.
    return [line[0:-1].strip().replace(',', ', ')
            for line in lines if line.strip().startswith('`')]
|
def foreground_mask(self, tolerance, ignore_black=True, use_hsv=False, scale=8, bgmodel=None):
    """Creates a binary image mask for the foreground of an image against
    a uniformly colored background. The background is assumed to be the mode value of the histogram
    for each of the color channels.

    Parameters
    ----------
    tolerance : int
        A +/- level from the detected mean background color. Pixels within
        this range will be classified as background pixels and masked out.
    ignore_black : bool
        If True, the zero pixels will be ignored
        when computing the background model.
    use_hsv : bool
        If True, image will be converted to HSV for background model
        generation.
    scale : int
        Size of background histogram bins -- there will be BINARY_IM_MAX_VAL/size bins
        in the color histogram for each channel.
    bgmodel : :obj:`list` of int
        A list containing the red, green, and blue channel modes of the
        background. If this is None, a background model will be generated
        using the other parameters.

    Returns
    -------
    :obj:`BinaryImage`
        A binary image that masks out the background from the current
        ColorImage.
    """
    # get a background model
    if bgmodel is None:
        bgmodel = self.background_model(ignore_black=ignore_black,
                                        use_hsv=use_hsv,
                                        scale=scale)

    # get the bounds: per-channel background color +/- tolerance
    lower_bound = np.array([bgmodel[i] - tolerance for i in range(self.channels)])
    upper_bound = np.array([bgmodel[i] + tolerance for i in range(self.channels)])
    # remember where the original image is exactly zero so those pixels
    # can be forced to background in the final mask
    orig_zero_indices = np.where(np.sum(self._data, axis=2) == 0)

    # threshold
    binary_data = cv2.inRange(self.data, lower_bound, upper_bound)
    # invert: inRange marks background pixels, but the mask should mark
    # foreground
    binary_data[:, :, ] = (BINARY_IM_MAX_VAL - binary_data[:, :, ])
    binary_data[orig_zero_indices[0], orig_zero_indices[1], ] = 0.0
    binary_im = BinaryImage(binary_data.astype(np.uint8), frame=self.frame)
    return binary_im
|
def makedirs(self, path, mode=0x777):
    """Super-mkdir: create a leaf directory and all intermediate ones."""
    # NOTE(review): the default mode is hexadecimal 0x777 (== 1911), not
    # octal 0o777 -- preserved as-is because it is part of the public
    # signature; confirm whether octal was intended.
    flags = [library.DirectoryCreateFlag.parents]
    self.directory_create(path, mode, flags)
|
def decayWeights(self, decayConst=60):
    """Decay the network's weights.

    :param decayConst: The time constant (in seconds) to use for decay.

    Note: If applied, decay must be used extremely carefully, as it has a
    tendency to cause asymmetries in the network weights.
    """
    # Each weight matrix loses a fraction dt/decayConst of itself.
    for attr in ("weightsII", "weightsELI", "weightsERI",
                 "weightsIEL", "weightsIER"):
        weights = getattr(self, attr)
        weights -= weights * self.dt / decayConst
        setattr(self, attr, weights)
|
def route(cls, route, config=None):
    """Decorator for adding endpoints to the http server.

    Args:
        route (str): The url to be handled by the RequestHandler
        config (dict): Configuration for the request handler

    Example:
        .. code-block:: python

            import nautilus
            from nauilus.network.http import RequestHandler

            class MyService(nautilus.Service):

                @MyService.route('/')
                class HelloWorld(RequestHandler):
                    def get(self):
                        return self.finish('hello world')
    """
    def decorator(wrapped_class, **kwds):
        # Register the handler for the given route, then hand the class
        # back unmodified so the decorator is transparent.
        cls._routes.append({'url': route, 'request_handler': wrapped_class})
        return wrapped_class

    return decorator
|
def labeller(rows=None, cols=None, multi_line=True, default=label_value, **kwargs):
    """Return a labeller function.

    Parameters
    ----------
    rows : str | function | None
        How to label the rows
    cols : str | function | None
        How to label the columns
    multi_line : bool
        Whether to place each variable on a separate line
    default : function | str
        Fallback labelling function. If it is a string, it should be the
        name of one of the labelling functions provided by plotnine.
    kwargs : dict
        {variable name: function | string} pairs for renaming variables.
        A function to rename the variable or a string name.

    Returns
    -------
    out : function
        Function to do the labelling
    """
    # Sort out the labellers along each dimension.
    row_labeller = as_labeller(rows, default, multi_line)
    col_labeller = as_labeller(cols, default, multi_line)

    def _labeller(label_info):
        # Without a variable-specific labeller, fall back to the labeller
        # of the facet dimension this info belongs to.
        if label_info._meta['dimension'] == 'rows':
            dim_labeller = row_labeller
        else:
            dim_labeller = col_labeller

        # Labelling functions expect string values.
        label_info = label_info.astype(str)

        # Each facetting variable is labelled independently.
        for var_name, _ in label_info.iteritems():
            func = as_labeller(kwargs.get(var_name), dim_labeller)
            label_info[var_name] = func(label_info[[var_name]])[var_name]

        if not multi_line:
            label_info = collapse_label_lines(label_info)

        return label_info

    return _labeller
|
def send(cfg, **kwargs):
    """Send a text message, file, or directory"""
    # Copy every keyword onto the config object before dispatching.
    for key, val in kwargs.items():
        setattr(cfg, key, val)
    # Import lazily so the timing context can attribute the cost.
    with cfg.timing.add("import", which="cmd_send"):
        from . import cmd_send
    return go(cmd_send.send, cfg)
|
def retrieve_dcnm_subnet_info(self, tenant_id, direc):
    """Retrieves the DCNM subnet info for a tenant."""
    service_obj = self.get_service_obj(tenant_id)
    return service_obj.get_dcnm_subnet_dict(direc)
|
def verify_ed25519_signature(public_key, contents, signature, message):
    """Verify that ``signature`` comes from ``public_key`` and ``contents``.

    Args:
        public_key (Ed25519PublicKey): the key to verify the signature
        contents (bytes): the contents that was signed
        signature (bytes): the signature to verify
        message (str): the error message to raise.

    Raises:
        ScriptWorkerEd25519Error: on failure
    """
    try:
        public_key.verify(signature, contents)
    except InvalidSignature as exc:
        # Surface the crypto failure as a scriptworker error; the caller's
        # message template receives the original exception text.
        raise ScriptWorkerEd25519Error(message % {'exc': str(exc)})
|
def fill_polygon(self, polygons, colour=7, bg=0):
    """Draw a filled polygon.

    This function uses the scan line algorithm to create the polygon. See
    https://www.cs.uic.edu/~jbell/CourseNotes/ComputerGraphics/PolygonFilling.html for details.

    :param polygons: A list of polygons (which are each a list of (x, y) coordinates for the
        points of the polygon) - i.e. nested list of 2-tuples.
    :param colour: The foreground colour to use for the polygon
    :param bg: The background colour to use for the polygon
    """
    def _add_edge(a, b):
        # Ignore horizontal lines - they are redundant
        if a[1] == b[1]:
            return
        # Ignore any edges that do not intersect the visible raster lines at all.
        if (a[1] < 0 and b[1] < 0) or (a[1] >= self.height and b[1] >= self.height):
            return
        # Save off the edge, always starting at the lowest value of y.
        new_edge = _DotDict()
        if a[1] < b[1]:
            new_edge.min_y = a[1]
            new_edge.max_y = b[1]
            new_edge.x = a[0]
            # dx is halved because the raster below advances in half-pixel
            # (y += 0.5) steps.
            new_edge.dx = (b[0] - a[0]) / (b[1] - a[1]) / 2
        else:
            new_edge.min_y = b[1]
            new_edge.max_y = a[1]
            new_edge.x = b[0]
            new_edge.dx = (a[0] - b[0]) / (a[1] - b[1]) / 2
        edges.append(new_edge)

    # Create a table of all the edges in the polygon, sorted on smallest x.
    logger.debug("Processing polygon: %s", polygons)
    min_y = self.height
    max_y = -1
    edges = []
    last = None
    for polygon in polygons:
        # Ignore lines and polygons.
        if len(polygon) <= 2:
            continue
        # Ignore any polygons completely off the screen
        x, y = zip(*polygon)
        p_min_x = min(x)
        p_max_x = max(x)
        p_min_y = min(y)
        p_max_y = max(y)
        if p_max_x < 0 or p_min_x >= self.width or p_max_y < 0 or p_min_y > self.height:
            continue
        # Build up the edge list, maintaining bounding coordinates on the Y axis.
        min_y = min(p_min_y, min_y)
        max_y = max(p_max_y, max_y)
        for i, point in enumerate(polygon):
            if i != 0:
                _add_edge(last, point)
            last = point
        # Close the polygon: edge from first to last vertex.
        _add_edge(polygon[0], polygon[-1])
    edges = sorted(edges, key=lambda e: e.x)

    # Check we still have something to do:
    if len(edges) == 0:
        return

    # Re-base all edges to visible Y coordinates of the screen.
    for edge in edges:
        if edge.min_y < 0:
            # Advance x by the number of half-pixel raster steps that fall
            # above the screen, so the edge starts at y == 0 correctly.
            edge.x -= int(edge.min_y * 2) * edge.dx
            edge.min_y = 0
    min_y = max(0, min_y)
    max_y = min(max_y - min_y, self.height)
    logger.debug("Resulting edges: %s", edges)

    # Render each line in the bounding rectangle.
    for y in [min_y + (i / 2) for i in range(0, int(max_y) * 2)]:
        # Create a list of live edges (for drawing this raster line) and edges for next
        # iteration of the raster.
        live_edges = []
        new_edges = []
        for edge in edges:
            if edge.min_y <= y <= edge.max_y:
                live_edges.append(edge)
            if y < edge.max_y:
                new_edges.append(edge)

        # Draw the portions of the line that are inside the polygon
        # (even-odd rule: alternate segments between edge crossings).
        count = 0
        last_x = 0
        for edge in live_edges:
            # Draw the next segment
            if 0 <= y < self.height:
                if edge.max_y != y:
                    count += 1
                if count % 2 == 1:
                    last_x = edge.x
                else:
                    # Don't bother drawing lines entirely off the screen.
                    if not ((last_x < 0 and edge.x < 0) or (last_x >= self.width and edge.x >= self.width)):
                        # Clip raster to screen width.
                        self.move(max(0, last_x), y)
                        self.draw(min(edge.x, self.width), y, colour=colour, bg=bg, thin=True)
            # Update the x location for this active edge.
            edge.x += edge.dx
        # Rely on the fact that we have the same dicts in both live_edges and new_edges, so
        # we just need to resort new_edges for the next iteration.
        edges = sorted(new_edges, key=lambda e: e.x)
|
def run(self):
    """actual consuming of incoming works starts here"""
    self.input_channel.basic_consume(self.handle_message,
                                     queue=self.INPUT_QUEUE_NAME,
                                     no_ack=True)
    try:
        self.input_channel.start_consuming()
    except (KeyboardInterrupt, SystemExit):
        # Shut down cleanly on Ctrl-C / interpreter exit.
        log.info(" Exiting")
        self.exit()
|
def defer_sync(self, func):
    """Arrange for `func()` to execute on :class:`Broker` thread, blocking
    the current thread until a result or exception is available.

    :returns:
        Return value of `func()`.
    """
    latch = Latch()

    def run_and_capture():
        # Deliver either the result or the raised exception via the latch.
        try:
            latch.put(func())
        except Exception:
            latch.put(sys.exc_info()[1])

    self.defer(run_and_capture)
    result = latch.get()
    if isinstance(result, Exception):
        raise result
    return result
|
def handler(event, context):  # pylint: disable=W0613
    """Historical security group event differ.

    Listens to the Historical current table and determines if there are
    differences that need to be persisted in the historical record.
    """
    # De-serialize, then diff each record against the durable table.
    for record in deserialize_records(event['Records']):
        process_dynamodb_differ_record(record, CurrentSecurityGroupModel, DurableSecurityGroupModel)
|
def _get_struct_linestylearray(self, shape_number):
    """Get the values for the LINESTYLEARRAY record.

    :param shape_number: DefineShape version; selects the LineStyle
        (<= 3) vs. LineStyle2 (>= 4) record layout.
    :return: object with ``LineStyleCount`` and the parsed ``LineStyles``.
    """
    obj = _make_object("LineStyleArray")
    obj.LineStyleCount = count = unpack_ui8(self._src)
    # 0xFF is an escape value: the real count follows as 16 bits.
    if count == 0xFF:
        obj.LineStyleCountExtended = count = unpack_ui16(self._src)
    obj.LineStyles = line_styles = []
    for _ in range(count):
        if shape_number <= 3:
            record = _make_object("LineStyle")
            record.Width = unpack_ui16(self._src)
            # Shape versions 1-2 store RGB, version 3 stores RGBA.
            if shape_number <= 2:
                record.Color = self._get_struct_rgb()
            else:
                record.Color = self._get_struct_rgba()
        else:
            record = _make_object("LineStyle2")
            record.Width = unpack_ui16(self._src)
            bc = BitConsumer(self._src)
            record.StartCapStyle = bc.u_get(2)
            record.JoinStyle = bc.u_get(2)
            record.HasFillFlag = bc.u_get(1)
            record.NoHScaleFlag = bc.u_get(1)
            record.NoVScaleFlag = bc.u_get(1)
            record.PixelHintingFlag = bc.u_get(1)
            bc.u_get(5)
            # reserved
            record.NoClose = bc.u_get(1)
            record.EndCapStyle = bc.u_get(2)
            # JoinStyle 2 carries an extra 16-bit miter limit factor.
            if record.JoinStyle == 2:
                record.MiterLimitFactor = unpack_ui16(self._src)
            # Without a fill flag the color is a plain RGBA; otherwise a
            # full fill style record follows.
            if record.HasFillFlag == 0:
                record.Color = self._get_struct_rgba()
            else:
                record.Color = self._get_struct_fillstyle(shape_number)
        line_styles.append(record)
    return obj
|
def _create_tooltips(hist: Histogram1D, vega: dict, kwargs: dict):
    """In one-dimensional plots, show values above the value on hover."""
    # "tooltips" is consumed from kwargs whether or not it is enabled.
    if not kwargs.pop("tooltips", False):
        return

    # Track the hovered rect in a "tooltip" signal.
    vega.setdefault("signals", []).append({
        "name": "tooltip",
        "value": {},
        "on": [
            {"events": "rect:mouseover", "update": "datum"},
            {"events": "rect:mouseout", "update": "{}"},
        ],
    })

    font_size = kwargs.get("fontsize", DEFAULT_FONTSIZE)
    # Text mark rendered above the hovered bin.
    vega.setdefault("marks", []).append({
        "type": "text",
        "encode": {
            "enter": {
                "align": {"value": "center"},
                "baseline": {"value": "bottom"},
                "fill": {"value": "#333"},
                "fontSize": {"value": font_size},
            },
            "update": {
                "x": {"scale": "xscale", "signal": "(tooltip.x + tooltip.x2) / 2", "band": 0.5},
                "y": {"scale": "yscale", "signal": "tooltip.y", "offset": -2},
                "text": {"signal": "tooltip.y"},
                "fillOpacity": [
                    {"test": "datum === tooltip", "value": 0},
                    {"value": 1},
                ],
            },
        },
    })
|
def get_fw_dev_map(self, fw_id):
    """Return the object dict and mgmt ip for a firewall."""
    for entry in self.res.values():
        # Match the firewall id against each entry's id list.
        if fw_id in entry.get('fw_id_lst'):
            return entry.get('obj_dict'), entry.get('mgmt_ip')
    return None, None
|
def update(self, claim, ttl=None, grace=None):
    """Updates the specified claim with either a new TTL or grace period,
    or both.
    """
    # Only send the parameters the caller actually supplied.
    body = {key: val
            for key, val in (("ttl", ttl), ("grace", grace))
            if val is not None}
    if not body:
        raise exc.MissingClaimParameters("You must supply a value for "
                                         "'ttl' or 'grace' when calling 'update()'")
    uri = "/%s/%s" % (self.uri_base, utils.get_id(claim))
    resp, resp_body = self.api.method_patch(uri, body=body)
|
def action(fun=None, cloudmap=None, names=None, provider=None, instance=None, **kwargs):
    '''Execute a single action on the given provider/instance

    CLI Example:

    .. code-block:: bash

        salt minionname cloud.action start instance=myinstance
        salt minionname cloud.action stop instance=myinstance
        salt minionname cloud.action show_image provider=my-ec2-config image=ami-1624987f
    '''
    client = _get_client()
    try:
        return client.action(fun, cloudmap, names, provider, instance, kwargs)
    except SaltCloudConfigError as err:
        # Log configuration problems instead of propagating them.
        log.error(err)
        return None
|
def start_task(self, task_type_str, current_task_index=None):
    """Call when processing is about to start on a single task of the given
    task type, typically at the top inside of the loop that processes the
    tasks.

    Args:
        task_type_str (str):
            The name of the task, used as a dict key and printed in the
            progress updates.
        current_task_index (int):
            If the task processing loop may skip or repeat tasks, the index
            of the current task must be provided here. This parameter can
            normally be left unset.
    """
    assert task_type_str in self._task_dict, \
        "Task type has not been started yet: {}".format(task_type_str)
    entry = self._task_dict[task_type_str]
    if current_task_index is None:
        entry["task_idx"] += 1
    else:
        entry["task_idx"] = current_task_index
    self._log_progress_if_interval_elapsed()
|
def get_buildout_config(buildout_filename):
    """Parse buildout config with zc.buildout ConfigParser.

    :param buildout_filename: path to the buildout ``.cfg`` file.
    :return: the initialized ``Buildout`` instance.
    """
    print("[localhost] get_buildout_config: {0:s}".format(buildout_filename))
    # Verbosity -100 silences zc.buildout's own output.
    buildout = Buildout(buildout_filename, [('buildout', 'verbosity', '-100')])
    while True:
        try:
            # Touching items() forces initialization of all sections.
            len(buildout.items())
            break
        except OSError:
            # NOTE(review): retries forever with no backoff -- a persistent
            # OSError turns this into a busy loop; confirm this is intended.
            pass
    return buildout
|
def value_type(type_):
    """returns reference to `boost::shared_ptr` or `std::shared_ptr` value type"""
    if not smart_pointer_traits.is_smart_pointer(type_):
        raise TypeError('Type "%s" is not an instantiation of \
boost::shared_ptr or std::shared_ptr' % type_.decl_string)
    # Prefer the declared element_type; fall back to scanning the bases.
    try:
        return internal_type_traits.get_by_name(type_, "element_type")
    except runtime_errors.declaration_not_found_t:
        return _search_in_bases(type_)
|
def order_by_raw(self, sql, bindings=None):
    """Add a raw "order by" clause to the query.

    :param sql: The raw clause
    :type sql: str

    :param bindings: The bindings
    :type bindings: list

    :return: The current QueryBuilder instance
    :rtype: QueryBuilder
    """
    if bindings is None:
        bindings = []
    # Literal "raw" marker -- previously assigned to a local named ``type``,
    # which shadowed the builtin.
    self.orders.append({"type": "raw", "sql": sql})
    self.add_binding(bindings, "order")
    return self
|
def _get_annotations(self, text, language=''):
    """Returns the list of annotations retrieved from the given text.

    Args:
        text (str): Input text.
        language (:obj:`str`, optional): Language code.

    Returns:
        Results in a dictionary. :code:`tokens` contains the list of
        annotations and :code:`language` contains the inferred language
        from the input.
    """
    body = {
        'document': {
            'type': 'PLAIN_TEXT',
            'content': text,
        },
        'features': {
            'extract_syntax': True,
        },
        'encodingType': 'UTF32',
    }
    # Only pin the language when the caller supplied one; otherwise the
    # service infers it.
    if language:
        body['document']['language'] = language

    response = self.service.documents().annotateText(body=body).execute()
    return {
        'tokens': response.get('tokens', []),
        'language': response.get('language'),
    }
|
def search_bugs(self, terms):
    '''http://bugzilla.readthedocs.org/en/latest/api/core/v1/bug.html#search-bugs

    terms = [{'product': 'Infrastructure & Operations'}, {'status': 'NEW'}]
    '''
    params = ''
    for term in terms:
        # Iterate the items instead of popitem()-ing them: the previous
        # implementation destructively emptied the caller's dicts (and
        # silently dropped all but one pair of a multi-key dict).
        for key, value in term.items():
            params = '{p}&{new}={value}'.format(
                p=params, new=quote_url(key), value=quote_url(value))
    return DotDict(self._get('bug', params=params))
|
def dict_encode(in_dict):
    """returns a new dictionary with encoded values useful for encoding
    http queries (python < 3)
    """
    # Python 2 only: ``unicode`` does not exist on Python 3, hence the guard.
    if _IS_PY2:
        out_dict = {}
        for k, v in list(in_dict.items()):
            if isinstance(v, unicode):
                v = v.encode('utf8')
            elif isinstance(v, str):
                # Must be encoded in UTF-8: decode() only validates here,
                # the original (bytes) value is what gets stored.
                v.decode('utf8')
            out_dict[k] = v
        return out_dict
    else:
        raise NotImplementedError
|
def p_static_scalar(p):
    '''static_scalar : common_scalar
    | QUOTE QUOTE
    | QUOTE ENCAPSED_AND_WHITESPACE QUOTE'''
    # NOTE: the docstring above IS the PLY grammar rule; do not edit it
    # without regenerating the parser tables.
    if len(p) == 2:
        # static_scalar : common_scalar
        p[0] = p[1]
    elif len(p) == 3:
        # QUOTE QUOTE -> empty string literal
        p[0] = ''
    else:
        # QUOTE ENCAPSED_AND_WHITESPACE QUOTE; 'string_escape' is a
        # Python 2-only codec.
        p[0] = p[2].decode('string_escape')
|
def sign(message: bytes, sign_key: SignKey) -> Signature:
    """Signs the message and returns signature.

    :param: message - Message to sign
    :param: sign_key - Sign key
    :return: Signature
    """
    logger = logging.getLogger(__name__)
    logger.debug("Bls::sign: >>> message: %r, sign_key: %r", message, sign_key)

    # The native call fills this handle with the new signature instance.
    signature_handle = c_void_p()
    do_call('indy_crypto_bls_sign',
            message, len(message),
            sign_key.c_instance,
            byref(signature_handle))

    result = Signature(signature_handle)
    logger.debug("Bls::sign: <<< res: %r", result)
    return result
|
def get_pickled_ontology(filename):
    """Try to retrieve a cached (pickled) ontology.

    :param filename: base name of the cached ontology file.
    :return: the unpickled ontology, or None if no usable cache exists.
    """
    pickledfile = os.path.join(ONTOSPY_LOCAL_CACHE, filename + ".pickle")
    if GLOBAL_DISABLE_CACHE:
        printDebug("WARNING: DEMO MODE cache has been disabled in __init__.py ==============", "red")
    if os.path.isfile(pickledfile) and not GLOBAL_DISABLE_CACHE:
        try:
            # ``with`` closes the handle deterministically (the previous
            # version leaked the open file object).
            with open(pickledfile, "rb") as handle:
                return cPickle.load(handle)
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt propagate; any load failure is a cache miss.
            print(Style.DIM + "** WARNING: Cache is out of date ** ...recreating it... " + Style.RESET_ALL)
            return None
    return None
|
def plot_all_spectra(self, outdir):
    """This is a convenience function to plot ALL spectra currently stored
    in the container. It is useful to asses whether data filters do perform
    correctly.

    Note that the function just iterates over all ids and plots the
    corresponding spectra, thus it is slow.

    Spectra are named using the format: %.2i_spectrum_id_{}.png.

    Parameters
    ----------
    outdir : string
        Output directory to store spectra in. Created if it does not exist.
    """
    os.makedirs(outdir, exist_ok=True)
    grouped = self.data.groupby('id')
    total = len(grouped.groups.keys())
    for nr, (spec_id, _) in enumerate(grouped):
        print('Plotting spectrum with id {} ({} / {})'.format(spec_id, nr, total))
        plot_filename = ''.join(
            (outdir + os.sep, '{:04}_spectrum_id_{}.png'.format(nr, spec_id)))
        # Only the figure handle is needed here -- close it to free memory.
        _spec_nor, _spec_rec, spec_fig = self.get_spectrum(
            nr_id=spec_id, plot_filename=plot_filename)
        plt.close(spec_fig)
|
def add_host(zone, name, ttl, ip, nameserver='127.0.0.1', replace=True, timeout=5, port=53, **kwargs):
    '''Add, replace, or update the A and PTR (reverse) records for a host.

    CLI Example:

    .. code-block:: bash

        salt ns1 ddns.add_host example.com host1 60 10.1.1.1
    '''
    res = update(zone, name, ttl, 'A', ip, nameserver, timeout, replace, port, **kwargs)
    if res is False:
        return False

    fqdn = '{0}.{1}.'.format(name, zone)
    octets = ip.split('.')[::-1]
    consumed = []
    # Walk up the candidate in-addr.arpa zones, moving one octet at a time
    # from the zone part into the record name, until a PTR update succeeds.
    while len(octets) > 1:
        consumed.append(octets.pop(0))
        rev_zone = '{0}.{1}'.format('.'.join(octets), 'in-addr.arpa.')
        rev_name = '.'.join(consumed)
        ptr = update(rev_zone, rev_name, ttl, 'PTR', fqdn, nameserver,
                     timeout, replace, port, **kwargs)
        if ptr:
            return True
    # No PTR zone accepted the update; report the A-record result.
    return res
|
def predictions(self):
    """Returns the predictions.

    :return: the predictions. None if not available
    :rtype: list
    """
    # The Java call is made unconditionally, matching upstream behaviour.
    preds = javabridge.get_collection_wrapper(
        javabridge.call(self.jobject, "predictions", "()Ljava/util/ArrayList;"))
    if self.discard_predictions:
        return None

    # Wrap each Java prediction in the matching Python class.
    wrapped = []
    for pred in preds:
        if is_instance_of(pred, "weka.classifiers.evaluation.NominalPrediction"):
            wrapped.append(NominalPrediction(pred))
        elif is_instance_of(pred, "weka.classifiers.evaluation.NumericPrediction"):
            wrapped.append(NumericPrediction(pred))
        else:
            wrapped.append(Prediction(pred))
    return wrapped
|
def remove(self, locator):
    """Removes a previously added reference that matches specified locator.

    If many references match the locator, it removes only the first one.
    When all references shall be removed, use [[removeAll]] method instead.

    :param locator: a locator to remove reference
    :return: the removed component reference, or None when nothing matched.
    """
    # ``is None`` identity check instead of the previous ``== None``.
    if locator is None:
        return None

    self._lock.acquire()
    try:
        # Scan from the end so the most recently added match wins.
        for reference in reversed(self._references):
            if reference.match(locator):
                self._references.remove(reference)
                return reference.get_component()
    finally:
        self._lock.release()
    return None
|
def clean_up(self):
    """Release registry credentials and prune stale docker config files.

    When the ``registry_rubber`` auth type was used, the temporary user is
    deleted. Any file in ``$HOME`` containing ``dockercfg`` whose embedded
    timestamp (the middle dot-separated component) is older than 30 seconds
    is removed.
    """
    if self.auth_type == 'registry_rubber' and self.user:
        self._registry_rubber_uonce('delete')

    # clean up old docker configs.
    user_home = os.environ.get('HOME')
    for file_name in os.listdir(user_home):
        if 'dockercfg' not in file_name:
            continue
        # Expect exactly "<prefix>.<epoch>.<suffix>". ``== 2`` replaces the
        # original ``is 2`` identity comparison, which only worked by
        # accident thanks to CPython's small-int cache.
        if file_name.count('.') == 2:
            try:
                parts = file_name.split('.')
                delta = int(time.time()) - int(parts[1])
                # default to 30 seconds?
                if delta > 30:
                    os.remove(os.path.realpath(os.path.join(user_home, file_name)))
            except Exception:
                # Best-effort cleanup: ignore unparsable names and races.
                pass
|
def output_aliases(aliases):
    """Display git aliases"""
    for alias in aliases:
        # Show "git <alias>" next to the legit command it expands to.
        expansion = '!legit ' + alias
        click.echo(columns([colored.yellow('git ' + alias), 20], [expansion, None]))
|
def create_header_data(coord, radius=10., **kwargs):
    """Make an empty sky region at location of skydir.

    skydir : skymaps.SkyDir object
    size : size of region (deg.)
    kwargs : arguments passed to create_header
    """
    header = create_header(coord, radius=radius, **kwargs)
    # Zero-filled data array matching the header's pixel dimensions.
    shape = (header['NAXIS1'], header['NAXIS2'])
    return header, np.zeros(shape)
|
def label(self) -> str:
    """A latex formatted label representing axis expression."""
    text = self.expression.replace("_", "\\;")
    if self.units_kind:
        symbol = wt_units.get_symbol(self.units)
        for variable in self.variables:
            subscripted = "%s_{%s}" % (symbol, variable.label)
            # label can be empty, no empty subscripts
            subscripted = subscripted.replace("_{}", "")
            text = text.replace(variable.natural_name, subscripted)
        # Append the unit in parentheses, e.g. "\left(nm\right)".
        units_dictionary = getattr(wt_units, self.units_kind)
        text += r"\,"
        text += r"\left("
        text += units_dictionary[self.units][2]
        text += r"\right)"
    return r"$\mathsf{%s}$" % text
|
def SESSION_TIME(stats, info):
    """Total time of this session.

    Reports the time elapsed from the construction of the `Stats` object to
    this `submit()` call.

    This is a flag you can pass to `Stats.submit()`.
    """
    duration = time.time() - stats.started_time
    secs = int(duration)
    msecs = int((duration - secs) * 1000)
    # Zero-pad the milliseconds: the previous '%d.%d' rendered 1s + 5ms
    # as "1.5", which reads as one and a half seconds.
    info.append(('session_time', '%d.%03d' % (secs, msecs)))
|
def set_attr(obj, path, value):
    """SAME AS object.__setattr__(), BUT USES DOT-DELIMITED path

    RETURN OLD VALUE
    """
    try:
        return _set_attr(obj, split_field(path), value)
    except Exception as e:
        logger = get_logger()
        # Missing-path errors are only worth a warning; anything else is
        # a genuine failure.
        if PATH_NOT_FOUND in e:
            logger.warning(PATH_NOT_FOUND + ": {{path}}", path=path, cause=e)
        else:
            logger.error("Problem setting value", cause=e)
|
def forget_area(self, area_uuid):
    """Remove an Upload Area from our cache of known areas.

    :param str area_uuid: The RFC4122-compliant UUID of the Upload Area.
    """
    upload_config = self._config.upload
    # Drop the "current" pointer if it targets the forgotten area.
    if upload_config.current_area == area_uuid:
        upload_config.current_area = None
    if area_uuid in upload_config.areas:
        del upload_config.areas[area_uuid]
        self.save()
|
def transpose(self):
    """Builds a new graph with the edges reversed.

    Returns:
        :class:`stacker.dag.DAG`: The transposed graph.
    """
    transposed = DAG()
    # Register every node first so edge insertion never references a
    # missing node.
    for node in self.graph:
        transposed.add_node(node)
    # For each edge A -> B in this graph, add B -> A to the new one.
    for node, edges in self.graph.items():
        for edge in edges:
            transposed.add_edge(edge, node)
    return transposed
|
def to_date(value, default=None):
    """Tries to convert the passed in value to Zope's DateTime.

    :param value: The value to be converted to a valid DateTime
    :type value: str, DateTime or datetime
    :param default: fallback, itself converted via ``to_date`` when used
    :return: The DateTime representation of the value passed in or default
    """
    if isinstance(value, DateTime):
        return value
    if not value:
        if default is None:
            return None
        return to_date(default)
    try:
        if isinstance(value, str) and '.' in value:
            # https://docs.plone.org/develop/plone/misc/datetime.html#datetime-problems-and-pitfalls
            return DateTime(value, datefmt='international')
        return DateTime(value)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # propagate; any parse failure falls back to the default.
        return to_date(default)
|
def pipe_fetchsitefeed(context=None, _INPUT=None, conf=None, **kwargs):
    """A source that fetches and parses the first feed found on one or more
    sites. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipeforever pipe or an iterable of items or fields
    conf : URL -- url

    Yields
    ------
    _OUTPUT : items
    """
    conf = DotDict(conf)
    urls = utils.listize(conf['URL'])
    for item in _INPUT:
        for item_url in urls:
            # Resolve the (possibly templated) URL against the current item.
            url = utils.get_value(DotDict(item_url), DotDict(item), **kwargs)
            url = utils.get_abspath(url)
            if context and context.verbose:
                print "pipe_fetchsitefeed loading:", url  # Python 2 print statement
            # Autodiscover the first RSS link on the page, then parse it and
            # yield each entry of the resulting feed.
            for link in autorss.getRSSLink(url.encode('utf-8')):
                parsed = speedparser.parse(urlopen(link).read())
                for entry in utils.gen_entries(parsed):
                    yield entry
        if item.get('forever'):
            # _INPUT is pipeforever and not a loop,
            # so we just yield our item once
            break
|
def add_one_en_passant_move(self, direction, position):
    """Yields en_passant moves in given direction if it is legal.

    :type: direction: function
    :type: position: Board
    :rtype: gen
    """
    try:
        target = direction(self.location)
        if self._is_en_passant_valid(target, position):
            yield self.create_move(
                end_loc=self.square_in_front(target),
                status=notation_const.EN_PASSANT)
    except IndexError:
        # Off-board squares are simply skipped.
        pass
|
def label_correcting_get_cycle(self, j, pred):
    '''API:
        label_correcting_get_cycle(self, labelled, pred)
    Description:
        In label correcting check cycle it is decided pred has a cycle and
        nodes in the cycle are labelled. We will create a list of nodes
        in the cycle using labelled and pred inputs.
    Pre:
        This method should be called from label_correcting_check_cycle(),
        unless you are sure about what you are doing.
    Input:
        j: Node that predecessor is recently updated. We know that it is
           in the cycle
        pred: Predecessor dictionary that contains a cycle
    Post:
        Returns a list of nodes that represents cycle. It is in
        [n_1, n_2, ..., n_k] form where the cycle has k nodes.
    '''
    # Follow predecessor links until we come back around to j.
    cycle = [j]
    node = pred[j]
    while node != j:
        cycle.append(node)
        node = pred[node]
    # Collected in reverse traversal order; flip to predecessor order.
    cycle.reverse()
    return cycle
|
def find_ML(sampler, modelidx):
    """Find Maximum Likelihood parameters as those in the chain with a
    highest log probability.

    Returns ``(ML, MLp, MLerr, (modelx, model_ML))``: the best
    log-probability, the corresponding parameter vector, a per-parameter
    half-width of the 16th-84th percentile interval, and the model
    evaluated at the ML parameters (``(None, None)`` if unavailable).
    """
    # (walker, step) position of the highest log-probability sample.
    index = np.unravel_index(np.argmax(sampler.lnprobability), sampler.lnprobability.shape)
    MLp = sampler.chain[index]
    if modelidx is not None and hasattr(sampler, "blobs"):
        # Note the swapped index order relative to ``chain`` when
        # indexing ``blobs``.
        blob = sampler.blobs[index[1]][index[0]][modelidx]
        if isinstance(blob, u.Quantity):
            modelx = sampler.data["energy"].copy()
            model_ML = blob.copy()
        elif len(blob) == 2:
            modelx = blob[0].copy()
            model_ML = blob[1].copy()
        else:
            raise TypeError("Model {0} has wrong blob format".format(modelidx))
    elif modelidx is not None and hasattr(sampler, "modelfn"):
        # No stored blobs: re-evaluate the model at the ML parameters.
        blob = _process_blob([sampler.modelfn(MLp, sampler.data)], modelidx, energy=sampler.data["energy"], )
        modelx, model_ML = blob[0], blob[1][0]
    else:
        modelx, model_ML = None, None
    # Half the 16th-84th percentile spread as a symmetric error estimate.
    MLerr = []
    for dist in sampler.flatchain.T:
        hilo = np.percentile(dist, [16.0, 84.0])
        MLerr.append((hilo[1] - hilo[0]) / 2.0)
    ML = sampler.lnprobability[index]
    return ML, MLp, MLerr, (modelx, model_ML)
|
def _GetCurrentControlSet ( self , key_path_suffix ) :
"""Virtual key callback to determine the current control set .
Args :
key _ path _ suffix ( str ) : current control set Windows Registry key path
suffix with leading path separator .
Returns :
WinRegistryKey : the current control set Windows Registry key or None
if not available ."""
|
select_key_path = 'HKEY_LOCAL_MACHINE\\System\\Select'
select_key = self . GetKeyByPath ( select_key_path )
if not select_key :
return None
# To determine the current control set check :
# 1 . The " Current " value .
# 2 . The " Default " value .
# 3 . The " LastKnownGood " value .
control_set = None
for value_name in ( 'Current' , 'Default' , 'LastKnownGood' ) :
value = select_key . GetValueByName ( value_name )
if not value or not value . DataIsInteger ( ) :
continue
control_set = value . GetDataAsObject ( )
# If the control set is 0 then we need to check the other values .
if control_set > 0 or control_set <= 999 :
break
if not control_set or control_set <= 0 or control_set > 999 :
return None
control_set_path = 'HKEY_LOCAL_MACHINE\\System\\ControlSet{0:03d}' . format ( control_set )
key_path = '' . join ( [ control_set_path , key_path_suffix ] )
return self . GetKeyByPath ( key_path )
|
def set_layout(self, layout):
    """Sets the LayoutParams of this widget.

    Since the available properties that may be set for the layout params
    depend on the parent, actual creation of the params is delegated to
    the parent.

    Parameters
    ----------
    layout: Dict
        A dict of layout parameters the parent should use to layout this
        child. The widget defaults are updated with user passed values.
    """
    # Merge user-supplied values over the widget defaults.
    needs_update = self.layout_params is not None
    params = self.default_layout.copy()
    params.update(layout)

    # Delegate creation of the params to the parent widget.
    parent = self.parent()
    if not isinstance(parent, AndroidView):
        # Root node: act as our own parent and always push the update.
        parent = self
        needs_update = True
    parent.apply_layout(self, params)
    if needs_update:
        self.widget.setLayoutParams(self.layout_params)
|
def kilometers(meters=0, miles=0, feet=0, nautical=0):
    """Convert a mix of distance units to kilometers.

    Each keyword gives a quantity in that unit; all contributions are
    summed. ``feet`` and ``nautical`` are converted through the
    module-level ``ft``/``nm`` helpers.
    """
    total = 0.
    if meters:
        total += meters / 1000.
    if feet:
        total += feet / ft(1.)
    if nautical:
        total += nautical / nm(1.)
    # Miles are always added, even when zero.
    total += miles * 1.609344
    return total
|
def split_dataframe(df, train_percentage=0.6, cv_percentage=0.2, test_percentage=0.2):
    """Randomly split a dataframe into training / cross-validation / test sets.

    @return training, cv, test (as pandas dataframes)
    @param df: pandas dataframe
    @param train_percentage: float | percentage of data for training set (default=0.6)
    @param cv_percentage: float | percentage of data for cross validation set (default=0.2)
    @param test_percentage: float | percentage of data for test set (default=0.2)
    """
    assert train_percentage + cv_percentage + test_percentage == 1.0
    n_rows = len(df)
    # list() is required so shuffle() can permute in place -- on Python 3
    # range objects are immutable and the old ``shuffle(range(N))`` raised.
    indices = list(range(n_rows))
    shuffle(indices)

    # get splitting indices
    train_len = int(n_rows * train_percentage)
    cv_len = int(n_rows * cv_percentage)

    # ``.iloc`` replaces the removed DataFrame.ix; the shuffled indices are
    # positions, so positional indexing matches the original intent.
    training = df.iloc[indices[:train_len]]
    cv = df.iloc[indices[train_len:train_len + cv_len]]
    test = df.iloc[indices[train_len + cv_len:]]
    return training, cv, test
|
def cli(env, identifier):
    """Cancel virtual servers."""
    manager = SoftLayer.VSManager(env.client)
    instance_id = helpers.resolve_id(manager.resolve_ids, identifier, 'VS')
    # Prompt for confirmation unless the user disabled prompts.
    confirmed = env.skip_confirmations or formatting.no_going_back(instance_id)
    if not confirmed:
        raise exceptions.CLIAbort('Aborted')
    manager.cancel_instance(instance_id)
|
def _dates(self, key, value):
    """Populate the various ``date_*`` keys through side effects.

    Never stores anything under *key* itself: ``IgnoreKey`` is always
    raised so no value is returned for the triggering key.  The
    ``date_proposed``, ``date_approved``, ``date_started``,
    ``date_cancelled`` and ``date_completed`` keys are filled instead.
    """
    # Subfield code -> record key it populates.
    subfield_map = (
        ('q', 'date_proposed'),
        ('r', 'date_approved'),
        ('s', 'date_started'),
        ('c', 'date_cancelled'),
        ('t', 'date_completed'),
    )
    for code, target in subfield_map:
        raw = value.get(code)
        if raw:
            self[target] = normalize_date(raw)
    raise IgnoreKey
|
def get_program_files_dir():
    """Return the location of the "Program Files" directory on a
    Windows platform.

    Falls back to the conventional default path when bjam does not
    expose the ProgramFiles variable.
    """
    value = bjam.variable("ProgramFiles")
    # bjam variables are lists of strings; collapse to a single path.
    return ' '.join(value) if value else "c:\\Program Files"
|
def Validate(self):
    """Check the source is well constructed.

    Runs every structural validation step in order; each step raises
    on failure.
    """
    validation_steps = (
        self._ValidateReturnedTypes,
        self._ValidatePaths,
        self._ValidateType,
        self._ValidateRequiredAttributes,
        self._ValidateCommandArgs,
    )
    for step in validation_steps:
        step()
|
def _args_for_remote(self):
    """Generate arguments for 'terraform remote config'.

    Returns None if no ``terraform_remote_state`` section is present
    in the configuration.

    :return: list of args for 'terraform remote config' or None
    :rtype: :std:term:`list`
    """
    remote_conf = self.config.get('terraform_remote_state')
    if remote_conf is None:
        return None
    flags = ['-backend=%s' % remote_conf['backend']]
    # Sort the pairs so the generated command line is deterministic.
    flags.extend(
        '-backend-config="%s=%s"' % pair
        for pair in sorted(remote_conf['config'].items())
    )
    return flags
|
def _setup_direct_converter(self, converter):
    """Register *converter* for every (input, output) pair it supports
    directly, so transcoding between similar datatypes can bypass the
    normal conversion routing.
    """
    # Prefer the dedicated direct_inputs list when the converter
    # declares one; otherwise fall back to its regular inputs.
    if hasattr(converter, 'direct_inputs'):
        sources = converter.direct_inputs
    else:
        sources = converter.inputs
    for src in sources:
        for dst in converter.direct_outputs:
            self.direct_converters[(src, dst)] = converter
|
def value(self):
    """The final value, if it has arrived.

    :raises: AttributeError, if not yet complete
    :raises: the stored exception if the Future was :meth:`abort`\\ ed
    """
    if not self._done.is_set():
        raise AttributeError("value")
    if self._failure:
        # _failure holds (type, value, traceback).  Re-raise the stored
        # exception with its original traceback; the old Python 2
        # three-expression raise statement is a SyntaxError on Python 3.
        raise self._failure[1].with_traceback(self._failure[2])
    return self._value
|
def fatal(msg, *args, **kwargs):
    """Print a red `msg` to STDERR and exit.

    To be used in the context of an exception: the current traceback,
    if any, is logged before exiting.  The message is formatted with
    `args` & `kwargs` by ``fatal_noexc``.
    """
    exc_str = format_exc()
    # format_exc() returns 'NoneType: None' when no exception is
    # active; only log when a real traceback exists.  Reuse the string
    # already captured instead of formatting the traceback twice.
    if exc_str.strip() != 'NoneType: None':
        logger.info('{}', exc_str)
    fatal_noexc(msg, *args, **kwargs)
|
def _dpi(self, resolution_tag):
    """Return the dpi value calculated for *resolution_tag*, which can
    be either TIFF_TAG.X_RESOLUTION or TIFF_TAG.Y_RESOLUTION.

    The calculation combines the value of that tag with the value of
    the TIFF_TAG.RESOLUTION_UNIT tag in this parser's |_IfdEntries|
    instance.
    """
    entries = self._ifd_entries
    if resolution_tag not in entries:
        # No stored resolution: fall back to the conventional default.
        return 72
    # The unit tag defaults to 2 (inches) when absent.
    if TIFF_TAG.RESOLUTION_UNIT in entries:
        unit = entries[TIFF_TAG.RESOLUTION_UNIT]
    else:
        unit = 2
    if unit == 1:
        # 1 means "no absolute unit" -- values express aspect ratio only.
        return 72
    # unit == 2 -> dots per inch, unit == 3 -> dots per centimeter.
    per_inch = 1 if unit == 2 else 2.54
    return int(round(entries[resolution_tag] * per_inch))
|
def initialize(self, name, reuse=False):
    """Create an empty Sorting Hat registry.

    This method creates a new database including the schema of
    Sorting Hat.  Any attempt to create a new registry over an
    existing instance will produce an error, except if reuse=True.
    In that case, the database will be reused, assuming the database
    schema is correct (it won't be created in this case).

    :param name: name of the database
    :param reuse: reuse database if it already exists
    :return: CMD_SUCCESS, or one of the CODE_* error codes
    """
    user = self._kwargs['user']
    password = self._kwargs['password']
    host = self._kwargs['host']
    port = self._kwargs['port']
    # Database names containing '-' are rejected up front.
    if '-' in name:
        # fixed typo in the user-facing message: 'dabase' -> 'database'
        self.error("database name '%s' cannot contain '-' characters" % name)
        return CODE_VALUE_ERROR
    try:
        Database.create(user, password, name, host, port)
        # Try to access and create schema
        db = Database(user, password, name, host, port)
        # Load countries list
        self.__load_countries(db)
    except DatabaseExists as e:
        # An existing database is only an error when reuse is off.
        if not reuse:
            self.error(str(e))
            return CODE_DATABASE_EXISTS
    except DatabaseError as e:
        self.error(str(e))
        return CODE_DATABASE_ERROR
    except LoadError as e:
        # Partial initialization: drop what was created before bailing.
        Database.drop(user, password, name, host, port)
        self.error(str(e))
        return CODE_LOAD_ERROR
    return CMD_SUCCESS
|
def right_axis_label(self, label, position=None, rotation=-60, offset=0.08, **kwargs):
    """Set the label on the right axis.

    Parameters
    ----------
    label : str
        The axis label.
    position : 3-tuple of floats, optional
        The position of the text label; computed from *offset* when
        not given.
    rotation : float
        The angle of rotation of the label (default -60).
    offset : float
        Used to compute the distance of the label from the axis.
    kwargs :
        Any kwargs to pass through to matplotlib.
    """
    # Fall back to the default placement derived from the offset.
    placement = position if position else (2. / 5 + offset, 3. / 5, 0)
    self._labels["right"] = (label, placement, rotation, kwargs)
|
def _buildTemplates(self):
    """Render the visualization template and save it to disk.

    Single-file case only; should be adapted to work for multi-file
    visualizations.

    :return: the main URL used for opening the viz
    """
    # Render the one template with no extra context ...
    rendered = self._renderTemplate(self.template_name, extraContext=None)
    # ... and persist it under the main file name in the output path.
    return self._save2File(rendered, self.main_file_name, self.output_path)
|
# NOTE(review): the text below is non-code residue (web-page boilerplate)
# that was accidentally included in this file; preserved as a comment so
# the file stays syntactically valid without deleting content.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.