signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def make_model_sources_image(shape, model, source_table, oversample=1):
    """Make an image containing sources generated from a user-specified model.

    Parameters
    ----------
    shape : 2-tuple of int
        The shape of the output 2D image.
    model : 2D astropy.modeling.models object
        The model to be used for rendering the sources.
    source_table : `~astropy.table.Table`
        Table of parameters for the sources.  Each row corresponds to a
        source whose model parameters are defined by the column names,
        which must match the model parameter names.  Columns that do not
        match model parameters are ignored.  Parameters not present in
        the table keep the ``model`` default value.
    oversample : float, optional
        The sampling factor used to discretize the models on a pixel
        grid.  If 1.0 (the default), each model is sampled at pixel
        centers (this does not preserve the total flux of very small
        sources).  Otherwise, each model is averaged over a grid
        oversampled by the ``oversample`` factor.

    Returns
    -------
    image : 2D `~numpy.ndarray`
        Image containing the model sources.
    """
    image = np.zeros(shape, dtype=np.float64)
    ygrid, xgrid = np.indices(shape)

    # Only table columns that name actual model parameters are applied.
    params_to_set = [name for name in source_table.colnames
                     if name in model.param_names]

    # Remember the starting parameter values so the caller's model is left
    # untouched.  The model is deliberately NOT copied, because some models
    # (e.g. PSF models) may hold substantial amounts of data.
    saved_params = {name: getattr(model, name) for name in params_to_set}

    try:
        for source in source_table:
            for name in params_to_set:
                setattr(model, name, source[name])
            if oversample == 1:
                image += model(xgrid, ygrid)
            else:
                image += discretize_model(model, (0, shape[1]),
                                          (0, shape[0]),
                                          mode='oversample',
                                          factor=oversample)
    finally:
        # Restore original parameter values even if evaluation fails.
        for name, value in saved_params.items():
            setattr(model, name, value)

    return image
|
def GET(self):  # pylint: disable=arguments-differ
    """Display main course list page"""
    welcome = self.app.welcome_page
    if welcome:
        return self.show_page(welcome)
    # No welcome page configured: redirect to the course list.
    raise web.seeother("/courselist")
|
def render_select2_options_code(self, options, id_):
    """Render options for select2."""
    def _encode(value):
        # Complex values are serialized to JSON before embedding.
        return json.dumps(value) if isinstance(value, (dict, list)) else value

    attributes = ["data-{name}='{value}'".format(name=key,
                                                 value=mark_safe(_encode(value)))
                  for key, value in options.items()]
    return mark_safe(' '.join(attributes))
|
def parse_case_snake_to_camel(snake, upper_first=True):
    """Convert a string from snake_case to CamelCase.

    :param str snake: The snake_case string to convert.
    :param bool upper_first: Whether or not to capitalize the first
        character of the string.
    :return: The CamelCase version of string.
    :rtype: str
    """
    words = snake.split('_')
    head = words[0].title() if upper_first else words[0]
    return head + ''.join(word.title() for word in words[1:])
|
def num(value):
    """Convert a value from one of several bases to an int."""
    # Hex-looking strings are parsed base-16, everything else base-10.
    base = 16 if re_hex_num.match(value) else 10
    return int(value, base=base)
|
def runSearchIndividuals(self, request):
    """Runs the specified SearchIndividualsRequest and returns the
    corresponding SearchIndividualsResponse."""
    return self.runSearchRequest(
        request,
        protocol.SearchIndividualsRequest,
        protocol.SearchIndividualsResponse,
        self.individualsGenerator)
|
def _proxy_conf_file(proxyfile, test):
    '''Check if the proxy conf file exists and create it if missing.

    :param proxyfile: path of the proxy configuration file.
    :param test: when True, do a dry run (report, do not write).
    :return: (success, changes_new, changes_old) tuple; ``success`` is
        False only when writing the file failed.
    '''
    changes_old = []
    changes_new = []
    success = True
    if not os.path.exists(proxyfile):
        try:
            if not test:
                changes_new.append(_write_proxy_conf(proxyfile))
                msg = 'Salt Proxy: Wrote proxy conf {0}'.format(proxyfile)
            else:
                msg = 'Salt Proxy: Update required to proxy conf {0}'.format(proxyfile)
        except (OSError, IOError) as err:
            success = False
            msg = 'Salt Proxy: Error writing proxy file {0}'.format(err)
            log.error(msg)
            changes_new.append(msg)
        else:
            # BUG FIX: previously this append also ran on the error path,
            # duplicating the error message in changes_new.
            changes_new.append(msg)
        log.debug(msg)
    else:
        msg = 'Salt Proxy: {0} already exists, skipping'.format(proxyfile)
        changes_old.append(msg)
        log.debug(msg)
    return success, changes_new, changes_old
|
def register_all_shape_checker(shape_checker_function, arg_types,
                               exclude=(), ignore_existing=False):
    """Register a shape checker for all combinations of the given types.

    Convenience shorthand for calling ``register_shape_checker`` when
    registering the same checker for multiple types that can be
    interchanged for the purpose of shape checking.

    Args:
      shape_checker_function: A shape checker, see register_shape_checker.
      arg_types: List of Python type objects. The shape checker will be
        registered for all ordered pairs of these types.
      exclude: Optional collection of ``(type, type)`` tuples to skip.
      ignore_existing: Boolean. Whether to silently skip argument pairs
        that were already registered.
    """
    # NOTE: docstring previously described "gradient adders" /
    # register_add_grad — a copy-paste error; this registers shape checkers.
    for t1 in arg_types:
        for t2 in arg_types:
            if (t1, t2) in exclude:
                continue
            if ignore_existing and (t1, t2) in shape_checkers:
                continue
            register_shape_checker(t1, t2, shape_checker_function)
|
def _get_segment(self, start, request_size, check_response=True):
    """Get a segment of the file from Google Storage.

    Args:
      start: start offset of the segment. Inclusive. Has to be within the
        range of the file.
      request_size: number of bytes to request. Has to be small enough
        for a single urlfetch request. May go over the logical range of
        the file.
      check_response: True to check the validity of the GCS response
        automatically before the future returns. False otherwise. See
        the Yields section.

    Yields:
      If check_response is True, the segment [start, start + request_size)
      of the file.
      Otherwise, a tuple. The first element is the unverified file segment.
      The second element is a closure that checks the response. The caller
      should invoke the closure before consuming the file segment.

    Raises:
      ValueError: if the file has changed while reading.
    """
    # HTTP Range header is inclusive on both ends.
    end = start + request_size - 1
    content_range = '%d-%d' % (start, end)
    headers = {'Range': 'bytes=' + content_range}
    # Asynchronous fetch; this method is an ndb tasklet (yield, then
    # return results via ndb.Return).
    status, resp_headers, content = yield self._api.get_object_async(
        self._path, headers=headers)

    def _checker():
        # 200 = whole object, 206 = partial content.
        errors.check_status(status, [200, 206], self._path, headers,
                            resp_headers, body=content)
        # Raises if the object was replaced while being read.
        self._check_etag(resp_headers.get('etag'))
    if check_response:
        _checker()
        raise ndb.Return(content)
    raise ndb.Return(content, _checker)
|
def _ncc_c_3dim(x, y):
    """Variant of NCCc that operates with a 2-dimensional x array and a
    2-dimensional y array.

    Returns a 3-dimensional array of normalized cross-correlations,
    computed via FFT.
    """
    # Denominator: outer product of row norms.  Zero norms are mapped to
    # +inf so the corresponding correlations become 0 instead of NaN.
    den = norm(x, axis=1)[:, None] * norm(y, axis=1)
    # BUG FIX: np.Inf alias was removed in NumPy 2.0; use np.inf.
    den[den == 0] = np.inf
    x_len = x.shape[-1]
    # Next power of two >= 2 * x_len - 1 for an efficient FFT.
    fft_size = 1 << (2 * x_len - 1).bit_length()
    cc = ifft(fft(x, fft_size) * np.conj(fft(y, fft_size))[:, None])
    # Reassemble the circular correlation into lags -(x_len-1) .. x_len-1.
    cc = np.concatenate((cc[:, :, -(x_len - 1):], cc[:, :, :x_len]), axis=2)
    return np.real(cc) / den.T[:, :, None]
|
def get_cool_off_iso8601(delta: timedelta) -> str:
    """Return datetime.timedelta translated to an ISO 8601 formatted
    duration, for use in e.g. cool offs."""
    remaining = delta.total_seconds()
    minutes, seconds = divmod(remaining, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)

    date_part = f'{days:.0f}D' if days else ''
    time_parts = []
    for amount, designator in ((hours, 'H'), (minutes, 'M'), (seconds, 'S')):
        if amount:
            time_parts.append(f'{amount:.0f}{designator}')

    # The 'T' separator only appears when a time component is present.
    if time_parts:
        return 'P' + date_part + 'T' + ''.join(time_parts)
    return 'P' + date_part
|
def __analizar_controles(self, ret):
    "Check for and extract controls if present in the XML response"
    if 'arrayControles' in ret:
        # Each entry carries a 'control' mapping with tipo/descripcion.
        self.Controles = [
            "%(tipo)s: %(descripcion)s" % entry['control']
            for entry in ret['arrayControles']
        ]
|
def save(self, name):
    """Save the string buffer to a file. Finalizes prior to saving.

    :param name: File path.
    :type name: unicode | str
    """
    self.finalize()
    with open(name, 'wb+') as out:
        if six.PY3:
            # getbuffer avoids copying the underlying BytesIO contents.
            out.write(self.fileobj.getbuffer())
        else:
            out.write(self.fileobj.getvalue().encode('utf-8'))
|
def _pipe(content):
    """Helper function that converts a text-based GET response into
    pipe-separated values for additional manipulation."""
    return _data_frame(content).to_csv(index=False, sep='|')
|
def write(self, outfile, encoding):
    """Method override to create self-closing elements.

    https://docs.djangoproject.com/en/2.0/ref/utils/#django.utils.feedgenerator.SyndicationFeed.write
    https://github.com/django/django/blob/2.0/django/utils/feedgenerator.py#L216
    """
    try:
        # short_empty_elements makes the SAX generator emit <tag/> instead
        # of <tag></tag>; the keyword only exists on Python 3.
        handler = EscapeFriendlyXMLGenerator(outfile, encoding,
                                             short_empty_elements=True)
    except TypeError:
        # Python 2
        handler = EscapeFriendlyXMLGenerator(outfile, encoding)
    handler.startDocument()
    handler.startElement('rss', self.rss_attributes())
    handler.startElement('channel', self.root_attributes())
    self.add_root_elements(handler)
    self.write_items(handler)
    self.endChannelElement(handler)
    # NOTE(review): endDocument() is never called — presumably mirroring
    # the upstream Django writer; verify against the linked source.
    handler.endElement('rss')
|
def service_changed(self, event):
    # type: (ServiceEvent) -> None
    """Called when a handler factory service is un/registered"""
    reference = event.get_service_reference()
    kind = event.get_kind()
    if kind == ServiceEvent.REGISTERED:
        # A new handler factory became available.
        with self.__instances_lock:
            self.__add_handler_factory(reference)
    elif kind == ServiceEvent.UNREGISTERING:
        # A handler factory is going away.
        with self.__instances_lock:
            self.__remove_handler_factory(reference)
|
def init_discrete_hmm_ml(C_full, nstates, reversible=True, stationary=True,
                         active_set=None, P=None, eps_A=None, eps_B=None,
                         separate=None):
    """Initializes a discrete HMM using maximum likelihood of observation
    counts.

    Not yet implemented: always raises NotImplementedError.
    """
    raise NotImplementedError('ML-initialization not yet implemented')
|
def get_meta(self, table_name, constraints=None, column_to_field_name=None,
             is_view=False, is_partition=None):
    """Return a sequence comprising the lines of code necessary
    to construct the inner Meta class for the model corresponding
    to the given database table name."""
    # pylint: disable=arguments-differ,too-many-arguments,unused-argument
    lines = [" class Meta(models.Model.Meta):",
             " db_table = '%s'" % table_name]
    if self.connection.vendor == 'salesforce':
        # Salesforce backends contribute extra Meta lines.
        lines.extend(" " + extra for extra in
                     self.connection.introspection.get_additional_meta(table_name))
    lines.append("")
    return lines
|
def mean(self):
    """Mean of all the values in the SArray, or mean image.

    Returns None on an empty SArray. Raises an exception if called on an
    SArray with non-numeric type or non-Image type.

    Returns
    -------
    out : float | turicreate.Image
        Mean of all values in SArray, or image holding per-pixel mean
        across the input SArray.
    """
    with cython_context():
        if self.dtype != _Image:
            return self.__proxy__.mean()
        # Image mean is computed by the extensions module.
        from .. import extensions
        return extensions.generate_mean(self)
|
def FunctionTimer(on_done=None):
    '''Decorator factory that measures the execution time of a function.

    Borrowed from
    https://medium.com/pythonhive/python-decorator-to-measure-the-execution-time-of-methods-fa04cb6bb36d

    :param on_done: optional callback invoked after each call as
        ``on_done((func_name, elapsed_whole_seconds), args, kwargs)``.
        When omitted, the timing is printed instead.
    :return: a decorator wrapping the target function.
    '''
    def decorate(fn):
        def wrapper(*args, **kwargs):
            started = time.time()
            result = fn(*args, **kwargs)
            elapsed = time.time() - started
            if on_done:
                # Elapsed time is reported in whole seconds.
                on_done((fn.__name__, int(elapsed)), args, kwargs)
            else:
                print(('%r %d sec(s)' % (fn.__name__, elapsed)))
            return result
        return wrapper
    return decorate
|
def osPaste(self):
    """Triggers the OS "paste" keyboard shortcut"""
    from .InputEmulation import Keyboard
    kb = Keyboard()
    # Hold CTRL, tap "v", release CTRL.
    kb.keyDown("{CTRL}")
    kb.type("v")
    kb.keyUp("{CTRL}")
|
def compare_filesystems(fs0, fs1, concurrent=False):
    """Compares the two given filesystems.

    fs0 and fs1 are two mounted GuestFS instances containing the two disks
    to be compared.  If the concurrent flag is True, two processes are used,
    speeding up the comparison on multiple CPUs.

    Returns a dictionary containing files created, removed and modified:
    {'created_files': [<files in fs1 and not in fs0>],
     'deleted_files': [<files in fs0 and not in fs1>],
     'modified_files': [<files in both fs0 and fs1 but different>]}
    """
    if concurrent:
        # Kick off both hashing jobs before waiting on either result.
        pending0 = concurrent_hash_filesystem(fs0)
        pending1 = concurrent_hash_filesystem(fs1)
        files0 = pending0.result()
        files1 = pending1.result()
    else:
        files0 = hash_filesystem(fs0)
        files1 = hash_filesystem(fs1)
    return file_comparison(files0, files1)
|
def QA_util_get_pre_trade_date(cursor_date, n=1):
    """Return the n-th trading day before ``cursor_date`` (the current
    trading day itself is excluded).

    :param cursor_date: date anchoring the lookup
    :param n: number of trading days to step back (default 1)
    """
    cursor_date = QA_util_format_date2str(cursor_date)
    if cursor_date in trade_date_sse:
        return QA_util_date_gap(cursor_date, n, "lt")
    # Not a trading day: snap to the real trading date first, then step back.
    anchor = QA_util_get_real_date(cursor_date)
    return QA_util_date_gap(anchor, n, "lt")
|
def DiscriminatorLayerLoss():
    '''Calculate the discriminator layer loss'''
    data = mx.sym.Flatten(mx.sym.Variable('data'))
    # Gradients must not flow back through the label branch.
    label = mx.sym.BlockGrad(mx.sym.Flatten(mx.sym.Variable('label')))
    zeros = mx.sym.zeros_like(data)
    # Negative Gaussian log-density of data under the label distribution.
    output = -GaussianLogDensity(label, data, zeros)
    return mx.symbol.MakeLoss(mx.symbol.mean(output), name='lloss')
|
def _call_method_from_namespace(obj, method_name, namespace):
    """Call the method, retrieved from obj, with the correct arguments via
    the namespace.

    Args:
        obj: any kind of object
        method_name: method to be called
        namespace: an argparse.Namespace object containing parsed command
            line arguments
    """
    method = getattr(obj, method_name)
    arg_names = _get_args_name_from_parser(method.parser)
    # For __init__ the object itself is dispatched instead of the bound
    # method.
    target = obj if method_name == "__init__" else method
    return _call(target, arg_names, namespace)
|
def _load(self, scale=0.001):
    """Load the Landsat OLI relative spectral responses"""
    with open_workbook(self.path) as wb_:
        for sheet in wb_.sheets():
            # Skip the summary plot sheet.
            if sheet.name in ['Plot of AllBands', ]:
                continue
            if OLI_BAND_NAMES.get(sheet.name.strip()) != self.bandname:
                continue
            # Column 0 holds wavelengths (converted nm -> um), column 1
            # the responses; data starts at row 2.
            wavelengths = sheet.col_values(0, 2)
            responses = sheet.col_values(1, 2)
            self.rsr = {'wavelength': np.array(wavelengths) / 1000.,
                        'response': np.array(responses)}
            break
|
def clean_unique_fields(self):
    """Ensure 'unique fields' are unique among entered data."""
    if not self.unique_fields:
        return
    seen = set()
    duplicates = []
    for form in self.forms:
        fingerprint = tuple(form.cleaned_data[field]
                            for field in self.unique_fields)
        if fingerprint in seen:
            duplicates.append(",".join(fingerprint))
        else:
            seen.add(fingerprint)
    if duplicates:
        raise forms.ValidationError(
            "Fields %s should be unique; found duplicates for %s"
            % (','.join(self.unique_fields), duplicates))
|
def facetintervalrecordlookupone(table, key, start, stop, include_stop=False,
                                 strict=True):
    """As :func:`petl.transform.intervals.facetintervallookupone` but return
    records."""
    trees = facetrecordtrees(table, key, start=start, stop=stop)
    # One lookup object per facet value.
    return {facet: IntervalTreeLookupOne(tree, include_stop=include_stop,
                                         strict=strict)
            for facet, tree in trees.items()}
|
def init(self):
    """Init the connection to the RESTful server."""
    if not self.export_enable:
        return None
    # Build the RESTful URL where the stats will be posted
    endpoint = '{}://{}:{}{}'.format(self.protocol, self.host,
                                     self.port, self.path)
    logger.info("Stats will be exported to the RESTful endpoint {}".format(endpoint))
    return endpoint
|
def camel2snake(name: str) -> str:
    "Change `name` from camel to snake style."
    # Two passes: first split acronym/word boundaries, then lowercase.
    intermediate = re.sub(_camel_re1, r'\1_\2', name)
    return re.sub(_camel_re2, r'\1_\2', intermediate).lower()
|
def update(self, style_sheet=values.unset):
    """Update the StyleSheetInstance.

    :param dict style_sheet: The JSON string that describes the style sheet object
    :returns: Updated StyleSheetInstance
    :rtype: twilio.rest.autopilot.v1.assistant.style_sheet.StyleSheetInstance
    """
    # Delegate to the context proxy.
    return self._proxy.update(style_sheet=style_sheet)
|
def map(self, callback):
    """Run a map over each of the items.

    :param callback: The map function
    :type callback: callable
    :rtype: Collection
    """
    transformed = [callback(item) for item in self.items]
    return self.__class__(transformed)
|
def feed(self, data):
    """Handler for incoming data. (Called by TelnetServer.)"""
    # Only raw bytes are accepted from the wire.
    assert isinstance(data, binary_type)
    self.parser.feed(data)
    # Render again.
    self.cli._redraw()
    # When a return value has been set (enter was pressed), handle command.
    if self.cli.is_returning:
        try:
            return_value = self.cli.return_value()
        except (EOFError, KeyboardInterrupt) as e:
            # Control-D or Control-C was pressed: drop the connection.
            logger.info('%s, closing connection.', type(e).__name__)
            self.close()
            return
        # Handle CLI command
        self._handle_command(return_value)
|
def effect_ratio(self, mechanism, purview):
    """The effect ratio of the ``purview`` given ``mechanism``."""
    direction = Direction.EFFECT
    return self._ratio(direction, mechanism, purview)
|
def autoactivate(client, endpoint_id, if_expires_in=None):
    """Attempts to auto-activate the given endpoint with the given client.

    If auto-activation fails, parses the returned activation requirements
    to determine which methods of activation are supported, then tells
    the user to use 'globus endpoint activate' with the correct option(s).
    """
    kwargs = {}
    if if_expires_in is not None:
        kwargs["if_expires_in"] = if_expires_in
    res = client.endpoint_autoactivate(endpoint_id, **kwargs)
    if res["code"] != "AutoActivationFailed":
        return res
    # Auto-activation failed: explain what is required and exit non-zero.
    message = ("The endpoint could not be auto-activated and must be "
               "activated before it can be used.\n\n"
               + activation_requirements_help_text(res, endpoint_id))
    safeprint(message, write_to_stderr=True)
    click.get_current_context().exit(1)
|
def config(env=DEFAULT_ENV, default=None, **overrides):
    """Returns configured REDIS dictionary from REDIS_URL."""
    raw = os.environ.get(env, default)
    conf = parse(raw) if raw else {}
    # Keyword overrides win over the parsed URL; keys are upper-cased.
    conf.update({key.upper(): value for key, value in overrides.items()})
    return conf
|
def stop_log_child(self):
    '''Stop the logging child process.

    Closes the log pipe fd (if any), then terminates and reaps the child
    process (if any); both attributes are reset to None.
    '''
    if self.log_fd:
        os.close(self.log_fd)
        self.log_fd = None
    if self.log_child:
        try:
            self.debug('children',
                       'stopping log child with pid {0}'.format(self.log_child))
            os.kill(self.log_child, signal.SIGTERM)
            os.waitpid(self.log_child, 0)
        # BUG FIX: "except OSError, e" is Python 2-only syntax; the
        # "as" form works on Python 2.6+ and Python 3.
        except OSError as e:
            if e.errno == errno.ESRCH or e.errno == errno.ECHILD:
                # already gone
                pass
            else:
                raise
        self.log_child = None
|
def commit(self, session):
    """Commit phase for session.

    :param session: sqlalchemy session
    """
    sp_key, sp_hkey = self._keygen(session)
    with self.r.pipeline(transaction=False) as pipe:
        pipe.srem(sp_key, session.meepo_unique_id)
        # Keep the hash around for one hour before it expires.
        pipe.expire(sp_hkey, 60 * 60)
        pipe.execute()
|
def write_chunks(self, data, start, step, count) -> None:
    '''Split data into count equal parts and write them at offsets
    start, start + step, start + 2 * step, ...

    Args:
        data (bytes): The data.
        start (int): First offset.
        step (int): Offset increment.
        count (int): The number of offsets.
    '''
    # Delegate to the low-level ModernGL object.
    self.mglo.write_chunks(data, start, step, count)
|
def default_index(func):
    """Decorator assuring the wrapped method may only run if we are the
    default repository index. This is as we rely on git commands that
    operate on that index only."""
    @wraps(func)
    def guarded(self, *args, **kwargs):
        if self._file_path == self._index_path():
            return func(self, *args, **kwargs)
        raise AssertionError(
            "Cannot call %r on indices that do not represent the default git index"
            % func.__name__)
    # END wrapper method
    return guarded
|
def _storage_list_keys(bucket, pattern):
    """List all storage keys in a specified bucket that match a pattern."""
    rows = [{'Name': item.metadata.name,
             'Type': item.metadata.content_type,
             'Size': item.metadata.size,
             'Updated': item.metadata.updated_on}
            for item in _storage_get_keys(bucket, pattern)]
    return datalab.utils.commands.render_dictionary(
        rows, ['Name', 'Type', 'Size', 'Updated'])
|
def getstate(self):
    """Return the executor state as a (state, exit_code) tuple.

    Returns:
        ("RUNNING", -1) while the job is still running,
        ("COMPLETE", 0) on success, or
        ("EXECUTOR_ERROR", <code>) on failure (255 if the pid vanished).
    """
    state = "RUNNING"
    exit_code = -1
    exitcode_file = os.path.join(self.workdir, "exit_code")
    pid_file = os.path.join(self.workdir, "pid")
    if os.path.exists(exitcode_file):
        # A previous call already recorded the exit code.
        with open(exitcode_file) as f:
            exit_code = int(f.read())
    elif os.path.exists(pid_file):
        with open(pid_file, "r") as pid:
            pid = int(pid.read())
        try:
            # Non-blocking reap; _pid == 0 means the child is still running.
            (_pid, exit_status) = os.waitpid(pid, os.WNOHANG)
            if _pid != 0:
                # Child exited: persist the exit code, drop the pid file.
                exit_code = exit_status >> 8
                with open(exitcode_file, "w") as f:
                    f.write(str(exit_code))
                os.unlink(pid_file)
        except OSError:
            # No such child (e.g. reaped elsewhere): treat as executor error.
            os.unlink(pid_file)
            exit_code = 255
    if exit_code == 0:
        state = "COMPLETE"
    elif exit_code != -1:
        state = "EXECUTOR_ERROR"
    return state, exit_code
|
def _handle_splits(_str):
    """Check if the incoming date string contains a '-' or '/' and, if so,
    split it into start/stop parts.

    Returns a dict with the raw start/stop strings and their epoch values.
    """
    # Normalize '/' range separators to '-'.
    _str = _str.replace('/', '-')
    _tmp_dict = {}
    if '-' in _str:
        start, stop = _str.split('-')
        # NOTE(review): regex.sub(pattern, repl, string) is called with the
        # date part as the *replacement* argument, rebuilding one side of
        # the range from the other (e.g. abbreviated ranges like
        # "1950-55") — confirm this argument order is intended.
        if _check_number(start):
            start = regex.sub(r'[0-9]+\?*', start, stop)
        elif _check_number(stop):
            stop = regex.sub(r'[0-9]+\?*', stop, start)
    else:
        # No range: start and stop are the same single date.
        start = _str
        stop = _str
    _tmp_dict['start_raw'] = start
    _tmp_dict['stop_raw'] = stop
    _tmp_dict['start_epoch'] = _get_epoch(start)
    _tmp_dict['stop_epoch'] = _get_epoch(stop)
    return _tmp_dict
|
def save_parameters(self, path, grad_only=False):
    """Save all parameters into a file with the specified format.

    Currently hdf5 and protobuf formats are supported.

    Args:
        path: path or file object
        grad_only (bool, optional): If True, only parameters with the
            ``need_grad`` option set are saved.
    """
    nn.save_parameters(path, self.get_parameters(grad_only=grad_only))
|
def run1(self):
    """Run one item (a callback or an RPC wait_any) or sleep.

    Returns:
        True if something happened; False if all queues are empty.
    """
    pending = self.run0()
    if pending is None:
        # Nothing anywhere to run or wait for.
        return False
    if pending > 0:
        # Next item is scheduled in the future; wait for it.
        self.clock.sleep(pending)
    return True
|
def get_node(self, node_id):
    """Returns the node object identified by "node_id"."""
    try:
        return self.nodes[node_id]
    except KeyError:
        # Unknown id: surface a domain-specific error instead.
        raise NonexistentNodeError(node_id)
|
def absolute_links(self) -> _Links:
    """All found links on page, in absolute form
    (`learn more <https://www.navegabem.com/absolute-or-relative-links.html>`_).
    """
    return {self._make_absolute(link) for link in self.links}
|
def contour(z, x=None, y=None, v=5, xlbl=None, ylbl=None, title=None,
            cfntsz=10, lfntsz=None, intrp='bicubic', alpha=0.5, cmap=None,
            vmin=None, vmax=None, fgsz=None, fgnm=None, fig=None, ax=None):
    """Contour plot of a 2D surface.

    If a figure object is specified then the plot is drawn in that figure,
    and ``fig.show()`` is not called. The figure is closed on key entry 'q'.

    Parameters
    ----------
    z : array_like
        2d array of data to plot
    x : array_like, optional (default None)
        Values for x-axis of the plot
    y : array_like, optional (default None)
        Values for y-axis of the plot
    v : int or sequence of ints, optional (default 5)
        An int specifies the number of contours to plot, and a sequence
        specifies the specific contour levels to plot.
    xlbl : string, optional (default None)
        Label for x-axis
    ylbl : string, optional (default None)
        Label for y-axis
    title : string, optional (default None)
        Figure title
    cfntsz : int or None, optional (default 10)
        Contour label font size. No contour labels are displayed if set
        to 0 or None.
    lfntsz : int, optional (default None)
        Axis label font size. The default font size is used if set to None.
    intrp : string, optional (default 'bicubic')
        Specify type of interpolation used to display image underlying
        contours (see ``interpolation`` parameter of
        :meth:`matplotlib.axes.Axes.imshow`)
    alpha : float, optional (default 0.5)
        Underlying image display alpha value
    cmap : :class:`matplotlib.colors.Colormap`, optional (default None)
        Colour map for surface. If None, defaults to cm.coolwarm
    vmin, vmax : float, optional (default None)
        Set upper and lower bounds for the colour map (see the
        corresponding parameters of :meth:`matplotlib.axes.Axes.imshow`)
    fgsz : tuple (width, height), optional (default None)
        Specify figure dimensions in inches
    fgnm : integer, optional (default None)
        Figure number of figure
    fig : :class:`matplotlib.figure.Figure` object, optional (default None)
        Draw in specified figure instead of creating one
    ax : :class:`matplotlib.axes.Axes` object, optional (default None)
        Plot in specified axes instead of current axes of figure

    Returns
    -------
    fig : :class:`matplotlib.figure.Figure` object
        Figure object for this figure
    ax : :class:`matplotlib.axes.Axes` object
        Axes object for this plot
    """
    # Remember whether the caller supplied a figure; if not, one is
    # created here and fig.show() is called at the end.
    figp = fig
    if fig is None:
        fig = plt.figure(num=fgnm, figsize=fgsz)
        fig.clf()
        ax = fig.gca()
    elif ax is None:
        ax = fig.gca()
    if cmap is None:
        cmap = cm.coolwarm
    # Default axis coordinates are plain pixel indices.
    if x is None:
        x = np.arange(z.shape[1])
    else:
        x = np.array(x)
    if y is None:
        y = np.arange(z.shape[0])
    else:
        y = np.array(y)
    xg, yg = np.meshgrid(x, y)
    cntr = ax.contour(xg, yg, z, v, colors='black')
    if cfntsz is not None and cfntsz > 0:
        plt.clabel(cntr, inline=True, fontsize=cfntsz)
    # Image of the surface drawn underneath the contour lines.
    im = ax.imshow(z, origin='lower', interpolation=intrp, aspect='auto',
                   extent=[x.min(), x.max(), y.min(), y.max()], cmap=cmap,
                   vmin=vmin, vmax=vmax, alpha=alpha)
    # Status-bar coordinate formatting.
    ax.fmt_xdata = lambda x: "{: .2f}".format(x)
    ax.fmt_ydata = lambda x: "{: .2f}".format(x)
    if title is not None:
        ax.set_title(title)
    if xlbl is not None:
        ax.set_xlabel(xlbl, fontsize=lfntsz)
    if ylbl is not None:
        ax.set_ylabel(ylbl, fontsize=lfntsz)
    # Colorbar in its own axes to the right of the plot.
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.2)
    plt.colorbar(im, ax=ax, cax=cax)
    # Hook up interactive helpers ('q' to close, zooming, data cursor).
    attach_keypress(fig)
    attach_zoom(ax)
    if have_mpldc:
        mpldc.datacursor()
    if figp is None:
        fig.show()
    return fig, ax
|
def sbi_ids(self) -> List[str]:
    """Get the list of SBI Ids.

    Returns:
        list, list of SBI ids associated with this subarray.
    """
    raw = DB.get_hash_value(self._key, 'sbi_ids')
    # The hash field stores a Python literal; parse it safely.
    return ast.literal_eval(raw)
|
def _create_setter(self, func, *targets):
    """Returns a function wrapping the supplied function. The returned
    wrapper can be used as the setter in a property definition. Raises a
    RuntimeError if the signature of the supplied function is not
    compatible with the setter-concept (exactly one argument except self).

    :param func: Callable or name of method on one object in targets.
    :param targets: List of targets with decreasing priority for finding func.
    :return: Setter function for constructing a wrapper-property or ``None``.
    """
    if not func:
        return None
    resolved = self._get_callable(func, *targets)
    if not self._function_has_n_args(resolved, 1):
        raise RuntimeError(
            'The function \'{}\' does not look like a setter function. A valid setter '
            'function has exactly one argument without a default. The self-argument of '
            'methods does not count towards that number.'.format(resolved.__name__))

    def setter(obj, value):
        # The property owner (obj) is ignored; the wrapped callable only
        # receives the value.
        resolved(value)

    return setter
|
def create(self):
    """Update with current tools for each branch at the version chosen"""
    self.add_handlers({'^Q': self.quit})
    self.add(npyscreen.TitleText,
             name='Select which tools to add from each branch selected:',
             editable=False)
    self.add(npyscreen.Textfield,
             value='NOTE tools you have already installed will be ignored',
             color='STANDOUT', editable=False)
    # i tracks the absolute form row (rely) of the next widget.
    i = 6
    for branch in self.parentApp.repo_value['versions']:
        self.tools_tc[branch] = {}
        self.add(npyscreen.TitleText, name='Branch: ' + branch,
                 editable=False, rely=i, relx=5, max_width=25)
        tools = self.repo_tools(branch)
        i += 1
        for tool in tools:
            value = True
            # /dev tools default to unchecked.
            if tool.startswith('/dev'):
                value = False
            # tool in base directory
            if tool == '' or tool.startswith(':'):
                tool = '/' + tool
            self.tools_tc[branch][tool] = self.add(npyscreen.CheckBox,
                                                   name=tool, value=value,
                                                   relx=10)
            i += 1
        # Blank separation between branches.
        i += 2
|
def clean_sample_data(samples):
    """Clean unnecessary information from sample data, reducing size for
    message passing."""
    out = []
    for data in (utils.to_single_data(x) for x in samples):
        if "dirs" in data:
            # Keep only the directory entries downstream steps rely on.
            dirs = data["dirs"]
            data["dirs"] = {"work": dirs["work"],
                            "galaxy": dirs["galaxy"],
                            "fastq": dirs.get("fastq")}
        data["config"] = {"algorithm": data["config"]["algorithm"],
                          "resources": data["config"]["resources"]}
        for remove_attr in ["config_file", "algorithm"]:
            data.pop(remove_attr, None)
        out.append([data])
    return out
|
def register_view(self, view):
    """Called when the view was registered.

    Hooks up mouse handling on the view, marks the view as registered and
    triggers a full (expanded) refresh of the tree.
    """
    super(StateMachineTreeController, self).register_view(view)
    # Route raw button presses to our click handler.
    self.view.connect('button_press_event', self.mouse_click)
    self.view_is_registered = True
    self.update(with_expand=True)
|
def update(cls, id, params):
    """Update an existing vlan.

    :param id: vlan identifier (resolved through ``cls.usable_id``)
    :param params: update parameters forwarded to the API
    :return: the API call result
    """
    cls.echo('Updating your vlan.')
    return cls.call('hosting.vlan.update', cls.usable_id(id), params)
|
def logs_blocks_sanity_check(
        from_block: 'BlockSpecification',
        to_block: 'BlockSpecification',
) -> None:
    """Check that the from/to blocks passed onto log calls contain only
    appropriate types.

    Each bound must be an ``int`` block number or a string tag such as
    ``'latest'``, ``'pending'`` or ``'earliest'``.

    Raises:
        AssertionError: if either bound is neither int nor str.
    """
    # isinstance accepts a tuple of types; the original tested each type
    # in a separate call. Annotations are forward references so the
    # function can be defined without BlockSpecification in scope.
    assert isinstance(from_block, (int, str)), \
        'event log from block can be integer or latest,pending, earliest'
    assert isinstance(to_block, (int, str)), \
        'event log to block can be integer or latest,pending, earliest'
|
def matches(self, spec):
    """Whether the spec applies to this object.

    Args:
        spec: A function, spec or type to check for a match
            * A 'type[.group][.label]' string which is compared
              against the type, group and label of this object
            * A function which is given the object and returns
              a boolean.
            * An object type matched using isinstance.

    Returns:
        bool: Whether the spec matched this object.
    """
    # Callable (non-type) specs act as arbitrary predicates.
    if callable(spec) and not isinstance(spec, type):
        return spec(self)
    elif isinstance(spec, type):
        return isinstance(self, spec)
    # String/tuple spec: compare against (type name, group, label).
    specification = (self.__class__.__name__, self.group, self.label)
    split_spec = tuple(spec.split('.')) if not isinstance(spec, tuple) else spec
    # '*' or None components are wildcards: mask them out via `nocompare`.
    split_spec, nocompare = zip(*((None, True) if s == '*' or s is None else (s, False) for s in split_spec))
    if all(nocompare):
        return True
    # itemgetter over the non-wildcard positions only.
    match_fn = itemgetter(*(idx for idx, nc in enumerate(nocompare) if not nc))
    self_spec = match_fn(split_spec)
    # First try a literal comparison of the requested components.
    unescaped_match = match_fn(specification[:len(split_spec)]) == self_spec
    if unescaped_match:
        return True
    # Fall back to comparing sanitized identifiers (unescaped form).
    sanitizers = [util.sanitize_identifier, util.group_sanitizer, util.label_sanitizer]
    identifier_specification = tuple(fn(ident, escape=False) for ident, fn in zip(specification, sanitizers))
    identifier_match = match_fn(identifier_specification[:len(split_spec)]) == self_spec
    return identifier_match
|
def postpro_standardize(data, report=None):
    """Standardize everything in data (along axis -1, i.e. time).

    If a report dict is passed, the performed step is recorded in it.

    :param data: array with time on the last axis (assumed shape
        (n, n, time) — connectivity over time; TODO confirm with callers)
    :param report: optional dict collecting post-processing metadata
    :return: (standardized_data, report) tuple
    """
    if not report:
        report = {}
    # Move time to the first axis so mean/std reduce over time.
    time_first = np.transpose(data, [2, 0, 1])
    standardized_data = (time_first - time_first.mean(axis=0)) / time_first.std(axis=0)
    standardized_data = np.transpose(standardized_data, [1, 2, 0])
    report['standardize'] = {}
    report['standardize']['performed'] = 'yes'
    report['standardize']['method'] = 'Z-score'
    # Z-scoring makes self connections NaN (zero std); reset diagonal to 1.
    # BUG FIX: the original applied set_diagonal to the raw input and
    # discarded the result, so the returned array kept a NaN diagonal.
    standardized_data = set_diagonal(standardized_data, 1)
    return standardized_data, report
|
def halt(self):
    """halt: None -> None

    Stop the event, and remove the FD from the loop handler.
    """
    if self._callback:
        self._running.clear()
        self._ioloop.remove_handler(self.serial.fd)
    # Resolve any pending frame future so waiters are released.
    future = self._frame_future
    if future is not None:
        future.set_result(None)
        self._frame_future = None
|
def get_developer_certificate(self, developer_certificate_id, authorization, **kwargs):  # noqa: E501
    """Fetch an existing developer certificate to connect to the bootstrap server.  # noqa: E501

    This REST API is intended to be used by customers to fetch an existing
    developer certificate (a certificate that can be flashed into multiple
    devices to connect to bootstrap server).

    **Example usage:** curl -X GET "http://api.us-east-1.mbedcloud.com/v3/developer-certificates/THE_CERTIFICATE_ID"
    -H "accept: application/json" -H "Authorization: Bearer THE_ACCESS_TOKEN"

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.get_developer_certificate(developer_certificate_id, authorization, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str developer_certificate_id: A unique identifier for the developer certificate. (required)
    :param str authorization: Bearer {Access Token}. (required)
    :return: DeveloperCertificateResponseData
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and asynchronous paths delegate to the same
    # *_with_http_info call; when asynchronous, it returns the thread.
    return self.get_developer_certificate_with_http_info(
        developer_certificate_id, authorization, **kwargs)  # noqa: E501
|
def zip(self, destination: typing.Union[str, Path] = None, encode: bool = True) -> str:
    """Write mission, dictionary etc. to a MIZ file.

    Args:
        destination: target MIZ file (if None, defaults to source MIZ + "_EMIZ")
        encode: whether to re-encode the mission content before zipping

    Returns:
        destination file path as a string
    """
    if encode:
        self._encode()
    if destination is None:
        destination_path = self.miz_path.parent.joinpath(f'{self.miz_path.stem}_EMIZ.miz')
    else:
        destination_path = elib.path.ensure_file(destination, must_exist=False)
    LOGGER.debug('zipping mission to: %s', destination_path)
    # Seed the target with a dummy MIZ; it is immediately overwritten by
    # ZipFile(mode='w') below — presumably kept so the path always exists
    # even if zipping fails. TODO confirm intent.
    destination_path.write_bytes(dummy_miz)
    # compression=8 is zipfile.ZIP_DEFLATED.
    with ZipFile(str(destination_path), mode='w', compression=8) as zip_file:
        for root, _, items in os.walk(self.temp_dir.absolute()):
            for item in items:
                item_abs_path = Path(root, item).absolute()
                # Store entries relative to the temp dir root.
                item_rel_path = Path(item_abs_path).relative_to(self.temp_dir)
                zip_file.write(item_abs_path, arcname=item_rel_path)
    return str(destination_path)
|
def _GenerateLibraryPath(self, tvFile, libraryDir):
    """Creates a full path for TV file in TV library.

    This initially attempts to directly match a show directory in the
    database; if this fails it searches the library directory for the best
    match. The user can then select an existing match or can propose a new
    directory to use as the show root directory.

    The season directory is also generated and added to the show and
    library directories. This is then used by the tvFile
    GenerateNewFilePath method to create a new path for the file.

    Parameters
    ----------
    tvFile : tvfile.TVFile
        Contains show and file info.
    libraryDir : string
        Root path of TV library directory.

    Returns
    -------
    tvfile.TVFile
        This is an updated version of the input object.
    """
    goodlogging.Log.Info("RENAMER", "Looking up library directory in database for show: {0}".format(tvFile.showInfo.showName))
    goodlogging.Log.IncreaseIndent()
    showID, showName, showDir = self._db.SearchTVLibrary(showName=tvFile.showInfo.showName)[0]
    if showDir is None:
        # No DB match: interactively (or automatically) pick a directory
        # from the library, looping until one is chosen or created.
        goodlogging.Log.Info("RENAMER", "No directory match found in database - looking for best match in library directory: {0}".format(libraryDir))
        dirList = os.listdir(libraryDir)
        listDir = False
        matchName = tvFile.showInfo.showName
        while showDir is None:
            if len(dirList) == 0:
                goodlogging.Log.Info("RENAMER", "TV Library directory is empty")
                response = None
            else:
                if listDir is True:
                    goodlogging.Log.Info("RENAMER", "TV library directory contains: {0}".format(', '.join(dirList)))
                else:
                    matchDirList = util.GetBestMatch(matchName, dirList)
                listDir = False
                if self._skipUserInput is True:
                    # Unattended mode: accept only an unambiguous match.
                    if len(matchDirList) == 1:
                        response = matchDirList[0]
                        goodlogging.Log.Info("RENAMER", "Automatic selection of show directory: {0}".format(response))
                    else:
                        response = None
                        goodlogging.Log.Info("RENAMER", "Could not make automatic selection of show directory")
                else:
                    listDirPrompt = "enter 'ls' to list all items in TV library directory"
                    response = util.UserAcceptance(matchDirList, promptComment=listDirPrompt, promptOnly=listDir, xStrOverride="to create new show directory")
            # response: None -> create new dir; 'ls' -> list; a match ->
            # accept it; anything else -> treat as a new search term.
            if response is None:
                showDir = self._CreateNewShowDir(tvFile.showInfo.showName)
                if showDir is None:
                    # User declined to create a directory: give up.
                    goodlogging.Log.DecreaseIndent()
                    return tvFile
            elif response.lower() == 'ls':
                listDir = True
            elif response in matchDirList:
                showDir = response
            else:
                matchName = response
        self._db.UpdateShowDirInTVLibrary(showID, showDir)
    # Add base directory to show path
    showDir = os.path.join(libraryDir, showDir)
    goodlogging.Log.DecreaseIndent()
    # Lookup and add season directory to show path
    seasonDir = self._LookUpSeasonDirectory(showID, showDir, tvFile.showInfo.seasonNum)
    if seasonDir is None:
        return tvFile
    else:
        showDir = os.path.join(showDir, seasonDir)
    # Call tvFile function to generate file name
    tvFile.GenerateNewFilePath(showDir)
    return tvFile
|
def add_registration(self, username, first_name, last_name, email, password=""):
    """Add a registration request for the user.

    On success (request stored and email sent) a success message is
    flashed and the new RegisterUser is returned; otherwise the request
    is rolled back and None is returned.

    :rtype: RegisterUser
    """
    register_user = self.appbuilder.sm.add_register_user(
        username, first_name, last_name, email, password)
    if not register_user:
        return None
    if self.send_email(register_user):
        flash(as_unicode(self.message), "info")
        return register_user
    # Email failed: undo the stored registration request.
    flash(as_unicode(self.error_message), "danger")
    self.appbuilder.sm.del_register_user(register_user)
    return None
|
def add_child_repository(self, repository_id, child_id):
    """Adds a child to a repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of a repository
    arg:    child_id (osid.id.Id): the ``Id`` of the new child
    raise:  AlreadyExists - ``repository_id`` is already a parent of
            ``child_id``
    raise:  NotFound - ``repository_id`` or ``child_id`` not found
    raise:  NullArgument - ``repository_id`` or ``child_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinHierarchyDesignSession.add_child_bin_template
    catalog_session = self._catalog_session
    if catalog_session is None:
        return self._hierarchy_session.add_child(id_=repository_id, child_id=child_id)
    return catalog_session.add_child_catalog(catalog_id=repository_id, child_id=child_id)
|
def remove_usb_controller(self, name):
    """Removes a USB controller from the machine.

    in name of type str

    raises :class:`VBoxErrorObjectNotFound`
        A USB controller with given type doesn't exist.
    """
    # NOTE(review): `basestring` is a Python 2 name; presumably a py2/py3
    # compat alias is defined elsewhere in this module — verify.
    if not isinstance(name, basestring):
        raise TypeError("name can only be an instance of type basestring")
    self._call("removeUSBController", in_p=[name])
|
def update(self, other):
    """Augment this SimLibrary with the information from another SimLibrary.

    :param other: The other SimLibrary
    """
    # Merge every per-library mapping/set from `other` into ours.
    for mine, theirs in ((self.procedures, other.procedures),
                         (self.non_returning, other.non_returning),
                         (self.prototypes, other.prototypes),
                         (self.default_ccs, other.default_ccs)):
        mine.update(theirs)
|
def _assemble_modification(self, stmt):
    """Example: p(HGNC:MAP2K1) => p(HGNC:MAPK1, pmod(Ph, Thr, 185))"""
    # Copy the substrate so the statement's own agent is not mutated.
    substrate = deepcopy(stmt.sub)
    substrate.mods.append(stmt._get_mod_condition())
    # Additions activate; removals deactivate.
    relation = get_causal_edge(stmt, isinstance(stmt, AddModification))
    self._add_nodes_edges(stmt.enz, substrate, relation, stmt.evidence)
|
def get_sort_indicator(self, field):
    """Return a sort class for the active sort only.

    If ``field`` is not the active ``sort_field`` the sort is not active
    and an empty string is returned.
    """
    if field != self.sort_field:
        return ''
    return 'sort-desc' if self.sort_order == '-' else 'sort-asc'
|
def _update_mappings ( self ) :
"""Update the mappings for the current index ."""
|
headers = { 'Content-Type' : 'application/json' , 'DB-Method' : 'PUT' }
url = '/v2/exchange/db/{}/{}/_mappings' . format ( self . domain , self . data_type )
r = self . tcex . session . post ( url , json = self . mapping , headers = headers )
self . tcex . log . debug ( 'update mapping. status_code: {}, response: "{}".' . format ( r . status_code , r . text ) )
|
def fill_cache(self):
    """Fill the cache with new data from the sensor.

    On Bluetooth backend errors the next read attempt is pushed back
    5 minutes by rewinding ``_last_read`` so the cache timeout expires
    then. Note the asymmetry: a failed firmware probe re-raises, while a
    failed data notification is swallowed.
    """
    _LOGGER.debug('Filling cache with new sensor data.')
    try:
        # Also acts as a connectivity probe before the data read below.
        self.firmware_version()
    except BluetoothBackendException:
        # If a sensor doesn't work, wait 5 minutes before retrying
        self._last_read = datetime.now() - self._cache_timeout + timedelta(seconds=300)
        raise
    with self._bt_interface.connect(self._mac) as connection:
        try:
            connection.wait_for_notification(_HANDLE_READ_WRITE_SENSOR_DATA, self, 10)
            # pylint: disable=no-member
            # If a sensor doesn't work, wait 5 minutes before retrying
        except BluetoothBackendException:
            self._last_read = datetime.now() - self._cache_timeout + timedelta(seconds=300)
            return
|
def set_output(self, state):
    """Set whether the function generator is outputting a voltage."""
    command = 'OUTP ON' if state else 'OUTP OFF'
    self.instr.write(command)
|
def upload_token(self, bucket, key=None, expires=3600, policy=None, strict_policy=True):
    """Generate an upload token.

    Args:
        bucket:  name of the target bucket
        key:     name of the file to upload, empty by default
        expires: token lifetime in seconds, default 3600
        policy:  upload policy, empty by default

    Returns:
        the upload token
    """
    if bucket is None or bucket == '':
        raise ValueError('invalid bucket name')
    # Scope is "bucket" or "bucket:key" when a key is given.
    scope = bucket if key is None else '{0}:{1}'.format(bucket, key)
    args = dict(scope=scope, deadline=int(time.time()) + expires)
    if policy is not None:
        self.__copy_policy(policy, args, strict_policy)
    return self.__upload_token(args)
|
def initpkg(pkgname, exportdefs, attr=None, eager=False):
    """Initialize given package from the export definitions.

    Replaces ``sys.modules[pkgname]`` with an :class:`ApiModule` built
    from ``exportdefs``, carrying selected dunder metadata over from the
    module being replaced.
    """
    attr = attr or {}
    oldmod = sys.modules.get(pkgname)
    d = {}
    # Carry metadata over from the old module, normalizing paths.
    f = getattr(oldmod, '__file__', None)
    if f:
        f = _py_abspath(f)
    d['__file__'] = f
    if hasattr(oldmod, '__version__'):
        d['__version__'] = oldmod.__version__
    if hasattr(oldmod, '__loader__'):
        d['__loader__'] = oldmod.__loader__
    if hasattr(oldmod, '__path__'):
        d['__path__'] = [_py_abspath(p) for p in oldmod.__path__]
    if hasattr(oldmod, '__package__'):
        d['__package__'] = oldmod.__package__
    # Keep the old docstring unless the export defs provide their own.
    if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None):
        d['__doc__'] = oldmod.__doc__
    d.update(attr)
    if hasattr(oldmod, "__dict__"):
        oldmod.__dict__.update(d)
    mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
    sys.modules[pkgname] = mod
    # eagerload in bpython to avoid their monkeypatching breaking packages
    if 'bpython' in sys.modules or eager:
        for module in list(sys.modules.values()):
            if isinstance(module, ApiModule):
                # Touching __dict__ forces lazy exports to resolve now.
                module.__dict__
|
def xmoe_dense_4k():
    """Series of architectural experiments on cheap language models.

    For all of these architectures, we run on languagemodel_lm1b8k_packed
    for 32000 steps.

    All log-perplexities are per-token - multiply by 1.298 for per-word.

    Results:
        model             params(M)  einsum  alltoall  mxu-util  log-ppl
        xmoe_dense_4k     30         3.0e12  0         45%       3.31
        xmoe_dense_8k     46         4.7e12  0         49%       3.24
        xmoe_dense_64k    282        2.8e13  0                   3.06
        xmoe_top_2        282        4.0e12  3.4e8     36%       3.07
        xmoe_top_2_c15    282        4.5e12  4.0e8     38%       3.07
        xmoe_2d           282        5.3e12  7.6e8     34%       3.06

    Trained at 4x the batch size:
        xmoe_2d_88        1090       2.1e13  3.0e9     24%       3.07

    Note: configurations and code are likely to change without notice.

    Returns:
        a hparams
    """
    hparams = mtf_transformer.mtf_transformer_base_lm()
    # Dropout disabled across this experiment series.
    hparams.attention_dropout = 0.0
    hparams.relu_dropout = 0.0
    hparams.layer_prepostprocess_dropout = 0.0
    # The following hparams are constant across all these experiments.
    hparams.batch_size = 128
    hparams.d_model = 512
    hparams.d_kv = 128
    hparams.num_heads = 4
    hparams.decoder_layers = ["att", "drd"] * 4
    hparams.shared_embedding_and_softmax_weights = False
    hparams.learning_rate_schedule = "rsqrt_decay"
    # We will vary the following parameters related to the ffn/moe layers.
    hparams.d_ff = 4096
    hparams.layout = "batch:batch;vocab:model;d_ff:model;heads:model"
    hparams.mesh_shape = "batch:8"
    return hparams
|
def enable_cache(self):
    """Enable client-side caching for the current request.

    Sets ``Cache-Control``, ``Expires`` and ``access-control-max-age``
    headers based on ``self.CACHE_TIME`` (seconds).
    """
    self.set_header('Cache-Control', 'max-age=%d, public' % self.CACHE_TIME)
    # HTTP dates must be expressed in GMT with an explicit zone token
    # (RFC 7231 IMF-fixdate). The original used the server's local time
    # and omitted "GMT", producing an invalid/incorrect Expires value.
    expires = datetime.datetime.utcnow() + datetime.timedelta(seconds=self.CACHE_TIME)
    self.set_header('Expires', expires.strftime('%a, %d %b %Y %H:%M:%S GMT'))
    self.set_header('access-control-max-age', self.CACHE_TIME)
|
def run(args):
    """Extract and print email addresses found in each line of the input.

    Args:
        args (argparse.Namespace): must provide an ``input_file`` iterable
            of text lines.
    """
    with warnings.catch_warnings():
        # Silence extractor warnings so output stays one value per line.
        warnings.simplefilter('ignore')
        for line in args.input_file:
            extractions = email_extractor.extract(line)
            for e in extractions:
                print(e.value)
|
def build_row(self, line):
    """Parse a line describing an image or images to show.

    Returns a dict with an ``items`` list of dicts, one per
    whitespace-separated field: ``{'image': name}`` for fields with an
    image extension (.png/.jpg, case-insensitive), ``{'text': word}``
    otherwise.

    Examples:
        # A single image to display
        >>> x.build_row('foo.png')
        {'items': [{'image': 'foo.png'}]}

        # Two images with text in between:
        >>> x.build_row('foo.png or bar.jpg')
        {'items': [{'image': 'foo.png'}, {'text': 'or'}, {'image': 'bar.jpg'}]}
    """
    items = []
    row = dict(items=items)
    image_exts = ('.png', '.jpg')
    # BUG FIX: ''.split(' ') returns [''], so the original emptiness guard
    # never fired and a blank line produced a spurious {'text': ''} item.
    # Filtering empty fields also tolerates runs of spaces.
    fields = [field for field in line.split(' ') if field]
    # nothing there, carry on
    if not fields:
        return row
    for field in fields:
        ext = os.path.splitext(field)[-1]
        if ext.lower() in image_exts:
            items.append(dict(image=field))
        else:
            items.append(dict(text=field))
    return row
|
def main_target_sources(self, sources, main_target_name, no_renaming=0):
    """Return the list of sources to use, if main target rule is invoked
    with 'sources'. If there are any objects in 'sources', they are treated
    as main target instances, and the names of such targets are adjusted to
    be '<name_of_this_target>__<name_of_source_target>'. Such renaming
    is disabled if a non-empty value is passed for the 'no_renaming'
    parameter.
    """
    assert is_iterable_typed(sources, basestring)
    assert isinstance(main_target_name, basestring)
    assert isinstance(no_renaming, (int, bool))
    result = []
    for t in sources:
        t = b2.util.jam_to_value_maybe(t)
        if isinstance(t, AbstractTarget):
            name = t.name()
            if not no_renaming:
                # Prefix with the enclosing target's name to disambiguate.
                name = main_target_name + '__' + name
                t.rename(name)
            # Inline targets are not built by default.
            p = t.project()
            p.mark_targets_as_explicit([name])
            result.append(name)
        else:
            # Plain (non-target) source values pass through unchanged.
            result.append(t)
    return result
|
def plot_mmm(self, data, lower=None, upper=None, **kwargs):
    """Plot a `Series` as a line, with a shaded region around it.

    The ``data`` `Series` is drawn, while the ``lower`` and ``upper``
    `Series` are plotted lightly below and above, with a fill
    between them and the ``data``.

    All three `Series` should have the same `~Series.index` array.

    Parameters
    ----------
    data : `~gwpy.types.Series`
        Data to plot normally.
    lower : `~gwpy.types.Series`
        Lower boundary (on Y-axis) for shade.
    upper : `~gwpy.types.Series`
        Upper boundary (on Y-axis) for shade.
    **kwargs
        Any other keyword arguments acceptable for
        :meth:`~matplotlib.Axes.plot`.

    Returns
    -------
    artists : `tuple`
        All of the drawn artists:

        - `~matplotlib.lines.Line2D` for ``data``,
        - `~matplotlib.lines.Line2D` for ``lower``, if given
        - `~matplotlib.lines.Line2D` for ``upper``, if given
        - `~matplotlib.collections.PolyCollection` for shading

    See Also
    --------
    matplotlib.axes.Axes.plot
        for a full description of acceptable ``*args`` and ``**kwargs``
    """
    alpha = kwargs.pop('alpha', .1)
    # plot mean
    line, = self.plot(data, **kwargs)
    out = [line]
    # modify keywords for shading: thinner, same colour, no legend entry
    kwargs.update({'label': '', 'linewidth': line.get_linewidth() / 2, 'color': line.get_color(), 'alpha': alpha * 2, })
    # plot lower and upper Series
    # fill defaults both bounds to `data` so a missing bound degenerates
    # to a zero-height fill on that side.
    fill = [data.xindex.value, data.value, data.value]
    for i, bound in enumerate((lower, upper)):
        if bound is not None:
            out.extend(self.plot(bound, **kwargs))
            fill[i + 1] = bound.value
    # fill between
    out.append(self.fill_between(*fill, alpha=alpha, color=kwargs['color'], rasterized=kwargs.get('rasterized', True)))
    return out
|
def version_ok(self, version):
    """Is 'version' sufficiently up-to-date?"""
    # No attribute/format configured means any version is acceptable.
    if self.attribute is None or self.format is None:
        return True
    return str(version) != "unknown" and version >= self.requested_version
|
def dict_find_other_sameval_keys(dict_, key):
    """Return the other keys in ``dict_`` whose value equals ``dict_[key]``.

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_dict import *  # NOQA
        >>> dict_ = {'default': 1, 'hierarchical': 5, 'linear': 0, 'kdtree': 1,
        ...          'composite': 3, 'autotuned': 255, 'saved': 254, 'kmeans': 2,
        ...          'lsh': 6, 'kdtree_single': 4}
        >>> key = 'default'
        >>> other_keys = dict_find_other_sameval_keys(dict_, key)
    """
    value = dict_[key]
    # dict_find_keys maps each requested value to the list of keys that
    # hold it; drop the query key so only the *other* keys remain.
    found_dict = dict_find_keys(dict_, [value])
    other_keys = found_dict[value]
    other_keys.remove(key)
    return other_keys
|
def Write(self, Text):
    """Writes data to stream.

    :Parameters:
      Text : unicode
        Data to send.
    """
    # Delegates to the application-level ALTER command for this stream
    # handle; Text is normalized to unicode first.
    self.Application._Alter('WRITE', '%s %s' % (self.Handle, tounicode(Text)))
|
def init(self, key_value_pairs):
    """Initialize datastore.

    Only sets values for keys that are not in the datastore already.

    :param dict key_value_pairs:
        A set of key value pairs to use to initialize the datastore.
    """
    store = DatastoreLegacy.store[self.domain]
    for key, value in key_value_pairs.items():
        # Existing entries win; only fill in missing keys.
        if key not in store:
            store[key] = value
|
def download(self, path='', name=None, overwrite=False, size=None):
    """Download the image.

    :param path: The image will be downloaded to the folder specified at
        path, if path is None (default) then the current working directory
        will be used.
    :param name: The name the image will be stored as (not including file
        extension). If name is None, then the title of the image will be
        used. If the image doesn't have a title, its id will be used. Note
        that if the name given by name or title is an invalid filename,
        then the hash will be used as the name instead.
    :param overwrite: If True overwrite already existing file with the same
        name as what we want to save the file as.
    :param size: Instead of downloading the image in its original size, we
        can choose to instead download a thumbnail of it. Options are
        'small_square', 'big_square', 'small_thumbnail',
        'medium_thumbnail', 'large_thumbnail' or 'huge_thumbnail'.
    :returns: Name of the new file.
    """
    def save_as(filename):
        # Write the already-fetched response body to path/filename.
        local_path = os.path.join(path, filename)
        if os.path.exists(local_path) and not overwrite:
            raise Exception("Trying to save as {0}, but file "
                            "already exists.".format(local_path))
        with open(local_path, 'wb') as out_file:
            out_file.write(resp.content)
        return local_path
    # Maps size names to the single-letter suffix the image host expects.
    valid_sizes = {'small_square': 's', 'big_square': 'b', 'small_thumbnail': 't', 'medium_thumbnail': 'm', 'large_thumbnail': 'l', 'huge_thumbnail': 'h'}
    if size is not None:
        size = size.lower().replace(' ', '_')
        if size not in valid_sizes:
            raise LookupError('Invalid size. Valid options are: {0}'.format(", ".join(valid_sizes.keys())))
    suffix = valid_sizes.get(size, '')
    # Insert the thumbnail suffix just before the file extension.
    base, sep, ext = self.link.rpartition('.')
    resp = requests.get(base + suffix + sep + ext)
    if name or self.title:
        try:
            return save_as((name or self.title) + suffix + sep + ext)
        except IOError:
            pass
    # Invalid filename; fall back to the image id.
    return save_as(self.id + suffix + sep + ext)
|
def upgrade():
    """Upgrade database: create the 'collection' tree table and its indexes."""
    op.create_table(
        'collection',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('dbquery', sa.Text(), nullable=True),
        # rgt/lft/level/tree_id look like nested-set (MPTT-style) tree
        # bookkeeping columns — confirm against the model definition.
        sa.Column('rgt', sa.Integer(), nullable=False),
        sa.Column('lft', sa.Integer(), nullable=False),
        sa.Column('level', sa.Integer(), nullable=False),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.Column('tree_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['collection.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    op.create_index('collection_level_idx', 'collection', ['level'], unique=False)
    op.create_index('collection_lft_idx', 'collection', ['lft'], unique=False)
    op.create_index('collection_rgt_idx', 'collection', ['rgt'], unique=False)
    op.create_index(op.f('ix_collection_name'), 'collection', ['name'], unique=True)
|
def get_study_items(self):
    """Get all study items (e.g., geneids).

    Returns the union of ``study_items`` over all GOEA results.
    """
    return set().union(*(rec.study_items for rec in self.goea_results))
|
def annualize_return(self):
    """Annualized return.

    Returns:
        float: the annualized return over ``self.time_gap``, rounded to
        2 decimal places.
    """
    raw = self.calc_annualize_return(self.assets, self.time_gap)
    return round(float(raw), 2)
|
def _evaluate_standard(op, op_str, a, b, **eval_kwargs):
    """Standard (non-accelerated) evaluation: simply apply ``op(a, b)``.

    ``op_str`` and ``eval_kwargs`` are accepted for signature parity with
    the accelerated evaluators but are unused here.
    """
    if _TEST_MODE:
        # Record that the fallback (non-accelerated) path was taken.
        _store_test_result(False)
    # Suppress numpy floating-point warnings (divide-by-zero etc.).
    with np.errstate(all='ignore'):
        return op(a, b)
|
def put(self, request):
    """Update a single profile for a given video.

    Example request data:
        'edx_video_id': '1234'
        'profile': 'hls',
        'encode_data': {
            'url': 'foo.com/qwe.m3u8'
            'file_size': 34
            'bitrate': 12
        }
    """
    edx_video_id = request.data['edx_video_id']
    profile = request.data['profile']
    encode_data = request.data['encode_data']
    video = Video.objects.get(edx_video_id=edx_video_id)
    profile = Profile.objects.get(profile_name=profile)
    # Delete existing similar profile if it's present and
    # create a new one with updated data (upsert semantics).
    EncodedVideo.objects.filter(video=video, profile=profile).delete()
    EncodedVideo.objects.create(video=video, profile=profile, **encode_data)
    return Response(status=status.HTTP_200_OK)
|
def docsfor(self, rel):  # pragma: nocover
    '''Obtains the documentation for a link relation. Opens in a webbrowser
    window.

    :param rel: link relation, optionally CURIE-prefixed ('prefix:rel');
        non-CURIE rels (including absolute URLs) are opened as-is.
    '''
    # partition() tolerates rels without a colon and rels whose remainder
    # itself contains colons (e.g. 'https://host:8080/rel'), both of which
    # made the original two-way split() raise ValueError.
    prefix, _, _rel = rel.partition(':')
    if prefix in self.curies:
        doc_url = uritemplate.expand(self.curies[prefix], {'rel': _rel})
    else:
        doc_url = rel
    print('opening', doc_url)
    webbrowser.open(doc_url)
|
def auth_publickey(self, username, key, event=None):
    """Authenticate to the server using a private key. The key is used to
    sign data from the server, so it must include the private part.

    If an ``event`` is passed in, this method will return immediately, and
    the event will be triggered once authentication succeeds or fails. On
    success, `is_authenticated` will return ``True``. On failure, you may
    use `get_exception` to get more detailed error information.

    Since 1.1, if no event is passed, this method will block until the
    authentication succeeds or fails. On failure, an exception is raised.
    Otherwise, the method simply returns.

    If the server requires multi-step authentication (which is very rare),
    this method will return a list of auth types permissible for the next
    step. Otherwise, in the normal case, an empty list is returned.

    :param str username: the username to authenticate as
    :param .PKey key: the private key to authenticate with
    :param .threading.Event event:
        an event to trigger when the authentication attempt is complete
        (whether it was successful or not)
    :return:
        list of auth types permissible for the next stage of
        authentication (normally empty)

    :raises:
        `.BadAuthenticationType` -- if public-key authentication isn't
        allowed by the server for this user (and no event was passed in)
    :raises:
        `.AuthenticationException` -- if the authentication failed (and no
        event was passed in)
    :raises: `.SSHException` -- if there was a network error
    """
    if (not self.active) or (not self.initial_kex_done):
        # we should never try to authenticate unless we're on a secure link
        raise SSHException("No existing session")
    if event is None:
        # Internal event used only to implement the blocking wait below.
        my_event = threading.Event()
    else:
        my_event = event
    self.auth_handler = AuthHandler(self)
    self.auth_handler.auth_publickey(username, key, my_event)
    if event is not None:
        # caller wants to wait for event themselves
        return []
    return self.auth_handler.wait_for_response(my_event)
|
def Bipartite(graph, resolution_parameter_01,
              resolution_parameter_0=0, resolution_parameter_1=0,
              degree_as_node_size=False, types='type', **kwargs):
    """Create three layers for bipartite partitions.

    This creates three layers for bipartite partition necessary for
    detecting communities in bipartite networks. These three layers should
    be passed to :func:`Optimiser.optimise_partition_multiplex` with
    ``layer_weights=[1, -1, -1]``.

    Parameters
    ----------
    graph : :class:`ig.Graph`
        Graph to define the bipartite partitions on.
    resolution_parameter_01 : double
        Resolution parameter for in between the two classes.
    resolution_parameter_0 : double
        Resolution parameter for class 0.
    resolution_parameter_1 : double
        Resolution parameter for class 1.
    degree_as_node_size : boolean
        If ``True`` use degree as node size instead of 1, to mimic
        modularity, see Notes.
    types : vertex attribute or list
        Indicator of the class for each vertex. If not 0, 1, it is
        automatically converted.
    **kwargs
        Additional arguments passed on to the default constructor of
        :class:`CPMVertexPartition`.

    Notes
    -----
    For bipartite networks we would like three different resolution
    parameters: one within each class, :math:`\\gamma_0, \\gamma_1`, and
    one between classes, :math:`\\gamma_{01}`. In terms of communities the
    quality function is

    .. math:: Q = \\sum_c (e_c
                  - \\gamma_{01} 2 n_c(0) n_c(1)
                  - \\gamma_0 n^2_c(0)
                  - \\gamma_1 n^2_c(1))

    where :math:`n_c(0)` is the number of nodes of class 0 in community
    :math:`c` (similarly for 1) and :math:`e_c` is the number of edges
    within community :math:`c`. We achieve this by creating three layers:
    (1) all nodes have ``node_size=1`` and all relevant links; (2) only
    nodes of class 0 have ``node_size=1`` and no links; (3) only nodes of
    class 1 have ``node_size=1`` and no links. Adding layer (1) with
    resolution :math:`\\gamma_{01}` and weight 1, and layers (2) and (3)
    with resolutions :math:`\\gamma_{01} - \\gamma_0` and
    :math:`\\gamma_{01} - \\gamma_1` and weight -1 recovers the quality
    function above.

    If node sizes are set to the degree, we get something similar to
    modularity, except that the resolution parameter should still be
    divided by :math:`2m`.

    .. note:: This function is not suited for directed graphs in the case
        of using the degree as node sizes.
    """
    if types is not None:
        if isinstance(types, str):
            # A string names a vertex attribute holding the classes.
            types = graph.vs[types]
        else:
            # Make sure it is a list
            types = list(types)
    if set(types) != set([0, 1]):
        # Map arbitrary class labels onto consecutive integer ids.
        new_type = _ig.UniqueIdGenerator()
        types = [new_type[t] for t in types]
    if set(types) != set([0, 1]):
        raise ValueError("More than one type specified.")
    if degree_as_node_size:
        if (graph.is_directed()):
            raise ValueError("This method is not suitable for directed graphs " +
                             "when using degree as node sizes.")
        node_sizes = graph.degree()
    else:
        node_sizes = [1] * graph.vcount()
    # Layer (1): full graph at the between-class resolution.
    partition_01 = CPMVertexPartition(graph, node_sizes=node_sizes,
                                      resolution_parameter=resolution_parameter_01,
                                      **kwargs)
    # Layers (2) and (3): same vertex set, no edges; node sizes are zeroed
    # for the other class.
    H_0 = graph.subgraph_edges([], delete_vertices=False)
    partition_0 = CPMVertexPartition(H_0, weights=None,
                                     node_sizes=[s if t == 0 else 0
                                                 for v, s, t in zip(graph.vs, node_sizes, types)],
                                     resolution_parameter=resolution_parameter_01 - resolution_parameter_0,
                                     **kwargs)
    H_1 = graph.subgraph_edges([], delete_vertices=False)
    partition_1 = CPMVertexPartition(H_1, weights=None,
                                     node_sizes=[s if t == 1 else 0
                                                 for v, s, t in zip(graph.vs, node_sizes, types)],
                                     resolution_parameter=resolution_parameter_01 - resolution_parameter_1,
                                     **kwargs)
    return partition_01, partition_0, partition_1
|
def sapm(self, effective_irradiance, temp_cell, **kwargs):
    """Use the :py:func:`sapm` function, the input parameters,
    and ``self.module_parameters`` to calculate
    Voc, Isc, Ix, Ixx, Vmp/Imp.

    Parameters
    ----------
    effective_irradiance : Series
        The effective irradiance reaching the module's cells (W/m^2).
    temp_cell : Series
        The cell temperature (degrees C).
    kwargs
        Accepted for call-signature compatibility; NOTE(review): they are
        not forwarded to ``sapm`` -- confirm this is intentional.

    Returns
    -------
    See pvsystem.sapm for details
    """
    # Pure delegation: combine the inputs with this system's module
    # parameters and hand off to the module-level `sapm` function.
    return sapm(effective_irradiance, temp_cell, self.module_parameters)
|
def list_market_profit_and_loss(self, market_ids, include_settled_bets=False, include_bsp_bets=None, net_of_commission=None):
    """Retrieve profit and loss for a given list of markets.

    :param list market_ids: List of markets to calculate profit and loss
    :param bool include_settled_bets: Option to include settled bets
    :param bool include_bsp_bets: Option to include BSP bets
    :param bool net_of_commission: Option to return profit and loss net of
        users current commission rate for this market including any special
        tariffs
    """
    # NOTE: `utils.get_kwargs(locals())` snapshots *every* local name into
    # the request payload, so the parameter names above are the wire-level
    # API parameters and no extra locals may be introduced before this call.
    return self.make_api_request('Sports', 'listMarketProfitAndLoss', utils.get_kwargs(locals()), model=models.MarketProfitAndLoss,)
|
def x_y_by_col_lbl_inplace(df, y_col_lbl):
    """Breaks the given dataframe into an X frame and a y series by the given
    column name.

    The original frame is returned, without the y series column, as the X
    frame, so no new dataframes are created.

    Parameters
    ----------
    df : pandas.DataFrame
        The dataframe to split.
    y_col_lbl : object
        The label of the y column.

    Returns
    -------
    X, y : pandas.DataFrame, pandas.Series
        A dataframe made up of all columns but the column with the given name
        and a series made up of the same column, respectively.

    Example
    -------
    >>> import pandas as pd
    >>> data = [[23, 'Jo', 4], [19, 'Mi', 3]]
    >>> df = pd.DataFrame(data, [1, 2], ['Age', 'Name', 'D'])
    >>> X, y = x_y_by_col_lbl_inplace(df, 'D')
    """
    # `pop` removes the target column from the frame in place and hands it
    # back as a Series, so the very same DataFrame object doubles as X.
    target = df.pop(y_col_lbl)
    return df, target
|
def from_bma_history(cls: Type[TransactionType], currency: str, tx_data: Dict) -> TransactionType:
    """Get the transaction instance from json

    :param currency: the currency of the tx
    :param tx_data: json data of the transaction
    :return: the transaction, rebuilt by rendering the JSON back into the
        signed raw document format and parsing that
    """
    # Work on a copy so the caller's dict is left untouched.
    tx_data = tx_data.copy()
    tx_data["currency"] = currency
    # Collapse each list-valued field into the newline-joined block the raw
    # document format expects.
    for data_list in ('issuers', 'outputs', 'inputs', 'unlocks', 'signatures'):
        tx_data['multiline_{0}'.format(data_list)] = '\n'.join(tx_data[data_list])
    # Version >= 3 documents carry a Blockstamp line; older versions do not.
    # The template text must remain byte-exact: signatures are computed over
    # the raw form, so any whitespace change breaks verification.
    if tx_data["version"] >= 3:
        signed_raw = """Version: {version}
Type: Transaction
Currency: {currency}
Blockstamp: {blockstamp}
Locktime: {locktime}
Issuers:
{multiline_issuers}
Inputs:
{multiline_inputs}
Unlocks:
{multiline_unlocks}
Outputs:
{multiline_outputs}
Comment: {comment}
{multiline_signatures}
""".format(**tx_data)
    else:
        signed_raw = """Version: {version}
Type: Transaction
Currency: {currency}
Locktime: {locktime}
Issuers:
{multiline_issuers}
Inputs:
{multiline_inputs}
Unlocks:
{multiline_unlocks}
Outputs:
{multiline_outputs}
Comment: {comment}
{multiline_signatures}
""".format(**tx_data)
    return cls.from_signed_raw(signed_raw)
|
def VCInstallDir(self):
    """Microsoft Visual C++ directory."""
    # Accessed purely for its side effect: it raises if no Visual Studio
    # installation can be located at all.
    self.VSInstallDir
    fallback = self._guess_vc() or self._guess_vc_legacy()
    # A "VC++ for Python" registry entry, when present, overrides the guess
    # as the default location.
    vc_for_python_key = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
    vc_for_python_dir = self.ri.lookup(vc_for_python_key, 'installdir')
    if vc_for_python_dir:
        fallback = os.path.join(vc_for_python_dir, 'VC')
    # Prefer the direct VC registry entry; fall back to the default path.
    result = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or fallback
    if os.path.isdir(result):
        return result
    raise distutils.errors.DistutilsPlatformError('Microsoft Visual C++ directory not found')
|
def copy(self, nominal=None, uncertainties=None):
    """Returns a deep copy of the number instance. When *nominal* or
    *uncertainties* are set, they overwrite the fields of the copied
    instance.
    """
    # For each field, use the override when given, otherwise inherit the
    # current value, then build a fresh instance of the same class.
    new_nominal = self.nominal if nominal is None else nominal
    new_uncertainties = self.uncertainties if uncertainties is None else uncertainties
    return self.__class__(new_nominal, uncertainties=new_uncertainties)
|
def append(self, element):
    '''Append a PileupElement to this Pileup. If an identical PileupElement is
    already part of this Pileup, do nothing.

    Parameters
    ----------
    element : PileupElement
        Element to add; its ``locus`` must equal this pileup's ``locus``.

    Raises
    ------
    ValueError
        If the element's locus differs from this pileup's locus.
    '''
    # Validate with an explicit raise instead of `assert`: assertions are
    # silently stripped when Python runs with -O, which would let
    # mismatched loci slip in.
    if element.locus != self.locus:
        raise ValueError(
            "Element locus (%s) != Pileup locus (%s)" % (element.locus, self.locus))
    # `elements` is used as an ordered set: keys are the members, the
    # values are always None and carry no meaning.
    self.elements[element] = None
|
def connect(self):
    """Create new connection unless we already have one.

    Returns the per-thread ``ClientTransport``; on connection failure the
    failing server is marked dead and another is tried.
    """
    # One connection per thread, cached on the thread-local object.
    if not getattr(self._local, 'conn', None):
        try:
            server = self._servers.get()
            logger.debug('Connecting to %s', server)
            self._local.conn = ClientTransport(server, self._framed_transport, self._timeout, self._recycle)
        except (Thrift.TException, socket.timeout, socket.error):
            # Failover: retire this server and retry with the next one.
            # NOTE(review): the recursion is unbounded if every server in
            # the pool keeps failing -- confirm `_servers` eventually raises.
            logger.warning('Connection to %s failed.', server)
            self._servers.mark_dead(server)
            return self.connect()
    return self._local.conn
|
def _get_path_to_jar ( cls , coursier_cache_path , pants_jar_path_base , jar_path ) :
"""Create the path to the jar that will live in . pants . d
: param coursier _ cache _ path : coursier cache location
: param pants _ jar _ path _ base : location under pants workdir to store the hardlink to the coursier cache
: param jar _ path : path of the jar
: return :"""
|
if os . path . abspath ( coursier_cache_path ) not in os . path . abspath ( jar_path ) : # Appending the string ' absolute ' to the jar _ path and joining that is a hack to work around
# python ' s os . path . join behavior of throwing away all components that come before an
# absolute path . See https : / / docs . python . org / 3.3 / library / os . path . html # os . path . join
return os . path . join ( pants_jar_path_base , os . path . normpath ( 'absolute/' + jar_path ) )
else :
return os . path . join ( pants_jar_path_base , 'relative' , os . path . relpath ( jar_path , coursier_cache_path ) )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.