signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def posterior_inf(self, X=None, posterior=None):
    """Do posterior inference on the parameters given this kernel's feature
    map and the model posterior (a GPy posterior, usually found at
    ``m.posterior`` if ``m`` is a GPy model).

    If not given, ``X`` and ``posterior`` are looked up on the highest
    parent (the containing model).

    :param X: input locations; defaults to the model's ``X``
    :param posterior: GPy posterior object; defaults to the model's posterior
    :returns: tuple ``(mean, covariance)`` of the parameter posterior
    :raises RuntimeError: if the kernel is not part of a model and ``X`` or
        ``posterior`` were not supplied
    """
    if X is None:
        try:
            X = self._highest_parent_.X
        except AttributeError:
            # BUG FIX: a missing attribute raises AttributeError, not
            # NameError, so the friendly error below was unreachable before.
            raise RuntimeError("This kernel is not part of a model and cannot be used for posterior inference")
    if posterior is None:
        try:
            posterior = self._highest_parent_.posterior
        except AttributeError:
            raise RuntimeError("This kernel is not part of a model and cannot be used for posterior inference")
    phi_alpha = self.phi(X) * self.variance
    return (
        phi_alpha.T.dot(posterior.woodbury_vector),
        np.eye(phi_alpha.shape[1]) * self.variance
        - mdot(phi_alpha.T, posterior.woodbury_inv, phi_alpha),
    )
def install_middleware(self):
    """Attempt to insert the ScoutApm middleware as the first middleware
    (first on incoming requests, last on outgoing responses)."""
    from django.conf import settings

    if getattr(settings, "MIDDLEWARE", None) is not None:
        # New-style MIDDLEWARE setting: handle both tuple and list forms.
        first = "scout_apm.django.middleware.MiddlewareTimingMiddleware"
        last = "scout_apm.django.middleware.ViewTimingMiddleware"
        if isinstance(settings.MIDDLEWARE, tuple):
            settings.MIDDLEWARE = (first,) + settings.MIDDLEWARE + (last,)
        else:
            settings.MIDDLEWARE.insert(0, first)
            settings.MIDDLEWARE.append(last)
    else:
        # Old-style MIDDLEWARE_CLASSES, with the same tuple-vs-list handling.
        first = "scout_apm.django.middleware.OldStyleMiddlewareTimingMiddleware"
        last = "scout_apm.django.middleware.OldStyleViewMiddleware"
        if isinstance(settings.MIDDLEWARE_CLASSES, tuple):
            settings.MIDDLEWARE_CLASSES = (first,) + settings.MIDDLEWARE_CLASSES + (last,)
        else:
            settings.MIDDLEWARE_CLASSES.insert(0, first)
            settings.MIDDLEWARE_CLASSES.append(last)
def is_cython_function(fn):
    """Check whether *fn* is a function compiled with Cython."""
    # Unwrap bound / class / static methods to the underlying function.
    fn = getattr(fn, "__func__", fn)
    return type(fn).__name__ in (
        "method_descriptor",
        "cython_function_or_method",
        "builtin_function_or_method",
    )
def request_stop(self, message='', exit_code=0):
    """Remove pid and stop daemon.

    :return: None
    """
    if exit_code:
        # On error, log the message and also force it to stderr.
        if message:
            logger.error(message)
            try:
                sys.stderr.write(message)
            except Exception:  # pylint: disable=broad-except
                pass
        bail = "Sorry, I bail out, exit code: %d"
        logger.error(bail, exit_code)
        try:
            sys.stderr.write(bail % exit_code)
        except Exception:  # pylint: disable=broad-except
            pass
    else:
        if message:
            logger.info(message)
    self.unlink()
    self.do_stop()
    logger.info("Stopped %s.", self.name)
    sys.exit(exit_code)
def get(self, filename):
    """Download a distribution archive from the configured Amazon S3 bucket.

    :param filename: The filename of the distribution archive (a string).
    :returns: The pathname of a distribution archive on the local file
              system or :data:`None`.
    :raises: :exc:`.CacheBackendError` when any underlying method fails.
    """
    timer = Timer()
    self.check_prerequisites()
    with PatchedBotoConfig():
        # Check whether the distribution archive is available.
        raw_key = self.get_cache_key(filename)
        logger.info("Checking if distribution archive is available in S3 bucket: %s", raw_key)
        key = self.s3_bucket.get_key(raw_key)
        if key is None:
            logger.debug("Distribution archive is not available in S3 bucket.")
            return None
        # Download the distribution archive to the local binary index.
        # TODO Shouldn't this use LocalCacheBackend.put() instead of
        # implementing the same steps manually?!
        logger.info("Downloading distribution archive from S3 bucket ..")
        file_in_cache = os.path.join(self.config.binary_cache, filename)
        makedirs(os.path.dirname(file_in_cache))
        with AtomicReplace(file_in_cache) as temporary_file:
            key.get_contents_to_filename(temporary_file)
        logger.debug("Finished downloading distribution archive from S3 bucket in %s.", timer)
        return file_in_cache
def update(self, attributes=None):
    """Update the entry, stamping the entry's content type id into *attributes*."""
    attributes = {} if attributes is None else attributes
    attributes['content_type_id'] = self.sys['content_type'].id
    return super(Entry, self).update(attributes)
async def _redirect_async(self, redirect, auth):
    """Redirect the client endpoint using a Link DETACH redirect response.

    :param redirect: The Link DETACH redirect details.
    :type redirect: ~uamqp.errors.LinkRedirect
    :param auth: Authentication credentials to the redirected endpoint.
    :type auth: ~uamqp.authentication.common.AMQPAuth
    """
    # pylint: disable=protected-access
    if not self._connection.cbs:
        _logger.info("Closing non-CBS session.")
        await asyncio.shield(self._session.destroy_async())
        self._session = None
    self._auth = auth
    self._hostname = self._remote_address.hostname
    await self._connection.redirect_async(redirect, auth)
    if not self._connection.cbs and isinstance(self._auth, authentication.CBSAsyncAuthMixin):
        # Build a fresh CBS authenticator; it owns the new session.
        self._connection.cbs = await asyncio.shield(
            self._auth.create_authenticator_async(
                self._connection,
                debug=self._debug_trace,
                incoming_window=self._incoming_window,
                outgoing_window=self._outgoing_window,
                handle_max=self._handle_max,
                on_attach=self._on_attach,
                loop=self.loop))
        self._session = self._auth._session
    elif self._connection.cbs:
        self._session = self._auth._session
    else:
        self._session = self.session_type(
            self._connection,
            incoming_window=self._incoming_window,
            outgoing_window=self._outgoing_window,
            handle_max=self._handle_max,
            on_attach=self._on_attach,
            loop=self.loop)
def parseSyslog(msg):
    """Parse a Syslog message (RFC 5424).

    The Syslog Message Format (RFC 5424,
    https://tools.ietf.org/html/rfc5424#section-6) can be parsed with simple
    whitespace tokenization::

        SYSLOG-MSG = HEADER SP STRUCTURED-DATA [SP MSG]
        HEADER     = PRI VERSION SP TIMESTAMP SP HOSTNAME
                     SP APP-NAME SP PROCID SP MSGID
        NILVALUE   = "-"

    This function does not return STRUCTURED-DATA. It parses NILVALUE ("-")
    STRUCTURED-DATA or simple STRUCTURED-DATA which does not contain an
    (escaped) ']'.

    :returns: A dictionary keyed by the constituent parts of the Syslog
        message.
    """
    tokens = msg.split(' ', 6)
    result = {'pri': '', 'version': '', 'msg': ''}
    # First token carries "<PRI>VERSION".
    pri = tokens[0]
    lt, gt = pri.find('<'), pri.find('>')
    if lt != -1 and gt != -1:
        result['pri'] = pri[lt + 1:gt]
    if gt != -1 and len(pri) > gt:
        result['version'] = pri[gt + 1:]
    # Positional header fields; missing ones become ''.
    for index, field in enumerate(
            ('timestamp', 'hostname', 'appname', 'procid', 'msgid'), start=1):
        result[field] = tokens[index] if len(tokens) > index else ''
    if len(tokens) > 6:
        # Works for NILVALUE STRUCTURED-DATA or simple STRUCTURED-DATA
        # that contains no ']'.
        rest = tokens[6]
        cut = rest.find('-')
        if cut == -1:
            cut = rest.find(']')
        if len(rest) > cut:
            result['msg'] = rest[cut + 1:].strip()
    return result
def _diff_cache_subnet_group ( current , desired ) :
'''If you need to enhance what modify _ cache _ subnet _ group ( ) considers when deciding what is to be
( or can be ) updated , add it to ' modifiable ' below . It ' s a dict mapping the param as used
in modify _ cache _ subnet _ group ( ) to that in describe _ cache _ subnet _ group ( ) . Any data fiddlery that
needs to be done to make the mappings meaningful should be done in the munging section
below as well .
This function will ONLY touch settings that are explicitly called out in ' desired ' - any
settings which might have previously been changed from their ' default ' values will not be
changed back simply by leaving them out of ' desired ' . This is both intentional , and
much , much easier to code : )''' | modifiable = { 'CacheSubnetGroupDescription' : 'CacheSubnetGroupDescription' , 'SubnetIds' : 'SubnetIds' }
need_update = { }
for m , o in modifiable . items ( ) :
if m in desired :
if not o : # Always pass these through - let AWS do the math . . .
need_update [ m ] = desired [ m ]
else :
if m in current : # Equivalence testing works fine for current simple type comparisons
# This might need enhancement if more complex structures enter the picture
if current [ m ] != desired [ m ] :
need_update [ m ] = desired [ m ]
return need_update |
def chisq(psr, formbats=False):
    """Return the total chi-squared for the current timing solution,
    removing the noise-weighted mean residual and ignoring deleted points."""
    if formbats:
        psr.formbats()
    keep = psr.deleted == 0
    res = psr.residuals(removemean=False)[keep]
    err = psr.toaerrs[keep]
    # Subtract the 1/err^2-weighted mean residual.
    res = res - numpy.sum(res / err**2) / numpy.sum(1 / err**2)
    # NOTE(review): 1e-12 looks like a unit conversion on err^2
    # (presumably microseconds -> seconds) -- confirm against toaerrs units.
    return numpy.sum(res * res / (1e-12 * err * err))
def get_torrent_file(self, fp, headers=None, cb=None, num_cb=10):
    """Get a torrent file (see to get_file).

    :type fp: file
    :param fp: The file pointer of where to put the torrent
    :type headers: dict
    :param headers: Headers to be passed
    :type cb: function
    :param cb: a callback function that will be called to report progress
        on the upload. The callback should accept two integer parameters:
        the number of bytes successfully transmitted to S3, and the total
        size of the object to be transmitted.
    :type num_cb: int
    :param num_cb: (optional) If a callback is specified with the cb
        parameter, this determines the granularity of the callback by
        defining the maximum number of times the callback will be called
        during the file transfer.
    """
    # Delegate to get_file with the torrent flag set.
    return self.get_file(fp, headers, cb, num_cb, torrent=True)
def write_collection_from_tmpfile(self, collection_id, tmpfi, parent_sha, auth_info, commit_msg=''):
    """Given a collection_id, temporary filename of content, branch and
    auth_info, delegate to the generic document writer with the collection
    doctype."""
    return self.write_doc_from_tmpfile(
        collection_id,
        tmpfi,
        parent_sha,
        auth_info,
        commit_msg,
        doctype_display_name="collection",
    )
def convert_to_row_table(self, add_units=True):
    '''Convert the block into row-titled elements.

    The elements are copied into the returned table, which can be much
    longer than the original block.

    Args:
        add_units: Indicates if units should be appended to each row item.

    Returns:
        A row-titled table representing the data in the block.
    '''
    try:
        number_types = (int, float, long)  # noqa: F821 -- Python 2 only
    except NameError:
        # BUG FIX: `long` does not exist on Python 3; referencing it raised
        # NameError for every numeric cell. Fall back to (int, float).
        number_types = (int, float)
    rtable = []
    if add_units:
        relavent_units = self.get_relavent_units()
    # Create a row for each numeric data element.
    for row_index in range(self.start[0], self.end[0]):
        for column_index in range(self.start[1], self.end[1]):
            cell = self.table[row_index][column_index]
            if cell is not None and isinstance(cell, number_types):
                titles = self._find_titles(row_index, column_index)
                titles.append(cell)
                if add_units:
                    titles.append(relavent_units.get((row_index, column_index)))
                rtable.append(titles)
    # If we had all 'titles' (no numeric cells), just return the original block.
    if not rtable:
        for row_index in range(self.start[0], self.end[0]):
            row = []
            rtable.append(row)
            for column_index in range(self.start[1], self.end[1]):
                row.append(self.table[row_index][column_index])
                if add_units:
                    row.append(relavent_units.get((row_index, column_index)))
    return rtable
def load_info(self):
    '''Fetch and parse account info for the logged-in user.'''
    base = 'http://' + self.domain
    headers = {
        "Content-type": "application/x-www-form-urlencoded",
        "Accept": "text/plain",
        'Referer': base + '/login.phtml',
        "User-Agent": user_agent,
    }
    req = self.session.get(base + '/team_news.phtml', headers=headers).content
    soup = BeautifulSoup(req)
    self.title = soup.title.string
    estado = soup.find('div', {'id': 'content'}).find('div', {'id': 'manager'}).string
    if estado:
        # A status message here means the account info is unavailable.
        print(estado.strip())
        return
    # Strip <strong> tags before reading the text fields.
    for tag in soup('strong'):
        tag.extract()
    if soup.find('div', {'id': 'userid'}) is not None:
        self.myid = soup.find('div', {'id': 'userid'}).p.text.strip()[2:]
    self.money = int(soup.find('div', {'id': 'manager_money'}).p.text.strip().replace(".", "")[:-2])
    self.teamvalue = int(soup.find('div', {'id': 'teamvalue'}).p.text.strip().replace(".", "")[:-2])
    self.community_id = soup.find('link')['href'][24:]
    self.username = soup.find('div', {'id': 'username'}).p.a.text
def cov_dvrpmllbb_to_vxyz_single(d, e_d, e_vr, pmll, pmbb, cov_pmllbb, l, b):
    """Propagate distance, radial velocity, and proper-motion uncertainties
    to Galactic coordinates for scalar inputs.

    INPUT:
       d - distance [kpc, as/mas for plx]
       e_d - distance uncertainty [kpc, [as/mas] for plx]
       e_vr - low velocity uncertainty [km/s]
       pmll - proper motion in l (*cos(b)) [[as/mas]/yr]
       pmbb - proper motion in b [[as/mas]/yr]
       cov_pmllbb - uncertainty covariance for proper motion
       l - Galactic longitude [rad]
       b - Galactic latitude [rad]
    OUTPUT:
       cov(vx,vy,vz) [3,3]
    HISTORY:
       2010-04-12 - Written - Bovy (NYU)
    """
    # Jacobian of (vl, vb) w.r.t. (d, pmll, pmbb), scaled by _K.
    M = _K * sc.array([[pmll, d, 0.], [pmbb, 0., d]])
    cov_dpmllbb = sc.zeros((3, 3))
    cov_dpmllbb[0, 0] = e_d**2.
    cov_dpmllbb[1:3, 1:3] = cov_pmllbb
    cov_vlvb = sc.dot(M, sc.dot(cov_dpmllbb, M.T))
    cov_vrvlvb = sc.zeros((3, 3))
    cov_vrvlvb[0, 0] = e_vr**2.
    cov_vrvlvb[1:3, 1:3] = cov_vlvb
    cosl, sinl = m.cos(l), m.sin(l)
    cosb, sinb = m.cos(b), m.sin(b)
    # Rotation between (vr, vl, vb) and Cartesian velocities.
    R = sc.array([
        [cosl * cosb, sinl * cosb, sinb],
        [-sinl, cosl, 0.],
        [-cosl * sinb, -sinl * sinb, cosb],
    ])
    return sc.dot(R.T, sc.dot(cov_vrvlvb, R))
def get_neighbors(self, site, r, include_index=False, include_image=False):
    """Get all neighbors to a site within a sphere of radius r, excluding
    the site itself.

    Args:
        site (Site): Which is the center of the sphere.
        r (float): Radius of sphere.
        include_index (bool): Whether the non-supercell site index is
            included in the returned data.
        include_image (bool): Whether the supercell image is included in
            the returned data.

    Returns:
        [(site, dist) ...] since most of the time, subsequent processing
        requires the distance. If include_index == True, the tuple for each
        neighbor also includes the index of the neighbor. If
        include_image == True, the tuple also includes the supercell image.
    """
    candidates = self.get_sites_in_sphere(
        site.coords, r, include_index=include_index, include_image=include_image)
    # Drop the center site itself.
    return [entry for entry in candidates if site != entry[0]]
def diff_content(a, reference, progressbar=None):
    """Return a list of tuples for items whose content differs.

    Tuple structure: (identifier, hash in a, hash in reference).
    Assumes the lists of identifiers in both datasets are identical; the
    storage broker of *reference* is used to generate the hash for files
    in *a*.

    :param a: first :class:`dtoolcore.DataSet`
    :param reference: second :class:`dtoolcore.DataSet`
    :returns: list of tuples for all items with different content
    """
    mismatches = []
    hasher = reference._storage_broker.hasher
    for identifier in a.identifiers:
        calculated = hasher(a.item_content_abspath(identifier))
        expected = reference.item_properties(identifier)["hash"]
        if calculated != expected:
            mismatches.append((identifier, calculated, expected))
        if progressbar:
            progressbar.update(1)
    return mismatches
def init(size=250):
    """Initialize mpv and return a configured player instance."""
    player = mpv.MPV(start_event_thread=False)
    geometry = f"{size}x{size}"
    for option, value in (
        ("force-window", "immediate"),
        ("keep-open", "yes"),
        ("geometry", geometry),
        ("autofit", geometry),
        ("title", "bum"),
    ):
        player[option] = value
    return player
def save(arr, filename, hdr=False, force=True, use_compression=False):
    r"""Save the image ``arr`` as ``filename`` using information encoded in ``hdr``.

    The target image format is determined by the ``filename`` suffix. If the
    ``force`` parameter is set to true, an already existing image is
    overwritten silently; otherwise an error is thrown.

    The header (``hdr``) object is the one returned by `~medpy.io.load.load`
    and is used opportunistically, possibly loosing some meta-information.
    Generally this function does not guarantee that metadata other than the
    image shape and pixel data type are kept.

    MedPy relies on SimpleITK, which enables the power of ITK for image
    loading and saving. The supported image file formats include the common
    medical (MetaImage, NIfTI, Analyze, DICOM, Nrrd, MINC, GIPL), microscopy
    (MRC, Bio-Rad, LSM, SDT), visualization (VTK) and standard raster
    formats (PNG, JPEG, TIFF, BMP, HDF5). For the formats, dimensionalities
    and pixel data types your configuration supports, run
    ``python3 tests/support.py > myformats.log``; see also
    https://simpleitk.readthedocs.io.

    Parameters
    ----------
    arr : array_like
        The image data with order ``x,y,z,c``.
    filename : string
        Where to save the image; path and filename including the image suffix.
    hdr : object
        The image header containing the metadata.
    force : bool
        Set to True to overwrite an already existing image silently.
    use_compression : bool
        Use data compression if the target format supports it.

    Raises
    ------
    ImageSavingError
        If the image could not be saved due to various reasons.
    """
    logger = Logger.getInstance()
    logger.info('Saving image as {}...'.format(filename))
    # Check image file existence.
    if not force and os.path.exists(filename):
        raise ImageSavingError('The target file {} already exists.'.format(filename))
    # Roll axes from x,y,z,c to z,y,x,c.
    if arr.ndim == 4:
        arr = np.moveaxis(arr, -1, 0)
    arr = arr.T
    sitkimage = sitk.GetImageFromArray(arr)
    # Copy meta-data as far as possible.
    if hdr:
        hdr.copy_to(sitkimage)
    sitk.WriteImage(sitkimage, filename, use_compression)
def putenv(key, value):
    """Like `os.putenv` but takes unicode under Windows + Python 2.

    Args:
        key (pathlike): The env var to set
        value (pathlike): The value to set
    Raises:
        ValueError
    """
    key = path2fsn(key)
    value = path2fsn(value)
    if is_win and PY2:
        try:
            set_windows_env_var(key, value)
        except WindowsError:
            # py3 + win fails here
            raise ValueError
    else:
        try:
            os.putenv(key, value)
        except OSError:
            # win + py3 raise here for invalid keys, which is probably a
            # bug; ValueError seems better.
            raise ValueError
def sync_allocations(self):
    """Synchronize the vxlan_allocations table with configured tunnel ranges."""
    # Determine the currently configured allocatable VNIs.
    vxlan_vnis = set()
    for tun_min, tun_max in self.tunnel_ranges:
        vxlan_vnis.update(six.moves.range(tun_min, tun_max + 1))
    session = bc.get_writer_session()
    bulk_size = 100
    with session.begin(subtransactions=True):
        # Remove unallocated tunnels that are no longer allocatable.
        # Fetch results as a list via all() because we iterate them twice.
        allocs = (session.query(nexus_models_v2.NexusVxlanAllocation)
                  .with_lockmode("update").all())
        # Collect all VNIs present in the db.
        existing_vnis = set(alloc.vxlan_vni for alloc in allocs)
        # Collect the VNIs that need to be deleted from the db.
        vnis_to_remove = [alloc.vxlan_vni for alloc in allocs
                          if alloc.vxlan_vni not in vxlan_vnis and not alloc.allocated]
        # Delete immediately in chunks so the end-of-transaction flush has
        # no work left to do.
        for offset in range(0, len(vnis_to_remove), bulk_size):
            chunk = vnis_to_remove[offset:offset + bulk_size]
            (session.query(nexus_models_v2.NexusVxlanAllocation)
             .filter(nexus_models_v2.NexusVxlanAllocation.vxlan_vni.in_(chunk))
             .delete(synchronize_session=False))
        # Insert the VNIs that are configured but missing from the table.
        vnis = list(vxlan_vnis - existing_vnis)
        for offset in range(0, len(vnis), bulk_size):
            rows = [{'vxlan_vni': vni, 'allocated': False}
                    for vni in vnis[offset:offset + bulk_size]]
            session.execute(nexus_models_v2.NexusVxlanAllocation.__table__.insert(), rows)
def reduce(self, key, values):
    """Select the image with the minimum distance.

    Args:
        key: (see mapper)
        values: (see mapper)

    Yields:
        A (key, value) tuple where key is the image name and value is the
        image as jpeg byte data.
    """
    best = min(values, key=lambda entry: entry[0])
    dist, key, value = best
    print('MinDist[%f]' % dist)
    yield key, value
def uniq_to_level_ipix(uniq):
    """Convert a HEALPix cell uniq number to its (level, ipix) equivalent.

    A uniq number is a 64-bit integer equal to ipix + 4*(4**level). Please
    read the MOC recommendation
    <http://ivoa.net/documents/MOC/20140602/REC-MOC-1.0-20140602.pdf>
    for more details about uniq numbers.

    Parameters
    ----------
    uniq : int
        The uniq number of a HEALPix cell.

    Returns
    -------
    level, ipix : int, int
        The level and index of the HEALPix cell computed from ``uniq``.
    """
    uniq = np.asarray(uniq, dtype=np.int64)
    level = ((np.log2(uniq // 4)) // 2).astype(np.int64)
    _validate_level(level)
    # 1 << 2*(level+1) == 4**(level+1) == 4 * 4**level
    ipix = uniq - (1 << 2 * (level + 1))
    _validate_npix(level, ipix)
    return level, ipix
def write_taxon_info(taxon, include_anc, output):
    """Write data from `taxon` to the `output` stream to demonstrate the
    attributes of a taxon object.

    (Currently some lines are commented out until the web-services call
    returns more info. See:
    https://github.com/OpenTreeOfLife/taxomachine/issues/85 )

    If `include_anc` is True, then ancestor information was requested (so a
    None parent is only expected at the root of the tree).
    """
    output.write(u'Taxon info for OTT ID (ot:ottId) = {}\n'.format(taxon.ott_id))
    output.write(u' name (ot:ottTaxonName) = "{}"\n'.format(taxon.name))
    if taxon.synonyms:
        output.write(u' known synonyms: "{}"\n'.format('", "'.join(taxon.synonyms)))
    else:
        output.write(u' known synonyms: \n')
    output.write(u' OTT flags for this taxon: {}\n'.format(taxon.flags))
    output.write(u' The taxonomic rank associated with this name is: {}\n'.format(taxon.rank))
    output.write(u' The (unstable) node ID in the current taxomachine instance is: {}\n'.format(taxon.taxomachine_node_id))
    if include_anc:
        if taxon.parent is not None:
            output.write(u'Taxon {c} is a child of {p}.\n'.format(c=taxon.ott_id, p=taxon.parent.ott_id))
            # Recurse up the lineage until the root is reached.
            write_taxon_info(taxon.parent, True, output)
        else:
            # BUG FIX: the unicode prefix had been typed inside the literal
            # ('uTaxon ...'), corrupting the emitted text.
            output.write(u'Taxon {c} is the root of the taxonomy.'.format(c=taxon.ott_id))
def get_objective_bank_query_session(self, proxy):
    """Get the OsidSession associated with the objective bank query service.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``ObjectiveBankQuerySession``
    :rtype: ``osid.learning.ObjectiveBankQuerySession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_objective_bank_query()`` is false

    *compliance: optional -- This method must be implemented if
    ``supports_objective_bank_query()`` is true.*
    """
    if not self.supports_objective_bank_query():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.ObjectiveBankQuerySession(proxy=proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
def addLOADDEV(rh):
    """Sets the LOADDEV statement in the virtual machine's directory entry.

    Input:
        Request Handle with the following properties:
            function    - 'CHANGEVM'
            subfunction - 'ADDLOADDEV'
            userid      - userid of the virtual machine
            parms['boot']        - Boot program number
            parms['addr']        - Logical block address of the boot record
            parms['lun']         - One to eight-byte logical unit number of
                                   the FCP-I/O device.
            parms['wwpn']        - World-Wide Port Number
            parms['scpDataType'] - SCP data type
            parms['scpData']     - Designates information to be passed to
                                   the program when it is loaded during
                                   guest IPL.
        Note that any of the parms may be left blank, in which case we
        will not update them.

    Output:
        Request Handle updated with the results.
        Return code - 0: ok, non-zero: error
    """
    rh.printSysLog("Enter changeVM.addLOADDEV")
    # scpDataType and scpData must appear or disappear concurrently.
    if 'scpData' in rh.parms and 'scpDataType' not in rh.parms:
        msg = msgs.msg['0014'][1] % (modId, "scpData", "scpDataType")
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0014'][0])
        return
    if 'scpDataType' in rh.parms and 'scpData' not in rh.parms:
        if rh.parms['scpDataType'].lower() != "delete":
            # scpDataType and scpData must appear or disappear concurrently
            # unless we're deleting data.
            msg = msgs.msg['0014'][1] % (modId, "scpDataType", "scpData")
            rh.printLn("ES", msg)
            rh.updateResults(msgs.msg['0014'][0])
            return
    scpData = ""
    if 'scpDataType' in rh.parms:
        dataType = rh.parms['scpDataType'].lower()
        if dataType == "hex":
            scpData = rh.parms['scpData']
            scpDataType = 3
        elif dataType == "ebcdic":
            scpData = rh.parms['scpData']
            scpDataType = 2
        elif dataType == "delete":
            # BUG FIX: previously scpDataType was only assigned for the
            # delete case when scpData was absent; supplying both raised
            # UnboundLocalError further down.
            scpDataType = 1
        else:
            # scpDataType not hex, ebcdic or delete.
            msg = msgs.msg['0016'][1] % (modId, rh.parms['scpDataType'])
            rh.printLn("ES", msg)
            rh.updateResults(msgs.msg['0016'][0])
            return
    else:
        # Not specified, 0 for do nothing.
        scpDataType = 0
    boot = rh.parms.get('boot', "")
    block = rh.parms.get('addr', "")
    # BUG FIX: str.replace returns a new string; the original discarded the
    # result, so "0x" prefixes were never actually stripped.
    lun = rh.parms.get('lun', "").replace("0x", "")
    wwpn = rh.parms.get('wwpn', "").replace("0x", "")
    parms = ["-T", rh.userid,
             "-b", boot,
             "-k", block,
             "-l", lun,
             "-p", wwpn,
             "-s", str(scpDataType)]
    if scpData != "":
        parms.extend(["-d", scpData])
    results = invokeSMCLI(rh, "Image_SCSI_Characteristics_Define_DM", parms)
    if results['overallRC'] != 0:
        # SMAPI API failed.
        rh.printLn("ES", results['response'])
        rh.updateResults(results)
    rh.printSysLog("Exit changeVM.addLOADDEV, rc: " + str(rh.results['overallRC']))
    return rh.results['overallRC']
def cublasCgeru(handle, m, n, alpha, x, incx, y, incy, A, lda):
    """Rank-1 operation on complex general matrix."""
    # Pack the Python complex scalar into a cuFloatComplex for the C API.
    alpha_ref = ctypes.byref(cuda.cuFloatComplex(alpha.real, alpha.imag))
    status = _libcublas.cublasCgeru_v2(
        handle, m, n, alpha_ref,
        int(x), incx, int(y), incy, int(A), lda)
    cublasCheckStatus(status)
def inpaint(self, rescale_factor=1.0):
    """Fill in the zero pixels of the depth image.

    Parameters
    ----------
    rescale_factor : float
        Amount to rescale the image for inpainting; smaller numbers
        increase speed.

    Returns
    -------
    :obj:`DepthImage`
        Depth image with zero pixels filled in.
    """
    original_shape = (self.height, self.width)
    # 8-connected neighborhood kernel (center excluded).
    kernel = np.array([[1, 1, 1], [1, 0, 1], [1, 1, 1]])
    # Work on a smaller copy for speed.
    resized_data = self.resize(rescale_factor, interp='nearest').data
    cur_data = resized_data.copy()
    # Iteratively replace zero pixels with the mean of their nonzero neighbors.
    while np.any(cur_data == 0):
        neighbor_counts = ssg.convolve2d((cur_data != 0), kernel, mode='same', boundary='symm')
        neighbor_sums = ssg.convolve2d(cur_data, kernel, mode='same', boundary='symm')
        has_neighbors = neighbor_counts > 0
        neighbor_sums[has_neighbors] = (
            neighbor_sums[has_neighbors] / neighbor_counts[has_neighbors])
        neighbor_sums[~has_neighbors] = 0
        # Never overwrite pixels that held real measurements.
        neighbor_sums[resized_data > 0] = resized_data[resized_data > 0]
        cur_data = neighbor_sums
    # Resize the inpainted image back and fill only the originally-zero pixels.
    inpainted_im = DepthImage(cur_data, frame=self.frame)
    filled_data = inpainted_im.resize(original_shape, interp='bilinear').data
    new_data = np.copy(self.data)
    zero_mask = self.data == 0
    new_data[zero_mask] = filled_data[zero_mask]
    return DepthImage(new_data, frame=self.frame)
def validate(self, value):
    """Validate the inputted value against this column's rules.

    If the value does not pass, a validation error is raised. Override
    this method in column sub-classes for more specialized validation.

    :param value: <variant>
    :return: <bool> success
    """
    # Check the required flag (auto-assigned columns may be empty).
    required = (self.testFlag(self.Flags.Required)
                and not self.testFlag(self.Flags.AutoAssign))
    if required and self.isNull(value):
        msg = '{0} is a required column.'.format(self.name())
        raise orb.errors.ColumnValidationError(self, msg)
    # Otherwise, we're good.
    return True
def _parse_commit(self, ref):
    """Parse a commit command."""
    lineno = self.lineno
    mark = self._get_mark_if_any()

    def drain(fetch):
        # Call fetch() repeatedly, collecting results until it returns None.
        collected = []
        while True:
            item = fetch()
            if item is None:
                return collected
            collected.append(item)

    author = self._get_user_info(b'commit', b'author', False)
    more_authors = drain(
        lambda: self._get_user_info(b'commit', b'author', False))
    committer = self._get_user_info(b'commit', b'committer')
    message = self._get_data(b'commit', b'message')
    from_ = self._get_from()
    # Although the spec suggests it's illegal, git-fast-export outputs
    # multiple merges on the one line, e.g. "merge :x :y :z" -- split them.
    merges = []
    for merge in drain(self._get_merge):
        merges.extend(merge.split(b' '))
    properties = dict(drain(self._get_property))
    return commands.CommitCommand(
        ref, mark, author, committer, message, from_, merges,
        list(self.iter_file_commands()), lineno=lineno,
        more_authors=more_authors, properties=properties)
def url_assembler(query_string, no_redirect=0, no_html=0, skip_disambig=0):
    """Assemble the parameter string for a DuckDuckGo API request.

    Args:
        query_string: Query to be passed to DuckDuckGo API.
        no_redirect: Skip HTTP redirects (for !bang commands). Default - False.
        no_html: Remove HTML from text, e.g. bold and italics. Default - False.
        skip_disambig: Skip disambiguation (D) Type. Default - False.

    Returns:
        A "percent-encoded" string which is used as a part of the query.
    """
    params = [('q', query_string.encode("utf-8")), ('format', 'json')]
    # Optional boolean flags are only present in the query when enabled.
    optional_flags = (('no_redirect', no_redirect),
                      ('no_html', no_html),
                      ('skip_disambig', skip_disambig))
    params.extend((name, 1) for name, enabled in optional_flags if enabled)
    return '/?' + urlencode(params)
def clean_tuple(t0, clean_item_fn=None):
    """Return a json-clean tuple.

    :param t0: the tuple to clean
    :param clean_item_fn: callable applied to each element; defaults to
        :func:`clean_item`, which handles/logs any per-item failures.
    """
    clean_item_fn = clean_item_fn or clean_item
    # Build the result lazily; no index bookkeeping needed.
    return tuple(clean_item_fn(item) for item in t0)
def addContentLen(self, content, len):
    """Append the extra substring to the node content.

    NOTE: In contrast to xmlNodeSetContentLen(), @content is supposed to
    be raw text, so unescaped XML special chars are allowed, entity
    references are not supported.
    """
    # Thin wrapper delegating straight to the libxml2 C binding; `self._o`
    # is the underlying xmlNode pointer.
    libxml2mod.xmlNodeAddContentLen(self._o, content, len)
def resource_path(package_name: str, relative_path: typing.Union[str, Path]) -> Path:
    """Get absolute path to resource, works for dev and for PyInstaller."""
    relative_path = Path(relative_path)
    # Try each lookup strategy in order; first existing hit wins.
    for resolver in (_get_from_dev, _get_from_package, _get_from_sys):
        candidate = resolver(package_name, relative_path)
        if candidate.exists():
            return candidate
    raise FileNotFoundError(relative_path)
def _get_types_from_sample(result_vars, sparql_results_json):
    """Return types if homogenous within sample.

    Compare up to 10 rows of results to determine homogeneity.
    DESCRIBE and CONSTRUCT queries, for example,

    :param result_vars:
    :param sparql_results_json:
    """
    # Only inspect the first few bindings -- enough to detect heterogeneity.
    sample = sparql_results_json['results']['bindings'][:10]
    homogeneous_types = {}
    for result_var in result_vars:
        seen_types = set()
        seen_datatypes = set()
        for binding in sample:
            rdf_term = binding.get(result_var)
            if rdf_term is None:
                continue  # skip missing values
            seen_types.add(rdf_term.get('type'))
            seen_datatypes.add(rdf_term.get('datatype'))
        if len(seen_types) > 1 or len(seen_datatypes) > 1:
            return None  # heterogeneous types
        homogeneous_types[result_var] = {
            'type': seen_types.pop() if seen_types else None,
            'datatype': seen_datatypes.pop() if seen_datatypes else None,
        }
    return homogeneous_types
def get_mfd(self, slip, area, shear_modulus=30.0):
    '''Calculates activity rate on the fault.

    :param float slip:
        Slip rate in mm/yr
    :param float area:
        Area of the fault surface (km ** 2)
    :param float shear_modulus:
        Shear modulus of the fault (GPa)
    :returns:
        * Minimum Magnitude (float)
        * Bin width (float)
        * Occurrence Rates (numpy.ndarray)
    '''
    # Working in Nm so convert: shear_modulus - GPa -> Nm
    # area - km ** 2. -> m ** 2.
    # slip - mm/yr -> m/yr
    moment_rate = (shear_modulus * 1.E9) * (area * 1.E6) * (slip / 1000.)
    # Seismic moment of the maximum-magnitude event (in Nm).
    moment_mag = _scale_moment(self.mmax, in_nm=True)
    # Rate of characteristic events needed to balance the moment rate.
    characteristic_rate = moment_rate / moment_mag
    if self.sigma and (fabs(self.sigma) > 1E-5):
        # Spread the characteristic rate over a truncated Gaussian of
        # magnitudes centred on mmax, between lower_bound and upper_bound
        # (expressed in units of sigma).
        self.mmin = self.mmax + (self.lower_bound * self.sigma)
        mag_upper = self.mmax + (self.upper_bound * self.sigma)
        mag_range = np.arange(self.mmin, mag_upper + self.bin_width,
                              self.bin_width)
        # Rate per bin = characteristic rate * probability mass of the bin
        # (CDF difference across half a bin either side of its centre).
        self.occurrence_rate = characteristic_rate * (
            truncnorm.cdf(mag_range + (self.bin_width / 2.),
                          self.lower_bound, self.upper_bound,
                          loc=self.mmax, scale=self.sigma) -
            truncnorm.cdf(mag_range - (self.bin_width / 2.),
                          self.lower_bound, self.upper_bound,
                          loc=self.mmax, scale=self.sigma))
    else:
        # Degenerate (sigma ~ 0) case: returns only a single rate at mmax.
        self.mmin = self.mmax
        self.occurrence_rate = np.array([characteristic_rate], dtype=float)
    return self.mmin, self.bin_width, self.occurrence_rate
def load_servers_from_env(self, filter=None, dynamic=None):
    '''Load the name servers environment variable and parse each server in
    the list.

    @param filter Restrict the parsed objects to only those in this
                  path. For example, setting filter to [['/', 'localhost',
                  'host.cxt', 'comp1.rtc']] will prevent 'comp2.rtc' in
                  the same naming context from being parsed.
    @param dynamic Override the tree-wide dynamic setting. If not
                   provided, the value given when the tree was created
                   will be used.
    '''
    # Avoid the shared-mutable-default-argument pitfall: the previous
    # default of [] was a single list shared across all calls.
    if filter is None:
        filter = []
    if dynamic is None:
        dynamic = self._dynamic
    if NAMESERVERS_ENV_VAR in os.environ:
        # The variable holds a ';'-separated server list; drop empties.
        servers = [s for s in os.environ[NAMESERVERS_ENV_VAR].split(';') if s]
        self._parse_name_servers(servers, filter, dynamic)
def _gzip_fastq(in_file, out_dir=None):
    """gzip a fastq file if it is not already gzipped, handling conversion
    from bzip to gzipped files"""
    # Non-fastq or remote files are passed through untouched.
    if not fastq.is_fastq(in_file) or objectstore.is_remote(in_file):
        return in_file
    if utils.is_bzipped(in_file):
        # Re-compress bzip2 input as gzip.
        return _bzip_gzip(in_file, out_dir)
    if utils.is_gzipped(in_file):
        return in_file
    if out_dir:
        gzipped_file = os.path.join(out_dir, os.path.basename(in_file) + ".gz")
    else:
        gzipped_file = in_file + ".gz"
    if file_exists(gzipped_file):
        return gzipped_file
    message = "gzipping {in_file} to {gzipped_file}.".format(
        in_file=in_file, gzipped_file=gzipped_file)
    # file_transaction gives us an atomic write target.
    with file_transaction(gzipped_file) as tx_gzipped_file:
        do.run("gzip -c {in_file} > {tx_gzipped_file}".format(**locals()),
               message)
    return gzipped_file
def tem(fEM, off, freq, time, signal, ft, ftarg, conv=True):
    r"""Return the time-domain response of the frequency-domain response fEM.

    This function is called from one of the above modelling routines. No
    input-check is carried out here. See the main description of :mod:`model`
    for information regarding input and output parameters.

    This function can be directly used if you are sure the provided input
    is in the correct format. This is useful for inversion routines and
    similar, as it can speed-up the calculation by omitting input-checks.
    """
    # 1. Scale frequencies if switch-on/off response.
    # A step function for causal times is like a unit fct, therefore an
    # impulse in the frequency domain: divide by signal/(2j*pi*f) to
    # obtain the step response.
    if signal in [-1, 1]:
        scale = signal / (2j * np.pi * freq)
    else:
        scale = 1
    # 2. Frequency -> time transform, applied per offset.
    transform_fn = getattr(transform, ft)
    tEM = np.zeros((time.size, off.size))
    for i in range(off.size):
        values, ok = transform_fn(fEM[:, i] * scale, time, freq, ftarg)[:2]
        tEM[:, i] += values
        conv *= ok  # accumulate convergence flags across all offsets
    return tEM * 2 / np.pi, conv
def resize(self, shape):
    """Resize all attached buffers with the given shape.

    Parameters
    ----------
    shape : tuple of two integers
        New buffer shape (h, w), to be applied to all currently
        attached buffers. For buffers that are a texture, the number
        of color channels is preserved.

    Raises
    ------
    ValueError
        If ``shape`` is not a 2-element tuple.
    """
    # Check
    if not (isinstance(shape, tuple) and len(shape) == 2):
        raise ValueError('RenderBuffer shape must be a 2-element tuple')
    # Resize our buffers
    for buf in (self.color_buffer, self.depth_buffer, self.stencil_buffer):
        if buf is None:
            continue
        shape_ = shape
        if isinstance(buf, Texture2D):
            # BUGFIX: preserve this buffer's own channel count. The old
            # code read self.color_buffer.shape[-1] for every texture,
            # which used the wrong channel count for depth/stencil
            # textures and crashed when color_buffer was None.
            shape_ = shape + (buf.shape[-1],)
        buf.resize(shape_, buf.format)
def _apply_dvportgroup_config(pg_name, pg_spec, pg_conf):
    '''Applies the values in conf to a distributed portgroup spec.

    pg_name
        The name of the portgroup
    pg_spec
        The vim.DVPortgroupConfigSpec to apply the config to
    pg_conf
        The portgroup config
    '''
    log.trace('Building portgroup\'s \'%s\' spec', pg_name)
    # Simple scalar settings copy straight across.
    if 'name' in pg_conf:
        pg_spec.name = pg_conf['name']
    if 'description' in pg_conf:
        pg_spec.description = pg_conf['description']
    if 'num_ports' in pg_conf:
        pg_spec.numPorts = pg_conf['num_ports']
    if 'type' in pg_conf:
        pg_spec.type = pg_conf['type']
    # Create the default port config lazily, only when at least one
    # port-level property is present in the incoming config.
    if not pg_spec.defaultPortConfig:
        port_props = ('vlan_id', 'out_shaping', 'security_policy', 'teaming')
        if any(prop in pg_conf for prop in port_props):
            pg_spec.defaultPortConfig = vim.VMwareDVSPortSetting()
    if 'vlan_id' in pg_conf:
        pg_spec.defaultPortConfig.vlan = \
            vim.VmwareDistributedVirtualSwitchVlanIdSpec()
        pg_spec.defaultPortConfig.vlan.vlanId = pg_conf['vlan_id']
    if 'out_shaping' in pg_conf:
        if not pg_spec.defaultPortConfig.outShapingPolicy:
            pg_spec.defaultPortConfig.outShapingPolicy = \
                vim.DVSTrafficShapingPolicy()
        _apply_dvportgroup_out_shaping(
            pg_name, pg_spec.defaultPortConfig.outShapingPolicy,
            pg_conf['out_shaping'])
    if 'security_policy' in pg_conf:
        if not pg_spec.defaultPortConfig.securityPolicy:
            pg_spec.defaultPortConfig.securityPolicy = vim.DVSSecurityPolicy()
        _apply_dvportgroup_security_policy(
            pg_name, pg_spec.defaultPortConfig.securityPolicy,
            pg_conf['security_policy'])
    if 'teaming' in pg_conf:
        if not pg_spec.defaultPortConfig.uplinkTeamingPolicy:
            pg_spec.defaultPortConfig.uplinkTeamingPolicy = \
                vim.VmwareUplinkPortTeamingPolicy()
        _apply_dvportgroup_teaming(
            pg_name, pg_spec.defaultPortConfig.uplinkTeamingPolicy,
            pg_conf['teaming'])
def compute_tasks(self, **kwargs):
    """Perform checks and build tasks.

    :return: list of tasks
    :rtype: list(kser.sequencing.operation.Operation)
    """
    # _prebuild may refine the kwargs; fall back to them verbatim if it
    # returns nothing.
    params = self._prebuild(**kwargs) or dict(kwargs)
    return self._build_tasks(**params)
async def get_controller(self):
    """Return a Controller instance for the currently connected model.

    :return Controller:
    """
    # Imported here rather than at module level -- presumably to avoid a
    # circular import between model and controller modules; TODO confirm.
    from juju.controller import Controller
    controller = Controller(jujudata=self._connector.jujudata)
    # Reuse this model connection's parameters, minus the model uuid,
    # to connect directly to the controller endpoint.
    kwargs = self.connection().connect_params()
    kwargs.pop('uuid')
    await controller._connect_direct(**kwargs)
    return controller
def plot(parameterized, fignum=None, ax=None, colors=None, figsize=(12, 6)):
    """Plot latent space X in 1D:

    - if fig is given, create input_dim subplots in fig and plot in these
    - if ax is given plot input_dim 1D latent space plots of X into each `axis`
    - if neither fig nor ax is given create a figure with fignum and plot in there

    colors:
        colors of different latent space dimensions input_dim
    """
    if ax is None:
        fig = pb.figure(num=fignum, figsize=figsize)
    if colors is None:
        from ..Tango import mediumList
        from itertools import cycle
        colors = cycle(mediumList)
        pb.clf()
    else:
        colors = iter(colors)
    lines = []
    fills = []
    bg_lines = []
    # NOTE(review): assumes parameterized.mean/.variance expose `.values`
    # arrays of shape (num_points, input_dim) -- confirm against callers.
    means, variances = parameterized.mean.values, parameterized.variance.values
    x = np.arange(means.shape[0])
    for i in range(means.shape[1]):
        if ax is None:
            a = fig.add_subplot(means.shape[1], 1, i + 1)
        elif isinstance(ax, (tuple, list)):
            a = ax[i]
        else:
            raise ValueError("Need one ax per latent dimension input_dim")
        # All dimensions as faint black background lines for context.
        bg_lines.append(a.plot(means, c='k', alpha=.3))
        lines.extend(a.plot(x, means.T[i], c=next(colors),
                            label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
        # Shade +/- 2 standard deviations around the mean.
        fills.append(a.fill_between(
            x,
            means.T[i] - 2 * np.sqrt(variances.T[i]),
            means.T[i] + 2 * np.sqrt(variances.T[i]),
            facecolor=lines[-1].get_color(), alpha=.3))
        a.legend(borderaxespad=0.)
        a.set_xlim(x.min(), x.max())
        # Hide x tick labels on all but the bottom subplot.
        if i < means.shape[1] - 1:
            a.set_xticklabels('')
    pb.draw()
    a.figure.tight_layout(h_pad=.01)  # , rect=(0, 0, 1, .95))
    return dict(lines=lines, fills=fills, bg_lines=bg_lines)
def monitor_session_span_command_src_tengigabitethernet_val(self, **kwargs):
    """Auto Generated Code"""
    # Build the NETCONF config document for the SPAN source interface.
    config = ET.Element("config")
    monitor = ET.SubElement(
        config, "monitor", xmlns="urn:brocade.com:mgmt:brocade-span")
    session = ET.SubElement(monitor, "session")
    ET.SubElement(session, "session-number").text = \
        kwargs.pop('session_number')
    span_command = ET.SubElement(session, "span-command")
    ET.SubElement(span_command, "src-tengigabitethernet-val").text = \
        kwargs.pop('src_tengigabitethernet_val')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_precomposed_chars():
    """Return the set of IPA characters that are defined in normal form C in
    the spec. As of 2015, this is only the voiceless palatal fricative, ç."""
    # A letter is precomposed when NFD decomposition changes it.
    return {letter for letter in chart.consonants
            if unicodedata.normalize('NFD', letter) != letter}
def as_dataframe(self, pattern='*', max_rows=None):
    """Creates a pandas dataframe from the descriptors that match the filters.

    Args:
      pattern: An optional pattern to further filter the descriptors. This
        can include Unix shell-style wildcards. E.g. ``"aws*"``,
        ``"*cluster*"``.
      max_rows: The maximum number of descriptors to return. If None,
        return all.

    Returns:
      A pandas dataframe containing matching resource descriptors.
    """
    rows = []
    for index, resource in enumerate(self.list(pattern)):
        if max_rows is not None and index >= max_rows:
            break
        label_keys = ', '.join(label.key for label in resource.labels)
        rows.append([resource.type, resource.display_name, label_keys])
    return pandas.DataFrame(rows, columns=self._DISPLAY_HEADERS)
def _get_url(self, obj):
    """Gets object url (view name) for ``obj``."""
    try:
        # Prefer a class-provided hook for the URL model name.
        model_name = getattr(obj.__class__, 'get_url_name')()
    except AttributeError:
        # No hook: fall back to the lowercased model name.
        model_name = obj._meta.object_name.lower()
    return self._default_view_name % {
        'app_label': obj._meta.app_label,
        'model_name': model_name,
    }
def shadow_calc(data):
    """Compute the upper/lower shadows (wicks) of a candlestick bar.

    Arguments:
        data {DataStruct.slice} -- a single market-data slice

    Returns:
        up_shadow {float} -- upper shadow
        down_shadow {float} -- lower shadow
        entity {float} -- candle body size
        date {str} -- date
        code {str} -- instrument code
    """
    up_shadow = abs(data.high - (max(data.open, data.close)))
    down_shadow = abs(data.low - (min(data.open, data.close)))
    entity = abs(data.open - data.close)
    # True when the bar closed up (no redundant ternary needed).
    towards = data.open < data.close
    print('=' * 15)
    print('up_shadow : {}'.format(up_shadow))
    print('down_shadow : {}'.format(down_shadow))
    print('entity: {}'.format(entity))
    print('towards : {}'.format(towards))
    return up_shadow, down_shadow, entity, data.date, data.code
def group(self):
    """Returns the periodic table group of the element."""
    z = self.Z
    # Hydrogen and helium are special-cased (He sits in group 18).
    if z == 1:
        return 1
    if z == 2:
        return 18
    if 3 <= z <= 18:
        # Periods 2-3: 8-element rows, groups 1-2 then 13-18.
        rem = (z - 2) % 8
        if rem == 0:
            return 18
        return rem if rem <= 2 else 10 + rem
    if 19 <= z <= 54:
        # Periods 4-5: 18-element rows including the d-block.
        rem = (z - 18) % 18
        return 18 if rem == 0 else rem
    # Periods 6-7: 32-element rows including the f-block.
    rem = (z - 54) % 32
    if rem == 0:
        return 18
    if rem >= 18:
        return rem - 14
    return rem
def put(self):
    """Updates this task type on the saltant server.

    Returns:
        :class:`saltant.models.container_task_type.ContainerTaskType`:
            A task type model instance representing the task type
            just updated.
    """
    # Push every locally-held field up to the server in one call.
    return self.manager.put(
        id=self.id,
        name=self.name,
        description=self.description,
        command_to_run=self.command_to_run,
        environment_variables=self.environment_variables,
        required_arguments=self.required_arguments,
        required_arguments_default_values=(
            self.required_arguments_default_values),
        logs_path=self.logs_path,
        results_path=self.results_path,
        container_image=self.container_image,
        container_type=self.container_type,
    )
def put(self, source, rel_path, metadata=None):
    """Puts to only the first upstream. This is to be symmetric with
    put_stream."""
    first_upstream = self.upstreams[0]
    return first_upstream.put(source, rel_path, metadata)
def cache_connect(database=None):
    """Returns a connection object to a sqlite database.

    Args:
        database (str, optional): The path to the database the user wishes
            to connect to. If not specified, a default is chosen using
            :func:`.cache_file`. If the special database name ':memory:'
            is given, then a temporary database is created in memory.

    Returns:
        :class:`sqlite3.Connection`
    """
    if database is None:
        database = cache_file()
    # An existing file is assumed to already contain the schema; anything
    # else (new file or ':memory:') must be populated first.
    needs_schema = not os.path.isfile(database)
    conn = sqlite3.connect(database)
    if needs_schema:
        conn.executescript(schema)
    with conn as cur:
        # turn on foreign keys, allows deletes to cascade.
        cur.execute("PRAGMA foreign_keys = ON;")
    conn.row_factory = sqlite3.Row
    return conn
def _request(self, **kwargs):
    '''A helper method for processing all request types.

    Returns a (response, error, code) triple; ``response`` is None when
    the request itself failed.
    '''
    response = None
    error = ''
    code = 0
    # send request
    from requests import request
    try:
        response = request(**kwargs)
        # handle response
        if self.handle_response:
            response, error, code = self.handle_response(response)
        else:
            code = response.status_code
    # handle errors
    except Exception as err:
        from requests import Request
        request_object = Request(**kwargs)
        try:
            request_details = self.handle_requests(request_object)
            error = request_details['error']
        except Exception:
            # BUGFIX: was a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt; fall back to the raw error.
            error = str(err)
    return response, error, code
def get_email_context(self, **kwargs):
    '''Overrides EmailRecipientMixin'''
    include_name = kwargs.pop('includeName', True)
    context = super(TemporaryEventRegistration, self).get_email_context(**kwargs)
    # Always expose the event's basic details.
    context.update({
        'title': self.event.name,
        'start': self.event.firstOccurrenceTime,
        'end': self.event.lastOccurrenceTime,
    })
    # Registrant name is optional so templates can omit personal data.
    if include_name:
        context.update({
            'first_name': self.registration.firstName,
            'last_name': self.registration.lastName,
        })
    return context
def find_record(self, domain, record_type, name=None, data=None):
    """Returns a single record for this domain that matches the supplied
    search criteria.

    If no record matches, a DomainRecordNotFound exception will be raised.
    If more than one matches, a DomainRecordNotUnique exception will
    be raised.
    """
    # Delegate straight to the domain object's own lookup.
    return domain.find_record(record_type=record_type, name=name, data=data)
def publish(self, message_type, client_id, client_storage, *args, **kwargs):
    """Publishes a message"""
    # Serialize the message, then push it onto this instance's channel.
    payload = self.pack(message_type, client_id, client_storage, args, kwargs)
    self.client.publish(self.channel, payload)
def make_plot(self):
    """Make the horizon plot.

    Draws one SNR contour per z-value dataset on ``self.axis`` and,
    optionally, a legend built from off-grid proxy lines.
    """
    self.get_contour_values()
    # sets levels of main contour plot
    colors1 = ['blue', 'green', 'red', 'purple', 'orange', 'gold', 'magenta']
    # set contour value. Default is SNR_CUT.
    self.snr_contour_value = (self.SNR_CUT if self.snr_contour_value is None
                              else self.snr_contour_value)
    # plot contours
    for j in range(len(self.zvals)):
        hz = self.axis.contour(self.xvals[j], self.yvals[j], self.zvals[j],
                               np.array([self.snr_contour_value]),
                               colors=colors1[j], linewidths=1.,
                               linestyles='solid')
        # plot invisible lines for purpose of creating a legend
        if self.legend_labels != []:
            # plot a curve off of the grid with same color for legend label.
            self.axis.plot([0.1, 0.2], [0.1, 0.2], color=colors1[j],
                           label=self.legend_labels[j])
    if self.add_legend:
        self.axis.legend(**self.legend_kwargs)
    return
def check(self):
    """Check that this table is complete, that is, every character of this
    table can be followed by a new character.

    :return: True if the table is complete, False otherwise.
    """
    # Complete means: every follower listed anywhere is itself a key of
    # the table. (The old version iterated .items() but never used the
    # key; all() over .values() expresses the same check directly.)
    return all(follower in self
               for followers in self.values()
               for follower in followers)
def run(self, message_cb):
    """Run the event loop to receive messages from Nvim.

    While the event loop is running, `message_cb` will be called whenever
    a message has been successfully parsed from the input stream.
    """
    # Expose the callback to _on_data only for the duration of the loop.
    self._message_cb = message_cb
    self.loop.run(self._on_data)
    self._message_cb = None
def uses_super(func):
    """Check if the function/property/classmethod/staticmethod uses the
    `super` builtin."""
    if isinstance(func, property):
        # A property uses super if any of its defined accessors does.
        accessors = (func.fget, func.fset, func.fdel)
        return any(uses_super(f) for f in accessors if f)
    if isinstance(func, (staticmethod, classmethod)):
        # Unwrap to the underlying function (py2.6 lacks __func__).
        if sys.version_info >= (2, 7):
            func = func.__func__
        elif isinstance(func, staticmethod):
            func = func.__get__(True)
        else:  # classmethod
            func = func.__get__(True).im_func
    code = func.__code__ if sys.version_info[0] >= 3 else func.func_code
    return 'super' in code.co_names
def paginate(self, *, page_size, **options):
    r"""Run this query and return a page iterator.

    Parameters:
      page_size (int): The number of entities to fetch per page.
      \**options(QueryOptions, optional)

    Returns:
      Pages: An iterator for this query's pages of results.
    """
    prepared = self._prepare()
    return Pages(prepared, page_size, QueryOptions(self, **options))
def model_loss(y, model, mean=True):
    """Define loss of TF graph.

    :param y: correct labels
    :param model: output of the model
    :param mean: boolean indicating whether should return mean of loss
                 or vector of losses for each input of the batch
    :return: return mean of loss if True, otherwise return vector with per
             sample loss
    """
    warnings.warn("This function is deprecated and will be removed on or after"
                  " 2019-04-05. Switch to cleverhans.train.train.")
    # Cross-entropy needs logits, not probabilities: if the model output
    # is already a softmax, recover the logits from its producing op.
    op = model.op
    if op.type == "Softmax":
        logits, = op.inputs
    else:
        logits = model
    out = softmax_cross_entropy_with_logits(logits=logits, labels=y)
    return reduce_mean(out) if mean else out
def canorth(S):
    """Canonical orthogonalization U/sqrt(lambda).

    Diagonalize the symmetric matrix S and scale each eigenvector column
    by 1/sqrt(eigenvalue), so the result X satisfies X.T @ S @ X == I.
    """
    E, U = np.linalg.eigh(S)
    # Scale all columns at once via broadcasting instead of a Python loop.
    return U / np.sqrt(E)
def configure_logging(args):
    """Configure logging to the console.

    The level is WARN by default, INFO when ``args.verbose`` is set and
    DEBUG when ``args.debug`` is set (debug wins over verbose).
    """
    log_format = logging.Formatter(
        '%(levelname)s:%(name)s:line %(lineno)s:%(message)s')
    log_level = logging.INFO if args.verbose else logging.WARN
    log_level = logging.DEBUG if args.debug else log_level
    console = logging.StreamHandler()
    console.setFormatter(log_format)
    console.setLevel(log_level)
    root_logger = logging.getLogger()
    # Only attach our handler on the first call; re-runs just update the
    # level and formatter of the existing first handler.
    if len(root_logger.handlers) == 0:
        root_logger.addHandler(console)
    root_logger.setLevel(log_level)
    root_logger.handlers[0].setFormatter(log_format)
    # (Removed a trailing no-op `logging.getLogger(__name__)` whose return
    # value was discarded.)
def _make_wrapper(f):
    """Return a wrapped function carrying a copy of the _pecan context."""
    @wraps(f)
    def inner(*args, **kwargs):
        return f(*args, **kwargs)
    # Give the wrapper its own copy so later mutation doesn't leak back.
    inner._pecan = f._pecan.copy()
    return inner
def save_data(self):
    """Run the full save pipeline: archive, flatten, convert and export.

    ``state`` names the current stage so a failure can be logged with the
    stage in which it occurred.
    """
    state = ""
    try:
        state = "create_json_archive"
        log.info("creating json archive")
        self.create_json_archive()
        state = "building_unique_keys"
        log.info("processing all unique keys")
        self.build_all_keys_dict()
        state = "flattening"
        log.info("flattening all data")
        self.flatten_all()
        state = "converting"
        log.info("converting to df")
        self.convert_to_df()
        state = "saving"
        log.info("saving to df")
        self.save_df_as_csv()
        # Optionally stream predictions to the AntiNex REST core.
        if ANTINEX_PUBLISH_ENABLED:
            log.info(("publishing stream to rest={}").format(ANTINEX_URL))
            self.publish_predictions_to_core()
        # end of if publishing to the core
    except Exception as e:
        # Best-effort pipeline: any failure is logged with its stage
        # rather than propagated.
        log.error(("Failed state={} with ex={} to "
                   "save={}").format(state, e, self.save_to_file))
def __get_attr(what, type_attr, value_attr, **kwargs):
    """Get the value of a parm.

    :param what: string parm name to look up in kwargs
    :param type_attr: type of parm ('int' triggers int conversion)
    :param value_attr: collection of allowed values
    :param kwargs: keyword arguments to search
    :return: the (possibly converted) value when present and allowed,
             otherwise None
    """
    if what not in kwargs:
        return None
    raw = kwargs[what]
    value = int(raw) if type_attr == 'int' else raw
    # Only values from the allowed set are returned.
    return value if value in value_attr else None
def _print_graph(targets, components, tasks):
    """Print dependency information using a dot directed graph.

    The graph will contain explicitly requested targets plus any
    dependencies. If there's a circular dependency, those nodes and their
    dependencies will be colored red.

    Arguments
    targets - the targets explicitly requested
    components - full configuration for all components in a project
    """
    indent = " " * 4
    _do_dot(
        targets, components, tasks,
        lambda resolved, dep_fn: dep_fn(indent, resolved),
    )
def overlay_gateway_sflow_sflow_remote_endpoint(self, **kwargs):
    """Auto Generated Code"""
    # Build the NETCONF config document for the sFlow remote endpoint.
    config = ET.Element("config")
    overlay_gateway = ET.SubElement(
        config, "overlay-gateway",
        xmlns="urn:brocade.com:mgmt:brocade-tunnels")
    ET.SubElement(overlay_gateway, "name").text = kwargs.pop('name')
    sflow = ET.SubElement(overlay_gateway, "sflow")
    ET.SubElement(sflow, "sflow-profile-name").text = \
        kwargs.pop('sflow_profile_name')
    ET.SubElement(sflow, "sflow-remote-endpoint").text = \
        kwargs.pop('sflow_remote_endpoint')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def pposition(hd, details=False):
    """Parse string into angular position.

    A string containing 2 or 6 numbers is parsed, and the numbers are
    converted into decimal numbers. In the former case the numbers are
    assumed to be floats. In the latter case, the numbers are assumed
    to be sexagesimal.

    Parameters
    ----------
    hd : str
        String containing 2 or 6 numbers. The numbers can be separated
        with character or characters other than ".", "-", "+".
    details : bool
        If True, the detailed result from parsing the string is returned.
        Default is False.

    Returns
    -------
    x : (float, float) or dict
        A tuple containing decimal equivalents of the parsed numbers. If
        the string contains 6 numbers then they are assumed to be
        sexagesimal components.

        If ``details`` is True then a dictionary with the following keys
        is returned:

            x: float
                The first number.
            y: float
                The second number.
            numvals: int
                Number of items parsed; 2 or 6.
            raw_x: dict
                The result returned by ``phmsdms`` for the first number.
            raw_y: dict
                The result returned by ``phmsdms`` for the second number.

        It is up to the user to interpret the units of the numbers
        returned.

    Raises
    ------
    ValueError
        The exception is raised if the string cannot be interpreted as a
        sequence of 2 or 6 numbers.

    Examples
    --------
    The position of M100 reported by SIMBAD is
    "12 22 54.899 +15 49 20.57", which parses to approximately
    (12.3819..., 15.8223...).
    """
    # TODO: split two angles based on user entered separator and process
    # each part separately.
    # Split on runs of any character other than a digit, ".", "-", "+".
    # BUGFIX: the quantifier must be "+", not "*". Since Python 3.7
    # re.split() also splits on empty matches, so the old zero-or-more
    # pattern broke the input into individual characters and every call
    # raised ValueError. Empty edge tokens (leading/trailing separators)
    # are filtered out.
    p = [token for token in re.split(r"[^\d\-+.]+", hd) if token]
    if len(p) not in [2, 6]:
        raise ValueError("Input must contain either 2 or 6 numbers.")
    # Two floating point numbers if string has 2 numbers.
    if len(p) == 2:
        x, y = float(p[0]), float(p[1])
        if details:
            numvals = 2
            raw_x = p[0]
            raw_y = p[1]
    # Two sexagesimal numbers if string has 6 numbers.
    else:
        x_p = phmsdms(" ".join(p[:3]))
        x = sexa2deci(x_p['sign'], *x_p['vals'])
        y_p = phmsdms(" ".join(p[3:]))
        y = sexa2deci(y_p['sign'], *y_p['vals'])
        if details:
            raw_x = x_p
            raw_y = y_p
            numvals = 6
    if details:
        return dict(x=x, y=y, numvals=numvals, raw_x=raw_x, raw_y=raw_y)
    return x, y
def start_blocking(self):
    """Start the advertiser in the background, but wait until it is ready."""
    # Reset the readiness flag first so we don't observe a stale signal
    # from a previous run.
    self._cav_started.clear()
    self.start()
    # NOTE(review): assumes the background thread sets _cav_started once
    # advertising is live -- confirm against start().
    self._cav_started.wait()
def submit_commands(self, devices, execution):
    """Submit device command executions.

    Returns: a list of concurrent.futures for scheduled executions.
    """
    futures = []
    for device in devices:
        # Only commands addressed to this device are dispatched.
        if device[key_id_] != self.device_id:
            logging.warning('Ignoring command for unknown device: %s'
                            % device[key_id_])
            continue
        if not execution:
            logging.warning('Ignoring noop execution')
            continue
        futures.extend(
            self.executor.submit(self.dispatch_command, **command)
            for command in execution)
    return futures
def _handle_autologin(self, event):
    """Automatic logins for client configurations that allow it.

    Fails the event unless the requesting client uuid maps to a client
    config with autologin enabled, owned by an existing, active user.
    """
    self.log("Verifying automatic login request")
    # TODO: Check for a common secret
    # Look up the client configuration for the requesting client uuid;
    # any lookup failure is treated the same as "not found".
    # noinspection PyBroadException
    try:
        client_config = objectmodels['client'].find_one(
            {'uuid': event.requestedclientuuid})
    except Exception:
        client_config = None
    # Autologin must be explicitly enabled on the client configuration.
    if client_config is None or client_config.autologin is False:
        self.log("Autologin failed:", event.requestedclientuuid, lvl=error)
        self._fail(event)
        return
    try:
        user_account = objectmodels['user'].find_one(
            {'uuid': client_config.owner})
        if user_account is None:
            raise AuthenticationError
        self.log("Autologin for", user_account.name, lvl=debug)
    except Exception as e:
        self.log("No user object due to error: ", e, type(e), lvl=error)
        self._fail(event)
        return
    # Deactivated accounts may never auto-login.
    if user_account.active is False:
        self.log("Account deactivated.")
        self._fail(event, 'Account deactivated.')
        return
    user_profile = self._get_profile(user_account)
    self._login(event, user_account, user_profile, client_config)
    self.log("Autologin successful!", lvl=warn)
def main ( ) :
"""NAME
k15 _ s . py
DESCRIPTION
converts . k15 format data to . s format .
assumes Jelinek Kappabridge measurement scheme
SYNTAX
k15 _ s . py [ - h ] [ - i ] [ command line options ] [ < filename ]
OPTIONS
- h prints help message and quits
- i allows interactive entry of options
- f FILE , specifies input file , default : standard input
- F FILE , specifies output file , default : standard output
- crd [ g , t ] specifies [ g ] eographic rotation ,
or geographic AND tectonic rotation
INPUT
name [ az , pl , strike , dip ] , followed by
3 rows of 5 measurements for each specimen
OUTPUT
least squares matrix elements and sigma :
x11 , x22 , x33 , x12 , x23 , x13 , sigma""" | firstline , itilt , igeo , linecnt , key = 1 , 0 , 0 , 0 , ""
# State : firstline flags the per - specimen header row ; igeo / itilt select
# geographic / tectonic rotation ; linecnt counts measurement rows collected .
out = ""
data , k15 = [ ] , [ ]
dir = './'
ofile = ""
if '-WD' in sys . argv :
ind = sys . argv . index ( '-WD' )
dir = sys . argv [ ind + 1 ] + '/'
if '-h' in sys . argv :
print ( main . __doc__ )
sys . exit ( )
# Interactive mode : prompt for files and coordinate system .
if '-i' in sys . argv :
file = input ( "Input file name [.k15 format]: " )
f = open ( file , 'r' )
data = f . readlines ( )
f . close ( )
file = input ( "Output file name [.s format]: " )
out = open ( file , 'w' )
print ( " [g]eographic, [t]ilt corrected, " )
tg = input ( " [return for specimen coordinates]: " )
if tg == 'g' :
igeo = 1
elif tg == 't' :
igeo , itilt = 1 , 1
elif '-f' in sys . argv :
ind = sys . argv . index ( '-f' )
file = dir + sys . argv [ ind + 1 ]
f = open ( file , 'r' )
data = f . readlines ( )
f . close ( )
else :
# No input file given : read the . k15 records from standard input .
data = sys . stdin . readlines ( )
if len ( data ) == 0 :
print ( main . __doc__ )
sys . exit ( )
if '-F' in sys . argv :
ind = sys . argv . index ( '-F' )
ofile = dir + sys . argv [ ind + 1 ]
out = open ( ofile , 'w' )
if '-crd' in sys . argv :
ind = sys . argv . index ( '-crd' )
tg = sys . argv [ ind + 1 ]
if tg == 'g' :
igeo = 1
if tg == 't' :
igeo , itilt = 1 , 1
# Parse records : a header line ( name [ az , pl , strike , dip ] ) followed by
# three rows of five measurements per specimen .
for line in data :
rec = line . split ( )
if firstline == 1 :
firstline = 0
nam = rec [ 0 ]
if igeo == 1 :
az , pl = float ( rec [ 1 ] ) , float ( rec [ 2 ] )
if itilt == 1 :
bed_az , bed_dip = 90. + float ( rec [ 3 ] ) , float ( rec [ 4 ] )
else :
linecnt += 1
for i in range ( 5 ) :
k15 . append ( float ( rec [ i ] ) )
if linecnt == 3 :
# All 15 measurements collected : compute the least - squares tensor ,
# apply the requested rotations and emit one output line .
sbar , sigma , bulk = pmag . dok15_s ( k15 )
if igeo == 1 :
sbar = pmag . dosgeo ( sbar , az , pl )
if itilt == 1 :
sbar = pmag . dostilt ( sbar , bed_az , bed_dip )
outstring = ""
for s in sbar :
outstring += '%10.8f ' % ( s )
outstring += '%10.8f' % ( sigma )
if out == "" :
print ( outstring )
else :
out . write ( outstring + '\n' )
linecnt , firstline , k15 = 0 , 1 , [ ]
if ofile != "" :
print ( 'Output saved in ' , ofile ) |
def clean(self, value):
    """Return *value* as a string, or raise ParameterNotValidError.

    Strings pass through unchanged; numbers are stringified; any other
    type is rejected.
    """
    if isinstance(value, six.string_types):
        return value
    if isinstance(value, numbers.Number):
        return str(value)
    raise ParameterNotValidError
def when ( self ) :
"""A string describing when the event occurs ( in the local time zone ) .""" | offset = 0
timeFrom = dateFrom = timeTo = dateTo = None
fromDt = self . _getFromDt ( )
if fromDt is not None :
# Day shift between local time and the event ' s own date ; passed to
# _getWhen so the repeat description names the correct day .
offset = timezone . localtime ( fromDt ) . toordinal ( ) - fromDt . toordinal ( )
dateFrom , timeFrom = getLocalDateAndTime ( fromDt . date ( ) , self . time_from , self . tz , dt . time . min )
daysDelta = dt . timedelta ( days = self . num_days - 1 )
dateTo , timeTo = getLocalDateAndTime ( fromDt . date ( ) + daysDelta , self . time_to , self . tz )
if dateFrom == dateTo :
# Starts and finishes on the same local day : " <repeat> at <time> " .
retval = _ ( "{repeat} {atTime}" ) . format ( repeat = self . repeat . _getWhen ( offset ) , atTime = timeFormat ( timeFrom , timeTo , gettext ( "at " ) ) )
else :
# Spans several local days : " <repeat> starting at ... finishing at ... " .
localNumDays = ( dateTo - dateFrom ) . days + 1
retval = _ ( "{repeat} {startFinishTime}" ) . format ( repeat = self . repeat . _getWhen ( offset , localNumDays ) , startFinishTime = timeFormat ( timeFrom , timeTo , prefix = gettext ( "starting at " ) , infix = gettext ( "finishing at" ) ) )
return retval . strip ( ) |
def path_to_str(path):
    """Convert pathlib.Path objects to str; return other objects as-is."""
    try:
        from pathlib import Path as _Path
    except ImportError:  # Python < 3.4: no pathlib, so nothing can match
        class _Path:
            pass
    return str(path) if isinstance(path, _Path) else path
def build(self, builder):
    """Build the Annotation XML element by appending to *builder*.

    Emits ``<Annotation>`` with optional ``TransactionType`` and ``ID``
    attributes and a mandatory ``SeqNum``, then the flag elements and the
    optional comment.

    Raises:
        ValueError: if ``seqnum`` is unset or ``flags`` is empty/None.
            Validation happens *before* ``builder.start`` so a failure can
            never leave a half-written <Annotation> element in the builder
            (the original raised after opening the element).
    """
    if self.seqnum is None:  # SeqNum is not optional (and defaulted)
        raise ValueError("SeqNum is not set.")
    if self.flags in (None, []):
        raise ValueError('Flag is not set.')
    params = {}
    if self.transaction_type is not None:
        params["TransactionType"] = self.transaction_type
    params["SeqNum"] = str(self.seqnum)
    if self.annotation_id is not None:
        # If an Annotation is contained within an Annotations element,
        # the ID attribute is required.
        params["ID"] = self.annotation_id
    builder.start("Annotation", params)
    # Populate the flags.
    for flag in self.flags:
        flag.build(builder)
    # Add the Comment, if it exists.
    if self.comment is not None:
        self.comment.build(builder)
    builder.end("Annotation")
def refresh ( self , scope_list = None ) :
"""Update the auth data ( tokens ) using the refresh token .

Posts the refresh - token request built by ``get_refresh_token_params``,
raises APIException on any non - 200 response , otherwise stores the new
token via ``update_token`` and returns the parsed JSON response .""" | request_data = self . get_refresh_token_params ( scope_list )
res = self . _session . post ( ** request_data )
if res . status_code != 200 :
raise APIException ( request_data [ 'url' ] , res . status_code , response = res . content , request_param = request_data , response_header = res . headers )
json_res = res . json ( )
self . update_token ( json_res )
return json_res |
def get_port_channel_detail_input_last_aggregator_id(self, **kwargs):
    """Auto Generated Code

    Builds <get_port_channel_detail><input><last-aggregator-id> with the
    supplied ``last_aggregator_id`` text and passes the element tree to the
    callback (``callback`` kwarg overrides ``self._callback``).
    """
    config = ET.Element("get_port_channel_detail")
    input_el = ET.SubElement(config, "input")
    aggregator_el = ET.SubElement(input_el, "last-aggregator-id")
    aggregator_el.text = kwargs.pop('last_aggregator_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def build_package_data ( self ) :
"""Copy package data files into the build directory .

Mirrors each ( package , src_dir , build_dir , filenames ) entry of
``self.data_files``, creating target directories as needed . Files
registered as 2to3 doctests are remembered for later conversion .""" | for package , src_dir , build_dir , filenames in self . data_files :
for filename in filenames :
target = os . path . join ( build_dir , filename )
self . mkpath ( os . path . dirname ( target ) )
srcfile = os . path . join ( src_dir , filename )
outf , copied = self . copy_file ( srcfile , target )
srcfile = os . path . abspath ( srcfile )
# Only freshly - copied files registered as doctests need 2to3 conversion .
if ( copied and srcfile in self . distribution . convert_2to3_doctests ) :
self . __doctests_2to3 . append ( outf ) |
def load_json_file(i):
    """Load a dict from a JSON file.

    Input:  { 'json_file' - name of file with json }
    Output: { return - 0 if successful,
                       16 if file not found (may be warning),
                       >0 on any other error
              (error) - error text if return > 0
              dict    - dict parsed from the json file }
    """
    fn = i['json_file']
    try:
        if sys.version_info[0] > 2:
            f = open(fn, 'r', encoding='utf8')
        else:
            f = open(fn, 'r')
    except Exception as e:
        return {'return': 16, 'error': 'problem opening json file=' + fn + ' (' + format(e) + ')'}
    try:
        # Context manager guarantees the handle is closed on every path
        # (the original leaked it if read() raised).
        with f:
            s = f.read()
    except Exception as e:
        return {'return': 1, 'error': 'problem reading json file=' + fn + ' (' + format(e) + ')'}
    try:
        if sys.version_info[0] > 2:
            d = json.loads(s)
        else:
            d = json.loads(s, encoding='utf8')
    except Exception as e:
        return {'return': 1, 'error': 'problem parsing json from file=' + fn + ' (' + format(e) + ')'}
    return {'return': 0, 'dict': d}
def _combine_ranges ( ranges ) :
"""This function takes a list of row - ranges ( as returned by ` _ parse _ row ` )
ordered by rows , and produces a list of distinct rectangular ranges
within this grid .
Within this function we define a 2d - range as a rectangular set of cells
such that :
- there are no empty rows / columns within this rectangle ;
- the rectangle is surrounded by empty rows / columns on all sides ;
- no subset of this rectangle comprises a valid 2d - range ;
- separate 2d - ranges are allowed to touch at a corner .""" | ranges2d = [ ]
# Each entry of ranges2d is a mutable [ row0 , row1 , col0 , col1 ] rectangle ;
# ja indexes the active rectangle , jb the current row - range being merged .
for irow , rowranges in enumerate ( ranges ) :
ja = 0
jb = 0
while jb < len ( rowranges ) :
bcol0 , bcol1 = rowranges [ jb ]
if ja < len ( ranges2d ) :
_ , arow1 , acol0 , acol1 = ranges2d [ ja ]
if arow1 < irow :
# Rectangle ended before this row : it can no longer grow .
ja += 1
continue
assert arow1 == irow or arow1 == irow + 1
else :
# Sentinel : no rectangle remains to the right .
acol0 = acol1 = 1000000000
if bcol0 == acol0 and bcol1 == acol1 :
# Exact column match : extend the rectangle down by one row .
ranges2d [ ja ] [ 1 ] = irow + 1
ja += 1
jb += 1
elif bcol1 <= acol0 :
# Row - range lies fully left of the rectangle : start a new one .
ranges2d . insert ( ja , [ irow , irow + 1 , bcol0 , bcol1 ] )
ja += 1
jb += 1
elif bcol0 >= acol1 :
ja += 1
else :
# Partial overlap : widen the rectangle and merge any neighbours
# it now touches ( via _collapse_ranges ) .
assert ja < len ( ranges2d )
ranges2d [ ja ] [ 1 ] = irow + 1
if bcol0 < acol0 :
ranges2d [ ja ] [ 2 ] = bcol0
if bcol1 > acol1 :
ranges2d [ ja ] [ 3 ] = acol1 = bcol1
ja = _collapse_ranges ( ranges2d , ja )
jb += 1
return ranges2d |
def command ( self ) :
"""Returns a string representing the command you have to type to obtain the same packet .""" | f = [ ]
# Render each explicitly - set field as name = value , recursing into
# nested packets and lists of packets .
for fn , fv in self . fields . items ( ) :
fld = self . get_field ( fn )
if isinstance ( fv , Packet ) :
fv = fv . command ( )
elif fld . islist and fld . holds_packets and type ( fv ) is list : # fv = " [ % s ] " % " , " . join ( map ( Packet . command , fv ) )
fv = "[%s]" % "," . join ( [ Packet . command ( i ) for i in fv ] )
else :
fv = repr ( fv )
f . append ( "%s=%s" % ( fn , fv ) )
c = "%s(%s)" % ( self . __class__ . __name__ , ", " . join ( f ) )
# Chain the payload ' s command with " / " , mirroring layer stacking syntax .
pc = self . payload . command ( )
if pc :
c += "/" + pc
return c |
def _parse_team_data ( self , team_data ) :
"""Parses a value for every attribute .
This function looks through every attribute with the exception of
' _ rank ' and retrieves the value according to the parsing scheme and
index of the attribute from the passed HTML data . Once the value is
retrieved , the attribute ' s value is updated with the returned result .
Note that this method is called directly once Team is invoked and does
not need to be called manually .
Parameters
team _ data : string
A string containing all of the rows of stats for a given team . If
multiple tables are being referenced , this will be comprised of
multiple rows in a single string .""" | for field in self . __dict__ : # The short field truncates the leading ' _ ' in the attribute name .
short_field = str ( field ) [ 1 : ]
# The rank attribute is passed directly to the class during
# instantiation .
if field == '_rank' or field == '_year' :
continue
elif field == '_name' :
# Name needs custom extraction rather than the generic scheme .
self . _parse_name ( team_data )
continue
# Default to returning the first element returned unless a
# subsequent element is desired . For example , total runs and
# runs per game are two different fields , but they both share
# the same attribute of ' R ' in the HTML tables .
index = 0
if short_field in ELEMENT_INDEX . keys ( ) :
index = ELEMENT_INDEX [ short_field ]
value = utils . _parse_field ( PARSING_SCHEME , team_data , short_field , index )
setattr ( self , field , value ) |
def extend_substation ( grid , critical_stations , grid_level ) :
"""Reinforce MV or LV substation by exchanging the existing trafo and
installing a parallel one if necessary .
First , all available transformers in a ` critical _ stations ` are extended to
maximum power . If this does not solve all present issues , additional
transformers are build .
Parameters
grid : GridDing0
Ding0 grid container
critical _ stations : : any : ` list `
List of stations with overloading
grid _ level : str
Either " LV " or " MV " . Basis to select right equipment .
Notes
Curently straight forward implemented for LV stations
Returns
type
# TODO : Description of return . Change type in the previous line accordingly""" | load_factor_lv_trans_lc_normal = cfg_ding0 . get ( 'assumptions' , 'load_factor_lv_trans_lc_normal' )
load_factor_lv_trans_fc_normal = cfg_ding0 . get ( 'assumptions' , 'load_factor_lv_trans_fc_normal' )
trafo_params = grid . network . _static_data [ '{grid_level}_trafos' . format ( grid_level = grid_level ) ]
# Largest transformer rating available in the equipment catalogue .
trafo_s_max_max = max ( trafo_params [ 'S_nom' ] )
for station in critical_stations : # determine if load or generation case and apply load factor
if station [ 's_max' ] [ 0 ] > station [ 's_max' ] [ 1 ] :
case = 'load'
lf_lv_trans_normal = load_factor_lv_trans_lc_normal
else :
case = 'gen'
lf_lv_trans_normal = load_factor_lv_trans_fc_normal
# cumulative maximum power of transformers installed
s_max_trafos = sum ( [ _ . s_max_a for _ in station [ 'station' ] . _transformers ] )
# determine missing trafo power to solve overloading issue
s_trafo_missing = max ( station [ 's_max' ] ) - ( s_max_trafos * lf_lv_trans_normal )
# list of trafos with rated apparent power below ` trafo _ s _ max _ max `
extendable_trafos = [ _ for _ in station [ 'station' ] . _transformers if _ . s_max_a < trafo_s_max_max ]
# try to extend power of existing trafos
while ( s_trafo_missing > 0 ) and extendable_trafos : # only work with first of potentially multiple trafos
trafo = extendable_trafos [ 0 ]
trafo_s_max_a_before = trafo . s_max_a
# extend power of first trafo to next higher size available
extend_trafo_power ( extendable_trafos , trafo_params )
# diminish missing trafo power by extended trafo power and update
# extendable trafos list
s_trafo_missing -= ( ( trafo . s_max_a * lf_lv_trans_normal ) - trafo_s_max_a_before )
extendable_trafos = [ _ for _ in station [ 'station' ] . _transformers if _ . s_max_a < trafo_s_max_max ]
# build new trafos inside station until
if s_trafo_missing > 0 :
trafo_type , trafo_cnt = select_transformers ( grid , s_max = { 's_max' : s_trafo_missing , 'case' : case } )
# create transformers and add them to station of LVGD
for t in range ( 0 , trafo_cnt ) :
# NOTE ( review ) : ` id_db = id ` passes the builtin ` id ` function ,
# not a database id -- looks like a bug ; verify intended value .
lv_transformer = TransformerDing0 ( grid = grid , id_db = id , v_level = 0.4 , s_max_longterm = trafo_type [ 'S_nom' ] , r = trafo_type [ 'R' ] , x = trafo_type [ 'X' ] )
# add each transformer to its station
grid . _station . add_transformer ( lv_transformer )
logger . info ( "{stations_cnt} have been reinforced due to overloading " "issues." . format ( stations_cnt = len ( critical_stations ) ) ) |
def get_binary_dist ( self , requirement ) :
"""Get or create a cached binary distribution archive .
: param requirement : A : class : ` . Requirement ` object .
: returns : An iterable of tuples with two values each : A
: class : ` tarfile . TarInfo ` object and a file - like object .
Gets the cached binary distribution that was previously built for the
given requirement . If no binary distribution has been cached yet , a new
binary distribution is built and added to the cache .
Uses : func : ` build _ binary _ dist ( ) ` to build binary distribution
archives . If this fails with a build error : func : ` get _ binary _ dist ( ) `
will use : class : ` . SystemPackageManager ` to check for and install
missing system packages and retry the build when missing system
packages were installed .""" | cache_file = self . cache . get ( requirement )
if cache_file :
if self . needs_invalidation ( requirement , cache_file ) :
logger . info ( "Invalidating old %s binary (source has changed) .." , requirement )
cache_file = None
else :
logger . debug ( "%s hasn't been cached yet, doing so now." , requirement )
if not cache_file : # Build the binary distribution .
try :
raw_file = self . build_binary_dist ( requirement )
except BuildFailed :
# One retry after installing missing system packages ; re - raise
# if nothing new was installed .
logger . warning ( "Build of %s failed, checking for missing dependencies .." , requirement )
if self . system_package_manager . install_dependencies ( requirement ) :
raw_file = self . build_binary_dist ( requirement )
else :
raise
# Transform the binary distribution archive into a form that we can re - use .
fd , transformed_file = tempfile . mkstemp ( prefix = 'pip-accel-bdist-' , suffix = '.tar.gz' )
try :
archive = tarfile . open ( transformed_file , 'w:gz' )
try :
for member , from_handle in self . transform_binary_dist ( raw_file ) :
archive . addfile ( member , from_handle )
finally :
archive . close ( )
# Push the binary distribution archive to all available backends .
with open ( transformed_file , 'rb' ) as handle :
self . cache . put ( requirement , handle )
finally : # Close file descriptor before removing the temporary file .
# Without closing Windows is complaining that the file cannot
# be removed because it is used by another process .
os . close ( fd )
# Cleanup the temporary file .
os . remove ( transformed_file )
# Get the absolute pathname of the file in the local cache .
cache_file = self . cache . get ( requirement )
# Enable checksum based cache invalidation .
self . persist_checksum ( requirement , cache_file )
# Stream the members of the cached archive back to the caller .
archive = tarfile . open ( cache_file , 'r:gz' )
try :
for member in archive . getmembers ( ) :
yield member , archive . extractfile ( member . name )
finally :
archive . close ( ) |
def _get_indices ( self , element , labels = 'all' , mode = 'or' ) :
r"""This is the actual method for getting indices , but should not be called
directly . Use ` ` pores ` ` or ` ` throats ` ` instead .

Combines the boolean label arrays named in ` ` labels ` ` according to
` ` mode ` ` ( ' or ' / ' and ' / ' xor ' / ' nor ' / ' nand ' / ' xnor ' and synonyms ) and
returns the matching pore or throat indices as an integer array .""" | # Parse and validate all input values .
element = self . _parse_element ( element , single = True )
labels = self . _parse_labels ( labels = labels , element = element )
if element + '.all' not in self . keys ( ) :
raise Exception ( 'Cannot proceed without {}.all' . format ( element ) )
# Begin computing label array
if mode in [ 'or' , 'any' , 'union' ] :
# True where at least one label applies .
union = sp . zeros_like ( self [ element + '.all' ] , dtype = bool )
for item in labels : # Iterate over labels and collect all indices
union = union + self [ element + '.' + item . split ( '.' ) [ - 1 ] ]
ind = union
elif mode in [ 'and' , 'all' , 'intersection' ] :
# True where every label applies .
intersect = sp . ones_like ( self [ element + '.all' ] , dtype = bool )
for item in labels : # Iterate over labels and collect all indices
intersect = intersect * self [ element + '.' + item . split ( '.' ) [ - 1 ] ]
ind = intersect
elif mode in [ 'xor' , 'exclusive_or' ] :
# True where exactly one label applies .
xor = sp . zeros_like ( self [ element + '.all' ] , dtype = int )
for item in labels : # Iterate over labels and collect all indices
info = self [ element + '.' + item . split ( '.' ) [ - 1 ] ]
xor = xor + sp . int8 ( info )
ind = ( xor == 1 )
elif mode in [ 'nor' , 'not' , 'none' ] :
# True where no label applies .
nor = sp . zeros_like ( self [ element + '.all' ] , dtype = int )
for item in labels : # Iterate over labels and collect all indices
info = self [ element + '.' + item . split ( '.' ) [ - 1 ] ]
nor = nor + sp . int8 ( info )
ind = ( nor == 0 )
elif mode in [ 'nand' ] :
# True where some , but not all , labels apply .
nand = sp . zeros_like ( self [ element + '.all' ] , dtype = int )
for item in labels : # Iterate over labels and collect all indices
info = self [ element + '.' + item . split ( '.' ) [ - 1 ] ]
nand = nand + sp . int8 ( info )
ind = ( nand < len ( labels ) ) * ( nand > 0 )
elif mode in [ 'xnor' , 'nxor' ] :
# True where more than one label applies .
xnor = sp . zeros_like ( self [ element + '.all' ] , dtype = int )
for item in labels : # Iterate over labels and collect all indices
info = self [ element + '.' + item . split ( '.' ) [ - 1 ] ]
xnor = xnor + sp . int8 ( info )
ind = ( xnor > 1 )
else :
raise Exception ( 'Unsupported mode: ' + mode )
# Extract indices from boolean mask
ind = sp . where ( ind ) [ 0 ]
ind = ind . astype ( dtype = int )
return ind |
def update_active_breakpoint_flag(cls):
    """Recompute ``cls.any_active_breakpoint``.

    Scans ``cls.breakpoints_by_number`` (which may contain ``None`` holes)
    and records whether at least one breakpoint is currently enabled.
    """
    # Generator instead of a list: no intermediate allocation, and any()
    # short-circuits on the first enabled breakpoint.
    cls.any_active_breakpoint = any(bp.enabled for bp in cls.breakpoints_by_number if bp)
def setCurrentNetwork ( self , body , verbose = None ) :
"""Sets the current network via a PUT to ` ` networks / currentNetwork ` ` .
: param body : SUID of the Network - - Not required , can be None
: param verbose : print more
: returns : 200 : successful operation""" | response = api ( url = self . ___url + 'networks/currentNetwork' , method = "PUT" , body = body , verbose = verbose )
return response |
def check_inclusions(item, included=(), excluded=()):
    """Return True if *item* passes the inclusion/exclusion filters.

    Everything passes when both filters are empty. With a non-empty
    ``included``, only its members pass (``excluded`` is then ignored,
    matching the original semantics). Otherwise an item passes unless it
    appears in ``excluded``.

    Note: the original used mutable list defaults (``included=[]``); the
    immutable tuple defaults are behaviorally equivalent for membership
    tests and avoid the shared-mutable-default pitfall.
    """
    if included:
        return item in included
    return not excluded or item not in excluded
def detect_type ( err , install_rdf = None , xpi_package = None ) :
"""Determines the type of add - on being validated based on
install . rdf , file extension , and other properties .

Returns one of the PACKAGE_* constants , or None when the type cannot
be determined ; findings are reported through ` ` err ` ` .""" | # The types in the install . rdf don ' t pair up 1:1 with the type
# system that we ' re using for expectations and the like . This is
# to help translate between the two .
translated_types = { '2' : PACKAGE_EXTENSION , '4' : PACKAGE_THEME , '8' : PACKAGE_LANGPACK , '32' : PACKAGE_MULTI , '64' : PACKAGE_DICTIONARY , # New " experiment " types : see bug 1220097 and
# https : / / github . com / mozilla / addons - server / issues / 3315
'128' : PACKAGE_EXTENSION , '256' : PACKAGE_EXTENSION , }
# If we ' re missing our install . rdf file , we can try to make some
# assumptions .
if install_rdf is None :
types = { 'xpi' : PACKAGE_DICTIONARY }
err . notice ( ( 'typedetection' , 'detect_type' , 'missing_install_rdf' ) , 'install.rdf was not found.' , 'The type should be determined by install.rdf if present. ' "If it isn't, we still need to know the type." )
# If we know what the file type might be , return it .
if xpi_package . extension in types :
return types [ xpi_package . extension ]
# Otherwise , we ' re out of luck : (
else :
return None
# Attempt to locate the < em : type > node in the RDF doc .
type_uri = install_rdf . uri ( 'type' )
type_ = install_rdf . get_object ( None , type_uri )
# Dictionaries are weird too , they might not have the obligatory
# em : type . We can assume that if they have a / dictionaries / folder ,
# they are a dictionary because even if they aren ' t , dictionaries
# have an extraordinarily strict set of rules and file filters that
# must be passed . It ' s so crazy secure that it ' s cool if we use it
# as kind of a fallback .
if any ( file_ for file_ in xpi_package if file_ . startswith ( 'dictionaries/' ) ) :
if type_ != '64' :
err . error ( ( 'typedetection' , 'dictionary_valid_type' , 'invalid_em_type' ) , 'Invalid <em:type> value.' , 'The package appears to be a dictionary but does not have ' 'the correct <em:type> set in the install manifest.' )
return PACKAGE_DICTIONARY
if type_ is not None :
if type_ in translated_types :
err . save_resource ( 'is_multipackage' , type_ == '32' , pushable = True )
# Make sure we translate back to the normalized version
return translated_types [ type_ ]
else :
err . error ( ( 'typedetection' , 'detect_type' , 'invalid_em_type' ) , 'Invalid <em:type> value.' , 'The only valid values for <em:type> are 2, 4, 8, and ' '32. Any other values are either invalid or deprecated.' , 'install.rdf' )
return
else :
err . notice ( err_id = ( 'typedetection' , 'detect_type' , 'no_em:type' ) , notice = 'No <em:type> element found in install.rdf' , description = "It isn't always required, but it is the most reliable " 'method for determining add-on type.' , filename = 'install.rdf' )
# There ' s no type element , so the spec says that it ' s either a
# theme or an extension . At this point , we know that it isn ' t
# a dictionary , language pack , or multiple extension pack .
extensions = { 'jar' : '4' , 'xpi' : '2' }
# If the package ' s extension is listed in the [ tiny ] extension
# dictionary , then just return that . We ' ll validate against that
# add - on type ' s layout later . Better to false positive than to false
# negative .
if xpi_package . extension in extensions : # Make sure it gets translated back to the normalized version
install_rdf_type = extensions [ xpi_package . extension ]
return translated_types [ install_rdf_type ] |
def starttls ( self , ssl_context = None , post_handshake_callback = None ) :
"""Start a TLS stream on top of the socket . This is an invalid operation
if the stream is not in RAW _ OPEN state .
If ` ssl _ context ` is set , it overrides the ` ssl _ context ` passed to the
constructor . If ` post _ handshake _ callback ` is set , it overrides the
` post _ handshake _ callback ` passed to the constructor .
. . versionchanged : : 0.4
This method is now a barrier with respect to reads and writes :
before the handshake is completed ( including the post handshake
callback , if any ) , no data is received or sent .""" | if self . _state != _State . RAW_OPEN or self . _closing :
raise self . _invalid_state ( "starttls() called" )
if ssl_context is not None :
self . _ssl_context = ssl_context
self . _extra . update ( sslcontext = ssl_context )
else :
self . _ssl_context = self . _ssl_context_factory ( self )
if post_handshake_callback is not None :
self . _tls_post_handshake_callback = post_handshake_callback
# The waiter future is the read / write barrier : it resolves when the
# handshake ( and post - handshake callback , if any ) has completed .
self . _waiter = asyncio . Future ( )
self . _waiter . add_done_callback ( self . _waiter_done )
self . _initiate_tls ( )
try :
yield from self . _waiter
finally :
self . _waiter = None |
def draw_variables(self):
    """Draw simulated values from the approximating distributions.

    Stacks one row of ``self.sims`` draws per distribution in ``self.q``;
    with a single distribution, its draw is returned unmodified.
    """
    draws = [dist.draw_variable_local(self.sims) for dist in self.q]
    if len(draws) == 1:
        return draws[0]
    return np.vstack(draws)
def plot_correlation ( self , on , x_col = None , plot_type = "jointplot" , stat_func = pearsonr , show_stat_func = True , plot_kwargs = { } , ** kwargs ) :
"""Plot the correlation between two variables .
Parameters
on : list or dict of functions or strings
See ` cohort . load . as _ dataframe `
x _ col : str , optional
If ` on ` is a dict , this guarantees we have the expected ordering .
plot _ type : str , optional
Specify " jointplot " , " regplot " , " boxplot " , or " barplot " .
stat _ func : function , optional .
Specify which function to use for the statistical test .
show _ stat _ func : bool , optional
Whether or not to show the stat _ func result in the plot itself .
plot _ kwargs : dict , optional
kwargs to pass through to plotting functions .

Returns a CorrelationResults with the test statistic , p - value , the two
series , and the produced plot .""" | if plot_type not in [ "boxplot" , "barplot" , "jointplot" , "regplot" ] :
raise ValueError ( "Invalid plot_type %s" % plot_type )
plot_cols , df = self . as_dataframe ( on , return_cols = True , ** kwargs )
if len ( plot_cols ) != 2 :
raise ValueError ( "Must be comparing two columns, but there are %d columns" % len ( plot_cols ) )
# Drop rows where either variable is missing before correlating .
for plot_col in plot_cols :
df = filter_not_null ( df , plot_col )
# Resolve which column goes on which axis .
if x_col is None :
x_col = plot_cols [ 0 ]
y_col = plot_cols [ 1 ]
else :
if x_col == plot_cols [ 0 ] :
y_col = plot_cols [ 1 ]
else :
y_col = plot_cols [ 0 ]
series_x = df [ x_col ]
series_y = df [ y_col ]
coeff , p_value = stat_func ( series_x , series_y )
if plot_type == "jointplot" :
plot = sb . jointplot ( data = df , x = x_col , y = y_col , stat_func = stat_func if show_stat_func else None , ** plot_kwargs )
elif plot_type == "regplot" :
plot = sb . regplot ( data = df , x = x_col , y = y_col , ** plot_kwargs )
elif plot_type == "boxplot" :
plot = stripboxplot ( data = df , x = x_col , y = y_col , ** plot_kwargs )
else :
plot = sb . barplot ( data = df , x = x_col , y = y_col , ** plot_kwargs )
return CorrelationResults ( coeff = coeff , p_value = p_value , stat_func = stat_func , series_x = series_x , series_y = series_y , plot = plot ) |
def create_variable_is_dict(self):
    """Append code that defines ``{variable}_is_dict``, a bool recording
    whether the variable is a dict. Similar to ``create_variable_with_length``.

    Emits the line at most once per variable: the generated name is tracked
    in ``self._variables``.
    """
    marker = '{}_is_dict'.format(self._variable)
    if marker not in self._variables:
        self._variables.add(marker)
        self.l('{variable}_is_dict = isinstance({variable}, dict)')
async def install_mediaroom_protocol ( responses_callback , box_ip = None ) :
"""Install an asyncio protocol to process NOTIFY messages .

Creates a MediaroomProtocol bound to a datagram endpoint on the socket
returned by ` ` create _ socket ( ) ` ` and returns the protocol instance .""" | from . import version
_LOGGER . debug ( version )
loop = asyncio . get_event_loop ( )
mediaroom_protocol = MediaroomProtocol ( responses_callback , box_ip )
sock = create_socket ( )
await loop . create_datagram_endpoint ( lambda : mediaroom_protocol , sock = sock )
return mediaroom_protocol |
def teleport(self, location=None, rotation=None):
    """Teleport the agent to a specific location, with a specific rotation.

    Args:
        location (np.ndarray, optional): Three-element target world
            coordinate in meters. If None, keeps the current location.
        rotation (np.ndarray, optional): Three-element target rotation.
            If None, keeps the current rotation.

    Returns:
        None
    """
    # Flag value written to the bool buffer: 1 = location set,
    # 2 = rotation set, 3 = both, 0 = neither.
    flags = 0
    if location is not None:
        np.copyto(self._teleport_buffer, location)
        flags |= 1
    if rotation is not None:
        np.copyto(self._rotation_buffer, rotation)
        flags |= 2
    self._teleport_bool_buffer[0] = flags
def power_law_anisotropy ( self , r , kwargs_profile , kwargs_anisotropy , kwargs_light ) :
"""equation ( 19 ) in Suyu + 2010

: param r : radius at which to evaluate
: param kwargs _ profile : lens profile parameters ( ' theta _ E ' , ' gamma ' )
: param kwargs _ anisotropy : anisotropy parameters ( ' r _ ani ' )
: param kwargs _ light : light profile parameters ( ' r _ eff ' )
: return : sigma ^ 2 , converted to physical units by the final
arcsec2phys _ lens factor""" | # first term
theta_E = kwargs_profile [ 'theta_E' ]
gamma = kwargs_profile [ 'gamma' ]
r_ani = kwargs_anisotropy [ 'r_ani' ]
# NOTE ( review ) : 0.551 * r_eff appears to be the Hernquist scale - length
# conversion from the effective radius -- verify against Suyu + 2010 .
a = 0.551 * kwargs_light [ 'r_eff' ]
rho0_r0_gamma = self . _rho0_r0_gamma ( theta_E , gamma )
prefac1 = 4 * np . pi * const . G * a ** ( - gamma ) * rho0_r0_gamma / ( 3 - gamma )
prefac2 = r * ( r + a ) ** 3 / ( r ** 2 + r_ani ** 2 )
hyp1 = vel_util . hyp_2F1 ( a = 2 + gamma , b = gamma , c = 3 + gamma , z = 1. / ( 1 + r / a ) )
hyp2 = vel_util . hyp_2F1 ( a = 3 , b = gamma , c = 1 + gamma , z = - a / r )
fac = r_ani ** 2 / a ** 2 * hyp1 / ( ( 2 + gamma ) * ( r / a + 1 ) ** ( 2 + gamma ) ) + hyp2 / ( gamma * ( r / a ) ** gamma )
sigma2_dim_less = prefac1 * prefac2 * fac
return sigma2_dim_less * ( self . cosmo . arcsec2phys_lens ( 1. ) * const . Mpc / 1000 ) ** 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.