signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def get_bank(self):
    """Gets the ``Bank`` at this node.

    return: (osid.assessment.Bank) - the bank represented by this node
    *compliance: mandatory -- This method must be implemented.*
    """
    # Lazily create the bank lookup session on first use and cache it.
    if self._lookup_session is None:
        manager = get_provider_manager('ASSESSMENT', runtime=self._runtime, proxy=self._proxy)
        self._lookup_session = manager.get_bank_lookup_session(proxy=getattr(self, '_proxy', None))
    return self._lookup_session.get_bank(Id(self._my_map['id']))
|
def get_entries(self):
    """Return a list of the entries (messages) that would be part of the
    current "view"; that is, all of the ones from this .po file matching
    the current query or msg_filter.
    """
    if self.query:
        # Scenario #1: entries matching a free-text search query.
        pattern = re.compile(re.escape(self.query), re.IGNORECASE)

        def searchable_text(entry):
            # Concatenate every searchable field of the entry.
            parts = [
                six.text_type(entry.msgstr),
                six.text_type(entry.msgid),
                six.text_type(entry.msgctxt),
                six.text_type(entry.comment),
                u''.join(occ[0] for occ in entry.occurrences),
                six.text_type(entry.msgid_plural),
                u''.join(entry.msgstr_plural.values()),
            ]
            return u''.join(parts)

        return [entry for entry in self.po_file
                if not entry.obsolete and pattern.search(searchable_text(entry))]

    # Scenario #2: pre-defined filtered views of the catalog.
    if self.msg_filter == 'untranslated':
        return self.po_file.untranslated_entries()
    if self.msg_filter == 'translated':
        return self.po_file.translated_entries()
    if self.msg_filter == 'fuzzy':
        return [entry for entry in self.po_file.fuzzy_entries() if not entry.obsolete]
    # ("all")
    return [entry for entry in self.po_file if not entry.obsolete]
|
def canOrder(self, commit: Commit) -> Tuple[bool, Optional[str]]:
    """Return whether the specified commitRequest can be returned to the node.

    Decision criteria:

    - If have got just n-f Commit requests then return request to node
    - If less than n-f of commit requests then probably don't have
      consensus on the request; don't return request to node
    - If more than n-f then already returned to node; don't return request
      to node

    :param commit: the COMMIT
    """
    quorum = self.quorums.commit.value
    if not self.commits.hasQuorum(commit, quorum):
        reason = "no quorum ({}): {} commits where f is {}".format(quorum, commit, self.f)
        return False, reason

    view_no, pp_seq_no = commit.viewNo, commit.ppSeqNo
    if self.has_already_ordered(view_no, pp_seq_no):
        return False, "already ordered"

    if pp_seq_no > 1 and not self.all_prev_ordered(commit):
        # A previous request is still unordered: stash this commit and
        # schedule periodic retries until the gap closes.
        stash_for_view = self.stashed_out_of_order_commits.setdefault(view_no, {})
        stash_for_view[pp_seq_no] = commit
        self.startRepeating(self.process_stashed_out_of_order_commits,
                            self.config.PROCESS_STASHED_OUT_OF_ORDER_COMMITS_INTERVAL)
        return False, "stashing {} since out of order".format(commit)

    return True, None
|
def _port_action_vxlan ( self , port , segment , func ) :
"""Verify configuration and then process event ."""
|
# If the segment is None , just log a warning message and return .
if segment is None :
self . _log_missing_segment ( )
return
device_id = port . get ( 'device_id' )
mcast_group = segment . get ( api . PHYSICAL_NETWORK )
host_id = port . get ( bc . portbindings . HOST_ID )
vni = segment . get ( api . SEGMENTATION_ID )
if vni and device_id and mcast_group and host_id :
func ( vni , device_id , mcast_group , host_id )
return vni
else :
fields = "vni " if not vni else ""
fields += "device_id " if not device_id else ""
fields += "mcast_group " if not mcast_group else ""
fields += "host_id" if not host_id else ""
raise excep . NexusMissingRequiredFields ( fields = fields )
|
def get_children_to_delete(self):
    """Return all children that are not referenced

    :returns: list or :class:`Reftrack`
    :rtype: list
    :raises: None
    """
    refobjinter = self.get_refobjinter()
    children = self.get_all_children()
    todelete = []
    for c in children:
        if c.status() is None:
            # if child is not in scene we do not have to delete it
            continue
        rby = refobjinter.referenced_by(c.get_refobj())
        if rby is None:
            # child is not part of another reference.
            # we have to delete it for sure
            todelete.append(c)
            continue
        # check if child is referenced by any parent up to self.
        # if it is not referenced by any reference of a parent, then we
        # can assume it is referenced by a parent of a greater scope,
        # e.g. the parent of self. because we do not delete anything above self
        # we would have to delete the child manually
        parent = c.get_parent()
        while parent != self.get_parent():
            if refobjinter.get_reference(parent.get_refobj()) == rby:
                # is referenced by a parent, so it will get deleted when
                # the parent is deleted -- no manual delete needed
                break
            parent = parent.get_parent()
        else:
            # while/else: reached only when the loop exhausted WITHOUT
            # hitting ``break``, i.e. no parent up to self holds the
            # reference, so the child must be deleted manually.
            todelete.append(c)
    return todelete
|
def undefine(self):
    """Undefine the Template.

    Python equivalent of the CLIPS undeftemplate command.

    The object becomes unusable after this method has been called.
    """
    # EnvUndeftemplate returns 1 on success; anything else is an error.
    outcome = lib.EnvUndeftemplate(self._env, self._tpl)
    if outcome != 1:
        raise CLIPSError(self._env)
|
def owner_type(self, value):
    """Set ``owner_type`` to the given value.

    In addition:

    * Update the internal type of the ``owner`` field.
    * Update the value of the ``owner`` field if a value is already set.
    """
    self._owner_type = value
    # Dispatch table mapping the owner_type string onto the entity class
    # backing the ``owner`` field; unknown values leave ``owner`` untouched.
    owner_class = {'User': User, 'Usergroup': UserGroup}.get(value)
    if owner_class is not None:
        self._fields['owner'] = entity_fields.OneToOneField(owner_class)
        if hasattr(self, 'owner'):  # pylint:disable=no-member
            current = self.owner
            owner_id = current.id if isinstance(current, Entity) else current
            self.owner = owner_class(self._server_config, id=owner_id)
|
def create(self, build_sid):
    """Create a new DeploymentInstance

    :param unicode build_sid: The build_sid

    :returns: Newly created DeploymentInstance
    :rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance
    """
    # Serialize the request parameters and POST them to this list resource.
    data = values.of({'BuildSid': build_sid})
    payload = self._version.create('POST', self._uri, data=data)
    return DeploymentInstance(
        self._version,
        payload,
        service_sid=self._solution['service_sid'],
        environment_sid=self._solution['environment_sid'],
    )
|
def set_vads_payment_config(self):
    """vads_payment_config can be set only after object saving.

    A custom payment config can be set once PaymentRequest is saved
    (adding elements to the m2m relationship). As a consequence we set
    vads_payment_config just before sending data elements to payzen.
    """
    custom_configs = self.custom_payment_config.all()
    self.vads_payment_config = tools.get_vads_payment_config(self.payment_config, custom_configs)
|
def _add_missing_repos(frozen_repos, safe_kwargs, res):
    '''Re-create every frozen repository that is missing from the system.'''
    repos = __salt__['pkg.list_repos'](**safe_kwargs)
    for repo in set(frozen_repos) - set(repos):
        try:
            # In Python 2 we cannot do advanced destructuring, so build a
            # temporary dict merging the stored repo data with the kwargs.
            _tmp_kwargs = frozen_repos[repo].copy()
            _tmp_kwargs.update(safe_kwargs)
            __salt__['pkg.mod_repo'](repo, **_tmp_kwargs)
            res['repos']['add'].append(repo)
            log.info('Added missing repository %s', repo)
        except Exception as e:
            msg = 'Error adding %s repository: %s'
            log.error(msg, repo, e)
            res['comment'].append(msg % (repo, e))


def _add_missing_pkgs(frozen_pkgs, safe_kwargs, res):
    '''Install every frozen package that is missing from the system.

    NOTE: this could use ``pkgs=`` for a single call, which would be
    faster, but installing one-by-one gives a detailed per-package report
    of what installed or failed.
    '''
    pkgs = __salt__['pkg.list_pkgs'](**safe_kwargs)
    for pkg in set(frozen_pkgs) - set(pkgs):
        try:
            __salt__['pkg.install'](name=pkg, **safe_kwargs)
            res['pkgs']['add'].append(pkg)
            log.info('Added missing package %s', pkg)
        except Exception as e:
            msg = 'Error adding %s package: %s'
            log.error(msg, pkg, e)
            res['comment'].append(msg % (pkg, e))


def _remove_extra_pkgs(frozen_pkgs, safe_kwargs, res):
    '''Remove every installed package that is not in the frozen state.'''
    pkgs = __salt__['pkg.list_pkgs'](**safe_kwargs)
    for pkg in set(pkgs) - set(frozen_pkgs):
        try:
            __salt__['pkg.remove'](name=pkg, **safe_kwargs)
            res['pkgs']['remove'].append(pkg)
            log.info('Removed extra package %s', pkg)
        except Exception as e:
            msg = 'Error removing %s package: %s'
            log.error(msg, pkg, e)
            res['comment'].append(msg % (pkg, e))


def _remove_extra_repos(frozen_repos, safe_kwargs, res):
    '''Remove every configured repository that is not in the frozen state.'''
    repos = __salt__['pkg.list_repos'](**safe_kwargs)
    for repo in set(repos) - set(frozen_repos):
        try:
            __salt__['pkg.del_repo'](repo, **safe_kwargs)
            res['repos']['remove'].append(repo)
            log.info('Removed extra repository %s', repo)
        except Exception as e:
            msg = 'Error removing %s repository: %s'
            log.error(msg, repo, e)
            res['comment'].append(msg % (repo, e))


def restore(name=None, **kwargs):
    '''Make sure that the system contains the packages and repos from a
    frozen state.

    Read the list of packages and repositories from the freeze file,
    and compare it with the current list of packages and repos. If
    there is any difference, all the missing packages and repos will
    be installed, and all the extra packages and repos will be
    removed.

    As this module is built on top of the pkg module, the user can
    send extra attributes to the underlying pkg module via kwargs.
    This function will call ``pkg.list_repos``, ``pkg.mod_repo``,
    ``pkg.list_pkgs``, ``pkg.install``, ``pkg.remove`` and
    ``pkg.del_repo``, and any additional arguments will be passed
    through to those functions.

    name
        Name of the frozen state. Optional.

    CLI Example:

    .. code-block:: bash

        salt '*' freezer.restore
        salt '*' freezer.restore root=/chroot
    '''
    if not status(name):
        raise CommandExecutionError('Frozen state not found.')

    frozen_pkgs = {}
    frozen_repos = {}
    # BUGFIX: the loop variable used to be ``name``, shadowing the function
    # parameter; use a distinct name for the file path.
    for path, content in zip(_paths(name), (frozen_pkgs, frozen_repos)):
        with fopen(path) as fp:
            content.update(json.load(fp))

    # The ordering of removing or adding packages and repos can be
    # relevant: a missing package may come from a repo that is also
    # missing (so the repo must be added first), while a missing package
    # may also come from a repo that is present but about to be removed.
    # Hence the order:
    #   - Add missing repos
    #   - Add missing packages
    #   - Remove extra packages
    #   - Remove extra repos
    safe_kwargs = clean_kwargs(**kwargs)

    # Note that we expect the information stored by list_XXX to match the
    # mod_XXX counterpart. If this is not the case the recovery will be
    # partial.
    res = {
        'pkgs': {'add': [], 'remove': []},
        'repos': {'add': [], 'remove': []},
        'comment': [],
    }
    _add_missing_repos(frozen_repos, safe_kwargs, res)
    _add_missing_pkgs(frozen_pkgs, safe_kwargs, res)
    _remove_extra_pkgs(frozen_pkgs, safe_kwargs, res)
    _remove_extra_repos(frozen_repos, safe_kwargs, res)
    return res
|
def open_only(f):
    """Decorator guarding a method against use on a closed connection.

    The wrapped method raises ``NotSupportedError`` when ``self.closed``
    is true; otherwise it simply delegates to ``f``.
    """
    @functools.wraps(f)
    def guarded(self, *args, **kwargs):
        if self.closed:
            raise NotSupportedError('connection is closed')
        return f(self, *args, **kwargs)
    return guarded
|
def implied_vol(self, value, precision=1.0e-5, iters=100):
    """Get implied vol at the specified price using an iterative approach.

    There is no closed-form inverse of BSM-value as a function of sigma,
    so start at an anchoring volatility level from Brenner & Subrahmanyam
    (1988) and work iteratively from there.

    Resources
    ---------
    Brenner & Subrahmanyam, A Simple Formula to Compute the Implied
    Standard Deviation, 1988.
    """
    # Brenner-Subrahmanyam anchor: sigma ~ sqrt(2*pi/T) * (price / S0).
    vol = np.sqrt(2.0 * np.pi / self.T) * (value / self.S0)
    for _ in range(iters):
        candidate = BSM(S0=self.S0, K=self.K, T=self.T, r=self.r, sigma=vol, kind=self.kind)
        gap = value - candidate.value()
        if abs(gap) < precision:
            break
        # Newton-Raphson step: vega is d(value)/d(sigma).
        vol = vol + gap / candidate.vega()
    return vol
|
def copy(self):
    """Returns a copy of Filter."""
    # Build a fresh filter of the same shape, then copy this one's state
    # into it.
    duplicate = ConcurrentMeanStdFilter(self.shape)
    duplicate.sync(self)
    return duplicate
|
def _set_redistribute_isis(self, v, load=False):
    """Setter method for redistribute_isis, mapped from YANG variable
    /routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/redistribute_isis (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_redistribute_isis is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_redistribute_isis() directly.

    YANG Description: ISIS routes
    """
    # Unwrap values that carry their own YANG type converter.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the incoming value into the generated container class;
        # YANGDynClass raises TypeError/ValueError on incompatible input.
        t = YANGDynClass(v, base=redistribute_isis.redistribute_isis, is_container='container', presence=True, yang_name="redistribute-isis", rest_name="isis", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'ISIS routes', u'alt-name': u'isis'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """redistribute_isis must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=redistribute_isis.redistribute_isis, is_container='container', presence=True, yang_name="redistribute-isis", rest_name="isis", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'ISIS routes', u'alt-name': u'isis'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""", })
    self.__redistribute_isis = t
    # Generated classes expose _set() to mark the instance as modified.
    if hasattr(self, '_set'):
        self._set()
|
def kegg_mapping_and_metadata_parallelize(self, sc, kegg_organism_code, custom_gene_mapping=None, outdir=None, set_as_representative=False, force_rerun=False):
    """Map all genes in the model to KEGG IDs using the KEGG service.

    Steps:
        1. Download all metadata and sequence files in the sequences directory
        2. Creates a KEGGProp object in the protein.sequences attribute
        3. Returns a Pandas DataFrame of mapping results

    Args:
        sc (SparkContext): Spark Context to parallelize this function
        kegg_organism_code (str): The three letter KEGG code of your organism
        custom_gene_mapping (dict): If your model genes differ from the gene IDs you want to map,
            custom_gene_mapping allows you to input a dictionary which maps model gene IDs to new ones.
            Dictionary keys must match model gene IDs.
        outdir (str): Path to output directory of downloaded files, must be set if GEM-PRO directories
            were not created initially
        set_as_representative (bool): If mapped KEGG IDs should be set as representative sequences
        force_rerun (bool): If you want to overwrite any existing mappings and files
    """
    # First map all of the organism's KEGG genes to UniProt
    kegg_to_uniprot = ssbio.databases.kegg.map_kegg_all_genes(organism_code=kegg_organism_code, target_db='uniprot')
    # Parallelize the genes list
    genes_rdd = sc.parallelize(self.genes)

    # Sub-function executed on the Spark workers; carries out the bulk of
    # the serial mapping logic for a single gene.
    def gp_kegg_sc(g):
        if custom_gene_mapping:
            kegg_g = custom_gene_mapping[g.id]
        else:
            kegg_g = g.id
        # Download both FASTA and KEGG metadata files
        kegg_prop = g.protein.load_kegg(kegg_id=kegg_g, kegg_organism_code=kegg_organism_code, download=True, outdir=outdir, set_as_representative=set_as_representative, force_rerun=force_rerun)
        # Update potentially old UniProt ID
        if kegg_g in kegg_to_uniprot.keys():
            kegg_prop.uniprot = kegg_to_uniprot[kegg_g]
            if g.protein.representative_sequence:
                if g.protein.representative_sequence.kegg == kegg_prop.kegg:
                    g.protein.representative_sequence.uniprot = kegg_to_uniprot[kegg_g]
        # Tracker for if it mapped successfully to KEGG
        if kegg_prop.sequence_file:
            success = True
        else:
            success = False
        return g, success

    # Run a map operation to execute the function on all items in the RDD
    result = genes_rdd.map(gp_kegg_sc).collect()
    # Copy the results over to the GEM-PRO object's genes using the GenePro
    # function "copy_modified_gene".
    # Also count how many genes mapped to KEGG.
    successfully_mapped_counter = 0
    for modified_g, success in result:
        original_gene = self.genes.get_by_id(modified_g.id)
        original_gene.copy_modified_gene(modified_g)
        if success:
            successfully_mapped_counter += 1
    log.info('{}/{}: number of genes mapped to KEGG'.format(successfully_mapped_counter, len(self.genes)))
    log.info('Completed ID mapping --> KEGG. See the "df_kegg_metadata" attribute for a summary dataframe.')
|
def get(self, repi, mag):
    """Look up equivalent distances for epicentral distances and a magnitude.

    :param repi: an array of epicentral distances in the range self.repi
    :param mag: a magnitude in the range self.mags
    :returns: an array of equivalent distances
    """
    # Nearest-neighbour lookup of the magnitude column.
    mag_idx = numpy.abs(mag - self.mags).argmin()
    # Vectorized nearest-neighbour lookup of every epicentral distance;
    # replaces the original per-element Python loop with one broadcasted
    # argmin over a (len(repi), len(self.repi)) difference matrix.
    repi = numpy.asarray(repi, dtype=float)
    repi_idx = numpy.abs(repi[:, None] - self.repi).argmin(axis=1)
    return numpy.array(self.reqv[repi_idx, mag_idx])
|
def connected_components(G):
    """Compute the connected components of a graph.

    The connected components of a graph G, which is represented by a
    symmetric sparse matrix, are labeled with the integers 0, 1, ..., (K-1)
    where K is the number of components.

    Parameters
    ----------
    G : symmetric matrix, preferably in sparse CSR or CSC format
        The nonzeros of G represent the edges of an undirected graph.

    Returns
    -------
    components : ndarray
        An array of component labels for each vertex of the graph.

    Notes
    -----
    If the nonzero structure of G is not symmetric, then the
    result is undefined.

    Examples
    --------
    >>> from pyamg.graph import connected_components
    >>> print(connected_components([[0,1,0],[1,0,1],[0,1,0]]))
    [0 0 0]
    >>> print(connected_components([[0,1,0],[1,0,0],[0,0,0]]))
    [0 0 1]
    >>> print(connected_components([[0,0,0],[0,0,0],[0,0,0]]))
    [0 1 2]
    >>> print(connected_components([[0,1,0,0],[1,0,0,0],[0,0,0,1],[0,0,1,0]]))
    [0 0 1 1]

    """
    # Normalize the input to a sparse graph exposing indptr/indices arrays.
    G = asgraph(G)
    N = G.shape[0]
    # Output labels, allocated with the same integer dtype as the graph's
    # index arrays so the compiled kernel can fill it in place.
    components = np.empty(N, G.indptr.dtype)
    fn = amg_core.connected_components
    fn(N, G.indptr, G.indices, components)
    return components
|
def subscribe(self, client, channel_name):
    """Register a new client to receive messages on a channel."""
    channel = self.channels.get(channel_name)
    if channel is None:
        # First subscriber: create the channel and start its worker.
        channel = Channel(channel_name)
        self.channels[channel_name] = channel
        channel.start()
    channel.subscribe(client)
|
def send(self, ws, seq):
    """Sends heartbeat message to Discord

    Attributes:
        ws: Websocket connection to discord
        seq: Sequence number of heartbeat
    """
    # Opcode 1 is the heartbeat message; 'd' carries the last sequence seen.
    message = json.dumps({'op': 1, 'd': seq})
    logger.debug("Sending heartbeat with payload {}".format(message))
    ws.send(message)
|
def find(query):
    """Search by Name, SMILES, InChI, InChIKey, etc. Returns first 100 Compounds"""
    # FIX: the original tested ``type(query) == str`` twice (a Python 2
    # str/unicode remnant); isinstance also accepts str subclasses.
    assert isinstance(query, str), 'query not a string object'
    searchurl = 'http://www.chemspider.com/Search.asmx/SimpleSearch?query=%s&token=%s' % (urlquote(query), TOKEN)
    response = urlopen(searchurl)
    tree = ET.parse(response)
    elem = tree.getroot()
    # FIX: Element.getiterator() was removed in Python 3.9; iter() is the
    # documented replacement and behaves identically here.
    csid_tags = elem.iter('{http://www.chemspider.com/}int')
    compoundlist = [Compound(tag.text) for tag in csid_tags]
    return compoundlist if compoundlist else None
|
def shape(self):
    """Raster shape."""
    if self._shape is not None:
        return self._shape
    # Shape unknown yet: pull metadata (but not pixel data) from rasterio.
    self._populate_from_rasterio_object(read_image=False)
    return self._shape
|
def listeners_iter(self):
    """Return an iterator over the mapping of event => listeners bound.

    The listener list(s) returned should **not** be mutated.

    NOTE(harlowja): Each listener in the yielded (event, listeners)
    tuple is an instance of the :py:class:`~.Listener` type, which
    itself wraps a provided callback (and its details filter
    callback, if any).
    """
    # Snapshot the keys first so concurrent (un)registration while the
    # caller iterates does not break the iteration itself.
    pending = set(six.iterkeys(self._topics))
    while pending:
        topic = pending.pop()
        try:
            yield topic, self._topics[topic]
        except KeyError:
            # Topic removed after the snapshot; skip it.
            pass
|
def transformToNative(obj):
    """Turn obj.value into a datetime.timedelta.

    Already-native objects are returned unchanged; an empty value is
    marked native and returned as-is.

    :raises ParseError: if the DURATION contains more than one duration
        string.
    """
    if obj.isNative:
        return obj
    obj.isNative = True
    # FIX: removed the original no-op statement ``obj.value = obj.value``.
    if obj.value == '':
        return obj
    deltalist = stringToDurations(obj.value)
    # When can DURATION have multiple durations? For now:
    if len(deltalist) == 1:
        obj.value = deltalist[0]
        return obj
    raise ParseError("DURATION must have a single duration string.")
|
def add(self, cmd):
    '''Add a new command (waypoint) at the end of the command list.

    .. note::

        Commands are sent to the vehicle only after you call
        :py:func:`upload() <Vehicle.commands.upload>`.

    :param Command cmd: The command to be added.
    '''
    self.wait_ready()
    vehicle = self._vehicle
    # Fix up any target system/component fields before queueing the command.
    vehicle._handler.fix_targets(cmd)
    vehicle._wploader.add(cmd, comment='Added by DroneKit')
    # Mark the waypoint list dirty so the next upload() pushes it out.
    vehicle._wpts_dirty = True
|
def get_observer_look(self, utc_time, lon, lat, alt):
    """Calculate observers look angle to a satellite.

    http://celestrak.com/columns/v02n02/

    utc_time: Observation time (datetime object)
    lon: Longitude of observer position on ground in degrees east
    lat: Latitude of observer position on ground in degrees north
    alt: Altitude above sea-level (geoid) of observer position on ground in km

    Return: (Azimuth, Elevation)
    """
    utc_time = dt2np(utc_time)
    (pos_x, pos_y, pos_z), (vel_x, vel_y, vel_z) = self.get_position(utc_time, normalize=False)
    (opos_x, opos_y, opos_z), (ovel_x, ovel_y, ovel_z) = astronomy.observer_position(utc_time, lon, lat, alt)
    lon = np.deg2rad(lon)
    lat = np.deg2rad(lat)
    # Local sidereal time of the observer (GMST plus east longitude).
    theta = (astronomy.gmst(utc_time) + lon) % (2 * np.pi)
    # Range vector from the observer to the satellite.
    rx = pos_x - opos_x
    ry = pos_y - opos_y
    rz = pos_z - opos_z
    sin_lat = np.sin(lat)
    cos_lat = np.cos(lat)
    sin_theta = np.sin(theta)
    cos_theta = np.cos(theta)
    # Project the range vector onto the observer's topocentric
    # south / east / zenith axes.
    top_s = sin_lat * cos_theta * rx + sin_lat * sin_theta * ry - cos_lat * rz
    top_e = -sin_theta * rx + cos_theta * ry
    top_z = cos_lat * cos_theta * rx + cos_lat * sin_theta * ry + sin_lat * rz
    # Azimuth measured clockwise from north; the arctan quadrant is fixed
    # up by the two np.where corrections below.
    az_ = np.arctan(-top_e / top_s)
    az_ = np.where(top_s > 0, az_ + np.pi, az_)
    az_ = np.where(az_ < 0, az_ + 2 * np.pi, az_)
    # Slant range and elevation above the local horizon.
    rg_ = np.sqrt(rx * rx + ry * ry + rz * rz)
    el_ = np.arcsin(top_z / rg_)
    return np.rad2deg(az_), np.rad2deg(el_)
|
def add_paths_to_os(self, key=None, update=None):
    '''Add the paths in tree environ into the os environ

    This code goes through the tree environ and checks
    for existence in the os environ, then adds them

    Parameters:
        key (str):
            The section name to check against / add
        update (bool):
            If True, overwrites existing tree environment variables in your
            local environment. Default is False.
    '''
    if key is None:
        # No explicit section: take every non-default section name.
        sections = [name for name in self.environ.keys() if 'default' not in name]
    else:
        sections = key if isinstance(key, list) else [key]
    for section in sections:
        self.check_paths(self.get_paths(section), update=update)
|
def _get_decimal128(data, position, dummy0, dummy1, dummy2):
    """Decode a BSON decimal128 to bson.decimal128.Decimal128."""
    # A decimal128 is a fixed 16-byte binary value.
    stop = position + 16
    value = Decimal128.from_bid(data[position:stop])
    return value, stop
|
def fast_cov(x, y=None, destination=None):
    """calculate the covariance matrix for the columns of x (MxN), or optionally, the covariance matrix between the
    columns of x and the columns of y (MxP). (In the language of statistics, the columns are variables, the rows
    are observations).

    Args:
        x (numpy array-like) MxN in shape
        y (numpy array-like) MxP in shape
        destination (numpy array-like) optional location where to store the results as they are calculated (e.g. a numpy
            memmap of a file)

    returns (numpy array-like) array of the covariance values
        for defaults (y=None), shape is NxN
        if y is provided, shape is NxP
    """
    validate_inputs(x, y, destination)
    if y is None:
        y = x
    if destination is None:
        destination = numpy.zeros((x.shape[1], y.shape[1]))
    # Mean-center each column, cast to the destination's dtype.
    centered_x = (x - numpy.mean(x, axis=0)).astype(destination.dtype)
    centered_y = (y - numpy.mean(y, axis=0)).astype(destination.dtype)
    # cov = X_c^T . Y_c / (n - 1), written directly into destination.
    numpy.dot(centered_x.T, centered_y, out=destination)
    numpy.divide(destination, (x.shape[0] - 1), out=destination)
    return destination
|
def neighbor(pos, tune_params):
    """return a random neighbor of pos"""
    param_names = list(tune_params.keys())
    neighbor_pos = []
    # Each dimension independently has a 20% chance of a fully random
    # mutation, otherwise it steps to an adjacent value; the expected value
    # is set so that all dimensions attempt to get mutated.
    for dim in range(len(pos)):
        values = tune_params[param_names[dim]]
        if random.random() < 0.2:
            # Replace with a completely random value for this dimension.
            new_value = random_val(dim, tune_params)
        else:
            # Step to an adjacent value, clamped to the valid index range.
            idx = values.index(pos[dim])
            idx += 1 if random.random() > 0.5 else -1
            idx = min(max(idx, 0), len(values) - 1)
            new_value = values[idx]
        neighbor_pos.append(new_value)
    return neighbor_pos
|
def _read_file(path):
    '''Reads and returns the contents of a text file'''
    try:
        with salt.utils.files.flopen(path, 'rb') as handle:
            return [salt.utils.stringutils.to_str(line) for line in handle.readlines()]
    except (OSError, IOError):
        # Historical behavior: an unreadable file yields '' (falsy, and
        # iterating it yields nothing, like an empty list).
        return ''
|
def _set_l2_spf6_timer(self, v, load=False):
    """Setter method for l2_spf6_timer, mapped from YANG variable
    /isis_state/router_isis_config/l2_spf6_timer (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_l2_spf6_timer is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_l2_spf6_timer() directly.
    """
    # Unwrap values that carry their own YANG type converter.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the incoming value into the generated container class;
        # YANGDynClass raises TypeError/ValueError on incompatible input.
        t = YANGDynClass(v, base=l2_spf6_timer.l2_spf6_timer, is_container='container', presence=False, yang_name="l2-spf6-timer", rest_name="l2-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """l2_spf6_timer must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=l2_spf6_timer.l2_spf6_timer, is_container='container', presence=False, yang_name="l2-spf6-timer", rest_name="l2-spf6-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-spf-timer-l2-spf6-timer-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""", })
    self.__l2_spf6_timer = t
    # Generated classes expose _set() to mark the instance as modified.
    if hasattr(self, '_set'):
        self._set()
|
def addcommenttocommit(self, project_id, author, sha, path, line, note):
    """Adds an inline comment to a specific commit

    :param project_id: project id
    :param author: The author info as returned by create mergerequest
    :param sha: The name of a repository branch or tag or if not given the default branch
    :param path: The file path
    :param line: The line number
    :param note: Text of comment
    :return: True or False
    """
    payload = {
        'author': author,
        'note': note,
        'path': path,
        'line': line,
        'line_type': 'new',
    }
    url = '{0}/{1}/repository/commits/{2}/comments'.format(self.projects_url, project_id, sha)
    response = requests.post(url, headers=self.headers, data=payload, verify=self.verify_ssl)
    # 201 Created means the comment was accepted.
    return response.status_code == 201
|
def merge(self, *args):
    """Merge multiple dictionary objects into one.

    Later dictionaries take precedence over earlier ones for duplicate
    keys -- the same semantics as the original list-concatenation
    approach, but dict.update is linear instead of the original
    quadratic ``values = values + list(entry.items())`` loop.

    :param variadic args: Multiple dictionary items
    :return dict
    """
    merged = {}
    for entry in args:
        merged.update(entry)
    return merged
|
def get_hash(name, password=None):
    '''Returns the hash of a certificate in the keychain.

    name
        The name of the certificate (which you can get from
        keychain.get_friendly_name) or the location of a p12 file.

    password
        The password that is used in the certificate. Only required if your
        passing a p12 file.
        Note: This will be outputted to logs

    CLI Example:

    .. code-block:: bash

        salt '*' keychain.get_hash /tmp/test.p12 test123
    '''
    # NOTE(review): ``name`` and ``password`` are interpolated straight into
    # a shell command line -- callers must not pass untrusted input here.
    if '.p12' in name[-4:]:
        cmd = 'openssl pkcs12 -in {0} -passin pass:{1} -passout pass:{1}'.format(name, password)
    else:
        cmd = 'security find-certificate -c "{0}" -m -p'.format(name)
    out = __salt__['cmd.run'](cmd)
    pem_match = re.search('-----BEGIN CERTIFICATE-----(.*)-----END CERTIFICATE-----', out, re.DOTALL | re.MULTILINE)
    return pem_match.group(1) if pem_match else False
|
def delete_persistent_volume ( self , name , ** kwargs ) :  # noqa: E501
    """delete_persistent_volume  # noqa: E501

    Delete a PersistentVolume.  # noqa: E501
    This method makes a synchronous HTTP request by default.  To make an
    asynchronous HTTP request, pass async_req=True:

    >>> thread = api.delete_persistent_volume(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the PersistentVolume (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted.
    :param int grace_period_seconds: Seconds before the object should be deleted; zero means delete immediately.
    :param bool orphan_dependents: Deprecated; use PropagationPolicy instead.
    :param str propagation_policy: Whether/how garbage collection will be performed ('Orphan', 'Background', 'Foreground').
    :param V1DeleteOptions body:
    :return: V1Status; if called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async and sync paths delegate to the same helper; it inspects
    # 'async_req' in kwargs and returns either the thread or the data.
    return self.delete_persistent_volume_with_http_info(name, **kwargs)  # noqa: E501
|
def get_object ( self , binding_name , cls ) :
    """Resolve ``binding_name`` to a remote object of type ``cls`` via CORBA."""
    state = self._state
    return state.get_object(self, binding_name, cls)
|
def remove_redistribution ( self , protocol ) :
    """Removes a protocol redistribution from OSPF.

    Args:
        protocol (str): protocol to stop redistributing; must be one of
            rip, bgp, static or connected

    Returns:
        bool: True if the command completes successfully

    Raises:
        ValueError: if ``protocol`` is not one of
            [rip, bgp, static, connected]
    """
    protocols = ['bgp', 'rip', 'static', 'connected']
    if protocol not in protocols:
        # Bug fix: the two adjacent string literals previously concatenated
        # without a space, producing "...must bebgp, ...".
        raise ValueError('redistributed protocol must be '
                         'bgp, connected, rip or static')
    cmd = 'no redistribute {}'.format(protocol)
    return self.configure_ospf(cmd)
|
def _fix ( self , fmt = 'i' ) :
    """Read the record pre-/suffix at the current position using struct
    format ``fmt`` (default 'i'), honouring ``self.endian``.

    Raises EOFError when no bytes are left to read.
    """
    full_fmt = self.endian + fmt
    raw = self.read(struct.calcsize(full_fmt))
    if not raw:
        raise EOFError
    return struct.unpack(full_fmt, raw)[0]
|
async def _load ( self , data , check = True ) :
    """Extract proxies from raw input.

    Normalizes ``data`` (raw string | file-like object | list) into a set of
    ``(host, port)`` tuples, e.g. ``{('192.168.0.1', '80'), ...}``, then hands
    each one to ``self._handle`` and waits for the check queue to drain.
    """
    log.debug('Load proxies from the raw data')
    if isinstance(data, io.TextIOWrapper):
        data = data.read()
    if isinstance(data, str):
        data = IPPortPatternLine.findall(data)
    for proxy in set(data):
        await self._handle(proxy, check=check)
    await self._on_check.join()
    self._done()
|
def nguHanh ( tenHanh ) :
    """Look up one of the five elements (ngũ hành).

    Args:
        tenHanh (string): element name: Kim or K, Moc or M, Thuy or T,
            Hoa or H, Tho or O

    Returns:
        Dictionary: element id, full name, cục number/name and css class

    Raises:
        Exception: when ``tenHanh`` is not a recognized element name
    """
    table = {
        "Kim": {"id": 1, "tenHanh": "Kim", "cuc": 4, "tenCuc": "Kim tứ Cục", "css": "hanhKim"},
        "Moc": {"id": 2, "tenHanh": "Mộc", "cuc": 3, "tenCuc": "Mộc tam Cục", "css": "hanhMoc"},
        "Thuy": {"id": 3, "tenHanh": "Thủy", "cuc": 2, "tenCuc": "Thủy nhị Cục", "css": "hanhThuy"},
        "Hoa": {"id": 4, "tenHanh": "Hỏa", "cuc": 6, "tenCuc": "Hỏa lục Cục", "css": "hanhHoa"},
        "Tho": {"id": 5, "tenHanh": "Thổ", "cuc": 5, "tenCuc": "Thổ ngũ Cục", "css": "hanhTho"},
    }
    alias = {"K": "Kim", "M": "Moc", "T": "Thuy", "H": "Hoa", "O": "Tho"}
    key = alias.get(tenHanh, tenHanh)
    if key not in table:
        raise Exception("Tên Hành phải thuộc Kim (K), Mộc (M), Thủy (T), Hỏa (H) hoặc Thổ (O)")
    return table[key]
|
def inner_products ( self , vec ) :
    """Compute the inner product of ``vec`` with every stored embedding.

    :param (np.array) vec: the query vector
    :return (list[tuple[str, float]]): a map of embeddings to inner products
    """
    scores = self.array.dot(vec)
    indices = np.arange(len(scores))
    return self._word_to_score(indices, scores)
|
def constructor ( self , random , args ) :
    """Return a candidate solution for an ant colony optimization.

    Greedily builds a candidate one component at a time: at each step it
    gathers the components that still fit the remaining capacity, then either
    takes the best one (with probability ``self.bias``) or samples one via
    fitness-proportionate selection.  Construction stops when the candidate
    is full or no feasible component remains.

    :param random: random number generator; consumed once per chosen
        component, plus whatever the selector draws
    :param args: keyword arguments (unused here; kept for the framework's
        constructor signature)
    :return: list of chosen components
    """
    self . _use_ants = True
    candidate = [ ]
    while len ( candidate ) < len ( self . components ) : # Find feasible components
        feasible_components = [ ]
        if len ( candidate ) == 0 :
            # First step: every component is still feasible.
            feasible_components = self . components
        else :
            # Capacity left after the components already chosen.
            remaining_capacity = self . capacity - sum ( [ c . element for c in candidate ] )
            if self . duplicates :
                # Components may repeat, so only capacity limits the choice.
                feasible_components = [ c for c in self . components if c . element <= remaining_capacity ]
            else :
                # No repeats: also exclude components already in the candidate.
                feasible_components = [ c for c in self . components if c not in candidate and c . element <= remaining_capacity ]
        if len ( feasible_components ) == 0 :
            break
        else : # Choose a feasible component
            # With probability ``bias`` exploit the best component; otherwise
            # explore via fitness-proportionate (roulette-wheel) selection.
            if random . random ( ) <= self . bias :
                next_component = max ( feasible_components )
            else :
                next_component = selectors . fitness_proportionate_selection ( random , feasible_components , { 'num_selected' : 1 } ) [ 0 ]
            candidate . append ( next_component )
    return candidate
|
def _transform_data ( self , X ) :
    """Binarize ``X`` column by column with the fitted per-column binarizers.

    Raises NotFittedError when ``fit`` has not been called, and ValueError on
    a feature-count mismatch.
    """
    if self._binarizers == []:
        raise NotFittedError()
    if self.binarize is not None:
        X = binarize(X, threshold=self.binarize)
    n_features = len(self._binarizers)
    if n_features != X.shape[1]:
        raise ValueError("Expected input with %d features, got %d instead" % (n_features, X.shape[1]))
    parts = []
    for col, binarizer in enumerate(self._binarizers):
        transformed = binarizer.transform(X[:, col])
        # sklearn returns ndarray with shape (samples, 1) on binary input,
        # so also emit the complement column to keep one column per class.
        if binarizer.classes_.shape[0] == 2:
            parts.append(1 - transformed)
        parts.append(transformed)
    return np.concatenate(parts, axis=1)
|
def pack ( self ) :
    """Pack the source file and save the result.

    The output file name is ``self.args.prefix`` plus the base name of
    ``self.path``, written to ``self.args.output`` (or ``self.basedir``).

    Raises:
        ZetaError: wrapping any IOError raised while writing the output.
    """
    pack_name = self.args.prefix + op.basename(self.path)
    pack_path = op.join(self.args.output or self.basedir, pack_name)
    self.out("Packing: %s" % self.path)
    self.out("Output: %s" % pack_path)
    if self.args.format:
        ext = self.get_ext(self.path)
        self.parsers[ext] = self.args.format
    out = "".join(self.merge(self.parse(self.path)))
    try:
        # Fixes: ``with`` guarantees the handle is closed (the original leaked
        # it), and ``as`` replaces the Python-2-only ``except IOError, ex``.
        with open(pack_path, 'w') as target:
            target.write(out)
        self.out("Linked file saved as: '%s'." % pack_path)
    except IOError as ex:
        raise ZetaError(ex)
|
def InitPrivateKey ( self ) :
    """Ensure this client has a usable private RSA key.

    Tries the key loaded from the certificate first; if it is absent or
    invalid, generates a fresh random RSA key and stores it as our
    certificate.

    Returns:
      An RSA key - either from the certificate or a new random key.
    """
    existing = self.private_key
    if existing:
        try:
            self.common_name = rdf_client.ClientURN.FromPrivateKey(existing)
            logging.info("Starting client %s", self.common_name)
            return existing
        except type_info.TypeValueError:
            # Fall through: the stored key is unusable.
            pass
    # Either an invalid key or no key at all - generate a new one.
    new_key = rdf_crypto.RSAPrivateKey.GenerateKey(bits=config.CONFIG["Client.rsa_key_length"])
    self.common_name = rdf_client.ClientURN.FromPrivateKey(new_key)
    logging.info("Client pending enrolment %s", self.common_name)
    # Persist the new key pair.
    self.SavePrivateKey(new_key)
    return new_key
|
def relabel ( self , i ) :
    '''API: relabel(self, i)
    Description:
        Used by max_flow_preflowpush() for relabelling node i: sets node i's
        'distance' to one more than the smallest distance over residual
        neighbors (forward arcs with spare capacity, backward arcs with
        positive flow).
    Input:
        i: Node that is being relabelled.
    Post:
        'distance' attribute of node i is updated.
    '''
    # Upper bound: no residual neighbor can be this far away.
    best = 2 * len(self.get_node_list()) + 1
    for j in self.get_neighbors(i):
        has_residual = self.get_edge_attr(i, j, 'flow') < self.get_edge_attr(i, j, 'capacity')
        if has_residual and self.get_node_attr(j, 'distance') < best:
            best = self.get_node_attr(j, 'distance')
    for j in self.get_in_neighbors(i):
        if self.get_edge_attr(j, i, 'flow') > 0 and self.get_node_attr(j, 'distance') < best:
            best = self.get_node_attr(j, 'distance')
    self.set_node_attr(i, 'distance', best + 1)
|
def infer_transportation_mode ( self , clf , min_time ) :
    """Infer the transportation mode of every segment, in place.

    Returns:
        This track
    """
    for seg in self.segments:
        seg.infer_transportation_mode(clf, min_time)
    return self
|
def files ( self ) :
    """Files in torrent.

    List of namedtuples (filepath, size).

    :rtype: list[TorrentFile]
    """
    info = self._struct.get('info')
    if not info:
        return []
    if 'files' not in info:
        # Single-file torrent: the name is the file itself.
        return [TorrentFile(info['name'], info['length'])]
    base = info['name']
    return [TorrentFile(join(base, *entry['path']), entry['length']) for entry in info['files']]
|
def get_service_account_token ( request , service_account = 'default' ) :
    """Fetch an OAuth 2.0 access token for a service account from the
    metadata server.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        service_account (str): The string 'default' or a service account
            email address, selecting which account to acquire a token for.

    Returns:
        Union[str, datetime]: The access token and its expiration.

    Raises:
        google.auth.exceptions.TransportError: if an error occurred while
            retrieving metadata.
    """
    path = 'instance/service-accounts/{0}/token'.format(service_account)
    token_json = get(request, path)
    expiry = _helpers.utcnow() + datetime.timedelta(seconds=token_json['expires_in'])
    return token_json['access_token'], expiry
|
def get_scratch_path ( self , local_file ) :
    """Map ``local_file`` to its corresponding path in the scratch area."""
    dirname, basename = self.split_local_path(local_file)
    return self.construct_scratch_path(dirname, basename)
|
def _process_slice ( self , arg ) :
    """Normalize the input slice for use when calling the C code.

    Fills in missing start/stop/step, resolves negative indices against the
    row count, and clamps the result to ``[0, nrows]``.

    :param arg: a ``slice`` object over rows
    :return: an equivalent ``slice`` with concrete, in-range bounds
    :raises IndexError: if the resolved start is still negative
    """
    nrows = self._info['nrows']
    step = 1 if arg.step is None else arg.step
    start = 0 if arg.start is None else arg.start
    stop = nrows if arg.stop is None else arg.stop
    if start < 0:
        start = nrows + start
        if start < 0:
            raise IndexError("Index out of bounds")
    if stop < 0:
        # Bug fix: the original computed ``nrows + start + 1`` here, resolving
        # the negative stop against the (already-resolved) start rather than
        # against the stop itself.
        stop = nrows + stop + 1
    if stop < start:
        # will return an empty struct
        stop = start
    if stop > nrows:
        stop = nrows
    return slice(start, stop, step)
|
def mappability ( args ) :
    """%prog mappability reference.fasta

    Generate 50mer mappability for reference genome. Commands are based on gem
    mapper. See instructions:
    <https://github.com/xuefzhao/Reference.Mappability>
    """
    p = OptionParser ( mappability . __doc__ )
    p . add_option ( "--mer" , default = 50 , type = "int" , help = "User mer size" )
    p . set_cpus ( )
    opts , args = p . parse_args ( args )
    if len ( args ) != 1 :
        sys . exit ( not p . print_help ( ) )
    ref , = args
    K = opts . mer
    pf = ref . rsplit ( "." , 1 ) [ 0 ]
    # Build a make-style dependency chain; each mm.add(input, output, cmd)
    # step below reruns only when its input is newer than its output.
    mm = MakeManager ( )
    # 1) index the reference with gem-indexer -> .gem
    gem = pf + ".gem"
    cmd = "gem-indexer -i {} -o {}" . format ( ref , pf )
    mm . add ( ref , gem , cmd )
    # 2) compute K-mer mappability -> .mappability
    mer = pf + ".{}mer" . format ( K )
    mapb = mer + ".mappability"
    cmd = "gem-mappability -I {} -l {} -o {} -T {}" . format ( gem , K , mer , opts . cpus )
    mm . add ( gem , mapb , cmd )
    # 3) convert mappability to wiggle -> .wig
    wig = mer + ".wig"
    cmd = "gem-2-wig -I {} -i {} -o {}" . format ( gem , mapb , mer )
    mm . add ( mapb , wig , cmd )
    # 4) wig -> bigWig (expects a "{mer}.sizes" chromosome-size file)
    bw = mer + ".bw"
    cmd = "wigToBigWig {} {}.sizes {}" . format ( wig , mer , bw )
    mm . add ( wig , bw , cmd )
    # 5) bigWig -> bedGraph
    bg = mer + ".bedGraph"
    cmd = "bigWigToBedGraph {} {}" . format ( bw , bg )
    mm . add ( bw , bg , cmd )
    # 6) keep intervals with mappability score 1 and merge them
    merged = mer + ".filtered-1.merge.bed"
    cmd = "python -m jcvi.formats.bed filterbedgraph {} 1" . format ( bg )
    mm . add ( bg , merged , cmd )
    # Emit the generated pipeline script.
    mm . write ( )
|
def parse_pseudo_lang ( self , sel , m , has_selector ) :
    """Parse the ``:lang()`` pseudo-class.

    Each comma-separated value is split on ``-`` and compiled into a regular
    expression implementing extended language-range matching (RFC 4647 style,
    where a wildcard must not match the extension singleton ``x``); the
    compiled patterns are appended to ``sel.lang``.
    """
    values = m.group('values')
    patterns = []
    for token in RE_VALUES.finditer(values):
        if token.group('split'):
            continue
        value = token.group('value')
        if value.startswith(('"', "'")):
            parts = css_unescape(value[1:-1], True).split('-')
        else:
            parts = css_unescape(value).split('-')
        new_parts = []
        first = True
        for part in parts:
            if part == '*' and first:
                # Bug fix: these patterns were previously non-raw strings, so
                # ``\b`` was a literal backspace character instead of the
                # intended word-boundary assertion.
                new_parts.append(r'(?!x\b)[a-z0-9]+?')
            elif part != '*':
                new_parts.append(('' if first else r'(-(?!x\b)[a-z0-9]+)*?\-') + re.escape(part))
            if first:
                first = False
        patterns.append(re.compile(r'^{}(?:-.*)?$'.format(''.join(new_parts)), re.I))
    sel.lang.append(ct.SelectorLang(patterns))
    has_selector = True
    return has_selector
|
def get_fws ( value ) :
    """FWS = 1*WSP

    This isn't the RFC definition.  We're using fws to represent tokens where
    folding can be done, but when we are parsing the *un*folding has already
    been done so we don't need to watch out for CRLF.
    """
    stripped = value.lstrip()
    ws_len = len(value) - len(stripped)
    fws = WhiteSpaceTerminal(value[:ws_len], 'fws')
    return fws, stripped
|
def partition_ordered ( sequence , key = None ) :
    """Partition an already-ordered sequence by key.

    Parameters
        sequence : iterable data, expected to already be ordered by ``key``.
        key : partition key function.

    Yields
        tuples of (partition key, list of items) pairs.

    Examples
        1. By object attributes:

        >> attributes = ('height', 'weight')
        >> OrderedDict(partition_ordered(sequence, attrgetter(*attributes)))

        2. By index items:

        >> sequence = ['112', '124', '289', '220', 'Z23']
        >> list(partition_ordered(sequence, itemgetter(0)))
    """
    for part_key, group in groupby(sequence, key=key):
        yield part_key, list(group)
|
def auto_delete_cohort ( instance , ** kwargs ) :
    "Deletes any auto-created cohort named after the instance."
    qs = Cohort.objects.filter(autocreated=True)
    if isinstance(instance, Project):
        qs = qs.filter(project=instance)
    elif isinstance(instance, Batch):
        qs = qs.filter(batch=instance)
    else:
        # Not a Project or Batch signal: nothing to clean up.
        return
    n = qs.count()
    qs.delete()
    log.info('Delete {0} autocreated cohorts for {1}'.format(n, instance))
|
def main ( args_list = None ) :
    """Script which loads variants and annotates them with overlapping genes
    and predicted coding effects.

    Example usage:
        varcode
            --vcf mutect.vcf --vcf strelka.vcf --maf tcga_brca.maf --variant chr1 498584 C G --json-variants more_variants.json

    :param args_list: command-line arguments; defaults to ``sys.argv[1:]``
    """
    print_version_info ( )
    if args_list is None :
        args_list = sys . argv [ 1 : ]
    args = arg_parser . parse_args ( args_list )
    # Collect variants from all supported input sources (vcf/maf/json/CLI).
    variants = variant_collection_from_args ( args )
    effects = variants . effects ( )
    if args . only_coding :
        # Drop effects that do not change the coding sequence.
        effects = effects . drop_silent_and_noncoding ( )
    if args . one_per_variant :
        # Keep only the single highest-priority effect for each variant.
        variant_to_effect_dict = effects . top_priority_effect_per_variant ( )
        effects = effects . clone_with_new_elements ( list ( variant_to_effect_dict . values ( ) ) )
    effects_dataframe = effects . to_dataframe ( )
    logger . info ( '\n%s' , effects )
    if args . output_csv :
        effects_dataframe . to_csv ( args . output_csv , index = False )
|
def angleOfView ( XY , shape = None , a = None , f = None , D = None , center = None ) :
    '''Vignetting falloff from the angular aperture, following:
    M. Koentges, M. Siebert, and D. Hinken, "Quantitative analysis of
    PV-modules by electroluminescence images for quality control", 2009

    XY     --> (x, y) coordinate arrays to evaluate
    f      --> Focal length
    D      --> Diameter of the aperture
               BOTH, D AND f NEED TO HAVE SAME UNIT [PX, mm...]
    a      --> Angular aperture (computed from f and D when not given)
    center --> optical center [y, x]; defaults to the middle of ``shape``
    '''
    if a is None:
        assert f is not None and D is not None
        # https://en.wikipedia.org/wiki/Angular_aperture
        a = 2 * np.arctan2(D / 2, f)
    x, y = XY
    try:
        c0, c1 = center
    except (TypeError, ValueError):
        # Bug fix: was a bare ``except`` which swallowed everything,
        # including KeyboardInterrupt.  center is None (or not a
        # 2-sequence), so fall back to the image center.
        s0, s1 = shape
        c0, c1 = s0 / 2, s1 / 2
    rx = (x - c0) ** 2
    ry = (y - c1) ** 2
    return 1 / (1 + np.tan(a) * ((rx + ry) / c0)) ** 0.5
|
def filter ( self , * args , ** kwargs ) :
    """See :py:meth:`nornir.core.inventory.Inventory.filter`

    Returns:
        :obj:`Nornir`: A new object with same configuration as ``self`` but
        filtered inventory.
    """
    clone = Nornir(**self.__dict__)
    clone.inventory = self.inventory.filter(*args, **kwargs)
    return clone
|
def var_explained ( y_true , y_pred ) :
    """Fraction of variance in ``y_true`` explained by ``y_pred``."""
    residual_var = K.var(y_true - y_pred)
    total_var = K.var(y_true)
    return 1 - residual_var / total_var
|
def results_class_wise_metrics ( self ) :
    """Class-wise metrics

    Returns
        dict
            results in a dictionary format
    """
    results = {}
    for scene_label in self.scene_label_list:
        counts = self.scene_wise[scene_label]
        results.setdefault(scene_label, {})
        results[scene_label]['count'] = {
            'Ncorr': counts['Ncorr'],
            'Nref': counts['Nref'],
            'Nsys': counts['Nsys'],
        }
        results[scene_label]['accuracy'] = {
            'accuracy': metric.accuracy_corr(Ncorr=counts['Ncorr'], N=counts['Nref'])
        }
    return results
|
def Increment ( self , delta , fields = None ) :
    """Increments the counter value by a given non-negative delta."""
    if delta < 0:
        raise ValueError("Counter increment should not be < 0 (received: %d)" % delta)
    key = _FieldsToKey(fields)
    self._metric_values[key] = self.Get(fields=fields) + delta
|
def ph2full ( ptrans , htrans ) :
    """Combine a p-state transition matrix with h-state matrices into the
    full transition matrix.

    The full transmat has N = n_pstates * n_hstates states.
    """
    n_pstates = len(ptrans)
    n_hstates = len(htrans[0, 0])
    total = n_pstates * n_hstates
    full = np.zeros((total, total))
    for p in range(n_pstates):
        for h in range(n_hstates):
            # Outer product of the p-transition row with the h-transition
            # column, flattened into one row of the full matrix.
            full[p * n_hstates + h] = (ptrans[p, :, np.newaxis] * htrans[p, :, h]).flatten()
    return full
|
def _install_toolplus ( args ) :
    """Install additional tools we cannot distribute, updating local manifest.

    Currently only handles the GATK/MuTect jars; any other tool name raises
    ValueError.

    :param args: parsed CLI arguments; ``args.toolplus`` is an iterable of
        objects with ``name`` and ``fname`` attributes.
    """
    manifest_dir = os . path . join ( _get_data_dir ( ) , "manifest" )
    toolplus_manifest = os . path . join ( manifest_dir , "toolplus-packages.yaml" )
    system_config = os . path . join ( _get_data_dir ( ) , "galaxy" , "bcbio_system.yaml" )
    # Handle toolplus installs inside Docker container
    if not os . path . exists ( system_config ) :
        # Inside the container the system config lives under config/ instead.
        docker_system_config = os . path . join ( _get_data_dir ( ) , "config" , "bcbio_system.yaml" )
        if os . path . exists ( docker_system_config ) :
            system_config = docker_system_config
    toolplus_dir = os . path . join ( _get_data_dir ( ) , "toolplus" )
    for tool in args . toolplus :
        if tool . name in set ( [ "gatk" , "mutect" ] ) :
            print ( "Installing %s" % tool . name )
            _install_gatk_jar ( tool . name , tool . fname , toolplus_manifest , system_config , toolplus_dir )
        else :
            raise ValueError ( "Unexpected toolplus argument: %s %s" % ( tool . name , tool . fname ) )
|
def wasModified ( self ) :
    """Check whether this module has been modified on disk since the last
    time it was cached.

    @return: True if it has been modified (mtime >= the cached timestamp),
        False if not.
    """
    self.filePath.restat()
    return self.filePath.getmtime() >= self.lastModified
|
def from_string ( string ) :
    """Build a Cssr object from its string representation.

    Args:
        string (str): A string representation of a CSSR.

    Returns:
        Cssr object.
    """
    lines = string.split("\n")
    lengths = [float(tok) for tok in lines[0].split()]
    angles = [float(tok) for tok in lines[1].split()[0:3]]
    latt = Lattice.from_lengths_and_angles(lengths, angles)
    species = []
    coords = []
    atom_re = re.compile(r"\d+\s+(\w+)\s+([0-9\-\.]+)\s+([0-9\-\.]+)\s+([0-9\-\.]+)")
    # Atom records start on the fifth line; skip anything that doesn't match.
    for line in lines[4:]:
        m = atom_re.match(line.strip())
        if m:
            species.append(m.group(1))
            coords.append([float(m.group(i)) for i in range(2, 5)])
    return Cssr(Structure(latt, species, coords))
|
def debug ( self ) :
    """Return True when a DEBUG marker file exists in the TC temp path."""
    marker = os.path.join(self.tcex.args.tc_temp_path, 'DEBUG')
    return os.path.isfile(marker)
|
def get_owner_asset_ids ( self , address ) :
    """Get the list of assets owned by an address owner.

    :param address: ethereum account address, hex str
    :return: list of DIDs
    """
    event_filter = self._get_event_filter(owner=address)
    entries = event_filter.get_all_entries(max_tries=5)
    return [id_to_did(entry.args['_did']) for entry in entries]
|
def set_server ( self , pos , key , value ) :
    """Store ``value`` under ``key`` for the port at index ``pos``."""
    entry = self._ports_list[pos]
    entry[key] = value
|
def execute ( self , context , goals ) :
    """Executes the supplied goals and their dependencies against the given context.

    :param context: The pants run context.
    :param list goals: A list of ``Goal`` objects representing the command line
      goals explicitly requested.
    :returns int: An exit code of 0 upon success and non-zero otherwise.
    """
    try:
        self.attempt(context, goals)
    except TaskError as e:
        message = str(e)
        print('\nFAILURE: {0}\n'.format(message) if message else '\nFAILURE\n')
        return e.exit_code
    return 0
|
def push ( self , value ) :
    """Sneak ``value`` in at the FRONT of the queue."""
    if self.closed and not self.allow_add_after_close:
        Log.error("Do not push to closed queue")
    with self.lock:
        self._wait_for_queue_space()
        if not self.closed:
            self.queue.appendleft(value)
        return self
|
def update_package_versions ( self , batch_request , feed_id ) :
    """UpdatePackageVersions.

    [Preview API] Update several packages from a single feed in a single
    request.  The updates to the packages do not happen atomically.

    :param batch_request: Information about the packages to update, the
        operation to perform, and its associated data.
    :param str feed_id: Name or ID of the feed.
    """
    route_values = {}
    if feed_id is not None:
        route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
    content = self._serialize.body(batch_request, 'NuGetPackagesBatchRequest')
    self._send(http_method='POST',
               location_id='00c58ea7-d55f-49de-b59f-983533ae11dc',
               version='5.0-preview.1',
               route_values=route_values,
               content=content)
|
def view_500 ( request , url = None ) :
    """Render the 500 template and return it with HTTP status 500."""
    response = render_to_response("500.html", context_instance=RequestContext(request))
    response.status_code = 500
    return response
|
def select ( self , Class , set = None , recursive = True , ignore = True , node = None ) :
    """See :meth:`AbstractElement.select`"""
    if not self.include:
        return iter([])
    # Delegate to the text node of the subdocument.
    return self.subdoc.data[0].select(Class, set, recursive, ignore, node)
|
def network_profile_name_list ( self , obj ) :
    """Get AP profile names.

    Calls the native WlanGetProfileList for the interface GUID in
    ``obj['guid']`` and copies each profile's name out of the returned
    ctypes structures.

    :param obj: dict describing an interface; must contain a 'guid' entry
    :return: list of profile name strings
    """
    profile_list = pointer ( WLAN_PROFILE_INFO_LIST ( ) )
    self . _wlan_get_profile_list ( self . _handle , byref ( obj [ 'guid' ] ) , byref ( profile_list ) )
    # Reinterpret the variable-length array as WLAN_PROFILE_INFO records.
    profiles = cast ( profile_list . contents . ProfileInfo , POINTER ( WLAN_PROFILE_INFO ) )
    profile_name_list = [ ]
    for i in range ( profile_list . contents . dwNumberOfItems ) :
        # Build the Python string character by character from the fixed-size
        # wide-char name buffer.
        profile_name = ''
        for j in range ( len ( profiles [ i ] . strProfileName ) ) :
            profile_name += profiles [ i ] . strProfileName [ j ]
        profile_name_list . append ( profile_name )
    return profile_name_list
|
def yosys_area_delay ( library , abc_cmd = None , block = None ) :
    """Synthesize with Yosys and return estimate of area and delay.

    :param library: stdcell library file to target in liberty format
    :param abc_cmd: string of commands for yosys to pass to abc for synthesis
    :param block: pyrtl block to analyze
    :return: a tuple of numbers: area, delay

    The area and delay are returned in units as defined by the stdcell
    library.  In the standard vsc 130nm library, the area is in a number of
    "tracks", each of which is about 1.74 square um (see area estimation
    for more details) and the delay is in ps.

    http://www.vlsitechnology.org/html/vsc_description.html

    May raise `PyrtlError` if yosys is not configured correctly, and
    `PyrtlInternalError` if the call to yosys was not successful.
    """
    if abc_cmd is None:
        abc_cmd = 'strash;scorr;ifraig;retime;dch,-f;map;print_stats;'
    else:
        # first, replace whitespace with commas as per yosys requirements
        # (bug fix: the re.sub result was previously discarded, so the
        # whitespace normalization never took effect)
        abc_cmd = re.sub(r"\s+", ',', abc_cmd)
        # then append with "print_stats" to generate the area and delay info
        abc_cmd = '%s;print_stats;' % abc_cmd

    def extract_area_delay_from_yosys_output(yosys_output):
        # Pull area/delay from the "ABC: netlist" report line.
        # NOTE(review): subprocess.check_output returns bytes on Python 3;
        # this split assumes str -- confirm against supported versions.
        report_lines = [line for line in yosys_output.split('\n') if 'ABC: netlist' in line]
        area = re.match(r'.*area\s*=\s*([0-9\.]*)', report_lines[0]).group(1)
        delay = re.match(r'.*delay\s*=\s*([0-9\.]*)', report_lines[0]).group(1)
        return float(area), float(delay)

    yosys_arg_template = """-p
    read_verilog %s;
    synth -top toplevel;
    dfflibmap -liberty %s;
    abc -liberty %s -script +%s
    """
    temp_d, temp_path = tempfile.mkstemp(suffix='.v')
    try:
        # write the verilog to a temp
        with os.fdopen(temp_d, 'w') as f:
            OutputToVerilog(f, block=block)
        # call yosys on the temp, and grab the output
        yosys_arg = yosys_arg_template % (temp_path, library, library, abc_cmd)
        yosys_output = subprocess.check_output(['yosys', yosys_arg])
        area, delay = extract_area_delay_from_yosys_output(yosys_output)
    except (subprocess.CalledProcessError, ValueError) as e:
        print('Error with call to yosys...', file=sys.stderr)
        print('---------------------------------------------', file=sys.stderr)
        print(e.output, file=sys.stderr)
        print('---------------------------------------------', file=sys.stderr)
        # fixed typo in the error message: was 'Yosys callfailed'
        raise PyrtlError('Yosys call failed')
    except OSError:
        print('Error with call to yosys...', file=sys.stderr)
        raise PyrtlError('Call to yosys failed (not installed or on path?)')
    finally:
        os.remove(temp_path)
    return area, delay
|
def installed ( name , features = None , recurse = False , restart = False , source = None , exclude = None ) :
    '''Install a Windows feature.

    To install a single feature, use the ``name`` parameter. To install
    multiple features, use the ``features`` parameter (when given it replaces
    ``name``, which then only labels the state).

    .. note::
        Some features require a reboot after (un)installation. Until the
        server is restarted, other features cannot be installed!

    Args:
        name (str): Short feature name (the right column in
            ``win_servermanager.list_available``); a single feature or a
            comma-delimited string of features (no spaces). A list is not
            allowed here; use ``features`` for lists.
        features (Optional[list]): Features to install; used instead of
            ``name`` when supplied.

            .. versionadded:: 2018.3.0
        recurse (Optional[bool]): Also install all sub-features; if the
            feature is installed but some sub-features are not, this installs
            the missing ones.
        restart (Optional[bool]): Restart the computer when required by the
            installed role/feature. Default is False.
        source (Optional[str]): Path to the source files if missing from the
            target system; None uses Windows Update. Default is None.
        exclude (Optional[str]): Feature(s) to exclude (single name,
            comma-delimited string, or list).

            .. warning::
                As PowerShell's ``Add-WindowsFeature`` /
                ``Install-WindowsFeature`` have no exclude option, excluded
                features are installed along with the rest and then removed —
                **even if they were not sub-features of the installed
                items**.

    Example:

    .. code-block:: yaml

        # Installs the IIS Web Server Role (Web-Server)
        IIS-WebServerRole:
          win_servermanager.installed:
            - recurse: True
            - name: Web-Server

        # Install multiple features, exclude the Web-Server
        install_multiple_features:
          win_servermanager.installed:
            - recurse: True
            - features:
              - RemoteAccess
              - XPS-Viewer
              - SNMP-Service
            - exclude:
              - Web-Server
    '''
    ret = { 'name' : name , 'result' : True , 'changes' : { } , 'comment' : '' }
    # Check if features is not passed , use name . Split commas
    if features is None :
        features = name . split ( ',' )
    # Make sure features is a list , split commas
    if not isinstance ( features , list ) :
        features = features . split ( ',' )
    # Determine if the feature is installed
    old = __salt__ [ 'win_servermanager.list_installed' ] ( )
    cur_feat = [ ]
    for feature in features :
        if feature not in old :
            ret [ 'changes' ] [ feature ] = 'Will be installed recurse={0}' . format ( recurse )
        elif recurse :
            ret [ 'changes' ] [ feature ] = 'Already installed but might install sub-features'
        else :
            cur_feat . append ( feature )
    if cur_feat :
        cur_feat . insert ( 0 , 'The following features are already installed:' )
        ret [ 'comment' ] = '\n- ' . join ( cur_feat )
    if not ret [ 'changes' ] :
        # Nothing to do: everything requested is already present.
        return ret
    if __opts__ [ 'test' ] :
        # Test mode: report pending changes without applying them.
        ret [ 'result' ] = None
        return ret
    # Install the features
    status = __salt__ [ 'win_servermanager.install' ] ( features , recurse = recurse , restart = restart , source = source , exclude = exclude )
    ret [ 'result' ] = status [ 'Success' ]
    # Show items failed to install
    fail_feat = [ ]
    new_feat = [ ]
    rem_feat = [ ]
    for feature in status [ 'Features' ] : # Features that failed to install or be removed
        if not status [ 'Features' ] [ feature ] . get ( 'Success' , True ) :
            fail_feat . append ( '- {0}' . format ( feature ) )
        # Features that installed
        elif '(exclude)' not in status [ 'Features' ] [ feature ] [ 'Message' ] :
            new_feat . append ( '- {0}' . format ( feature ) )
        # Show items that were removed because they were part of ` exclude `
        elif '(exclude)' in status [ 'Features' ] [ feature ] [ 'Message' ] :
            rem_feat . append ( '- {0}' . format ( feature ) )
    if fail_feat :
        fail_feat . insert ( 0 , 'Failed to install the following:' )
    if new_feat :
        new_feat . insert ( 0 , 'Installed the following:' )
    if rem_feat :
        rem_feat . insert ( 0 , 'Removed the following (exclude):' )
    ret [ 'comment' ] = '\n' . join ( fail_feat + new_feat + rem_feat )
    # Get the changes
    new = __salt__ [ 'win_servermanager.list_installed' ] ( )
    ret [ 'changes' ] = salt . utils . data . compare_dicts ( old , new )
    return ret
|
def binary2pb(b, proto_id, proto_fmt_type):
    """Deserialize binary content into a protobuf message.

    :param b: binary payload to decode
    :param proto_id: protocol id used to look up the message prototype in ``pb_map``
    :param proto_fmt_type: wire format of the payload (``ProtoFMT.Json`` or
        ``ProtoFMT.Protobuf``)
    :return: decoded pb message, or None if ``proto_id`` has no registered prototype
    :raises Exception: if ``proto_fmt_type`` is not a known format
    """
    rsp = pb_map[proto_id]
    if rsp is None:
        return None
    if proto_fmt_type == ProtoFMT.Json:
        return json2pb(type(rsp), b.decode('utf-8'))
    elif proto_fmt_type == ProtoFMT.Protobuf:
        # Build a fresh message instance so the shared prototype stored in
        # pb_map is never mutated by parsing.
        rsp = type(rsp)()
        if IS_PY2:
            rsp.ParseFromString(str(b))
        else:
            rsp.ParseFromString(b)
        return rsp
    else:
        # Bug fix: the message previously said "binary2str", which is not
        # this function's name and made the error misleading.
        raise Exception("binary2pb: unknown proto format.")
|
def add_metrics(last_metrics: Collection[Rank0Tensor], mets: Union[Rank0Tensor, Collection[Rank0Tensor]]):
    "Return a dictionary for updating `last_metrics` with `mets`."
    # Normalize both arguments to lists, then concatenate.
    combined = listify(last_metrics) + listify(mets)
    return {'last_metrics': combined}
|
def multipart_upload_lister(bucket, key_marker='', upload_id_marker='', headers=None):
    """Generator yielding every in-progress multipart upload in *bucket*.

    Transparently follows result-set pagination: after each page it advances
    the key/upload-id markers reported by the server and keeps fetching while
    the listing is truncated.

    :param bucket: boto Bucket to list multipart uploads from
    :param key_marker: start listing after this key
    :param upload_id_marker: start listing after this upload id
    :param headers: optional extra HTTP headers passed to each request
    """
    # Fix: removed the dead local `k = None` left over from an older version.
    more_results = True
    while more_results:
        rs = bucket.get_all_multipart_uploads(key_marker=key_marker,
                                              upload_id_marker=upload_id_marker,
                                              headers=headers)
        for upload in rs:
            yield upload
        # Advance the pagination markers for the next request.
        key_marker = rs.next_key_marker
        upload_id_marker = rs.next_upload_id_marker
        more_results = rs.is_truncated
|
def analyze(self, using=None, **kwargs):
    """Run a text through the index's analysis process and return the token
    breakdown of the text.

    Any additional keyword arguments are forwarded unchanged to
    ``Elasticsearch.indices.analyze``.
    """
    connection = self._get_connection(using)
    return connection.indices.analyze(index=self._name, **kwargs)
|
def _import_object ( self , path , look_for_cls_method ) :
"""Imports the module that contains the referenced method .
Args :
path : python path of class / function
look _ for _ cls _ method ( bool ) : If True , treat the last part of path as class method .
Returns :
Tuple . ( class object , class name , method to be called )"""
|
last_nth = 2 if look_for_cls_method else 1
path = path . split ( '.' )
module_path = '.' . join ( path [ : - last_nth ] )
class_name = path [ - last_nth ]
module = importlib . import_module ( module_path )
if look_for_cls_method and path [ - last_nth : ] [ 0 ] == path [ - last_nth ] :
class_method = path [ - last_nth : ] [ 1 ]
else :
class_method = None
return getattr ( module , class_name ) , class_name , class_method
|
def _expand_variable_match(positional_vars, named_vars, match):
    """Expand a matched template variable with its value.

    Args:
        positional_vars (list): positional variable values; consumed
            (popped from the front) as positional expressions are expanded.
        named_vars (dict): named variable values.
        match (re.Match): the template-expression match, providing the
            ``positional`` and ``name`` groups.

    Returns:
        str: the expansion to substitute for the match.

    Raises:
        ValueError: if a required positional or named variable was not
            supplied, or the match is neither positional nor named.
    """
    positional = match.group("positional")
    name = match.group("name")
    if name is not None:
        try:
            value = named_vars[name]
        except KeyError:
            raise ValueError(
                "Named variable '{}' not specified and needed by template "
                "`{}` at position {}".format(name, match.string, match.start()))
        return six.text_type(value)
    if positional is not None:
        try:
            value = positional_vars.pop(0)
        except IndexError:
            raise ValueError(
                "Positional variable not specified and needed by template "
                "`{}` at position {}".format(match.string, match.start()))
        return six.text_type(value)
    raise ValueError("Unknown template expression {}".format(match.group(0)))
|
def _init_idxs_float ( self , usr_hdrs ) :
"""List of indexes whose values will be floats ."""
|
self . idxs_float = [ Idx for Hdr , Idx in self . hdr2idx . items ( ) if Hdr in usr_hdrs and Hdr in self . float_hdrs ]
|
async def prepare_decrypter(client, cdn_client, cdn_redirect):
    """Prepares a new CDN decrypter and fetches the first file chunk.

    :param client: a TelegramClient connected to the main servers.
    :param cdn_client: a new client connected to the CDN.
    :param cdn_redirect: the redirect file object that caused this call.
    :return: (CdnDecrypter, first chunk file data)
    """
    # AES-CTR state: the key comes from the redirect; the IV is its first
    # 12 bytes followed by a 4-byte counter starting at offset 0, big
    # endian (bytes(4) is four zero bytes).
    cdn_aes = AESModeCTR(key=cdn_redirect.encryption_key,
                         iv=cdn_redirect.encryption_iv[:12] + bytes(4))
    # We assume that cdn_redirect.cdn_file_hashes are ordered by offset,
    # and that there will be enough of these to retrieve the whole file.
    decrypter = CdnDecrypter(cdn_client, cdn_redirect.file_token,
                             cdn_aes, cdn_redirect.cdn_file_hashes)
    # Request the first chunk, bounded by the first hash's offset/limit.
    cdn_file = await cdn_client(GetCdnFileRequest(
        file_token=cdn_redirect.file_token,
        offset=cdn_redirect.cdn_file_hashes[0].offset,
        limit=cdn_redirect.cdn_file_hashes[0].limit))
    if isinstance(cdn_file, CdnFileReuploadNeeded):
        # The CDN asked for a re-upload; we need to use the original
        # (main-DC) client here, then retry the fetch via the decrypter.
        await client(ReuploadCdnFileRequest(
            file_token=cdn_redirect.file_token,
            request_token=cdn_file.request_token))
        # We want to always return a valid upload.CdnFile
        cdn_file = decrypter.get_file()
    else:
        # In CTR mode encryption and decryption are the same operation,
        # so "encrypt" here decrypts the received chunk.
        cdn_file.bytes = decrypter.cdn_aes.encrypt(cdn_file.bytes)
        # Consume the first hash entry and check the chunk against it.
        cdn_hash = decrypter.cdn_file_hashes.pop(0)
        decrypter.check(cdn_file.bytes, cdn_hash)
    return decrypter, cdn_file
|
def delete(self, key):
    """Remove *key* from the trie and refresh the root hash.

    :param key: a string with length of [0, 32]
    :raises Exception: if *key* is not a string or exceeds 32 characters
    """
    # Validate the key before touching the trie.
    if not is_string(key):
        raise Exception("Key must be string")
    if len(key) > 32:
        raise Exception("Max key length is 32")
    nibble_path = bin_to_nibbles(to_string(key))
    self.root_node = self._delete_and_delete_storage(self.root_node, nibble_path)
    self._update_root_hash()
|
def list_to_1d_numpy(data, dtype=np.float32, name='list'):
    """Convert ``data`` to a 1-D numpy array of the requested ``dtype``."""
    if is_numpy_1d_array(data):
        # Already the right container; avoid a copy when the dtype matches.
        if data.dtype == dtype:
            return data
        return data.astype(dtype=dtype, copy=False)
    if is_1d_list(data):
        return np.array(data, dtype=dtype, copy=False)
    if isinstance(data, Series):
        return data.values.astype(dtype)
    raise TypeError("Wrong type({0}) for {1}.\n"
                    "It should be list, numpy 1-D array or pandas Series".format(type(data).__name__, name))
|
def _get_node_name ( self , node ) :
"""Get the name of the node - check for node . name and
node . type . declname . Not sure why the second one occurs
exactly - it happens with declaring a new struct field
with parameters"""
|
res = getattr ( node , "name" , None )
if res is None :
return res
if isinstance ( res , AST . TypeDecl ) :
return res . declname
return res
|
def plot_predict(self, h=5, past_values=20, intervals=True, **kwargs):
    """Plots forecasts with the estimated model.

    Parameters
    ----------
    h : int (default: 5)
        How many steps ahead would you like to forecast?
    past_values : int (default: 20)
        How many past observations to show on the forecast graph?
    intervals : boolean
        Would you like to show prediction intervals for the forecast?
    **kwargs
        figsize : tuple, matplotlib figure size (default (10, 7)).

    Returns
    -------
    - Plot of the forecast (shown via matplotlib; nothing is returned).

    Raises
    ------
    Exception
        If no latent variables have been estimated yet.
    """
    figsize = kwargs.get('figsize', (10, 7))
    if self.latent_variables.estimated is False:
        raise Exception("No latent variables estimated!")
    else:
        import matplotlib.pyplot as plt
        import seaborn as sns
        # Retrieve data, dates and (transformed) latent variables
        mu, Y = self._model(self.latent_variables.get_z_values())
        date_index = self.shift_dates(h)
        t_z = self.transform_z()
        # Get mean prediction and simulations (for errors)
        mean_values = self._mean_prediction(mu, Y, h, t_z)
        # 15000 simulations when interval bands are drawn; only 2 when they
        # are not -- presumably the minimum _summarize_simulations accepts
        # (TODO confirm).
        if intervals is True:
            sim_values = self._sim_prediction(mu, Y, h, t_z, 15000)
        else:
            sim_values = self._sim_prediction(mu, Y, h, t_z, 2)
        error_bars, forecasted_values, plot_values, plot_index = self._summarize_simulations(mean_values, sim_values, date_index, h, past_values)
        plt.figure(figsize=figsize)
        if intervals is True:
            # Progressively smaller alphas so nested interval bands fade out.
            alpha = [0.15 * i / float(100) for i in range(50, 12, -2)]
            for count, pre in enumerate(error_bars):
                plt.fill_between(date_index[-h - 1:], forecasted_values - pre, forecasted_values + pre, alpha=alpha[count])
        plt.plot(plot_index, plot_values)
        plt.title("Forecast for " + self.data_name)
        plt.xlabel("Time")
        plt.ylabel(self.data_name)
        plt.show()
|
def system_monitor_sfp_alert_action(self, **kwargs):
    """Build the config element for system-monitor/sfp/alert/action and
    hand it to the callback. (Auto generated code.)

    :param action: text for the ``action`` leaf (required, popped from kwargs)
    :param callback: optional override for ``self._callback``
    :return: whatever the callback returns for the built config element
    """
    root = ET.Element("config")
    monitor_el = ET.SubElement(root, "system-monitor",
                               xmlns="urn:brocade.com:mgmt:brocade-system-monitor")
    sfp_el = ET.SubElement(monitor_el, "sfp")
    alert_el = ET.SubElement(sfp_el, "alert")
    action_el = ET.SubElement(alert_el, "action")
    action_el.text = kwargs.pop('action')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
|
def parse_config_file(self, path: str, final: bool = True) -> None:
    """Parses and loads the config file at the given path.

    The file is executed as Python code, so it is **not safe** to use
    untrusted config files. Anything in the file's global namespace that
    matches a defined option sets that option's value. Values may be given
    either as the option's declared type or as strings, which are parsed
    the same way as in `.parse_command_line`. Files are always read as
    utf-8, and the special variable ``__file__`` (the config file's
    absolute path) is available inside them.

    If ``final`` is ``False``, parse callbacks will not be run; useful for
    applications that combine configuration from multiple sources.
    """
    namespace = {"__file__": os.path.abspath(path)}
    with open(path, "rb") as f:
        exec_in(native_str(f.read()), namespace, namespace)
    for name in namespace:
        normalized = self._normalize_name(name)
        if normalized not in self._options:
            continue
        option = self._options[normalized]
        value = namespace[name]
        # multiple=True options accept either a real list or a
        # comma-separated string.
        if option.multiple and not isinstance(value, (list, str)):
            raise Error("Option %r is required to be a list of %s "
                        "or a comma-separated string" % (option.name, option.type.__name__))
        if type(value) == str and option.type != str:
            option.parse(value)
        else:
            option.set(value)
    if final:
        self.run_parse_callbacks()
|
def harden(overrides=None):
    """Hardening decorator.

    Main entry point for running the hardening stack: apply this decorator
    to charm hook(s) and set the charm config's 'harden' option to one or
    more supported modules; the corresponding hardening code then runs when
    the hook fires. Applying it to several hooks/functions is intended,
    since subsequent runs audit resources hardened earlier (and may perform
    compliance actions on detected infractions).

    :param overrides: Optional list of stack modules used to override those
        provided with 'harden' config.
    :returns: Returns value returned by decorated function once executed.
    """
    if overrides is None:
        overrides = []

    def _harden_inner1(f):
        # Must stay py2.7 compatible, so no `nonlocal`: capture a dict that
        # can be mutated to remember whether we already logged for `f`.
        _logged = {'done': False}

        def _harden_inner2(*args, **kwargs):
            # Unit tests can knock out hardening entirely via this flag;
            # normally it won't get disabled.
            if _DISABLE_HARDENING_FOR_UNIT_TEST:
                return f(*args, **kwargs)
            if not _logged['done']:
                log("Hardening function '%s'" % (f.__name__), level=DEBUG)
                _logged['done'] = True
            # Modules are always performed in this fixed order.
            RUN_CATALOG = OrderedDict([('os', run_os_checks),
                                       ('ssh', run_ssh_checks),
                                       ('mysql', run_mysql_checks),
                                       ('apache', run_apache_checks)])
            enabled = overrides[:] or (config("harden") or "").split()
            if not enabled:
                log("No hardening applied to '%s'" % (f.__name__), level=DEBUG)
                return f(*args, **kwargs)
            modules_to_run = []
            for module, func in six.iteritems(RUN_CATALOG):
                if module in enabled:
                    enabled.remove(module)
                    modules_to_run.append(func)
            # Anything left in `enabled` did not match a known module.
            if enabled:
                log("Unknown hardening modules '%s' - ignoring" % (', '.join(enabled)), level=WARNING)
            for hardener in modules_to_run:
                log("Executing hardening module '%s'" % (hardener.__name__), level=DEBUG)
                hardener()
            return f(*args, **kwargs)
        return _harden_inner2
    return _harden_inner1
|
def cmdHISTORY(self, params):
    """Display the command history."""
    self.writeline('Command history\n')
    # History entries are sequences of characters; number them from 1.
    for position, entry in enumerate(self.history, start=1):
        self.writeline("%-5d : %s" % (position, ''.join(entry)))
|
def markov_network_bqm(MN):
    """Construct a binary quadratic model for a markov network.

    Parameters
    ----------
    MN : NetworkX graph
        A Markov Network as returned by :func:`.markov_network`.

    Returns
    -------
    bqm : :obj:`dimod.BinaryQuadraticModel`
        A binary quadratic model.
    """
    bqm = dimod.BinaryQuadraticModel.empty(dimod.BINARY)

    # Single-variable (node) potentials.
    for v, attrs in MN.nodes(data=True, default=None):
        pot = attrs.get('potential', None)
        if pot is None:
            continue
        # For single nodes we don't need to worry about order.
        p0 = pot[(0,)]
        p1 = pot[(1,)]
        bqm.add_variable(v, p1 - p0)
        bqm.add_offset(p0)

    # Pairwise (interaction) potentials.
    for u, v, attrs in MN.edges(data=True, default=None):
        pot = attrs.get('potential', None)
        if pot is None:
            continue
        # In python <= 3.5 the edge order might not be consistent, so use
        # the order that was stored with the edge.
        u, v = attrs['order']
        p00 = pot[(0, 0)]
        p01 = pot[(0, 1)]
        p10 = pot[(1, 0)]
        p11 = pot[(1, 1)]
        bqm.add_variable(u, p10 - p00)
        bqm.add_variable(v, p01 - p00)
        bqm.add_interaction(u, v, p11 - p10 - p01 + p00)
        bqm.add_offset(p00)
    return bqm
|
def _get_description ( self , element ) :
"""Returns the description of element .
: param element : The element .
: type element : hatemile . util . html . htmldomelement . HTMLDOMElement
: return : The description of element .
: rtype : str"""
|
description = None
if element . has_attribute ( 'title' ) :
description = element . get_attribute ( 'title' )
elif element . has_attribute ( 'aria-label' ) :
description = element . get_attribute ( 'aria-label' )
elif element . has_attribute ( 'alt' ) :
description = element . get_attribute ( 'alt' )
elif element . has_attribute ( 'label' ) :
description = element . get_attribute ( 'label' )
elif ( ( element . has_attribute ( 'aria-labelledby' ) ) or ( element . has_attribute ( 'aria-describedby' ) ) ) :
if element . has_attribute ( 'aria-labelledby' ) :
description_ids = re . split ( '[ \n\r\t]+' , element . get_attribute ( 'aria-labelledby' ) . strip ( ) )
else :
description_ids = re . split ( '[ \n\r\t]+' , element . get_attribute ( 'aria-describedby' ) . strip ( ) )
for description_id in description_ids :
element_description = self . parser . find ( '#' + description_id ) . first_result ( )
if element_description is not None :
description = element_description . get_text_content ( )
break
elif ( ( element . get_tag_name ( ) == 'INPUT' ) and ( element . has_attribute ( 'type' ) ) ) :
type_attribute = element . get_attribute ( 'type' ) . lower ( )
if ( ( ( type_attribute == 'button' ) or ( type_attribute == 'submit' ) or ( type_attribute == 'reset' ) ) and ( element . has_attribute ( 'value' ) ) ) :
description = element . get_attribute ( 'value' )
if not bool ( description ) :
description = element . get_text_content ( )
return re . sub ( '[ \n\r\t]+' , ' ' , description . strip ( ) )
|
def list_role_binding_for_all_namespaces(self, **kwargs):
    """list or watch objects of kind RoleBinding.

    Synchronous by default; pass ``async_req=True`` to get a request
    thread instead (call ``thread.get()`` for the result). All keyword
    arguments (``pretty``, ``_continue``, ``field_selector``,
    ``label_selector``, ``limit``, ``resource_version``,
    ``timeout_seconds``, ``watch``, ...) are forwarded unchanged to
    ``list_role_binding_for_all_namespaces_with_http_info``.

    :return: V1RoleBindingList, or the request thread when async.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant; with _return_http_data_only set it yields just the
    # deserialized data (or, for async_req, the request thread).
    return self.list_role_binding_for_all_namespaces_with_http_info(**kwargs)
|
def make_duplicate_request(request):
    """Since werkzeug request objects are immutable, this is needed to create
    an identical request object (with the method forced to 'GET') so it can
    be retried after a POST failure.
    """
    class FakeRequest(object):
        pass

    duplicate = FakeRequest()
    duplicate.method = 'GET'
    duplicate.path = request.path
    duplicate.headers = request.headers
    duplicate.GET = request.GET
    duplicate.POST = request.POST
    # `user` may be absent on unauthenticated requests.
    duplicate.user = getattr(request, 'user', None)
    duplicate.cookies = request.cookies
    duplicate.is_xhr = request.is_xhr
    return duplicate
|
def topil(self):
    """Returns a PIL.Image version of this Pix."""
    from PIL import Image
    # Leptonica manages data in words, so it implicitly does an endian
    # swap. Tell Pillow about this when it reads the data.
    pix = self
    if sys.byteorder == 'little':
        if self.mode == 'RGB':
            # Pillow's 'XBGR' raw mode reads the swapped byte order
            # directly, so the pix data itself is not swapped here.
            raw_mode = 'XBGR'
        elif self.mode == 'RGBA':
            raw_mode = 'ABGR'
        elif self.mode == '1':
            # 1-bit data: use Pillow's inverted 1-bit raw mode and
            # byte-swap the word data first.
            raw_mode = '1;I'
            pix = Pix(lept.pixEndianByteSwapNew(pix._cdata))
        else:
            raw_mode = self.mode
            pix = Pix(lept.pixEndianByteSwapNew(pix._cdata))
    else:
        raw_mode = self.mode
        # no endian swap needed
    size = (pix._cdata.w, pix._cdata.h)
    # Rows are padded to whole 32-bit words: wpl (words per line) * 4 bytes.
    bytecount = pix._cdata.wpl * 4 * pix._cdata.h
    buf = ffi.buffer(pix._cdata.data, bytecount)
    stride = pix._cdata.wpl * 4
    im = Image.frombytes(self.mode, size, buf, 'raw', raw_mode, stride)
    return im
|
def _report_command ( self , cmd , procs = None ) :
"""Writes a command to both stdout and to the commands log file
( self . pipeline _ commands _ file ) .
: param str cmd : command to report
: param str | list [ str ] procs : process numbers for processes in the command"""
|
if isinstance ( procs , list ) :
procs = "," . join ( map ( str , procs ) )
if procs :
line = "\n> `{cmd}` ({procs})\n" . format ( cmd = str ( cmd ) , procs = procs )
else :
line = "\n> `{cmd}`\n" . format ( cmd = str ( cmd ) )
print ( line )
with open ( self . pipeline_commands_file , "a" ) as myfile :
myfile . write ( line + "\n\n" )
|
def step(self, y, u, t, h):
    """Advance the state one step with the classical 4th-order Runge-Kutta
    method.

    This is called by solve, but can be called by the user who wants to
    run through an integration with a control force.

    :param y: state at time t
    :param u: control inputs at t (held constant across the step)
    :param t: time
    :param h: step size
    :return: state at time t + h
    """
    # Each k already contains the factor h, so the intermediate states are
    # y + k/2 and y + k3. (The previous code used y + 0.5*h*k1 etc.,
    # multiplying by h twice, which broke the method's 4th-order accuracy.)
    k1 = h * self.func(t, y, u)
    k2 = h * self.func(t + 0.5 * h, y + 0.5 * k1, u)
    k3 = h * self.func(t + 0.5 * h, y + 0.5 * k2, u)
    k4 = h * self.func(t + h, y + k3, u)
    return y + (k1 + 2 * k2 + 2 * k3 + k4) / 6.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.