signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def rse(label, pred):
    """Root relative squared error.

    Computed in the condensed form: the RMSE of the predictions divided
    by the standard deviation of the labels.
    """
    rmse = np.sqrt(np.mean(np.square(label - pred), axis=None))
    return rmse / np.std(label, axis=None)
|
def _scan_pages_for_same(self, progress_cb=_stub_progress):
    """Read back full page contents to determine which pages are unchanged.

    When this function exits, the ``same`` flag is set to either True or
    False for every page in ``self.page_list``.  In addition, sectors that
    need at least one page programmed will have the ``same`` flag set to
    False for all pages within that sector (the whole sector is erased, so
    every page in it must be reprogrammed).

    :param progress_cb: callable receiving a float progress fraction.
    :return: total verify weight consumed while reading pages.
    """
    progress = 0
    # Read page data if unknown - after this, page.same will be True or False.
    unknown_pages = [page for page in self.page_list if page.same is None]
    if unknown_pages:
        self._enable_read_access()
        for page in unknown_pages:
            # Reuse any partial read captured during the estimate phase so
            # only the remainder of the page is fetched from the target.
            if page.cached_estimate_data is not None:
                data = page.cached_estimate_data
                offset = len(data)
            else:
                data = []
                offset = 0
            assert len(page.data) == page.size
            data.extend(self.flash.target.read_memory_block8(page.addr + offset, page.size - offset))
            page.same = same(page.data, data)
            # This data isn't needed anymore.
            page.cached_estimate_data = None
            progress += page.get_verify_weight()
            # Update progress (guard against a zero total weight).
            if self.sector_erase_weight > 0:
                progress_cb(float(progress) / float(self.sector_erase_weight))
    # If we have to program any pages of a sector, then mark all pages of that
    # sector as needing to be programmed, since the sector will be erased.
    for sector in self.sector_list:
        if sector.are_any_pages_not_same():
            sector.mark_all_pages_not_same()
    return progress
|
def run(self):
    """Discover the mongo cluster and create a thread for each primary.

    Blocks in a watchdog loop until ``self.can_run`` is cleared or an
    oplog thread dies unexpectedly; on the way out, joins the oplog
    threads and persists oplog progress one final time.
    """
    # Reset the global minimum MongoDB version
    update_mininum_mongodb_version(None)
    self.main_conn = self.create_authed_client()
    LOG.always("Source MongoDB version: %s", self.main_conn.admin.command("buildInfo")["version"],)
    # Log the module name and version of every configured doc manager.
    for dm in self.doc_managers:
        name = dm.__class__.__module__
        module = sys.modules[name]
        version = "unknown"
        if hasattr(module, "__version__"):
            version = module.__version__
        elif hasattr(module, "version"):
            version = module.version
        LOG.always("Target DocManager: %s version: %s", name, version)
    self.read_oplog_progress()
    conn_type = None
    # "isdbgrid" only succeeds against mongos; a failure means we are
    # talking to a replica set rather than a sharded cluster.
    try:
        self.main_conn.admin.command("isdbgrid")
    except pymongo.errors.OperationFailure:
        conn_type = "REPLSET"
    if conn_type == "REPLSET":
        # Make sure we are connected to a replica set
        is_master = self.main_conn.admin.command("isMaster")
        if "setName" not in is_master:
            LOG.error('No replica set at "%s"! A replica set is required ' "to run mongo-connector. Shutting down..." % self.address)
            return
        # Establish a connection to the replica set as a whole
        self.main_conn.close()
        self.main_conn = self.create_authed_client(replicaSet=is_master["setName"])
        self.update_version_from_client(self.main_conn)
        # Non-sharded configuration: a single oplog thread tails the set.
        oplog = OplogThread(self.main_conn, self.doc_managers, self.oplog_progress, self.namespace_config, **self.kwargs)
        self.shard_set[0] = oplog
        LOG.info("MongoConnector: Starting connection thread %s" % self.main_conn)
        oplog.start()
        # Watchdog: stop everything if the single oplog thread dies.
        while self.can_run:
            shard_thread = self.shard_set[0]
            if not (shard_thread.running and shard_thread.is_alive()):
                LOG.error("MongoConnector: OplogThread" " %s unexpectedly stopped! Shutting down" % (str(self.shard_set[0])))
                self.oplog_thread_join()
                for dm in self.doc_managers:
                    dm.stop()
                return
            self.write_oplog_progress()
            time.sleep(1)
    else:  # sharded cluster
        while self.can_run:
            # The backup role does not provide the listShards privilege,
            # so use the config.shards collection instead.
            for shard_doc in retry_until_ok(lambda: list(self.main_conn.config.shards.find())):
                shard_id = shard_doc["_id"]
                if shard_id in self.shard_set:
                    # Known shard: only health-check its oplog thread.
                    shard_thread = self.shard_set[shard_id]
                    if not (shard_thread.running and shard_thread.is_alive()):
                        LOG.error("MongoConnector: OplogThread " "%s unexpectedly stopped! Shutting " "down" % (str(self.shard_set[shard_id])))
                        self.oplog_thread_join()
                        for dm in self.doc_managers:
                            dm.stop()
                        return
                    self.write_oplog_progress()
                    time.sleep(1)
                    continue
                # New shard: its "host" field is "<replSet>/<host1,host2,...>".
                try:
                    repl_set, hosts = shard_doc["host"].split("/")
                except ValueError:
                    cause = "The system only uses replica sets!"
                    LOG.exception("MongoConnector: %s", cause)
                    self.oplog_thread_join()
                    for dm in self.doc_managers:
                        dm.stop()
                    return
                shard_conn = self.create_authed_client(hosts, replicaSet=repl_set)
                self.update_version_from_client(shard_conn)
                oplog = OplogThread(shard_conn, self.doc_managers, self.oplog_progress, self.namespace_config, mongos_client=self.main_conn, **self.kwargs)
                self.shard_set[shard_id] = oplog
                msg = "Starting connection thread"
                LOG.info("MongoConnector: %s %s" % (msg, shard_conn))
                oplog.start()
    # Normal shutdown path (can_run cleared, possibly by a signal handler).
    if self.signal is not None:
        LOG.info("recieved signal %s: shutting down...", self.signal)
    self.oplog_thread_join()
    self.write_oplog_progress()
|
def provides(self, call_name):
    """Declare a stubbed call on this fake.

    The call acts as a stub -- no error is raised if it is never
    called::

        >>> session = Fake('session').provides('open').provides('close')
        >>> import fudge
        >>> fudge.clear_expectations()  # from any previously declared fakes
        >>> fudge.clear_calls()
        >>> session.open()
        >>> fudge.verify()  # close() not called but no error

    Declaring ``provides()`` multiple times for the same name is the
    same as declaring :func:`fudge.Fake.next_call`.
    """
    # A repeated declaration chains a new expected call instead.
    if call_name in self._declared_calls:
        return self.next_call(for_method=call_name)
    self._last_declared_call_name = call_name
    declared = Call(self, call_name)
    self._declare_call(call_name, declared)
    return self
|
def discover():
    """Automatically discover the paths to various data folders in this project
    and compose a Project instance.

    Looks for a ``data`` directory at ``../data``, ``./data`` and
    ``../../data`` (in that order) relative to the current working
    directory, and roots the Project at the parent of the matching
    ``data`` directory.

    Returns:
        A constructed Project object.
    Raises:
        ValueError: if the paths could not be figured out automatically.
            In this case, you have to create a Project manually using the
            initializer.
    """
    # Try ../data: we're most likely running a Jupyter notebook from the
    # 'notebooks' directory.
    candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, 'data'))
    if os.path.exists(candidate_path):
        return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
    # Try ./data
    candidate_path = os.path.abspath(os.path.join(os.curdir, 'data'))
    if os.path.exists(candidate_path):
        return Project(os.path.abspath(os.curdir))
    # Try ../../data.  Bug fix: this branch previously re-checked ../data
    # (same path as the first attempt, so it was dead code) and, had it
    # matched, would have rooted the Project two levels above the data
    # directory instead of one.
    candidate_path = os.path.abspath(os.path.join(os.curdir, os.pardir, os.pardir, 'data'))
    if os.path.exists(candidate_path):
        return Project(os.path.abspath(os.path.join(candidate_path, os.pardir)))
    # Out of ideas at this point.
    raise ValueError('Cannot discover the structure of the project. Make sure that the data directory exists')
|
def send(self, op, cmd, integration_id, *args):
    """Format and send a command to the Lutron controller.

    The outgoing line has the shape ``<op><cmd>,<integration_id>[,<arg>...]``.
    """
    fields = [cmd, str(integration_id)]
    fields.extend(str(arg) for arg in args)
    self._conn.send(op + ",".join(fields))
|
def content(self, request, id):
    """Return the decoded content of a gist.

    Arguments:
        request: an initial request object
        id: the gist identifier
    Returns:
        A dict mapping each file name in the gist to its base64-decoded
        UTF-8 text.
    """
    gist = self.send(request, id).json()
    return {
        name: base64.b64decode(entry['content']).decode('utf-8')
        for name, entry in gist['files'].items()
    }
|
def send(self, stream=False):
    """Send the HTTP request via the Python Requests module.

    This method sends the request to the remote endpoint.  Any exception
    raised by the underlying session (e.g. a temporary communications
    issue) is re-raised as ``RuntimeError``.

    Args:
        stream (bool): Boolean to enable stream download.
    Returns:
        Requests.Response: The Request response.
    Raises:
        RuntimeError: if making the HTTP request fails for any reason.
    """
    # api request (gracefully handle temporary communications issues with the API)
    try:
        response = self.session.request(self._http_method, self._url, auth=self._basic_auth, data=self._body, files=self._files, headers=self._headers, params=self._payload, stream=stream, timeout=self._timeout,)
    except Exception as e:
        err = 'Failed making HTTP request ({}).'.format(e)
        raise RuntimeError(err)
    # self.tcex.log.info(u'URL ({}): {}'.format(self._http_method, response.url))
    self.tcex.log.info(u'Status Code: {}'.format(response.status_code))
    return response
|
def many_init(cls, *args, **kwargs):
    """Create the parent ``ManyRelatedField`` when ``many=True`` is passed.

    Typically you won't need to override this method.

    Note that we're over-cautious in passing most arguments to both parent
    and child classes in order to try to cover the general case.  If you're
    overriding this method you'll probably want something much simpler, eg:

    .. code:: python

        @classmethod
        def many_init(cls, *args, **kwargs):
            kwargs['child'] = cls()
            return CustomManyRelatedField(*args, **kwargs)
    """
    list_kwargs = {'child_relation': cls(*args, **kwargs)}
    # Forward only the kwargs that the list field understands.
    list_kwargs.update(
        (key, value) for key, value in kwargs.items() if key in MANY_RELATION_KWARGS
    )
    return ManyRelatedFieldWithNoData(**list_kwargs)
|
def _raw_mul ( self , plaintext ) :
"""Returns the integer E ( a * plaintext ) , where E ( a ) = ciphertext
Args :
plaintext ( int ) : number by which to multiply the
` EncryptedNumber ` . * plaintext * is typically an encoding .
0 < = * plaintext * < : attr : ` ~ PaillierPublicKey . n `
Returns :
int : Encryption of the product of ` self ` and the scalar
encoded in * plaintext * .
Raises :
TypeError : if * plaintext * is not an int .
ValueError : if * plaintext * is not between 0 and
: attr : ` PaillierPublicKey . n ` ."""
|
if not isinstance ( plaintext , int ) :
raise TypeError ( 'Expected ciphertext to be int, not %s' % type ( plaintext ) )
if plaintext < 0 or plaintext >= self . public_key . n :
raise ValueError ( 'Scalar out of bounds: %i' % plaintext )
if self . public_key . n - self . public_key . max_int <= plaintext : # Very large plaintext , play a sneaky trick using inverses
neg_c = invert ( self . ciphertext ( False ) , self . public_key . nsquare )
neg_scalar = self . public_key . n - plaintext
return powmod ( neg_c , neg_scalar , self . public_key . nsquare )
else :
return powmod ( self . ciphertext ( False ) , plaintext , self . public_key . nsquare )
|
def _trace_full(frame, event, arg):
    """Trace callback that handles every executed line and event."""
    # Line events go to the dedicated line tracer; everything else to the
    # generic tracer.  Returning ourselves keeps tracing active.
    handler = _trace_line if event == "line" else _trace
    handler(frame, event, arg)
    return _trace_full
|
def add_ipv4(self, id_network_ipv4, id_equipamento, descricao):
    """Allocate an IP on a network and associate it to an equipment.

    Inserts a new IP for the network and associates it to the equipment.

    :param id_network_ipv4: ID for NetworkIPv4.
    :param id_equipamento: ID for Equipment.
    :param descricao: Description for the IP.
    :return: Dictionary of the form::

        {'ip': {'id': <id_ip>,
                'id_network_ipv4': <id_network_ipv4>,
                'oct1': <oct1>, 'oct2': <oct2>,
                'oct3': <oct3>, 'oct4': <oct4>,
                'descricao': <descricao>}}

    :raise InvalidParameterError: Invalid ID for NetworkIPv4 or Equipment,
        or invalid description value.
    :raise EquipamentoNaoExisteError: Equipment not found.
    :raise RedeIPv4NaoExisteError: NetworkIPv4 not found.
    :raise IPNaoDisponivelError: No network address available.
    :raise ConfigEnvironmentInvalidError: Invalid or unregistered
        environment configuration.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    ip_map = {
        'id_network_ipv4': id_network_ipv4,
        'description': descricao,
        'id_equipment': id_equipamento,
    }
    code, xml = self.submit({'ip': ip_map}, 'POST', 'ipv4/')
    return self.response(code, xml)
|
def input(self, opt):
    """Switch to the requested input.

    Arguments:
        opt: string -- either a key from the configured input table
            (e.g. "hdmi_1") or an input's display name (e.g. "HDMI 1").
    Returns:
        The result of sending the input command, or False when no
        configured input matches ``opt``.
    """
    for key, entry in self.command['input'].items():
        if opt in (key, entry['name']):
            return self._send_command(['input', key, 'command'])
    return False
|
def model_length(gene, domain):
    """Return the reference model length for a gene/domain combination.

    Args:
        gene: '16S' or '23S'.
        domain: 'E_coli_K12', 'bacteria', 'archaea' or 'eukarya'.
    Returns:
        int: the model length.
    Raises:
        SystemExit: for an unknown gene (after printing to stderr).
        KeyError: for an unknown domain.
    """
    lengths = {
        '16S': {'E_coli_K12': 1538, 'bacteria': 1689, 'archaea': 1563, 'eukarya': 2652},
        '23S': {'E_coli_K12': 2903, 'bacteria': 3146, 'archaea': 3774, 'eukarya': 9079},
    }
    if gene not in lengths:
        # Bug fix: the old Python-2-style call print(sys.stderr, msg)
        # printed the stream object to stdout instead of writing the
        # message to stderr.
        print('# length unknown for gene: %s, domain: %s' % (gene, domain), file=sys.stderr)
        exit()
    return lengths[gene][domain]
|
def connect(self, timeout=2):
    """Connect to the KNX/IP tunnelling interface.

    If the remote address is "0.0.0.0", the GatewayScanner is used to
    automatically detect a KNX gateway and connect to it if one has
    been found.

    :param timeout: control-socket timeout in seconds.
    :return: True if a connection could be established, False otherwise.
    """
    if self.connected:
        logging.info("KNXIPTunnel connect request ignored, " "already connected")
        return True
    if self.remote_ip == "0.0.0.0":
        scanner = GatewayScanner()
        try:
            ipaddr, port = scanner.start_search()
            logging.info("Found KNX gateway %s/%s", ipaddr, port)
            self.remote_ip = ipaddr
            self.remote_port = port
        except TypeError:
            # start_search() presumably returned None (nothing found), so
            # the tuple unpacking fails.  NOTE(review): despite the
            # "aborting" message, execution falls through and still tries
            # to connect to 0.0.0.0, and the trailing %s has no argument
            # -- confirm whether an early `return False` was intended.
            logging.error("No KNX/IP gateway given and no gateway " "found by scanner, aborting %s")
    # Clean up cache
    self.value_cache.clear()
    # Find my own IP by "connecting" a UDP socket toward the gateway.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.connect((self.remote_ip, self.remote_port))
    local_ip = sock.getsockname()[0]
    if self.data_server:
        logging.info("Data server already running, not starting again")
    else:
        self.data_server = DataServer((local_ip, 0), DataRequestHandler, self)
        dummy_ip, self.data_port = self.data_server.server_address
        data_server_thread = threading.Thread(target=self.data_server.serve_forever)
        # Let the interpreter exit even if the data server is running.
        data_server_thread.daemon = True
        data_server_thread.start()
        logging.debug("Started data server on UDP port %s", self.data_port)
    self.control_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    self.control_socket.bind((local_ip, 0))
    self.control_socket.settimeout(timeout)
    # Connect packet
    frame = KNXIPFrame(KNXIPFrame.CONNECT_REQUEST)
    # Control endpoint (HPAI: length 8 bytes, UDP) -- own IP and port.
    body = []
    body.extend([0x08, 0x01])
    # length 8 bytes, UDP
    dummy_ip, port = self.control_socket.getsockname()
    body.extend(ip_to_array(local_ip))
    body.extend(int_to_array(port, 2))
    # Data endpoint (HPAI: length 8 bytes, UDP) -- own IP and data port.
    body.extend([0x08, 0x01])
    # length 8 bytes, UDP
    body.extend(ip_to_array(local_ip))
    body.extend(int_to_array(self.data_port, 2))
    # Connection request information block; presumably tunnel connection /
    # link layer per the KNXnet/IP spec -- confirm against the standard.
    body.extend([0x04, 0x04, 0x02, 0x00])
    frame.body = body
    try:
        self.control_socket.sendto(bytes(frame.to_frame()), (self.remote_ip, self.remote_port))
        received = self.control_socket.recv(1024)
    except socket.error:
        self.control_socket.close()
        self.control_socket = None
        logging.error("KNX/IP gateway did not respond to connect request")
        return False
    # Check whether the response is a CONNECT_RESPONSE (service type
    # identifier lives in bytes 2-3 of the frame).
    r_sid = received[2] * 256 + received[3]
    if r_sid == KNXIPFrame.CONNECT_RESPONSE:
        self.channel = received[6]
        status = received[7]
        if status == 0:
            hpai = received[8:10]
            logging.debug("Connected KNX IP tunnel " + "(Channel: {}, HPAI: {} {})".format(self.channel, hpai[0], hpai[1]))
        else:
            logging.error("KNX IP tunnel connect error:" + "(Channel: {}, Status: {})".format(self.channel, status))
            return False
    else:
        logging.error("Could not initiate tunnel connection, STI = {0:%s}", r_sid)
        return False
    self.connected = True
    return True
|
def save(self, render_name=False, *args, **kwargs):  # pylint: disable=keyword-arg-before-vararg
    """Save the data model.

    Runs the full persistence pipeline: fills input defaults and computes
    the checksum on creation, (re)renders the object's name, hydrates
    storage and size, validates input/descriptor/output against their
    schemas, and finally persists the object (plus, on creation, its
    dependencies and entity) inside one transaction.

    :param render_name: when True, re-render the name even on update.
    """
    # A user-supplied name change marks the object as named by the user.
    if self.name != self._original_name:
        self.named_by_user = True
    create = self.pk is None
    if create:
        fill_with_defaults(self.input, self.process.input_schema)  # pylint: disable=no-member
        if not self.name:
            self._render_name()
        else:
            self.named_by_user = True
        self.checksum = get_data_checksum(self.input, self.process.slug, self.process.version)  # pylint: disable=no-member
    elif render_name:
        self._render_name()
    self.save_storage(self.output, self.process.output_schema)  # pylint: disable=no-member
    if self.status != Data.STATUS_ERROR:
        hydrate_size(self)
        # If only specified fields are updated (e.g. in executor), size needs to be added
        if 'update_fields' in kwargs:
            kwargs['update_fields'].append('size')
    # Input Data objects are validated only upon creation as they can be deleted later.
    skip_missing_data = not create
    validate_schema(self.input, self.process.input_schema, skip_missing_data=skip_missing_data  # pylint: disable=no-member
    )
    render_descriptor(self)
    if self.descriptor_schema:
        # A schema violation is not fatal; the descriptor is flagged dirty.
        try:
            validate_schema(self.descriptor, self.descriptor_schema.schema)  # pylint: disable=no-member
            self.descriptor_dirty = False
        except DirtyError:
            self.descriptor_dirty = True
    elif self.descriptor and self.descriptor != {}:
        raise ValueError("`descriptor_schema` must be defined if `descriptor` is given")
    if self.status != Data.STATUS_ERROR:
        output_schema = self.process.output_schema  # pylint: disable=no-member
        if self.status == Data.STATUS_DONE:
            validate_schema(self.output, output_schema, data_location=self.location, skip_missing_data=True)
        else:
            # Not finished yet: required output fields may still be missing.
            validate_schema(self.output, output_schema, data_location=self.location, test_required=False)
    with transaction.atomic():
        self._perform_save(*args, **kwargs)
        # We can only save dependencies after the data object has been saved. This
        # is why a transaction block is needed and the save method must be called first.
        if create:
            self.save_dependencies(self.input, self.process.input_schema)  # pylint: disable=no-member
            self.create_entity()
|
def prepare_series(self, memory: set) -> None:
    """Activate RAM storage for all sequences selected by this element.

    Calls ``activate_ram()`` on every sequence yielded by
    ``_iterate_sequences`` that is not yet in ``memory``.  Already
    prepared sequences (those passed in via ``memory``) are left
    untouched; newly prepared sequences are added to ``memory`` so that
    subsequent callers skip them.
    """
    fresh = (seq for seq in self._iterate_sequences() if seq not in memory)
    for seq in fresh:
        memory.add(seq)
        seq.activate_ram()
|
def unregister_signals(self):
    """Disconnect all signal handlers registered by this extension."""
    # Record signals are hooked up lazily; only detach them if present.
    if hasattr(self, 'update_function'):
        handler = self.update_function
        records_signals.before_record_insert.disconnect(handler)
        records_signals.before_record_update.disconnect(handler)
    self.unregister_signals_oaiset()
|
def _render_frame ( self ) :
"""Renders the frame on the line after clearing it ."""
|
frame = self . frame ( )
output = '\r{0}' . format ( frame )
self . clear ( )
try :
self . _stream . write ( output )
except UnicodeEncodeError :
self . _stream . write ( encode_utf_8_text ( output ) )
|
def add_tile(self, address, tile):
    """Register ``tile`` to handle all RPCs at a given address.

    Args:
        address (int): The address of the tile.
        tile (RPCDispatcher): A tile object that inherits from RPCDispatcher.

    Raises:
        ArgumentError: if a tile is already registered at ``address``.
    """
    if address not in self._tiles:
        self._tiles[address] = tile
        return
    raise ArgumentError("Tried to add two tiles at the same address", address=address)
|
def drop_table(self, dbname, name, deleteData):
    """Issue a drop_table RPC and wait for the server's reply.

    Parameters:
     - dbname
     - name
     - deleteData
    """
    # Fire the request, then block on the matching response.
    self.send_drop_table(dbname, name, deleteData)
    self.recv_drop_table()
|
def encode_tf(self, s):
    """Encode a tf.Scalar string to a tf.Tensor of subword ids.

    This is necessary for on-the-fly tokenization.

    Args:
        s: a tf.Scalar with dtype tf.string
    Returns:
        a 1d tf.Tensor with dtype tf.int32
    """
    encoded = subword_text_encoder_ops.subword_text_encoder_encode(s, self._filepath)
    # The C++ op appends 1 (= EOS) to every sequence -- strip it off.
    return encoded[:-1]
|
def text_alignment(x, y):
    """Compute text-label alignment from x- and y-axis coordinate values.

    A label on the "right" side of the plot (x > 0) should be
    left-aligned; a label on the "top" side (y > 0) should be
    bottom-aligned; labels sitting on an axis are centered along it.

    :param x, y: (`int` or `float`) x- and y-axis coordinate respectively.
    :returns: A 2-tuple of strings, the horizontal and vertical alignments
        respectively.
    """
    ha = "center" if x == 0 else ("left" if x > 0 else "right")
    va = "center" if y == 0 else ("bottom" if y > 0 else "top")
    return ha, va
|
def exception_to_signal(sig: Union[SignalException, signal.Signals]):
    """Rollback any changes done by :py:func:`signal_to_exception`.

    Restores the default disposition (``SIG_DFL``) for the signal, which
    may be given either as a ``SignalException`` or as a
    ``signal.Signals`` member.
    """
    signum = sig.signum if isinstance(sig, SignalException) else sig.value
    signal.signal(signum, signal.SIG_DFL)
|
def map_field(fn, m):
    """Map a field name through an optional mapping.

    Returns ``m[fn]`` when a mapping is given and contains ``fn``;
    otherwise returns the input field name unchanged.
    """
    if m is None:
        return fn
    return m[fn] if fn in m else fn
|
def uni_to(self, target, *args, **kwargs):
    """Unified "to" dispatcher.

    Looks up the method name registered for ``target`` in
    ``self.func_dict`` and invokes it with the given arguments.
    """
    logging.debug(_('target: %s, args: %s, kwargs: %s'), target, args, kwargs)
    method = getattr(self, self.func_dict[target])
    return method(*args, **kwargs)
|
def optimize(model, cand):
    """Solve the model and update the candidate solutions' list.

    Adds to ``cand`` one pair of objective values for every solution the
    solver found -- the intermediate solutions as well as the optimum.

    Parameters:
        - model: solver model object (``model.data`` must unpack to
          ``(x, y, C, T)``)
        - cand: list of pairs of objective values (appended in place)

    Returns the solver's exit status.
    """
    model.hideOutput()
    model.optimize()
    x, y, C, T = model.data
    status = model.getStatus()
    if status == "optimal":
        # Collect suboptimal solutions found during the search.
        for sol in model.getSols():
            # Bug fix: the C objective was previously queried without the
            # solution argument (``getSolVal(C)``), unlike the T query, so
            # the recorded pair did not reflect this particular solution.
            cand.append((model.getSolVal(T, sol), model.getSolVal(C, sol)))
    return status
|
def lambda_B_calc(classes, table, TOP, POP):
    """Calculate Goodman and Kruskal's lambda B.

    :param classes: confusion matrix classes
    :type classes: list
    :param table: confusion matrix table
    :type table: dict
    :param TOP: test outcome positive
    :type TOP: dict
    :param POP: population
    :type POP: int
    :return: Goodman and Kruskal's lambda B as float, or the string
        "None" when the statistic cannot be computed.
    """
    try:
        maxresponse = max(list(TOP.values()))
        row_maxima = sum(max(list(table[c].values())) for c in classes)
        return (row_maxima - maxresponse) / (POP - maxresponse)
    except Exception:
        return "None"
|
def draw_pl_vote ( m , gamma ) :
"""Description :
Generate a Plackett - Luce vote given the model parameters .
Parameters :
m : number of alternatives
gamma : parameters of the Plackett - Luce model"""
|
localgamma = np . copy ( gamma )
# work on a copy of gamma
localalts = np . arange ( m )
# enumeration of the candidates
vote = [ ]
for j in range ( m ) : # generate position in vote for every alternative
# transform local gamma into intervals up to 1.0
localgammaintervals = np . copy ( localgamma )
prev = 0.0
for k in range ( len ( localgammaintervals ) ) :
localgammaintervals [ k ] += prev
prev = localgammaintervals [ k ]
selection = np . random . random ( )
# pick random number
# selection will fall into a gamma interval
for l in range ( len ( localgammaintervals ) ) : # determine position
if selection <= localgammaintervals [ l ] :
vote . append ( localalts [ l ] )
localgamma = np . delete ( localgamma , l )
# remove that gamma
localalts = np . delete ( localalts , l )
# remove the alternative
localgamma /= np . sum ( localgamma )
# renormalize
break
return vote
|
def set(self, option, value=None):
    """Set an option, returning ``self`` for chaining.

    Args:
        option (str): option name
        value (str): value, default None
    """
    normalized = self._container.optionxform(option)
    if normalized in self.options():
        # Existing option: update its value object in place.
        self.__getitem__(normalized).value = value
    else:
        self.__setitem__(normalized, value)
    return self
|
def update_remote_ids(self, remote_project):
    """Record matching remote uuids by comparing against ``remote_project``.

    :param remote_project: RemoteProject to compare against; a falsy
        value makes this a no-op.
    """
    if not remote_project:
        return
    self.remote_id = remote_project.id
    _update_remote_children(remote_project, self.children)
|
def pretty_str(self, indent=0):
    """Return a human-readable string representation of this object.

    Kwargs:
        indent (int): The amount of spaces to use as indentation.
    """
    pad = ' ' * indent
    joined_args = ', '.join(map(pretty_str, self.arguments))
    if self.method_of:
        call = '{}.{}({})'.format(self.method_of.pretty_str(), self.name, joined_args)
    elif self.is_constructor:
        call = 'new {}({})'.format(self.name, joined_args)
    else:
        call = '{}({})'.format(self.name, joined_args)
    template = '{}({})' if self.parenthesis else '{}{}'
    return template.format(pad, call)
|
def get_next_unanswered_question(self, assessment_section_id, item_id):
    """Gets the next unanswered question in this assessment section.

    arg:    assessment_section_id (osid.id.Id): ``Id`` of the
            ``AssessmentSection``
    arg:    item_id (osid.id.Id): ``Id`` of the ``Item``
    return: (osid.assessment.Question) - the next unanswered question
    raise:  IllegalState - ``has_next_unanswered_question()`` is ``false``
    raise:  NotFound - ``assessment_section_id`` or ``item_id`` is not
            found, or ``item_id`` not part of ``assessment_section_id``
    raise:  NullArgument - an argument is null
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure occurred

    *compliance: mandatory -- This method must be implemented.*
    """
    # Or this could call through to get_next_question in the section.
    # Walk the unanswered questions until item_id is found, then hand
    # back the element after it via the list's available()/next() pair.
    questions = self.get_unanswered_questions(assessment_section_id)
    for question in questions:
        if question.get_id() != item_id:
            continue
        if not questions.available():
            raise errors.IllegalState('No next unanswered question is available')
        return questions.next()
    raise errors.NotFound('item_id is not found in Section')
|
def share_extension(self, publisher_name, extension_name, account_name):
    """ShareExtension.

    [Preview API]

    :param str publisher_name:
    :param str extension_name:
    :param str account_name:
    """
    # Serialize each non-None argument into its REST route value.
    route_values = {}
    for route_key, param_name, value in (
        ('publisherName', 'publisher_name', publisher_name),
        ('extensionName', 'extension_name', extension_name),
        ('accountName', 'account_name', account_name),
    ):
        if value is not None:
            route_values[route_key] = self._serialize.url(param_name, value, 'str')
    self._send(http_method='POST', location_id='a1e66d8f-f5de-4d16-8309-91a4e015ee46', version='5.1-preview.1', route_values=route_values)
|
def list_files(directory):
    '''Return a list of all paths found under ``directory``: the directory
    itself plus every file and subdirectory beneath it (deduplicated).'''
    paths = {directory}
    for root, dirs, files in safe_walk(directory):
        for entry in files:
            paths.add(os.path.join(root, entry))
        for entry in dirs:
            paths.add(os.path.join(root, entry))
    return list(paths)
|
def inverse_transform(self, maps):
    r"""Compute :math:`y = \mathrm{logistic}(x; a, b)`.

    The codomain :math:`a, b` of :math:`y` are given by the class's bounds.

    Parameters
    ----------
    maps : dict or FieldArray
        A dictionary or FieldArray which provides a map between the
        parameter name of the variable to transform and its value(s).

    Returns
    -------
    out : dict or FieldArray
        A map between the transformed variable name and value(s), along
        with the original variable name and value(s).
    """
    transformed = {
        self._inputvar: self.logistic(maps[self._outputvar], self._a, self._b)
    }
    return self.format_output(maps, transformed)
|
def download(self, url, path):
    """Queue a download of ``url`` to ``path`` and return its worker.

    If a download of the same (encoded) url is already in flight, the
    existing worker is returned instead of starting a new request.
    """
    # Normalize the url to its fully-encoded form so that equivalent
    # urls share a single worker entry.
    qurl = QUrl(url)
    url = to_text_string(qurl.toEncoded(), encoding='utf-8')
    logger.debug(str((url, path)))
    if url in self._workers:
        while not self._workers[url].finished:
            # NOTE(review): this "while" behaves as an "if" -- it returns
            # on the first iteration.  A finished worker falls through
            # and the url is downloaded again; confirm that is intended.
            return self._workers[url]
    worker = DownloadWorker(url, path)
    # Check download folder exists
    folder = os.path.dirname(os.path.abspath(path))
    if not os.path.isdir(folder):
        os.makedirs(folder)
    # Issue a HEAD request first; presumably the manager's response slot
    # decides whether a full GET follows -- confirm in the handler.
    request = QNetworkRequest(qurl)
    self._head_requests[url] = request
    self._paths[url] = path
    self._workers[url] = worker
    self._manager.head(request)
    self._timer.start()
    return worker
|
def find_bright_peaks(self, data, threshold=None, sigma=5, radius=5):
    """Find bright peak candidates in ``data``.

    ``threshold`` specifies a value below which an object is not
    considered a candidate; if None, a default is calculated from
    ``sigma`` via ``get_threshold``.  ``radius`` defines the pixel
    neighborhood used when testing for local maxima -- if the desired
    objects are larger in size, specify a larger radius.

    Returns a list of candidate object coordinate tuples (x, y).
    """
    if threshold is None:
        # Set threshold to default if none provided.
        threshold = self.get_threshold(data, sigma=sigma)
        self.logger.debug("threshold defaults to %f (sigma=%f)" % (threshold, sigma))
    local_max = filters.maximum_filter(data, radius)
    is_peak = (data == local_max)
    # Suppress pixels whose whole neighborhood never exceeds the threshold
    # (e.g. flat background regions that trivially equal their own max).
    is_peak[(local_max > threshold) == 0] = 0
    labeled, num_objects = ndimage.label(is_peak)
    slices = ndimage.find_objects(labeled)
    peaks = []
    for dy, dx in slices:
        # This is only an approximate center; use FWHM or centroid
        # calculation to refine further.
        xc = (dx.start + dx.stop - 1) / 2.0
        yc = (dy.start + dy.stop - 1) / 2.0
        peaks.append((xc, yc))
    self.logger.debug("peaks=%s" % (str(peaks)))
    return peaks
|
def rsa_public_key_pkcs1_to_pkcs8(pkcs1_key):
    """Convert a PKCS#1-encoded RSA *public* key to PKCS#8 (SubjectPublicKeyInfo).

    (The previous docstring said "private key", but this function builds a
    PublicKeyInfo wrapper and carries the input as the publicKey payload.)

    :param pkcs1_key: DER bytes of an RSA public key in PKCS#1 form.
    :returns: DER bytes of the same key wrapped in a PKCS#8 structure.
    """
    # AlgorithmIdentifier naming rsaEncryption.
    algorithm = RsaAlgorithmIdentifier()
    algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID
    pkcs8_key = PublicKeyInfo()
    pkcs8_key["algorithm"] = algorithm
    # The PKCS#1 blob is carried verbatim as the BIT STRING payload.
    pkcs8_key["publicKey"] = univ.BitString.fromOctetString(pkcs1_key)
    return encoder.encode(pkcs8_key)
|
def parse(self, parser, xml):
    """Extract reference names and XML attributes from an element.

    References are found by running ``parser.RE_REFS`` over the element's
    inner text; every attribute of the element is copied into
    ``self.attributes``.
    """
    # References can only be processed when the tag has inner text.
    text = xml.text
    if text is not None:
        self.references.extend(
            m.group("reference") for m in parser.RE_REFS.finditer(text)
        )
    # Copy every XML attribute onto this element.
    for name in list(xml.keys()):
        self.attributes[name] = xml.get(name)
|
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
    '''Ensure pagerduty service exists.

    This method accepts as arguments everything defined in
    https://developer.pagerduty.com/documentation/rest/services/create

    Note that many arguments are mutually exclusive, depending on the "type" argument.

    Examples:

    .. code-block:: yaml

        # create a PagerDuty email service at test-email@DOMAIN.pagerduty.com
        ensure generic email service exists:
            pagerduty_service.present:
                - name: my email service
                - service:
                    description: "email service controlled by salt"
                    escalation_policy_id: "my escalation policy"
                    type: "generic_email"
                    service_key: "test-email"

    .. code-block:: yaml

        # create a pagerduty service using cloudwatch integration
        ensure my cloudwatch service exists:
            pagerduty_service.present:
                - name: my cloudwatch service
                - service:
                    escalation_policy_id: "my escalation policy"
                    type: aws_cloudwatch
                    description: "my cloudwatch service controlled by salt"
    '''
    # TODO: aws_cloudwatch type should be integrated with boto_sns
    # for convenience, we accept id, name, or email for users
    # and we accept the id or name for schedules
    # Mirror the Salt state "name" into the service payload the PD API expects.
    kwargs['service']['name'] = kwargs['name']
    # make args mirror PD API structure
    escalation_policy_id = kwargs['service']['escalation_policy_id']
    # Resolve the escalation policy (given by name or id) to a concrete id.
    escalation_policy = __salt__['pagerduty_util.get_resource']('escalation_policies', escalation_policy_id, ['name', 'id'], profile=profile, subdomain=subdomain, api_key=api_key)
    if escalation_policy:
        kwargs['service']['escalation_policy_id'] = escalation_policy['id']
    # Delegate create-or-update to the shared helper; _diff computes changes.
    r = __salt__['pagerduty_util.resource_present']('services', ['name', 'id'], _diff, profile, subdomain, api_key, **kwargs)
    return r
|
def parse_connection_option(header: str, pos: int, header_name: str) -> Tuple[ConnectionOption, int]:
    """Parse one Connection option from ``header`` starting at ``pos``.

    Return the option value and the position just past it.
    Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs.
    """
    # A Connection option is a plain HTTP token.
    token, new_pos = parse_token(header, pos, header_name)
    return cast(ConnectionOption, token), new_pos
|
def POST(self):
    """The combined values from :attr:`forms` and :attr:`files`. Values are
    either strings (form values) or instances of
    :class:`cgi.FieldStorage` (file uploads).
    """
    post = MultiDict()
    # cgi.FieldStorage reads the environ; expose only the keys it needs and
    # blank QUERY_STRING so GET parameters are not mixed into the POST data.
    safe_env = {'QUERY_STRING': ''}
    for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
        if key in self.environ:
            safe_env[key] = self.environ[key]
    # NOTE(review): presumably NCTextIOWrapper is truthy only on Python 3,
    # where the body must be wrapped as text with a byte-preserving
    # ISO-8859-1 decoding for FieldStorage -- confirm against the module.
    if NCTextIOWrapper:
        fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n')
    else:
        fb = self.body
    data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True)
    for item in data.list or []:
        # File uploads keep the FieldStorage object; plain fields keep the value.
        post[item.name] = item if item.filename else item.value
    return post
|
def write_source_description(self, capability_lists=None, outfile=None, links=None):
    """Write a ResourceSync Description document to outfile or STDOUT."""
    rsd = SourceDescription(ln=links)
    rsd.pretty_xml = self.pretty_xml
    # Register every capability list URI, if any were supplied.
    for uri in (capability_lists or []):
        rsd.add_capability_list(uri)
    if outfile is None:
        print(rsd.as_xml())
    else:
        rsd.write(basename=outfile)
|
def calc(path):
    '''Return (total_size_in_bytes, error) for the file or directory at *path*.

    If the path is a directory the size is calculated recursively.  *error*
    is None on success, or the string "!" if any entry could not be read;
    traversal stops at the first failure and the partial total is returned.
    Symlinks encountered inside a directory are not followed.
    '''
    total = 0
    err = None
    if os.path.isdir(path):
        try:
            for entry in os.scandir(path):
                try:
                    is_dir = entry.is_dir(follow_symlinks=False)
                except (PermissionError, FileNotFoundError):
                    err = "!"
                    return total, err
                if is_dir:
                    result = calc(entry.path)
                    total += result[0]
                    err = result[1]
                    if err:
                        return total, err
                else:
                    try:
                        total += entry.stat(follow_symlinks=False).st_size
                    except (PermissionError, FileNotFoundError):
                        err = "!"
                        return total, err
        except (PermissionError, FileNotFoundError):
            err = "!"
            return total, err
    else:
        # Fix: guard the single-file case the same way directory entries are
        # guarded -- report "!" instead of raising if the file vanished or
        # is unreadable (previously os.path.getsize could raise).
        try:
            total += os.path.getsize(path)
        except (PermissionError, FileNotFoundError):
            err = "!"
    return total, err
|
def accel_toggle_hide_on_lose_focus(self, *args):
    """Accel-key callback: toggle whether the window hides on focus loss."""
    current = self.settings.general.get_boolean('window-losefocus')
    self.settings.general.set_boolean('window-losefocus', not current)
    return True
|
def finish_response(self):
    """Completes the response and performs the following tasks:

    - Remove the `'ws4py.socket'` and `'ws4py.websocket'` environ keys.
    - Attach the returned websocket, if any, to the WSGI server
      using its ``link_websocket_to_server`` method.
    """
    # Force execution of the result iterator until first actual content:
    # pulling one item runs the application far enough that it has had a
    # chance to stash a websocket in the environ.
    rest = iter(self.result)
    first = list(itertools.islice(rest, 1))
    self.result = itertools.chain(first, rest)
    # Now it's safe to look if environ was modified.
    ws = None
    if self.environ:
        self.environ.pop('ws4py.socket', None)
        ws = self.environ.pop('ws4py.websocket', None)
    try:
        SimpleHandler.finish_response(self)
    except:
        # Deliberately bare: any failure (including BaseException) must
        # close the websocket before being re-raised.
        if ws:
            ws.close(1011, reason='Something broke')
        raise
    else:
        if ws:
            self.request_handler.server.link_websocket_to_server(ws)
|
def composite() -> Iterator[int]:
    """Generate the composite numbers using the sieve of Eratosthenes.

    https://oeis.org/A002808
    """
    # Every integer strictly between consecutive primes is composite.
    previous = 3
    for prime in eratosthenes():
        yield from range(previous + 1, prime)
        previous = prime
|
def set_preference(data, chunk_size):
    """Return the default Affinity Propagation preference for *data*.

    The preference is computed as the median of the per-round medians of
    pairwise similarity values, over 15 rounds of random subsampling of
    ``chunk_size`` rows.

    Parameters
    ----------
    data : array of shape (N_samples, N_features)
        The data-set submitted for Affinity Propagation clustering.
    chunk_size : int
        Number of rows drawn (without replacement) per subsampling round.

    Returns
    -------
    preference : float
        Median of the 15 per-round medians.
    """
    N_samples, N_features = data.shape
    rng = np.arange(0, N_samples, dtype=int)
    medians = []
    for round_idx in range(15):
        selected_samples = np.random.choice(N_samples, size=chunk_size, replace=False)
        samples = data[selected_samples, :]
        # Negative squared Euclidean distances, as used by Affinity Propagation.
        S = -euclidean_distances(samples, data, squared=True)
        # NOTE(review): n/rows/cols/triu_indices build strictly-upper-triangle
        # index pairs but are never applied to S below -- the median is taken
        # over all of S, self-similarities included. Preserved as-is pending
        # confirmation of the intended behaviour.
        n = chunk_size * N_samples - (chunk_size * (chunk_size + 1) / 2)
        rows = np.zeros(0, dtype=int)
        for k in range(chunk_size):
            rows = np.append(rows, np.full(N_samples - k, k, dtype=int))
        cols = np.zeros(0, dtype=int)
        for k in range(chunk_size):
            cols = np.append(cols, np.delete(rng, selected_samples[:k + 1]))
        triu_indices = tuple((rows, cols))
        preference = np.median(S, overwrite_input=True)
        medians.append(preference)
        del S
        # Fix: the inner loops above used to reuse the outer loop variable
        # ``i``, so this condition compared against chunk_size - 1 instead of
        # the round number; collect garbage every 4th round as intended.
        if round_idx % 4 == 3:
            gc.collect()
    preference = np.median(medians)
    return preference
|
def _upload(self, project_id, updating, file_path, language_code=None, overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None):
    """Internal: updates terms/translations.

    File uploads are limited to one every 30 seconds.

    :param project_id: POEditor project identifier.
    :param updating: one of UPDATING_TERMS, UPDATING_TERMS_TRANSLATIONS,
        UPDATING_TRANSLATIONS.
    :param file_path: path of the local file to upload.
    :param language_code: required when translations are being updated.
    :param overwrite: overwrite existing translations when truthy.
    :param sync_terms: sync terms with the uploaded file when truthy.
    :param tags: tags to attach; ignored when only translations are updated.
    :param fuzzy_trigger: mark related translations fuzzy when truthy.
    :returns: the ``result`` entry of the API response.
    :raises POEditorArgsException: on an invalid ``updating`` value or a
        missing ``language_code`` when translations are involved.
    """
    options = [self.UPDATING_TERMS, self.UPDATING_TERMS_TRANSLATIONS, self.UPDATING_TRANSLATIONS]
    if updating not in options:
        raise POEditorArgsException('Updating arg must be in {}'.format(options))
    options = [self.UPDATING_TERMS_TRANSLATIONS, self.UPDATING_TRANSLATIONS]
    if language_code is None and updating in options:
        raise POEditorArgsException('Language code is required only if updating is ' 'terms_translations or translations)')
    # Tags and term-sync make no sense when only translations are updated.
    if updating == self.UPDATING_TRANSLATIONS:
        tags = None
        sync_terms = None
    # Special content type: the API expects flags as '0'/'1' strings and
    # empty strings instead of None.
    tags = tags or ''
    language_code = language_code or ''
    sync_terms = '1' if sync_terms else '0'
    overwrite = '1' if overwrite else '0'
    fuzzy_trigger = '1' if fuzzy_trigger else '0'
    project_id = str(project_id)
    with open(file_path, 'r+b') as local_file:
        data = self._run(url_path="projects/upload", id=project_id, language=language_code, file=local_file, updating=updating, tags=tags, sync_terms=sync_terms, overwrite=overwrite, fuzzy_trigger=fuzzy_trigger)
    return data['result']
|
def fromlalcache(cachefile, coltype=int):
    """Return a segmentlist of the times spanned by the files in a LAL cache.

    *cachefile* is an open file object containing a LAL cache; each line is
    parsed as a CacheEntry and its segment collected.  Segment boundaries
    are built with *coltype*, which should raise ValueError if it cannot
    convert its string argument.

    Example:

    >>> from pycbc_glue.lal import LIGOTimeGPS
    >>> cache_seglists = fromlalcache(open(filename), coltype=LIGOTimeGPS).coalesce()

    See also:

    pycbc_glue.lal.CacheEntry
    """
    entries = (lal.CacheEntry(line, coltype=coltype) for line in cachefile)
    return segments.segmentlist(entry.segment for entry in entries)
|
def dump_stats(self, pattern):
    """Dumps VM statistics.

    in pattern of type str
        The selection pattern. A bit similar to filename globbing.

    :raises TypeError: if *pattern* is not a string.
    """
    # basestring rather than str: presumably supplied by a py2/py3
    # compatibility shim -- keeps Python 2 unicode/str both acceptable.
    if not isinstance(pattern, basestring):
        raise TypeError("pattern can only be an instance of type basestring")
    self._call("dumpStats", in_p=[pattern])
|
def validate_search_space_content(experiment_config):
    '''Validate searchspace content.

    The file at experiment_config['searchSpacePath'] must be valid JSON and
    every value must specify both _type and _value; otherwise an error is
    printed and the process exits.
    '''
    # Parse inside a narrow try so validation failures below cannot be
    # misreported: the old bare ``except:`` also swallowed the SystemExit
    # raised by exit(1) in the loop, printing *both* error messages.  The
    # with-statement also closes the file handle the old code leaked.
    try:
        with open(experiment_config.get('searchSpacePath'), 'r') as search_space_file:
            search_space_content = json.load(search_space_file)
    except (OSError, ValueError):
        # ValueError covers json.JSONDecodeError; OSError covers IO failures.
        print_error('searchspace file is not a valid json format!')
        exit(1)
    for value in search_space_content.values():
        if not value.get('_type') or not value.get('_value'):
            print_error('please use _type and _value to specify searchspace!')
            exit(1)
|
def order_by_json_path(self, json_path, language_code=None, order='asc'):
    """Expose queryset ``order_by_json_path`` through the manager
    (i.e. ``Model.objects``).

    Usage example:
        MyModel.objects.order_by_json_path('title', order='desc')
        MyModel.objects.order_by_json_path('title', language_code='en_us', order='desc')
    """
    queryset = self.get_queryset(language_code)
    return queryset.order_by_json_path(json_path, language_code=language_code, order=order)
|
def _get_sorted_relationships ( self , goterm ) :
"""Traverse GO Terms above the current GO Term . Then add current GO Term to sorted ."""
|
if goterm . id in self . goids_seen :
return
self . goids_seen . add ( goterm . id )
for goterm_upper in goterm . get_goterms_upper ( ) :
self . _get_sorted_relationships ( goterm_upper )
self . goterms_sorted . append ( goterm )
|
def _states(self):
    """Initializes grid state attributes to their defaults."""
    # The currently visible table
    self.current_table = 0
    # The cell that has been selected before the latest selection
    # (3-tuple key; presumably (row, col, tab) -- confirm against callers)
    self._last_selected_cell = 0, 0, 0
    # If we are viewing cells based on their frozen status or normally
    # (when True, a cross-hatch is displayed for frozen cells)
    self._view_frozen = False
    # Whether the timer for updating frozen cells is currently running
    self.timer_running = False
|
def _change_sample_name(in_file, sample_name, data=None):
    """Rewrite a featureCounts log so the Status header carries *sample_name*.

    This makes MultiQC report the same sample name.  Returns the path of
    the fixed file.
    """
    out_file = append_stem(in_file, "_fixed")
    with file_transaction(data, out_file) as tx_out:
        with open(tx_out, "w") as out_handle, open(in_file) as in_handle:
            for line in in_handle:
                if line.startswith("Status"):
                    line = "Status\t%s.bam" % sample_name
                out_handle.write("%s\n" % line.strip())
    return out_file
|
def activated_services(self, user, provider=None):
    """get the activated services added from the administrator

    :param user: user
    :param provider: the selected provider
    :type user: current user
    :type provider: string
    :return: list of activated services
    :rtype: list
    """
    # Only this user's services whose related service row has status 1.
    services = UserService.objects.filter(name__status=1, user=user)
    choices = []
    data = ()
    if provider is not None:
        # Do not offer the currently selected provider as a choice.
        services = services.exclude(name__exact=provider)
    for class_name in services:
        # (service object, label) -- the label is the class name with the
        # text after 'Service' kept, e.g. 'ServiceTwitter' -> 'Twitter'.
        data = (class_name.name, class_name.name.name.rsplit('Service', 1)[1])
        choices.append(data)
    return choices
|
def format_unencoded(self, tokensource, outfile):
    """Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
    tuples and write it into ``outfile``.

    For our implementation we put all lines in their own 'line group'.
    """
    x = self.xoffset
    y = self.yoffset
    if not self.nowrap:
        # Emit the XML prolog, doctype and the opening <svg>/<g>/<text>.
        if self.encoding:
            outfile.write('<?xml version="1.0" encoding="%s"?>\n' % self.encoding)
        else:
            outfile.write('<?xml version="1.0"?>\n')
        outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" ' '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/' 'svg10.dtd">\n')
        outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
        outfile.write('<g font-family="%s" font-size="%s">\n' % (self.fontfamily, self.fontsize))
        outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (x, y))
    for ttype, value in tokensource:
        # Wrap styled tokens in a <tspan>; unstyled tokens go out bare.
        style = self._get_style(ttype)
        tspan = style and '<tspan' + style + '>' or ''
        tspanend = tspan and '</tspan>' or ''
        value = escape_html(value)
        if self.spacehack:
            # Rewrite spaces so SVG preserves them.
            # NOTE(review): the replacement target below looks garbled --
            # upstream pygments substitutes '&#160;' here; confirm.
            value = value.expandtabs().replace(' ', ' ')
        parts = value.split('\n')
        # Each newline inside a token closes the current <text> line and
        # opens a new one shifted down by ystep.
        for part in parts[:-1]:
            outfile.write(tspan + part + tspanend)
            y += self.ystep
            outfile.write('</text>\n<text x="%s" y="%s" ' 'xml:space="preserve">' % (x, y))
        outfile.write(tspan + parts[-1] + tspanend)
    outfile.write('</text>')
    if not self.nowrap:
        outfile.write('</g></svg>\n')
|
def raise_ise(text):
    """Raise a BackendError representing an Internal Server Error.

    Handy for reflecting HTTP errors from farther back in the call chain
    as failures of your service.

    Parameters
    ----------
    text : `str`
        Error text.

    Raises
    ------
    :class:`apikit.BackendError`
        The `status_code` will be `500`, the reason `Internal Server
        Error`, and its `content` the text you passed.
    """
    # Tolerate being exuberantly handed the Exception object itself.
    message = str(text) if isinstance(text, Exception) else text
    raise BackendError(status_code=500, reason="Internal Server Error", content=message)
|
def taskCompleted(self, *args, **kwargs):
    """Task Completed Messages

    When a task is successfully completed by a worker a message is posted
    to this exchange, routed by the `runId`, `workerGroup` and `workerId`
    of the completing run (additional runs are in the task status
    structure).

    This exchange outputs: ``v1/task-completed-message.json#``

    Routing keys (in order): routingKeyKind (always 'primary'), taskId,
    runId, workerGroup, workerId, provisionerId, workerType, schedulerId,
    taskGroupId, and a multi-word `reserved` tail matched with `#`.
    """
    routing_key = [
        {'constant': 'primary', 'multipleWords': False, 'name': 'routingKeyKind'},
        {'multipleWords': False, 'name': 'taskId'},
        {'multipleWords': False, 'name': 'runId'},
        {'multipleWords': False, 'name': 'workerGroup'},
        {'multipleWords': False, 'name': 'workerId'},
        {'multipleWords': False, 'name': 'provisionerId'},
        {'multipleWords': False, 'name': 'workerType'},
        {'multipleWords': False, 'name': 'schedulerId'},
        {'multipleWords': False, 'name': 'taskGroupId'},
        {'multipleWords': True, 'name': 'reserved'},
    ]
    ref = {
        'exchange': 'task-completed',
        'name': 'taskCompleted',
        'routingKey': routing_key,
        'schema': 'v1/task-completed-message.json#',
    }
    return self._makeTopicExchange(ref, *args, **kwargs)
|
def putNetworkVisualPropBypass(self, networkId, viewId, visualProperty, body, verbose=None):
    """Bypasses the Visual Style of the Network with the Visual Property
    specified by the `visualProperty`, `viewId`, and `networkId` parameters.

    Additional details on common Visual Properties can be found in the
    [Basic Visual Lexicon JavaDoc API](http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    :param networkId: SUID of the Network
    :param viewId: SUID of the Network View
    :param visualProperty: Name of the Visual Property
    :param body: A Visual Property and its value.
    :param verbose: print more
    :returns: 200: successful operation
    """
    endpoint = '{base}networks/{net}/views/{view}/network/{prop}/bypass'.format(
        base=self.___url, net=networkId, view=viewId, prop=visualProperty)
    return api(url=endpoint, method="PUT", body=body, verbose=verbose)
|
def write_struct_field(self, struct_name, field_name, values, x, y, p=0):
    """Write a value into a struct field on the target.

    This method is particularly useful for writing values into the ``sv``
    struct, which contains some configuration data.  See ``sark.h`` for
    details.

    Parameters
    ----------
    struct_name : string
        Name of the struct to write to, e.g., `"sv"`
    field_name : string
        Name of the field to write, e.g., `"random"`
    values :
        Value(s) to be written into the field.

    .. warning::
        Fields which are arrays must currently be written in their
        entirety.
    """
    # Resolve the field's location and its struct-module packing format.
    field, address, pack_chars = self._get_struct_field_and_address(struct_name, field_name)
    if field.length == 1:
        data = struct.pack(pack_chars, values)
    else:
        # Array fields must be written whole.
        assert len(values) == field.length
        data = struct.pack(pack_chars, *values)
    # Perform the write
    self.write(address, data, x, y, p)
|
def parse_bitcode(bitcode, context=None):
    """Create Module from a LLVM *bitcode* (a bytes object).

    :param bitcode: serialized LLVM bitcode as bytes.
    :param context: LLVM context to parse within; the global context is
        used when None.
    :returns: a ModuleRef wrapping the parsed module.
    :raises RuntimeError: if parsing fails; the LLVM error message is
        included in the exception text.
    """
    if context is None:
        context = get_global_context()
    buf = c_char_p(bitcode)
    bufsize = len(bitcode)
    # errmsg is an out-parameter filled by LLVM on failure.
    with ffi.OutputString() as errmsg:
        mod = ModuleRef(ffi.lib.LLVMPY_ParseBitcode(context, buf, bufsize, errmsg), context)
        if errmsg:
            # Dispose of the (possibly partial) module before raising.
            mod.close()
            raise RuntimeError("LLVM bitcode parsing error\n{0}".format(errmsg))
    return mod
|
def add_mutations_and_flush(self, table, muts):
    """Apply mutations to *table* immediately, without creating and
    managing a batch writer.  *muts* may be one mutation or a list/tuple.
    """
    if not isinstance(muts, (list, tuple)):
        muts = [muts]
    # Group every update by its row key.
    cells = {}
    for mutation in muts:
        cells.setdefault(mutation.row, []).extend(mutation.updates)
    self.client.updateAndFlush(self.login, table, cells)
|
def get_colorscale(scale):
    """Returns a color scale to be used for a plotly figure.

    Parameters
    ----------
    scale : str or list
        Color scale name.  If the color name is preceded by a minus (-)
        then the scale is inversed.  Also accepts a list of colors
        (rgb, rgba, hex).

    Example
    -------
        get_colorscale('accent')
        get_colorscale(['rgb(127,201,127)', 'rgb(190,174,212)', 'rgb(253,192,134)'])
    """
    if isinstance(scale, string_types):
        # Fix: the old check was ``type(scale) in string_types``, which
        # rejected str subclasses; isinstance is the correct test.
        scale = get_scales(scale)
    elif not isinstance(scale, list):
        raise Exception("scale needs to be either a scale name or list of colors")
    # Spread the colors evenly over [0, 1].
    cs = [[1.0 * idx / (len(scale) - 1), scale[idx]] for idx in range(len(scale))]
    cs.sort()
    return cs
|
def _in_items ( self , restrictions ) :
"""Generate argument pairs for queries like where ( id = [ 1 , 2 ] )"""
|
def build_in ( table , name , value ) :
return "{}.{} IN ({})" . format ( table , name , ", " . join ( [ "?" ] * len ( value ) ) )
in_items = self . _build_where ( restrictions , for_in = True )
names = [ build_in ( * restriction ) for restriction in in_items ]
values = list ( chain ( * [ item [ 2 ] for item in in_items ] ) )
return ( names , values )
|
def t_ID(self, t):
    r'[ a - zA - Z _ @ ] [ a - zA - Z0-9 _ @ \ - ] *'
    # NOTE: the raw string above is NOT documentation -- in PLY the rule's
    # docstring IS the token regex.  Do not edit it as prose; any change
    # alters the lexer's behaviour.
    # Reclassify identifiers that are reserved words into their own token
    # type; anything else stays a generic 'ID'.
    t.type = self.reserved_words.get(t.value, 'ID')
    return t
|
def _read_frame(self):
    """Read a frame from the XYZ file.

    Returns (title, coordinates) where coordinates is an (N, 3) float
    array scaled by ``self.file_unit``.  On the first frame the element
    symbols are captured into ``self.symbols``.

    Raises StopIteration when the file ends or a malformed line is
    encountered (this method presumably drives an iterator protocol --
    confirm against the enclosing class).
    """
    size = self.read_size()
    # The second line of an XYZ frame is a free-form title; drop the newline.
    title = self._f.readline()[:-1]
    if self.symbols is None:
        symbols = []
    coordinates = np.zeros((size, 3), float)
    for counter in range(size):
        line = self._f.readline()
        if len(line) == 0:
            # Truncated file: signal end of iteration.
            raise StopIteration
        words = line.split()
        if len(words) < 4:
            # Malformed atom line: needs a symbol plus three coordinates.
            raise StopIteration
        if self.symbols is None:
            symbols.append(words[0])
        try:
            coordinates[counter, 0] = float(words[1])
            coordinates[counter, 1] = float(words[2])
            coordinates[counter, 2] = float(words[3])
        except ValueError:
            raise StopIteration
    # Convert from the file's units to internal units.
    coordinates *= self.file_unit
    if self.symbols is None:
        self.symbols = symbols
    return title, coordinates
|
def _recycle ( self ) :
"""Reclaim buffer space before the origin .
Note : modifies buffer size"""
|
origin = self . _origin
if origin == 0 :
return False
available = self . _extent - origin
self . _data [ : available ] = self . _data [ origin : self . _extent ]
self . _extent = available
self . _origin = 0
# log _ debug ( " Recycled % d bytes " % origin )
return True
|
async def download_cot_artifact(chain, task_id, path):
    """Download an artifact and verify its SHA against the chain of trust.

    Args:
        chain (ChainOfTrust): the chain of trust object
        task_id (str): the task ID to download from
        path (str): the relative path to the artifact to download

    Returns:
        str: the full path of the downloaded artifact, or None when the
        link carries no chain-of-trust data (the download is skipped).

    Raises:
        CoTError: on failure.
    """
    link = chain.get_link(task_id)
    log.debug("Verifying {} is in {} cot artifacts...".format(path, task_id))
    if not link.cot:
        # Best effort: a missing CoT document downgrades to a warning.
        log.warning('Chain of Trust for "{}" in {} does not exist. See above log for more details. \
Skipping download of this artifact'.format(path, task_id))
        return
    if path not in link.cot['artifacts']:
        raise CoTError("path {} not in {} {} chain of trust artifacts!".format(path, link.name, link.task_id))
    url = get_artifact_url(chain.context, task_id, path)
    loggable_url = get_loggable_url(url)
    log.info("Downloading Chain of Trust artifact:\n{}".format(loggable_url))
    await download_artifacts(chain.context, [url], parent_dir=link.cot_dir, valid_artifact_task_ids=[task_id])
    full_path = link.get_artifact_full_path(path)
    # Verify every hash recorded for this artifact; an unknown algorithm or
    # any mismatch is fatal.
    for alg, expected_sha in link.cot['artifacts'][path].items():
        if alg not in chain.context.config['valid_hash_algorithms']:
            raise CoTError("BAD HASH ALGORITHM: {}: {} {}!".format(link.name, alg, full_path))
        real_sha = get_hash(full_path, hash_alg=alg)
        if expected_sha != real_sha:
            raise CoTError("BAD HASH on file {}: {}: Expected {} {}; got {}!".format(full_path, link.name, alg, expected_sha, real_sha))
        log.debug("{} matches the expected {} {}".format(full_path, alg, expected_sha))
    return full_path
|
def create(self, name=None, prefix=None, pkgs=None, channels=None):
    """Create an environment with a specified set of packages.

    Either *name* or *prefix* must identify the new environment; *pkgs*
    is a list/tuple of package specs or a string path to a spec file.
    Returns the parsed result of the ``conda create`` invocation.

    :raises TypeError: if *pkgs* is missing/invalid or neither *name* nor
        *prefix* is given.
    :raises CondaEnvExistsError: if the target environment already exists.
    """
    logger.debug(str((prefix, pkgs, channels)))
    # TODO: Fix temporal hack
    if (not pkgs or (not isinstance(pkgs, (list, tuple)) and not is_text_string(pkgs))):
        raise TypeError('must specify a list of one or more packages to ' 'install into new environment')
    cmd_list = ['create', '--yes', '--json', '--mkdir']
    if name:
        ref = name
        # Candidate locations where an env with this name could already live.
        search = [os.path.join(d, name) for d in self.info().communicate()[0]['envs_dirs']]
        cmd_list.extend(['--name', name])
    elif prefix:
        ref = prefix
        search = [prefix]
        cmd_list.extend(['--prefix', prefix])
    else:
        raise TypeError('must specify either an environment name or a ' 'path for new environment')
    if any(os.path.exists(prefix) for prefix in search):
        raise CondaEnvExistsError('Conda environment {0} already ' 'exists'.format(ref))
    # TODO: Fix temporal hack
    if isinstance(pkgs, (list, tuple)):
        cmd_list.extend(pkgs)
    elif is_text_string(pkgs):
        # A string is treated as the path of a requirements/spec file.
        cmd_list.extend(['--file', pkgs])
    # TODO: Check if correct
    if channels:
        cmd_list.extend(['--override-channels'])
        for channel in channels:
            cmd_list.extend(['--channel'])
            cmd_list.extend([channel])
    return self._call_and_parse(cmd_list)
|
def _multi_take_opportunity(self, tup):
    """Check whether there is the possibility to use ``_multi_take``.

    Currently the limit is that all axes being indexed must be indexed
    with list-likes.

    Parameters
    ----------
    tup : tuple
        Tuple of indexers, one per axis

    Returns
    -------
    boolean : Whether the current indexing can be passed through _multi_take
    """
    if not all(is_list_like_indexer(indexer) for indexer in tup):
        return False
    # Boolean indexers are just too complicated for this fast path.
    return not any(com.is_bool_indexer(indexer) for indexer in tup)
|
def notify_duration_exceeded(self, participants, reference_time):
    """The bot participant has been working longer than the time defined
    in the "duration" config value: mark every affected participant as
    rejected and persist the change.
    """
    for over_limit in participants:
        over_limit.status = "rejected"
    session.commit()
|
def unsubscribe(self, event):
    """Unsubscribe a client from an object's future changes."""
    # TODO: Automatic Unsubscription
    requested = event.data
    if not isinstance(requested, list):
        requested = [requested]
    removed = []
    for uuid in requested:
        if uuid not in self.subscriptions:
            continue
        subscribers = self.subscriptions[uuid]
        subscribers.pop(event.client.uuid)
        # Drop the whole entry once the last subscriber is gone.
        if not subscribers:
            del self.subscriptions[uuid]
        removed.append(uuid)
    response = {'component': 'hfos.events.objectmanager', 'action': 'unsubscribe', 'data': {'uuid': removed, 'success': True}}
    self._respond(None, response, event)
|
def visit_exact_match_value(self, node, fieldnames=None):
    """Generates a term query (exact search in ElasticSearch)."""
    if not fieldnames:
        fieldnames = ['_all']
    else:
        fieldnames = force_list(fieldnames)
    # Keywords with dedicated query generators short-circuit here.
    if ElasticSearchVisitor.KEYWORD_TO_ES_FIELDNAME['exact-author'] == fieldnames[0]:
        return self._generate_exact_author_query(node.value)
    elif ElasticSearchVisitor.KEYWORD_TO_ES_FIELDNAME['type-code'] == fieldnames[0]:
        return self._generate_type_code_query(node.value)
    elif ElasticSearchVisitor.KEYWORD_TO_ES_FIELDNAME['journal'] == fieldnames:
        # NOTE: compared against the whole list, not fieldnames[0], unlike
        # the two branches above -- presumably intentional; confirm.
        return self._generate_journal_nested_queries(node.value)
    bai_fieldnames = self._generate_fieldnames_if_bai_query(node.value, bai_field_variation=FieldVariations.raw, query_bai_field_if_dots_in_name=False)
    if ElasticSearchVisitor.KEYWORD_TO_ES_FIELDNAME['date'] == fieldnames:
        # Date terms may live under nested paths; wrap those in nested queries.
        term_queries = []
        for field in fieldnames:
            term_query = {'term': {field: _truncate_date_value_according_on_date_field(field, node.value).dumps()}}
            term_queries.append(generate_nested_query(ElasticSearchVisitor.DATE_NESTED_QUERY_PATH, term_query) if field in ElasticSearchVisitor.DATE_NESTED_FIELDS else term_query)
    elif ElasticSearchVisitor.KEYWORD_TO_ES_FIELDNAME['author'] in fieldnames:
        # Author terms are nested documents; prefer BAI fields when detected.
        term_queries = [generate_nested_query(ElasticSearchVisitor.AUTHORS_NESTED_QUERY_PATH, {'term': {field: node.value}}) for field in (bai_fieldnames or fieldnames)]
    else:
        term_queries = [{'term': {field: node.value}} for field in (bai_fieldnames or fieldnames)]
    # OR the per-field term queries together (should clauses, not must).
    return wrap_queries_in_bool_clauses_if_more_than_one(term_queries, use_must_clause=False)
|
def get_psk(self, endpoint_name, **kwargs):
    """Get the pre-shared key registered for *endpoint_name*."""
    bootstrap_api = self._get_api(bootstrap.PreSharedKeysApi)
    raw_psk = bootstrap_api.get_pre_shared_key(endpoint_name=endpoint_name)
    return PreSharedKey(raw_psk)
|
def get_clipboard(self):
    """Returns the clipboard content.

    If a bitmap is contained then it is returned; otherwise the clipboard
    text is returned (empty when the clipboard could not be opened).
    """
    bmpdata = wx.BitmapDataObject()
    textdata = wx.TextDataObject()
    # Fix: initialize before the conditional -- previously, failing to open
    # the clipboard left is_bmp_present unbound and the check below raised
    # UnboundLocalError.
    is_bmp_present = False
    if self.clipboard.Open():
        is_bmp_present = self.clipboard.GetData(bmpdata)
        self.clipboard.GetData(textdata)
        self.clipboard.Close()
    else:
        wx.MessageBox(_("Can't open the clipboard"), _("Error"))
    if is_bmp_present:
        return bmpdata.GetBitmap()
    else:
        return textdata.GetText()
|
def ok(self, *msg):
    """Prints a message with an ok prefix."""
    self._msg(colors.green("OK"), *msg)
|
def _from_dict ( cls , _dict ) :
"""Initialize a TableReturn object from a json dictionary ."""
|
args = { }
if 'document' in _dict :
args [ 'document' ] = DocInfo . _from_dict ( _dict . get ( 'document' ) )
if 'model_id' in _dict :
args [ 'model_id' ] = _dict . get ( 'model_id' )
if 'model_version' in _dict :
args [ 'model_version' ] = _dict . get ( 'model_version' )
if 'tables' in _dict :
args [ 'tables' ] = [ Tables . _from_dict ( x ) for x in ( _dict . get ( 'tables' ) ) ]
return cls ( ** args )
|
def read(self, amt=None, decode_content=None, cache_content=False):
    """Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
    parameters: ``decode_content`` and ``cache_content``.

    :param amt:
        How much of the content to read. If specified, caching is skipped
        because it doesn't make sense to cache partial content as the full
        response.
    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.
    :param cache_content:
        If True, will save the returned data such that the same result is
        returned despite of the state of the underlying file object. This
        is useful if you want the ``.data`` property to continue working
        after having ``.read()`` the file object. (Overridden if ``amt`` is
        set.)
    """
    # Note: content-encoding value should be case-insensitive, per RFC 7230
    # Section 3.2
    content_encoding = self.headers.get('content-encoding', '').lower()
    if self._decoder is None:
        # Lazily create the decoder the first time a decodable encoding is
        # seen.
        if content_encoding in self.CONTENT_DECODERS:
            self._decoder = _get_decoder(content_encoding)
    if decode_content is None:
        # Fall back to the instance-level default.
        decode_content = self.decode_content

    if self._fp is None:
        # No underlying file object: nothing to read.
        return

    flush_decoder = False

    try:
        try:
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                # Partial reads are never cached (see docstring).
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True

        except SocketTimeout:
            # FIXME: Ideally we'd like to include the url in the
            # ReadTimeoutError but there is yet no clean way to get at it
            # from this context.
            raise ReadTimeoutError(self._pool, None, 'Read timed out.')

        except BaseSSLError as e:
            # FIXME: Is there a better way to differentiate between SSLErrors?
            if not 'read operation timed out' in str(e):  # Defensive:
                # This shouldn't happen but just in case we're missing an edge
                # case, let's avoid swallowing SSL errors.
                raise

            raise ReadTimeoutError(self._pool, None, 'Read timed out.')

        except HTTPException as e:
            # This includes IncompleteRead.
            raise ProtocolError('Connection broken: %r' % e, e)

        self._fp_bytes_read += len(data)

        try:
            # Decompress the chunk just read, if a decoder is active.
            if decode_content and self._decoder:
                data = self._decoder.decompress(data)
        except (IOError, zlib.error) as e:
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding, e)

        if flush_decoder and decode_content and self._decoder:
            # The stream is exhausted: flush any bytes still buffered inside
            # the decoder so the caller gets the complete decoded body.
            buf = self._decoder.decompress(binary_type())
            data += buf + self._decoder.flush()

        if cache_content:
            self._body = data

        return data

    finally:
        # Release the connection back to the pool once the wrapped
        # httplib response reports itself closed.
        if self._original_response and self._original_response.isclosed():
            self.release_conn()
|
def unacknowledge_problem(self):
    """Remove the acknowledge, reset the flag. The comment is deleted

    :return: None
    """
    if self.problem_has_been_acknowledged:
        logger.debug("[item::%s] deleting acknowledge of %s",
                     self.get_name(), self.get_full_name())
        self.problem_has_been_acknowledged = False
        # Hosts and services build the expiry brok with different arguments:
        # a service also needs its host_name to be identified.
        if self.my_type == 'host':
            self.broks.append(
                self.acknowledgement.get_expire_brok(self.get_name()))
        else:
            self.broks.append(
                self.acknowledgement.get_expire_brok(self.host_name,
                                                     self.get_name()))
        # delete the comment of the item related with the acknowledge
        if hasattr(self.acknowledgement, 'comment_id') and \
                self.acknowledgement.comment_id in self.comments:
            del self.comments[self.acknowledgement.comment_id]
        # Should not be deleted, a None is Good
        self.acknowledgement = None
        # Notify listeners of the status change and log the event.
        self.broks.append(self.get_update_status_brok())
        self.raise_unacknowledge_log_entry()
|
def avail_images(call=None):
    '''returns a list of images available to you'''
    all_servers = list_nodes_full()
    templates = {}
    for server in all_servers:
        if server["IsTemplate"]:
            # Bug fix: the previous code did
            #   templates.update({"Template Name": server["Name"]})
            # which stored every template under the same key, so only the
            # last template survived. Key the result by template name so
            # all templates are reported.
            templates[server["Name"]] = {"Template Name": server["Name"]}
    return templates
|
def _flush(self):
    """Flush the write buffers of the stream if applicable.

    In write mode, send the buffer content to the cloud object.
    """
    # The byte range covered by the current buffer within the object.
    chunk = self._get_buffer()
    range_start = self._buffer_size * (self._seek - 1)
    range_end = range_start + len(chunk)

    # Upload asynchronously and remember the future so close/flush can wait.
    flush_future = self._workers.submit(
        self._flush_range, buffer=chunk, start=range_start, end=range_end)
    self._write_futures.append(flush_future)

    # Once the upload completes, extend the known object size.
    flush_future.add_done_callback(partial(self._update_size, range_end))
|
def remove_outside_collaborator(self, collaborator):
    """
    :calls: `DELETE /orgs/:org/outside_collaborators/:username <https://developer.github.com/v3/orgs/outside_collaborators>`_
    :param collaborator: :class:`github.NamedUser.NamedUser`
    :rtype: None
    """
    assert isinstance(collaborator, github.NamedUser.NamedUser), collaborator
    target_url = self.url + "/outside_collaborators/" + collaborator._identity
    headers, data = self._requester.requestJsonAndCheck("DELETE", target_url)
|
def register_for_json(*args, **kwargs) -> Any:
    """Class decorator to register classes with our JSON system.

    - If method is ``'provides_init_args_kwargs'``, the class provides a
      function

      .. code-block:: python

        def init_args_kwargs(self) -> Tuple[List[Any], Dict[str, Any]]

      that returns an ``(args, kwargs)`` tuple, suitable for passing to its
      ``__init__()`` function as ``__init__(*args, **kwargs)``.

    - If method is ``'provides_init_kwargs'``, the class provides a function

      .. code-block:: python

        def init_kwargs(self) -> Dict

      that returns a dictionary ``kwargs`` suitable for passing to its
      ``__init__()`` function as ``__init__(**kwargs)``.

    - Otherwise, the method argument is as for ``register_class_for_json()``.

    Usage looks like:

    .. code-block:: python

        @register_for_json(method=METHOD_STRIP_UNDERSCORE)
        class TableId(object):
            def __init__(self, db: str = '', schema: str = '',
                         table: str = '') -> None:
                self._db = db
                self._schema = schema
                self._table = table
    """
    if DEBUG:
        print("register_for_json: args = {}".format(repr(args)))
        print("register_for_json: kwargs = {}".format(repr(kwargs)))
    # http://stackoverflow.com/questions/653368/how-to-create-a-python-decorator-that-can-be-used-either-with-or-without-paramet  # noqa
    # In brief,
    #       @decorator
    # means
    #       x = decorator(x)
    # so
    #       @decorator(args)
    # means
    #       x = decorator(args)(x)
    if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
        if DEBUG:
            print("... called as @register_for_json")
        # called as @decorator
        # ... the single argument is the class itself, e.g. Thing in:
        #       @decorator
        #       class Thing(object):
        # ... e.g.:
        #       args = (<class '__main__.unit_tests.<locals>.SimpleThing'>,)
        #       kwargs = {}
        cls = args[0]  # type: ClassType
        register_class_for_json(cls, method=METHOD_SIMPLE)
        return cls
    # Otherwise:
    if DEBUG:
        print("... called as @register_for_json(*args, **kwargs)")
    # called as @decorator(*args, **kwargs)
    # ... e.g.:
    #       args = ()
    #       kwargs = {'method': 'provides_to_init_args_kwargs_dict'}
    method = kwargs.pop('method', METHOD_SIMPLE)  # type: str
    obj_to_dict_fn = kwargs.pop('obj_to_dict_fn', None)  # type: InstanceToDictFnType  # noqa
    dict_to_obj_fn = kwargs.pop('dict_to_obj_fn', initdict_to_instance)  # type: DictToInstanceFnType  # noqa
    default_factory = kwargs.pop('default_factory', None)  # type: DefaultFactoryFnType  # noqa
    # NOTE(review): the keyword popped here is 'check_results' (plural) while
    # the local name is 'check_result' (singular) — confirm which spelling
    # callers actually pass; a caller using 'check_result=' would be silently
    # ignored and the default True used instead.
    check_result = kwargs.pop('check_results', True)  # type: bool

    def register_json_class(cls_: ClassType) -> ClassType:
        # Inner decorator: registers cls_ using the options captured above.
        # odf/dof start as the caller-supplied converters and may be replaced
        # by wrappers derived from the class's own init_* helper methods.
        odf = obj_to_dict_fn
        dof = dict_to_obj_fn
        if method == METHOD_PROVIDES_INIT_ARGS_KWARGS:
            if hasattr(cls_, INIT_ARGS_KWARGS_FN_NAME):
                odf = wrap_args_kwargs_to_initdict(
                    getattr(cls_, INIT_ARGS_KWARGS_FN_NAME),
                    typename=cls_.__qualname__,
                    check_result=check_result)
            else:
                raise ValueError(
                    "Class type {} does not provide function {}".format(
                        cls_, INIT_ARGS_KWARGS_FN_NAME))
        elif method == METHOD_PROVIDES_INIT_KWARGS:
            if hasattr(cls_, INIT_KWARGS_FN_NAME):
                odf = wrap_kwargs_to_initdict(
                    getattr(cls_, INIT_KWARGS_FN_NAME),
                    typename=cls_.__qualname__,
                    check_result=check_result)
            else:
                raise ValueError(
                    "Class type {} does not provide function {}".format(
                        cls_, INIT_KWARGS_FN_NAME))
        elif method == METHOD_NO_ARGS:
            odf = obj_with_no_args_to_init_dict
        register_class_for_json(cls_, method=method, obj_to_dict_fn=odf,
                                dict_to_obj_fn=dof,
                                default_factory=default_factory)
        return cls_

    return register_json_class
|
def check_every(kls, every):
    """Decorator for registering a check to run every `every` (cronspec)"""
    def decorate(func):
        # Attach the Checker so the dashboard can discover this check later.
        func.dashmat_check = Checker(func, every)
        return func
    return decorate
|
def get_resource(self, service_name, resource_name, base_class=None):
    """Retrieves a resource class from the cache, if available.

    :param service_name: The service a given ``Resource`` talks to. Ex.
        ``sqs``, ``sns``, ``dynamodb``, etc.
    :type service_name: string

    :param resource_name: The name of the ``Resource``. Ex.
        ``Queue``, ``Notification``, ``Table``, etc.
    :type resource_name: string

    :param base_class: (Optional) The base class of the object. Prevents
        "magically" loading the wrong class (one with a different base).
        Default is ``default``.
    :type base_class: class

    :returns: A <kotocore.resources.Resource> subclass
    """
    classpath = self.build_classpath(base_class)
    # Walk the nested cache: service -> 'resources' -> resource -> classpath.
    options = (self.services
               .get(service_name, {})
               .get('resources', {})
               .get(resource_name, {}))
    resource_class = options.get(classpath, None)
    if not resource_class:
        msg = "Resource '{0}' for {1} is not present in the cache."
        raise NotCached(msg.format(resource_name, service_name))
    return resource_class
|
def _update(self):
    """Initialize the 1D interpolation."""
    # Only (re)build the interpolator when strains/values are non-empty and
    # consistent in length.
    if self.strains.size and self.strains.size == self.values.size:
        log_strains = np.log(self.strains)
        vals = self.values
        # Cubic interpolation needs at least 4 points; otherwise fall back
        # to linear.
        kind = 'linear' if log_strains.size < 4 else 'cubic'
        self._interpolater = interp1d(
            log_strains, vals, kind, bounds_error=False,
            fill_value=(vals[0], vals[-1]))
|
def get_default(self, node):
    """If not explicitly set, check if the opposite was set first before returning
    default"""
    props = node.inst.properties
    if self.opposite_property not in props:
        return self.default
    # The opposite property was set: this one defaults to its negation.
    return not props[self.opposite_property]
|
def rm_permissions(obj_name, principal, ace_type='all', obj_type='file'):
    r'''Remove a user's ACE from an object. This can be a file, folder,
    registry key, printer, service, etc...

    Args:

        obj_name (str):
            The object from which to remove the ace. This can be the
            path to a file or folder, a registry key, printer, etc. For more
            information about how to format the name see:
            https://msdn.microsoft.com/en-us/library/windows/desktop/aa379593(v=vs.85).aspx

        principal (str):
            The name of the user or group for which to set permissions. Can
            also pass a SID.

        ace_type (Optional[str]):
            The type of ace to remove. There are two types of ACEs, 'grant'
            and 'deny'. 'all' will remove all ACEs for the user. Default is
            'all'

        obj_type (Optional[str]):
            The type of object for which to set permissions. Default is
            'file'

    Returns:
        bool: True if successful, raises an error otherwise

    Usage:

    .. code-block:: python

        # Remove jsnuffy's grant ACE from C:\Temp
        salt.utils.win_dacl.rm_permissions('C:\\Temp', 'jsnuffy', 'grant')

        # Remove all ACEs for jsnuffy from C:\Temp
        salt.utils.win_dacl.rm_permissions('C:\\Temp', 'jsnuffy')
    '''
    # Load the object's DACL, strip the matching ACE(s), and persist.
    dacl_obj = dacl(obj_name, obj_type)
    dacl_obj.rm_ace(principal, ace_type)
    dacl_obj.save(obj_name)
    return True
|
def ltime(etobs, obs, direct, targ):
    """This routine computes the transmit (or receive) time
    of a signal at a specified target, given the receive
    (or transmit) time at a specified observer. The elapsed
    time between transmit and receive is also returned.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ltime_c.html

    :param etobs: Epoch of a signal at some observer
    :type etobs: float
    :param obs: NAIF ID of some observer
    :type obs: int
    :param direct: Direction the signal travels ("->" or "<-")
    :type direct: str
    :param targ: NAIF ID of the target object
    :type targ: int
    :return: epoch and time
    :rtype: tuple
    """
    # Marshal the Python arguments into ctypes equivalents for the C call.
    etobs = ctypes.c_double(etobs)
    obs = ctypes.c_int(obs)
    direct = stypes.stringToCharP(direct)
    targ = ctypes.c_int(targ)
    # Output parameters filled in by ltime_c via byref.
    ettarg = ctypes.c_double()
    elapsd = ctypes.c_double()
    libspice.ltime_c(etobs, obs, direct, targ,
                     ctypes.byref(ettarg), ctypes.byref(elapsd))
    # Unwrap the ctypes outputs back to plain Python floats.
    return ettarg.value, elapsd.value
|
def strip_output(nb):
    """strip the outputs from a notebook object"""
    # Drop notebook-level metadata that embeds output state.
    for meta_key in ('signature', 'widgets'):
        nb.metadata.pop(meta_key, None)
    # Clear per-cell execution results in place.
    for cell in _cells(nb):
        if 'outputs' in cell:
            cell['outputs'] = []
        if 'prompt_number' in cell:
            cell['prompt_number'] = None
    return nb
|
def initialize(self, seed=None):
    """Initialize handler operation.

    This method will generate new encryption keys and must be called prior
    to doing authentication or verification.
    """
    # Generate a new random seed unless a (truthy) one was provided.
    self.seed = seed or os.urandom(32)
    signer = SigningKey(self.seed)
    verifier = signer.get_verifying_key()
    self._auth_private = signer.to_seed()
    self._auth_public = verifier.to_bytes()
    log_binary(_LOGGER, 'Authentication keys',
               Private=self._auth_private, Public=self._auth_public)
|
def get_config(self, retrieve='all'):
    """Implementation of get_config for IOS.

    Returns the startup or/and running configuration as dictionary.
    The keys of the dictionary represent the type of configuration
    (startup or running). The candidate is always empty string,
    since IOS does not support candidate configuration.
    """
    configs = {'startup': '', 'running': '', 'candidate': ''}
    # Map each requested config type to the IOS command that retrieves it.
    commands = {'startup': 'show startup-config',
                'running': 'show running-config'}
    for cfg_type, command in commands.items():
        if retrieve in (cfg_type, 'all'):
            configs[cfg_type] = self._send_command(command)
    return configs
|
def stop_loop(self):
    """stop QUERY thread."""
    # Kill the querier greenthread first, then clear our references to it.
    hub.kill(self._querier_thread)
    self._datapath = None
    self._querier_thread = None
    self.logger.info("stopped a querier.")
|
def _dense_var_to_tensor(self, dtype=None, name=None, as_ref=False):
    """Converts a variable to a tensor.

    :param dtype: requested dtype; must match ``self.dtype`` when inside a
        TPU context, otherwise ``NotImplemented`` is returned.
    :param name: optional name for the conversion (forwarded to the primary
        variable's converter outside a TPU context).
    :param as_ref: if True, return the variable handle instead of its value.
    """
    # pylint: disable=protected-access
    if _enclosing_tpu_context() is None:
        # Not inside a TPU context: delegate the conversion to the primary
        # (per-host) variable.
        if hasattr(self._primary_var, '_dense_var_to_tensor'):
            return self._primary_var._dense_var_to_tensor(dtype, name, as_ref)
        else:
            return ops.convert_to_tensor(self._primary_var)
    # pylint: enable=protected-access
    if dtype is not None and dtype != self.dtype:
        # Conversion with a dtype change is not supported on TPU.
        return NotImplemented
    if as_ref:
        return self.handle
    else:
        return self.read_value()
|
def display_ioc(self, width=120, sep=' ', params=False):
    """Get a string representation of an IOC.

    :param width: Width to print the description too.
    :param sep: Separator used for displaying the contents of the criteria
        nodes.
    :param params: Boolean, set to True in order to display node parameters.
    :return: formatted string summarising the IOC.
    """
    s = 'Name: {}\n'.format(
        self.metadata.findtext('short_description', default='No Name'))
    s += 'ID: {}\n'.format(self.root.attrib.get('id'))
    s += 'Created: {}\n'.format(
        self.metadata.findtext('authored_date', default='No authored_date'))
    # Bug fix: dict.get() takes its default positionally only; passing it as
    # the keyword 'default' raises TypeError on a plain (ElementTree) attrib
    # dict. Pass it positionally, which also works under lxml.
    s += 'Updated: {}\n\n'.format(
        self.root.attrib.get('last-modified', 'No last-modified attrib'))
    s += 'Author: {}\n'.format(
        self.metadata.findtext('authored_by', default='No authored_by'))
    desc = self.metadata.findtext('description', default='No Description')
    # Re-wrap the description to the requested display width.
    desc = textwrap.wrap(desc, width=width)
    desc = '\n'.join(desc)
    s += 'Description:\n{}\n\n'.format(desc)
    links = self.link_text()
    if links:
        s += '{}'.format(links)
    content_text = self.criteria_text(sep=sep, params=params)
    s += '\nCriteria:\n{}'.format(content_text)
    return s
|
def julian(mon, day, year):
    """returns julian day"""
    # First day counted as Gregorian: 15 Oct 1582, encoded the same way as
    # the test expression below.
    gregorian_start = 15 + 31 * (10 + 12 * 1582)
    if year == 0:
        # There is no year zero in the Julian/Gregorian calendars.
        print("Julian no can do")
        return
    if year < 0:
        year += 1
    # Treat Jan/Feb as months 13/14 of the previous year.
    if mon > 2:
        jy, jm = year, mon + 1
    else:
        jy, jm = year - 1, mon + 13
    jd = int(365.25 * jy) + int(30.6001 * jm) + day + 1720995
    if day + 31 * (mon + 12 * year) >= gregorian_start:
        # Gregorian calendar correction for dropped leap days.
        adj = int(0.01 * jy)
        jd += 2 - adj + int(0.25 * adj)
    return jd
|
def compute_gradient(self, *args, **kwargs):
    """Compute the "gradient" of the model for the current parameters.

    The default implementation computes the gradients numerically using
    a first order forward scheme. For better performance, this method
    should be overloaded by subclasses. The output of this function
    should be an array where the first dimension is ``full_size``.
    """
    _EPS = 1.254e-5
    params = self.get_parameter_vector()
    base_value = self.get_value(*args, **kwargs)
    gradient = np.empty([len(params)] + list(base_value.shape),
                        dtype=np.float64)
    for idx in range(len(params)):
        original = params[idx]
        # Perturb one parameter, re-evaluate, then restore it.
        params[idx] = original + _EPS
        self.set_parameter_vector(params)
        perturbed = self.get_value(*args, **kwargs)
        params[idx] = original
        self.set_parameter_vector(params)
        # Forward finite difference for this parameter.
        gradient[idx] = (perturbed - base_value) / _EPS
    return gradient
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.