signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def fetch_viewers(self, game):
    """Query the viewer and channel counts of the given game and set them on it.

    :param game: the game to update
    :returns: the same game object, with ``viewers`` and ``channels`` set
    :rtype: :class:`models.Game`
    :raises: None
    """
    summary = self.kraken_request(
        'GET', 'streams/summary', params={'game': game.name}).json()
    game.viewers = summary['viewers']
    game.channels = summary['channels']
    return game
|
def from_traverse(cls, traverse_block):
    """Create a GremlinFoldedTraverse block as a copy of the given Traverse block."""
    # Guard clause: only Traverse blocks may be copied.
    if not isinstance(traverse_block, Traverse):
        raise AssertionError(
            u'Tried to initialize an instance of GremlinFoldedTraverse '
            u'with block of type {}'.format(type(traverse_block)))
    return cls(traverse_block.direction, traverse_block.edge_name)
|
def exact_(self, column, *values):
    """Return a Dataswim instance restricted to rows whose *column* has one
    of the exact string *values*."""
    filtered = self._exact(column, *values)
    if filtered is None:
        # selection failed; report before duplicating
        self.err("Can not select exact data")
    return self._duplicate_(filtered)
|
def index_from_filename(self, path):
    """Check if the path is already open in an editor tab.

    :param path: path to check
    :returns: the tab index if found, otherwise -1
    """
    if not path:
        return -1
    for index in range(self.count()):
        tab = self.widget(index)
        try:
            matches = tab.file.path == path
        except AttributeError:
            # not an editor widget
            continue
        if matches:
            return index
    return -1
|
def get_choices(cls, category):
    """Get all available options for a category.

    :raises ValueError: if the category's default is not a list of choices
    """
    default = cls._DEFAULTS[category]
    if isinstance(default, list):
        return default
    raise ValueError("{} does not offer choices".format(category))
|
def get_distance(node1, node2):
    """Report the distance in the machine topology between two nodes.

    The factors are a multiple of 10.  It returns 0 when the distance cannot
    be determined.  A node has distance 10 to itself.  Reporting the distance
    requires a Linux kernel version of 2.6.10 or newer.

    @param node1: node idx
    @type node1: C{int}
    @param node2: node idx
    @type node2: C{int}
    @rtype: C{int}
    """
    # Validate both indices (node1 first, matching the original error order).
    for node in (node1, node2):
        if node < 0 or node > get_max_node():
            raise ValueError(node)
    return libnuma.numa_distance(node1, node2)
|
def get_node_name_from_id(node_id, nodes):
    """Get the name of a node when given the node_id.

    :param int node_id: The ID of a node
    :param list nodes: list of nodes from :py:meth:`generate_nodes`
    :return: node name, or '' when no node matches
    :rtype: str
    """
    return next(
        (node['properties']['name'] for node in nodes if node['id'] == node_id),
        '')
|
def keys(self, args):
    """``keys`` wrapper that queries every shard.  This is an expensive
    operation.

    This method should be invoked on a TwemRedis instance as if it were
    being invoked directly on a StrictRedis instance.
    """
    # TODO: parallelize
    return {
        shard_num: self.get_shard_by_num(shard_num).keys(args)
        for shard_num in range(self.num_shards())
    }
|
def getDistanceRoad(self, edgeID1, pos1, edgeID2, pos2, isDriving=False):
    """getDistanceRoad(string, double, string, double, boolean) -> double

    Reads two positions on the road network and an indicator whether the air
    or the driving distance shall be computed.  Returns the according
    distance.
    """
    # Default to air distance; switch to routed (driving) distance on request.
    distType = tc.REQUEST_AIRDIST
    if isDriving:
        distType = tc.REQUEST_DRIVINGDIST
    # Message payload length: compound header (1 + 4) plus, per position,
    # 1 type byte + 4-byte string length + string bytes + 8-byte double +
    # 1 lane byte, plus a trailing 1-byte distance-type flag.
    # NOTE(review): layout inferred from the pack/_packString calls below —
    # confirm against the TraCI protocol specification.
    self._connection._beginMessage(
        tc.CMD_GET_SIM_VARIABLE, tc.DISTANCE_REQUEST, "",
        1 + 4 + 1 + 4 + len(edgeID1) + 8 + 1 + 1 + 4 + len(edgeID2) + 8 + 1 + 1)
    # Compound of 3 items: position 1, position 2, distance type.
    self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 3)
    self._connection._packString(edgeID1, tc.POSITION_ROADMAP)
    self._connection._string += struct.pack("!dB", pos1, 0)
    self._connection._packString(edgeID2, tc.POSITION_ROADMAP)
    self._connection._string += struct.pack("!dBB", pos2, 0, distType)
    return self._connection._checkResult(
        tc.CMD_GET_SIM_VARIABLE, tc.DISTANCE_REQUEST, "").readDouble()
|
def __parse_hgvs_syntax(self, aa_hgvs):
    """Convert HGVS syntax for an amino acid change into attributes.

    Specific details of the mutation are stored in attributes like
    self.initial (residue prior to mutation), self.pos (mutation position),
    self.mutated (resulting residue), and self.stop_pos (position of the
    stop codon, if any).

    The branch taken is selected by flags (self.is_missense, self.is_indel,
    etc.) that are set elsewhere before this method runs.

    Args:
        aa_hgvs (str): amino acid string following HGVS syntax
    """
    self.is_valid = True
    # assume initially the syntax is legitimate
    self.is_synonymous = False
    # assume not synonymous until proven
    if self.unknown_effect or self.is_no_protein:
        # unknown effect from mutation, usually denoted as p.?
        self.pos = None
        pass
    elif self.is_lost_stop:
        # e.g. "*123L": reference stop codon at self.pos is replaced
        self.initial = aa_hgvs[0]
        self.mutated = re.findall('([A-Z?*]+)$', aa_hgvs)[0]
        self.pos = int(re.findall('^\*(\d+)', aa_hgvs)[0])
        self.stop_pos = None
    elif self.is_lost_start:
        # e.g. "M1L": start codon change
        # NOTE(review): unlike the other branches, stop_pos is not reset
        # here — confirm whether that is intentional.
        self.initial = aa_hgvs[0]
        self.mutated = aa_hgvs[-1]
        self.pos = int(aa_hgvs[1:-1])
    elif self.is_missense:
        # e.g. "A123T": single residue substitution
        self.initial = aa_hgvs[0]
        self.mutated = aa_hgvs[-1]
        self.pos = int(aa_hgvs[1:-1])
        self.stop_pos = None
        # not a nonsense mutation
        if self.initial == self.mutated:
            self.is_synonymous = True
            self.is_non_silent = False
        elif self.mutated == '*':
            self.is_nonsense_mutation = True
    elif self.is_indel:
        if self.is_insertion:
            if not self.is_missing_info:
                # flanking residues and positions around the insertion
                self.initial = re.findall('([A-Z])\d+', aa_hgvs)[:2]
                # first two
                self.pos = tuple(map(int, re.findall('[A-Z](\d+)', aa_hgvs)[:2]))
                # first two
                self.mutated = re.findall('(?<=INS)[A-Z0-9?*]+', aa_hgvs)[0]
                self.mutated = self.mutated.strip('?')
                # remove the missing info '?'
            else:
                # insertion details unavailable
                self.initial = ''
                self.pos = tuple()
                self.mutated = ''
        elif self.is_deletion:
            if not self.is_missing_info:
                # all deleted residues and their positions
                self.initial = re.findall('([A-Z])\d+', aa_hgvs)
                self.pos = tuple(map(int, re.findall('[A-Z](\d+)', aa_hgvs)))
                self.mutated = re.findall('(?<=DEL)[A-Z]*', aa_hgvs)[0]
            else:
                # deletion details unavailable
                self.initial = ''
                self.pos = tuple()
                self.mutated = ''
    elif self.is_frame_shift:
        self.initial = aa_hgvs[0]
        self.mutated = ''
        try:
            self.pos = int(re.findall('[A-Z*](\d+)', aa_hgvs)[0])
            if self.is_premature_stop_codon:
                # distance to the new stop codon, e.g. "...fs*12"
                self.stop_pos = int(re.findall('\*>?(\d+)$', aa_hgvs)[0])
            else:
                self.stop_pos = None
        except IndexError:
            # unconventional usage of indicating frameshifts will cause
            # index errors.  For example, in some cases 'fs' is not used.
            # In other cases, either amino acids were not included or
            # just designated as a '?'
            self.logger.debug('(Parsing-Problem) frame shift hgvs string: "%s"' % aa_hgvs)
            self.pos = None
            self.stop_pos = None
            self.is_missing_info = True
    elif self.is_nonsense_mutation:
        self.initial = aa_hgvs[0]
        self.mutated = '*'
        # there is actually a stop codon
        self.stop_pos = 0
        # indicates same position is stop codon
        try:
            self.pos = int(aa_hgvs[1:-1])
        except ValueError:
            # weird error of p.E217>D*
            self.is_valid = False
            self.pos = None
            self.logger.debug('(Parsing-Problem) Invalid HGVS Amino Acid '
                              'syntax: ' + aa_hgvs)
        if self.initial == self.mutated:
            # classify nonsense-to-nonsense mutations as synonymous
            self.is_synonymous = True
            self.is_non_silent = False
    else:
        # did not match any of the possible cases
        self.is_valid = False
        self.logger.debug('(Parsing-Problem) Invalid HGVS Amino Acid '
                          'syntax: ' + aa_hgvs)
|
def ltrim1(l, proportiontocut, tail='right'):
    """Slices off the passed proportion of items from ONE end of the passed
    list (i.e., if proportiontocut = 0.1, slices off 'leftmost' or 'rightmost'
    10% of scores).  Slices off LESS if proportion results in a non-integer
    slice index (i.e., conservatively slices off proportiontocut).

    Usage:   ltrim1(l, proportiontocut, tail='right')  or set tail='left'
    Returns: trimmed version of list l
    Raises:  ValueError if tail is neither 'right' nor 'left' (previously
             this crashed with an UnboundLocalError)
    """
    if tail == 'right':
        lowercut = 0
        uppercut = len(l) - int(proportiontocut * len(l))
    elif tail == 'left':
        lowercut = int(proportiontocut * len(l))
        uppercut = len(l)
    else:
        # Bug fix: the original fell through with lowercut/uppercut unbound.
        raise ValueError("tail must be 'right' or 'left', got %r" % (tail,))
    return l[lowercut:uppercut]
|
def add_network_ipv6(self, id_vlan, id_tipo_rede, id_ambiente_vip=None, prefix=None):
    """Add a new NetworkIPv6.

    :param id_vlan: Identifier of the Vlan. Integer value and greater than zero.
    :param id_tipo_rede: Identifier of the NetworkType. Integer value and greater than zero.
    :param id_ambiente_vip: Identifier of the Environment Vip. Integer value and greater than zero.
    :param prefix: Prefix.
    :return: Dictionary of the form ``{'vlan': {...}}`` with keys: id, nome,
        num_vlan, id_tipo_rede, id_ambiente, rede_oct1..rede_oct8, bloco,
        mascara_oct1..mascara_oct8, broadcast, descricao, acl_file_name,
        acl_valida, ativada.
    :raise TipoRedeNaoExisteError: NetworkType not found.
    :raise InvalidParameterError: Invalid ID for Vlan or NetworkType.
    :raise EnvironmentVipNotFoundError: Environment VIP not registered.
    :raise IPNaoDisponivelError: Network address unavailable to create a NetworkIPv6.
    :raise ConfigEnvironmentInvalidError: Invalid Environment Configuration or not registered.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    vlan_map = {
        'id_vlan': id_vlan,
        'id_tipo_rede': id_tipo_rede,
        'id_ambiente_vip': id_ambiente_vip,
        'prefix': prefix,
    }
    code, xml = self.submit({'vlan': vlan_map}, 'POST', 'network/ipv6/add/')
    return self.response(code, xml)
|
def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix):
    """Replace old_prefix with new_prefix and old_suffix with new_suffix.

    env - Environment used to interpolate variables.
    path - the path that will be modified.
    old_prefix - construction variable for the old prefix.
    old_suffix - construction variable for the old suffix.
    new_prefix - construction variable for the new prefix.
    new_suffix - construction variable for the new suffix.
    """
    # Resolve the four construction variables up front.
    old_prefix, old_suffix, new_prefix, new_suffix = [
        self.subst('$' + var)
        for var in (old_prefix, old_suffix, new_prefix, new_suffix)]
    dirname, basename = os.path.split(str(path))
    if basename.startswith(old_prefix):
        basename = basename[len(old_prefix):]
    # Guard on a non-empty suffix: the original slice comparison never
    # matched an empty old_suffix, so neither do we.
    if old_suffix and basename.endswith(old_suffix):
        basename = basename[:-len(old_suffix)]
    return os.path.join(dirname, new_prefix + basename + new_suffix)
|
def should_stop(self, result):
    """Whether the given result meets this trial's stopping criteria."""
    # A result explicitly marked done always stops the trial.
    if result.get(DONE):
        return True
    for criteria, stop_value in self.stopping_criterion.items():
        try:
            observed = result[criteria]
        except KeyError:
            raise TuneError(
                "Stopping criteria {} not provided in result {}.".format(
                    criteria, result))
        if observed >= stop_value:
            return True
    return False
|
def cmd_cammsg(self, args):
    '''cammsg: send a DIGICAM_CONTROL command via COMMAND_LONG'''
    # Default parameter vector; param5 defaults to 1 (shoot).
    params = [0, 0, 0, 0, 1, 0, 0]
    # fill in any args passed by user (extras beyond 7 are ignored)
    for index, arg in enumerate(args[:len(params)]):
        params[index] = float(arg)
    print("Sent DIGICAM_CONTROL CMD_LONG")
    self.master.mav.command_long_send(
        self.settings.target_system,                    # target_system
        0,                                              # target_component
        mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONTROL,     # command
        0,                                              # confirmation
        *params)                                        # param1..param7
|
def largest_divisor(n: int) -> int:
    """For a given positive number n, find the largest number that divides n
    evenly, smaller than n.

    The largest proper divisor equals n divided by its smallest factor >= 2,
    so only candidates up to sqrt(n) are checked — O(sqrt(n)) instead of the
    original O(n) countdown, with identical results.

    Args:
        n (int): number to find the largest divisor for, n > 0

    Returns:
        int: the largest divisor of n that is less than n; 1 when n is prime
        or n <= 2 (matching the previous behavior)

    Examples:
        >>> largest_divisor(15)
        5
        >>> largest_divisor(17)
        1
    """
    factor = 2
    while factor * factor <= n:
        if n % factor == 0:
            # smallest factor found; the complementary divisor is the largest
            return n // factor
        factor += 1
    # n is prime (or <= 2): no proper divisor other than 1
    return 1
|
def read_byte(self, addr):
    """Read a single byte from the static memory area (blocks 0-14)."""
    # Valid byte addresses are 0..127 inclusive.
    if not 0 <= addr <= 127:
        raise ValueError("invalid byte address")
    log.debug("read byte at address {0} ({0:02X}h)".format(addr))
    command = "\x01" + chr(addr) + "\x00" + self.uid
    return self.transceive(command)[-1]
|
def logger(ref=0):
    """Find a module logger.

    If the argument passed is a module, find the logger for that module using
    the module's name; if it's a string, find a logger of that name; if an
    integer, walk the stack to the module at that height.

    The logger is always extended with a ``.configure()`` method allowing its
    log levels for syslog and stderr to be adjusted or automatically
    initialized as per the documentation for `configure()` below.
    """
    if inspect.ismodule(ref):
        name = ref.__name__
    elif isinstance(ref, basestring):
        name = ref
    else:
        # +1 accounts for this very frame
        name = stackclimber(ref + 1)
    return extend(logging.getLogger(name))
|
def _check_forward_mode_input_array(self, X: np.ndarray) -> int:
    """Check whether one forward mode input array is of valid shape.

    Returns the inferred value of T.
    """
    if not isinstance(X, np.ndarray):
        raise ValueError('X must be a numpy array, dict, or scalar')
    shape = X.shape
    tensor_rank = len(shape)
    # Only scalar (0D), 1D and 2D arrays are supported.
    if tensor_rank not in (0, 1, 2):
        raise ValueError(f'Shape of X = {X.shape}. Numpy array must be a 1D vector or 2D matrix')
    if tensor_rank == 0:
        # A scalar stands for a single sample.
        return 1
    if tensor_rank == 1:
        # A 1D vector must EITHER (1) have length m (so T == 1), or
        # (2) have length T when m == 1.
        if shape[0] != self.m and self.m != 1:
            raise ValueError(f'Error: X has shape {X.shape}, incompatible with m = {self.m} on fluxion.')
        if shape[0] == self.m:
            return 1
        return shape[0]
    # A 2D input must be of shape T x m.
    if shape[1] != self.m:
        raise ValueError(f'Error: X has shape {X.shape}, incompatible with m = {self.m} on fluxion.')
    return shape[0]
|
def connect(self):
    """Connect to the device.

    :return: self on success (the docstring previously said bool;
        NOTE(review): callers appear to rely on the fluent return — confirm)
    :raises ZKNetworkError: when the device does not answer the ping check
    :raises ZKErrorResponse: when authentication fails or the reply is invalid
    """
    self.end_live_capture = False
    # Optional reachability check before opening the socket.
    if not self.ommit_ping and not self.helper.test_ping():
        raise ZKNetworkError("can't reach device (ping %s)" % self.__address[0])
    if not self.force_udp and self.helper.test_tcp() == 0:
        self.user_packet_size = 72
        # default zk8
    self.__create_socket()
    # Reset session state before the CONNECT handshake.
    self.__session_id = 0
    self.__reply_id = const.USHRT_MAX - 1
    cmd_response = self.__send_command(const.CMD_CONNECT)
    # The device assigns the session id in the reply header.
    self.__session_id = self.__header[2]
    if cmd_response.get('code') == const.CMD_ACK_UNAUTH:
        # Device requires authentication: derive the comm key and retry.
        if self.verbose:
            print("try auth")
        command_string = make_commkey(self.__password, self.__session_id)
        cmd_response = self.__send_command(const.CMD_AUTH, command_string)
    if cmd_response.get('status'):
        self.is_connect = True
        # connect, auth ok
        return self
    else:
        if cmd_response["code"] == const.CMD_ACK_UNAUTH:
            raise ZKErrorResponse("Unauthenticated")
        if self.verbose:
            print("connect err response {} ".format(cmd_response["code"]))
        raise ZKErrorResponse("Invalid response: Can't connect")
|
def save(self, filename):
    """Save the data for this settings instance to the given filename.

    :param filename: <str>
    :return: True on success, False when the file cannot be written
    """
    # Make sure the target directory exists.
    target_dir = os.path.dirname(filename)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    try:
        handle = open(filename, 'w')
    except StandardError:
        log.error('Failed to access file: {0}'.format(filename))
        return False
    try:
        handle.write(yaml.dump(self._root, default_flow_style=False))
    except StandardError:
        log.error('Failed to save settings: {0}'.format(filename))
        return False
    finally:
        handle.close()
    return True
|
def congestion(self):
    """Retrieve the congestion information of the incident/incidents from
    the output response.

    Returns:
        congestion (namedtuple): list of named tuples of congestion info of
        the incident/incidents, or None when unavailable
    """
    incidents = self.traffic_incident()
    record = namedtuple('congestion', 'congestion')
    if len(incidents) == 1 and incidents[0] is None:
        return None
    try:
        return [record(incident['congestion']) for incident in incidents]
    except (KeyError, TypeError):
        # fall back to the alternate response schema
        try:
            return [record(incident['CongestionInfo']) for incident in incidents]
        except KeyError:
            return None
|
def _run_in_parallel(programs, nsamples, cxn):
    """See docs for ``run_in_parallel()``.

    :param Union[np.ndarray, List[List[Program]]] programs: A rectangular list of lists, or a 2d
        array of Quil Programs. The outer list iterates over disjoint qubit groups as targets, the
        inner list over programs to run on those qubits, e.g., tomographic sequences.
    :param int nsamples: Number of repetitions for executing each Program.
    :param QPUConnection|QVMConnection cxn: The quantum machine connection.
    :return: An array of 2d arrays that provide bitstring histograms for each input program.
        The axis of the outer array iterates over the disjoint qubit groups, the outer axis of the
        inner 2d array iterates over the programs for that group and the inner most axis iterates
        over all possible bitstrings for the qubit group under consideration. The bitstrings are
        enumerated in lexicographical order, i.e., for a program with qubits {3, 1, 2} the qubits
        are first sorted -> [1, 2, 3] and then the bitstrings are enumerated as 000, 001, 010,
        where the bits ijk correspond to the states of qubits 1, 2 and 3, respectively.
    :rtype: np.array
    """
    n_groups = len(programs)
    n_progs_per_group = len(programs[0])
    # Enforce a rectangular grid: every group must run the same number of programs.
    for progs in programs[1:]:
        if not len(progs) == n_progs_per_group:
            raise ValueError("Non-rectangular grid of programs specified: {}".format(programs))
    # identify qubit groups, ensure disjointedness
    qubit_groups = [set() for _ in range(n_groups)]
    for group_idx, group in enumerate(qubit_groups):
        for prog in programs[group_idx]:
            group.update(set(prog.get_qubits()))
        # test that groups are actually disjoint by comparing with the ones already created
        for other_idx, other_group in enumerate(qubit_groups[:group_idx]):
            intersection = other_group & group
            if intersection:
                raise ValueError("Programs from groups {} and {} intersect on qubits {}".format(
                    other_idx, group_idx, intersection))
    # Sort qubits within each group so bitstrings enumerate lexicographically.
    qubit_groups = [sorted(c) for c in qubit_groups]
    all_qubits = sum(qubit_groups, [])
    n_qubits_per_group = [len(c) for c in qubit_groups]
    # create joint programs: the j-th parallel program concatenates the j-th
    # program of every group into a single Program
    parallel_programs = [sum(progsj, Program()) for progsj in zip(*programs)]
    # execute on cxn
    all_results = []
    for i, prog in izip(TRANGE(n_progs_per_group), parallel_programs):
        try:
            results = cxn.run_and_measure(prog, all_qubits, nsamples)
            all_results.append(np.array(results))
        except QPUError as e:
            _log.error("Could not execute parallel program:\n%s", prog.out())
            raise e
    # generate histograms per qubit group
    all_histograms = np.array([np.zeros((n_progs_per_group, 2 ** n_qubits), dtype=int)
                               for n_qubits in n_qubits_per_group])
    for idx, results in enumerate(all_results):
        n_qubits_seen = 0
        for jdx, n_qubits in enumerate(n_qubits_per_group):
            # slice out the measurement columns belonging to this group
            group_results = results[:, n_qubits_seen:n_qubits_seen + n_qubits]
            outcome_labels = list(map(bitlist_to_int, group_results))
            dimension = 2 ** n_qubits
            all_histograms[jdx][idx] = make_histogram(outcome_labels, dimension)
            n_qubits_seen += n_qubits
    return all_histograms
|
def get_payload(self, data):
    """Parse the length of the payload in *data* and return it.

    Returns a (payload, extra_data) pair: the JSON-decoded payload and any
    trailing bytes after it.
    """
    length = ord(data[1:2])
    if length == 126:
        # Payload length is carried in 2 extra bytes.
        length, = unpack(">H", data[2:4])
        start = 4
    elif length == 127:
        # Payload information occupies an extra 6 bytes, of which only 4 are
        # read as the length.  NOTE(review): RFC 6455 specifies an 8-byte
        # (">Q") extended length for marker 127 — confirm this framing is
        # intentional for this protocol.
        length, = unpack(">I", data[2:6])
        start = 8
    else:
        start = 2
    end = start + length
    payload = json.loads(data[start:end].decode())
    return payload, data[end:]
|
def check_statement(self, stmt, max_paths=1, max_path_length=5):
    """Check a single Statement against the model.

    Parameters
    ----------
    stmt : indra.statements.Statement
        The Statement to check.
    max_paths : Optional[int]
        The maximum number of specific paths to return for each Statement
        to be explained. Default: 1
    max_path_length : Optional[int]
        The maximum length of specific paths to return. Default: 5

    Returns
    -------
    PathResult
        Result object; its path_found flag is True if the model satisfies
        the Statement.
    """
    # Make sure the influence map is initialized
    self.get_im()
    # Check if this is one of the statement types that we can check
    if not isinstance(stmt, (Modification, RegulateAmount,
                             RegulateActivity, Influence)):
        return PathResult(False, 'STATEMENT_TYPE_NOT_HANDLED',
                          max_paths, max_path_length)
    # Get the polarity for the statement
    if isinstance(stmt, Modification):
        target_polarity = -1 if isinstance(stmt, RemoveModification) else 1
    elif isinstance(stmt, RegulateActivity):
        target_polarity = 1 if stmt.is_activation else -1
    elif isinstance(stmt, RegulateAmount):
        target_polarity = -1 if isinstance(stmt, DecreaseAmount) else 1
    elif isinstance(stmt, Influence):
        target_polarity = -1 if stmt.overall_polarity() == -1 else 1
    # Get the subject and object (works also for Modifications)
    subj, obj = stmt.agent_list()
    # Get a list of monomer patterns matching the subject FIXME Currently
    # this will match rules with the corresponding monomer pattern on it.
    # In future, this statement should (possibly) also match rules in which
    # 1) the agent is in its active form, or 2) the agent is tagged as the
    # enzyme in a rule of the appropriate activity (e.g., a phosphorylation
    # rule) FIXME
    if subj is not None:
        subj_mps = list(pa.grounded_monomer_patterns(
            self.model, subj, ignore_activities=True))
        if not subj_mps:
            logger.debug('No monomers found corresponding to agent %s' % subj)
            return PathResult(False, 'SUBJECT_MONOMERS_NOT_FOUND',
                              max_paths, max_path_length)
    else:
        subj_mps = [None]
    # Observables may not be found for an activation since there may be no
    # rule in the model activating the object, and the object may not have
    # an "active" site of the appropriate type
    obs_names = self.stmt_to_obs[stmt]
    if not obs_names:
        logger.debug("No observables for stmt %s, returning False" % stmt)
        return PathResult(False, 'OBSERVABLES_NOT_FOUND',
                          max_paths, max_path_length)
    # NOTE: Returns on the path found for the first enz_mp/obs combo
    for subj_mp, obs_name in itertools.product(subj_mps, obs_names):
        result = self._find_im_paths(subj_mp, obs_name, target_polarity,
                                     max_paths, max_path_length)
        # If a path was found, then we return it; otherwise, that means
        # there was no path for this observable, so we have to try the next
        # one
        if result.path_found:
            return result
    # If we got here, then there was no path for any observable
    return PathResult(False, 'NO_PATHS_FOUND', max_paths, max_path_length)
|
def setup_actions(self):
    """Connects slots to signals."""
    # Table-driven wiring: (signal, slot) pairs.
    connections = (
        (self.actionOpen.triggered, self.on_open),
        (self.actionNew.triggered, self.on_new),
        (self.actionSave.triggered, self.on_save),
        (self.actionSave_as.triggered, self.on_save_as),
        (self.actionQuit.triggered, QtWidgets.QApplication.instance().quit),
        (self.tabWidget.current_changed, self.on_current_tab_changed),
        (self.actionAbout.triggered, self.on_about),
    )
    for signal, slot in connections:
        signal.connect(slot)
|
def validate_line(self, line):
    """Validate a Unicode IPA string relative to panphon.

    line -- string of IPA characters.  Can contain whitespace and limited
    punctuation.  Problems are reported on stderr; the line is consumed one
    match (or one character) at a time.
    """
    original = line
    pos = 0
    while line:
        segment = self.ft.seg_regex.match(line)
        whitespace = self.ws_punc_regex.match(line)
        if segment:
            consumed = len(segment.group(0))
        elif whitespace:
            consumed = len(whitespace.group(0))
        else:
            # Unrecognized character: report it and skip one position.
            msg = 'IPA not valid at position {} in "{}".'.format(pos, original.strip())
            print(msg, file=sys.stderr)
            consumed = 1
        line = line[consumed:]
        pos += consumed
|
def assert_close(a, b, rtol=1e-07, atol=0, context=None):
    """Compare for equality up to a given precision two composite objects
    which may contain floats.  NB: if the objects are or contain generators,
    they are exhausted.

    :param a: an object
    :param b: another object
    :param rtol: relative tolerance
    :param atol: absolute tolerance
    :param context: object used to give context in the error message
    :raises AssertionError: if the objects differ beyond the tolerance
    """
    if isinstance(a, float) or isinstance(a, numpy.ndarray) and a.shape:
        # shortcut: numeric comparison with tolerance
        numpy.testing.assert_allclose(a, b, rtol, atol)
        return
    if isinstance(a, (str, bytes, int)):
        # another shortcut: exact comparison
        assert a == b, (a, b)
        return
    if hasattr(a, '_slots_'):
        # record-like objects
        assert a._slots_ == b._slots_
        for x in a._slots_:
            assert_close(getattr(a, x), getattr(b, x), rtol, atol, x)
        return
    if hasattr(a, 'keys'):
        # dict-like objects
        assert a.keys() == b.keys()
        for x in a:
            if x != '__geom__':
                assert_close(a[x], b[x], rtol, atol, x)
        return
    if hasattr(a, '__dict__'):
        # objects with an attribute dictionary
        assert_close(vars(a), vars(b), context=a)
        return
    if hasattr(a, '__iter__'):
        # iterable objects (generators are exhausted here)
        xs, ys = list(a), list(b)
        # Bug fix: corrected the misspelling "lenghts" in the error message.
        assert len(xs) == len(ys), (
            'Lists of different lengths: %d != %d' % (len(xs), len(ys)))
        for x, y in zip(xs, ys):
            assert_close(x, y, rtol, atol, x)
        return
    if a == b:
        # last attempt to avoid raising the exception
        return
    ctx = '' if context is None else 'in context ' + repr(context)
    raise AssertionError('%r != %r %s' % (a, b, ctx))
|
def ReadFrom(self, byte_stream):
    """Read values from a byte stream.

    Args:
        byte_stream (bytes): byte stream.

    Returns:
        tuple[object, ...]: values copied from the byte stream.

    Raises:
        IOError: if the byte stream cannot be read.
        OSError: if the byte stream cannot be read.
    """
    try:
        values = self._struct.unpack_from(byte_stream)
    except (TypeError, struct.error) as error:
        raise IOError(
            'Unable to read byte stream with error: {0!s}'.format(error))
    return values
|
def __method_descriptor(self, service, method_info, operation_id,
                        protorpc_method_info, security_definitions):
    """Describes a method.

    Args:
        service: endpoints.Service, Implementation of the API as a service.
        method_info: _MethodInfo, Configuration for the method.
        operation_id: string, Operation ID of the method.
        protorpc_method_info: protorpc.remote._RemoteMethodInfo, ProtoRPC
            description of the method.
        security_definitions: list of dicts, security definitions for the API.

    Returns:
        Dictionary describing the method.
    """
    descriptor = {}
    # Resolve the actual request message (unwraps ResourceContainer wrappers).
    request_message_type = (resource_container.ResourceContainer.
                            get_request_message(protorpc_method_info.remote))
    request_kind = self.__get_request_kind(method_info)
    remote_method = protorpc_method_info.remote
    path = method_info.get_path(service.api_info)
    descriptor['parameters'] = self.__request_message_descriptor(
        request_kind, request_message_type,
        method_info.method_id(service.api_info), path)
    descriptor['responses'] = self.__response_message_descriptor(
        remote_method.response_type(),
        method_info.method_id(service.api_info))
    descriptor['operationId'] = operation_id
    # Insert the auth audiences, if any; method-level audiences override the
    # API-level ones.
    api_key_required = method_info.is_api_key_required(service.api_info)
    if method_info.audiences is not None:
        descriptor['security'] = self.__security_descriptor(
            method_info.audiences, security_definitions,
            api_key_required=api_key_required)
    elif service.api_info.audiences is not None or api_key_required:
        descriptor['security'] = self.__security_descriptor(
            service.api_info.audiences, security_definitions,
            api_key_required=api_key_required)
    # Insert the metric costs, if any
    if method_info.metric_costs:
        descriptor['x-google-quota'] = self.__x_google_quota_descriptor(
            method_info.metric_costs)
    return descriptor
|
def _clean_props(self):
    """Makes sure all properties are legit for isochrone.

    Not done in __init__ in order to save speed on loading.  First removes
    properties the isochrone does not know about, then removes properties
    whose first value is NaN or inf.
    """
    # Pass 1: drop properties unknown to the isochrone, except a small set
    # of special names and anything matching 'delta_'.
    remove = []
    for p in self.properties.keys():
        if (not hasattr(self.ic, p) and p not in self.ic.bands
                and p not in ['parallax', 'feh', 'age', 'mass_B', 'mass_C']
                and not re.search('delta_', p)):
            remove.append(p)
    for p in remove:
        del self.properties[p]
    if len(remove) > 0:
        logging.warning('Properties removed from Model because ' +
                        'not present in {}: {}'.format(type(self.ic), remove))
    # Pass 2: drop properties whose first value is not finite.
    remove = []
    for p in self.properties.keys():
        try:
            val = self.properties[p][0]
            if not np.isfinite(val):
                remove.append(p)
        except Exception:
            # Bug fix: was a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit.  Non-numeric or oddly shaped
            # values cannot be checked, so they are kept.
            pass
    for p in remove:
        del self.properties[p]
    if len(remove) > 0:
        logging.warning('Properties removed from Model because ' +
                        'value is nan or inf: {}'.format(remove))
    self._props_cleaned = True
|
def GetMetadataAttribute(self, attribute_name):
    """Retrieves the metadata attribute.

    Args:
        attribute_name (str): name of the metadata attribute.

    Returns:
        str: the metadata attribute or None.

    Raises:
        RuntimeError: if more than one value is found in the database.
    """
    table_name = 'metadata'
    if not self._database_file.HasTable(table_name):
        return None
    condition = 'name == "{0:s}"'.format(attribute_name)
    values = list(self._database_file.GetValues(
        [table_name], ['value'], condition))
    if not values:
        return None
    if len(values) > 1:
        raise RuntimeError('More than one value found in database.')
    return values[0]['value']
|
def fill_values_to_nan(masked_array):
    """Replace the fill values of *masked_array* by NaNs, in place.

    If the array is None or does not contain floating point values, it
    cannot contain NaNs; in that case it is returned unchanged.

    :returns: the (possibly modified) masked array.
    """
    if masked_array is not None and masked_array.dtype.kind == 'f':
        check_class(masked_array, ma.masked_array)
        logger.debug("Replacing fill_values by NaNs")
        masked_array[:] = ma.filled(masked_array, np.nan)
        masked_array.set_fill_value(np.nan)
    # Bug fix: previously only the non-float branch returned the array, so
    # callers received None for float input. Always return it.
    return masked_array
|
def _attrib_to_transform(attrib):
    """Extract a homogeneous transform from an attribute dictionary.

    Parameters
    ----------
    attrib : dict, optionally containing 'transform'

    Returns
    -------
    transform : (4, 4) float, homogeneous transformation
    """
    matrix = np.eye(4, dtype=np.float64)
    if 'transform' in attrib:
        # Their format: 12 whitespace-separated floats, stored 4x3 and
        # transposed into the upper 3x4 of the homogeneous matrix.
        raw = np.array(attrib['transform'].split(), dtype=np.float64)
        matrix[:3, :4] = raw.reshape((4, 3)).T
    return matrix
|
def decode_int(stream, signed=False):
    """Decode a variable-length C{int} from *stream*.

    Up to three leading bytes contribute 7 bits each while their high
    (continuation) bit is set; a fourth byte contributes all 8 bits,
    yielding a 29-bit integer. The top bit of the result selects
    sign-extension (when *signed*) or a final shift-and-set.
    """
    count = 0
    value = 0
    byte = stream.read_uchar()
    # Accumulate 7-bit groups while the continuation bit is set (max 3x).
    while byte & 0x80 and count < 3:
        value = (value << 7) | (byte & 0x7f)
        byte = stream.read_uchar()
        count += 1
    if count < 3:
        # Final byte carries 7 payload bits.
        value = (value << 7) | byte
    else:
        # Fourth byte carries a full 8 bits.
        value = (value << 8) | byte
    if value & 0x10000000:
        if signed:
            value -= 0x20000000
        else:
            value = (value << 1) + 1
    return value
|
def pipe_datebuilder(context=None, _INPUT=None, conf=None, **kwargs):
    """A date module that converts a text string into a datetime value.
    Useful as terminal data. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipeforever pipe or an iterable of items
    conf : {'DATE': {'type': 'datetime', 'value': '12/2/2014'}}

    Yields
    ------
    _OUTPUT : date timetuples
    """
    conf = DotDict(conf)
    for item in _INPUT:
        text = utils.get_value(conf['DATE'], DotDict(item), **kwargs).lower()
        if text.endswith(' day') or text.endswith(' days'):
            # Relative offset in days from today.
            offset = int(text.split(' ')[0])
            parsed = dt.today() + timedelta(days=offset)
        elif text.endswith(' year') or text.endswith(' years'):
            # Relative offset in years from today.
            offset = int(text.split(' ')[0])
            today = dt.today()
            parsed = today.replace(year=today.year + offset)
        else:
            # Named dates first, then free-form parsing.
            parsed = SWITCH.get(text) or utils.get_date(text)
            if not parsed:
                raise Exception('Unrecognized date string: %s' % text)
        yield parsed.timetuple()
|
def change_node_affiliations(self, jid, node, affiliations_to_set):
    """Update the affiliations at a node.

    :param jid: Address of the PubSub service.
    :type jid: :class:`aioxmpp.JID`
    :param node: Name of the node to modify
    :type node: :class:`str`
    :param affiliations_to_set: Iterable of ``(jid, affiliation)`` pairs,
        where each ``jid`` is the entity to affiliate and ``affiliation``
        the affiliation to assign to it.
    :raises aioxmpp.errors.XMPPError: as returned by the service
    """
    # Build one OwnerAffiliation entry per (jid, affiliation) pair.
    entries = [
        pubsub_xso.OwnerAffiliation(entry_jid, affiliation)
        for entry_jid, affiliation in affiliations_to_set
    ]
    payload = pubsub_xso.OwnerRequest(
        pubsub_xso.OwnerAffiliations(node, affiliations=entries)
    )
    request = aioxmpp.stanza.IQ(
        type_=aioxmpp.structs.IQType.SET,
        to=jid,
        payload=payload,
    )
    yield from self.client.send(request)
|
def _label_names_correct(self, labels):
    """Validate label names; raise ValueError on reserved names or prefixes."""
    for name in labels:
        # Reserved label names are rejected outright.
        if name in RESTRICTED_LABELS_NAMES:
            raise ValueError("Labels not correct")
        # So are names starting with any restricted prefix.
        if any(name.startswith(prefix) for prefix in RESTRICTED_LABELS_PREFIXES):
            raise ValueError("Labels not correct")
    return True
|
def from_attrdict(cls, attrdict: OrderedNamespace) -> object:
    """Build a new instance of the ORM class from values in an attrdict.

    Every attribute of *attrdict* becomes a keyword argument to the
    class constructor.
    """
    # noinspection PyArgumentList
    return cls(**attrdict.__dict__)
|
def query(self, variables, evidence=None, args='exact'):
    """Query method for Dynamic Bayesian Network using Interface Algorithm.

    Parameters
    ----------
    variables: list
        list of variables for which you want to compute the probability
    evidence: dict
        a dict key, value pair as {var: state_of_var_observed}
        None if no evidence
    args: str
        inference style; only 'exact' is currently supported. Any other
        value makes this method silently return None.

    Examples
    --------
    >>> from pgmpy.factors.discrete import TabularCPD
    >>> from pgmpy.models import DynamicBayesianNetwork as DBN
    >>> from pgmpy.inference import DBNInference
    >>> dbnet = DBN()
    >>> dbnet.add_edges_from([(('Z', 0), ('X', 0)), (('X', 0), ('Y', 0)),
    ...                       (('Z', 0), ('Z', 1))])
    >>> z_start_cpd = TabularCPD(('Z', 0), 2, [[0.5, 0.5]])
    >>> x_i_cpd = TabularCPD(('X', 0), 2, [[0.6, 0.9],
    ...                                    [0.4, 0.1]],
    ...                      evidence=[('Z', 0)],
    ...                      evidence_card=[2])
    >>> y_i_cpd = TabularCPD(('Y', 0), 2, [[0.2, 0.3],
    ...                                    [0.8, 0.7]],
    ...                      evidence=[('X', 0)],
    ...                      evidence_card=[2])
    >>> z_trans_cpd = TabularCPD(('Z', 1), 2, [[0.4, 0.7],
    ...                                        [0.6, 0.3]],
    ...                          evidence=[('Z', 0)],
    ...                          evidence_card=[2])
    >>> dbnet.add_cpds(z_start_cpd, z_trans_cpd, x_i_cpd, y_i_cpd)
    >>> dbnet.initialize_initial_state()
    >>> dbn_inf = DBNInference(dbnet)
    >>> dbn_inf.query([('X', 0)], {('Y', 0): 0, ('Y', 1): 1, ('Y', 2): 1})[('X', 0)].values
    array([ 0.66594382,  0.33405618])
    """
    # Only exact inference via the backward interface algorithm is
    # implemented; other values of `args` fall through and return None.
    if args == 'exact':
        return self.backward_inference(variables, evidence)
|
def unpack(self, buff, offset=0):
    """Unpack a binary message into this object's attributes.

    Unpack the binary value *buff* and update this object's attributes
    based on the results.

    Args:
        buff (bytes): Binary data package to be unpacked.
        offset (int): Where to begin unpacking.

    Raises:
        Exception: If there is a struct unpacking error.
    """
    header = UBInt16()
    header.unpack(buff[offset:offset + 2])
    # TLV header layout: top 7 bits are the TLV type, low 9 bits (mask
    # 511 = 0x1FF) are the length of the value that follows.
    self.tlv_type = header.value >> 9
    length = header.value & 511
    begin, end = offset + 2, offset + 2 + length
    # The first value byte is the sub-type; the remainder is opaque data.
    sub_type = UBInt8()
    sub_type.unpack(buff[begin:begin + 1])
    self.sub_type = sub_type.value
    self.sub_value = BinaryData(buff[begin + 1:end])
|
def kill_session(self):
    """``$ tmux kill-session``.

    :raises exc.LibTmuxException: if tmux reports anything on stderr.
    """
    result = self.cmd('kill-session', '-t%s' % self.id)
    if result.stderr:
        raise exc.LibTmuxException(result.stderr)
|
def getCeilingZoom(self, resolution, unit='meters'):
    """Return the maximized zoom level for a given resolution.

    Parameters:
        resolution -- max. resolution
        unit -- unit for output (default='meters')
    """
    # Exact match: no rounding needed.
    if resolution in self.RESOLUTIONS:
        return self.getZoom(resolution)
    low, high = self._getZoomLevelRange(resolution, unit)
    # Degenerate ranges collapse to the lower bound.
    if low == 0 or low == high:
        return low
    # Clamp to the last defined zoom level.
    if high == len(self.RESOLUTIONS):
        return high - 1
    return low + 1
|
def roll_die(qvm, number_of_sides):
    """Roll an n-sided quantum die on the given QVM connection."""
    program = die_program(number_of_sides)
    executable = qvm.compile(program)
    return process_results(qvm.run(executable))
|
def _parse_sequence_tag(self):
    """Parse the <sequence> element: sequence string, length, mass, checksum."""
    entry_tag = self.entry_tag
    # Only take sequence tags that are direct children of the entry tag
    # (sequence tags can also be children of entry.comment.conflict).
    sequence_tags = [
        child for child in entry_tag.childNodes
        if child.nodeType == child.ELEMENT_NODE and child.tagName == 'sequence'
    ]
    assert (len(sequence_tags) == 1)
    sequence_tag = sequence_tags[0]
    # Atomic mass, sequence (line breaks removed), length, CRC64 digest.
    self.atomic_mass = float(sequence_tag.getAttribute("mass"))
    self.sequence = "".join(sequence_tag.firstChild.nodeValue.strip().split("\n"))
    self.sequence_length = int(sequence_tag.getAttribute("length"))
    self.CRC64Digest = sequence_tag.getAttribute("checksum")
|
def contrail_error_handler(f):
    """Decorator translating API-server HttpError into a readable message."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except HttpError as error:
            # Swap message and details so the meaningful text is shown first.
            if error.details:
                error.message, error.details = error.details, error.message
            error.args = ("%s (HTTP %s)" % (error.message, error.http_status),)
            raise
    return wrapper
|
def SaveName_Conv(Mod=None, Cls=None, Type=None, Name=None, Deg=None, Exp=None,
                  Diag=None, shot=None, version=None, usr=None, Include=defInclude):
    """Return a default name for saving the object.

    Includes key info for fast identification of the object from the file
    name. Used on object creation by :class:`~tofu.pathfile.ID`.
    It is recommended to use this default name.
    """
    Modstr = dModes[Mod] if Mod is not None else None
    Include = defInclude if Include is None else Include
    # Append the type to the class string when requested.
    if Cls is not None and Type is not None and 'Type' in Include:
        Clsstr = Cls + Type
    else:
        Clsstr = Cls
    Dict = {'Mod': Modstr, 'Cls': Clsstr, 'Name': Name}
    # Map include keys to their argument values explicitly instead of the
    # previous eval()-based lookup (safer and easier to reason about).
    values = {'Exp': Exp, 'Diag': Diag, 'version': version, 'usr': usr}
    for key in Include:
        if key in ['Mod', 'Cls', 'Type', 'Name']:
            continue
        Dict[key] = None
        if key == 'Deg' and Deg is not None:
            Dict[key] = dPref[key] + '{0:02.0f}'.format(Deg)
        elif key == 'shot' and shot is not None:
            Dict[key] = dPref[key] + '{0:05.0f}'.format(shot)
        elif values.get(key) is not None:
            Dict[key] = dPref[key] + values[key]
    # Data objects order the shot before the name; others after.
    if 'Data' in Cls:
        Order = ['Mod', 'Cls', 'Exp', 'Deg', 'Diag', 'shot', 'Name', 'version', 'usr']
    else:
        Order = ['Mod', 'Cls', 'Exp', 'Deg', 'Diag', 'Name', 'shot', 'version', 'usr']
    SVN = ""
    for key in Order:
        if key in Include and Dict[key] is not None:
            SVN += '_' + Dict[key]
    # Collapse doubled separators and drop a leading one.
    SVN = SVN.replace('__', '_')
    if SVN[0] == '_':
        SVN = SVN[1:]
    return SVN
|
def post(self, resource_endpoint, data=None, files=None):
    """POST *data* (and optional *files*) to the given resource endpoint.

    Internal helper -- prefer the higher-level API methods ("Don't use it").
    """
    # Bug fix: `data={}` was a mutable default argument; use None sentinel.
    if data is None:
        data = {}
    url = self._create_request_url(resource_endpoint)
    if files:
        # Multipart upload: parameters must be form-encoded, not JSON.
        data = self._prepare_params_for_file_upload(data)
        return req.post(url, headers=self.auth_header, files=files, data=data)
    return req.post(url, headers=self.auth_header, json=data)
|
def _readResponse(self):
    """Yield each row of the response until '!done' is received.

    :throws TrapError: If one '!trap' is received.
    :throws MultiTrapError: If more than one '!trap' is received.
    """
    traps = []
    reply_word = None
    while reply_word != '!done':
        reply_word, words = self._readSentence()
        if reply_word == '!trap':
            traps.append(TrapError(**words))
        elif reply_word in ('!re', '!done') and words:
            yield words
    # Raise only after the exchange completes, so the protocol stream
    # stays in sync.
    if len(traps) == 1:
        raise traps[0]
    if traps:
        raise MultiTrapError(*traps)
|
def index(self):
    """Return the first occurrence of this pedalboard in its bank.

    :raises IndexError: if the pedalboard is not attached to a bank.
    """
    bank = self.bank
    if bank is None:
        raise IndexError('Pedalboard not contains a bank')
    return bank.pedalboards.index(self)
|
def pretty_echo(cls, message):
    """Pretty-print *message*, but only when attached to a terminal."""
    if not cls.intty():
        return
    if message:
        from pprint import pprint
        pprint(message)
|
def is_read_only(object):
    """Return whether the given object is read only (built-in or extension).

    :param object: Object.
    :type object: object
    :return: Is object read only.
    :rtype: bool
    """
    probe = "_trace__read__"
    try:
        # Writable objects accept (and then lose) a temporary attribute.
        setattr(object, probe, True)
        delattr(object, probe)
    except (TypeError, AttributeError):
        return True
    return False
|
def plot(data, pconfig=None):
    """Plot a line graph with X,Y data.

    :param data: 2D dict, first keys as sample names, then x:y data pairs;
        may also be a list of such dicts, one per dataset
    :param pconfig: optional dict with config key:value pairs. See CONTRIBUTING.md
    :return: HTML and JS, ready to be inserted into the page
    """
    # Don't just use {} as the default argument as it's mutable. See:
    # http://python-guide-pt-br.readthedocs.io/en/latest/writing/gotchas/
    if pconfig is None:
        pconfig = {}

    # Allow user to overwrite any given config for this plot
    if 'id' in pconfig and pconfig['id'] and pconfig['id'] in config.custom_plot_config:
        for k, v in config.custom_plot_config[pconfig['id']].items():
            pconfig[k] = v

    # Given one dataset - turn it into a list
    if type(data) is not list:
        data = [data]

    # Smooth dataset if requested in config
    if pconfig.get('smooth_points', None) is not None:
        sumcounts = pconfig.get('smooth_points_sumcounts', True)
        for i, d in enumerate(data):
            # sumcounts may be a per-dataset list or a single flag
            if type(sumcounts) is list:
                sumc = sumcounts[i]
            else:
                sumc = sumcounts
            data[i] = smooth_line_data(d, pconfig['smooth_points'], sumc)

    # Add sane plotting config defaults
    for idx, yp in enumerate(pconfig.get('yPlotLines', [])):
        pconfig['yPlotLines'][idx]["width"] = pconfig['yPlotLines'][idx].get("width", 2)

    # Add initial axis labels if defined in `data_labels` but not main config
    if pconfig.get('ylab') is None:
        try:
            pconfig['ylab'] = pconfig['data_labels'][0]['ylab']
        except (KeyError, IndexError):
            pass
    if pconfig.get('xlab') is None:
        try:
            pconfig['xlab'] = pconfig['data_labels'][0]['xlab']
        except (KeyError, IndexError):
            pass

    # Generate the data dict structure expected by HighCharts series
    plotdata = list()
    for data_index, d in enumerate(data):
        thisplotdata = list()
        for s in sorted(d.keys()):
            # Ensure any overwritting conditionals from data_labels (e.g. ymax) are taken in consideration
            series_config = pconfig.copy()
            if 'data_labels' in pconfig and type(pconfig['data_labels'][data_index]) is dict:
                # if not a dict: only dataset name is provided
                series_config.update(pconfig['data_labels'][data_index])

            pairs = list()
            maxval = 0
            if 'categories' in series_config:
                # Categorical x axis: record category names, keep bare y values
                pconfig['categories'] = list()
                for k in d[s].keys():
                    pconfig['categories'].append(k)
                    pairs.append(d[s][k])
                    maxval = max(maxval, d[s][k])
            else:
                for k in sorted(d[s].keys()):
                    if k is not None:
                        # Discard points outside the configured x range
                        if 'xmax' in series_config and float(k) > float(series_config['xmax']):
                            continue
                        if 'xmin' in series_config and float(k) < float(series_config['xmin']):
                            continue
                    if d[s][k] is not None:
                        # Discard points outside the configured y range
                        if 'ymax' in series_config and float(d[s][k]) > float(series_config['ymax']):
                            continue
                        if 'ymin' in series_config and float(d[s][k]) < float(series_config['ymin']):
                            continue
                    pairs.append([k, d[s][k]])
                    try:
                        maxval = max(maxval, d[s][k])
                    except TypeError:
                        pass
            if maxval > 0 or series_config.get('hide_empty') is not True:
                this_series = {'name': s, 'data': pairs}
                try:
                    this_series['color'] = series_config['colors'][s]
                except:
                    pass
                thisplotdata.append(this_series)
        plotdata.append(thisplotdata)

    # Add on annotation data series
    try:
        if pconfig.get('extra_series'):
            extra_series = pconfig['extra_series']
            # Normalise to a list-of-lists (one inner list per dataset)
            if type(pconfig['extra_series']) == dict:
                extra_series = [[pconfig['extra_series']]]
            elif type(pconfig['extra_series']) == list and type(pconfig['extra_series'][0]) == dict:
                extra_series = [pconfig['extra_series']]
            for i, es in enumerate(extra_series):
                for s in es:
                    plotdata[i].append(s)
    except (KeyError, IndexError):
        pass

    # Make a plot - template custom, or interactive or flat
    try:
        return get_template_mod().linegraph(plotdata, pconfig)
    except (AttributeError, TypeError):
        if config.plots_force_flat or (not config.plots_force_interactive and len(plotdata[0]) > config.plots_flat_numseries):
            try:
                return matplotlib_linegraph(plotdata, pconfig)
            except:
                logger.error("############### Error making MatPlotLib figure! Falling back to HighCharts.")
                return highcharts_linegraph(plotdata, pconfig)
        else:
            # Use MatPlotLib to generate static plots if requested
            if config.export_plots:
                matplotlib_linegraph(plotdata, pconfig)
            # Return HTML for HighCharts dynamic plot
            return highcharts_linegraph(plotdata, pconfig)
|
def userContent(self):
    """Allow access into the individual user's content to get at the
    items owned by the current user.

    Rewrites this community URL ('.../community/...') into the matching
    content URL ('.../content/...') and returns a User content object
    bound to it with the same security/proxy settings.
    """
    # Locate '/community/' case-insensitively, then slice the original-case
    # segment out of the URL so str.replace matches it exactly.
    replace_start = self._url.lower().find("/community/")
    len_replace = len("/community/")
    url = self._url.replace(self._url[replace_start:replace_start + len_replace], '/content/')
    from ._content import User as UserContent
    return UserContent(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
|
def get_execution_engine(self, name):
    """Return the execution engine registered under *name*.

    :raises InvalidEngineError: if no engine with that name is registered.
    """
    engines = self.execution_engines
    if name in engines:
        return engines[name]
    raise InvalidEngineError("Unsupported execution engine: {}".format(name))
|
def _load_class_entry_point(cls, entry_point):
    """Load *entry_point* and tag the loaded class with its plugin name.

    The entry point's ``name`` is stored on the loaded object as the
    attribute ``plugin_name``.
    """
    loaded = entry_point.load()
    loaded.plugin_name = entry_point.name
    return loaded
|
def forkexec(argv, env=None):
    """Fork and exec *argv* in the child; return the child's pid.

    The child closes every descriptor above stderr and runs with the
    parent's environment, optionally overridden by *env*.
    """
    pid = os.fork()
    if pid == 0:
        # Child process: drop inherited descriptors, then exec.
        os.closerange(3, MAXFD)
        environ = dict(os.environ)
        if env is not None:
            environ.update(env)
        os.execve(argv[0], argv, environ)
    return pid
|
def get_repository(self, repository_id):
    """Gets the ``Repository`` specified by its ``Id``.

    In plenary mode, the exact ``Id`` is found or a ``NotFound``
    results. Otherwise, the returned ``Repository`` may have a
    different ``Id`` than requested, such as the case where a
    duplicate ``Id`` was assigned to a ``Repository`` and retained
    for compatibility.

    arg:    repository_id (osid.id.Id): ``Id`` of the ``Repository``
    return: (osid.repository.Repository) - the repository
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method is must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinLookupSession.get_bin
    # Delegate to the underlying catalog session when one is configured.
    if self._catalog_session is not None:
        return self._catalog_session.get_catalog(catalog_id=repository_id)
    collection = JSONClientValidated('repository', collection='Repository', runtime=self._runtime)
    # Need to consider how to best deal with the "phantom root" catalog issue
    if repository_id.get_identifier() == PHANTOM_ROOT_IDENTIFIER:
        return self._get_phantom_root_catalog(cat_class=objects.Repository, cat_name='Repository')
    try:
        result = collection.find_one({'_id': ObjectId(self._get_id(repository_id, 'repository').get_identifier())})
    except errors.NotFound:
        # Try creating an orchestrated Repository.  Let it raise errors.NotFound()
        result = self._create_orchestrated_cat(repository_id, 'repository', 'Repository')
    return objects.Repository(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
|
def HandleVerack(self):
    """Handle the `verack` response: acknowledge and mark handshake done."""
    # Echo a verack back to complete the handshake.
    self.SendSerializedMessage(Message('verack'))
    # Register this node with the leader and take the next identifier.
    leader = self.leader
    leader.NodeCount += 1
    self.identifier = leader.NodeCount
    logger.debug(f"{self.prefix} Handshake complete!")
    self.handshake_complete = True
    self.ProtocolReady()
|
def fan_maxcfm(ddtt):
    """Return the fan's maximum flow in CFM, or 'autosize' when not sized."""
    flow = str(ddtt.Maximum_Flow_Rate)  # str can fail with unicode chars :-(
    if flow.lower() == 'autosize':
        return 'autosize'
    return m3s2cfm(float(ddtt.Maximum_Flow_Rate))
|
def find_column(token):
    """Compute the 1-based column of *token* within its input line.

    ``token.lexer.lexdata`` holds the full input text and ``token.lexpos``
    the token's absolute offset within it.
    """
    data = token.lexer.lexdata
    # Offset from the character after the last newline before the token.
    line_start = data.rfind('\n', 0, token.lexpos) + 1
    return token.lexpos - line_start + 1
|
def recursive_build_tree(self, intervals):
    """Recursively build a BST from the elementary intervals.

    Each node is a list ``[interval value, left subtree, right subtree,
    [ids]]``; an absent child is encoded as ``[-1, -1, -1, []]``. For
    example, a node with two leaf descendants::

        [500,
         [-1, -1, -1, ['id5', 'id6']],
         [-1, -1, -1, ['id4']],
         ['id1', 'id2', 'id3']]
    """
    center = int(round(len(intervals) / 2))
    node = intervals[center]
    subtrees = []
    for part in (intervals[:center], intervals[center + 1:]):
        if len(part) > 1:
            subtrees.append(self.recursive_build_tree(part))
        elif len(part) == 1:
            # A single interval becomes a leaf with two empty children.
            subtrees.append([part[0], [-1, -1, -1, []], [-1, -1, -1, []], []])
        else:
            subtrees.append([-1, -1, -1, []])
    return [node, subtrees[0], subtrees[1], []]
|
def _set_adj_use(self, v, load=False):
    """Setter method for adj_use, mapped from YANG variable
    /adj_neighbor_entries_state/adj_neighbor/adj_use (isis-spf-level).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_adj_use is considered as a private method. Backends looking
    to populate this variable should do so via calling
    thisObj._set_adj_use() directly.

    YANG Description: Adjacency Level Usage
    """
    # Some wrapped types carry their own coercion hook; apply it first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as the restricted enum isis-spf-level.
        t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'level-1-2': {'value': 3}, u'level-2': {'value': 2}, u'level-1': {'value': 1}},), is_leaf=True, yang_name="adj-use", rest_name="adj-use", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-spf-level', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """adj_use must be of a type compatible with isis-spf-level""", 'defined-type': "brocade-isis-operational:isis-spf-level", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'level-1-2': {'value': 3}, u'level-2': {'value': 2}, u'level-1': {'value': 1}},), is_leaf=True, yang_name="adj-use", rest_name="adj-use", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='isis-spf-level', is_config=False)""",})
    # Store the validated value and notify the parent tree if supported.
    self.__adj_use = t
    if hasattr(self, '_set'):
        self._set()
|
def check_compound_consistency(database, solver, exchange=set(), zeromass=set()):
    """Yield each compound in the database with assigned mass.

    Each compound will be assigned a mass and the number of compounds having
    a positive mass will be approximately maximized.

    This is an implementation of the solution originally proposed by
    [Gevorgyan08]_ but using the new method proposed by [Thiele14]_ to avoid
    MILP constraints. This is similar to the way Fastcore avoids MILP
    contraints.
    """
    # NOTE(review): `exchange` and `zeromass` are mutable default arguments
    # shared across calls; safe only while never mutated here -- confirm.
    # Create mass balance problem
    prob = solver.create_problem()
    compound_set = _non_localized_compounds(database)
    mass_compounds = compound_set.difference(zeromass)
    # Define mass variables (m >= 0, one per compound)
    m = prob.namespace(mass_compounds, lower=0)
    # Define z variables (indicator-like relaxation, 0 <= z <= 1)
    z = prob.namespace(mass_compounds, lower=0, upper=1)
    # Maximize the number of compounds that can carry positive mass.
    prob.set_objective(z.sum(mass_compounds))
    prob.add_linear_constraints(m.set(mass_compounds) >= z.set(mass_compounds))
    # Build one mass-balance expression per reaction from the stoichiometry.
    massbalance_lhs = {reaction_id: 0 for reaction_id in database.reactions}
    for (compound, reaction_id), value in iteritems(database.matrix):
        if compound not in zeromass:
            mass_var = m(compound.in_compartment(None))
            massbalance_lhs[reaction_id] += mass_var * value
    # Require balance for all reactions except exchange reactions.
    for reaction_id, lhs in iteritems(massbalance_lhs):
        if reaction_id not in exchange:
            prob.add_linear_constraints(lhs == 0)
    # Solve
    try:
        prob.solve(lp.ObjectiveSense.Maximize)
    except lp.SolverError as e:
        raise_from(MassConsistencyError('Failed to solve mass consistency: {}'.format(e)), e)
    for compound in mass_compounds:
        yield compound, m.value(compound)
|
def calculate_entropy(self, entropy_string):
    """Calculate the entropy of a string from known English letter frequencies.

    Args:
        entropy_string: A str representing the string to score.

    Returns:
        A float with the total entropy of the string (higher is better);
        non-alphabetic characters contribute nothing.
    """
    total = 0
    log_two = math.log(2)
    for char in entropy_string:
        if not char.isalpha():
            continue
        probability = self.frequency[char.lower()]
        total += -math.log(probability) / log_two
    logging.debug("Entropy score: {0}".format(total))
    return total
|
def is_connection_dropped(conn):  # Platform-specific
    """Return True if the connection was dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False``
    to let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine exposes no socket
        return False
    if sock is None:  # Connection already closed (such as by httplib)
        return True
    if not HAS_SELECT:
        return False
    # A socket that is readable with no request in flight means the peer
    # has hung up (or sent unexpected data) -- treat it as dropped.
    try:
        readable = wait_for_read(sock, timeout=0.0)
    except SelectorError:
        return True
    return bool(readable)
|
def check_output_format(expected_formats):
    """Decorator for stream outputs that checks the format of the outputs
    after modifiers have been applied.

    :param expected_formats: The expected output formats
    :type expected_formats: tuple, set
    :return: the decorator
    """
    from functools import wraps

    def output_format_decorator(func):
        @wraps(func)
        def func_wrapper(*args, **kwargs):
            self = args[0]
            if self.output_format not in expected_formats:
                # Bug fix: the message previously hardcoded 'doc_gen' instead
                # of reporting the actually expected formats.
                raise ValueError("expected output format {}, got {}".format(
                    expected_formats, self.output_format))
            return func(*args, **kwargs)
        return func_wrapper
    return output_format_decorator
|
def _make_tonnetz_matrix():
    """Return the 6x12 tonnetz projection matrix.

    Rows are the (sin, cos) projections of the 12 chroma bins onto the
    circles of fifths, minor thirds, and major thirds, scaled by the
    corresponding radii.
    """
    chroma = np.arange(12)
    rows = []
    # (radius, angular step) per interval circle, in the original row order.
    for radius, step in ((r_fifth, 7 * np.pi / 6),
                         (r_minor_thirds, 3 * np.pi / 2),
                         (r_major_thirds, 2 * np.pi / 3)):
        rows.append(radius * np.sin(step * chroma))
        rows.append(radius * np.cos(step * chroma))
    return np.vstack(rows)
|
def process_log_config_section(config, log_config):
    """Apply the log section of a configuration data dict onto *config*.

    :param config: The config reference of the object that will hold the
        configuration data from the config_data.
    :param log_config: Log section from a config data dict.
    """
    if 'format' in log_config:
        config.log['format'] = log_config['format']
    if 'level' in log_config:
        # Level names are stored as strings; translate to the numeric level.
        level_name = log_config['level']
        config.log['level'] = log_level_from_string(level_name)
|
def indication(self, apdu):
    """Start a confirmed-request transaction.

    This function is called after the device has bound a new transaction
    and wants to start the process rolling: it saves the request, picks a
    segment size, computes the segment count, validates segmentation
    support on both sides, and sends the first (or only) segment.
    """
    if _debug:
        ClientSSM._debug("indication %r", apdu)
    # make sure we're getting confirmed requests
    if (apdu.apduType != ConfirmedRequestPDU.pduType):
        raise RuntimeError("invalid APDU (1)")
    # save the request and set the segmentation context
    self.set_segmentation_context(apdu)
    # if the max apdu length of the server isn't known, assume that it
    # is the same size as our own and will be the segment size
    if (not self.device_info) or (self.device_info.maxApduLengthAccepted is None):
        self.segmentSize = self.maxApduLengthAccepted
    # if the max npdu length of the server isn't known, assume that it
    # is the same as the max apdu length accepted
    elif self.device_info.maxNpduLength is None:
        self.segmentSize = self.device_info.maxApduLengthAccepted
    # the segment size is the minimum of the size of the largest packet
    # that can be delivered to the server and the largest it can accept
    else:
        self.segmentSize = min(self.device_info.maxNpduLength, self.device_info.maxApduLengthAccepted)
    if _debug:
        ClientSSM._debug(" - segment size: %r", self.segmentSize)
    # save the invoke ID
    self.invokeID = apdu.apduInvokeID
    if _debug:
        ClientSSM._debug(" - invoke ID: %r", self.invokeID)
    # compute the segment count
    if not apdu.pduData:
        # always at least one segment
        self.segmentCount = 1
    else:
        # split into chunks, maybe need one more
        self.segmentCount, more = divmod(len(apdu.pduData), self.segmentSize)
        if more:
            self.segmentCount += 1
    if _debug:
        ClientSSM._debug(" - segment count: %r", self.segmentCount)
    # make sure we support segmented transmit if we need to
    if self.segmentCount > 1:
        if self.segmentationSupported not in ('segmentedTransmit', 'segmentedBoth'):
            if _debug:
                ClientSSM._debug(" - local device can't send segmented requests")
            abort = self.abort(AbortReason.segmentationNotSupported)
            self.response(abort)
            return
        if not self.device_info:
            if _debug:
                ClientSSM._debug(" - no server info for segmentation support")
        elif self.device_info.segmentationSupported not in ('segmentedReceive', 'segmentedBoth'):
            if _debug:
                ClientSSM._debug(" - server can't receive segmented requests")
            abort = self.abort(AbortReason.segmentationNotSupported)
            self.response(abort)
            return
        # make sure we dont exceed the number of segments in our request
        # that the server said it was willing to accept
        if not self.device_info:
            if _debug:
                ClientSSM._debug(" - no server info for maximum number of segments")
        elif not self.device_info.maxSegmentsAccepted:
            if _debug:
                ClientSSM._debug(" - server doesn't say maximum number of segments")
        elif self.segmentCount > self.device_info.maxSegmentsAccepted:
            if _debug:
                ClientSSM._debug(" - server can't receive enough segments")
            abort = self.abort(AbortReason.apduTooLong)
            self.response(abort)
            return
    # send out the first segment (or the whole thing)
    if self.segmentCount == 1:
        # unsegmented
        self.sentAllSegments = True
        self.retryCount = 0
        self.set_state(AWAIT_CONFIRMATION, self.apduTimeout)
    else:
        # segmented
        self.sentAllSegments = False
        self.retryCount = 0
        self.segmentRetryCount = 0
        self.initialSequenceNumber = 0
        self.actualWindowSize = None
        # segment ack will set value
        self.set_state(SEGMENTED_REQUEST, self.segmentTimeout)
    # deliver to the device
    self.request(self.get_segment(0))
|
def _convert_to_border(cls, border_dict):
    """Convert ``border_dict`` to an openpyxl v2 Border object.

    Parameters
    ----------
    border_dict : dict
        A dict with zero or more of the following keys (or their synonyms):
        'left', 'right', 'top', 'bottom', 'diagonal', 'diagonal_direction',
        'vertical', 'horizontal', 'diagonalUp' ('diagonalup'),
        'diagonalDown' ('diagonaldown'), 'outline'.

    Returns
    -------
    border : openpyxl.styles.Border
    """
    from openpyxl.styles import Border

    # Lowercase synonyms accepted for the camelCase openpyxl keywords.
    synonyms = {'diagonalup': 'diagonalUp', 'diagonaldown': 'diagonalDown'}
    side_keys = ('left', 'right', 'top', 'bottom', 'diagonal')

    border_kwargs = {}
    for key, value in border_dict.items():
        key = synonyms.get(key, key)
        if key == 'color':
            value = cls._convert_to_color(value)
        elif key in side_keys:
            # Side specs are themselves dicts/strings needing conversion.
            value = cls._convert_to_side(value)
        border_kwargs[key] = value
    return Border(**border_kwargs)
|
def dateheure(objet):
    """Render a ``datetime.datetime`` in the fixed-width ``JJ/MM/AAAA à HH:mm``
    format (e.g. ``05/03/2021 à 09:05``).

    :param objet: datetime to render; falsy values yield an empty string
    :returns: formatted string, or ``""`` when *objet* is falsy
    """
    if not objet:
        return ""
    # Zero-pad day and month so the output matches the documented
    # fixed-width JJ/MM format (hour/minute were already padded).
    return "{:02}/{:02}/{} à {:02}:{:02}".format(
        objet.day, objet.month, objet.year, objet.hour, objet.minute
    )
|
def sailthru_http_request(url, data, method, file_data=None, headers=None, request_timeout=10):
    """Perform an HTTP GET/POST/DELETE request against the Sailthru API.

    :param url: endpoint URL
    :param data: request payload (nested dicts are flattened)
    :param method: HTTP verb, case-insensitive
    :param file_data: optional files mapping for multipart upload
    :param headers: optional extra headers (client identification headers
        are always merged in on top)
    :param request_timeout: timeout in seconds
    :raises SailthruClientError: on any requests-level failure
    """
    payload = flatten_nested_hash(data)
    verb = method.upper()
    # POST carries the payload in the body; GET/DELETE pass it as query params.
    if verb == 'POST':
        params, body = None, payload
    else:
        params, body = payload, None

    client_headers = {'User-Agent': 'Sailthru API Python Client %s; Python Version: %s' % ('2.3.5', platform.python_version())}
    if headers and isinstance(headers, dict):
        # Client identification always wins over caller-supplied values.
        headers.update(client_headers)
    else:
        headers = client_headers

    try:
        raw = requests.request(verb, url, params=params, data=body, files=file_data, headers=headers, timeout=request_timeout)
        return SailthruResponse(raw)
    except requests.HTTPError as e:
        raise SailthruClientError(str(e))
    except requests.RequestException as e:
        raise SailthruClientError(str(e))
|
def splitext(path):
    # type: (Text) -> Tuple[Text, Text]
    """Split the extension from the path.

    Arguments:
        path (str): A path to split.

    Returns:
        (str, str): A tuple containing the path and the extension.

    Example:
        >>> splitext('baz.txt')
        ('baz', '.txt')
        >>> splitext('foo/bar/baz.txt')
        ('foo/bar/baz', '.txt')
        >>> splitext('foo/bar/.foo')
        ('foo/bar/.foo', '')
    """
    head, tail = split(path)
    # A single leading dot marks a hidden file ('.foo'), not an extension;
    # a dot-free name has no extension either.
    hidden_only = tail.startswith(".") and tail.count(".") == 1
    if hidden_only or "." not in tail:
        return path, ""
    stem, _, ext = tail.rpartition(".")
    return join(head, stem), "." + ext
|
def update(self, points, pointvol=0., vol_dec=0.5, vol_check=2., rstate=None, bootstrap=0, pool=None, mc_integrate=False):
    """Update the set of ellipsoids to bound the collection of points.

    Parameters
    ----------
    points : `~numpy.ndarray` with shape (npoints, ndim)
        The set of points to bound.
    pointvol : float, optional
        The minimum volume associated with each point. Default is `0.`.
    vol_dec : float, optional
        The required fractional reduction in volume after splitting
        an ellipsoid in order to accept the split.
        Default is `0.5`.
    vol_check : float, optional
        The factor used when checking if the volume of the original
        bounding ellipsoid is large enough to warrant `> 2` splits
        via `ell.vol > vol_check * nlive * pointvol`.
        Default is `2.0`.
    rstate : `~numpy.random.RandomState`, optional
        `~numpy.random.RandomState` instance.
    bootstrap : int, optional
        The number of bootstrapped realizations of the ellipsoids. The
        maximum distance to the set of points "left out" during each
        iteration is used to enlarge the resulting volumes.
        Default is `0`.
    pool : user-provided pool, optional
        Use this pool of workers to execute operations in parallel.
    mc_integrate : bool, optional
        Whether to use Monte Carlo methods to compute the effective
        volume and fractional overlap of the final union of ellipsoids
        with the unit cube. Default is `False`.
    """
    if rstate is None:
        rstate = np.random
    if not HAVE_KMEANS:
        raise ValueError("scipy.cluster.vq.kmeans2 is required "
                         "to compute ellipsoid decompositions.")
    npoints, ndim = points.shape
    # Calculate the bounding ellipsoid for the points, possibly
    # enlarged to a minimum volume.
    firstell = bounding_ellipsoid(points, pointvol=pointvol)
    # Recursively split the bounding ellipsoid using `vol_check`
    # until the volume of each split no longer decreases by a
    # factor of `vol_dec`.
    ells = _bounding_ellipsoids(points, firstell, pointvol=pointvol, vol_dec=vol_dec, vol_check=vol_check)
    # Cache per-ellipsoid attributes as arrays for fast sampling later.
    self.nells = len(ells)
    self.ells = ells
    self.ctrs = np.array([ell.ctr for ell in self.ells])
    self.covs = np.array([ell.cov for ell in self.ells])
    self.ams = np.array([ell.am for ell in self.ells])
    self.vols = np.array([ell.vol for ell in self.ells])
    self.vol_tot = sum(self.vols)
    # Compute expansion factor: total volume relative to what the
    # un-enlarged ("original") ellipsoids would occupy.
    expands = np.array([ell.expand for ell in self.ells])
    vols_orig = self.vols / expands
    vol_tot_orig = sum(vols_orig)
    self.expand_tot = self.vol_tot / vol_tot_orig
    # Use bootstrapping to determine the volume expansion factor.
    if bootstrap > 0:
        # If provided, compute bootstraps in parallel using a pool.
        if pool is None:
            M = map
        else:
            M = pool.map
        # One (points, pointvol, vol_dec, vol_check) tuple per realization.
        ps = [points for it in range(bootstrap)]
        pvs = [pointvol for it in range(bootstrap)]
        vds = [vol_dec for it in range(bootstrap)]
        vcs = [vol_check for it in range(bootstrap)]
        args = zip(ps, pvs, vds, vcs)
        expands = list(M(_ellipsoids_bootstrap_expand, args))
        # Conservatively set the expansion factor to be the maximum
        # factor derived from our set of bootstraps.
        expand = max(expands)
        # If our ellipsoids are overly constrained, expand them.
        if expand > 1.:
            # Volume scales as the expansion factor to the ndim-th power.
            vs = self.vols * expand ** ndim
            self.scale_to_vols(vs)
    # Estimate the volume and fractional overlap with the unit cube
    # using Monte Carlo integration.
    if mc_integrate:
        self.vol, self.funit = self.monte_carlo_vol(return_overlap=True)
|
def call_command(self, cmd, *argv):
    """Run a single registered command.

    :param cmd: command to run (key at the registry)
    :param argv: arguments that would be passed to the command
    """
    namespace = self.get_parser().parse_args([cmd, *argv])
    self.run_command(namespace)
|
def process(self):
    """Process the Client queue and pass the data to the respective
    handler methods.

    Runs until ``self.running`` becomes falsy. A non-blocking lock lets
    several processor threads share the work without stalling each other.

    :return:
    """
    while self.running:
        if self._processor_lock.acquire(blocking=False):
            # Check WebSocket connection health via ping, if enabled.
            if self.ping_timer:
                try:
                    self._check_ping()
                except TimeoutError:
                    log.exception("BitfinexWSS.ping(): TimedOut! (%ss)" % self.ping_timer)
                except (WebSocketConnectionClosedException, ConnectionResetError):
                    log.exception("BitfinexWSS.ping(): Connection Error!")
                    self.conn = None
            if not self.conn:
                # The connection was killed - initiate restart
                self._controller_q.put('restart')
            skip_processing = False
            try:
                # Short timeout so the loop still reaches the heartbeat
                # check when no data arrives.
                ts, data = self.receiver_q.get(timeout=0.1)
            except queue.Empty:
                skip_processing = True
                ts = time.time()
                data = None
            if not skip_processing:
                log.debug("Processing Data: %s", data)
                if isinstance(data, list):
                    # List payloads are channel data updates.
                    self.handle_data(ts, data)
                else:
                    # Not a list, hence it could be a response
                    try:
                        self.handle_response(ts, data)
                    except UnknownEventError:
                        # We don't know what event this is - Raise an
                        # error & log data!
                        log.exception("main() - UnknownEventError: %s", data)
                        log.info("main() - Shutting Down due to " "Unknown Error!")
                        self._controller_q.put('stop')
                    except ConnectionResetError:
                        log.info("processor Thread: Connection Was reset, " "initiating restart")
                        self._controller_q.put('restart')
            self._check_heartbeats(ts)
            self._processor_lock.release()
        else:
            # Another thread holds the lock; back off briefly.
            time.sleep(0.5)
|
def get_artist(self):
    """Return the artist associated with this resource.

    :returns: the :mod:`Artist <deezer.resources.Artist>` of the resource
    :raises AssertionError: if the object is not album or track
    """
    # Only Album and Track resources carry an ``artist`` attribute.
    # pylint: disable=E1101
    assert isinstance(self, (Album, Track))
    return self.client.get_artist(self.artist.id)
|
def get_asset_from_edit_extension_draft(self, publisher_name, draft_id, asset_type, extension_name, **kwargs):
    """GetAssetFromEditExtensionDraft.

    [Preview API]
    :param str publisher_name:
    :param str draft_id:
    :param str asset_type:
    :param str extension_name:
    :rtype: object
    """
    # Serialize each non-None URL path segment.
    route_values = {}
    for route_key, param_name, value in (
        ('publisherName', 'publisher_name', publisher_name),
        ('draftId', 'draft_id', draft_id),
        ('assetType', 'asset_type', asset_type),
    ):
        if value is not None:
            route_values[route_key] = self._serialize.url(param_name, value, 'str')
    query_parameters = {}
    if extension_name is not None:
        query_parameters['extensionName'] = self._serialize.query('extension_name', extension_name, 'str')
    response = self._send(http_method='GET', location_id='88c0b1c8-b4f1-498a-9b2a-8446ef9f32e7', version='5.0-preview.1', route_values=route_values, query_parameters=query_parameters, accept_media_type='application/octet-stream')
    # Optional progress callback for the streamed download.
    callback = kwargs.get("callback")
    return self._client.stream_download(response, callback=callback)
|
def get_server_list(self, filter_text, max_servers=10, timeout=20):
    """Get list of servers. Works similarly to :meth:`query`, but the response has more details.

    :param filter_text: filter for servers
    :type filter_text: str
    :param max_servers: (optional) number of servers to return
    :type max_servers: int
    :param timeout: (optional) timeout for request in seconds
    :type timeout: int
    :returns: list of servers, see below. (``None`` is returned if steam doesn't respond)
    :rtype: :class:`list`, :class:`None`
    :raises: :class:`.UnifiedMessageError`

    Sample response:

    .. code:: python

        [{'addr': '1.2.3.4:27067',
          'appid': 730,
          'bots': 0,
          'dedicated': True,
          'gamedir': 'csgo',
          'gameport': 27067,
          'gametype': 'valve_ds,empty,secure',
          'map': 'de_dust2',
          'max_players': 10,
          'name': 'Valve CS:GO Asia Server (srcdsXXX.XXX.XXX)',
          'os': 'l',
          'players': 0,
          'product': 'csgo',
          'region': 5,
          'secure': True,
          'steamid': SteamID(id=3279818759, type='AnonGameServer', universe='Public', instance=7011),
          'version': '1.35.4.0'}]
    """
    resp, error = self._um.send_and_wait(
        "GameServers.GetServerList#1",
        {"filter": filter_text, "limit": max_servers},
        # Bug fix: previously hard-coded to 20, silently ignoring the
        # caller-supplied ``timeout`` parameter.
        timeout=timeout,
    )
    if error:
        raise error
    if resp is None:
        # Steam did not respond.
        return None
    resp = proto_to_dict(resp)
    if not resp:
        return []
    # Wrap raw 64-bit ids in SteamID objects for convenience.
    for server in resp['servers']:
        server['steamid'] = SteamID(server['steamid'])
    return resp['servers']
|
def update_bbox(self):
    """Recalculate the ``bbox`` region attribute for the entire file.

    Useful after adding and/or removing features. There is no need to call
    this just before saving, because saving updates the bbox automatically.
    """
    # Null geometries have no extent and are excluded.
    boxes = (feat.geometry.bbox for feat in self if feat.geometry.type != "Null")
    xmins, ymins, xmaxs, ymaxs = zip(*boxes)
    self._data["bbox"] = [min(xmins), min(ymins), max(xmaxs), max(ymaxs)]
|
def _itodq ( self , n ) :
"""Convert long to dotquad or hextet ."""
|
if self . v == 4 :
return '.' . join ( map ( str , [ ( n >> 24 ) & 0xff , ( n >> 16 ) & 0xff , ( n >> 8 ) & 0xff , n & 0xff , ] ) )
else :
n = '%032x' % n
return ':' . join ( n [ 4 * x : 4 * x + 4 ] for x in range ( 0 , 8 ) )
|
def _set_autocommit ( connection ) :
"""Make sure a connection is in autocommit mode ."""
|
if hasattr ( connection . connection , "autocommit" ) :
if callable ( connection . connection . autocommit ) :
connection . connection . autocommit ( True )
else :
connection . connection . autocommit = True
elif hasattr ( connection . connection , "set_isolation_level" ) :
connection . connection . set_isolation_level ( 0 )
|
def get_dag_configs(self) -> Dict[str, Dict[str, Any]]:
    """Return the configuration for each DAG in the factory.

    The reserved ``"default"`` entry holds shared defaults rather than a
    DAG, so it is excluded from the result.

    :returns: dict mapping DAG name to its configuration
    """
    # Iterate items() directly instead of keys() + per-key lookup.
    return {dag: cfg for dag, cfg in self.config.items() if dag != "default"}
|
def collect_frames_for_random_starts(storage_env, stacked_env, agent, frame_stack_size, random_starts_step_limit, log_every_steps=None):
    """Collects frames from real env for random starts of simulated env."""
    del frame_stack_size  # unused; kept for interface compatibility
    storage_env.start_new_epoch(0)
    tf.logging.info("Collecting %d frames for random starts.", random_starts_step_limit)
    initial_observations = stacked_env.reset()
    rl_utils.run_rollouts(
        stacked_env,
        agent,
        initial_observations,
        step_limit=random_starts_step_limit,
        many_rollouts_from_each_env=True,
        log_every_steps=log_every_steps,
    )
    # A final reset flushes unfinished rollouts into history.
    stacked_env.reset()
|
def iterdfs(self, start, end=None, forward=True):
    """Yield nodes in a depth-first traversal starting at *start*.

    The *forward* parameter selects traversal along outgoing edges
    (``True``) or incoming edges (``False``). Traversal stops right
    after yielding *end*, if given.
    """
    if forward:
        edges_of, next_node = self.out_edges, self.tail
    else:
        edges_of, next_node = self.inc_edges, self.head
    seen = {start}
    pending = deque([start])
    while pending:
        node = pending.pop()
        yield node
        if node == end:
            return
        # Sorted for a deterministic visit order.
        for edge in sorted(edges_of(node)):
            neighbor = next_node(edge)
            if neighbor not in seen:
                seen.add(neighbor)
                pending.append(neighbor)
|
def main():
    """Handle calling this module as a script.

    Parses command-line arguments and downloads the requested artifact
    from Nexus, logging (rather than raising) on failure.

    :return: None
    """
    log = logging.getLogger(mod_logger + '.main')
    parser = argparse.ArgumentParser(description='This Python module retrieves artifacts from Nexus.')
    parser.add_argument('-u', '--url', help='Nexus Server URL', required=False)
    parser.add_argument('-g', '--groupId', help='Group ID', required=True)
    parser.add_argument('-a', '--artifactId', help='Artifact ID', required=True)
    parser.add_argument('-v', '--version', help='Artifact Version', required=True)
    parser.add_argument('-c', '--classifier', help='Artifact Classifier', required=False)
    parser.add_argument('-p', '--packaging', help='Artifact Packaging', required=True)
    parser.add_argument('-r', '--repo', help='Nexus repository name', required=False)
    parser.add_argument('-d', '--destinationDir', help='Directory to download to', required=True)
    # Bug fix: these two previously reused the copy-pasted help text
    # "Directory to download to".
    parser.add_argument('-n', '--username', help='Nexus username', required=True)
    parser.add_argument('-w', '--password', help='Nexus password', required=True)
    args = parser.parse_args()
    try:
        get_artifact(nexus_url=args.url, group_id=args.groupId, artifact_id=args.artifactId, version=args.version, classifier=args.classifier, packaging=args.packaging, repo=args.repo, destination_dir=args.destinationDir, username=args.username, password=args.password)
    except Exception as e:
        # Typo fix in the log message ("unable for download").
        msg = 'Caught exception {n}, unable to download artifact from Nexus\n{s}'.format(n=e.__class__.__name__, s=e)
        log.error(msg)
        return
|
def add_user(self, recipient_email):
    """Add a user to the set of encryption recipients.

    Imports the recipient's public key, then re-encrypts the current
    contents for all existing recipients plus the new one.

    :param recipient_email: email id identifying the key to add
    """
    self.import_key(emailid=recipient_email)
    emailid_list = self.list_user_emails()
    # Decrypt first so the payload can be re-encrypted for the new key set.
    self.y = self.decrypt()
    emailid_list.append(recipient_email)
    self.encrypt(emailid_list=emailid_list)
|
def save_structure_to_file(structure: JsonExportable, filename: str) -> None:
    """Saves a :class:`Post`, :class:`Profile` or :class:`StoryItem` to a '.json' or '.json.xz' file such that it can
    later be loaded by :func:`load_structure_from_file`.

    If the specified filename ends in '.xz', the file will be LZMA compressed. Otherwise, a pretty-printed JSON file
    will be created.

    :param structure: :class:`Post`, :class:`Profile` or :class:`StoryItem`
    :param filename: Filename, ends in '.json' or '.json.xz'
    """
    payload = {
        'node': structure._asdict(),
        'instaloader': {'version': __version__, 'node_type': structure.__class__.__name__},
    }
    if filename.endswith('.xz'):
        # Compact separators keep the compressed output small.
        with lzma.open(filename, 'wt', check=lzma.CHECK_NONE) as fp:
            json.dump(payload, fp=fp, separators=(',', ':'))
    else:
        # Pretty-print for human readability.
        with open(filename, 'wt') as fp:
            json.dump(payload, fp=fp, indent=4, sort_keys=True)
|
def stop_socket(self, conn_key):
    """Stop a websocket given the connection key.

    :param conn_key: Socket connection key
    :type conn_key: string
    :returns: connection key string if successful, False otherwise
    """
    if conn_key not in self._conns:
        return
    conn = self._conns[conn_key]
    # Point the factory at a throwaway path so the client does not
    # attempt to reconnect while we tear the socket down.
    conn.factory = WebSocketClientFactory(self.STREAM_URL + 'tmp_path')
    conn.disconnect()
    del self._conns[conn_key]
    # A user-data stream's listen key doubles as its connection key.
    if len(conn_key) >= 60 and conn_key[:60] == self._user_listen_key:
        self._stop_user_socket()
|
def _construct_permission(self, function, source_arn=None, source_account=None, suffix="", event_source_token=None):
    """Construct the Lambda Permission resource allowing the source service to invoke the function this event
    source triggers.

    :returns: the permission resource
    :rtype: model.lambda_.LambdaPermission
    """
    permission = LambdaPermission(self.logical_id + 'Permission' + suffix, attributes=function.get_passthrough_resource_attributes())
    try:
        # Prefer the resource name; Alias resources only expose an ARN.
        function_ref = function.get_runtime_attr("name")
    except NotImplementedError:
        function_ref = function.get_runtime_attr("arn")
    permission.Action = 'lambda:invokeFunction'
    permission.FunctionName = function_ref
    permission.Principal = self.principal
    permission.SourceArn = source_arn
    permission.SourceAccount = source_account
    permission.EventSourceToken = event_source_token
    return permission
|
def patch(self, resource_id):
    """Return an HTTP response object resulting from an HTTP PATCH call.

    :param resource_id: The value of the resource's primary key
    :returns: ``HTTP 200`` if the resource already exists
    :returns: ``HTTP 400`` if the request is malformed
    :returns: ``HTTP 404`` if the resource is not found
    """
    resource = self._resource(resource_id)
    validation_error = is_valid_method(self.__model__, resource)
    if validation_error:
        raise BadRequestException(validation_error)
    if not request.json:
        raise BadRequestException('No JSON data received')
    # Apply the partial update, then persist it.
    resource.update(request.json)
    db.session().merge(resource)
    db.session().commit()
    return jsonify(resource)
|
def _scatter ( sequence , n ) :
"""Scatters elements of ` ` sequence ` ` into ` ` n ` ` blocks ."""
|
chunklen = int ( math . ceil ( float ( len ( sequence ) ) / float ( n ) ) )
return [ sequence [ i * chunklen : ( i + 1 ) * chunklen ] for i in range ( n ) ]
|
def _diagonalize(self):
    """Performs SVD on covariance matrices and save left, right singular vectors and values in the model.

    Parameters
    ----------
    scaling : None or string, default=None
        Scaling to be applied to the VAMP modes upon transformation
        * None: no scaling will be applied, variance of the singular
          functions is 1
        * 'kinetic map' or 'km': singular functions are scaled by
          singular value. Note that only the left singular functions
          induce a kinetic map.
    """
    # Whitening transforms for the instantaneous (C00) and time-lagged
    # (Ctt) covariances; the effective rank can drop below full dimension.
    L0 = spd_inv_split(self.C00, epsilon=self.epsilon)
    self._rank0 = L0.shape[1] if L0.ndim == 2 else 1
    Lt = spd_inv_split(self.Ctt, epsilon=self.epsilon)
    self._rankt = Lt.shape[1] if Lt.ndim == 2 else 1
    # SVD of the whitened cross-covariance gives the VAMP singular values.
    W = np.dot(L0.T, self.C0t).dot(Lt)
    from scipy.linalg import svd
    A, s, BT = svd(W, compute_uv=True, lapack_driver='gesvd')
    self._singular_values = s
    # don't pass any values in the argument list that call _diagonalize again!!!
    m = VAMPModel._dimension(self._rank0, self._rankt, self.dim, self._singular_values)
    # Back-transform the truncated singular vectors into the original basis.
    U = np.dot(L0, A[:, :m])
    V = np.dot(Lt, BT[:m, :].T)
    # scale vectors
    if self.scaling is not None:
        U *= s[np.newaxis, 0:m]
        # scaled left singular functions induce a kinetic map
        V *= s[np.newaxis, 0:m]
        # scaled right singular functions induce a kinetic map wrt. backward propagator
    self._U = U
    self._V = V
    self._svd_performed = True
|
def start_parallel(self):
    """Initialize all queues and start the worker processes and the log
    thread.
    """
    self.num_processes = get_num_processes()
    self.task_queue = multiprocessing.Queue(maxsize=Q_MAX_SIZE)
    self.result_queue = multiprocessing.Queue()
    self.log_queue = multiprocessing.Queue()
    # Used to signal worker processes when a result is found that allows
    # the computation to terminate early.
    self.complete = multiprocessing.Event()
    worker_args = (self.compute, self.task_queue, self.result_queue, self.log_queue, self.complete) + self.context
    self.processes = [
        multiprocessing.Process(target=self.worker, args=worker_args, daemon=True)
        for _ in range(self.num_processes)
    ]
    for process in self.processes:
        process.start()
    self.log_thread = LogThread(self.log_queue)
    self.log_thread.start()
    self.initialize_tasks()
|
def load_spectrum(path, smoothing=181, DF=-8.):
    """Load a Phoenix model atmosphere spectrum.

    path : string
      The file path to load.
    smoothing : integer
      Smoothing to apply. If None, do not smooth. If an integer, smooth with a
      Hamming window. Otherwise, the variable is assumed to be a different
      smoothing window, and the data will be convolved with it.
    DF : float
      Numerical factor used to compute the emergent flux density.

    Returns a Pandas DataFrame containing the columns:

    wlen
      Sample wavelength in Angstrom.
    flam
      Flux density in erg/cm2/s/Å. See `pwkit.synphot` for related tools.

    The values of *flam* returned by this function are computed from the
    second column of the data file as specified in the documentation: ``flam =
    10**(col2 + DF)``. The documentation states that the default value, -8, is
    appropriate for most modern models; but some older models use other
    values.

    Loading takes about 5 seconds on my current laptop. Un-smoothed spectra
    have about 630,000 samples.
    """
    try:
        ang, lflam = np.loadtxt(path, usecols=(0, 1)).T
    except ValueError:
        # In some files, the numbers in the first columns fill up the
        # whole 12-character column width, and are given in exponential
        # notation with a 'D' character, so we must be more careful:
        with open(path, 'rb') as f:
            def lines():
                # Rewrite Fortran-style 'D' exponents into a form that
                # numpy can parse.
                for line in f:
                    yield line.replace(b'D', b'e')
            # Fixed-width columns: 13 chars for wavelength, 12 for log-flux.
            ang, lflam = np.genfromtxt(lines(), delimiter=(13, 12)).T
    # Data files do not come sorted!
    z = ang.argsort()
    ang = ang[z]
    flam = 10**(lflam[z] + DF)
    del z
    if smoothing is not None:
        if isinstance(smoothing, int):
            smoothing = np.hamming(smoothing)
        else:
            smoothing = np.asarray(smoothing)
        # Normalize the window so the convolution preserves overall scale.
        wnorm = np.convolve(np.ones_like(smoothing), smoothing, mode='valid')
        smoothing = smoothing / wnorm
        # do not alter original array.
        # Convolve, then decimate by the window size.
        smooth = lambda a: np.convolve(a, smoothing, mode='valid')[::smoothing.size]
        ang = smooth(ang)
        flam = smooth(flam)
    return pd.DataFrame({'wlen': ang, 'flam': flam})
|
def get_bids_examples(data_dir=None, url=None, resume=True, verbose=1, variant='BIDS-examples-1-1.0.0-rc3u5'):
    """Download BIDS-examples-1."""
    warn(DEPRECATION_MSG)
    # Unknown variants fall back to the default release.
    if variant not in BIDS_EXAMPLES:
        variant = 'BIDS-examples-1-1.0.0-rc3u5'
    if url is None:
        url = BIDS_EXAMPLES[variant][0]
    md5 = BIDS_EXAMPLES[variant][1]
    return fetch_file(variant, url, data_dir, resume=resume, verbose=verbose, md5sum=md5)
|
def get_return_list(self, data):
    """Get the list of returned values.

    The list contains tuples ``(name=None, desc, type=None)``.

    :param data: the data to proceed
    """
    results = []
    for name, desc, rtype in self.get_list_key(data, 'return'):
        if rtype is None:
            # A missing type means the first field actually held the type.
            name, rtype = None, name
        results.append((name, desc.strip(), rtype))
    return results
|
def put(self, request, *args, **kwargs):
    """Custom PUT method to support django-reversion.

    Wraps the standard update in a revision block so the change is
    recorded together with the acting user and the client IP address.
    """
    with reversion.create_revision():
        reversion.set_user(request.user)
        reversion.set_comment('changed through the RESTful API from ip %s' % request.META['REMOTE_ADDR'])
        return self.update(request, *args, **kwargs)
|
def handle_api_errors(self, status_code, json_data):
    """Parse REST API responses and raise on error payloads.

    Recognizes the flat format::

        'error': ['The domain foo.com already exists.']

    and the nested HTTP-400 format::

        "scans": {"__all__": ["Not a verified domain. You need to verify ..."]}

    and raises TagCubeAPIException with the collected message(s).

    :param status_code: The HTTP response code
    :param json_data: The HTTP response body decoded as JSON
    """
    messages = []
    # NOTE: the membership test runs first on purpose, mirroring how the
    # payload is probed before its shape is confirmed.
    is_flat_error = ('error' in json_data
                     and len(json_data) == 1
                     and isinstance(json_data, dict)
                     and isinstance(json_data['error'], list))
    if is_flat_error:
        messages = list(json_data['error'])
    elif status_code == 400:
        # Nested form: {field: {subfield: [msg, ...]}}
        for field in json_data:
            for sub_field in json_data[field]:
                messages.extend(json_data[field][sub_field])
    # Only raise an exception if we had any errors.
    if messages:
        raise TagCubeAPIException(u' '.join(messages))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.