signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def encrypt_item(table_name, rsa_wrapping_private_key_bytes, rsa_signing_private_key_bytes):
    """Demonstrate use of EncryptedTable to transparently encrypt an item."""
    key_attrs = {"partition_attribute": "is this", "sort_attribute": 55}
    item = {
        "example": "data",
        "some numbers": 99,
        "and some binary": Binary(b"\x00\x01\x02"),
        "leave me": "alone",  # We want to ignore this attribute
    }
    # Attribute names that will end up encrypted (used for verification below).
    names_encrypted = set(item.keys())
    names_encrypted.remove("leave me")
    # Attribute names that stay in plaintext (used for verification below).
    names_plaintext = set(key_attrs.keys())
    names_plaintext.add("leave me")
    # Merge the index attributes into the item.
    item.update(key_attrs)
    # Plain (non-encrypting) table resource.
    plain_table = boto3.resource("dynamodb").Table(table_name)
    # Build a crypto materials provider from the provided wrapping and signing
    # keys. Private keys are shown here, but public keys could be used as well,
    # allowing only wrapping or signature verification.
    wrap_key = JceNameLocalDelegatedKey(
        key=rsa_wrapping_private_key_bytes,
        algorithm="RSA",
        key_type=EncryptionKeyType.PRIVATE,
        key_encoding=KeyEncodingType.DER,
    )
    sign_key = JceNameLocalDelegatedKey(
        key=rsa_signing_private_key_bytes,
        algorithm="SHA512withRSA",
        key_type=EncryptionKeyType.PRIVATE,
        key_encoding=KeyEncodingType.DER,
    )
    provider = WrappedCryptographicMaterialsProvider(
        wrapping_key=wrap_key, unwrapping_key=wrap_key, signing_key=sign_key
    )
    # Encrypt-and-sign every attribute except "leave me".
    attr_actions = AttributeActions(
        default_action=CryptoAction.ENCRYPT_AND_SIGN,
        attribute_actions={"leave me": CryptoAction.DO_NOTHING},
    )
    # Wrap the plain table in an encrypting table resource.
    enc_table = EncryptedTable(
        table=plain_table, materials_provider=provider, attribute_actions=attr_actions
    )
    # Put the item through the encrypted resource so it is stored encrypted.
    enc_table.put_item(Item=item)
    # Read back the raw (still encrypted) item with the plain resource.
    stored_item = plain_table.get_item(Key=key_attrs)["Item"]
    # Read it through the encrypted resource, transparently decrypting it.
    roundtrip_item = enc_table.get_item(Key=key_attrs)["Item"]
    # Every encrypted attribute must differ from the plaintext original,
    # while the decrypted copy must match it exactly.
    for attr in names_encrypted:
        assert stored_item[attr] != item[attr]
        assert roundtrip_item[attr] == item[attr]
    # Attributes excluded from encryption must be identical everywhere.
    for attr in names_plaintext:
        assert roundtrip_item[attr] == stored_item[attr] == item[attr]
    # Clean up the item.
    enc_table.delete_item(Key=key_attrs)
|
def create_script(create=None):  # noqa: E501
    """Create a new script.

    :param create: The data needed to create this script
    :type create: dict | bytes
    :rtype: Response
    """
    request = connexion.request
    if request.is_json:
        # Deserialize the JSON payload into the generated model type.
        create = Create.from_dict(request.get_json())  # noqa: E501
    return 'do some magic!'
|
def db_is_indexing(cls, impl, working_dir):
    """Is the system indexing?

    Return True if so, False if not.
    """
    # Indexing in progress is signalled by the presence of the lockfile.
    return os.path.exists(config.get_lockfile_filename(impl, working_dir))
|
def gettrace(self, burn=0, thin=1, chain=-1, slicing=None):
    """Return the trace (last by default).

    Input:
      - burn (int): The number of transient steps to skip.
      - thin (int): Keep one in thin.
      - chain (int): The index of the chain to fetch. If None, return all
        chains. By default, the last chain is returned.
      - slicing: A slice, overriding burn and thin assignement.
    """
    # warnings.warn('Use Sampler.trace method instead.',
    #               DeprecationWarning)
    # Build the default slice from burn/thin unless an explicit slice is given.
    if not slicing:
        slicing = slice(burn, None, thin)
    # If chain is None, get the data from all chains.
    if chain is None:
        self.db.cur.execute('SELECT * FROM [%s]' % self.name)
        trace = self.db.cur.fetchall()
    else:
        # Deal with negative chains (starting from the end).
        if chain < 0:
            chain = range(self.db.chains)[chain]
        self.db.cur.execute('SELECT * FROM [%s] WHERE trace=%s' % (self.name, chain))
        trace = self.db.cur.fetchall()
    # Drop the first two columns of each row — bookkeeping, not sample values
    # (presumably row id and chain id; verify against the table schema).
    trace = np.array(trace)[:, 2:]
    # Restore the original multi-dimensional sample shape when needed.
    if len(self._shape) > 1:
        trace = trace.reshape(-1, *self._shape)
    return squeeze(trace[slicing])
|
def get_by_model(self, model):
    """Get all objects for a specific model."""
    # Resolve the model to its ContentType, then filter on it.
    model_type = ContentType.objects.get_for_model(model)
    return self.filter(content_type=model_type)
|
def create_issue(self, title, content, priority=None, milestone=None, tags=None, assignee=None, private=None):
    """Create a new issue.

    :param title: the title of the issue
    :param content: the description of the issue
    :param priority: the priority of the ticket
    :param milestone: the milestone of the ticket
    :param tags: comma separated list of tags for the ticket
    :param assignee: the assignee of the ticket
    :param private: whether to create this issue as private
    :return: the API response for the created issue
    """
    request_url = "{}new_issue".format(self.create_basic_url())
    payload = {'title': title, 'issue_content': content}
    # Only send optional fields that were explicitly provided.
    if priority is not None:
        payload['priority'] = priority
    if milestone is not None:
        payload['milestone'] = milestone
    if tags is not None:
        payload['tag'] = tags
    if assignee is not None:
        payload['assignee'] = assignee
    if private is not None:
        payload['private'] = private
    return_value = self._call_api(request_url, method='POST', data=payload)
    LOG.debug(return_value)
    # Bug fix: the response was previously only logged; the docstring promises
    # a return value, so hand the API response back to the caller.
    return return_value
|
def fetch(self):
    """Fetch a CompositionSettingsInstance.

    :returns: Fetched CompositionSettingsInstance
    :rtype: twilio.rest.video.v1.composition_settings.CompositionSettingsInstance
    """
    # No query parameters are needed for this endpoint.
    empty_params = values.of({})
    response_payload = self._version.fetch('GET', self._uri, params=empty_params)
    return CompositionSettingsInstance(self._version, response_payload)
|
def hybrid_forward(self, F, words1, words2, weight):  # pylint: disable=arguments-differ
    """Predict the similarity of words1 and words2.

    Parameters
    ----------
    words1 : Symbol or NDArray
        The indices of the words we wish to compare to the words in words2.
    words2 : Symbol or NDArray
        The indices of the words we wish to compare to the words in words1.

    Returns
    -------
    similarity : Symbol or NDArray
        The similarity computed by WordEmbeddingSimilarity.similarity_function.
    """
    # Look up both index batches in the shared embedding weight matrix.
    def _embed(indices):
        return F.Embedding(indices, weight,
                           input_dim=self._vocab_size,
                           output_dim=self._embed_size)

    return self.similarity(_embed(words1), _embed(words2))
|
def fix_ticks(ax):
    """Center ticklabels and hide any outside axes limits.

    By Joe Kington
    """
    plt.setp(ax.get_yticklabels(), ha='center', x=0.5, transform=ax._yaxis_transform)
    # We'll still wind up with some tick labels beyond axes limits for reasons
    # I don't fully understand...
    lo, hi = sorted(ax.get_ylim())
    for label, loc in zip(ax.yaxis.get_ticklabels(), ax.yaxis.get_ticklocs()):
        # Show the label only when its location falls inside the y-limits.
        label.set(visible=lo <= loc <= hi)
|
def time_annotated(func, *args, **kwargs):
    """Annotate the decorated function or method with the total execution time.

    The result is annotated with a `time` attribute.
    """
    started_at = time()
    result = func(*args, **kwargs)
    elapsed = time() - started_at
    # Attach the rounded wall-clock duration to the returned object.
    result.time = round(elapsed, config.PRECISION)
    return result
|
def _get_filename(table):
    """Get the filename from a data table. If it doesn't exist, create a new
    one based on table hierarchy in metadata file.

    format: <dataSetName>.<section><idx><table><idx>.csv
    example: ODP1098B.Chron1.ChronMeasurementTable.csv

    :param dict table: Table data
    :return str filename: Filename (empty string if unavailable)
    """
    try:
        return table["filename"]
    except KeyError:
        # A table without a filename is tolerated; report it and fall back.
        logger_csvs.info("get_filename: KeyError: missing filename for a table")
        print("Error: Missing filename for a table")
    except Exception as e:
        logger_csvs.error("get_filename: {}".format(e))
    return ""
|
def junos_cli(command, format=None, dev_timeout=None, dest=None, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    Execute a CLI command and return the output in the specified format.

    command
        The command to execute on the Junos CLI.

    format: ``text``
        Format in which to get the CLI output (either ``text`` or ``xml``).

    dev_timeout: ``30``
        The NETCONF RPC timeout (in seconds).

    dest
        Destination file where the RPC output is stored. Note that the file will
        be stored on the Proxy Minion. To push the files to the Master, use
        :mod:`cp.push <salt.modules.cp.push>`.

    CLI Example:

    .. code-block:: bash

        salt '*' napalm.junos_cli 'show lldp neighbors'
    '''
    prep = _junos_prep_fun(napalm_device)  # pylint: disable=undefined-variable
    if not prep['result']:
        # Bail out early with the failure details from the prep step.
        return prep
    return __salt__['junos.cli'](
        command, format=format, dev_timeout=dev_timeout, dest=dest, **kwargs
    )
|
def autocorrelation(x, lag):
    r"""Calculates the autocorrelation of the specified lag, according to the formula [1]

    .. math::

        \frac{1}{(n-l)\sigma^{2}} \sum_{t=1}^{n-l}(X_{t}-\mu)(X_{t+l}-\mu)

    where :math:`n` is the length of the time series :math:`X_i`,
    :math:`\sigma^2` its variance and :math:`\mu` its mean. `l` denotes the lag.

    .. rubric:: References

    [1] https://en.wikipedia.org/wiki/Autocorrelation#Estimation

    :param x: the time series to calculate the feature of
    :type x: numpy.ndarray
    :param lag: the lag
    :type lag: int
    :return: the value of this feature
    :return type: float
    """
    # This is important: If a series is passed, the product below is calculated
    # based on the index, which corresponds to squaring the series.
    if isinstance(x, pd.Series):
        x = x.values
    # Bug fix: use <= so that len(x) == lag (which leaves no overlapping
    # samples and would divide zero by zero below) also yields NaN.
    if len(x) <= lag:
        return np.nan
    # Slice the relevant subseries based on the lag.
    y1 = x[:(len(x) - lag)]
    y2 = x[lag:]
    # Subtract the mean of the whole series x.
    x_mean = np.mean(x)
    # The result is sometimes referred to as "covariation".
    sum_product = np.sum((y1 - x_mean) * (y2 - x_mean))
    # Return the normalized unbiased covariance. A (near-)constant series has
    # no defined autocorrelation. (np.NaN was removed in NumPy 2.0; np.nan is
    # the canonical spelling.)
    v = np.var(x)
    if np.isclose(v, 0):
        return np.nan
    return sum_product / ((len(x) - lag) * v)
|
def getCmd(snmpEngine, authData, transportTarget, contextData, *varBinds, **options):
    """Performs SNMP GET query.

    Based on passed parameters, prepares SNMP GET packet
    (:RFC:`1905#section-4.2.1`) and schedules its transmission by
    :mod:`twisted` I/O framework at a later point of time.

    Parameters
    ----------
    snmpEngine : :class:`~pysnmp.hlapi.SnmpEngine`
        Class instance representing SNMP engine.
    authData : :class:`~pysnmp.hlapi.CommunityData` or :class:`~pysnmp.hlapi.UsmUserData`
        Class instance representing SNMP credentials.
    transportTarget : :class:`~pysnmp.hlapi.twisted.UdpTransportTarget` or :class:`~pysnmp.hlapi.twisted.Udp6TransportTarget`
        Class instance representing transport type along with SNMP peer address.
    contextData : :class:`~pysnmp.hlapi.ContextData`
        Class instance representing SNMP ContextEngineId and ContextName values.
    \\*varBinds : :class:`~pysnmp.smi.rfc1902.ObjectType`
        One or more class instances representing MIB variables to place
        into SNMP request.

    Other Parameters
    ----------------
    \\*\\*options :
        Request options:

        * `lookupMib` - load MIB and resolve response MIB variables at
          the cost of slightly reduced performance. Default is `True`.

    Returns
    -------
    deferred : :class:`~twisted.internet.defer.Deferred`
        Twisted Deferred object representing work-in-progress. User
        is expected to attach his own `success` and `error` callback
        functions to the Deferred object though
        :meth:`~twisted.internet.defer.Deferred.addCallbacks` method.

    Raises
    ------
    PySnmpError
        Or its derivative indicating that an error occurred while
        performing SNMP operation.

    Notes
    -----
    User `success` callback is called with the following tuple as
    its first argument:

    * errorStatus (str): True value indicates SNMP PDU error.
    * errorIndex (int): Non-zero value refers to `varBinds[errorIndex-1]`
    * varBinds (tuple): A sequence of
      :class:`~pysnmp.smi.rfc1902.ObjectType` class instances representing
      MIB variables returned in SNMP response.

    User `error` callback is called with `errorIndication` object wrapped
    in :class:`~twisted.python.failure.Failure` object.

    Examples
    --------
    >>> from twisted.internet.task import react
    >>> from pysnmp.hlapi.twisted import *
    >>> def success(args):
    ...     (errorStatus, errorIndex, varBinds) = args
    ...     print(errorStatus, errorIndex, varBinds)
    >>> def failure(errorIndication):
    ...     print(errorIndication)
    >>> def run(reactor):
    ...     d = getCmd(SnmpEngine(),
    ...                CommunityData('public'),
    ...                UdpTransportTarget(('demo.snmplabs.com', 161)),
    ...                ContextData(),
    ...                ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr', 0)))
    ...     d.addCallback(success).addErrback(failure)
    ...     return d
    >>> react(run)
    (0, 0, [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))])
    """

    # Response callback: routes the SNMP outcome into the Deferred.
    def __cbFun(snmpEngine, sendRequestHandle, errorIndication,
                errorStatus, errorIndex, varBinds, cbCtx):
        lookupMib, deferred = cbCtx
        if errorIndication:
            # Transport/engine-level failure -> errback with a Failure wrapper.
            deferred.errback(Failure(errorIndication))
        else:
            try:
                # Optionally resolve raw var-binds against loaded MIBs.
                varBinds = VB_PROCESSOR.unmakeVarBinds(snmpEngine.cache, varBinds, lookupMib)
            except Exception as e:
                deferred.errback(Failure(e))
            else:
                deferred.callback((errorStatus, errorIndex, varBinds))

    # Register credentials/transport with the engine's local configuration.
    addrName, paramsName = LCD.configure(snmpEngine, authData, transportTarget, contextData.contextName)
    # Normalize user-supplied var-binds into wire-ready form.
    varBinds = VB_PROCESSOR.makeVarBinds(snmpEngine.cache, varBinds)
    deferred = Deferred()
    # Schedule the GET; the callback above fires the Deferred on completion.
    cmdgen.GetCommandGenerator().sendVarBinds(
        snmpEngine, addrName, contextData.contextEngineId, contextData.contextName,
        varBinds, __cbFun, (options.get('lookupMib', True), deferred))
    return deferred
|
def sign_execute_cancellation(cancellation_params, key_pair):
    """Sign the parameters required to execute a cancellation request on the
    Switcheo Exchange.

    Execution of this function is as follows::

        sign_execute_cancellation(cancellation_params=signable_params, key_pair=key_pair)

    The expected return result for this function is as follows::

        'signature': '6a40d6c011b7517f8fd3f2d0de32dd486adfd1d424d06d56c80eb....'

    :param cancellation_params: Parameters the Switcheo Exchange returns from the create cancellation.
    :type cancellation_params: dict
    :param key_pair: The KeyPair for the wallet being used to sign deposit message.
    :type key_pair: KeyPair
    :return: Dictionary of signed message to send to the Switcheo API.
    """
    # Sign the returned transaction with the wallet's private key.
    private_key = private_key_to_hex(key_pair=key_pair)
    signed = sign_transaction(transaction=cancellation_params['transaction'],
                              private_key_hex=private_key)
    return {'signature': signed}
|
def cluster_coincs(stat, time1, time2, timeslide_id, slide, window, argmax=numpy.argmax):
    """Cluster coincident events for each timeslide separately, across
    templates, based on the ranking statistic.

    Parameters
    ----------
    stat : numpy.ndarray
        vector of ranking values to maximize
    time1 : numpy.ndarray
        first time vector
    time2 : numpy.ndarray
        second time vector
    timeslide_id : numpy.ndarray
        vector that determines the timeslide offset
    slide : float
        length of the timeslides offset interval
    window : float
        length to cluster over

    Returns
    -------
    cindex : numpy.ndarray
        The set of indices corresponding to the surviving coincidences.
    """
    logging.info('clustering coinc triggers over %ss window' % window)
    # Nothing to cluster if either detector contributed no triggers.
    if len(time1) == 0 or len(time2) == 0:
        logging.info('No coinc triggers in one, or both, ifos.')
        return numpy.array([])
    if numpy.isfinite(slide):
        # For a time-shifted coinc, time1 is greater than time2 by
        # approximately timeslide_id * slide; adding this quantity gives a
        # mean coinc time located around time1.
        time = (time1 + time2 + timeslide_id * slide) / 2
    else:
        time = 0.5 * (time2 + time1)
    # Offset each timeslide by more than the full data span so clusters from
    # different slides can never overlap in the combined time coordinate.
    tslide = timeslide_id.astype(numpy.float128)
    time = time.astype(numpy.float128)
    span = (time.max() - time.min()) + window * 10
    time = time + span * tslide
    return cluster_over_time(stat, time, window, argmax)
|
def vdm_b(vdm, lat):
    """Converts a virtual dipole moment (VDM) or a virtual axial dipole moment
    (VADM; input in units of Am^2) to a local magnetic field value (output in
    units of tesla).

    Parameters
    ----------
    vdm : V(A)DM in units of Am^2
    lat : latitude of site in degrees

    Returns
    -------
    B : local magnetic field strength in tesla
    """
    # old_div was a Python 2 compatibility shim; for two floats plain true
    # division is identical under Python 3, so drop the dependency.
    rad = np.pi / 180.
    # changed radius of the earth from 3.367e6 3/12/2010
    fact = ((6.371e6) ** 3) * 1e7
    colat = (90. - lat) * rad
    # Dipole-field magnitude at the given (co)latitude.
    return vdm * (np.sqrt(1 + 3 * (np.cos(colat) ** 2))) / fact
|
def _family_notes_path(family, data_dir):
    '''Form a path to the notes for a family.

    :param family: family name (matched case-insensitively)
    :param data_dir: directory holding the data files
    :return: full path to the NOTES.<family> file
    :raises RuntimeError: if the family does not exist in data_dir
    '''
    data_dir = fix_data_dir(data_dir)
    family = family.lower()
    # Idiom fix: 'x not in y' instead of 'not x in y'.
    if family not in get_families(data_dir):
        raise RuntimeError("Family '{}' does not exist".format(family))
    # 'family' was already lower-cased above; no need to lower it again.
    file_name = 'NOTES.' + family
    return os.path.join(data_dir, file_name)
|
def parse_rich_header(self):
    """Parses the rich header.

    see http://www.ntcore.com/files/richsign.htm for more information

    Structure:
    00 DanS ^ checksum, checksum, checksum, checksum
    10 Symbol RVA ^ checksum, Symbol size ^ checksum...
    XX Rich, checksum, 0, 0,...

    Returns a dict with the XOR key, the raw and XOR-decoded header bytes,
    the checksum and the decoded value pairs, or None when no valid Rich
    header is present.
    """
    # Rich Header constants
    DANS = 0x536E6144  # 'DanS' as dword
    RICH = 0x68636952  # 'Rich' as dword
    # The Rich header lives between the DOS stub (offset 0x80) and the PE
    # optional header; search only that region for the 'Rich' footer.
    rich_index = self.__data__.find(b'Rich', 0x80, self.OPTIONAL_HEADER.get_file_offset())
    if rich_index == -1:
        return None
    # Read a block of data
    try:
        # The end of the structure is 8 bytes after the start of the Rich
        # string.
        rich_data = self.get_data(0x80, rich_index + 8)
        # Make the data have length a multiple of 4, otherwise the
        # subsequent parsing will fail. It's not impossible that we retrieve
        # truncated data that it's not a multiple.
        rich_data = rich_data[:4 * int(len(rich_data) / 4)]
        # Reinterpret the header as little-endian 32-bit words.
        data = list(struct.unpack('<{0}I'.format(int(len(rich_data) / 4)), rich_data))
        if RICH not in data:
            return None
    except PEFormatError:
        return None
    # get key, raw_data and clear_data
    # The XOR key is the dword immediately following the 'Rich' marker.
    key = struct.pack('<L', data[data.index(RICH) + 1])
    result = {"key": key}
    raw_data = rich_data[:rich_data.find(b'Rich')]
    result["raw_data"] = raw_data
    # Python 2/3 compatibility: indexing bytes yields str on 2, int on 3.
    ord_ = lambda c: ord(c) if not isinstance(c, int) else c
    clear_data = bytearray()
    # XOR-decode the header with the repeating 4-byte key.
    for i in range(len(raw_data)):
        clear_data.append((ord_(raw_data[i]) ^ ord_(key[i % len(key)])))
    result["clear_data"] = bytes(clear_data)
    # the checksum should be present 3 times after the DanS signature
    checksum = data[1]
    if (data[0] ^ checksum != DANS or data[2] != checksum or data[3] != checksum):
        return None
    result["checksum"] = checksum
    headervalues = []
    result["values"] = headervalues
    data = data[4:]
    for i in range(int(len(data) / 2)):
        # Stop until the Rich footer signature is found
        if data[2 * i] == RICH:
            # it should be followed by the checksum
            if data[2 * i + 1] != checksum:
                self.__warnings.append('Rich Header is malformed')
            break
        # header values come by pairs
        headervalues += [data[2 * i] ^ checksum, data[2 * i + 1] ^ checksum]
    return result
|
def apply(self, X, ntree_limit=0):
    """Return the predicted leaf every tree for each sample.

    Parameters
    ----------
    X : array_like, shape=[n_samples, n_features]
        Input features matrix.
    ntree_limit : int
        Limit number of trees in the prediction; defaults to 0 (use all trees).

    Returns
    -------
    X_leaves : array_like, shape=[n_samples, n_trees]
        For each datapoint x in X and for each tree, return the index of the
        leaf x ends up in. Leaves are numbered within
        ``[0; 2**(self.max_depth+1))``, possibly with gaps in the numbering.
    """
    # Wrap the input in the internal DMatrix format before prediction.
    dmatrix = DMatrix(X, missing=self.missing, nthread=self.n_jobs)
    booster = self.get_booster()
    return booster.predict(dmatrix, pred_leaf=True, ntree_limit=ntree_limit)
|
def grant_sudo_privileges(request, max_age=COOKIE_AGE):
    """Assigns a random token to the user's session that allows them to have
    elevated permissions.
    """
    user = getattr(request, 'user', None)
    # If there's not a user on the request, just noop.
    if user is None:
        return None
    if not user.is_authenticated():
        raise ValueError('User needs to be logged in to be elevated to sudo')
    # Token doesn't need to be unique, just needs to be unpredictable and
    # match the cookie and the session.
    token = get_random_string()
    request.session[COOKIE_NAME] = token
    # Mirror the sudo state onto the request for downstream middleware.
    request._sudo = True
    request._sudo_token = token
    request._sudo_max_age = max_age
    return token
|
def assertFileSizeLessEqual(self, filename, size, msg=None):
    '''Fail if ``filename``'s size is not less than or equal to
    ``size`` as determined by the '<=' operator.

    Parameters
    ----------
    filename : str, bytes, file-like
    size : int, float
    msg : str
        If not provided, the :mod:`marbles.mixins` or
        :mod:`unittest` standard message will be used.

    Raises
    ------
    TypeError
        If ``filename`` is not a str or bytes object and is not
        file-like.
    '''
    # Delegate the comparison to the stock unittest assertion.
    actual_size = self._get_file_size(filename)
    self.assertLessEqual(actual_size, size, msg=msg)
|
def set_meta(self, _props):
    """Set metadata values for collection."""
    if self.is_fake:
        # Fake collections have no backing journal to update.
        return
    # Translate incoming property names/values via the meta mappings.
    props = {}
    for raw_key, raw_value in _props.items():
        mapped_key, mapped_value = self.meta_mappings.map_set(raw_key, raw_value)
        props[mapped_key] = mapped_value
    # Pop out tag which we don't want.
    props.pop("tag", None)
    self.journal.update_info({})
    self.journal.update_info(props)
    self.journal.save()
|
def list(self):
    """Return a list of bots.

    :return: all of your bots
    :rtype: :class:`list`
    """
    response = self.session.get(self.url)
    # Wrap each raw bot payload in a Bot object bound to this client.
    return [Bot(self, **bot_data) for bot_data in response.data]
|
def l_system(axiom, transformations, iterations=1, angle=45, resolution=1):
    """Generates a texture by running transformations on a turtle program.

    First, the given transformations are applied to the axiom. This is
    repeated `iterations` times. Then, the output is run as a turtle
    program to get a texture, which is returned.

    For more background see: https://en.wikipedia.org/wiki/L-system

    Args:
        axiom (str): the axiom of the Lindenmeyer system (a string)
        transformations (dict): a dictionary mapping each char to the string
            that is substituted for it when the rule is applied
        iterations (int): the number of times to apply the transformations
        angle (float): the angle to use for turns when interpreting the string
            as a turtle graphics program
        resolution (int): the number of midpoints to create in each turtle step

    Returns:
        A texture
    """
    # Expand the axiom by repeatedly applying the rewrite rules, then
    # interpret the resulting string as a turtle-graphics program.
    expanded = transform_multiple(axiom, transformations, iterations)
    return turtle_to_texture(expanded, angle, resolution=resolution)
|
def path_wo_ns(obj):
    """Return path of an instance or instance path without host or namespace.

    Creates copy of the object so the original is not changed.

    :param obj: a pywbem.CIMInstance or pywbem.CIMInstanceName
    :return: a pywbem.CIMInstanceName copy with host and namespace cleared
    :raises TypeError: if obj is neither a CIMInstance nor a CIMInstanceName
    """
    if isinstance(obj, pywbem.CIMInstance):
        path = obj.path.copy()
    elif isinstance(obj, pywbem.CIMInstanceName):
        path = obj.copy()
    else:
        # Bug fix: 'assert False' is stripped under 'python -O', which would
        # let an invalid object fall through with 'path' unbound. Raise a
        # proper, always-on error instead.
        raise TypeError(
            "Expected CIMInstance or CIMInstanceName, got {}".format(type(obj).__name__))
    path.host = None
    path.namespace = None
    return path
|
def segs(self, word):
    """Returns a list of segments from a word.

    Args:
        word (unicode): input word as Unicode IPA string

    Returns:
        list: list of strings corresponding to segments found in `word`
    """
    matches = self.seg_regex.finditer(word)
    # Each match's 'all' group is the full text of one segment.
    return [match.group('all') for match in matches]
|
def validate_marked_location(location):
    """Validate that a Location object is safe for marking, and not at a field."""
    # Only genuine location types may be marked.
    if not isinstance(location, (Location, FoldScopeLocation)):
        raise TypeError(u'Expected Location or FoldScopeLocation location, got: {} {}'
                        .format(type(location).__name__, location))
    # Marking a location that points at a field is not allowed.
    if location.field is not None:
        raise GraphQLCompilationError(u'Cannot mark location at a field: {}'.format(location))
|
def invalidate(self):
    """Clear out cached properties."""
    # Drop any lazily computed attributes so they are re-fetched on demand.
    for cached_attr in ('_avail_backups', '_ips'):
        if hasattr(self, cached_attr):
            delattr(self, cached_attr)
    Base.invalidate(self)
|
def get_atoms(self, ligands=True, inc_alt_states=False):
    """Flat list of all the Atoms in the Polymer.

    Parameters
    ----------
    ligands : bool
        If true, atoms of any attached ligands are included as well.
    inc_alt_states : bool
        If true atoms from alternate conformations are included rather
        than only the "active" states.

    Returns
    -------
    atoms : itertools.chain
        Returns an iterator of all the atoms. Convert to list if you
        require indexing.
    """
    monomers = list(self._monomers)
    if ligands and self.ligands:
        monomers += self.ligands._monomers
    # Chain together the atoms of every monomer into one flat iterator.
    return itertools.chain(
        *(list(m.get_atoms(inc_alt_states=inc_alt_states)) for m in monomers))
|
def validate(self, instance, value):
    """Checks that the value is a valid file open in the correct mode.

    If value is a string, it attempts to open it with the given mode.
    """
    # A path string is converted into an open file handle first.
    if isinstance(value, string_types) and self.mode is not None:
        try:
            value = open(value, self.mode)
        except (IOError, TypeError):
            self.error(instance, value, extra='Cannot open file: {}'.format(value))
    # Minimal duck-typing check for a file-like object.
    if not all(hasattr(value, attr) for attr in ('read', 'seek')):
        self.error(instance, value, extra='Not a file-like object')
    # Check the mode only when the object exposes one and modes are restricted.
    if hasattr(value, 'mode') and self.valid_modes is not None:
        if value.mode not in self.valid_modes:
            self.error(instance, value, extra='Invalid mode: {}'.format(value.mode))
    if getattr(value, 'closed', False):
        self.error(instance, value, extra='File is closed.')
    return value
|
def moving_hfs_rank(h, size, start=0, stop=None):
    """Helper function for plotting haplotype frequencies in moving windows.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    size : int
        The window size (number of variants).
    start : int, optional
        The index at which to start.
    stop : int, optional
        The index at which to stop.

    Returns
    -------
    hr : ndarray, int, shape (n_windows, n_haplotypes)
        Haplotype rank array.
    """
    # Determine windows.
    windows = np.asarray(list(index_windows(h, size=size, start=start, stop=stop, step=None)))
    # Setup output.
    hr = np.zeros((windows.shape[0], h.shape[1]), dtype='i4')
    # Iterate over windows.
    for window_index, (lo, hi) in enumerate(windows):
        # Extract haplotypes for the current window.
        hw = h[lo:hi]
        # Count distinct haplotypes, sorted descending by frequency.
        counts = hw.distinct_counts()
        counts.sort()
        counts = counts[::-1]
        # Assign ranks (1-based) to non-singleton haplotypes; singleton and
        # unused columns keep rank 0.
        col = 0
        for rank_idx, count in enumerate(counts):
            if count > 1:
                hr[window_index, col:col + count] = rank_idx + 1
                col += count
    return hr
|
def node_to_ini(node, output=sys.stdout):
    """Convert a Node object with the right structure into a .ini file.

    :params node: a Node object
    :params output: a file-like object opened in write mode
    """
    for section in node:
        # Each child node becomes one [section] header.
        output.write(u'\n[%s]\n' % section.tag)
        # Write the attributes as key=value lines, sorted for stable output.
        for option in sorted(section.attrib):
            output.write(u'%s=%s\n' % (option, section.attrib[option]))
    output.flush()
|
def matrix_height(self, zoom):
    """Tile matrix height (number of rows) at zoom level.

    - zoom: zoom level
    """
    validate_zoom(zoom)
    # Rows double per zoom level, shrunk by the metatiling factor.
    rows = int(math.ceil(self.grid.shape.height * 2 ** zoom / self.metatiling))
    # A matrix always has at least one row.
    return max(rows, 1)
|
def write(self, path):
    """Write RSS content to file."""
    # Serialize the feed once, then write it out as bytes.
    xml_bytes = self.getXML()
    with open(path, 'wb') as outfile:
        outfile.write(xml_bytes)
|
def member_command(self, repl_id, member_id, command):
    """Apply command (start, stop, restart) to the member of replica set.

    Args:
        repl_id - replica set identity
        member_id - member index
        command - command: start, stop, restart

    return True if operation success otherwise False
    """
    replica_set = self[repl_id]
    outcome = replica_set.member_command(member_id, command)
    # Re-store the replica set so any state changes are persisted.
    self[repl_id] = replica_set
    return outcome
|
def ycbcr2rgb(y__, cb_, cr_):
    """Convert the three YCbCr channels to RGB channels."""
    # ITU-R BT.601-style luma weights for blue and red.
    kb_ = 0.114
    kr_ = 0.299
    # Red and blue are reconstructed directly from their chroma channels.
    r__ = y__ + 2 * cr_ / (1 - kr_)
    b__ = y__ + 2 * cb_ / (1 - kb_)
    # Green is whatever luma remains after removing the red/blue shares.
    g__ = (y__ - kr_ * r__ - kb_ * b__) / (1 - kr_ - kb_)
    return r__, g__, b__
|
def get_queryset(self, request):
    """Limit to Tenants that this user can access."""
    qs = super(TenantAdmin, self).get_queryset(request)
    if request.user.is_superuser:
        # Superusers see everything.
        return qs
    # Managers see tenants where they hold a manager role, either via a
    # group-level role or a tenant-level role.
    by_group_role = qs.filter(
        group__tenantrole__user=request.user,
        group__tenantrole__role=TenantRole.ROLE_GROUP_MANAGER)
    by_tenant_role = qs.filter(
        tenantrole__user=request.user,
        tenantrole__role=TenantRole.ROLE_TENANT_MANAGER)
    return by_group_role | by_tenant_role
|
def factorial(n, mod=None):
    """Calculates factorial iteratively.

    If mod is not None, then return (n! % mod).
    Time Complexity - O(n)

    :param n: non-negative integer
    :param mod: optional positive integer modulus
    :return: n! (or n! % mod when mod is given)
    :raises ValueError: if n is not a non-negative integer, or mod is not a
        positive integer
    """
    if not (isinstance(n, int) and n >= 0):
        raise ValueError("'n' must be a non-negative integer.")
    if mod is not None and not (isinstance(mod, int) and mod > 0):
        raise ValueError("'mod' must be a positive integer")
    result = 1
    for i in range(2, n + 1):
        result *= i
        # Reduce as we go to keep intermediate values small.
        if mod:
            result %= mod
    # Bug fix: previously n == 0 returned early and n in (0, 1) skipped the
    # loop, so the modulus was never applied — e.g. factorial(1, 1) returned
    # 1 instead of 0. Apply it once more on the way out.
    if mod is not None:
        result %= mod
    return result
|
def get_rate(self, currency, date):
    """Get the exchange rate for ``currency`` against ``_INTERNAL_CURRENCY``.

    If implementing your own backend, you should probably override
    :meth:`_get_rate()` rather than this.
    """
    # The internal currency always trades 1:1 with itself.
    if str(currency) == defaults.INTERNAL_CURRENCY:
        return Decimal(1)
    cached = cache.get(_cache_key(currency, date))
    if cached:
        return Decimal(cached)
    # Cache miss: expect self._get_rate() to implement caching.
    return Decimal(self._get_rate(currency, date))
|
def _publish_actor_class_to_key(self, key, actor_class_info):
    """Push an actor class definition to Redis.

    This is factored out as a separate function because it is also called
    on cached actor class definitions when a worker connects for the first
    time.

    Args:
        key: The key to store the actor class info at.
        actor_class_info: Information about the actor class.
    """
    # NOTE(review): the original comment here said "We set the driver ID
    # here because it may not have been available when the actor class was
    # defined", but no driver ID is set in this body — possibly stale; verify.
    # Store the class info hash first, then announce the key on the
    # "Exports" list so listening workers pick it up in that order.
    self._worker.redis_client.hmset(key, actor_class_info)
    self._worker.redis_client.rpush("Exports", key)
|
def logger_focus(self, i, focus_shift=16):
    """Focus the logger on an entry ``focus_shift`` rows below ``i``.

    @param: i -> index to focus on
    """
    last_index = self.logger.GetItemCount() - 1
    # Shift downward when there is room, otherwise clamp to the last entry.
    target = i + focus_shift if last_index > i + focus_shift else last_index
    self.logger.Focus(target)
|
def createPlotDataItem(self):
    """Creates a PyQtGraph PlotDataItem from the config values.

    Reads the line and symbol settings from the child config tree items;
    when a part is disabled (its config value is falsy) the corresponding
    pen/brush is set to None so PyQtGraph draws nothing for it.
    """
    antialias = self.antiAliasCti.configValue
    color = self.penColor
    if self.lineCti.configValue:
        # Cosmetic pen: width is in screen pixels, independent of zoom.
        pen = QtGui.QPen()
        pen.setCosmetic(True)
        pen.setColor(color)
        pen.setWidthF(self.lineWidthCti.configValue)
        pen.setStyle(self.lineStyleCti.configValue)
        # Shadow pen mirrors the main pen's style at twice its width.
        shadowCti = self.lineCti.findByNodePath('shadow')
        shadowPen = shadowCti.createPen(altStyle=pen.style(), altWidth=2.0 * pen.widthF())
    else:
        pen = None
        shadowPen = None
    drawSymbols = self.symbolCti.configValue
    symbolShape = self.symbolShapeCti.configValue if drawSymbols else None
    symbolSize = self.symbolSizeCti.configValue if drawSymbols else 0.0
    # Keep symbolPen None — otherwise the symbols will also have a
    # dotted/solid outline.
    symbolPen = None
    symbolBrush = QtGui.QBrush(color) if drawSymbols else None
    plotDataItem = pg.PlotDataItem(antialias=antialias, pen=pen, shadowPen=shadowPen,
                                   symbol=symbolShape, symbolSize=symbolSize,
                                   symbolPen=symbolPen, symbolBrush=symbolBrush)
    return plotDataItem
|
def CallFunction(self):
    """Calls the function via RPC.

    Returns None when no proxy is configured, when the proxy has no such
    function, or when the RPC call itself fails.
    """
    proxy = self._xmlrpc_proxy
    if proxy is None:
        return None
    rpc_call = getattr(proxy, self._RPC_FUNCTION_NAME, None)
    if rpc_call is None:
        return None
    try:
        # pylint: disable=not-callable
        return rpc_call()
    except (expat.ExpatError, SocketServer.socket.error, xmlrpclib.Fault) as exception:
        logger.warning('Unable to make RPC call with error: {0!s}'.format(exception))
        return None
|
def add_summary_page(self):
    """Build a table which is shown on the first page which gives an
    overview of the portfolios.
    """
    s = PortfolioSummary()
    s.include_long_short()
    pieces = []
    for r in self.results:
        # One summary frame per portfolio result, tagged with its
        # description and sid.
        tmp = s(r.port, PortfolioSummary.analyze_returns)
        tmp['desc'] = r.desc
        tmp['sid'] = r.sid
        # Move (sid, desc) into the index and place them in front of the
        # original index level.
        tmp = tmp.set_index(['sid', 'desc'], append=1).reorder_levels([2, 1, 0])
        pieces.append(tmp)
    frame = pd.concat(pieces)
    tf = self.pdf.table_formatter(frame)
    tf.apply_basic_style(cmap=self.table_style)
    # [col.guess_format(pcts=1, trunc_dot_zeros=1) for col in tf.cells.iter_cols()]
    # Column-specific number formatting for the known summary columns:
    tf.cells.match_column_labels(['nmonths', 'cnt', 'win cnt', 'lose cnt', 'dur max']).int_format()
    tf.cells.match_column_labels(['sharpe ann', 'sortino', 'dur avg']).float_format(precision=1)
    tf.cells.match_column_labels(['maxdd dt']).apply_format(new_datetime_formatter('%d-%b-%y'))
    tf.cells.match_column_labels(['cagr', 'mret avg', 'mret std ann', 'ret std',
                                  'mret avg ann', 'maxdd', 'avg dd', 'winpct',
                                  'ret avg', 'ret min', 'ret max']).percent_format()
    self.pdf.build_page('summary', {'F1': tf.build()})
|
def bind(self, server, net=None, address=None):
    """Create a network adapter object and bind.

    Registers a NetworkAdapter for ``net``, optionally records ``address``
    as the local address, and binds the adapter to ``server``.
    """
    if _debug:
        NetworkServiceAccessPoint._debug("bind %r net=%r address=%r", server, net, address)
    # Refuse to bind the same network number twice.
    if net in self.adapters:
        raise RuntimeError("already bound")
    # Create an adapter object and add it to our map.
    new_adapter = NetworkAdapter(self, net)
    self.adapters[net] = new_adapter
    if _debug:
        NetworkServiceAccessPoint._debug(" - adapters[%r]: %r", net, new_adapter)
    # If the address was given, make it the "local" one.
    if address and not self.local_address:
        self.local_adapter = new_adapter
        self.local_address = address
    # Bind the adapter to the server.
    bind(new_adapter, server)
|
def on_any_event(self, event):
    """On any event method.

    Forwards the event to every delegate that implements ``on_any_event``.
    """
    interested = (d for d in self.delegates if hasattr(d, "on_any_event"))
    for delegate in interested:
        delegate.on_any_event(event)
|
def pca_loadings(adata, components=None, show=None, save=None):
    """Rank genes according to contributions to PCs.

    Parameters
    ----------
    adata : :class:`~anndata.AnnData`
        Annotated data matrix.
    components : str or list of integers, optional
        For example, ``'1,2,3'`` means ``[1, 2, 3]``, first, second, third
        principal component.
    show : bool, optional (default: `None`)
        Show the plot, do not return axis.
    save : `bool` or `str`, optional (default: `None`)
        If `True` or a `str`, save the figure. A string is appended to the
        default filename. Infer the filetype if ending on {'.pdf', '.png',
        '.svg'}.
    """
    if components is None:
        components = [1, 2, 3]
    elif isinstance(components, str):
        # '1,2,3' -> [1, 2, 3]; the explicit int() cast is required because
        # subtracting 1 from a numpy array of *strings* raises a TypeError.
        components = [int(c) for c in components.split(',')]
    # Convert from 1-based (user-facing) to 0-based (internal) indexing.
    components = np.array(components) - 1
    ranking(adata, 'varm', 'PCs', indices=components)
    utils.savefig_or_show('pca_loadings', show=show, save=save)
|
def resample(df, rule, time_index, groupby=None, aggregation='mean'):
    """pd.DataFrame.resample adapter.

    Call the ``df.resample`` method on the given time_index and afterwards
    call the indicated aggregation.

    Optionally group the dataframe by the indicated columns before
    performing the resampling. If the groupby option is used, the result is
    a multi-index dataframe.

    Args:
        df (pandas.DataFrame): DataFrame to resample.
        rule (str): The offset string or object representing target conversion.
        time_index (str): Name of the column to use as the time index.
        groupby (list): Optional list of columns to group by.
        aggregation (str): Name of the aggregation function to use.

    Returns:
        pandas.DataFrame: resampled dataframe
    """
    grouped = bool(groupby)
    if grouped:
        df = df.groupby(groupby)
    df = df.resample(rule, on=time_index)
    df = getattr(df, aggregation)()
    if grouped:
        # The grouping columns reappear as value columns after the
        # aggregation; drop them since they are already encoded in the
        # index. (The original iterated ``groupby`` unconditionally, which
        # raised TypeError when groupby was None.)
        for column in groupby:
            del df[column]
    return df
|
def symlink(target, link, target_is_directory=False):
    """An implementation of os.symlink for Windows (Vista and greater)."""
    is_dir = target_is_directory or _is_target_a_directory(link, target)
    # Normalize the target first — MS symlinks don't respect forward slashes.
    normalized_target = os.path.normpath(target)
    handle_nonzero_success(api.CreateSymbolicLink(link, normalized_target, is_dir))
|
def schedule_snapshot(self):
    """Trigger snapshot to be uploaded to AWS.

    Return success state.
    """
    # Notes:
    # - Snapshots are not immediate.
    # - Snapshots will be cached for predefined amount of time.
    # - Snapshots are not balanced. To get a better image, it must be
    #   taken from the stream, a few seconds after stream start.
    url = SNAPSHOTS_ENDPOINT
    # Copy the module-level template instead of mutating it in place; the
    # original wrote per-device values directly into the shared
    # SNAPSHOTS_BODY dict, leaking state between devices and calls.
    params = dict(SNAPSHOTS_BODY)
    params['from'] = "{0}_web".format(self.user_id)
    params['to'] = self.device_id
    params['resource'] = "cameras/{0}".format(self.device_id)
    params['transId'] = "web!{0}".format(self.xcloud_id)
    # override headers
    headers = {'xCloudId': self.xcloud_id}
    _LOGGER.debug("Snapshot device %s", self.name)
    _LOGGER.debug("Device params %s", params)
    _LOGGER.debug("Device headers %s", headers)
    ret = self._session.query(url, method='POST', extra_params=params, extra_headers=headers)
    _LOGGER.debug("Snapshot results %s", ret)
    return ret is not None and ret.get('success')
|
def _dict_seq_locus(list_c, loci_obj, seq_obj):
    """Return dict with sequences = [cluster1, cluster2 ...].

    For every sequence that belongs to more than one cluster, records the
    maximum locus coverage observed for each (sequence, cluster) pair, then
    post-processes the result with ``_transform``.
    """
    seqs = defaultdict(set)
    # n = len(list_c.keys())
    # First pass: record which clusters each sequence appears in.
    for c in list_c.values():
        for l in c.loci2seq:
            [seqs[s].add(c.id) for s in c.loci2seq[l]]
    # Sequences shared by more than one cluster.
    common = [s for s in seqs if len(seqs[s]) > 1]
    seqs_in_c = defaultdict(float)
    # Second pass: for each shared sequence, keep the best (maximum)
    # coverage seen at any locus of each cluster.
    for c in list_c.values():
        for l in c.loci2seq:  # total = sum([v for v in loci_obj[l].coverage.values()])
            for s in c.loci2seq[l]:
                if s in common:
                    pos = seq_obj[s].pos[l]
                    # cov = 1.0 * loci_obj[l].coverage[pos] / total
                    cov = 1.0 * loci_obj[l].coverage[pos]
                    if seqs_in_c[(s, c.id)] < cov:
                        seqs_in_c[(s, c.id)] = cov
    seqs_in_c = _transform(seqs_in_c)
    return seqs_in_c
|
def run(self):
    """Run the configured method and write the HTTP response status and text
    to the output stream.

    Builds an EC2 query from the configured credentials/endpoint, submits
    it, and returns the Twisted Deferred with success/error writers
    attached.
    """
    region = AWSServiceRegion(access_key=self.key, secret_key=self.secret, uri=self.endpoint)
    query = self.query_factory(action=self.action, creds=region.creds,
                               endpoint=region.ec2_endpoint, other_params=self.parameters)

    def write_response(response):
        # Success path: echo the request URL, HTTP status and response body.
        print >> self.output, "URL: %s" % query.client.url
        print >> self.output
        print >> self.output, "HTTP status code: %s" % query.client.status
        print >> self.output
        print >> self.output, response

    def write_error(failure):
        # Failure path: unwrap AWSError to its original message, strip the
        # redundant "Error Message: " prefix, then dump URL/status/message
        # and (when available) the raw response body.
        if failure.check(AWSError):
            message = failure.value.original
        else:
            message = failure.getErrorMessage()
        if message.startswith("Error Message: "):
            message = message[len("Error Message: "):]
        print >> self.output, "URL: %s" % query.client.url
        print >> self.output
        if getattr(query.client, "status", None) is not None:
            print >> self.output, "HTTP status code: %s" % (query.client.status,)
            print >> self.output
        print >> self.output, message
        if getattr(failure.value, "response", None) is not None:
            print >> self.output
            print >> self.output, failure.value.response

    deferred = query.submit()
    deferred.addCallback(write_response)
    deferred.addErrback(write_error)
    return deferred
|
def to_bytes(value):
    """Get a byte array representing the value.

    Unicode text is UTF-8 encoded; byte strings pass through unchanged;
    anything else is stringified. (Python 2 code: ``str`` is the byte type.)
    """
    if isinstance(value, unicode):
        # Unicode text -> UTF-8 bytes.
        return value.encode('utf8')
    if isinstance(value, str):
        # Already a (Python 2) byte string.
        return value
    # Fall back to the string representation.
    return str(value)
|
def publish_date(self, publish_date):
    """Set Report publish date."""
    # Normalize to the API's expected UTC-style timestamp format.
    formatted = self._utils.format_datetime(publish_date, date_format='%Y-%m-%dT%H:%M:%SZ')
    self._group_data['publishDate'] = formatted
|
def restart(self):
    """Restart all the processes."""
    Global.LOGGER.info("restarting the flow manager")
    # Tear down the currently running actions, forget them, then bring the
    # configured set back up.
    self._stop_actions()
    self.actions = []
    self._start_actions()
    Global.LOGGER.debug("flow manager restarted")
|
def post(self):
    """Create a new config item.

    Validates that the namespace exists and the key does not, then stores
    the item and writes an audit log entry.
    """
    # All fields are required; 'value' is the only one without a type cast.
    for name, kwargs in (
        ('namespacePrefix', {'type': str}),
        ('description', {'type': str}),
        ('key', {'type': str}),
        ('value', {}),
        ('type', {'type': str}),
    ):
        self.reqparse.add_argument(name, required=True, **kwargs)
    args = self.reqparse.parse_args()
    prefix = args['namespacePrefix']
    if not self.dbconfig.namespace_exists(prefix):
        return self.make_response('The namespace doesnt exist', HTTP.NOT_FOUND)
    if self.dbconfig.key_exists(prefix, args['key']):
        return self.make_response('This config item already exists', HTTP.CONFLICT)
    self.dbconfig.set(prefix, args['key'], _to_dbc_class(args), description=args['description'])
    auditlog(event='configItem.create', actor=session['user'].username, data=args)
    return self.make_response('Config item added', HTTP.CREATED)
|
def dump_orm_object_as_insert_sql(engine: Engine, obj: object, fileobj: TextIO) -> None:
    """Takes a SQLAlchemy ORM object, and writes ``INSERT`` SQL to replicate
    it to the output file-like object.

    Args:
        engine: SQLAlchemy :class:`Engine`
        obj: SQLAlchemy ORM object to write
        fileobj: file-like object to write to
    """
    # literal_query = make_literal_query_fn(engine.dialect)
    insp = inspect(obj)
    # insp: an InstanceState
    # http://docs.sqlalchemy.org/en/latest/orm/internals.html#sqlalchemy.orm.state.InstanceState  # noqa
    # insp.mapper: a Mapper
    # http://docs.sqlalchemy.org/en/latest/orm/mapping_api.html#sqlalchemy.orm.mapper.Mapper  # noqa

    # Don't do this:
    #   table = insp.mapper.mapped_table
    # Do this instead. The method above gives you fancy data types like list
    # and Arrow on the Python side. We want the bog-standard datatypes drawn
    # from the database itself.
    meta = MetaData(bind=engine)
    table_name = insp.mapper.mapped_table.name
    # log.debug("table_name: {}", table_name)
    # Autoload the Core table definition from the live database.
    table = Table(table_name, meta, autoload=True)
    # log.debug("table: {}", table)

    # NewRecord = quick_mapper(table)
    # columns = table.columns.keys()
    # Re-select the row via Core, filtered down by every primary key column,
    # so the values come back in plain database types.
    query = select(table.columns)
    # log.debug("query: {}", query)
    for orm_pkcol in insp.mapper.primary_key:
        core_pkcol = table.columns.get(orm_pkcol.name)
        pkval = getattr(obj, orm_pkcol.name)
        query = query.where(core_pkcol == pkval)
    # log.debug("query: {}", query)
    cursor = engine.execute(query)
    row = cursor.fetchone()
    # should only be one...
    row_dict = dict(row)
    # log.debug("obj: {}", obj)
    # log.debug("row_dict: {}", row_dict)
    # Render the INSERT with inlined literal values and write it out.
    statement = table.insert(values=row_dict)
    # insert_str = literal_query(statement)
    insert_str = get_literal_query(statement, bind=engine)
    writeline_nl(fileobj, insert_str)
|
def expanduser(path):
    """Expand ~ and ~user constructs.

    If user or $HOME is unknown, do nothing.
    """
    if path[:1] != '~':
        return path
    # Find the end of the (possibly empty) user name: up to the first
    # path separator, forward or backward slash.
    i, n = 1, len(path)
    while i < n and path[i] not in '/\\':
        i += 1
    # Locate the home directory: HOME, then USERPROFILE, then
    # HOMEDRIVE + HOMEPATH (Windows). If none is set, give up unchanged.
    env = os.environ
    if 'HOME' in env:
        userhome = env['HOME']
    elif 'USERPROFILE' in env:
        userhome = env['USERPROFILE']
    elif 'HOMEPATH' not in env:
        return path
    else:
        userhome = join(env.get('HOMEDRIVE', ''), env['HOMEPATH'])
    if i != 1:
        # ~user form: swap the last component of the home dir for the
        # requested user name.
        userhome = join(dirname(userhome), path[1:i])
    return userhome + path[i:]
|
def click(self, x, y, button, press):
    """Print Fibonacci numbers when the left click is pressed."""
    if button != 1:
        # Exit if any other mouse button is used.
        self.stop()
    elif press:
        # Only on press (not release): emit the next Fibonacci number.
        print(self.fibo.next())
|
def barycenter(A, M, weights=None, verbose=False, log=False, solver='interior-point'):
    r"""Compute the Wasserstein barycenter of distributions A.

    The function solves the following optimization problem [16]:

    .. math::
        \mathbf{a} = arg\min_\mathbf{a} \sum_i W_{1}(\mathbf{a},\mathbf{a}_i)

    where:

    - :math:`W_1(\cdot,\cdot)` is the Wasserstein distance (see ot.emd.sinkhorn)
    - :math:`\mathbf{a}_i` are training distributions in the columns of
      matrix :math:`\mathbf{A}`

    The linear program is solved using the interior point solver from
    scipy.optimize. If the cvxopt solver is installed it can use cvxopt.

    Note that this problem does not scale well (both in memory and
    computational time).

    Parameters
    ----------
    A : np.ndarray (d, n)
        n training distributions a_i of size d
    M : np.ndarray (d, d)
        loss matrix for OT
    weights : np.ndarray (n,)
        Weights of each histogram a_i on the simplex (barycentric coordinates)
    verbose : bool, optional
        Print information along iterations
    log : bool, optional
        record log if True
    solver : string, optional
        the solver used, default 'interior-point' uses the lp solver from
        scipy.optimize. None, or 'glpk' or 'mosek' use the solver from cvxopt.

    Returns
    -------
    a : (d,) ndarray
        Wasserstein barycenter
    log : dict
        log dictionary returned only if log==True in parameters

    References
    ----------
    .. [16] Agueh, M., & Carlier, G. (2011). Barycenters in the Wasserstein
        space. SIAM Journal on Mathematical Analysis, 43(2), 904-924.
    """
    if weights is None:
        # Default: uniform weights over the n input distributions.
        weights = np.ones(A.shape[1]) / A.shape[1]
    else:
        assert (len(weights) == A.shape[1])
    n_distributions = A.shape[1]
    n = A.shape[0]
    n2 = n * n
    # Objective: one n*n transport plan per input distribution (cost M
    # scaled by its barycentric weight), plus n zero-cost variables for the
    # barycenter itself appended at the end.
    c = np.zeros((0))
    b_eq1 = np.zeros((0))
    for i in range(n_distributions):
        c = np.concatenate((c, M.ravel() * weights[i]))
        b_eq1 = np.concatenate((b_eq1, A[:, i]))
    c = np.concatenate((c, np.zeros(n)))
    lst_idiag1 = [sps.kron(sps.eye(n), np.ones((1, n))) for i in range(n_distributions)]
    # row constraints
    A_eq1 = sps.hstack((sps.block_diag(lst_idiag1), sps.coo_matrix((n_distributions * n, n))))
    # columns constraints
    lst_idiag2 = []
    lst_eye = []
    for i in range(n_distributions):
        if i == 0:
            lst_idiag2.append(sps.kron(np.ones((1, n)), sps.eye(n)))
            lst_eye.append(-sps.eye(n))
        else:
            # NOTE(review): subsequent blocks drop one row (shape (n-1, n)),
            # presumably to remove redundant equality constraints — confirm
            # against the reference formulation.
            lst_idiag2.append(sps.kron(np.ones((1, n)), sps.eye(n - 1, n)))
            lst_eye.append(-sps.eye(n - 1, n))
    A_eq2 = sps.hstack((sps.block_diag(lst_idiag2), sps.vstack(lst_eye)))
    b_eq2 = np.zeros((A_eq2.shape[0]))
    # full problem
    A_eq = sps.vstack((A_eq1, A_eq2))
    b_eq = np.concatenate((b_eq1, b_eq2))
    if not cvxopt or solver in ['interior-point']:
        # cvxopt not installed or interior point
        if solver is None:
            solver = 'interior-point'
        options = {'sparse': True, 'disp': verbose}
        sol = sp.optimize.linprog(c, A_eq=A_eq, b_eq=b_eq, method=solver, options=options)
        x = sol.x
        # The barycenter occupies the trailing n variables.
        b = x[-n:]
    else:
        # cvxopt path: explicit non-negativity constraints G x <= h.
        h = np.zeros((n_distributions * n2 + n))
        G = -sps.eye(n_distributions * n2 + n)
        sol = solvers.lp(matrix(c), scipy_sparse_to_spmatrix(G), matrix(h), A=scipy_sparse_to_spmatrix(A_eq), b=matrix(b_eq), solver=solver)
        x = np.array(sol['x'])
        b = x[-n:].ravel()
    if log:
        return b, sol
    else:
        return b
|
def traceroute6(target, dport=80, minttl=1, maxttl=30, sport=RandShort(), l4=None, timeout=2, verbose=None, **kargs):
    """Instant TCP traceroute using IPv6:

    traceroute6(target, [maxttl=30], [dport=80], [sport=80]) -> None
    """
    if verbose is None:
        verbose = conf.verb
    # One probe per hop limit in [minttl, maxttl].
    base = IPv6(dst=target, hlim=(minttl, maxttl))
    if l4 is None:
        payload = TCP(seq=RandInt(), sport=sport, dport=dport)
        answered, unanswered = sr(base / payload, timeout=timeout,
                                  filter="icmp6 or tcp", verbose=verbose, **kargs)
    else:
        answered, unanswered = sr(base / l4, timeout=timeout, verbose=verbose, **kargs)
    answered = TracerouteResult6(answered.res)
    if verbose:
        answered.display()
    return answered, unanswered
|
def load(self, value):
    """Enforce env > value when loading from file."""
    # Look the overrides up on the *instance* dict only (not the class),
    # treating a missing attribute as None.
    state = self.__dict__
    self.reset(value, validator=state.get('validator'), env=state.get('env'))
|
def run(self, *,  # Force keyword args.
        program: Union[circuits.Circuit, Schedule],
        job_config: Optional[JobConfig] = None,
        param_resolver: ParamResolver = ParamResolver({}),
        repetitions: int = 1,
        priority: int = 50,
        processor_ids: Sequence[str] = ('xmonsim',)) -> TrialResult:
    """Runs the supplied Circuit or Schedule via Quantum Engine.

    Args:
        program: The Circuit or Schedule to execute. If a circuit is
            provided, a moment by moment schedule will be used.
        job_config: Configures the names of programs and jobs.
        param_resolver: Parameters to run with the program.
        repetitions: The number of repetitions to simulate.
        priority: The priority to run at, 0-100.
        processor_ids: The engine processors to run against.

    Returns:
        A single TrialResult for this run.
    """
    # Delegate to run_sweep with a one-element parameter sweep and unwrap
    # the single TrialResult it produces.
    results = self.run_sweep(program=program,
                             job_config=job_config,
                             params=[param_resolver],
                             repetitions=repetitions,
                             priority=priority,
                             processor_ids=processor_ids)
    return list(results)[0]
|
def _from_java(cls, java_stage):
    """Given a Java OneVsRestModel, create and return a Python wrapper of it.

    Used for ML persistence.
    """
    # Pull the configuration off the Java stage first.
    features_col = java_stage.getFeaturesCol()
    label_col = java_stage.getLabelCol()
    prediction_col = java_stage.getPredictionCol()
    classifier = JavaParams._from_java(java_stage.getClassifier())
    models = [JavaParams._from_java(java_model) for java_model in java_stage.models()]
    # Build the Python wrapper and copy the configuration over.
    py_stage = (cls(models=models)
                .setPredictionCol(prediction_col)
                .setLabelCol(label_col)
                .setFeaturesCol(features_col)
                .setClassifier(classifier))
    # Keep the same uid as the Java stage for persistence round-trips.
    py_stage._resetUid(java_stage.uid())
    return py_stage
|
def rule(self):
    """The (partial) url rule for this route."""
    explicit = self._rule
    if explicit:
        return explicit
    # No explicit rule: generate one from the member params.
    return self._make_rule(member_param=self._member_param,
                           unique_member_param=self._unique_member_param)
|
def make_path(*path_or_str_or_segments):
    """Build a path from flexible inputs.

    :param path_or_str_or_segments: nothing (yielding the root path), a
        single ``Path`` or ``str``, or multiple segments that are joined
        together starting from the root.
    :return: the resulting path
    :rtype: cifparser.path.Path
    :raises ValueError: if a string cannot be parsed as a path
    :raises TypeError: if a single argument is neither a Path nor a str
    """
    if len(path_or_str_or_segments) == 0:
        return ROOT_PATH
    elif len(path_or_str_or_segments) == 1:
        single_item = path_or_str_or_segments[0]
        if isinstance(single_item, Path):
            return single_item
        if isinstance(single_item, str):
            try:
                return path_parser.parseString(single_item, True).asList()[0]
            except Exception as exc:
                # The original used a bare ``except:`` (which also swallowed
                # KeyboardInterrupt/SystemExit) and raised a message-less
                # ValueError; narrow the catch and keep the cause chained.
                raise ValueError("cannot parse path: {!r}".format(single_item)) from exc
        raise TypeError("expected Path or str, got {!r}".format(type(single_item).__name__))
    else:
        # Multiple arguments: treat each as a segment and join them.
        segments = path_or_str_or_segments
        return sum(map(lambda x: make_path(x), segments), ROOT_PATH)
|
def get_full_xml_representation(entity, private_key):
    """Get full XML representation of an entity.

    This contains the ``<XML><post>..</post></XML>`` wrapper.
    Accepts either a Base entity or a Diaspora entity.
    Author ``private_key`` must be given so that certain entities can be signed.
    """
    from federation.entities.diaspora.mappers import get_outbound_entity
    outbound = get_outbound_entity(entity, private_key)
    rendered = etree.tostring(outbound.to_xml()).decode("utf-8")
    return "<XML><post>%s</post></XML>" % rendered
|
def atomic_open_for_write(target, binary=False, newline=None, encoding=None):
    """Atomically open `target` for writing.

    This is based on Lektor's `atomic_open()` utility, but simplified a lot
    to handle only writing, and skip many multi-process/thread edge cases
    handled by Werkzeug.

    :param str target: Target filename to write
    :param bool binary: Whether to open in binary mode, default False
    :param str newline: The newline character to use when writing,
        determined from system if not supplied
    :param str encoding: The encoding to use when writing, defaults to
        system encoding

    How this works:

    * Create a temp file (in the same directory of the actual target), and
      yield for surrounding code to write to it.
    * If something goes wrong, try to remove the temp file. The actual
      target is not touched whatsoever.
    * If everything goes well, close the temp file, and replace the actual
      target with this new file.

    .. code:: python

        >>> fn = "test_file.txt"
        >>> def read_test_file(filename=fn):
        ...     with open(filename, 'r') as fh:
        ...         print(fh.read().strip())

        >>> with open(fn, "w") as fh:
        ...     fh.write("this is some test text")
        >>> read_test_file()
        this is some test text

        >>> def raise_exception_while_writing(filename):
        ...     with open(filename, "w") as fh:
        ...         fh.write("writing some new text")
        ...         raise RuntimeError("Uh oh, hope your file didn't get overwritten")
        >>> raise_exception_while_writing(fn)
        Traceback (most recent call last):
            RuntimeError: Uh oh, hope your file didn't get overwritten
        >>> read_test_file()
        writing some new text

        # Now try with vistir
        >>> def raise_exception_while_writing(filename):
        ...     with vistir.contextmanagers.atomic_open_for_write(filename) as fh:
        ...         fh.write("Overwriting all the text from before with even newer text")
        ...         raise RuntimeError("But did it get overwritten now?")
        >>> raise_exception_while_writing(fn)
        Traceback (most recent call last):
            RuntimeError: But did it get overwritten now?
        >>> read_test_file()
        writing some new text
    """
    # NOTE(review): this generator is presumably wrapped with
    # @contextmanager at its original definition site — confirm.
    mode = "w+b" if binary else "w"
    # delete=False so the file survives close() and can be renamed into place.
    f = NamedTemporaryFile(dir=os.path.dirname(target), prefix=".__atomic-write", mode=mode, encoding=encoding, newline=newline, delete=False, )
    # set permissions to 0644
    os.chmod(f.name, stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
    try:
        yield f
    except BaseException:
        # Any failure (including KeyboardInterrupt/SystemExit): discard the
        # temp file, leave the target untouched, and re-raise.
        f.close()
        try:
            os.remove(f.name)
        except OSError:
            pass
        raise
    else:
        f.close()
        try:
            os.remove(target)
            # This is needed on Windows.
        except OSError:
            pass
        # Move the fully-written temp file into place.
        os.rename(f.name, target)
|
def is_sw_writable(self):
    """Field is writable by software."""
    access = self.get_property('sw')
    # Any write-capable software access type counts.
    writable_types = (rdltypes.AccessType.rw, rdltypes.AccessType.rw1,
                      rdltypes.AccessType.w, rdltypes.AccessType.w1)
    return access in writable_types
|
def match(self, subject: Union[Expression, FlatTerm]) -> Iterator[Tuple[T, Substitution]]:
    """Match the given subject against all patterns in the net.

    Args:
        subject:
            The subject that is matched. Must be constant.

    Yields:
        A tuple :code:`(final label, substitution)`, where the first
        component is the final label associated with the pattern as given
        when using :meth:`add()` and the second one is the match
        substitution.
    """
    for index in self._match(subject):
        pattern, label = self._patterns[index]
        subst = Substitution()
        # Only yield when a substitution can be extracted AND every
        # constraint accepts it. The for/else runs the else branch exactly
        # when no constraint caused a break (i.e. all constraints passed).
        if subst.extract_substitution(subject, pattern.expression):
            for constraint in pattern.constraints:
                if not constraint(subst):
                    break
            else:
                yield label, subst
|
def store(self, name=None):
    """Get a cache store instance by name.

    :param name: The cache store name
    :type name: str

    :rtype: Repository
    """
    store_name = name if name is not None else self.get_default_driver()
    # Build the store and memoize it under its name (rebuilt on each call,
    # matching the original behavior).
    instance = self._get(store_name)
    self._stores[store_name] = instance
    return instance
|
def reduction(input_type, output_type):
    """Define a user-defined reduction function that takes N pandas Series
    or scalar values as inputs and produces one row of output.

    Parameters
    ----------
    input_type : List[ibis.expr.datatypes.DataType]
        A list of the types found in :mod:`~ibis.expr.datatypes`. The
        length of this list must match the number of arguments to the
        function. Variadic arguments are not yet supported.
    output_type : ibis.expr.datatypes.DataType
        The return type of the function.

    Examples
    --------
    >>> import ibis
    >>> import ibis.expr.datatypes as dt
    >>> from ibis.pandas.udf import udf
    >>> @udf.reduction(input_type=[dt.string], output_type=dt.int64)
    ... def my_string_length_agg(series, **kwargs):
    ...     return (series.str.len() * 2).sum()
    """
    # Delegate to the grouped-UDF factory; reductions produce scalars,
    # hence the 'scalar_type' output-type accessor.
    return udf._grouped(input_type, output_type, base_class=ops.Reduction, output_type_method=operator.attrgetter('scalar_type'), )
|
def search(self, read_cache=True, **kwparams):
    """Returns records corresponding to the given search query.

    See docstring of invenio.legacy.search_engine.perform_request_search()
    for an overview of available parameters.

    @raise InvenioConnectorAuthError: if authentication fails
    """
    parse_results = False
    of = kwparams.get('of', "")
    if of == "":
        # No output format requested: default to MARCXML and parse the
        # returned records.
        parse_results = True
        of = "xm"
        kwparams['of'] = of
    params = urllib.urlencode(kwparams, doseq=1)
    # Are we running locally? If so, better directly access the
    # search engine directly
    if self.local and of != 't':
        # See if user tries to search any restricted collection
        c = kwparams.get('c', "")
        if c != "":
            if type(c) is list:
                colls = c
            else:
                colls = [c]
            for collection in colls:
                if collection_restricted_p(collection):
                    if self.user:
                        self._check_credentials()
                        continue
                    raise InvenioConnectorAuthError("You are trying to search a restricted collection. Please authenticate yourself.\n")
        kwparams['of'] = 'id'
        results = perform_request_search(**kwparams)
        if of.lower() != 'id':
            results = format_records(results, of)
    else:
        # Remote search over HTTP, keyed by query params + parse flag in a
        # per-connector cache.
        if params + str(parse_results) not in self.cached_queries or not read_cache:
            if self.user:
                results = self.browser.open(self.server_url + "/search?" + params)
            else:
                results = urllib2.urlopen(self.server_url + "/search?" + params)
            if 'youraccount/login' in results.geturl():
                # Current user not able to search collection
                raise InvenioConnectorAuthError("You are trying to search a restricted collection. Please authenticate yourself.\n")
        else:
            return self.cached_queries[params + str(parse_results)]
    if parse_results:
        # FIXME: we should not try to parse if results is string
        parsed_records = self._parse_results(results, self.cached_records)
        self.cached_queries[params + str(parse_results)] = parsed_records
        return parsed_records
    else:
        # pylint: disable=E1103
        # The whole point of the following code is to make sure we can
        # handle two types of variable.
        try:
            res = results.read()
        except AttributeError:
            res = results
        # pylint: enable=E1103
        if of == "id":
            try:
                if type(res) is str:
                    # Transform to list
                    res = [int(recid.strip()) for recid in res.strip("[]").split(",") if recid.strip() != ""]
                res.reverse()
            except (ValueError, AttributeError):
                res = []
        self.cached_queries[params + str(parse_results)] = res
        return self.cached_queries[params + str(parse_results)]
|
def MIS(G, weights, maxiter=None):
    """Compute a maximal independent set of a graph in parallel.

    Parameters
    ----------
    G : csr_matrix
        Matrix graph, G[i, j] != 0 indicates an edge
    weights : ndarray
        Array of weights for each vertex in the graph G
    maxiter : int
        Maximum number of iterations (default: None)

    Returns
    -------
    mis : array
        Array of length of G of zeros/ones indicating the independent set

    Examples
    --------
    >>> from pyamg.gallery import poisson
    >>> from pyamg.classical import MIS
    >>> import numpy as np
    >>> G = poisson((7,), format='csr')  # 1D mesh with 7 vertices
    >>> w = np.ones((G.shape[0], 1)).ravel()
    >>> mis = MIS(G, w)

    See Also
    --------
    fn = amg_core.maximal_independent_set_parallel
    """
    if not isspmatrix_csr(G):
        raise TypeError('expected csr_matrix')
    G = remove_diagonal(G)
    # Every vertex starts unassigned (-1); the kernel marks set members
    # with 1 and excluded vertices with 0.
    mis = np.full(G.shape[0], -1, dtype='intc')
    fn = amg_core.maximal_independent_set_parallel
    if maxiter is None:
        # -1 means "iterate until converged" for the parallel kernel.
        iterations = -1
    elif maxiter < 0:
        raise ValueError('maxiter must be >= 0')
    else:
        iterations = maxiter
    fn(G.shape[0], G.indptr, G.indices, -1, 1, 0, mis, weights, iterations)
    return mis
|
def sendMessage(self, data):
    """Send websocket data frame to the client.

    If data is a unicode object then the frame is sent as Text.
    If the data is a bytearray object then the frame is sent as Binary.
    """
    # Text frames for unicode payloads, binary frames for everything else.
    opcode = TEXT if isinstance(data, unicode) else BINARY
    self._sendMessage(False, opcode, data)
|
def retrieve(self, request, _id):
    """Return the document containing the given _id, or a 404 response.

    Args:
        request: the incoming request (unused in the lookup itself).
        _id: serialized document id; deserialized before the query.

    Returns:
        Response with the serialized document, or a 404 Response wrapping
        a DocumentNotFoundError when no document matches.
    """
    _id = deserialize(_id)
    retrieved = self.collection.find_one({'_id': _id})
    if retrieved:
        return Response(serialize(retrieved))
    # 404 Not Found matches the documented contract for a missing document
    # (the previous code returned 400 Bad Request here).
    return Response(
        response=serialize(DocumentNotFoundError(self.collection.__name__, _id)),
        status=404,
    )
|
def simple_periodic_send(bus):
    """Sends a message every 200ms with no explicit timeout.

    Sleeps for 2 seconds then stops the task.

    (The period is 0.20s = 200ms, matching the printed message; an earlier
    docstring said 20ms.)
    """
    print("Starting to send a message every 200ms for 2s")
    msg = can.Message(arbitration_id=0x123, data=[1, 2, 3, 4, 5, 6], is_extended_id=False)
    # send_periodic returns a task handle that keeps transmitting until
    # stopped.
    task = bus.send_periodic(msg, 0.20)
    assert isinstance(task, can.CyclicSendTaskABC)
    time.sleep(2)
    task.stop()
    print("stopped cyclic send")
|
def read(self) -> None:
    """Call method |NetCDFFile.read| of all handled |NetCDFFile| objects."""
    # Flatten the folder -> file mapping and read each file in turn.
    all_files = (
        file_ for folder in self.folders.values() for file_ in folder.values())
    for netcdf_file in all_files:
        netcdf_file.read()
|
def from_locus_read(cls, locus_read, n_ref):
    """Given a single LocusRead object, return either an AlleleRead or None.

    Parameters
    ----------
    locus_read : LocusRead
        Read which overlaps a variant locus but doesn't necessarily contain the
        alternate nucleotides
    n_ref : int
        Number of reference positions we are expecting to be modified or
        deleted (for insertions this should be 0)
    """
    sequence = locus_read.sequence
    reference_positions = locus_read.reference_positions
    # positions of the nucleotides before and after the variant within
    # the read sequence
    read_pos_before = locus_read.base0_read_position_before_variant
    read_pos_after = locus_read.base0_read_position_after_variant
    # positions of the nucleotides before and after the variant on the
    # reference genome
    ref_pos_before = reference_positions[read_pos_before]
    if ref_pos_before is None:
        # logger.warn is a deprecated alias for logger.warning
        logger.warning(
            "Missing reference pos for nucleotide before variant on read: %s",
            locus_read)
        return None
    ref_pos_after = reference_positions[read_pos_after]
    if ref_pos_after is None:
        logger.warning(
            "Missing reference pos for nucleotide after variant on read: %s",
            locus_read)
        return None
    if n_ref == 0:
        # insertion: flanking reference positions must be adjacent
        if ref_pos_after - ref_pos_before != 1:
            # if the number of nucleotides skipped isn't the same
            # as the number of reference nucleotides in the variant then
            # don't use this read
            logger.debug(
                "Positions before (%d) and after (%d) variant should be adjacent on read %s",
                ref_pos_before, ref_pos_after, locus_read)
            return None
        # insertions require a sequence of non-aligned bases
        # followed by the subsequent reference position
        ref_positions_for_inserted = reference_positions[read_pos_before + 1:read_pos_after]
        if any(insert_pos is not None for insert_pos in ref_positions_for_inserted):
            # all these inserted nucleotides should *not* align to the
            # reference
            logger.debug("Skipping read, inserted nucleotides shouldn't map to reference")
            return None
    else:
        # substitutions and deletions
        if ref_pos_after - ref_pos_before != n_ref + 1:
            # if the number of nucleotides skipped isn't the same
            # as the number of reference nucleotides in the variant then
            # don't use this read
            logger.debug(
                ("Positions before (%d) and after (%d) variant should be "
                 "adjacent on read %s"),
                ref_pos_before, ref_pos_after, locus_read)
            return None
    nucleotides_at_variant_locus = sequence[read_pos_before + 1:read_pos_after]
    prefix = sequence[:read_pos_before + 1]
    suffix = sequence[read_pos_after:]
    prefix, suffix = convert_from_bytes_if_necessary(prefix, suffix)
    prefix, suffix = trim_N_nucleotides(prefix, suffix)
    return cls(prefix, nucleotides_at_variant_locus, suffix, name=locus_read.name)
|
def plot(self, skip_start: int = 10, skip_end: int = 5, suggestion: bool = False,
         return_fig: bool = None, **kwargs) -> Optional[plt.Figure]:
    "Plot learning rate and losses, trimmed between `skip_start` and `skip_end`. Optionally plot and return min gradient"
    lrs = self._split_list(self.lrs, skip_start, skip_end)
    losses = self._split_list(self.losses, skip_start, skip_end)
    losses = [x.item() for x in losses]
    if 'k' in kwargs:
        losses = self.smoothen_by_spline(lrs, losses, **kwargs)
    fig, ax = plt.subplots(1, 1)
    ax.plot(lrs, losses)
    ax.set_ylabel("Loss")
    ax.set_xlabel("Learning Rate")
    ax.set_xscale('log')
    ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e'))
    if suggestion:
        try:
            mg = (np.gradient(np.array(losses))).argmin()
        except Exception:
            # A bare ``except:`` would also swallow KeyboardInterrupt and
            # SystemExit; catching Exception keeps the process interruptible.
            print("Failed to compute the gradients, there might not be enough points.")
            return
        print(f"Min numerical gradient: {lrs[mg]:.2E}")
        ax.plot(lrs[mg], losses[mg], markersize=10, marker='o', color='red')
        self.min_grad_lr = lrs[mg]
    if ifnone(return_fig, defaults.return_fig):
        return fig
    if not IN_NOTEBOOK:
        plot_sixel(fig)
|
def require(*requirements, **kwargs):
    """Decorator that verifies requirements before calling the function.

    :param requirements: List of requirements that should be verified
    :param none_on_failure: If true, does not raise a
        PrerequisiteFailedError, but instead returns None
    """
    # ``require(*requirements, none_on_failure=False)`` is not valid
    # Python 2 syntax, hence the **kwargs workaround.
    none_on_failure = kwargs.get('none_on_failure', False)

    def decorator(func):
        @functools.wraps(func)
        def checked(*args, **inner_kwargs):
            for requirement in requirements:
                if none_on_failure:
                    if not getattr(requirement, 'is_available'):
                        return None
                else:
                    getattr(requirement, 'require')()
            return func(*args, **inner_kwargs)
        return checked
    return decorator
|
def netconf_config_change_changed_by_server_or_user_server_server(self, **kwargs):
    """Auto Generated Code"""
    # Build config/netconf-config-change/changed-by/server-or-user/server/server
    config = ET.Element("config")
    change = ET.SubElement(
        config, "netconf-config-change",
        xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-notifications")
    changed_by = ET.SubElement(change, "changed-by")
    chooser = ET.SubElement(changed_by, "server-or-user")
    outer_server = ET.SubElement(chooser, "server")
    ET.SubElement(outer_server, "server")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def dataset_upload_file(self, path, quiet):
    """Upload a single dataset file.

    Parameters
    ----------
    path : the complete path to upload
    quiet : suppress verbose output (default is False)

    Returns the upload token on success, otherwise None.
    """
    name = os.path.basename(path)
    size = os.path.getsize(path)
    modified_utc = int(os.path.getmtime(path))
    upload_info = FileUploadInfo(
        self.process_response(
            self.datasets_upload_file_with_http_info(name, size, modified_utc)))
    if self.upload_complete(path, upload_info.createUrl, quiet):
        return upload_info.token
    return None
|
def get_array(self):
    """Return a numpy array of values from start (inclusive) to stop
    (exclusive) in step steps.

    Returns
    -------
    array : ndarray
        Values from ``self.slice.start`` up to (but excluding)
        ``self.slice.stop`` in increments of ``self.slice.step``.
    """
    sl = self.slice
    return _np.arange(sl.start, sl.stop, sl.step)
|
def process_waypoint_request(self, m, master):
    '''Process a waypoint request from the master.

    :param m: the waypoint-request MAVLink message (carries ``seq``)
    :param master: the requesting connection
        (NOTE(review): unused -- the reply is sent on ``self.master``;
        confirm whether ``master`` should be used instead)
    '''
    # Ignore stale requests: either no upload is in progress, or the
    # previous request was more than 10 s ago (upload presumed dead).
    if (not self.loading_waypoints or time.time() > self.loading_waypoint_lasttime + 10.0):
        self.loading_waypoints = False
        self.console.error("not loading waypoints")
        return
    # Reject out-of-range sequence numbers.
    if m.seq >= self.wploader.count():
        self.console.error("Request for bad waypoint %u (max %u)" % (m.seq, self.wploader.count()))
        return
    # Stamp the target system/component on the waypoint before sending;
    # wploader.wp(m.seq) returns the same object that was just mutated.
    wp = self.wploader.wp(m.seq)
    wp.target_system = self.target_system
    wp.target_component = self.target_component
    self.master.mav.send(self.wploader.wp(m.seq))
    self.loading_waypoint_lasttime = time.time()
    self.console.writeln("Sent waypoint %u : %s" % (m.seq, self.wploader.wp(m.seq)))
    # Last waypoint served -- the upload is complete.
    if m.seq == self.wploader.count() - 1:
        self.loading_waypoints = False
        self.console.writeln("Sent all %u waypoints" % self.wploader.count())
|
def options(self, section):
    """Return a list of option names for the given section name."""
    try:
        # Copy so that merging in the defaults does not mutate the section.
        names = self._sections[section].copy()
    except KeyError:
        raise from_none(NoSectionError(section))
    names.update(self._defaults)
    return list(names)
|
def debug_consec_list(list_):
    """Find gaps and duplicates in a list of consecutive integers.

    Returns:
        tuple of (missing_items, missing_indices, duplicate_items)
    """
    if not issorted(list_):
        print('warning list is not sorted. indices will not match')
    ordered = sorted(list_)
    previous = ordered[0] - 1
    missing_vals = []
    missing_indices = []
    duplicate_items = []
    for index, value in enumerate(ordered):
        gap = value - previous
        if gap > 1:
            # Record the position of the gap and every skipped value.
            missing_indices.append(index)
            missing_vals.extend(range(previous + 1, previous + gap))
        elif gap == 0:
            duplicate_items.append(value)
        elif gap < 0:
            raise AssertionError('We sorted the list. diff can not be negative')
        # gap == 1 is the expected consecutive case: nothing to record.
        previous = value
    return missing_vals, missing_indices, duplicate_items
|
def get_files_with_extensions(folder, extensions):
    """Walk *folder* and return files whose extension is listed.

    Note: directories are walked recursively.

    :param folder: root directory to search
    :param extensions: iterable of extensions WITHOUT the leading dot
    :return: list of matching paths (joined with their directory)
    """
    matches = []
    for root, _dirs, files in os.walk(folder):
        for name in files:
            extension = os.path.splitext(name)[1].replace(".", "")
            if extension in extensions:
                matches.append(os.path.join(root, name))
    return matches
|
def push_repository(self, repository, docker_executable='docker', shutit_pexpect_child=None, expect=None, note=None, loglevel=logging.INFO):
    """Pushes the repository.

    @param repository:           Repository to push.
    @param docker_executable:    Defaults to 'docker'
    @param expect:               See send()
    @param shutit_pexpect_child: See send()

    @type repository:            string
    @type docker_executable:     string
    """
    shutit_global.shutit_global_object.yield_to_draw()
    self.handle_note(note)
    # Default to the host session and the origin-environment prompt when
    # the caller did not supply a pexpect child / expect pattern.
    shutit_pexpect_child = shutit_pexpect_child or self.get_shutit_pexpect_session_from_id('host_child').pexpect_child
    expect = expect or self.expect_prompts['ORIGIN_ENV']
    send = docker_executable + ' push ' + self.repository['user'] + '/' + repository
    # Pushes can be very slow; effectively disable the timeout.
    timeout = 99999
    self.log('Running: ' + send, level=logging.INFO)
    # Log in first, answering docker's interactive username/password/email prompts.
    self.multisend(docker_executable + ' login', {'Username': self.repository['user'], 'Password': self.repository['password'], 'Email': self.repository['email']}, shutit_pexpect_child=shutit_pexpect_child, expect=expect)
    self.send(send, shutit_pexpect_child=shutit_pexpect_child, expect=expect, timeout=timeout, check_exit=False, fail_on_empty_before=False, loglevel=loglevel)
    self.handle_note_after(note)
    return True
|
def _multiline_convert ( config , start = "banner login" , end = "EOF" , depth = 1 ) :
"""Converts running - config HEREDOC into EAPI JSON dict"""
|
ret = list ( config )
# Don ' t modify list in - place
try :
s = ret . index ( start )
e = s
while depth :
e = ret . index ( end , e + 1 )
depth = depth - 1
except ValueError : # Couldn ' t find end , abort
return ret
ret [ s ] = { "cmd" : ret [ s ] , "input" : "\n" . join ( ret [ s + 1 : e ] ) }
del ret [ s + 1 : e + 1 ]
return ret
|
def setup_recovery(working_dir):
    """Set up the recovery metadata so we can fully recover secondary state,
    like subdomains.

    :param working_dir: chainstate working directory
    :return: True on success
    :raises RuntimeError: if bitcoind is unreachable or no index range
        can be determined
    """
    db = get_db_state(working_dir)
    bitcoind_session = get_bitcoind(new=True)
    # ``assert`` is stripped under ``python -O``; raise explicitly so
    # these connectivity checks always run.
    if bitcoind_session is None:
        raise RuntimeError('Failed to connect to bitcoind')
    _, current_block = virtualchain.get_index_range(
        'bitcoin', bitcoind_session, virtualchain_hooks, working_dir)
    if not current_block:
        raise RuntimeError('Failed to connect to bitcoind')
    set_recovery_range(working_dir, db.lastblock, current_block - NUM_CONFIRMATIONS)
    return True
|
def get_version(self, as_tuple=False):
    """Return the uWSGI version string or tuple.

    :param bool as_tuple:
    :rtype: str|tuple
    """
    return uwsgi.version_info if as_tuple else decode(uwsgi.version)
|
def attributes_in_restriction(self):
    """:return: set of attribute names that are probably used in the restriction.

    The function errs on the side of false positives.
    For example, if the restriction is "val='id'", then the attribute 'id'
    would be flagged. This is used internally for optimizing SQL statements.
    """
    # re.escape guards against attribute names containing regex
    # metacharacters, which would otherwise corrupt the pattern.
    return set(
        name for name in self.heading.names
        if re.search(r'\b' + re.escape(name) + r'\b', self.where_clause))
|
def _process_response(self, msg):
    '''Handle an RTSP response message from the server.

    Parses the status line, headers and body, matches the response to its
    pending request via the CSeq header, and advances the client state
    machine (DESCRIBE -> SETUP -> PLAY).
    '''
    status, headers, body = self._parse_response(msg)
    rsp_cseq = int(headers['cseq'])
    # GET_PARAMETER replies are heart-beats; don't echo those to the console.
    if self._cseq_map[rsp_cseq] != 'GET_PARAMETER':
        PRINT(self._get_time_str() + '\n' + msg)
    if status == 302:
        # Redirect: remember the new server location.
        self.location = headers['location']
    if status != 200:
        # Any non-OK status aborts the session.
        self.do_teardown()
    if self._cseq_map[rsp_cseq] == 'DESCRIBE':
        track_id_str = self._parse_track_id(body)
        self.do_setup(track_id_str)
    elif self._cseq_map[rsp_cseq] == 'SETUP':
        self._session_id = headers['session']
        self.do_play(CUR_RANGE, CUR_SCALE)
        self.send_heart_beat_msg()
    elif self._cseq_map[rsp_cseq] == 'PLAY':
        self.playing = True
|
def get_namespace_by_keyword_pattern(self, keyword: str, pattern: str) -> Optional[Namespace]:
    """Get a namespace with a given keyword and pattern."""
    # Chained filters combine with AND, equivalent to the and_() form.
    query = self.session.query(Namespace)
    query = query.filter(Namespace.keyword == keyword)
    query = query.filter(Namespace.pattern == pattern)
    return query.one_or_none()
|
def is_choked_turbulent_l(dP, P1, Psat, FF, FL=None, FLP=None, FP=None):
    r'''Calculates if a liquid flow in IEC 60534 calculations is critical or
    not, for use in IEC 60534 liquid valve sizing calculations.
    Either FL may be provided or FLP and FP, depending on the calculation
    process.

    .. math::
        \Delta P > F_L^2(P_1 - F_F P_{sat})

    .. math::
        \Delta P >= \left(\frac{F_{LP}}{F_P}\right)^2(P_1 - F_F P_{sat})

    Parameters
    ----------
    dP : float
        Differential pressure across the valve, with reducer/expanders [Pa]
    P1 : float
        Pressure of the fluid before the valve and reducers/expanders [Pa]
    Psat : float
        Saturation pressure of the fluid at inlet temperature [Pa]
    FF : float
        Liquid critical pressure ratio factor [-]
    FL : float, optional
        Liquid pressure recovery factor of a control valve without attached fittings [-]
    FLP : float, optional
        Combined liquid pressure recovery factor with piping geometry factor,
        for a control valve with attached fittings [-]
    FP : float, optional
        Piping geometry factor [-]

    Returns
    -------
    choked : bool
        Whether or not the flow is choked [-]

    Examples
    --------
    >>> is_choked_turbulent_l(460.0, 680.0, 70.1, 0.94, 0.9)
    False
    >>> is_choked_turbulent_l(460.0, 680.0, 70.1, 0.94, 0.6)
    True

    References
    ----------
    .. [1] IEC 60534-2-1 / ISA-75.01.01-2007
    '''
    # Explicit None checks: truthiness tests would wrongly treat a
    # (non-physical but representable) value of 0.0 as "not provided".
    if FLP is not None and FP is not None:
        return dP >= (FLP / FP)**2*(P1 - FF*Psat)
    elif FL is not None:
        return dP >= FL**2*(P1 - FF*Psat)
    else:
        # ValueError (a subclass of Exception) is the conventional type
        # for a bad argument combination; existing broad handlers still work.
        raise ValueError('Either (FLP and FP) or FL is needed')
|
def align_seqs(found_seqs, sequence, locus, start_pos, missing, annotated, cutoff=0.90, verbose=False, verbosity=0):
    """align_seqs - Aligns sequences with clustalo.

    :param found_seqs: List of the reference sequences
    :type found_seqs: ``List``
    :param sequence: The input consensus sequence.
    :type sequence: SeqRecord
    :param locus: The gene locus associated with the sequence.
    :type locus: ``str``
    :param annotated: dictionary of the annotated features
    :type annotated: ``dict``
    :param start_pos: Where the reference sequence starts
    :type start_pos: ``int``
    :param missing: List of the unmapped features
    :type missing: ``List``
    :param cutoff: The alignment cutoff
    :type cutoff: ``float``
    :param verbose: Flag for running in verbose mode.
    :type verbose: ``bool``
    :param verbosity: Numerical value to indicate how verbose the output will be in verbose mode.
    :type verbosity: ``int``
    :rtype: :ref:`ann`
    """
    logger = logging.getLogger("Logger." + __name__)
    seqs = [found_seqs, sequence]
    if verbose and verbosity > 0:
        logger.info("found_seqs length = " + str(len(found_seqs)))
        logger.info("sequence length = " + str(len(sequence)))
    # NOTE(review): this rebuild makes the assignment above redundant --
    # seqs ends up identical either way; confirm before removing.
    seqs = []
    seqs.append(found_seqs)
    seqs.append(sequence)
    align = []
    # piping to clustalo failed
    # when sequences were over ~7k bp
    if len(sequence) > 7000:
        # Writing sequences out to fasta files..
        if verbose:
            logger.info("Sequence too large to use pipe")
        # Random id keeps concurrent runs from clobbering each other's files.
        randid = randomid()
        input_fasta = str(randid) + ".fasta"
        output_clu = str(randid) + ".clu"
        SeqIO.write(seqs, input_fasta, "fasta")
        clustalomega_cline = ClustalOmegaCommandline(infile=input_fasta, outfile=output_clu, outfmt='clu', wrap=20000, verbose=True, auto=True)
        stdout, stderr = clustalomega_cline()
        aligns = AlignIO.read(output_clu, "clustal")
        for aln in aligns:
            align.append(str(aln.seq))
        # Delete files
        cleanup(randid)
    else:
        # Running clustalo by piping in sequences
        indata = flatten([[">" + str(s.id), str(s.seq)] for s in seqs])
        child = Popen(['clustalo', '--outfmt', 'clu', '--wrap=50000', '--auto', '-i', '-'], stdout=PIPE, stderr=STDOUT, stdin=PIPE)
        stdout = child.communicate(input=str.encode("\n".join(indata)))
        child.wait()
        lines = bytes.decode(stdout[0]).split("\n")
        for line in lines:
            # Keep only alignment rows: non-empty lines that are not the
            # "CLUSTAL" header; each row is "<name> <aligned-sequence>".
            if re.search("\w", line) and not re.search("CLUSTAL", line):
                alignment = re.findall(r"[\S']+", line)
                if len(alignment) == 2:
                    align.append(list(alignment[1]))
        child.terminate()
    # Print out what blocks haven't been annotated
    if verbose and len(align) > 0:
        logger.info("* ClustalOmega alignment succeeded *")
    insers, dels = 0, 0
    all_features = []
    # Two aligned rows means a single reference sequence plus the input.
    if len(align) - 2 == 0:
        infeats = get_seqfeat(seqs[0])
        diffs = count_diffs(align, infeats, sequence, locus, cutoff, verbose, verbosity)
        if isinstance(diffs, Annotation):
            # count_diffs produced a complete annotation directly.
            if verbose:
                logger.info("Run alignment with " + found_seqs.id)
                logger.info("***********************")
            return diffs, 0, 0
        else:
            insers, dels = diffs[0], diffs[1]
            f = find_features(infeats, align[0], annotated, start_pos, cutoff)
            all_features.append(f)
    else:
        # Multiple reference sequences: collect features from each row.
        for i in range(0, len(align) - 2):
            infeats = get_seqfeat(seqs[i])
            f = find_features(infeats, align[i], annotated, start_pos, cutoff)
            all_features.append(f)
    if len(all_features) > 0:
        if verbose:
            logger.info("-- Resolving features -- ")
            for f in all_features[0]:
                logger.info("Resolving -> " + f)
        annotation = resolve_feats(all_features, align[len(align) - 1], align[0], start_pos, locus, missing, verbose, verbosity)
        if verbose:
            logger.info("Run alignment with " + found_seqs.id)
            logger.info("Missing features = " + ",".join(list(missing.keys())))
            logger.info("Number of features found = " + str(len(all_features)))
            logger.info("Features found = " + ",".join(list(all_features[0].keys())))
            logger.info("Features annotated = " + ",".join(list(annotation.annotation.keys())))
            logger.info("***********************")
        return annotation, insers, dels
    else:
        if verbose:
            logger.info("***********************")
        return Annotation(complete_annotation=False), 0, 0
|
def ParseAccountInformation(self, parser_mediator, query, row, **unused_kwargs):
    """Parses account information.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row with account information.
    """
    query_hash = hash(query)
    display_name = self._GetRowValue(query_hash, row, 'given_displayname')
    fullname = self._GetRowValue(query_hash, row, 'fullname')
    # TODO: Move this to the formatter, and ensure username is rendered
    # properly when fullname and/or display_name is None.
    username = '{0!s} <{1!s}>'.format(fullname, display_name)
    event_data = SkypeAccountEventData()
    event_data.country = self._GetRowValue(query_hash, row, 'country')
    event_data.display_name = display_name
    event_data.email = self._GetRowValue(query_hash, row, 'emails')
    event_data.offset = self._GetRowValue(query_hash, row, 'id')
    event_data.query = query
    event_data.username = username
    # Each timestamp column maps to its own dated event; iterating the
    # pairs replaces six copies of identical extraction logic.
    for column_name, description in (
            ('profile_timestamp', 'Profile Changed'),
            ('authreq_timestamp', 'Authenticate Request'),
            ('lastonline_timestamp', 'Last Online'),
            ('mood_timestamp', 'Mood Event'),
            ('sent_authrequest_time', 'Auth Request Sent'),
            ('lastused_timestamp', 'Last Used')):
        timestamp = self._GetRowValue(query_hash, row, column_name)
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time, description)
            parser_mediator.ProduceEventWithEventData(event, event_data)
|
def _config_win32_domain(self, domain):
    """Configure a Domain registry entry.

    :param domain: domain name read from the Windows registry; may be a
        unicode string.
    """
    # we call str() on domain to convert it from unicode to ascii
    self.domain = dns.name.from_text(str(domain))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.