signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def __run(self):
    """The main loop executed by each worker thread.

    Pulls tasks from the shared queue until the stop event is set (or the
    stop sentinel is received), executes them, and shrinks the pool when
    there are more idle threads than queued work.
    """
    # Set when this thread has already decremented __nb_threads itself,
    # so the finally block must not decrement it a second time.
    already_cleaned = False
    try:
        while not self._done_event.is_set():
            try:
                # Wait for an action (blocking, with a timeout so the loop
                # periodically re-checks _done_event)
                task = self._queue.get(True, self._timeout)
                if task is self._done_event:
                    # Stop event in the queue: get out
                    self._queue.task_done()
                    return
            except queue.Empty:
                # Nothing to do yet
                pass
            else:
                with self.__lock:
                    self.__nb_active_threads += 1
                # Extract elements
                method, args, kwargs, future = task
                try:
                    # Call the method; the future captures result/exception
                    future.execute(method, args, kwargs)
                except Exception as ex:
                    self._logger.exception("Error executing %s: %s", method.__name__, ex)
                finally:
                    # Mark the action as executed
                    self._queue.task_done()
                    # Thread is not active anymore
                    with self.__lock:
                        self.__nb_pending_task -= 1
                        self.__nb_active_threads -= 1
            # Clean up thread if necessary
            with self.__lock:
                extra_threads = self.__nb_threads - self.__nb_active_threads
                if (self.__nb_threads > self._min_threads and extra_threads > self._queue.qsize()):
                    # No more work for this thread:
                    # if there are more non-active threads than tasks
                    # and we're above the minimum number of threads,
                    # stop this one
                    self.__nb_threads -= 1
                    # To avoid a race condition: decrease the number of
                    # threads here and mark it as already accounted for
                    already_cleaned = True
                    return
    finally:
        # Always clean up
        with self.__lock:
            # Thread stops: clean up references
            try:
                self._threads.remove(threading.current_thread())
            except ValueError:
                pass
            if not already_cleaned:
                self.__nb_threads -= 1
|
def flush(self):
    """Flush the underlying stream, ignoring errors from closed files.

    The handler lock is held for the duration of the flush. EnvironmentError
    (I/O failures) and ValueError (flushing an already-closed file) are
    deliberately swallowed so flushing never crashes the logging machinery.
    """
    self.acquire()
    try:
        try:
            self.stream.flush()
        except (EnvironmentError, ValueError):
            # A ValueError is thrown if we try to flush a closed file.
            pass
    finally:
        self.release()
|
def encoding(self):
    """The character encoding of the request, usually only set in POST
    type requests.

    Reads the charset parameter of the first media type in the
    Content-Type header; returns None when no charset can be determined.
    """
    ct = self.get_header('content-type')
    if not ct:
        return None
    accept = AcceptHeader(ct)
    if not accept.media_types:
        return None
    return accept.media_types[0][2].get("charset", None)
|
def YieldFromList(service, request, global_params=None, limit=None, batch_size=100,
                  method='List', field='items', predicate=None,
                  current_token_attribute='pageToken',
                  next_token_attribute='nextPageToken',
                  batch_size_attribute='maxResults'):
    """Make a series of List requests, keeping track of page tokens.

    Args:
        service: apitools_base.BaseApiService, A service with a .List() method.
        request: protorpc.messages.Message, The request message
            corresponding to the service's .List() method, with all the
            attributes populated except the .maxResults and .pageToken
            attributes.
        global_params: protorpc.messages.Message, The global query parameters to
            provide when calling the given method.
        limit: int, The maximum number of records to yield. None if all available
            records should be yielded.
        batch_size: int, The number of items to retrieve per request.
        method: str, The name of the method used to fetch resources.
        field: str, The field in the response that will be a list of items.
        predicate: lambda, A function that returns true for items to be yielded.
        current_token_attribute: str, The name of the attribute in a
            request message holding the page token for the page being
            requested.
        next_token_attribute: str, The name of the attribute in a
            response message holding the page token for the next page.
        batch_size_attribute: str, The name of the attribute in a
            response message holding the maximum number of results to be
            returned. None if caller-specified batch size is unsupported.

    Yields:
        protorpc.message.Message, The resources listed by the service.
    """
    # Work on a copy so the caller's request message is never mutated.
    request = encoding.CopyProtoMessage(request)
    setattr(request, current_token_attribute, None)
    # limit counts down to 0; None means "no limit".
    while limit is None or limit:
        if batch_size_attribute:
            # On Py3, None is not comparable so min() below will fail.
            # On Py2, None is always less than any number so if batch_size
            # is None, the request_batch_size will always be None regardless
            # of the value of limit. This doesn't generally strike me as the
            # correct behavior, but this change preserves the existing Py2
            # behavior on Py3.
            if batch_size is None:
                request_batch_size = None
            else:
                # Never request more than the remaining limit in one page.
                request_batch_size = min(batch_size, limit or batch_size)
            setattr(request, batch_size_attribute, request_batch_size)
        response = getattr(service, method)(request, global_params=global_params)
        items = getattr(response, field)
        if predicate:
            items = list(filter(predicate, items))
        for item in items:
            yield item
            if limit is None:
                continue
            limit -= 1
            if not limit:
                return
        token = getattr(response, next_token_attribute)
        if not token:
            # No next page: everything has been yielded.
            return
        setattr(request, current_token_attribute, token)
|
def basis_function_one(degree, knot_vector, span, knot):
    """Computes the value of a basis function for a single parameter.

    Implementation of Algorithm 2.4 from The NURBS Book by Piegl & Tiller.

    :param degree: degree, :math:`p`
    :type degree: int
    :param knot_vector: knot vector
    :type knot_vector: list, tuple
    :param span: knot span, :math:`i`
    :type span: int
    :param knot: knot or parameter, :math:`u`
    :type knot: float
    :return: basis function, :math:`N_{i,p}`
    :rtype: float
    """
    num_knots = len(knot_vector)
    # Special cases at the domain boundaries
    at_start = span == 0 and knot == knot_vector[0]
    at_end = span == num_knots - degree - 2 and knot == knot_vector[num_knots - 1]
    if at_start or at_end:
        return 1.0
    # Parameter outside this span's support: the basis function vanishes
    if knot < knot_vector[span] or knot >= knot_vector[span + degree + 1]:
        return 0.0
    # Zeroth-degree basis functions (step functions on the knot spans)
    N = [0.0] * (degree + span + 1)
    for j in range(degree + 1):
        if knot_vector[span + j] <= knot < knot_vector[span + j + 1]:
            N[j] = 1.0
    # Compute the triangular table of basis functions; explicit zero
    # detection avoids needless divisions
    for k in range(1, degree + 1):
        if N[0] != 0.0:
            saved = ((knot - knot_vector[span]) * N[0]) / (knot_vector[span + k] - knot_vector[span])
        else:
            saved = 0.0
        for j in range(degree - k + 1):
            left = knot_vector[span + j + 1]
            right = knot_vector[span + j + k + 1]
            if N[j + 1] == 0.0:
                N[j], saved = saved, 0.0
            else:
                temp = N[j + 1] / (right - left)
                N[j] = saved + (right - knot) * temp
                saved = (knot - left) * temp
    return N[0]
|
def clicked(self):
    """Selected item was double-clicked or enter/return was pressed.

    Directories are entered in place; any other selection is opened.
    """
    for fname in self.get_selected_filenames():
        if osp.isdir(fname):
            self.directory_clicked(fname)
        else:
            self.open([fname])
|
def is_valid_request(self, request, parameters=None, fake_method=None, handle_error=True):
    '''Validates an OAuth request using the python-oauth2 library:
    https://github.com/simplegeo/python-oauth2

    :param request: the incoming request to validate
    :param parameters: optional OAuth parameters; falls back to
        ``self.params`` when empty/None
    :param fake_method: optional HTTP method override passed through to
        ``parse_request``
    :param handle_error: when True, a missing signature returns False
        instead of raising
    :return: True when the request signature verifies; False on a missing
        signature when ``handle_error`` is set
    :raises oauth2.MissingSignature: when the signature is missing and
        ``handle_error`` is False
    '''
    # The default used to be a mutable ``{}``; ``None`` avoids the shared
    # mutable-default pitfall while behaving identically (both are falsy).
    if parameters is None:
        parameters = {}
    try:
        # Set the parameters to be what we were passed earlier
        # if we didn't get any passed to us now
        if not parameters and hasattr(self, 'params'):
            parameters = self.params
        method, url, headers, parameters = self.parse_request(request, parameters, fake_method)
        oauth_request = oauth2.Request.from_request(method, url, headers=headers, parameters=parameters)
        self.oauth_server.verify_request(oauth_request, self.oauth_consumer, {})
    # BUGFIX: ``except oauth2.MissingSignature, e`` is Python-2-only syntax
    # and a SyntaxError on Python 3.
    except oauth2.MissingSignature:
        if handle_error:
            return False
        # Bare raise re-raises without resetting the traceback
        # (``raise e`` discarded the original traceback on Python 2).
        raise
    # Signature was valid
    return True
|
def decrypt(self):
    """Decrypt decrypts the secret and returns the plaintext.

    Calling decrypt() may incur side effects such as a call to a remote
    service for decryption. Returns ``b''`` when no crypter is configured;
    any decryption failure is re-raised as a ValueError that carries the
    original traceback.
    """
    if not self._crypter:
        return b''
    try:
        return self._crypter.decrypt(self._ciphertext, **self._decrypt_params)
    except Exception as e:
        exc_info = sys.exc_info()
        wrapped = ValueError('Invalid ciphertext "%s", error: %s' % (self._ciphertext, e))
        six.reraise(wrapped, None, exc_info[2])
|
def define_attribute(self, name, atype, data=None):
    """Define a new attribute.

    atype has to be one of 'integer', 'real', 'numeric', 'string', 'date'
    or 'nominal'. For nominal attributes, pass the possible values as data.
    For date attributes, pass the format as data.

    :param name: attribute name
    :param atype: attribute type; must be a member of TYPES
    :param data: type-specific payload (nominal values or date format)
    :raises AssertionError: if atype is not a known type
    """
    # BUGFIX: validate *before* mutating -- the original appended to
    # self.attributes first, so a bad atype left a half-registered
    # attribute behind when the assertion fired.
    assert atype in TYPES, "Unknown type '%s'. Must be one of: %s" % (atype, ', '.join(TYPES),)
    self.attributes.append(name)
    self.attribute_types[name] = atype
    self.attribute_data[name] = data
|
def _launch_editor(starting_text=''):
    "Launch editor, let user write text, then return that text."
    # TODO: What is a reasonable default for windows? Does this approach even
    # make sense on windows?
    editor = os.environ.get('EDITOR', 'vim')
    with tempfile.TemporaryDirectory() as dirname:
        metadata_path = pathlib.Path(dirname) / 'metadata.yml'
        # Seed the file so the user edits starting_text, not an empty buffer
        metadata_path.write_text(starting_text)
        subprocess.call([editor, metadata_path])
        return metadata_path.read_text()
|
def get_profile_data(self, auth_response):
    """Retrieve profile data from provider.

    :param auth_response: dict returned by the OAuth exchange, expected to
        contain 'access_token' and a 'user' object with an 'id'
    :return: normalized profile dict (provider, email, id, token)
    :raises x.UserException: if the response carries no user id
    """
    token = auth_response.get('access_token')
    me = auth_response.get('user')
    # BUGFIX: also guard against a missing/None 'user' object -- the
    # original crashed with AttributeError on ``me.get('id')``.
    if not me or not me.get('id'):
        raise x.UserException('Instagram must return a user id')
    return dict(
        provider=self.provider,
        email=None,  # Instagram does not provide an email address here
        id=me.get('id'),
        token=token,
    )
|
def delete_user(self, id, **kwargs):  # noqa: E501
    """Deletes a user identified by id  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_user(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand back the request thread immediately.
        return self.delete_user_with_http_info(id, **kwargs)  # noqa: E501
    # Synchronous: unwrap and return the response data.
    (data) = self.delete_user_with_http_info(id, **kwargs)  # noqa: E501
    return data
|
def subset(self, subtxns):
    """Construct a new Positions object from the new Txns object (which is
    assumed to be a subset) of the current Txns object.
    """
    result = Positions(subtxns)
    if not hasattr(self, '_frame'):
        return result
    # NOTE(review): DataFrame.ix is deprecated/removed in modern pandas;
    # .loc would be the label-based replacement -- confirm pid index type.
    sub_frame = self._frame.ix[subtxns.pids]
    # Passing in an array results in the index name being removed for some
    # reason??? -- restore it explicitly.
    if sub_frame.index.name != self._frame.index.name:
        sub_frame.index.name = self._frame.index.name
    result._frame = sub_frame
    return result
|
def from_start_and_end(cls, start, end, aa=None, major_pitch=225.8, major_radius=5.07,
                       major_handedness='l', minor_helix_type='alpha', orientation=1,
                       phi_c_alpha=0.0, minor_repeat=None):
    """Creates a `HelicalHelix` between a `start` and `end` point.

    When ``aa`` is omitted, the residue count is derived from the distance
    between the two points and the minor helix type's rise per residue.
    """
    start, end = numpy.array(start), numpy.array(end)
    if aa is None:
        rise_per_residue = _helix_parameters[minor_helix_type][1]
        aa = int((numpy.linalg.norm(end - start) / rise_per_residue) + 1)
    helix = cls(aa=aa, major_pitch=major_pitch, major_radius=major_radius,
                major_handedness=major_handedness, minor_helix_type=minor_helix_type,
                orientation=orientation, phi_c_alpha=phi_c_alpha,
                minor_repeat=minor_repeat)
    helix.move_to(start=start, end=end)
    return helix
|
def list_known_codes(s, unique=True, rgb_mode=False):
    """Find and print all known escape codes in a string,
    using get_known_codes.

    Returns a shell-style status: 0 when at least one code was found,
    1 otherwise.
    """
    count = 0
    for description in get_known_codes(s, unique=unique, rgb_mode=rgb_mode):
        print(description)
        count += 1
    summary = '\nFound {}{} escape {}.'.format(
        count,
        ' unique' if unique else '',
        'code' if count == 1 else 'codes',
    )
    print(summary)
    return 0 if count else 1
|
def _process_incoming(self, xmlstream, queue_entry):
    """Dispatch to the different methods responsible for the different stanza
    types or handle a non-stanza stream-level element from `stanza_obj`,
    which has arrived over the given `xmlstream`.

    :param xmlstream: stream the element arrived on; used to send SM acks back
    :param queue_entry: tuple ``(stanza_obj, exc)`` where ``exc`` is an
        error associated with the stanza, or None
    """
    stanza_obj, exc = queue_entry
    # first, handle SM (stream management) stream objects
    if isinstance(stanza_obj, nonza.SMAcknowledgement):
        self._logger.debug("received SM ack: %r", stanza_obj)
        if not self._sm_enabled:
            self._logger.warning("received SM ack, but SM not enabled")
            return
        self.sm_ack(stanza_obj.counter)
        return
    elif isinstance(stanza_obj, nonza.SMRequest):
        self._logger.debug("received SM request: %r", stanza_obj)
        if not self._sm_enabled:
            self._logger.warning("received SM request, but SM not enabled")
            return
        response = nonza.SMAcknowledgement()
        response.counter = self._sm_inbound_ctr
        self._logger.debug("sending SM ack: %r", response)
        xmlstream.send_xso(response)
        return
    # raise if it is not a stanza
    if not isinstance(stanza_obj, stanza.StanzaBase):
        raise RuntimeError("unexpected stanza class: {}".format(stanza_obj))
    # now handle stanzas, these always increment the SM counter -- note
    # the increment happens *before* the error check, so erroneous
    # stanzas are counted too
    if self._sm_enabled:
        self._sm_inbound_ctr += 1
        self._sm_inbound_ctr &= 0xffffffff  # keep the counter within 32 bits
    # check if the stanza has errors
    if exc is not None:
        self._process_incoming_erroneous_stanza(stanza_obj, exc)
        return
    # dispatch by stanza type
    if isinstance(stanza_obj, stanza.IQ):
        self._process_incoming_iq(stanza_obj)
    elif isinstance(stanza_obj, stanza.Message):
        self._process_incoming_message(stanza_obj)
    elif isinstance(stanza_obj, stanza.Presence):
        self._process_incoming_presence(stanza_obj)
|
def create_countries_geo_zone(cls, countries_geo_zone, **kwargs):
    """Create CountriesGeoZone

    Create a new CountriesGeoZone.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.create_countries_geo_zone(countries_geo_zone, async=True)
    >>> result = thread.get()

    :param async bool
    :param CountriesGeoZone countries_geo_zone: Attributes of countriesGeoZone to create (required)
    :return: CountriesGeoZone
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        # Asynchronous: return the request thread.
        return cls._create_countries_geo_zone_with_http_info(countries_geo_zone, **kwargs)
    # Synchronous: unwrap and return the response payload.
    (data) = cls._create_countries_geo_zone_with_http_info(countries_geo_zone, **kwargs)
    return data
|
def end_workunit(self, workunit):
    """Implementation of Reporter callback.

    When a workunit under the main root fails and its output was
    suppressed, replay that output to aid in debugging the problem.
    """
    if not self.is_under_main_root(workunit):
        return
    failed = workunit.outcome() != WorkUnit.SUCCESS
    if failed and not self._show_output(workunit):
        # Emit the suppressed workunit output, if any.
        if self._get_label_format(workunit) != LabelFormat.FULL:
            self._emit_indented_workunit_label(workunit)
        for name, outbuf in workunit.outputs().items():
            self.emit(self._prefix(workunit, '\n==== {} ====\n'.format(name)))
            self.emit(self._prefix(workunit, outbuf.read_from(0).decode('utf-8')))
            self.flush()
|
def _match_stmt(ctx, stmt, specs, canonical):
    """Match stmt against the spec.

    Return None | spec'
    spec' is an updated spec with the matching spec consumed.

    :param ctx: context object; ``ctx.errors`` accumulates diagnostics
    :param stmt: the statement to match against the spec
    :param specs: tuple ``(spec, canspec)`` -- the working spec (list of
        ``(keyword, occurance)`` pairs) and the canonical-order spec
    :param canonical: when True, enforce canonical statement ordering
    """
    (spec, canspec) = specs
    i = 0
    while i < len(spec):
        (keywd, occurance) = spec[i]
        if keywd == '$any':
            # wildcard: everything matches and the spec is not consumed
            return (spec, canspec)
        if keywd == '$1.1':
            # keyword only allowed in YANG 1.1; unwrap the real entry
            (keywd, occurance) = occurance
            if (stmt.i_module.i_version == '1' and keywd == stmt.keyword):
                # used in a YANG version 1 module: not a match
                return None
        if keywd == stmt.keyword:
            if occurance == '1' or occurance == '?':
                # consume this match
                if canonical == True:
                    return (spec[i + 1:], spec_del_kwd(keywd, canspec))
                else:
                    return (spec[:i] + spec[i + 1:], canspec)
            if occurance == '+':
                # mark that we have found the one that was needed;
                # further occurrences become optional ('*')
                c = (keywd, '*')
                if canonical == True:
                    return ([c] + spec[i + 1:], canspec)
                else:
                    return (spec[:i] + [c] + spec[i + 1:], canspec)
            else:
                # occurance == '*': repeatable, spec left unchanged
                if canonical == True:
                    return (spec[i:], canspec)
                else:
                    return (spec, canspec)
        elif keywd == '$choice':
            cases = occurance
            j = 0
            while j < len(cases):
                # check if this alternative matches - check for a
                # match with each optional keyword
                save_errors = copy.copy(ctx.errors)
                if spec == top_stmts:
                    match_res = _match_stmt(ctx, stmt, (cases[j], []), False)
                else:
                    match_res = _match_stmt(ctx, stmt, (cases[j], cases[j]), canonical)
                if match_res != None:
                    # this case branch matched, use it.
                    # remove the choice and add res to the spec.
                    nspec = spec[:i] + match_res[0] + spec[i + 1:]
                    return (nspec, canspec)
                # we must not report errors on non-matching branches
                ctx.errors = save_errors
                j += 1
        elif keywd == '$interleave':
            cspec = occurance
            match_res = _match_stmt(ctx, stmt, (cspec, cspec), canonical)
            if match_res != None:
                # we got a match; the interleave group itself stays in place
                return (spec, canspec)
        elif util.is_prefixed(stmt.keyword):
            # allow extension statements mixed with these
            # set canonical to False in this call to just remove the
            # matching stmt from the spec
            match_res = _match_stmt(ctx, stmt, (spec[i + 1:], canspec), False)
            if match_res != None:
                return (spec[:i + 1] + match_res[0], canspec)
            else:
                return None
        elif keywd == '$cut':
            # any non-optional statements left are errors
            for (keywd, occurance) in spec[:i]:
                if occurance == '1' or occurance == '+':
                    error.err_add(ctx.errors, stmt.pos, 'UNEXPECTED_KEYWORD_1', (util.keyword_to_str(stmt.raw_keyword), util.keyword_to_str(keywd)))
            # consume them so we don't report the same error again
            spec = spec[i:]
            i = 0
        elif canonical == True:
            if occurance == '1' or occurance == '+':
                error.err_add(ctx.errors, stmt.pos, 'UNEXPECTED_KEYWORD_CANONICAL_1', (util.keyword_to_str(stmt.raw_keyword), util.keyword_to_str(keywd)))
                # consume it so we don't report the same error again
                spec = spec[i:]
                i = 0
        # check next in spec
        i += 1
    return None
|
def _buildTraitCovar(self, trait_covar_type='freeform', rank=1, fixed_trait_covar=None, jitter=1e-4):
    """Internal function that builds the trait covariance matrix using the LIMIX framework.

    Args:
        trait_covar_type: type of covariance to use. Default 'freeform'.
            Possible values are 'freeform', 'diag', 'lowrank', 'lowrank_id',
            'lowrank_diag', 'block', 'block_id', 'block_diag' and 'fixed'.
        rank: rank of a possible lowrank component (default 1)
        fixed_trait_covar: PxP matrix for the (predefined) trait-to-trait covariance matrix if fixed type is used
        jitter: diagonal contribution added to freeform covariance matrices for regularization
    Returns:
        LIMIX::Covariance for trait covariance matrix
    """
    assert trait_covar_type in ['freeform', 'diag', 'lowrank', 'lowrank_id', 'lowrank_diag', 'block', 'block_id', 'block_diag', 'fixed'], 'VarianceDecomposition:: trait_covar_type not valid'
    if trait_covar_type == 'freeform':
        cov = FreeFormCov(self.P, jitter=jitter)
    elif trait_covar_type == 'fixed':
        assert fixed_trait_covar is not None, 'VarianceDecomposition:: set fixed_trait_covar'
        assert fixed_trait_covar.shape[0] == self.P, 'VarianceDecomposition:: Incompatible shape for fixed_trait_covar'
        assert fixed_trait_covar.shape[1] == self.P, 'VarianceDecomposition:: Incompatible shape for fixed_trait_covar'
        cov = FixedCov(fixed_trait_covar)
    elif trait_covar_type == 'diag':
        cov = DiagonalCov(self.P)
    elif trait_covar_type == 'lowrank':
        cov = LowRankCov(self.P, rank=rank)
    elif trait_covar_type == 'lowrank_id':
        # low-rank component plus (scaled) identity
        cov = SumCov(LowRankCov(self.P, rank=rank), FixedCov(sp.eye(self.P)))
    elif trait_covar_type == 'lowrank_diag':
        # low-rank component plus free diagonal
        cov = SumCov(LowRankCov(self.P, rank=rank), DiagonalCov(self.P))
    elif trait_covar_type == 'block':
        cov = FixedCov(sp.ones([self.P, self.P]))
    elif trait_covar_type == 'block_id':
        # constant block plus (scaled) identity
        cov = SumCov(FixedCov(sp.ones([self.P, self.P])), FixedCov(sp.eye(self.P)))
    elif trait_covar_type == 'block_diag':
        # BUGFIX: this branch was a byte-for-byte duplicate of 'block_id'
        # (identity second term). By analogy with 'lowrank_id' vs
        # 'lowrank_diag' above, '_diag' combines the constant block with a
        # free *diagonal* covariance, not the identity.
        cov = SumCov(FixedCov(sp.ones([self.P, self.P])), DiagonalCov(self.P))
    return cov
|
def export_obo(self, path_to_export_file, name_of_ontology="uniprot", taxids=None):
    """Export database entries to an OBO (http://www.obofoundry.org/) file.

    :param path_to_export_file: path to export file
    :param name_of_ontology: value written to the OBO ``ontology:`` header
    :param taxids: NCBI taxonomy identifiers to export (optional)
    """
    # Context manager: the file handle is closed even if a query/IO error occurs
    with open(path_to_export_file, 'w') as fd:
        header = "format-version: 0.1\ndata: {}\n".format(time.strftime("%d:%m:%Y %H:%M"))
        header += "ontology: {}\n".format(name_of_ontology)
        header += 'synonymtypedef: GENE_NAME "GENE NAME"\nsynonymtypedef: ALTERNATIVE_NAME "ALTERNATIVE NAME"\n'
        fd.write(header)
        query = self.session.query(models.Entry)
        if taxids:
            # BUGFIX: the filter must be applied *before* limit() --
            # SQLAlchemy raises InvalidRequestError when filter() is called
            # on a query that already has LIMIT applied.
            query = query.filter(models.Entry.taxid.in_(taxids))
        # TODO(review): the 100-entry cap looks like a debugging leftover --
        # it contradicts "export complete database"; kept for compatibility.
        query = query.limit(100)
        for entry in query.all():
            fd.write('\n[Term]\nid: SWISSPROT:{}\n'.format(entry.accessions[0]))
            if len(entry.accessions) > 1:
                # Secondary accessions become alt_id lines
                for accession in entry.accessions[1:]:
                    fd.write('alt_id: {}\n'.format(accession))
            fd.write('name: {}\n'.format(entry.recommended_full_name))
            for alternative_full_name in entry.alternative_full_names:
                fd.write('synonym: "{}" EXACT ALTERNATIVE_NAME []\n'.format(alternative_full_name.name))
            for alternative_short_name in entry.alternative_short_names:
                fd.write('synonym: "{}" EXACT ALTERNATIVE_NAME []\n'.format(alternative_short_name.name))
            fd.write('synonym: "{}" EXACT GENE_NAME []\n'.format(entry.gene_name))
            for xref in entry.db_references:
                if xref.type_ in ['GO', 'HGNC']:
                    # Drop the leading '<TYPE>:' segment of the identifier
                    xref.identifier = ':'.join(xref.identifier.split(':')[1:])
                fd.write('xref: {}:{}\n'.format(xref.type_, xref.identifier.replace('\\', '\\\\')))
|
def map_verbose(func, seq, msg="{}", every=25, start=True, end=True, offset=0, callback=None):
    """Same as the built-in map function but prints a *msg* after chunks of
    size *every* iterations.

    When *start* (*end*) is *True*, the *msg* is also printed after the
    first (last) iteration. Note that *msg* is supposed to be a template
    string that will be formatted with the current iteration number
    (starting at 0) plus *offset* using ``str.format``. When *callback* is
    callable, it is invoked instead of the default print method with the
    current iteration number (without *offset*) as the only argument.

    Example:

    .. code-block:: python

        func = lambda x: x ** 2
        msg = "computing square of {}"
        squares = map_verbose(func, range(7), msg, every=3)
        # computing square of 0
        # computing square of 2
        # computing square of 5
        # computing square of 6
    """
    if not callable(callback):
        # default: print the formatted message with the offset applied
        def callback(i):
            print(msg.format(i + offset))
    results = []
    reported_last = False
    idx = -1
    for idx, item in enumerate(seq):
        results.append(func(item))
        reported_last = (start and idx == 0) or (idx + 1) % every == 0
        if reported_last:
            callback(idx)
    # report the final iteration unless it was just reported above
    if end and results and not reported_last:
        callback(idx)
    return results
|
def _build(self, build_method):
    """Build image from provided build_args.

    :param build_method: callable taking ``(build_image, temp_dir)`` that
        starts the build container and returns its container id
    :return: BuildResults
    """
    logger.info("building image '%s'", self.image)
    self.ensure_not_built()
    # temp dir holds the serialized build arguments (BUILD_JSON) and is
    # handed to the build container
    self.temp_dir = tempfile.mkdtemp()
    temp_path = os.path.join(self.temp_dir, BUILD_JSON)
    try:
        with open(temp_path, 'w') as build_json:
            json.dump(self.build_args, build_json)
        self.build_container_id = build_method(self.build_image, self.temp_dir)
        try:
            # stream the container logs until the build command finishes
            logs_gen = self.dt.logs(self.build_container_id, stream=True)
            wait_for_command(logs_gen)
            return_code = self.dt.wait(self.build_container_id)
        except KeyboardInterrupt:
            # user aborted: force-remove the container and report failure
            logger.info("killing build container on user's request")
            self.dt.remove_container(self.build_container_id, force=True)
            results = BuildResults()
            results.return_code = 1
            return results
        else:
            # build ran to completion: collect its results and exit code
            results = self._load_results(self.build_container_id)
            results.return_code = return_code
            return results
    finally:
        # always remove the temp dir, even on error or interrupt
        shutil.rmtree(self.temp_dir)
|
def copy(self):
    """Return a copy of the current ColorVisuals object.

    Returns
    -------
    copied : ColorVisuals
        Contains the same information as self
    """
    duplicate = ColorVisuals()
    # deep-copy the stored data so mutations on the copy don't leak back
    duplicate._data.data = copy.deepcopy(self._data.data)
    return duplicate
|
def setup_dirs():
    """Make required directories to hold logfile.

    :returns: str
    """
    # Honor XDG_CACHE_HOME when set, otherwise fall back to ~/.cache
    xdg_cache = os.environ.get("XDG_CACHE_HOME")
    if xdg_cache is not None:
        top_dir = os.path.abspath(os.path.expanduser(xdg_cache))
    else:
        top_dir = os.path.abspath(os.path.expanduser("~/.cache"))
    our_cache_dir = os.path.join(top_dir, PROJECT_NAME)
    os.makedirs(our_cache_dir, mode=0o775, exist_ok=True)
    return our_cache_dir
|
def turn_off_syncing(for_post_save=True, for_post_delete=True, for_m2m_changed=True, for_post_bulk_operation=True):
    """Disables all of the signals for syncing entities. By default,
    everything is turned off. If the user wants to turn off everything but
    one signal, for example the post_save signal, they would do:

        turn_off_sync(for_post_save=False)
    """
    disconnections = (
        (for_post_save, post_save, save_entity_signal_handler, 'save_entity_signal_handler'),
        (for_post_delete, post_delete, delete_entity_signal_handler, 'delete_entity_signal_handler'),
        (for_m2m_changed, m2m_changed, m2m_changed_entity_signal_handler, 'm2m_changed_entity_signal_handler'),
        (for_post_bulk_operation, post_bulk_operation, bulk_operation_signal_handler, 'bulk_operation_signal_handler'),
    )
    for requested, signal, handler, uid in disconnections:
        if requested:
            signal.disconnect(handler, dispatch_uid=uid)
|
def angular_templates(context):
    """Generate a dictionary of template contents for all static HTML templates.

    If the template has been overridden by a theme, load the override
    contents instead of the original HTML file. One use for this is to
    pre-populate the angular template cache.

    Args:
        context: the context of the current Django template
    Returns: an object containing
        angular_templates: dictionary of angular template contents
          - key is the template's static path,
          - value is a string of HTML template contents
    """
    template_paths = context['HORIZON_CONFIG']['external_templates']
    all_theme_static_files = context['HORIZON_CONFIG']['theme_static_files']
    this_theme_static_files = all_theme_static_files[context['THEME']]
    template_overrides = this_theme_static_files['template_overrides']
    angular_templates = {}
    for relative_path in template_paths:
        template_static_path = context['STATIC_URL'] + relative_path
        # If the current theme overrides this template, use the theme
        # content instead of the original file content
        if relative_path in template_overrides:
            relative_path = template_overrides[relative_path]
        result = []
        for finder in finders.get_finders():
            result.extend(finder.find(relative_path, True))
        if not result:
            # BUGFIX: no finder located the template; previously result[-1]
            # raised an uncaught IndexError here. Skip it, consistent with
            # the unreadable-file handling below.
            continue
        path = result[-1]
        try:
            if six.PY3:
                with open(path, encoding='utf-8') as template_file:
                    angular_templates[template_static_path] = template_file.read()
            else:
                with open(path) as template_file:
                    angular_templates[template_static_path] = template_file.read()
        except (OSError, IOError):
            # Failed to read template, leave the template dictionary blank.
            # If the caller is using this dictionary to pre-populate a cache
            # there will simply be no pre-loaded version for this template.
            pass
    # Sort by static path so the output is deterministic
    return {'angular_templates': sorted(angular_templates.items())}
|
def __map_button(self, button):
    """Get the linux xpad code from the Windows xinput code."""
    _, start_code, start_value = button
    code = self.manager.codes['xpad'][start_code]
    # Codes 1-4 are treated as axis (Absolute) events rather than keys
    ev_type = "Absolute" if 1 <= start_code <= 4 else "Key"
    value = start_value
    # Raw value 1 on codes 1 and 3 maps to -1 -- presumably the negative
    # axis direction; confirm against the xpad code table.
    if start_value == 1 and start_code in (1, 3):
        value = -1
    return code, value, ev_type
|
def sumlogs(x, axis=None, out=None):
    """Sum of vector where numbers are represented by their logarithms.

    Calculates ``np.log(np.sum(np.exp(x), axis=axis))`` in such a fashion
    that it works even when elements have large magnitude (the classic
    log-sum-exp shift).
    """
    # Shift by the max so the exponentials cannot overflow
    maxx = x.max(axis=axis, keepdims=True)
    shifted = x - maxx
    np.exp(shifted, out=shifted)
    out = np.sum(shifted, axis=axis, out=out)
    if isinstance(out, np.ndarray):
        # in-place log when the reduction produced an array
        np.log(out, out=out)
    else:
        out = np.log(out)
    # add the shift back
    out += np.squeeze(maxx)
    return out
|
def binary_dumps(obj, alt_format=False):
    """Serialize ``obj`` to a binary VDF formatted ``bytes``.

    Thin wrapper that joins the chunks emitted by the ``_binary_dump_gen``
    generator into a single bytes object.
    """
    chunks = _binary_dump_gen(obj, alt_format=alt_format)
    return b''.join(chunks)
|
def move_mission(self, key, selection_index):
    '''move a mission point'''
    wp_idx = self.selection_index_to_idx(key, selection_index)
    # remember which waypoint is currently being moved
    self.moving_wp = wp_idx
    print("Moving wp %u" % wp_idx)
|
def set_data_value(datastore, path, data):
    '''Set a data entry in a datastore.

    :param datastore: The datastore, e.g. running, operational.
        One of the NETCONF store IETF types
    :type datastore: :class:`DatastoreType` (``str`` enum).
    :param path: The device path to set the value at,
        a list of element names in order, comma separated
    :type path: ``list`` of ``str`` OR ``tuple``
    :param data: The new value at the given path
    :type data: ``dict``
    :rtype: ``bool``
    :return: ``True`` if successful, otherwise error.
    '''
    return _get_client().set_data_value(datastore, path, data)
|
def partition_by_cores(a: Collection, n_cpus: int) -> List[Collection]:
    "Split data in `a` equally among `n_cpus` cores"
    # ceil-ish chunk size so every core gets at most this many items
    chunk_size = len(a) // n_cpus + 1
    return partition(a, chunk_size)
|
def detector_voltage(self, channels=None):
    """Get the detector voltage used for the specified channel(s).

    The detector voltage for channel "n" is extracted from the $PnV
    parameter, if available.

    Parameters
    ----------
    channels : int, str, list of int, list of str
        Channel(s) for which to get the detector voltage. If None,
        return a list with the detector voltage of all channels, in the
        order of ``FCSData.channels``.

    Return
    ------
    float or list of float
        The detector voltage of the specified channel(s). If no
        information about the detector voltage is found for a channel,
        return None.
    """
    if channels is None:
        # Default: every channel, in FCSData.channels order
        channels = self._channels
    # Resolve channel names/labels to numerical indices
    indices = self._name_to_index(channels)
    got_sequence = hasattr(indices, '__iter__') and not isinstance(indices, six.string_types)
    if got_sequence:
        return [self._detector_voltage[i] for i in indices]
    return self._detector_voltage[indices]
|
def conv2d_fixed_padding(inputs, filters, kernel_size, strides, data_format="channels_first", use_td=False, targeting_rate=None, keep_prob=None, is_training=None):
    """Strided 2-D convolution with explicit padding.

    The padding is consistent and is based only on `kernel_size`, not on the
    dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone).

    Args:
      inputs: `Tensor` of size `[batch, channels, height_in, width_in]`.
      filters: `int` number of filters in the convolution.
      kernel_size: `int` size of the kernel to be used in the convolution.
      strides: `int` strides of the convolution.
      data_format: `str` either "channels_first" for `[batch, channels, height,
        width]` or "channels_last" for `[batch, height, width, channels]`.
      use_td: `str` one of "weight" or "unit". Set to False or "" to disable
        targeted dropout.
      targeting_rate: `float` proportion of weights to target with targeted
        dropout.
      keep_prob: `float` keep probability for targeted dropout.
      is_training: `bool` for whether the model is in training.

    Returns:
      A `Tensor` of shape `[batch, filters, height_out, width_out]`.

    Raises:
      Exception: if use_td is not valid.
    """
    if strides > 1:
        # Pad explicitly up front so VALID padding below yields an output
        # size that depends only on kernel_size/strides, not input size.
        inputs = fixed_padding(inputs, kernel_size, data_format=data_format)
    if use_td:
        inputs_shape = common_layers.shape_list(inputs)
        if use_td == "weight":
            # Target a fraction of the individual kernel weights; `size` is
            # the number of weights feeding one output unit.
            if data_format == "channels_last":
                size = kernel_size * kernel_size * inputs_shape[-1]
            else:
                size = kernel_size * kernel_size * inputs_shape[1]
            targeting_count = targeting_rate * tf.to_float(size)
            targeting_fn = common_layers.weight_targeting
        elif use_td == "unit":
            # Target whole output units (filters) instead of single weights.
            targeting_count = targeting_rate * filters
            targeting_fn = common_layers.unit_targeting
        else:
            raise Exception("Unrecognized targeted dropout type: %s" % use_td)
        # Targeted-dropout convolution variant.
        y = common_layers.td_conv(inputs, filters, kernel_size, targeting_count, targeting_fn, keep_prob, is_training, do_prune=True, strides=strides, padding=("SAME" if strides == 1 else "VALID"), data_format=data_format, use_bias=False, kernel_initializer=tf.variance_scaling_initializer())
    else:
        # Plain convolution; VALID when we already padded explicitly above.
        y = layers().Conv2D(filters=filters, kernel_size=kernel_size, strides=strides, padding=("SAME" if strides == 1 else "VALID"), use_bias=False, kernel_initializer=tf.variance_scaling_initializer(), data_format=data_format)(inputs)
    return y
|
def convert(self):
    """Returns RPM SPECFILE.

    Returns:
        rendered RPM SPECFILE.
    """
    # move file into position
    try:
        local_file = self.getter.get()
    except (exceptions.NoSuchPackageException, OSError) as e:
        logger.error("Failed and exiting:", exc_info=True)
        logger.info("Pyp2rpm failed. See log for more info.")
        sys.exit(e)
    # save name and version from the file (rewrite if set previously)
    self.name, self.version = self.getter.get_name_version()
    self.local_file = local_file
    data = self.metadata_extractor.extract_data(self.client)
    logger.debug("Extracted metadata:")
    logger.debug(pprint.pformat(data.data))
    self.merge_versions(data)
    # Templates may live anywhere on the filesystem (absolute path) or in
    # the bundled pyp2rpm template package; try both loaders in order.
    jinja_env = jinja2.Environment(loader=jinja2.ChoiceLoader([jinja2.FileSystemLoader(['/']), jinja2.PackageLoader('pyp2rpm', 'templates'), ]))
    # Register all spec-file filters on the environment.
    for filter in filters.__all__:
        jinja_env.filters[filter.__name__] = filter
    try:
        jinja_template = jinja_env.get_template(os.path.abspath(self.template))
    except jinja2.exceptions.TemplateNotFound:
        # absolute path not found => search in default template dir
        logger.warn('Template: {0} was not found in {1} using default ' 'template dir.'.format(self.template, os.path.abspath(self.template)))
        jinja_template = jinja_env.get_template(self.template)
        logger.info('Using default template: {0}.'.format(self.template))
    ret = jinja_template.render(data=data, name_convertor=name_convertor)
    # Strip trailing spaces/tabs before newlines in the rendered spec.
    return re.sub(r'[ \t]+\n', "\n", ret)
|
def gets(self, key):
    "Like `get`, but return all matches, not just the first."
    matches = []
    # Direct hit at this level
    if key in self.keys():
        matches.append(self[key])
    for value in self.values():
        if isinstance(value, self.__class__):
            # Same container type: recurse and collect every nested match
            matches.extend(value.gets(key))
        elif isinstance(value, dict):
            # Plain dict value: only inspect its top level
            if key in value.keys():
                matches.append(value[key])
    return matches
|
def present(name, host='localhost', password=None, password_hash=None, allow_passwordless=False, unix_socket=False, password_column=None, **connection_args):
    '''Ensure that the named user is present with the specified properties. A
    passwordless user can be configured by omitting ``password`` and
    ``password_hash``, and setting ``allow_passwordless`` to ``True``.

    name
        The name of the user to manage

    host
        Host for which this user/password combo applies

    password
        The password to use for this user. Will take precedence over the
        ``password_hash`` option if both are specified.

    password_hash
        The password in hashed form. Be sure to quote the password because YAML
        doesn't like the ``*``. A password hash can be obtained from the mysql
        command-line client like so::

            mysql> SELECT PASSWORD('mypass');
            | PASSWORD('mypass')                        |
            | *6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4 |
            1 row in set (0.00 sec)

    allow_passwordless
        If ``True``, then ``password`` and ``password_hash`` can be omitted to
        permit a passwordless login.

        .. versionadded:: 0.16.2

    unix_socket
        If ``True`` and allow_passwordless is ``True``, the unix_socket auth
        plugin will be used.
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': 'User {0}@{1} is already present'.format(name, host)}
    # Passwordless when neither a plain password nor a hash was given.
    passwordless = not any((password, password_hash))
    # check if user exists with the same password (or passwordless login)
    if passwordless:
        if not salt.utils.data.is_true(allow_passwordless):
            ret['comment'] = 'Either password or password_hash must be ' 'specified, unless allow_passwordless is True'
            ret['result'] = False
            return ret
        else:
            if __salt__['mysql.user_exists'](name, host, passwordless=True, unix_socket=unix_socket, password_column=password_column, **connection_args):
                # Already present with passwordless login: nothing to do.
                ret['comment'] += ' with passwordless login'
                return ret
            else:
                # Distinguish "user absent" from "query failed".
                err = _get_mysql_error()
                if err is not None:
                    ret['comment'] = err
                    ret['result'] = False
                    return ret
    else:
        if __salt__['mysql.user_exists'](name, host, password, password_hash, unix_socket=unix_socket, password_column=password_column, **connection_args):
            # Already present with the requested credentials.
            ret['comment'] += ' with the desired password'
            if password_hash and not password:
                ret['comment'] += ' hash'
            return ret
        else:
            err = _get_mysql_error()
            if err is not None:
                ret['comment'] = err
                ret['result'] = False
                return ret
    # check if user exists with a different password
    if __salt__['mysql.user_exists'](name, host, unix_socket=unix_socket, **connection_args):
        # The user is present, change the password
        if __opts__['test']:
            # Dry run: report what would happen without touching MySQL.
            ret['comment'] = 'Password for user {0}@{1} is set to be '.format(name, host)
            ret['result'] = None
            if passwordless:
                ret['comment'] += 'cleared'
                if not salt.utils.data.is_true(allow_passwordless):
                    ret['comment'] += ', but allow_passwordless != True'
                    ret['result'] = False
            else:
                ret['comment'] += 'changed'
            return ret
        if __salt__['mysql.user_chpass'](name, host, password, password_hash, allow_passwordless, unix_socket, **connection_args):
            ret['comment'] = 'Password for user {0}@{1} has been ' '{2}'.format(name, host, 'cleared' if passwordless else 'changed')
            ret['changes'][name] = 'Updated'
        else:
            ret['comment'] = 'Failed to {0} password for user ' '{1}@{2}'.format('clear' if passwordless else 'change', name, host)
            err = _get_mysql_error()
            if err is not None:
                ret['comment'] += ' ({0})'.format(err)
            if passwordless and not salt.utils.data.is_true(allow_passwordless):
                ret['comment'] += '. Note: allow_passwordless must be True ' 'to permit passwordless login.'
            ret['result'] = False
    else:
        err = _get_mysql_error()
        if err is not None:
            ret['comment'] = err
            ret['result'] = False
            return ret
        # The user is not present, make it!
        if __opts__['test']:
            ret['comment'] = 'User {0}@{1} is set to be added'.format(name, host)
            ret['result'] = None
            if passwordless:
                ret['comment'] += ' with passwordless login'
                if not salt.utils.data.is_true(allow_passwordless):
                    ret['comment'] += ', but allow_passwordless != True'
                    ret['result'] = False
            return ret
        if __salt__['mysql.user_create'](name, host, password, password_hash, allow_passwordless, unix_socket=unix_socket, password_column=password_column, **connection_args):
            ret['comment'] = 'The user {0}@{1} has been added'.format(name, host)
            if passwordless:
                ret['comment'] += ' with passwordless login'
            ret['changes'][name] = 'Present'
        else:
            ret['comment'] = 'Failed to create user {0}@{1}'.format(name, host)
            err = _get_mysql_error()
            if err is not None:
                ret['comment'] += ' ({0})'.format(err)
            ret['result'] = False
    return ret
|
def translate(otp, to=MODHEX):
    """Return set() of possible modhex interpretations of a Yubikey otp.

    If otp uses all 16 characters in its alphabet, there will be only
    one possible interpretation of that Yubikey otp (except for two
    Armenian keyboard layouts).

    otp: Yubikey output.
    to: 16-character target alphabet, default MODHEX.
    """
    # Enforce unicode input on both Python 2 and Python 3.
    if PY3:
        if isinstance(otp, bytes):
            raise ValueError("otp must be unicode")
        if isinstance(to, bytes):
            raise ValueError("to must be unicode")
    else:
        if not isinstance(otp, unicode):
            raise ValueError("otp must be unicode")
        if not isinstance(to, unicode):
            raise ValueError("to must be unicode")
    # Keep only the alphabets compatible with every character of the otp.
    candidate_sets = (set(index[ch]) for ch in set(otp))
    candidates = reduce(lambda acc, cur: acc.intersection(cur), candidate_sets)
    results = set()
    for alphabet_id in candidates:
        alphabet = alphabets[alphabet_id]
        # Map each source codepoint onto the target alphabet positionally.
        mapping = dict(zip((ord(ch) for ch in alphabet), to))
        results.add(otp.translate(mapping))
    return results
|
def find_db_attributes(self, table, *conditions):
    """Lists records satisfying 'conditions' in 'table'.

    This method is corresponding to the following ovs-vsctl command::

        $ ovs-vsctl find TBL CONDITION ...

    .. Note::
        Currently, only '=' condition is supported.
        To support other condition is TODO.
    """
    command = ovs_vsctl.VSCtlCommand('find', [table] + list(conditions))
    self.run_command([command])
    # Normalize a falsy result to an empty list.
    return command.result if command.result else []
|
def similarity(self, other):
    """Calculate similarity based on best matching permutation of items.

    Tries every ordering of the longer side's items against the shorter
    side and keeps the best pairwise similarity.

    NOTE(review): cost grows factorially with the number of items -- fine
    for small groups, confirm callers never pass large ones.
    """
    # Select the longer list as the basis for comparison
    if len(self.items) > len(other.items):
        first, second = self, other
    else:
        first, second = other, self
    items = list(first.items)
    # backup items list
    length = len(items)
    # Empty groups are defined as fully similar (1.0).
    sim = self.Similarity(0.0 if length else 1.0)
    # Calculate the similarity for each permutation of items
    cname = self.__class__.__name__
    for num, perm in enumerate(permutations(items, length), start=1):
        # Temporarily mutate first.items so the base-class comparison sees
        # this ordering; restored after the loop.
        first.items = perm
        aname = 'items-p{}'.format(num)
        self.log(first, second, '%', cname=cname, aname=aname)
        permutation_sim = super(Group, first).similarity(second)
        self.log(first, second, '%', cname=cname, aname=aname, result=permutation_sim)
        sim = max(sim, permutation_sim)
        logging.debug("highest similarity: %s", sim)
    first.items = items
    # restore original items list
    return sim
|
def create_transaction(self, *args: Any, **kwargs: Any) -> BaseTransaction:
    """Proxy for instantiating a signed transaction for this VM."""
    # Resolve the concrete transaction class, then build an instance.
    transaction_class = self.get_transaction_class()
    return transaction_class(*args, **kwargs)
|
def _stat_name ( feat_name , stat_mode ) :
'''Set stat name based on feature name and stat mode'''
|
if feat_name [ - 1 ] == 's' :
feat_name = feat_name [ : - 1 ]
if feat_name == 'soma_radii' :
feat_name = 'soma_radius'
if stat_mode == 'raw' :
return feat_name
return '%s_%s' % ( stat_mode , feat_name )
|
def get_admin_email_link(application):
    """Retrieve a link that can be emailed to the administrator."""
    link = '%s/applications/%d/' % (settings.ADMIN_BASE_URL, application.pk)
    # Admin links carry no secret token.
    return link, False
|
def is_excel_file(inputfile):
    """Return whether the provided file can be opened as an Excel workbook.

    Attempts to open ``inputfile`` with :mod:`xlrd`; any failure (wrong
    format, missing file, ...) is printed and reported as ``False``.
    """
    try:
        xlrd.open_workbook(inputfile)
    except Exception as err:
        # Best-effort probe: report the reason and treat as "not Excel".
        print(err)
        return False
    return True
|
def parse(self, input_text, syncmap):
    """Read from SMIL file.

    Limitations:
    1. parses only ``<par>`` elements, in order
    2. timings must have ``hh:mm:ss.mmm`` or ``ss.mmm`` format (autodetected)
    3. both ``clipBegin`` and ``clipEnd`` attributes of ``<audio>`` must be populated
    """
    from lxml import etree
    smil_ns = "{http://www.w3.org/ns/SMIL}"
    root = etree.fromstring(gf.safe_bytes(input_text))
    for par in root.iter(smil_ns + "par"):
        # NOTE(review): `identifier` is only bound when a <text> child has
        # been seen; a <par> whose <audio> precedes (or lacks) its <text>
        # would use an unbound/stale identifier here -- confirm inputs
        # always place <text> first.
        for child in par:
            if child.tag == (smil_ns + "text"):
                # Fragment identifier is the fragment part of the src URL.
                identifier = gf.safe_unicode(gf.split_url(child.get("src"))[1])
            elif child.tag == (smil_ns + "audio"):
                begin_text = child.get("clipBegin")
                # Autodetect hh:mm:ss.mmm vs ss.mmm by presence of ':'.
                if ":" in begin_text:
                    begin = gf.time_from_hhmmssmmm(begin_text)
                else:
                    begin = gf.time_from_ssmmm(begin_text)
                end_text = child.get("clipEnd")
                if ":" in end_text:
                    end = gf.time_from_hhmmssmmm(end_text)
                else:
                    end = gf.time_from_ssmmm(end_text)
                # TODO read text from additional text_file?
                self._add_fragment(syncmap=syncmap, identifier=identifier, lines=[u""], begin=begin, end=end)
|
def scan_temperature(self, measure, temperature, rate, delay=1):
    """Performs a temperature scan.

    Measures until the target temperature is reached.

    :param measure: A callable called repeatedly until stability at target
        temperature is reached.
    :param temperature: The target temperature in kelvin.
    :param rate: The sweep rate in kelvin per minute.
    :param delay: The time delay between each call to measure in seconds.
    """
    if not hasattr(measure, '__call__'):
        raise TypeError('measure parameter not callable.')
    self.set_temperature(temperature, rate, 'no overshoot', wait_for_stability=False)
    started_at = datetime.datetime.now()
    grace = datetime.timedelta(seconds=10)
    # The PPMS needs some time to update the status code; ignore it for 10 s.
    while not (self.system_status['temperature'] == 'normal stability at target temperature'
               and datetime.datetime.now() - started_at > grace):
        measure()
        time.sleep(delay)
|
def split(self, meta=False):
    """split disconnected structure to connected substructures

    :param meta: copy metadata to each substructure
    :return: list of substructures
    """
    # One substructure per connected component.
    components = connected_components(self)
    return [self.substructure(component, meta, False) for component in components]
|
def concat(self, to_concat, new_axis):
    """Concatenate a list of SingleBlockManagers into a single
    SingleBlockManager.

    Used for pd.concat of Series objects with axis=0.

    Parameters
    ----------
    to_concat : list of SingleBlockManagers
    new_axis : Index of the result

    Returns
    -------
    SingleBlockManager
    """
    non_empties = [x for x in to_concat if len(x) > 0]
    # check if all series are of the same block type:
    if len(non_empties) > 0:
        blocks = [obj.blocks[0] for obj in non_empties]
        if len({b.dtype for b in blocks}) == 1:
            # Homogeneous dtype: concatenate within the block type.
            new_block = blocks[0].concat_same_type(blocks)
        else:
            # Mixed dtypes: fall back to value-level concat, which may
            # upcast to a common dtype.
            values = [x.values for x in blocks]
            values = _concat._concat_compat(values)
            new_block = make_block(values, placement=slice(0, len(values), 1))
    else:
        # All inputs empty: still concat values to preserve dtype semantics.
        values = [x._block.values for x in to_concat]
        values = _concat._concat_compat(values)
        new_block = make_block(values, placement=slice(0, len(values), 1))
    mgr = SingleBlockManager(new_block, new_axis)
    return mgr
|
def user_parser(user):
    """Parses a user object"""
    if __is_deleted(user):
        return deleted_parser(user)
    uid = user['id']
    # Item-type ids and integer ids are not valid user names.
    if uid in item_types:
        raise Exception('Not a user name')
    if type(uid) == int:
        raise Exception('Not a user name')
    return User(uid, user['delay'], user['created'], user['karma'],
                user['about'], user['submitted'], )
|
def modular_sqrt(a, p):
    """Find a quadratic residue (mod p) of 'a'. p must be an odd prime.

    Solve the congruence of the form:
        x^2 = a (mod p)
    And returns x. Note that p - x is also a root.

    0 is returned if no square root exists for these a and p.

    The Tonelli-Shanks algorithm is used (except for some simple cases in
    which the solution is known from an identity). This algorithm runs in
    polynomial time (unless the generalized Riemann hypothesis is false).
    """
    # Simple cases
    if legendre_symbol(a, p) != 1:
        # a is a non-residue: no square root exists.
        return 0
    elif a == 0:
        return 0
    elif p == 2:
        return p
    elif p % 4 == 3:
        # For p = 3 (mod 4) the root is a^((p+1)/4) mod p. Floor division
        # keeps the exponent an int on Python 3 (`/` would make it a float
        # and break pow()); `//` behaves identically on Python 2.
        return pow(a, (p + 1) // 2 // 2, p)
    # Partition p-1 to s * 2^e for an odd s (i.e.
    # reduce all the powers of 2 from p-1)
    s = p - 1
    e = 0
    while s % 2 == 0:
        s //= 2
        e += 1
    # Find some 'n' with a legendre symbol n|p = -1.
    # Shouldn't take long.
    n = 2
    while legendre_symbol(n, p) != -1:
        n += 1
    # Here be dragons!
    # Read the paper "Square roots from 1; 24, 51,
    # 10 to Dan Shanks" by Ezra Brown for more information.
    #
    # x is a guess of the square root that gets better with each iteration.
    # b is the "fudge factor" - by how much we're off with the guess.
    #   The invariant x^2 = ab (mod p) is maintained throughout the loop.
    # g is used for successive powers of n to update both a and b.
    # r is the exponent - decreases with each update.
    x = pow(a, (s + 1) // 2, p)
    b = pow(a, s, p)
    g = pow(n, s, p)
    r = e
    while True:
        t = b
        m = 0
        # Find the least m, 0 <= m < r, with t^(2^m) = 1.
        for m in range(r):  # range, not xrange: works on Python 2 and 3
            if t == 1:
                break
            t = pow(t, 2, p)
        if m == 0:
            return x
        gs = pow(g, 2 ** (r - m - 1), p)
        g = (gs * gs) % p
        x = (x * gs) % p
        b = (b * g) % p
        r = m
|
def pos(self):
    """Lazy-loads the part of speech tag for this word

    :getter: Returns the plain string value of the POS tag for the word
    :type: str
    """
    if self._pos is None:
        # Query the backing XML element only once, then cache the result.
        tags = self._element.xpath('POS/text()')
        if tags:
            self._pos = tags[0]
    return self._pos
|
def extract_path_info(environ_or_baseurl, path_or_url, charset="utf-8", errors="werkzeug.url_quote", collapse_http_schemes=True, ):
    """Extracts the path info from the given URL (or WSGI environment) and
    path. The path info returned is a unicode string, not a bytestring
    suitable for a WSGI environment. The URLs might also be IRIs.

    If the path info could not be determined, `None` is returned.

    Some examples:

    >>> extract_path_info('http://example.com/app', '/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello',
    ...                   collapse_http_schemes=False) is None
    True

    Instead of providing a base URL you can also pass a WSGI environment.

    :param environ_or_baseurl: a WSGI environment dict, a base URL or
                               base IRI.  This is the root of the
                               application.
    :param path_or_url: an absolute path from the server root, a
                        relative path (in which case it's the path info)
                        or a full URL.  Also accepts IRIs and unicode
                        parameters.
    :param charset: the charset for byte data in URLs
    :param errors: the error handling on decode
    :param collapse_http_schemes: if set to `False` the algorithm does
                                  not assume that http and https on the
                                  same server point to the same
                                  resource.

    .. versionchanged:: 0.15
        The ``errors`` parameter defaults to leaving invalid bytes
        quoted instead of replacing them.

    .. versionadded:: 0.6
    """
    def _normalize_netloc(scheme, netloc):
        # Strip userinfo and drop the default port for http/https so that
        # equivalent netlocs compare equal.
        parts = netloc.split(u"@", 1)[-1].split(u":", 1)
        if len(parts) == 2:
            netloc, port = parts
            if (scheme == u"http" and port == u"80") or (scheme == u"https" and port == u"443"):
                port = None
        else:
            netloc = parts[0]
            port = None
        if port is not None:
            netloc += u":" + port
        return netloc
    # make sure whatever we are working on is a IRI and parse it
    path = uri_to_iri(path_or_url, charset, errors)
    if isinstance(environ_or_baseurl, dict):
        environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True)
    base_iri = uri_to_iri(environ_or_baseurl, charset, errors)
    base_scheme, base_netloc, base_path = url_parse(base_iri)[:3]
    cur_scheme, cur_netloc, cur_path, = url_parse(url_join(base_iri, path))[:3]
    # normalize the network location
    base_netloc = _normalize_netloc(base_scheme, base_netloc)
    cur_netloc = _normalize_netloc(cur_scheme, cur_netloc)
    # is that IRI even on a known HTTP scheme?
    if collapse_http_schemes:
        # http and https are considered interchangeable.
        for scheme in base_scheme, cur_scheme:
            if scheme not in (u"http", u"https"):
                return None
    else:
        # Schemes must match exactly.
        if not (base_scheme in (u"http", u"https") and base_scheme == cur_scheme):
            return None
    # are the netlocs compatible?
    if base_netloc != cur_netloc:
        return None
    # are we below the application path?
    base_path = base_path.rstrip(u"/")
    if not cur_path.startswith(base_path):
        return None
    return u"/" + cur_path[len(base_path):].lstrip(u"/")
|
def data(self):
    """A 2D cutout image of the segment using the minimal bounding box,
    where pixels outside of the labeled region are set to zero (i.e.
    neighboring segments within the rectangular cutout image are not
    shown).
    """
    # Copy so the source segmentation image is never modified.
    cutout = np.copy(self._segment_img[self.slices])
    # Zero every pixel that belongs to a different (or no) label.
    mask = cutout != self.label
    cutout[mask] = 0
    return cutout
|
def _update_summary(self, summary=None):
    """Update all parts of the summary or clear when no summary."""
    board_image_label = self._parts['board image label']
    # get content for update or use blanks when no summary
    if summary:
        # make a board image with the swap drawn on it
        # board, action, text = summary.board, summary.action, summary.text
        board_image_cv = self._create_board_image_cv(summary.board)
        self._draw_swap_cv(board_image_cv, summary.action)
        board_image_tk = self._convert_cv_to_tk(board_image_cv)
        # Compose the caption from whichever fields are present.
        text = ''
        if not summary.score is None:
            text += 'Score: {:3.1f}'.format(summary.score)
        if (not summary.mana_drain_leaves is None) and (not summary.total_leaves is None):
            text += '    Mana Drains: {}/{}' ''.format(summary.mana_drain_leaves, summary.total_leaves)
    else:
        # clear any stored state image and use the blank
        board_image_tk = board_image_label._blank_image
        text = ''
    # update the UI parts with the content; keep a reference on the label
    # so the Tk image is not garbage-collected.
    board_image_label._board_image = board_image_tk
    board_image_label.config(image=board_image_tk)
    # update the summary text
    summary_label = self._parts['summary label']
    summary_label.config(text=text)
    # refresh the UI
    self._base.update()
|
def inDignities(self, idA, idB):
    """Returns the dignities of A which belong to B."""
    objA = self.chart.get(idA)
    # Dignity name -> ruling object ID for A's zodiacal position.
    info = essential.getInfo(objA.sign, objA.signlon)
    # Should we ignore exile and fall?
    return [dignity for (dignity, owner) in info.items() if owner == idB]
|
def audio_detection_sensitivity(self):
    """Sensitivity level of Camera audio detection."""
    # No triggers configured -> no sensitivity to report.
    for trigger in (self.triggers or []):
        if trigger.get("type") != "audioAmplitude":
            continue
        sensitivity = trigger.get("sensitivity")
        if sensitivity:
            return sensitivity.get("default")
    return None
|
def extract_fcp_data(raw_data, status):
    """extract data from smcli System_WWPN_Query output.

    Input:
        raw data returned from smcli
    Output:
        newline-joined 5-line records whose status field (second line of
        each record) matches `status` case-insensitively, e.g.::

            status: Free
            fcp_dev_no: 1D2F
            physical_wwpn: C05076E9928051D1
            channel_path_id: 8B
            npiv_wwpn: NONE
    """
    # Drop blank lines; strip spaces/newlines from each remaining line.
    lines = []
    for line in raw_data.split('\n'):
        line = line.strip(' \n')
        if line:
            lines.append(line)
    # Records are 5 lines long; offset 1 within a record holds the status.
    selected = []
    for start in range(0, len(lines), 5):
        record = lines[start:start + 5]
        record_status = record[1].split(':')[-1].strip()
        # only return results matching the requested status
        if record_status.lower() == status.lower():
            selected.extend(record)
    return '\n'.join(selected)
|
def open(self):
    '''Open notification or quick settings.

    Usage:
    d.open.notification()
    d.open.quick_settings()
    '''
    @param_to_property(action=["notification", "quick_settings"])
    def _open(action):
        # Dispatch the accepted action onto its jsonrpc call.
        if action == "notification":
            return self.server.jsonrpc.openNotification()
        return self.server.jsonrpc.openQuickSettings()
    return _open
|
def vdatainfo(self, listAttr=0):
    """Return info about all the file vdatas.

    Args::

        listAttr    Set to 0 to ignore vdatas used to store attribute
                    values, 1 to list them (see the VD._isattr readonly
                    attribute)

    Returns::

        List of vdata descriptions. Each vdata is described as
        a 9-element tuple, composed of the following:

        - vdata name
        - vdata class
        - vdata reference number
        - vdata number of records
        - vdata number of fields
        - vdata number of attributes
        - vdata record size in bytes
        - vdata tag number
        - vdata interlace mode

    C library equivalent : no equivalent
    """
    lst = []
    ref = -1
    # start at beginning
    while True:
        try:
            nxtRef = self.next(ref)
        except HDF4Error:
            # no vdata left
            break
        # Attach the vdata and check for an "attribute" vdata.
        ref = nxtRef
        vdObj = self.attach(ref)
        if listAttr or not vdObj._isattr:
            # Append a list of vdata properties.
            lst.append((vdObj._name, vdObj._class, vdObj._refnum, vdObj._nrecs, vdObj._nfields, vdObj._nattrs, vdObj._recsize, vdObj._tag, vdObj._interlace))
        # Always detach to release the vdata handle before the next step.
        vdObj.detach()
    return lst
|
def GetParentFileEntry(self):
    """Retrieves the parent file entry.

    Returns:
      ZipFileEntry: parent file entry or None if not available.
    """
    location = getattr(self.path_spec, 'location', None)
    if location is None:
        return None
    parent_location = self._file_system.DirnamePath(location)
    if parent_location is None:
        return None
    parent_path_spec = getattr(self.path_spec, 'parent', None)
    if parent_location == '':
        # Empty dirname means we reached the virtual root of the archive.
        parent_location = self._file_system.PATH_SEPARATOR
        is_root = True
        is_virtual = True
    else:
        is_root = False
        is_virtual = False
    path_spec = zip_path_spec.ZipPathSpec(
        location=parent_location, parent=parent_path_spec)
    return ZipFileEntry(
        self._resolver_context, self._file_system, path_spec,
        is_root=is_root, is_virtual=is_virtual)
|
def add_annotation(self, about, content, motivated_by="oa:describing"):
    # type: (str, List[str], str) -> str
    """Cheap URI relativize for current directory and /."""
    self.self_check()
    # Relativize content URIs against the metadata directory and base URI.
    metadata_base = self.base_uri + METADATA + "/"
    relativized = [c.replace(metadata_base, "").replace(self.base_uri, "../")
                   for c in content]
    uri = uuid.uuid4().urn
    self.annotations.append({
        "uri": uri,
        "about": about,
        "content": relativized,
        "oa:motivatedBy": {"@id": motivated_by},
    })
    return uri
|
def add_prefix(self, ncname: str) -> None:
    """Look up ncname and add it to the prefix map if necessary

    @param ncname: name to add
    """
    # Nothing to do if the prefix is already mapped.
    if ncname in self.prefixmap:
        return
    uri = cu.expand_uri(ncname + ':', self.curi_maps)
    if uri and '://' in uri:
        self.prefixmap[ncname] = uri
    else:
        # Unknown prefix: warn and fall back to a placeholder namespace.
        print(f"Unrecognized prefix: {ncname}", file=sys.stderr)
        self.prefixmap[ncname] = f"http://example.org/unknown/{ncname}/"
|
def attachRequest(PTmsiSignature_presence=0, GprsTimer_presence=0, TmsiStatus_presence=0):
    """ATTACH REQUEST Section 9.4.1

    Builds the mandatory IE chain and appends each optional IE whose
    presence flag is set to 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x1)  # 000001
    c = MsNetworkCapability()
    d = AttachTypeAndCiphKeySeqNr()
    f = DrxParameter()
    g = MobileId()
    h = RoutingAreaIdentification()
    i = MsRadioAccessCapability()
    packet = a / b / c / d / f / g / h / i
    # Compare presence flags with ==, not `is`: identity on int literals
    # relies on CPython small-int caching and raises SyntaxWarning on
    # Python >= 3.8.
    if PTmsiSignature_presence == 1:
        j = PTmsiSignature(ieiPTS=0x19)
        packet = packet / j
    if GprsTimer_presence == 1:
        k = GprsTimer(ieiGT=0x17)
        packet = packet / k
    if TmsiStatus_presence == 1:
        l = TmsiStatus(ieiTS=0x9)
        packet = packet / l
    return packet
|
def optimize(self, method="simplex", verbosity=False, tolerance=1e-9, **kwargs):
    """Run the linprog function on the problem. Returns None."""
    # Objective coefficients in variable order; negate for maximization
    # since linprog always minimizes.
    c = np.array([self.objective.get(name, 0) for name in self._variables])
    if self.direction == "max":
        c *= -1
    solution = linprog(
        c, self.A, self.upper_bounds,
        bounds=list(six.itervalues(self.bounds)),
        method=method,
        options={"maxiter": 10000, "disp": verbosity, "tol": tolerance},
        **kwargs)
    self._solution = solution
    self._status = solution.status
    # Primal values and slacks are only meaningful on an optimal solve.
    if SCIPY_STATUS[self._status] == interface.OPTIMAL:
        self._var_primals = solution.x
        self._slacks = solution.slack
    else:
        self._var_primals = None
        self._slacks = None
    self._f = solution.fun
|
def add_inline_l2fw_interface(self, interface_id, second_interface_id, logical_interface_ref=None, vlan_id=None, zone_ref=None, second_zone_ref=None, comment=None):
    """.. versionadded:: 0.5.6
    Requires NGFW engine >= 6.3 and layer 3 FW or cluster

    An inline L2 FW interface is a new interface type for Layer 3 NGFW
    engines version >= 6.3. Traffic passing an Inline Layer 2 Firewall
    interface will have a default action in access rules of Discard.
    Layer 2 Firewall interfaces are not bypass capable, so when NGFW is
    powered off, in an offline state or overloaded, traffic is blocked on
    this interface.

    If the interface does not exist and a VLAN id is specified, the logical
    interface and zones will be applied to the top level physical interface.
    If adding VLANs to an existing inline ips pair, the logical and zones
    will be applied to the VLAN.

    :param str interface_id: interface id; '1-2', '3-4', etc
    :param str logical_interface_ref: logical interface name, href or
        LogicalInterface. If None, 'default_eth' logical interface will be used.
    :param str vlan_id: optional VLAN id for first interface pair
    :param zone_ref: zone for first interface in pair, can be name,
        str href or Zone
    :param second_zone_ref: zone for second interface in pair, can be name,
        str href or Zone
    :raises EngineCommandFailed: failure creating interface
    :return: None

    .. note:: Only a single VLAN is supported on this inline pair type
    """
    # Assemble the interface spec; failure_mode is fixed to 'normal'
    # because L2 FW interfaces are not bypass capable.
    spec = dict(
        interface='inline_l2fw_interface',
        interface_id=interface_id,
        second_interface_id=second_interface_id,
        logical_interface_ref=logical_interface_ref,
        failure_mode='normal',
        vlan_id=vlan_id,
        zone_ref=zone_ref,
        second_zone_ref=second_zone_ref,
        comment=comment)
    return self.add_inline_interface(**spec)
|
def plot_sens_center(self, frequency=2):
    """Plot the sensitivity-center distribution for all configurations in
    config.dat. The centers of mass are colored by the data given in
    volt_file.

    :param frequency: column index used to select one frequency when the
        volt data is two-dimensional (multi-frequency/artificial data)
    """
    try:
        colors = np.loadtxt(self.volt_file, skiprows=1)
    except IOError:
        # BUG FIX: was 'volt_file' (an undefined name), which raised a
        # NameError here and masked the intended error message.
        print('IOError opening {0}'.format(self.volt_file))
        exit()
    # check for 1-dimensionality; multi-frequency data has one column
    # per frequency, so pick the requested one
    if len(colors.shape) > 1:
        print('Artificial or Multi frequency data')
        colors = colors[:, frequency].flatten()
    colors = colors[~np.isnan(colors)]
    # load grid/electrodes and prepare an empty (all-NaN) element plot
    # so only the mesh outline is drawn under the scatter
    elem.load_elem_file(self.elem_file)
    elem.load_elec_file(self.elec_file)
    nr_elements = len(elem.element_type_list[0])
    elem.element_data = np.zeros((nr_elements, 1)) * np.nan
    elem.plt_opt.title = ' '
    elem.plt_opt.reverse = True
    elem.plt_opt.cbmin = -1
    elem.plt_opt.cbmax = 1
    elem.plt_opt.cblabel = self.cblabel
    elem.plt_opt.xlabel = 'x (m)'
    elem.plt_opt.ylabel = 'z (m)'
    fig = plt.figure(figsize=(5, 7))
    ax = fig.add_subplot(111)
    ax, pm, cb = elem.plot_element_data_to_ax(0, ax, scale='linear',
                                              no_cb=True)
    # scatter the sensitivity centers of mass, colored by the volt data
    ax.scatter(self.sens_centers[:, 0], self.sens_centers[:, 1],
               c=colors, s=100, edgecolors='none')
    # manually placed colorbar next to the plot
    cb_pos = mpl_get_cb_bound_next_to_plot(ax)
    ax1 = fig.add_axes(cb_pos, frame_on=True)
    cmap = mpl.cm.jet_r
    norm = mpl.colors.Normalize(vmin=np.nanmin(colors),
                                vmax=np.nanmax(colors))
    mpl.colorbar.ColorbarBase(ax1, cmap=cmap, norm=norm,
                              orientation='vertical')
    fig.savefig(self.output_file, bbox_inches='tight', dpi=300)
|
def get_json(identifier, namespace='cid', domain='compound', operation=None,
             searchtype=None, **kwargs):
    """Request wrapper that automatically parses JSON response and
    suppresses NotFoundError (returning None instead)."""
    try:
        raw = get(identifier, namespace, domain, operation, 'JSON',
                  searchtype, **kwargs)
    except NotFoundError as e:
        log.info(e)
        return None
    return json.loads(raw.decode())
|
def offset(self, points, offsets, along, offset_axis, units="same",
           offset_units="same", mode="valid", method="linear", verbose=True,):
    """Offset one axis based on another axis' values.

    Useful for correcting instrumental artifacts such as zerotune.

    Parameters
    ----------
    points : 1D array-like
        Points.
    offsets : 1D array-like
        Offsets.
    along : str or int
        Axis that points array lies along.
    offset_axis : str or int
        Axis to offset using offsets.
    units : str (optional)
        Units of points array.
    offset_units : str (optional)
        Units of offsets array.
    mode : {'valid', 'full', 'old'} (optional)
        Define how far the new axis will extend. Points outside of valid
        interpolation range will be written nan.
    method : {'linear', 'nearest', 'cubic'} (optional)
        The interpolation method. Note that cubic interpolation is only
        possible for 1D and 2D data. See `griddata`__ for more information.
        Default is linear.
    verbose : bool (optional)
        Toggle talkback. Default is True.

    __ http://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html

        >>> points  # an array of w1 points
        >>> offsets  # an array of d1 corrections
        >>> data.offset(points, offsets, 'w1', 'd1')
    """
    # NOTE(review): this method is currently disabled -- the raise below is
    # the first statement, so everything after it is unreachable legacy
    # code, preserved for reference until the method is reimplemented.
    raise NotImplementedError
    # axis -----
    # resolve 'along' to a positional axis index
    if isinstance(along, int):
        axis_index = along
    elif isinstance(along, str):
        axis_index = self.axis_names.index(along)
    else:
        raise TypeError("along: expected {int, str}, got %s" % type(along))
    axis = self._axes[axis_index]
    # values & points -----
    # get values, points, units
    if units == "same":
        input_units = axis.units
    else:
        input_units = units
    # check offsets is 1D or 0D
    if len(offsets.shape) == 1:
        pass
    else:
        raise RuntimeError("values must be 1D or 0D in offset!")
    # check if units is compatible, convert
    dictionary = getattr(wt_units, axis.units_kind)
    if input_units in dictionary.keys():
        pass
    else:
        raise RuntimeError("units incompatible in offset!")
    points = wt_units.converter(points, input_units, axis.units)
    # create correction array
    # interpolate the user-supplied corrections onto the axis grid
    function = interp1d(points, offsets, bounds_error=False)
    corrections = function(axis[:])
    # remove nans
    # out-of-range corrections (nan) are replaced by edge-padding so the
    # correction array covers the full axis length
    finite_indicies = np.where(np.isfinite(corrections))[0]
    left_pad_width = finite_indicies[0]
    right_pad_width = len(corrections) - finite_indicies[-1] - 1
    corrections = np.pad(corrections[np.isfinite(corrections)],
                         (int(left_pad_width), int(right_pad_width)),
                         mode="edge",)
    # do correction -----
    # transpose so axis is last
    transpose_order = np.arange(len(self._axes))
    transpose_order[axis_index] = len(self._axes) - 1
    transpose_order[-1] = axis_index
    self.transpose(transpose_order, verbose=False)
    # get offset axis index
    if isinstance(offset_axis, int):
        offset_axis_index = offset_axis
    elif isinstance(offset_axis, str):
        offset_axis_index = self.axis_names.index(offset_axis)
    else:
        raise TypeError("offset_axis: expected {int, str}, got %s"
                        % type(offset_axis))
    # new points
    # build the replacement points of the offset axis according to 'mode'
    new_points = [a[:] for a in self._axes]
    old_offset_axis_points = self._axes[offset_axis_index][:]
    spacing = abs((old_offset_axis_points.max()
                   - old_offset_axis_points.min())
                  / float(len(old_offset_axis_points)))
    if mode == "old":
        # keep the original axis; out-of-range values become nan
        new_offset_axis_points = old_offset_axis_points
    elif mode == "valid":
        # shrink axis to the region where every point can be interpolated
        _max = old_offset_axis_points.max() + corrections.min()
        _min = old_offset_axis_points.min() + corrections.max()
        n = int(abs(np.ceil((_max - _min) / spacing)))
        new_offset_axis_points = np.linspace(_min, _max, n)
    elif mode == "full":
        # grow axis to cover the entire corrected range
        _max = old_offset_axis_points.max() + corrections.max()
        _min = old_offset_axis_points.min() + corrections.min()
        n = np.ceil((_max - _min) / spacing)
        new_offset_axis_points = np.linspace(_min, _max, n)
    new_points[offset_axis_index] = new_offset_axis_points
    new_xi = tuple(np.meshgrid(*new_points, indexing="ij"))
    xi = tuple(np.meshgrid(*[a[:] for a in self._axes], indexing="ij"))
    for channel in self.channels:  # 'undo' gridding
        # flatten channel to (coords..., value) rows for griddata
        arr = np.zeros((len(self._axes) + 1, channel[:].size))
        for i in range(len(self._axes)):
            arr[i] = xi[i].flatten()
        arr[-1] = channel[:].flatten()
        # do corrections
        # tile corrections to match the flattened coordinate length
        corrections = list(corrections)
        corrections = corrections * int((len(arr[0]) / len(corrections)))
        arr[offset_axis_index] += corrections
        # grid data
        tup = tuple([arr[i] for i in range(len(arr) - 1)])
        # note that rescale is crucial in this operation
        out = griddata(tup, arr[-1], new_xi, method=method,
                       fill_value=np.nan, rescale=True)
        channel[:] = out
    self._axes[offset_axis_index][:] = new_offset_axis_points
    # transpose out
    self.transpose(transpose_order, verbose=False)
|
def contrast(x, severity=1):
    """Change contrast of images.

    Args:
        x: numpy array, uncorrupted image, assumed to have uint8 pixel in
            [0,255].
        severity: integer, severity of corruption.

    Returns:
        numpy array, image with uint8 pixels in [0,255]. Changed contrast.
    """
    # scale factor shrinks toward 0 as severity grows (lower contrast)
    strengths = (0.4, .3, .2, .1, .05)
    c = strengths[severity - 1]
    scaled = np.array(x) / 255.
    # per-channel mean over the spatial dimensions
    channel_means = np.mean(scaled, axis=(0, 1), keepdims=True)
    # compress deviations from the mean, then map back to [0, 255]
    adjusted = np.clip((scaled - channel_means) * c + channel_means, 0, 1) * 255
    return around_and_astype(adjusted)
|
def verify_data_signature(self, signature_url, data_url, data):
    """Verify data against its remote signature.

    :type signature_url: str
    :param signature_url: remote path to signature for data_url
    :type data_url: str
    :param data_url: url from which data was fetched
    :type data: str
    :param data: content of remote file at data_url
    :raises RepositoryMissingSignatureError: signature could not be fetched
    :raises RepositorySignatureError: signature verification failed
    """
    req = requests.get(signature_url)
    # BUG FIX: was 'req.status_code is 200' -- identity comparison with an
    # int relies on CPython small-int interning; use equality instead.
    if req.status_code == 200:
        # write the signature to a uniquely-named temp file for gpg
        tm = int(time.time())
        datestamp = datetime.utcfromtimestamp(tm).isoformat()
        sigfile = "repo-{0}-tmp.sig".format(datestamp)
        logger.debug("writing {0} to {1}".format(signature_url, sigfile))
        with open(sigfile, 'wb') as f:
            f.write(req.content)
    else:
        raise RepositoryMissingSignatureError(signature_url)
    verified = self.gpg.verify_data(sigfile, data)
    # best-effort cleanup of the temp signature file
    try:
        os.remove(sigfile)
    except OSError:
        pass
    if verified.valid is True:
        logger.debug("verified {0} against {1}".format(data_url,
                                                       signature_url))
    else:
        raise RepositorySignatureError(data_url, signature_url)
|
def remove_redundant_verts(self, eps=1e-10):
    """Given verts and faces, this removes colocated vertices (vertices
    closer than ``eps``), remapping faces to the surviving vertices."""
    import numpy as np
    from scipy.spatial import cKDTree
    # FIXME pylint: disable=no-name-in-module
    face_shape = self.f.shape
    kdtree = cKDTree(self.v)
    close_pairs = list(kdtree.query_pairs(eps))
    if close_pairs:
        # sort each pair so the lower index comes first
        close_pairs = np.sort(close_pairs, axis=1)
        # update faces to not refer to redundant vertices: map each
        # duplicate onto the lowest equivalent vertex index
        remap = np.arange(self.v.shape[0])
        for keep, drop in close_pairs:
            if remap[drop] > keep:
                remap[drop] = keep
        self.f = remap[self.f.flatten()].reshape((-1, 3))
        # get rid of unused verts, and renumber faces accordingly
        kept = np.unique(self.f)
        renumber = np.arange(np.max(self.f) + 1)
        renumber[kept] = np.arange(len(kept))
        self.v = self.v[kept]
        self.f = renumber[self.f].reshape((-1, face_shape[1]))
|
def remake_images_variants(profiles, clear=True):
    """Recreate image variants according to the configured settings.

    :param profiles: list/tuple of profiles whose image variants should be
        recreated, or None for all configured upload profiles. The
        'default' profile is always included.
    :param clear: when True, ALL previously generated variants are deleted
        before the new ones are created.
    :return: tuple ``(removed, remade)`` with the respective counters.
    """
    assert isinstance(profiles, (list, tuple)) or profiles is None
    if profiles is None:
        profiles = dju_settings.DJU_IMG_UPLOAD_PROFILES.keys()
    profiles = set(('default',) + tuple(profiles))
    removed = remade = 0
    for profile in profiles:
        profile_conf = get_profile_configs(profile=profile)
        root_path = os.path.join(settings.MEDIA_ROOT,
                                 dju_settings.DJU_IMG_UPLOAD_SUBDIR,
                                 profile_conf['PATH'])
        if clear:
            # wipe every existing variant file under this profile's root
            for path in get_files_recursive(root_path):
                if dju_settings.DJU_IMG_UPLOAD_VARIANT_SUFFIX in os.path.basename(path):
                    os.remove(path)
                    removed += 1
        for path in get_files_recursive(root_path):
            filename = os.path.basename(path)
            if dju_settings.DJU_IMG_UPLOAD_VARIANT_SUFFIX in filename:
                continue  # variants are outputs, not sources
            if dju_settings.DJU_IMG_UPLOAD_MAIN_SUFFIX not in filename:
                continue  # not an uploaded main image
            name = filename[:filename.find(dju_settings.DJU_IMG_UPLOAD_MAIN_SUFFIX)]
            img_id = '{profile}:{name}'.format(profile=profile, name=name)
            with open(path, 'rb') as source:
                for variant_conf in profile_conf['VARIANTS']:
                    label = variant_conf['LABEL'] or get_variant_label(variant_conf)
                    variant = adjust_image(source,
                                           max_size=variant_conf['MAX_SIZE'],
                                           new_format=variant_conf['FORMAT'],
                                           jpeg_quality=variant_conf['JPEG_QUALITY'],
                                           fill=variant_conf['FILL'],
                                           stretch=variant_conf['STRETCH'],
                                           return_new_image=True)
                    rel_path = get_relative_path_from_img_id(
                        img_id, variant_label=label,
                        ext=image_get_format(variant))
                    save_file(variant, media_path(rel_path))
                    remade += 1
    return removed, remade
|
def get_confidence(self):
    """Return confidence based on existing data."""
    # if we didn't receive any character in our consideration range,
    # return negative answer
    if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
        return SURE_NO
    # every observed char was frequent: maximum (capped) confidence
    if self._mTotalChars == self._mFreqChars:
        return SURE_YES
    ratio = (self._mFreqChars
             / ((self._mTotalChars - self._mFreqChars)
                * self._mTypicalDistributionRatio))
    # normalize confidence (we don't want to be 100% sure)
    return min(ratio, SURE_YES)
|
def imagetransformer_sep_channels_8l_8h_local_and_global_att():
    """Separate rgb embeddings."""
    # start from the 8-layer/8-head base and shrink it, enabling
    # local-and-global attention
    hparams = imagetransformer_sep_channels_8l_8h()
    hparams.batch_size = 1
    hparams.num_heads = 8
    hparams.num_hidden_layers = 4
    hparams.hidden_size = 256
    hparams.filter_size = 256
    hparams.attention_key_channels = hparams.attention_value_channels = 0
    hparams.sampling_method = "random"
    hparams.local_and_global_att = True
    return hparams
|
def autoformat_pep8(sourcecode, **kwargs):
    r"""Autoformat source code with autopep8 using project defaults.

    Args:
        sourcecode (str): the source code to reformat.
        **kwargs: autopep8 options; 'ignore' and 'aggressive' get project
            defaults when not supplied (see autopep8's option list:
            aggressive, diff, exclude, in_place, indent_size, jobs,
            line_range, max_line_length, pep8_passes, recursive, select,
            verbose, ...).

    Returns:
        str: the reformatted source code.

    CommandLine:
        python -m utool.util_str --exec-autoformat_pep8

    Ignore:
        autopep8 --recursive --in-place --ignore E126,E127,E201,E202,E203,E221,E222,E241,E265,E271,E272,E301,E501,W602,E266,N801,N802,N803,N805,N806,N811,N813.
    """
    import autopep8
    default_ignore = {
        'E126',  # continuation line hanging-indent
        'E127',  # continuation line over-indented for visual indent
        'E201',  # whitespace after '('
        'E202',  # whitespace before ']'
        'E203',  # whitespace before ','
        'E221',  # multiple spaces before operator
        'E222',  # multiple spaces after operator
        'E241',  # multiple spaces after ,
        'E265',  # block comment should start with "# "
        'E271',  # multiple spaces after keyword
        'E272',  # multiple spaces before keyword
        'E301',  # expected 1 blank line, found 0
        'E501',  # line length > 79
        'W602',  # Old reraise syntax
        'E266',  # too many leading '#' for block comment
        'N801',  # function name should be lowercase [N806]
        'N802',  # function name should be lowercase [N806]
        'N803',  # argument should be lowercase [N806]
        'N805',  # first argument of a method should be named 'self'
        'N806',  # variable in function should be lowercase [N806]
        'N811',  # constant name imported as non constant
        'N813',  # camel case
    }
    # My defaults (only applied when the caller did not specify them)
    kwargs.setdefault('ignore', default_ignore)
    kwargs.setdefault('aggressive', 1)
    pep8_options = autopep8._get_options(kwargs, False)
    return autopep8.fix_code(sourcecode, pep8_options)
|
def edit(self, customer_id, data=None, **kwargs):
    """Edit Customer information from given dict.

    :param customer_id: id of the customer to update
    :param data: dict of fields to change (defaults to an empty dict)
    :return: Customer Dict which was edited
    """
    # BUG FIX: the default was a mutable dict ({}), shared across all
    # calls; use None as a sentinel instead.
    if data is None:
        data = {}
    url = '{}/{}'.format(self.base_url, customer_id)
    return self.put_url(url, data, **kwargs)
|
def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple
    of username, password.

    :rtype: (str, str)
    """
    components = urlparse(url)
    try:
        # username/password are None when absent; unquote(None) raises
        return unquote(components.username), unquote(components.password)
    except (AttributeError, TypeError):
        return '', ''
|
def upload_news_picture(self, file):
    """Upload an image to be used inside a news (article) message.

    :param file: the file to upload, a file-like object
    :return: the JSON data returned by the API
    """
    upload_url = "https://api.weixin.qq.com/cgi-bin/media/uploadimg"
    return self.post(
        url=upload_url,
        params={"access_token": self.token},
        files={"media": file},
    )
|
def recv(self, timeout=-1):
    """Receive an item from our Sender. This will block unless our Sender
    is ready, either forever or up to *timeout* milliseconds."""
    if not self.ready:
        # sender not ready: suspend until it is (or until timeout)
        return self.pause(timeout=timeout)
    return self.other.handover(self)
|
def _extract(self, resource):
    """Extract a single archive, returns Promise -> path to extraction
    result."""
    # accept a bare path and wrap it into a Resource
    if isinstance(resource, six.string_types):
        resource = resource_lib.Resource(path=resource)
    path = resource.path
    method = resource.extract_method
    if method == resource_lib.ExtractMethod.NO_EXTRACT:
        logging.info('Skipping extraction for %s (method=NO_EXTRACT).', path)
        return promise.Promise.resolve(path)
    # destination name encodes the extraction method so different methods
    # never collide on the same archive
    method_name = resource_lib.ExtractMethod(method).name
    extract_path = os.path.join(
        self._extract_dir, '%s.%s' % (method_name, os.path.basename(path)))
    if not self._force_extraction and tf.io.gfile.exists(extract_path):
        logging.info('Reusing extraction of %s at %s.', path, extract_path)
        return promise.Promise.resolve(extract_path)
    return self._extractor.extract(path, method, extract_path)
|
def modify_ack_deadline(self, items):
    """Modify the ack deadline for the given messages.

    Args:
        items (Sequence[ModAckRequest]): The items to modify.
    """
    request = types.StreamingPullRequest(
        modify_deadline_ack_ids=[item.ack_id for item in items],
        modify_deadline_seconds=[item.seconds for item in items],
    )
    self._manager.send(request)
|
def _find_group_coordinator_id(self, group_id):
    """Find the broker node_id of the coordinator of the given group.

    Sends a FindCoordinatorRequest message to the cluster and blocks until
    the FindCoordinatorResponse is received. Any errors are immediately
    raised.

    :param group_id: The consumer group ID. This is typically the group
        name as a string.
    :return: The node_id of the broker that is the coordinator.
    """
    # Note: Java may change how this is implemented in KAFKA-6791.
    #
    # TODO add support for dynamically picking version of
    # GroupCoordinatorRequest which was renamed to FindCoordinatorRequest.
    # When I experimented with this, GroupCoordinatorResponse_v1 didn't
    # match GroupCoordinatorResponse_v0 and I couldn't figure out why.
    request = GroupCoordinatorRequest[0](group_id)
    response = self._send_request_to_node(
        self._client.least_loaded_node(), request)
    # use the extra error checking in add_group_coordinator() rather than
    # immediately returning the group coordinator.
    if not self._client.cluster.add_group_coordinator(group_id, response):
        error_type = Errors.for_code(response.error_code)
        assert error_type is not Errors.NoError
        # Note: When error_type.retriable, Java will retry... see
        # KafkaAdminClient's handleFindCoordinatorError method
        raise error_type(
            "Could not identify group coordinator for group_id '{}' from response '{}'."
            .format(group_id, response))
    coordinator = self._client.cluster.coordinator_for_group(group_id)
    # None would mean the coordinator was never populated, which should
    # never happen here; -1 would mean add_group_coordinator() failed, but
    # by this point that error would already have been raised.
    assert coordinator is not None
    assert coordinator != -1
    return coordinator
|
def load_streets(self, filename):
    """Load up all streets in lowercase for easier matching.

    The file should have one street per line, with no extra characters.
    This isn't strictly required, but will vastly increase the accuracy.
    """
    with open(filename, 'r') as street_file:
        self.streets.extend(line.strip().lower() for line in street_file)
|
def t_bin_ZERO(t):
    r'[^01]'
    # PLY token rule: the raw-string docstring above IS the token's regex
    # (any character that is not a binary digit) -- do not edit it.
    # A non-binary character terminates the binary literal: leave the 'bin'
    # lexer state, emit a NUMBER token with value 0 (presumably this rule
    # only fires when the accumulated binary value is zero -- TODO confirm
    # against the surrounding lexer rules), and push the terminating
    # character back so it is re-scanned in the INITIAL state.
    t.lexer.begin('INITIAL')
    t.type = 'NUMBER'
    t.value = 0
    t.lexer.lexpos -= 1  # rewind one char so it is tokenized again
    return t
|
def calculate_curvature_tangent(self, step_range=False, step=None,
                                store_tangent=False):
    """Calculate curvatures and tangent vectors along the helical axis.

    The curvature and tangent vectors are calculated using the
    Frenet-Serret formula. The calculated values are stored in the
    ``DNA.data`` dictionary and also in the HDF5 file.

    Parameters
    ----------
    step_range : bool
        * ``step_range=True``: calculate curvature and tangent vectors for
          the given range of base-steps.
        * ``step_range=False``: calculate for the entire DNA. If the
          smoothed helical axis of any base-step is not available, an
          error is raised.
    step : list
        Two-element list ``[lower, higher]`` limiting the base-step range;
        only used with ``step_range=True``. First number must be less than
        the second. Example for base-step 4 to 15: ``step=[4, 15]``.
    store_tangent : bool
        * ``store_tangent=True``: keep the calculated tangent vectors for
          later use. With an HDF5 backend the tangents go to the file at
          no memory cost; without one, storing them in ``DNA.data`` is
          memory-expensive.
        * ``store_tangent=False``: discard the tangent vectors.
    """
    if not self.smooth_axis:
        raise ValueError("The helical axis is not smooth. At first, smooth the axis using generate_smooth_axis() method as described in http://rjdkmr.github.io/do_x3dna/apidoc.html#dnaMD.DNA.generate_smooth_axis.")
    # IDIOM FIX: compare to None with 'is', not '=='
    if step_range and step is None:
        raise ValueError("See, documentation for step and step_range usage!!!")
    if step_range:
        if len(step) != 2:
            raise ValueError("See, documentation for step usage!!!")
        if step[0] > step[1]:
            raise ValueError("See, documentation for step usage!!!")
        X, bp_idx = self.get_parameters('helical x-axis smooth', step, bp_range=True)
        Y, bp_idx = self.get_parameters('helical y-axis smooth', step, bp_range=True)
        Z, bp_idx = self.get_parameters('helical z-axis smooth', step, bp_range=True)
    else:
        X, bp_idx = self.get_parameters('helical x-axis smooth', [1, self.num_step], bp_range=True)
        Y, bp_idx = self.get_parameters('helical y-axis smooth', [1, self.num_step], bp_range=True)
        Z, bp_idx = self.get_parameters('helical z-axis smooth', [1, self.num_step], bp_range=True)
    # transpose so the first index runs over trajectory frames
    X = np.asarray(X).T
    Y = np.asarray(Y).T
    Z = np.asarray(Z).T
    curvature, tangent = [], []
    for i in range(len(self.time)):  # Curvature calculation per frame
        xyz = np.vstack((X[i], Y[i], Z[i])).T
        T, N, B, k_temp, t_temp = frenet_serret(xyz)
        curvature.append(k_temp.flatten())
        if store_tangent:
            tangent.append(T)
    # back to (base-step, frame) layout for storage
    curvature = np.asarray(curvature).T
    for i in range(len(bp_idx)):
        bp_num = str(bp_idx[i] + self.startBP)
        self._set_data(curvature[i], 'bps', bp_num, 'curvature', scaleoffset=3)
    if store_tangent:
        tangent = np.asarray(tangent)
        # regroup tangents from per-frame lists into per-base-step time
        # series before storing
        final_tan = []
        for i in range(len(tangent[0])):
            temp = []
            for j in range(len(tangent)):
                temp.append(tangent[j][i])
            final_tan.append(np.asarray(temp))
        for i in range(len(bp_idx)):
            bp_num = str(bp_idx[i] + self.startBP)
            self._set_data(np.asarray(final_tan[i]), 'bps', bp_num, 'tangent', scaleoffset=3)
|
def start_server(self):
    """Starts the TCP stream server, binding to the configured host and
    port. Host and port are configured via the command line arguments.

    .. note:: The server does not process requests unless
        :meth:`handle` is called in regular intervals.
    """
    if self._server is not None:
        return  # already running; starting twice would rebind the port
    if self._options.telnet_mode:
        # telnet clients expect CRLF line endings
        self.interface.in_terminator = '\r\n'
        self.interface.out_terminator = '\r\n'
    self._server = StreamServer(self._options.bind_address,
                                self._options.port, self.interface,
                                self.device_lock)
|
async def main(loop):
    """Log packets from Bus."""
    # Route pyvlx debug output to stderr
    PYVLXLOG.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    PYVLXLOG.addHandler(handler)
    # Connect to the KLF 200 and load its state
    pyvlx = PyVLX('pyvlx.yaml', loop=loop)
    await pyvlx.load_scenes()
    await pyvlx.load_nodes()
    # ... and wait while packets are logged; increase this timeout if you
    # want to log for a longer time. :)
    await asyncio.sleep(90)
    # Cleanup, KLF 200 is terrible in handling lost connections
    await pyvlx.disconnect()
|
def _create_options(self, items):
    """Helper method to create options from a list or a single instance.

    Applies the preprocess method, if available, to create a uniform
    output: an OrderedDict keyed by each option's name.
    """
    options = coerce_to_list(items, self.preprocess)
    return OrderedDict((option.name, option) for option in options)
|
def start(self):
    """Starts the camera recording process."""
    # Mark as started before spawning the worker so the flag is visible
    # as soon as this call begins.
    self._started = True
    # _Camera is the recording worker, driven through the command queue
    # (_cmd_q) with the configured resolution/codec/fps/rate.
    self._camera = _Camera(self._actual_camera, self._cmd_q, self._res, self._codec, self._fps, self._rate)
    self._camera.start()
|
def rename(self, **kwargs):
    '''Rename series in the group.

    Keyword arguments map old series names to new names, e.g.
    ``rename(old_name='new_name')``. Names not present in the group are
    silently ignored.
    '''
    # BUG FIX: dict.iteritems() is Python 2 only; items() works on both.
    for old, new in kwargs.items():
        if old in self.groups:
            # move the series under its new name
            self.groups[new] = self.groups.pop(old)
|
def from_string(cls, string, format_=None, fps=None, **kwargs):
    """Load subtitle file from string.

    See :meth:`SSAFile.load()` for full description.

    Arguments:
        string (str): Subtitle file in a string. Note that the string
            must be Unicode (in Python 2).

    Returns:
        SSAFile

    Example:
        >>> text = '''
        ... 00:00:00,000 --> 00:00:05,000
        ... An example SubRip file.
        ... '''
        >>> subs = SSAFile.from_string(text)
    """
    # wrap the string in a file-like object and reuse the file loader
    buffer = io.StringIO(string)
    return cls.from_file(buffer, format_, fps=fps, **kwargs)
|
def get(self, name):
    """Returns a Vxlan interface as a set of key/value pairs.

    The Vxlan interface resource returns the following:

    * name (str): The name of the interface
    * type (str): Always returns 'vxlan'
    * source_interface (str): The vxlan source-interface value
    * multicast_group (str): The vxlan multicast-group value
    * udp_port (int): The vxlan udp-port value
    * vlans (dict): The vlan to vni mappings
    * flood_list (list): The list of global VTEP flood list
    * multicast_decap (bool): If the multicast decap feature is configured

    Args:
        name (str): The interface identifier to retrieve from the
            running-configuration

    Returns:
        A Python dictionary object of key/value pairs that represents
        the interface configuration. If the specified interface
        does not exist, then None is returned.
    """
    config = self.get_block('^interface %s' % name)
    if not config:
        return None
    response = super(VxlanInterface, self).get(name)
    response.update(dict(name=name, type='vxlan'))
    # merge in every vxlan-specific attribute parsed from the config block
    parsers = (
        self._parse_source_interface,
        self._parse_multicast_group,
        self._parse_udp_port,
        self._parse_vlans,
        self._parse_flood_list,
        self._parse_multicast_decap,
    )
    for parse in parsers:
        response.update(parse(config))
    return response
|
def unflatten(flat_weights):
    """Pivot weights from long DataFrame into weighting matrix.

    Parameters
    ----------
    flat_weights : pandas.DataFrame
        A long DataFrame of weights, where columns are "date", "contract",
        "generic", "weight" and optionally "key". If a "key" column is
        present, a dictionary of unflattened DataFrames is returned with
        the dictionary keys corresponding to the "key" column and each sub
        DataFrame containing rows for this key.

    Returns
    -------
    A DataFrame or dict of DataFrames of instrument weights with a
    MultiIndex where the top level contains pandas.Timestamps and the
    second level is instrument names. The columns consist of generic
    names. If a dict is returned, its keys correspond to the "key" column
    of the input.

    Example
    -------
    >>> import pandas as pd
    >>> from pandas import Timestamp as TS
    >>> import mapping.util as util
    >>> long_wts = pd.DataFrame(
    ...     {"date": [TS('2015-01-03')] * 4 + [TS('2015-01-04')] * 4,
    ...      "contract": ['CLF5'] * 2 + ['CLG5'] * 4 + ['CLH5'] * 2,
    ...      "generic": ["CL1", "CL2"] * 4,
    ...      "weight": [1, 0, 0, 1, 1, 0, 0, 1]}
    ... ).loc[:, ["date", "contract", "generic", "weight"]]
    >>> util.unflatten(long_wts)

    See also: calc_rets()
    """  # NOQA
    # BUG FIX: Index.contains() was deprecated in pandas 0.25 and removed
    # in 1.0; membership is tested with the 'in' operator instead.
    if "key" in flat_weights.columns:
        weights = {}
        for key in flat_weights.loc[:, "key"].unique():
            flt_wts = flat_weights.loc[flat_weights.loc[:, "key"] == key, :]
            flt_wts = flt_wts.drop(labels="key", axis=1)
            wts = flt_wts.pivot_table(index=["date", "contract"],
                                      columns=["generic"],
                                      values=["weight"])
            # drop the synthetic 'weight' level from the pivoted columns
            wts.columns = wts.columns.droplevel(0)
            weights[key] = wts
    else:
        weights = flat_weights.pivot_table(index=["date", "contract"],
                                           columns=["generic"],
                                           values=["weight"])
        weights.columns = weights.columns.droplevel(0)
    return weights
|
def paramsReport(self):
    """See docs for `Model` abstract base class."""
    report = {}
    for name in self._REPORTPARAMS:
        value = getattr(self, name)
        if isinstance(value, float):
            # scalar parameter: report as-is
            report[name] = value
            continue
        if isinstance(value, scipy.ndarray) and value.shape == (3, N_NT):
            # (3, N_NT) array: one entry per codon position / nucleotide
            for pos in range(3):
                for nt in range(N_NT - 1):
                    key = '{0}{1}{2}'.format(name, pos, INDEX_TO_NT[nt])
                    report[key] = value[pos][nt]
            continue
        raise ValueError("Unexpected param: {0}".format(name))
    return report
|
def http_form_post_message(message, location, relay_state="",
                           typ="SAMLRequest", **kwargs):
    """The HTTP POST binding defines a mechanism by which SAML protocol
    messages may be transmitted within the base64-encoded content of an
    HTML form control.

    :param message: The message
    :param location: Where the form should be posted to
    :param relay_state: for preserving and conveying state information
    :return: A dict containing header information and a HTML message body.
    """
    # normalize the message to bytes before encoding
    if not isinstance(message, six.string_types):
        message = str(message)
    if not isinstance(message, six.binary_type):
        message = message.encode('utf-8')
    # only actual SAML payloads get base64-encoded
    if typ in ("SAMLRequest", "SAMLResponse"):
        payload = base64.b64encode(message)
    else:
        payload = message
    payload = payload.decode('ascii')
    saml_response_input = HTML_INPUT_ELEMENT_SPEC.format(
        name=cgi.escape(typ), val=cgi.escape(payload), type='hidden')
    relay_state_input = ""
    if relay_state:
        relay_state_input = HTML_INPUT_ELEMENT_SPEC.format(
            name='RelayState', val=cgi.escape(relay_state), type='hidden')
    response = HTML_FORM_SPEC.format(
        saml_response_input=saml_response_input,
        relay_state_input=relay_state_input,
        action=location)
    return {"headers": [("Content-type", "text/html")], "data": response}
|
def alias(self, aliases, stats):
    """Apply the login/email alias if configured."""
    login = email = None
    if stats is None:
        return
    # Attempt to use alias directly from the config section
    try:
        config = dict(Config().section(stats))
        if "email" in config:
            email = config["email"]
        if "login" in config:
            login = config["login"]
    except (ConfigFileError, NoSectionError):
        pass
    # Check for aliases specified in the email string
    if aliases is not None:
        try:
            pairs = [re.split(r"\s*:\s*", definition, 1)
                     for definition in re.split(r"\s*;\s*", aliases.strip())]
            aliases = dict(pairs)
        except ValueError:
            raise ConfigError(
                "Invalid alias definition: '{0}'".format(aliases))
        if stats in aliases:
            # '@' distinguishes an email alias from a login alias
            if "@" in aliases[stats]:
                email = aliases[stats]
            else:
                login = aliases[stats]
    # Update login/email if alias detected
    if email is not None:
        self.email = email
        log.info("Using email alias '{0}' for '{1}'".format(email, stats))
        if login is None:
            login = email.split("@")[0]
    if login is not None:
        self.login = login
        log.info("Using login alias '{0}' for '{1}'".format(login, stats))
|
def set_interact_items(glob):
    """This function prepares the interaction items for inclusion in main script's
    global scope.

    Installs ``InteractiveConsole``, ``interact``, ``compile_command`` and
    ``RemoteInteractiveConsole`` into ``glob``.  All of them become no-ops
    when the "interact" option is disabled on the parsed arguments.

    :param glob: main script's global scope dictionary reference
    """
    a, l = glob['args'], glob['logger']
    # Honor a possibly renamed "interact" option (argument-name collisions are
    # remapped through args._collisions).
    enabled = getattr(a, a._collisions.get("interact") or "interact", False)
    if enabled:
        readline.parse_and_bind('tab: complete')
    # InteractiveConsole as defined in the code module, but handling a banner
    # using the logging of tinyscript
    class InteractiveConsole(BaseInteractiveConsole, object):
        def __init__(self, banner=None, namespace=None, filename='<console>', exitmsg=None):
            if enabled:
                self.banner = banner
                self.exitmsg = exitmsg
                # Expose the main script's globals (optionally augmented with
                # a caller-supplied namespace) inside the console.
                # NOTE(review): this mutates glob in place — TODO confirm that
                # leaking the extra namespace into the script's globals is
                # intended.
                ns = glob
                ns.update(namespace or {})
                super(InteractiveConsole, self).__init__(locals=ns, filename=filename)
        def __enter__(self):
            # Banner is emitted through the tinyscript logger, not stdout.
            if enabled and self.banner is not None:
                l.interact(self.banner)
            return self
        def __exit__(self, *args):
            if enabled and self.exitmsg is not None:
                l.interact(self.exitmsg)
        def interact(self, *args, **kwargs):
            # Silently do nothing when interaction is disabled.
            if enabled:
                super(InteractiveConsole, self).interact(*args, **kwargs)
    glob['InteractiveConsole'] = InteractiveConsole
    def interact(banner=None, readfunc=None, namespace=None, exitmsg=None):
        # Function-style counterpart of InteractiveConsole, mirroring
        # code.interact() but logging banner/exitmsg via the logger.
        if enabled:
            if banner is not None:
                l.interact(banner)
            ns = glob
            ns.update(namespace or {})
            base_interact(readfunc=readfunc, local=ns)
            if exitmsg is not None:
                l.interact(exitmsg)
    glob['interact'] = interact
    # compile_command is only useful when interaction is on; otherwise stub it.
    glob['compile_command'] = compile_command if enabled else lambda *a, **kw: None
    # ConsoleSocket for handling duplicating std*** to a socket for the
    # RemoteInteractiveConsole
    host = getattr(a, a._collisions.get("host") or "host", None)
    port = getattr(a, a._collisions.get("port") or "port", None)
    # custom socket, for handling the bindings of stdXXX through a socket
    class ConsoleSocket(socket.socket):
        def readline(self, nbytes=2048):
            # File-like read interface backed by the socket.
            return self.recv(nbytes)
        def write(self, *args, **kwargs):
            # File-like write interface backed by the socket.
            return self.send(*args, **kwargs)
    # RemoteInteractiveConsole as defined in the code module, but handling
    # interaction through a socket
    class RemoteInteractiveConsole(InteractiveConsole):
        def __init__(self, *args, **kwargs):
            if enabled:
                # open a socket
                self.socket = ConsoleSocket()
                self.socket.connect((str(host), port))
                # save STDIN, STDOUT and STDERR
                # (name-mangled to _RemoteInteractiveConsole__std*)
                self.__stdin = sys.stdin
                self.__stdout = sys.stdout
                self.__stderr = sys.stderr
                # rebind STDIN, STDOUT and STDERR to the socket
                sys.stdin = sys.stdout = sys.stderr = self.socket
                # now initialize the interactive console
                super(RemoteInteractiveConsole, self).__init__(*args, **kwargs)
        def __exit__(self, *args):
            if enabled:
                super(RemoteInteractiveConsole, self).__exit__(*args)
                self.socket.close()
                self.close()
        def close(self):
            if enabled:
                # restore STDIN, STDOUT and STDERR
                sys.stdin = self.__stdin
                sys.stdout = self.__stdout
                sys.stderr = self.__stderr
    glob['RemoteInteractiveConsole'] = RemoteInteractiveConsole
|
def load_initial_files(self, locations, in_tab_pages=False, hsplit=False, vsplit=False):
    """Load a list of files.

    The first location is opened in the current buffer; every following one
    is placed according to the (mutually exclusive) layout flags: a new tab
    page, a horizontal split, a vertical split, or a plain buffer.
    """
    # At most one layout option may be requested.
    assert in_tab_pages + hsplit + vsplit <= 1
    # When no files were given, open at least one empty buffer.
    to_open = list(locations) if locations else [None]
    first, remainder = to_open[0], to_open[1:]
    self.window_arrangement.open_buffer(first)
    for location in remainder:
        if in_tab_pages:
            self.window_arrangement.create_tab(location)
        elif hsplit:
            self.window_arrangement.hsplit(location=location)
        elif vsplit:
            self.window_arrangement.vsplit(location=location)
        else:
            self.window_arrangement.open_buffer(location)
    # Focus the first tab again.
    self.window_arrangement.active_tab_index = 0
    if locations and len(locations) > 1:
        self.show_message('%i files loaded.' % len(locations))
|
def ecef2aer(x: float, y: float, z: float, lat0: float, lon0: float, h0: float, ell=None, deg: bool = True) -> Tuple[float, float, float]:
    """Compute azimuth, elevation and slant range from an observer to an ECEF point.

    The ECEF target location is expressed in meters.

    Parameters
    ----------
    x : float or numpy.ndarray of float
        ECEF x coordinate (meters)
    y : float or numpy.ndarray of float
        ECEF y coordinate (meters)
    z : float or numpy.ndarray of float
        ECEF z coordinate (meters)
    lat0 : float
        Observer geodetic latitude
    lon0 : float
        Observer geodetic longitude
    h0 : float
        observer altitude above geodetic ellipsoid (meters)
    ell : Ellipsoid, optional
        reference ellipsoid
    deg : bool, optional
        degrees input/output (False: radians in/out)

    Returns
    -------
    az : float or numpy.ndarray of float
        azimuth to target
    el : float or numpy.ndarray of float
        elevation to target
    srange : float or numpy.ndarray of float
        slant range [meters]
    """
    # Convert the target to local ENU coordinates, then to azimuth/elevation/range.
    enu = ecef2enu(x, y, z, lat0, lon0, h0, ell, deg=deg)
    return enu2aer(*enu, deg=deg)
|
# NOTE(review): the following lines are extraneous scraped web-page text, not
# Python code; kept as a comment so the module remains parseable:
#   "Subsets and Splits"
#   "No community queries yet"
#   "The top public SQL queries from the community will appear here once available."