signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def set_taker(self, resource_id):
    """Sets the resource who will be taking this assessment.

    arg:    resource_id (osid.id.Id): the resource Id
    raise:  InvalidArgument - ``resource_id`` is invalid
    raise:  NoAccess - ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.resource.ResourceForm.set_avatar_template
    metadata = self.get_taker_metadata()
    if metadata.is_read_only():
        raise errors.NoAccess()
    if not self._is_valid_id(resource_id):
        raise errors.InvalidArgument()
    self._my_map['takerId'] = str(resource_id)
|
def delete_after(filename):
    """Decorator to be sure the file given by parameter is deleted after the
    execution of the method.

    Deletion happens whether the wrapped callable returns normally or raises;
    both plain files and directory trees are handled.
    """
    def delete_after_decorator(function):
        def wrapper(*args, **kwargs):
            def _cleanup():
                # Remove whichever kind of path exists once the call is done.
                if os.path.isfile(filename):
                    os.remove(filename)
                if os.path.isdir(filename):
                    shutil.rmtree(filename)
            try:
                return function(*args, **kwargs)
            finally:
                _cleanup()
        return wrapper
    return delete_after_decorator
|
def markdown_single_text(self, catalog, cdli_number):
    """Prints single text in file in markdown.

    :param catalog: text ingested by cdli_corpus
    :param cdli_number: text you wish to print
    :return: output in filename.md
    """
    if cdli_number not in catalog:
        # Unknown text: leave self.markdown_text untouched.
        return
    entry = catalog[cdli_number]
    # Each multi-line section is joined with newline + tab for markdown layout.
    joined = {key: '\n\t'.join(entry[key])
              for key in ('metadata', 'transliteration', 'normalization', 'translation')}
    self.markdown_text = """{edition}
{pnum}
---
### metadata
{metadata}
### transliteration
{trans}
### normalization
{norm}
### translation
{translation}
""".format(pnum=entry['pnum'],
           edition=entry['edition'],
           metadata=joined['metadata'],
           trans=joined['transliteration'],
           norm=joined['normalization'],
           translation=joined['translation'])
|
def locale(self) -> tornado.locale.Locale:
    """The locale for the current session.

    Determined by either `get_user_locale`, which you can override to
    set the locale based on, e.g., a user preference stored in a
    database, or `get_browser_locale`, which uses the ``Accept-Language``
    header.

    .. versionchanged: 4.1
       Added a property setter.
    """
    if not hasattr(self, "_locale"):
        # Prefer an explicit user preference; fall back to the browser header.
        user_locale = self.get_user_locale()
        self._locale = user_locale if user_locale is not None else self.get_browser_locale()
    assert self._locale
    return self._locale
|
def digicam_control_encode(self, target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value):
    '''
    Control on-board Camera Control System to take shots.

    target_system     : System ID (uint8_t)
    target_component  : Component ID (uint8_t)
    session           : 0: stop, 1: start or keep it up // Session control e.g. show/hide lens (uint8_t)
    zoom_pos          : 1 to N // Zoom's absolute position (0 means ignore) (uint8_t)
    zoom_step         : -100 to 100 // Zooming step value to offset zoom from the current position (int8_t)
    focus_lock        : 0: unlock focus or keep unlocked, 1: lock focus or keep locked, 3: re-lock focus (uint8_t)
    shot              : 0: ignore, 1: shot or start filming (uint8_t)
    command_id        : Command Identity (incremental loop: 0 to 255) // A command sent multiple times will be executed or pooled just once (uint8_t)
    extra_param       : Extra parameters enumeration (0 means ignore) (uint8_t)
    extra_value       : Correspondent value to given extra_param (float)
    '''
    # Pure builder: wraps the arguments into a message object; no I/O here.
    return MAVLink_digicam_control_message(target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value)
|
def register_handler(self, callable_obj, entrypoint, methods=('GET',)):
    """Register a handler callable to a specific route.

    Args:
        entrypoint (str): The uri relative path.
        methods (tuple): A tuple of valid method strings.
        callable_obj (callable): The callable object.

    Returns:
        The Router instance (for chaining purposes).

    Raises:
        RouteError, for missing routing params or invalid callable
        object type.
    """
    wrapped = Route.wrap_callable(uri=entrypoint, methods=methods, callable_obj=callable_obj)
    # Guard clause: reject invalid routes up front, register otherwise.
    if not wrapped.is_valid:
        raise RouteError(  # pragma: no cover
            "Missing params: methods: {} - entrypoint: {}".format(methods, entrypoint))
    self._routes.add(wrapped)
    return self
|
def set_values(self, x):
    """Update ``self.theta`` from the input sample(s) *x*.

    The input is promoted to a 2-D array, its real part taken, and
    projected through the cached inverse matrix ``self.__C_inv__``.

    :param x: array-like input; promoted to shape (n_samples, n_features)
    :return: the updated ``theta`` array (also stored on ``self.theta``)
    """
    # NOTE: the original docstring claimed "No returns values", but the
    # method has always returned theta; the docs now match the code.
    x = numpy.atleast_2d(x).real
    theta = numpy.dot(x, self.__C_inv__)
    self.theta = theta
    return theta
|
def __is_valid_for_dict_to_object_conversion(strict_mode: bool, from_type: Type, to_type: Type) -> bool:
    """Return True if the provided types are valid for dict_to_object conversion.

    Explicitly declares that we are not able to parse collections, nor able to
    create an object from a dictionary if the object's constructor is not
    correctly PEP484-specified.

    None should be treated as a Joker here (but we know that from_type and
    to_type will never be None at the same time).

    :param strict_mode: unused in the current logic (kept for interface
        compatibility; the stricter subclass-scanning path was removed as
        dead, commented-out code)
    :param from_type: source type (may be None - joker)
    :param to_type: destination type (may be None - joker)
    :return: whether a dict can be converted into ``to_type``
    """
    if to_type is None or is_any_type(to_type):
        # 'None' (joker) or 'any' destination: always accept.
        return True
    if is_collection(to_type, strict=True):
        # A type that is *strictly* a collection (not a subclass of one) has no
        # PEP484-typed constructor we can exploit here.
        return False
    try:
        # Can we find enough PEP-484 information in the constructor to know
        # what is required?
        get_constructor_attributes_types(to_type)
        return True
    except TypeInformationRequiredError as main_e:
        # Failed: we can't guess the required types of constructor arguments.
        if should_display_warnings_for(to_type):
            default_logger.warn(
                'WARNING: Object constructor signature for type {} does not allow parsyfiles to '
                'automatically create instances from dict content. Caught {}: {}'
                ''.format(get_pretty_type_str(to_type), type(main_e).__name__, main_e))
        return False
|
def get_internal_project(self, timeout: float = 1) -> typing.Union['projects.Project', None]:
    """Attempts to return the internally loaded project.

    Guards against race conditions when projects are loaded via threads by
    polling ``self.internal_project`` every 0.1 seconds until it becomes
    available or the timeout is reached.

    :param timeout:
        Maximum number of seconds to wait before giving up and returning
        None.
    """
    attempts = int(timeout / 0.1)
    for _attempt in range(attempts):
        candidate = self.internal_project
        if candidate:
            return candidate
        time.sleep(0.1)
    # Last chance: return whatever is loaded now (possibly None).
    return self.internal_project
|
def can_subscribe_to_topic(self, topic, user):
    """Given a topic, checks whether the user can add it to their subscription list."""
    # A user can subscribe to a topic only when they are authenticated, have
    # not already subscribed to it, and can read the related forum.
    if not user.is_authenticated:
        return False
    if topic.has_subscriber(user):
        return False
    return self._perform_basic_permission_check(topic.forum, user, 'can_read_forum')
|
def wait(self):
    """Block until an event configured by ``set_threshold`` happens.

    Reads the 64-bit eventfd counter and returns it as a 1-tuple, as
    produced by ``struct.unpack``.
    """
    # BUG FIX: os.read() requires an int byte count; under Python 3,
    # 64 / 8 yields the float 8.0 and raises TypeError. Use integer
    # division (8 bytes == 64 bits).
    data = os.read(self.event_fd, 64 // 8)
    return struct.unpack('Q', data)
|
def route(self, path):  # type: (str) -> Tuple[Any, Callable]
    """Returns the task handling the given request path."""
    logging.getLogger(__name__).debug("Routing path '%s'.", path)
    # First strategy that claims the path wins; its result may still be None.
    matching = next((s for s in self._strategies if s.can_route(path)), None)
    handler_cls = matching.route(path) if matching is not None else None
    if handler_cls is None:
        raise RoutingError(path)
    return self._create_result(handler_cls)
|
def _calc_resp(password_hash, server_challenge):
    """Generate the LM response given a 16-byte password hash and the
    challenge from the CHALLENGE_MESSAGE.

    :param password_hash: A 16-byte password hash
    :param server_challenge: A random 8-byte response generated by the
        server in the CHALLENGE_MESSAGE
    :return res: A 24-byte buffer to contain the LM response upon return
    """
    # Zero-pad the hash to 21 bytes, then DES-encrypt the 8-byte challenge
    # with each of the three 7-byte key chunks and concatenate the results.
    padded = password_hash + b'\x00' * (21 - len(password_hash))
    challenge = server_challenge[0:8]
    chunks = (padded[0:7], padded[7:14], padded[14:21])
    return b''.join(
        DES(DES.key56_to_key64(chunk)).encrypt(challenge) for chunk in chunks)
|
def list_all_native_quantities(self, with_info=False):
    """Return a list of all available native quantities in this catalog.

    If *with_info* is `True`, return a dict with quantity info.
    See also: list_all_quantities
    """
    native = self._native_quantities
    if with_info:
        return {name: self.get_quantity_info(name) for name in native}
    return list(native)
|
def channels_voice_greeting_recording(self, id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/voice-api/greetings#get-greeting-audio-file"
    # Build the endpoint path and delegate to the shared call helper.
    endpoint = "/api/v2/channels/voice/greetings/{id}/recording.mp3".format(id=id)
    return self.call(endpoint, **kwargs)
|
def draw_actions(self):
    """Draw the actions so that they can be inspected for accuracy."""
    now = time.time()
    for action in self._past_actions:
        # Skip actions with no position or whose deadline has passed.
        if not action.pos or now >= action.deadline:
            continue
        # Fraction of the action's lifetime still remaining (1.0 -> 0.0).
        remain = (action.deadline - now) / (action.deadline - action.time)
        if isinstance(action.pos, point.Point):
            self.all_surfs(_Surface.draw_circle, action.color, action.pos, remain / 3, 1)
        else:
            # Fade with alpha would be nice, but doesn't seem to work.
            self.all_surfs(_Surface.draw_rect, action.color, action.pos, 1)
|
def stage(self):
    """Stage Redis and ThreatConnect data defined in the profile.

    Iterates ``self.staging_data`` and dispatches each entry on its
    ``data_type`` field (defaulting to ``redis``):

    * ``redis`` -- stage a single variable/value pair in Redis.
    * ``redis-array`` / ``redis_array`` -- read each referenced variable
      (binary-aware), optionally extract a sub-path, and stage the collected
      list under the entry's output variable.
    * ``threatconnect`` -- stage group/indicator data in ThreatConnect.
    * ``threatconnect-association`` -- stage an association between two
      entities.
    * ``threatconnect-batch`` -- stage batch data in ThreatConnect.

    Exits the process with an error message when an entry is not a dict.
    """
    for entry in self.staging_data:
        if not isinstance(entry, dict):
            # Reported issue from QA where staging data is invalid.
            msg = 'Invalid staging data provided ({}).'.format(entry)
            sys.exit(msg)
        data_type = entry.get('data_type', 'redis')
        if data_type == 'redis':
            self.log.debug('Stage Redis Data')
            self.stage_redis(entry.get('variable'), entry.get('data'))
        elif data_type in ('redis-array', 'redis_array'):
            self.log.debug('Stage Redis Array')
            out_variable = entry.get('variable')
            # Collect each referenced variable's value into an array.
            collected = []
            for var in entry.get('data', {}).get('variables') or []:
                variable = var.get('value')
                if variable.endswith('Binary'):
                    value = self.tcex.playbook.read_binary(variable, False, False)
                elif variable.endswith('BinaryArray'):
                    value = self.tcex.playbook.read_binary_array(variable, False, False)
                else:
                    value = self.path_data(self.tcex.playbook.read(variable), var.get('path'))
                # TODO: should None value be appended?
                collected.append(value)
            self.stage_redis(out_variable, collected)
        elif data_type == 'threatconnect':
            self.log.debug('Stage ThreatConnect Data')
            self.stage_tc(entry.get('data_owner'), entry.get('data', {}), entry.get('variable'))
        elif data_type == 'threatconnect-association':
            self.log.debug('Stage ThreatConnect Association Data')
            association = entry.get('data')
            self.stage_tc_associations(association.get('entity1'), association.get('entity2'))
        elif data_type == 'threatconnect-batch':
            self.log.debug('Stage ThreatConnect Batch Data')
            self.stage_tc_batch(entry.get('data_owner'), entry.get('data', {}))
|
def check_atd(text):
    """Check for redundancies from After the Deadline.

    Scans *text* for redundant phrases (e.g. "free gift", "advance warning")
    and reports the preferred shorter form via ``preferred_forms_check``.

    :param text: the text to check
    :return: whatever ``preferred_forms_check`` returns for the matches
    """
    err = "after_the_deadline.redundancy"
    msg = "Redundancy. Use '{}' instead of '{}'."
    # Each entry is [preferred_form, [redundant_forms, ...]]. The
    # commented-out pairs were deliberately disabled upstream - keep them
    # for reference.
    redundancies = [ [ u"Bō" , [ "Bo Staff" ] ] , [ "Challah" , [ "Challah bread" ] ] , [ "Hallah" , [ "Hallah bread" ] ] , [ "Challah" , [ "Challah bread" ] ] , [ "I" , [ "I myself" , "I personally" ] ] , [ "Mount Fuji" , [ "Mount Fujiyama" ] ] , [ "Milky Way" , [ "Milky Way galaxy" ] ] , [ "Rio Grande" , [ "Rio Grande river" ] ] , [ "adage" , [ "old adage" ] ] , [ "add" , [ "add a further" , "add an additional" ] ] , [ "advance" , [ "advance forward" ] ] , [ "alternative" , [ "alternative choice" ] ] , [ "amaretto" , [ "amaretto almond" ] ] , [ "annihilate" , [ "completely annihilate" ] ] , [ "anniversary" , [ "annual anniversary" ] ] , [ "anonymous" , [ "unnamed anonymous" ] ] , [ "as" , [ "equally as" ] ] , [ "ascend" , [ "ascend up" ] ] , [ "ask" , [ "ask the question" ] ] , [ "assemble" , [ "assemble together" ] ] , [ "at present the" , [ "at the present time the" ] ] , [ "at this point" , [ "at this point in time" ] ] , [ "attach" , [ "attach together" ] ] , [ "autumn" , [ "autumn season" ] ] , [ "bald" , [ "bald-headed" ] ] , [ "balsa" , [ "balsa wood" ] ] , [ "belongings" , [ "personal belongings" ] ] , [ "benefits" , [ "desirable benefits" ] ] , [ "bento" , [ "bento box" ] ] , [ "best" , [ "best ever" ] ] , [ "bit" , [ "tiny bit" ] ] , [ "blend" , [ "blend together" ] ] , [ "bond" , [ "common bond" ] ] , [ "bonus" , [ "added bonus" , "extra bonus" ] ] , [ "bouquet" , [ "bouquet of flowers" ] ] , [ "breakthrough" , [ "major breakthrough" ] ] , [ "bride" , [ "new bride" ] ] , [ "brief" , [ "brief in duration" ] ] , [ "bruin" , [ "bruin bear" ] ] , [ "hot" , [ "burning hot" ] ] , [ "cacophony" , [ "cacophony of sound" ] ] , [ "cameo" , [ "brief cameo" , "cameo appearance" ] ] , [ "cancel" , [ "cancel out" ] ] , [ "cash" , [ "cash money" ] ] , [ "chai" , [ "chai tea" ] ] , [ "chance" , [ "random chance" ] ] , [ "charm" , [ "personal charm" ] ] , [ "circle" , [ "circle around" , "round circle" ] ] , [ "circulate" , [ "circulate around" ] ] , [ "classify" , [
"classify into groups" ] ] , [ "classmates" , [ "fellow classmates" ] ] , [ "cliche" , [ "old cliche" , "overused cliche" ] ] , [ "climb" , [ "climb up" ] ] , [ "clock" , [ "time clock" ] ] , [ "collaborate" , [ "collaborate together" ] ] , [ "collaboration" , [ "joint collaboration" ] ] , [ "colleague" , [ "fellow colleague" ] ] , [ "combine" , [ "combine together" ] ] , [ "commute" , [ "commute back and forth" ] ] , [ "compete" , [ "compete with each other" ] ] , [ "comprise" , [ "comprise of" ] ] , [ "comprises" , [ "comprises of" ] ] , [ "conceived" , [ "first conceived" ] ] , [ "conclusion" , [ "final conclusion" ] ] , [ "confer" , [ "confer together" ] ] , [ "confrontation" , [ "direct confrontation" ] ] , # [ " confused " , [ " confused state " ] ] ,
[ "connect" , [ "connect together" , "connect up" ] ] , [ "consensus" , [ "consensus of opinion" , "general consensus" ] ] , [ "consult" , [ "consult with" ] ] , [ "conversation" , [ "oral conversation" ] ] , [ "cool" , [ "cool down" ] ] , [ "cooperate" , [ "cooperate together" ] ] , [ "cooperation" , [ "mutual cooperation" ] ] , [ "copy" , [ "duplicate copy" ] ] , [ "core" , [ "inner core" ] ] , [ "cost" , [ "cost the sum of" ] ] , [ "could" , [ "could possibly" ] ] , [ "coupon" , [ "money-saving coupon" ] ] , [ "created" , [ "originally created" ] ] , [ "crisis" , [ "crisis situation" ] ] , [ "crouch" , [ "crouch down" ] ] , [ "currently" , [ "now currently" ] ] , [ "custom" , [ "old custom" , "usual custom" ] ] , [ "danger" , [ "serious danger" ] ] , [ "dates" , [ "dates back" ] ] , [ "decision" , [ "definite decision" ] ] , [ "depreciate" , [ "depreciate in value" ] ] , [ "descend" , [ "descend down" ] ] , [ "destroy" , [ "totally destroy" ] ] , [ "destroyed" , [ "completely destroyed" ] ] , [ "destruction" , [ "total destruction" ] ] , [ "details" , [ "specific details" ] ] , [ "dilemma" , [ "difficult dilemma" ] ] , [ "disappear" , [ "disappear from sight" ] ] , [ "discovered" , [ "originally discovered" ] ] , [ "dive" , [ "dive down" ] ] , [ "done" , [ "over and done with" ] ] , [ "drawing" , [ "illustrated drawing" ] ] , [ "drop" , [ "drop down" ] ] , [ "dune" , [ "sand dune" ] ] , [ "during" , [ "during the course of" ] ] , [ "dwindle" , [ "dwindle down" ] ] , [ "dwindled" , [ "dwindled down" ] ] , [ "every" , [ "each and every" ] ] , [ "earlier" , [ "earlier in time" ] ] , [ "eliminate" , [ "completely eliminate" , "eliminate altogether" , "entirely eliminate" ] ] , [ "ember" , [ "glowing ember" ] ] , [ "embers" , [ "burning embers" ] ] , [ "emergency" , [ "emergency situation" , "unexpected emergency" ] ] , [ "empty" , [ "empty out" ] ] , [ "enclosed" , [ "enclosed herein" ] ] , [ "end" , [ "final end" ] ] , [ "engulfed" , [ "completely engulfed" ] ] , [
"enter" , [ "enter in" , "enter into" ] ] , [ "equal" , [ "equal to one another" ] ] , [ "eradicate" , [ "eradicate completely" ] ] , [ "essential" , [ "absolutely essential" ] ] , [ "estimated at" , [ "estimated at about" , "estimated at approximately" , "estimated at around" ] ] , [ "etc." , [ "and etc." ] ] , [ "evolve" , [ "evolve over time" ] ] , [ "exaggerate" , [ "over exaggerate" ] ] , [ "exited" , [ "exited from" ] ] , [ "experience" , [ "actual experience" , "past experience" ] ] , [ "experts" , [ "knowledgeable experts" ] ] , [ "extradite" , [ "extradite back" ] ] , [ "face the consequences" , [ "face up to the consequences" ] ] , [ "face the fact" , [ "face up to the fact" ] ] , [ "face the challenge" , [ "face up to the challenge" ] ] , [ "face the problem" , [ "face up to the problem" ] ] , [ "facilitate" , [ "facilitate easier" ] ] , [ "fact" , [ "established fact" ] ] , [ "facts" , [ "actual facts" , "hard facts" , "true facts" ] ] , [ "fad" , [ "passing fad" ] ] , [ "fall" , [ "fall down" ] ] , [ "fall" , [ "fall season" ] ] , [ "feat" , [ "major feat" ] ] , [ "feel" , [ "feel inside" ] ] , [ "feelings" , [ "inner feelings" ] ] , [ "few" , [ "few in number" ] ] , [ "filled" , [ "completely filled" , "filled to capacity" ] ] , [ "first" , [ "first of all" ] ] , [ "first time" , [ "first time ever" ] ] , [ "fist" , [ "closed fist" ] ] , [ "fly" , [ "fly through the air" ] ] , [ "focus" , [ "focus in" , "main focus" ] ] , [ "follow" , [ "follow after" ] ] , [ "for example" , [ "as for example" ] ] , # [ " foremost " , [ " first and foremost " ] ] ,
[ "forever" , [ "forever and ever" ] ] , [ "free" , [ "for free" ] ] , [ "friend" , [ "personal friend" ] ] , [ "friendship" , [ "personal friendship" ] ] , [ "full" , [ "full to capacity" ] ] , [ "fundamentals" , [ "basic fundamentals" ] ] , [ "fuse" , [ "fuse together" ] ] , [ "gather" , [ "gather together" , "gather up" ] ] , [ "get up" , [ "get up on his feet" , "get up on your feet" ] ] , [ "gift" , [ "free gift" ] ] , [ "gifts" , [ "free gifts" ] ] , [ "goal" , [ "ultimate goal" ] ] , # [ " graduate " , [ " former graduate " ] ] ,
[ "grow" , [ "grow in size" ] ] , [ "guarantee" , [ "absolute guarantee" ] ] , [ "gunman" , [ "armed gunman" ] ] , [ "gunmen" , [ "armed gunmen" ] ] , [ "habitat" , [ "native habitat" ] ] , [ "had done" , [ "had done previously" ] ] , [ "halves" , [ "two equal halves" ] ] , # [ " has " , [ " has got " ] ] ,
# [ " have " , [ " have got " ] ] ,
[ "haven" , [ "safe haven" ] ] , # [ " he " , [ " he himself " ] ] ,
[ "heat" , [ "heat up" ] ] , [ "history" , [ "past history" ] ] , [ "hoist" , [ "hoist up" ] ] , [ "hole" , [ "empty hole" ] ] , [ "honcho" , [ "head honcho" ] ] , [ "ice" , [ "frozen ice" ] ] , [ "ideal" , [ "perfect ideal" ] ] , [ "identical" , [ "same identical" ] ] , [ "identification" , [ "positive identification" ] ] , [ "imports" , [ "foreign imports" ] ] , [ "impulse" , [ "sudden impulse" ] ] , [ "in fact" , [ "in actual fact" ] ] , [ "in the yard" , [ "outside in the yard" ] ] , [ "inclusive" , [ "all inclusive" ] ] , [ "incredible" , [ "incredible to believe" ] ] , [ "incumbent" , [ "present incumbent" ] ] , # [ " indicted " , [ " indicted on a charge " ] ] ,
[ "industry" , [ "private industry" ] ] , [ "injuries" , [ "harmful injuries" ] ] , [ "innovation" , [ "new innovation" ] ] , [ "innovative" , [ "innovative new" , "new innovative" ] ] , # [ " input " , [ " input into " ] ] ,
[ "instinct" , [ "natural instinct" , "naturally instinct" ] ] , [ "integrate" , [ "integrate together" , "integrate with each other" ] ] , [ "interdependent" , [ "interdependent on each other" , "mutually interdependent" ] ] , [ "introduced" , [ "introduced for the first time" ] ] , [ "invention" , [ "new invention" ] ] , [ "kneel" , [ "kneel down" ] ] , [ "knots" , [ "knots per hour" ] ] , # [ " last " , [ " last of all " ] ] ,
# [ " later " , [ " later time " ] ] ,
[ "lift" , [ "lift up" ] ] , [ "lingers" , [ "still lingers" ] ] , [ "look to the future" , [ "look ahead to the future" ] ] , [ "love triangle" , [ "three-way love triangle" ] ] , [ "maintained" , [ "constantly maintained" ] ] , [ "manually" , [ "manually by hand" ] ] , [ "marina" , [ "boat marina" ] ] , [ "may" , [ "may possibly" ] ] , [ "meet" , [ "meet together" , "meet with each other" ] ] , [ "memories" , [ "past memories" ] ] , [ "merge" , [ "merge together" ] ] , [ "merged" , [ "merged together" ] ] , [ "meshed" , [ "meshed together" ] ] , [ "midnight" , [ "twelve midnight" ] ] , [ "migraine" , [ "migraine headache" ] ] , [ "minestrone" , [ "minestrone soup" ] ] , [ "mix" , [ "mix together" ] ] , [ "moment" , [ "brief moment" , "moment in time" ] ] , [ "monopoly" , [ "complete monopoly" ] ] , [ "mural" , [ "wall mural" ] ] , [ "mutual respect" , [ "mutual respect for each other" ] ] , [ "mutually dependent" , [ "mutually dependent on each other" ] ] , [ "mystery" , [ "unsolved mystery" ] ] , # [ " naked " , [ " bare naked " ] ] ,
[ "nape" , [ "nape of her neck" ] ] , [ "necessary" , [ "absolutely necessary" ] ] , [ "never" , [ "never at any time" ] ] , [ "noon" , [ "12 noon" , "12 o'clock noon" , "high noon" , "twelve noon" ] ] , [ "nostalgia" , [ "nostalgia for the past" ] ] , [ "number of" , [ "number of different" ] ] , [ "opening" , [ "exposed opening" ] ] , [ "my opinion" , [ "my personal opinion" ] ] , [ "opposites" , [ "exact opposites" , "polar opposites" ] ] , [ "opposite" , [ "exact opposite" , "polar opposite" ] ] , [ "orbits" , [ "orbits around" ] ] , [ "outcome" , [ "final outcome" ] ] , [ "panacea" , [ "universal panacea" ] ] , [ "pending" , [ "now pending" ] ] , [ "penetrate" , [ "penetrate through" ] ] , [ "persists" , [ "still persists" ] ] , [ "pioneer" , [ "old pioneer" ] ] , [ "plan" , [ "plan ahead" , "plan in advance" , "proposed plan" ] ] , [ "planning" , [ "advance planning" , "forward planning" ] ] , [ "plans" , [ "future plans" ] ] , [ "plan" , [ "future plan" ] ] , [ "point" , [ "point in time" ] ] , [ "point" , [ "sharp point" ] ] , [ "postpone" , [ "postpone until later" ] ] , [ "pouring rain" , [ "pouring down rain" ] ] , [ "preview" , [ "advance preview" ] ] , [ "previously listed" , [ "previously listed above" ] ] , [ "probed" , [ "probed into" ] ] , [ "proceed" , [ "proceed ahead" ] ] , [ "prosthesis" , [ "artificial prosthesis" ] ] , # [ " protrude " , [ " protrude out " ] ] ,
[ "proverb" , [ "old proverb" ] ] , # [ " proximity " , [ " close proximity " ] ] ,
[ "put off" , [ "put off until later" ] ] , # [ " raise " , [ " raise up " ] ] ,
[ "re-elect" , [ "re-elect for another term" ] ] , [ "reason is" , [ "reason is because" ] ] , [ "recur" , [ "recur again" ] ] , [ "recurrence" , [ "future recurrence" ] ] , [ "refer" , [ "refer back" ] ] , [ "reflect" , [ "reflect back" ] ] , # [ " relevant " , [ " highly relevant " ] ] ,
[ "remain" , [ "continue to remain" ] ] , [ "remains" , [ "still remains" ] ] , [ "replica" , [ "exact replica" ] ] , [ "reply" , [ "reply back" ] ] , # [ " requirements " , [ " necessary requirements " ] ] ,
[ "reservations" , [ "advance reservations" ] ] , [ "retreat" , [ "retreat back" ] ] , [ "revert" , [ "revert back" ] ] , [ "round" , [ "round in shape" ] ] , [ "rule of thumb" , [ "rough rule of thumb" ] ] , [ "rumor" , [ "unconfirmed rumor" ] ] , [ "rustic" , [ "rustic country" ] ] , [ "same" , [ "exact same" , "precise same" , "same exact" ] ] , [ "sanctuary" , [ "safe sanctuary" ] ] , [ "satisfaction" , [ "full satisfaction" ] ] , [ "scrutinize" , [ "scrutinize in detail" ] ] , [ "scrutiny" , [ "careful scrutiny" , "close scrutiny" ] ] , [ "secret" , [ "secret that cannot be told" ] ] , [ "seek" , [ "seek to find" ] ] , [ "separated" , [ "separated apart from each other" ] ] , [ "share" , [ "share together" ] ] , [ "shiny" , [ "shiny in appearance" ] ] , [ "sincere" , [ "truly sincere" ] ] , [ "sink" , [ "sink down" ] ] , [ "skipped" , [ "skipped over" ] ] , # [ " slow " , [ " slow speed " ] ] ,
# [ " small " , [ " small size " ] ] ,
[ "soft" , [ "soft in texture" , "soft to the touch" ] ] , [ "sole" , [ "sole of the foot" ] ] , [ "some time" , [ "some time to come" ] ] , [ "speck" , [ "small speck" ] ] , [ "speed" , [ "rate of speed" ] ] , [ "spell out" , [ "spell out in detail" ] ] , [ "spiked" , [ "spiked upward" , "spiked upwards" ] ] , [ "spring" , [ "spring season" ] ] , [ "stranger" , [ "anonymous stranger" ] ] , [ "studio audience" , [ "live studio audience" ] ] , [ "subway" , [ "underground subway" ] ] , [ "sufficient" , [ "sufficient enough" ] ] , [ "summer" , [ "summer season" ] ] , [ "sure" , [ "absolutely sure" ] ] , [ "surprise" , [ "unexpected surprise" ] ] , [ "surround" , [ "completely surround" ] ] , [ "surrounded" , [ "surrounded on all sides" ] ] , [ "tall" , [ "tall in height" , "tall in stature" ] ] , [ "telepathy" , [ "mental telepathy" ] ] , [ "ten" , [ "ten in number" ] ] , [ "these" , [ "these ones" ] ] , # [ " they " , [ " they themselves " ] ] ,
[ "those" , [ "those ones" ] ] , [ "trench" , [ "open trench" ] ] , [ "truth" , [ "honest truth" ] ] , [ "tundra" , [ "frozen tundra" ] ] , [ "ultimatum" , [ "final ultimatum" ] ] , # [ " undeniable " , [ " undeniable truth " ] ] ,
[ "undergraduate" , [ "undergraduate student" ] ] , # [ " unintentional " , [ " unintentional mistake " ] ] ,
[ "vacillate" , [ "vacillate back and forth" ] ] , [ "veteran" , [ "former veteran" ] ] , [ "visible" , [ "visible to the eye" ] ] , [ "warn" , [ "warn in advance" ] ] , [ "warning" , [ "advance warning" ] ] , [ "water heater" , [ "hot water heater" ] ] , [ "in which we live" , [ "in which we live in" ] ] , [ "winter" , [ "winter season" ] ] , [ "witness" , [ "live witness" ] ] , [ "yakitori" , [ "yakitori chicken" ] ] , [ "yerba mate" , [ "yerba mate tea" ] ] , [ "yes" , [ "affirmative yes" ] ] , ]
    return preferred_forms_check(text, redundancies, err, msg)
|
def getscheme(self, default=None):
    """Return the URI scheme in canonical (lowercase) form, or `default`
    if the original URI reference did not contain a scheme component.
    """
    scheme = self.scheme
    if scheme is None:
        return default
    # Normalize bytes to str first, then lowercase either way.
    if isinstance(scheme, bytes):
        scheme = scheme.decode('ascii')
    return scheme.lower()
|
def tab_insert(self, e):  # (M-TAB)
    u'''Insert a tab character.'''
    # Clamp the cursor to the buffer length, then pad with spaces up to
    # the next tab stop.
    position = min(self.l_buffer.point, len(self.l_buffer.line_buffer))
    padding = self.tabstop - (position % self.tabstop)
    self.insert_text(' ' * padding)
    self.finalize()
|
def _on_unexpected_disconnection ( self , success , result , failure_reason , context ) :
"""Callback function called when an unexpected disconnection occured ( meaning that we didn ' t previously send
a ` disconnect ` request ) .
It is executed in the baBLE working thread : should not be blocking .
Args :
success ( bool ) : A bool indicating that the operation is successful or not
result ( dict ) : Disconnection information ( if successful )
- connection _ handle ( int ) : The connection handle that just disconnected
- code ( int ) : The reason code
- reason ( str ) : A message explaining the reason code in plain text
failure _ reason ( any ) : An object indicating the reason why the operation is not successful ( else None )
context ( dict ) : The connection context"""
|
connection_id = context [ 'connection_id' ]
self . _logger . warn ( 'Unexpected disconnection event, handle=%d, reason=0x%X, state=%s' , result [ 'connection_handle' ] , result [ 'code' ] , self . connections . get_state ( connection_id ) )
self . connections . unexpected_disconnect ( connection_id )
self . _trigger_callback ( 'on_disconnect' , self . id , connection_id )
|
def stop ( self , dummy_signum = None , dummy_frame = None ) :
    """Close the listening socket and exit; also usable as a signal handler."""
    logging . info ( 'Shutting down ...' )
    self . socket . close ( )
    # sys.exit(0) is just sugar for raising SystemExit(0).
    raise SystemExit ( 0 )
|
def copy_files ( src_dir , dst_dir , filespec = '*' , recursive = False ) :
    """Build a build-step spec copying files matching *filespec* from
    *src_dir* into *dst_dir*.

    When *recursive* is True, matching directories are copied as well.

    Raises:
        RuntimeError: if the two directories are identical or *src_dir*
            does not exist.
    """
    import os
    from . modules import copyfiles

    if src_dir == dst_dir :
        raise RuntimeError ( 'copy_files() src and dst directories must be different.' )
    if not os . path . isdir ( src_dir ) :
        raise RuntimeError ( 'copy_files() src directory "{}" does not exist.' . format ( src_dir ) )

    step = {
        'dependencies_fn' : copyfiles . list_files ,
        'compiler_fn' : copyfiles . copy_files ,
        'input' : src_dir ,
        'output' : dst_dir ,
        'kwargs' : { 'filespec' : filespec , 'recursive' : recursive } ,
    }
    return step
|
def _name_value_to_bson ( name , value , check_keys , opts ) :
    """Encode a single name, value pair.

    Lookup order (fastest first): the exact-type encoder cache, the value's
    ``_type_marker`` attribute, then an isinstance scan over the supported
    base types.  Marker and base hits are cached for subsequent lookups.
    """
    value_type = type ( value )
    # Fast path: exact type already cached.  The KeyError fallback only
    # ever fires once per previously unseen subtype.
    try :
        return _ENCODERS [ value_type ] ( name , value , check_keys , opts )
    except KeyError :
        pass
    # Honour an explicit _type_marker next.  This must precede the
    # isinstance scan below, because users may subclass one of our custom
    # types that itself subclasses a python built-in (e.g. Binary).
    marker = getattr ( value , "_type_marker" , None )
    if isinstance ( marker , int ) and marker in _MARKERS :
        encoder = _MARKERS [ marker ]
        # Cache this type for faster subsequent lookup.
        _ENCODERS [ value_type ] = encoder
        return encoder ( name , value , check_keys , opts )
    # Finally probe each supported base type; happens only once per subtype
    # thanks to the cache update.
    for base in _ENCODERS :
        if isinstance ( value , base ) :
            encoder = _ENCODERS [ base ]
            # Cache this type for faster subsequent lookup.
            _ENCODERS [ value_type ] = encoder
            return encoder ( name , value , check_keys , opts )
    raise InvalidDocument ( "cannot convert value of type %s to bson" % value_type )
|
def create_spot_instances ( launch_specs , spot_price = 26 , expiration_mins = 15 ) :
    """Request EC2 spot instances described by *launch_specs* and return them.

    Args:
        launch_specs (dict): EC2 launch specification.  ``MinCount`` /
            ``MaxCount`` and ``TagSpecifications`` are consumed here (they
            are not valid in a spot launch specification); any tags found
            are applied to the instances after creation.
        spot_price: bid in dollars; default is $26 which is right above the
            p3.16xlarge on-demand price.
        expiration_mins: this request is only valid for this many minutes
            from now.

    Returns:
        list of boto3 Instance resources.

    Raises:
        RuntimeError: if the spot request fails or not every requested
            instance is fulfilled (partially created instances are
            terminated first).
    """
    ec2c = get_ec2_client ( )
    # Work on a copy so the caller's dict is not mutated by the pops below.
    launch_specs = dict ( launch_specs )
    # BUGFIX: the original indexed launch_specs['MinCount'] unconditionally,
    # raising KeyError when absent; pop with a default instead.
    num_tasks = launch_specs . pop ( 'MinCount' , None ) or 1
    launch_specs . pop ( 'MaxCount' , None )
    # BUGFIX: 'tags' was left unbound when no TagSpecifications were given,
    # causing a NameError further down.
    tags = None
    if 'TagSpecifications' in launch_specs :
        try :
            tags = launch_specs [ 'TagSpecifications' ] [ 0 ] [ 'Tags' ]
        except ( KeyError , IndexError ) :  # was a bare except
            pass
        del launch_specs [ 'TagSpecifications' ]
    import pytz
    import datetime as dt
    # datetime.utcnow() is not timezone aware; attach UTC via pytz.
    now = dt . datetime . utcnow ( ) . replace ( tzinfo = pytz . utc )
    spot_args = {
        'LaunchSpecification' : launch_specs ,
        'SpotPrice' : str ( spot_price ) ,
        'InstanceCount' : num_tasks ,
        'ValidUntil' : now + dt . timedelta ( minutes = expiration_mins ) ,
    }
    try :
        spot_requests = ec2c . request_spot_instances ( ** spot_args )
    except Exception as e :
        # Was `assert False, ...` which disappears under `python -O`.
        raise RuntimeError ( f"Spot instance request failed (out of capacity?), error was {e}" ) from e
    spot_requests = spot_requests [ 'SpotInstanceRequests' ]
    instance_ids = wait_on_fulfillment ( ec2c , spot_requests )
    print ( 'Instances fullfilled...' )
    ec2 = get_ec2_resource ( )
    instances = list ( ec2 . instances . filter ( Filters = [ { 'Name' : 'instance-id' , 'Values' : list ( filter ( None , instance_ids ) ) } ] ) )
    if not all ( instance_ids ) :
        # Partial fulfillment: tear everything down rather than leak instances.
        for inst in instances :
            inst . terminate ( )
        raise RuntimeError ( 'Failed to create spot instances:' , instance_ids )
    if tags :
        for inst in instances :
            inst . create_tags ( Tags = tags )
    return instances
|
def inverse_kinematics ( self , end_effector_transformation , q = None , max_iter = 1000 , tolerance = 0.05 , mask = None , use_pinv = False ) :
    """Computes the joint angles corresponding to the end effector transformation.

    :param end_effector_transformation: the end effector homogeneous transformation matrix
    :param vector q: initial estimate of the joint angles (defaults to zeros)
    :param int max_iter: maximum number of iterations
    :param float tolerance: tolerance before convergence
    :param mask: specify the cartesian DOF that will be ignored (in the case
        of a chain with less than 6 joints); defaults to all six DOF active
    :param bool use_pinv: use the Jacobian pseudo-inverse instead of its transpose
    :rtype: vector of the joint angles (theta 1, theta 2, ..., theta n)
    :raises ValueError: if the solver does not converge within max_iter
    """
    if mask is None :
        # BUGFIX: the default was the mutable `numpy.ones(6)` evaluated once
        # at definition time; the sentinel yields a fresh array per call
        # while preserving the previous behaviour.
        mask = numpy . ones ( 6 )
    if q is None :
        q = numpy . zeros ( ( len ( self . links ) , 1 ) )
    q = numpy . matrix ( q . reshape ( - 1 , 1 ) )
    best_e = numpy . ones ( 6 ) * numpy . inf
    best_q = None
    alpha = 1.0
    for _ in range ( max_iter ) :
        e = numpy . multiply ( transform_difference ( self . forward_kinematics ( q ) [ 0 ] , end_effector_transformation ) , mask )
        d = numpy . linalg . norm ( e )
        if d < numpy . linalg . norm ( best_e ) :
            # Progress: remember the best state and grow the step size slowly.
            best_e = e . copy ( )
            best_q = q . copy ( )
            alpha *= 2.0 ** ( 1.0 / 8.0 )
        else :
            # Overshoot: backtrack to the best known state and halve the step.
            q = best_q . copy ( )
            e = best_e . copy ( )
            alpha *= 0.5
        if use_pinv :
            dq = numpy . linalg . pinv ( self . _jacob0 ( q ) ) * e . reshape ( ( - 1 , 1 ) )
        else :
            dq = self . _jacob0 ( q ) . T * e . reshape ( ( - 1 , 1 ) )
        q += alpha * dq
        # d = numpy . linalg . norm ( dq )
        if d < tolerance :
            return q
    else :
        raise ValueError ( 'could not converge d={}' . format ( numpy . linalg . norm ( best_e ) ) )
|
def add_operations ( self , root , definitions ) :
    """Add <operation/> children.

    For every <operation> under *root*, builds an Operation facade carrying
    the SOAP action/style plus input/output bodies, headers and faults, and
    registers it in ``self.operations`` keyed by operation name.
    """
    # Default <soap:operation/> used when an operation omits its own.
    dsop = Element ( 'operation' , ns = soapns )
    for c in root . getChildren ( 'operation' ) :
        op = Facade ( 'Operation' )
        op . name = c . get ( 'name' )
        sop = c . getChild ( 'operation' , default = dsop )
        soap = Facade ( 'soap' )
        soap . action = '"%s"' % sop . get ( 'soapAction' , default = '' )
        soap . style = sop . get ( 'style' , default = self . soap . style )
        soap . input = Facade ( 'Input' )
        soap . input . body = Facade ( 'Body' )
        soap . input . headers = [ ]
        soap . output = Facade ( 'Output' )
        soap . output . body = Facade ( 'Body' )
        soap . output . headers = [ ]
        op . soap = soap
        # Input message: fall back to an empty <input/> element when absent.
        input = c . getChild ( 'input' )
        if input is None :
            input = Element ( 'input' , ns = wsdlns )
        body = input . getChild ( 'body' )
        self . body ( definitions , soap . input . body , body )
        for header in input . getChildren ( 'header' ) :
            self . header ( definitions , soap . input , header )
        # Output message: same fallback as for the input message.
        output = c . getChild ( 'output' )
        if output is None :
            output = Element ( 'output' , ns = wsdlns )
        body = output . getChild ( 'body' )
        self . body ( definitions , soap . output . body , body )
        for header in output . getChildren ( 'header' ) :
            self . header ( definitions , soap . output , header )
        # Faults: only entries carrying a nested <fault/> element count.
        faults = [ ]
        for fault in c . getChildren ( 'fault' ) :
            sf = fault . getChild ( 'fault' )
            if sf is None :
                continue
            fn = fault . get ( 'name' )
            f = Facade ( 'Fault' )
            f . name = sf . get ( 'name' , default = fn )
            f . use = sf . get ( 'use' , default = 'literal' )
            faults . append ( f )
        soap . faults = faults
        self . operations [ op . name ] = op
|
def get_configuration ( self ) :
    """Return a mapping of service UID -> analysis configuration.

    Each entry holds the analysis partition plus the 'hidden' flag taken
    from the per-UID settings (defaulting to False).
    """
    settings = self . get_settings ( )
    configs = { }
    for analysis in self . context . getAnalyses ( ) :
        uid = analysis . get ( "service_uid" )
        override = settings . get ( uid , { } )
        configs [ uid ] = {
            "partition" : analysis . get ( "partition" ) ,
            "hidden" : override . get ( "hidden" , False ) ,
        }
    return configs
|
def history ( self , channel_name , ** kwargs ) :
    """Fetch message history for a channel.

    See https://slack.com/api/channels.history
    """
    self . params [ 'channel' ] = self . get_channel_id ( channel_name )
    if kwargs :
        self . params . update ( kwargs )
    endpoint = FromUrl ( 'https://slack.com/api/channels.history' , self . _requests )
    return endpoint ( data = self . params ) . get ( )
|
def _split_generators ( self , dl_manager ) :
    """Returns SplitGenerators."""
    # Images and annotations ship as separate archives.  Note that the
    # archive extension is .tar.gz even though the payload is an
    # uncompressed tar, hence the explicit extract method.
    dl_paths = dl_manager . download_and_extract ( {
        "images" : tfds . download . Resource (
            url = os . path . join ( _BASE_URL , "102flowers.tgz" ) ,
            extract_method = tfds . download . ExtractMethod . TAR ) ,
        "labels" : os . path . join ( _BASE_URL , "imagelabels.mat" ) ,
        "setid" : os . path . join ( _BASE_URL , "setid.mat" ) ,
    } )
    common = dict (
        images_dir_path = os . path . join ( dl_paths [ "images" ] , "jpg" ) ,
        labels_path = dl_paths [ "labels" ] ,
        setid_path = dl_paths [ "setid" ] ,
    )
    # Split name -> the MATLAB id list that selects its examples.
    split_ids = [
        ( tfds . Split . TRAIN , "trnid" ) ,
        ( tfds . Split . TEST , "tstid" ) ,
        ( tfds . Split . VALIDATION , "valid" ) ,
    ]
    return [
        tfds . core . SplitGenerator (
            name = split , num_shards = 1 ,
            gen_kwargs = dict ( split_name = mat_name , ** common ) )
        for split , mat_name in split_ids
    ]
|
def add_methods ( methods_to_add ) :
    """Bulk-register new methods on Generator.

    Each entry is an argument tuple forwarded to ``Generator.add_method``.
    """
    for entry in methods_to_add :
        try :
            Generator . add_method ( * entry )
        except Exception as ex :
            raise Exception ( 'issue adding {} - {}' . format ( repr ( entry ) , ex ) )
|
def process_species ( self , limit ) :
    """Loop through the xml file and process the species.

    We add elements to the graph, and store the id-to-label in the
    label_hash dict.

    :param limit: maximum number of rows to process (forwarded to the
        table handler)
    :return: None
    """
    myfile = '/' . join ( ( self . rawdir , self . files [ 'data' ] [ 'file' ] ) )
    # Context managers guarantee both handles are closed even when the XML
    # parse raises (the original leaked them on error).
    with gzip . open ( myfile , 'rb' ) as fh :
        with io . TextIOWrapper ( fh , newline = "" ) as filereader :
            filereader . readline ( )  # remove the xml declaration line
            # Species ids are == NCBITaxon ids
            for event , elem in ET . iterparse ( filereader ) :  # iterparse is not deprecated
                self . process_xml_table ( elem , 'Species_gb' , self . _process_species_table_row , limit )
    return
|
def populate_all_metadata ( ) :
    """Create metadata instances for all models in seo_models if empty.

    Once you have created a single metadata instance, this will not run.
    This is because it is a potentially slow operation that need only be
    done once.  If you want to ensure that everything is populated, run the
    populate_metadata management command.
    """
    for metadata_cls in registry . values ( ) :
        instance_metadata = metadata_cls . _meta . get_model ( 'modelinstance' )
        if instance_metadata is None :
            continue
        for model in metadata_cls . _meta . seo_models :
            populate_metadata ( model , instance_metadata )
|
def generate_signed_url_v4 ( credentials , resource , expiration , api_access_endpoint = DEFAULT_ENDPOINT , method = "GET" , content_md5 = None , content_type = None , response_type = None , response_disposition = None , generation = None , headers = None , query_parameters = None , _request_timestamp = None , # for testing only
) :
    """Generate a V4 signed URL to provide query-string auth'n to a resource.

    .. note::
        Assumes ``credentials`` implements the
        :class:`google.auth.credentials.Signing` interface.  Also assumes
        ``credentials`` has a ``service_account_email`` property which
        identifies the credentials.

    .. note::
        If you are on Google Compute Engine, you can't generate a signed URL.
        Follow `Issue 922`_ for updates on this.  If you'd like to be able to
        generate a signed URL from GCE, you can use a standard service account
        from a JSON file rather than a GCE service account.

    See headers `reference`_ for more details on optional arguments.

    .. _Issue 922: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/922
    .. _reference: https://cloud.google.com/storage/docs/reference-headers

    :type credentials: :class:`google.auth.credentials.Signing`
    :param credentials: Credentials object with an associated private key to
                        sign text.

    :type resource: str
    :param resource: A pointer to a specific resource
                     (typically, ``/bucket-name/path/to/blob.txt``).

    :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
    :param expiration: Point in time when the signed URL should expire.

    :type api_access_endpoint: str
    :param api_access_endpoint: Optional URI base.  Defaults to
                                "https://storage.googleapis.com/"

    :type method: str
    :param method: The HTTP verb that will be used when requesting the URL.
                   Defaults to ``'GET'``.  If method is ``'RESUMABLE'`` then
                   the signature will additionally contain the
                   `x-goog-resumable` header, and the method changed to POST.
                   See the signed URL docs regarding this flow:
                   https://cloud.google.com/storage/docs/access-control/signed-urls

    :type content_md5: str
    :param content_md5: (Optional) The MD5 hash of the object referenced by
                        ``resource``.

    :type content_type: str
    :param content_type: (Optional) The content type of the object referenced
                         by ``resource``.

    :type response_type: str
    :param response_type: (Optional) Content type of responses to requests
                          for the signed URL.  Used to over-ride the content
                          type of the underlying resource.

    :type response_disposition: str
    :param response_disposition: (Optional) Content disposition of responses
                                 to requests for the signed URL.

    :type generation: str
    :param generation: (Optional) A value that indicates which generation of
                       the resource to fetch.

    :type headers: dict
    :param headers: (Optional) Additional HTTP headers to be included as part
                    of the signed URLs.  See:
                    https://cloud.google.com/storage/docs/xml-api/reference-headers
                    Requests using the signed URL *must* pass the specified
                    header (name and value) with each request for the URL.

    :type query_parameters: dict
    :param query_parameters: (Optional) Additional query parameters to be
                             included as part of the signed URLs.  See:
                             https://cloud.google.com/storage/docs/xml-api/reference-headers#query

    :raises: :exc:`TypeError` when expiration is not a valid type.
    :raises: :exc:`AttributeError` if credentials is not an instance
             of :class:`google.auth.credentials.Signing`.

    :rtype: str
    :returns: A signed URL you can use to access the resource
              until expiration.
    """
    ensure_signed_credentials ( credentials )
    expiration_seconds = get_expiration_seconds_v4 ( expiration )
    # Timestamps: _request_timestamp is an injection point for tests; in
    # normal operation both stamps derive from the current time.
    if _request_timestamp is None :
        now = NOW ( )
        request_timestamp = now . strftime ( "%Y%m%dT%H%M%SZ" )
        datestamp = now . date ( ) . strftime ( "%Y%m%d" )
    else :
        request_timestamp = _request_timestamp
        datestamp = _request_timestamp [ : 8 ]
    # Credential scope ties the signature to a date and the storage service.
    client_email = credentials . signer_email
    credential_scope = "{}/auto/storage/goog4_request" . format ( datestamp )
    credential = "{}/{}" . format ( client_email , credential_scope )
    if headers is None :
        headers = { }
    if content_type is not None :
        headers [ "Content-Type" ] = content_type
    if content_md5 is not None :
        headers [ "Content-MD5" ] = content_md5
    header_names = [ key . lower ( ) for key in headers ]
    if "host" not in header_names :
        headers [ "Host" ] = "storage.googleapis.com"
    # RESUMABLE is a pseudo-method: the actual request is a POST carrying
    # the x-goog-resumable header.
    if method . upper ( ) == "RESUMABLE" :
        method = "POST"
        headers [ "x-goog-resumable" ] = "start"
    canonical_headers , ordered_headers = get_canonical_headers ( headers )
    canonical_header_string = ( "\n" . join ( canonical_headers ) + "\n" )
    # Yes , Virginia , the extra newline is part of the spec .
    signed_headers = ";" . join ( [ key for key , _ in ordered_headers ] )
    if query_parameters is None :
        query_parameters = { }
    else :
        # Valueless parameters are signed with an empty string value.
        query_parameters = { key : value or "" for key , value in query_parameters . items ( ) }
    query_parameters [ "X-Goog-Algorithm" ] = "GOOG4-RSA-SHA256"
    query_parameters [ "X-Goog-Credential" ] = credential
    query_parameters [ "X-Goog-Date" ] = request_timestamp
    query_parameters [ "X-Goog-Expires" ] = expiration_seconds
    query_parameters [ "X-Goog-SignedHeaders" ] = signed_headers
    if response_type is not None :
        query_parameters [ "response-content-type" ] = response_type
    if response_disposition is not None :
        query_parameters [ "response-content-disposition" ] = response_disposition
    if generation is not None :
        query_parameters [ "generation" ] = generation
    # The canonical query string must be sorted by parameter name.
    ordered_query_parameters = sorted ( query_parameters . items ( ) )
    canonical_query_string = six . moves . urllib . parse . urlencode ( ordered_query_parameters )
    # Assemble the canonical request, hash it, and sign the resulting
    # string-to-sign with the credentials' private key.
    canonical_elements = [ method , resource , canonical_query_string , canonical_header_string , signed_headers , "UNSIGNED-PAYLOAD" , ]
    canonical_request = "\n" . join ( canonical_elements )
    canonical_request_hash = hashlib . sha256 ( canonical_request . encode ( "ascii" ) ) . hexdigest ( )
    string_elements = [ "GOOG4-RSA-SHA256" , request_timestamp , credential_scope , canonical_request_hash , ]
    string_to_sign = "\n" . join ( string_elements )
    signature_bytes = credentials . sign_bytes ( string_to_sign . encode ( "ascii" ) )
    # The V4 scheme transmits the signature hex-encoded in the URL.
    signature = binascii . hexlify ( signature_bytes ) . decode ( "ascii" )
    return "{}{}?{}&X-Goog-Signature={}" . format ( api_access_endpoint , resource , canonical_query_string , signature )
|
def is_equal ( self , other_consonnant ) :
    """Compare two consonants on place, manner, voicing and gemination.

    >>> v_consonant = Consonant(Place.labio_dental, Manner.fricative, True, "v", False)
    >>> f_consonant = Consonant(Place.labio_dental, Manner.fricative, False, "f", False)
    >>> v_consonant.is_equal(f_consonant)
    False

    :param other_consonnant: consonant to compare against
    :return: True when all four phonological attributes match
    """
    return all ( (
        self . place == other_consonnant . place ,
        self . manner == other_consonnant . manner ,
        self . voiced == other_consonnant . voiced ,
        self . geminate == other_consonnant . geminate ,
    ) )
|
def success ( request , message , extra_tags = '' , fail_silently = False ) :
    """Shortcut for adding a message with the ``SUCCESS`` level."""
    options = { 'extra_tags' : extra_tags , 'fail_silently' : fail_silently }
    add_message ( request , constants . SUCCESS , message , ** options )
|
def filter_convolve ( data , filters , filter_rot = False , method = 'scipy' ) :
    r"""Convolve the input image with a stack of wavelet filters.

    Parameters
    ----------
    data : np.ndarray
        Input data, 2D array
    filters : np.ndarray
        Wavelet filters, 3D array
    filter_rot : bool, optional
        Option to rotate wavelet filters (default is 'False')
    method : str {'astropy', 'scipy'}, optional
        Convolution method (default is 'scipy')

    Returns
    -------
    np.ndarray convolved data

    Examples
    --------
    >>> from modopt.signal.wavelet import filter_convolve
    >>> x = np.arange(9).reshape(3, 3).astype(float)
    >>> y = np.arange(36).reshape(4, 3, 3).astype(float)
    >>> filter_convolve(x, y)
    array([[[  174.,   165.,   174.],
            [   93.,    84.,    93.],
            [  174.,   165.,   174.]],
           [[  498.,   489.,   498.],
            [  417.,   408.,   417.],
            [  498.,   489.,   498.]],
           [[  822.,   813.,   822.],
            [  741.,   732.,   741.],
            [  822.,   813.,   822.]],
           [[ 1146.,  1137.,  1146.],
            [ 1065.,  1056.,  1065.],
            [ 1146.,  1137.,  1146.]]])

    >>> filter_convolve(y, y, filter_rot=True)
    array([[ 14550.,  14586.,  14550.],
           [ 14874.,  14910.,  14874.],
           [ 14550.,  14586.,  14550.]])
    """
    if filter_rot :
        # Rotated filters are paired plane-by-plane with the coefficient
        # stack and the individual convolutions are summed into one image.
        pairs = zip ( data , rotate_stack ( filters ) )
        return np . sum ( [ convolve ( plane , filt , method = method ) for plane , filt in pairs ] , axis = 0 )
    return np . array ( [ convolve ( data , filt , method = method ) for filt in filters ] )
|
def add_sldId ( self , rId ) :
    """Create a new ``<p:sldId>`` child element with its r:id attribute set
    to *rId* and return a reference to it."""
    next_id = self . _next_id
    return self . _add_sldId ( id = next_id , rId = rId )
|
def getPropagationBit ( self , t , p ) :
    """Return the propagation bit for propagation name *p* of type *t*.

    Raises CommandExecutionError when *p* is not a valid propagation for
    the given type.
    """
    try :
        return self . validPropagations [ t ] [ p ] [ 'BITS' ]
    except KeyError :
        valid_names = ', ' . join ( self . validPropagations [ t ] )
        raise CommandExecutionError (
            'No propagation type of "{0}". It should be one of the following: {1}' . format ( p , valid_names ) )
|
def inputcooker_store_queue ( self , char ) :
    """Put the cooked data in the input queue (with locking).

    *char* may be a single item or a tuple/list/str of items; sequences are
    appended element by element.
    """
    # 'with' guarantees the lock is released even if an append raises
    # (the original explicit acquire/release leaked the lock on error).
    with self . IQUEUELOCK :
        if type ( char ) in ( tuple , list , str ) :
            for item in char :
                self . cookedq . append ( item )
        else :
            self . cookedq . append ( char )
|
def to_shape_list ( region_list , coordinate_system = 'fk5' ) :
    """Converts a list of regions into a `regions.ShapeList` object.

    Parameters
    ----------
    region_list : python list
        Lists of `regions.Region` objects
    coordinate_system : str
        The astropy coordinate system frame in which all the coordinates
        present in the `region_list` will be converted.  Default is 'fk5'.

    Returns
    -------
    shape_list : `regions.ShapeList` object
        list of `regions.Shape` objects.
    """
    shape_list = ShapeList ( )
    for region in region_list :
        coord = [ ]
        # Derive the shape name from the class name by stripping the
        # 'SkyRegion' (9 chars) or 'PixelRegion' (11 chars) suffix.
        if isinstance ( region , SkyRegion ) :
            reg_type = region . __class__ . __name__ [ : - 9 ] . lower ( )
        else :
            reg_type = region . __class__ . __name__ [ : - 11 ] . lower ( )
        for val in regions_attributes [ reg_type ] :
            coord . append ( getattr ( region , val ) )
        if reg_type == 'polygon' :
            # A polygon's geometry is fully described by its vertices.
            coord = [ x for x in region . vertices ]
        if coordinate_system :
            coordsys = coordinate_system
        else :
            # Fall back to the first coordinate's frame for sky regions,
            # or to pixel ('image') coordinates otherwise.
            if isinstance ( region , SkyRegion ) :
                coordsys = coord [ 0 ] . name
            else :
                coordsys = 'image'
        frame = coordinates . frame_transform_graph . lookup_name ( coordsys )
        new_coord = [ ]
        for val in coord :
            if isinstance ( val , Angle ) or isinstance ( val , u . Quantity ) or isinstance ( val , numbers . Number ) :
                new_coord . append ( val )
            elif isinstance ( val , PixCoord ) :
                # Pixel coordinates become dimensionless x/y quantities.
                new_coord . append ( u . Quantity ( val . x , u . dimensionless_unscaled ) )
                new_coord . append ( u . Quantity ( val . y , u . dimensionless_unscaled ) )
            else :
                # Sky coordinates: transform to the target frame and store
                # the (lon, lat) angles.
                new_coord . append ( Angle ( val . transform_to ( frame ) . spherical . lon ) )
                new_coord . append ( Angle ( val . transform_to ( frame ) . spherical . lat ) )
        meta = dict ( region . meta )
        meta . update ( region . visual )
        if reg_type == 'text' :
            meta [ 'text' ] = meta . get ( 'text' , meta . pop ( 'label' , '' ) )
        include = region . meta . pop ( 'include' , True )
        shape_list . append ( Shape ( coordsys , reg_type , new_coord , meta , False , include ) )
    return shape_list
|
def write_cyc ( fn , this , conv = 1.0 ) :
    """Write the lattice information to a cyc.dat file (i.e., tblmd input file).

    :param fn: output file name
    :param this: atoms-like object providing ``get_cell()``
    :param conv: unit conversion factor applied to every cell component
    """
    lattice = this . get_cell ( )

    def _write_box ( f ) :
        # One row per cartesian component; columns are the cell vectors.
        for axis in range ( 3 ) :
            f . write ( " %20.10f %20.10f %20.10f\n" % ( lattice [ 0 ] [ axis ] * conv , lattice [ 1 ] [ axis ] * conv , lattice [ 2 ] [ axis ] * conv ) )

    f = paropen ( fn , "w" )
    try :
        f . write ( "<------- Simulation box definition\n" )
        f . write ( "<------- Barostat (on = 1, off = 0)\n" )
        f . write ( " 0\n" )
        # The file format repeats the box vectors and stress tensor as
        # 'start' and 'end' snapshots.
        f . write ( "<------- Box vectors (start)\n" )
        _write_box ( f )
        f . write ( "<------- Box vectors (end)\n" )
        _write_box ( f )
        f . write ( "<------- Mass and gamma of the box (used in connection with the barostat)\n" )
        f . write ( " 240 0.005\n" )
        f . write ( "<------- Stress tensor (start)\n" )
        for _ in range ( 3 ) :
            f . write ( " 0 0 0\n" )
        f . write ( "<------- Stress tensor (end)\n" )
        for _ in range ( 3 ) :
            f . write ( " 0 0 0\n" )
    finally :
        # Ensure the handle is closed even if a write fails
        # (the original leaked it on error).
        f . close ( )
|
def download_queue ( self , job_ids ) :
    """Downloads data of completed jobs."""
    if self . skip :
        return None
    ids = "," . join ( str ( job_id ) for job_id in job_ids )
    url = "{}?jobtype=completed&jobIds={}" . format ( self . queue_url , ids )
    payload = None
    try :
        resp = self . session . get ( url , headers = { "Accept" : "application/json" } )
        if resp :
            payload = resp . json ( )
    # pylint: disable=broad-except
    except Exception as err :
        logger . error ( err )
        payload = None
    return payload
|
def get_options ( self ) :
    """A hook to override the flattened list of all options used to
    generate option names and defaults."""
    return [ option for option_list in self . get_option_lists ( ) for option in option_list ]
|
def html_result ( html : str , extraheaders : TYPE_WSGI_RESPONSE_HEADERS = None ) -> WSGI_TUPLE_TYPE :
    """Build the ``(contenttype, extraheaders, data)`` tuple for UTF-8 HTML."""
    body = html . encode ( "utf-8" )
    return 'text/html; charset=utf-8' , extraheaders or [ ] , body
|
def insert_fft_option_group ( parser ) :
    """Adds the options used to choose an FFT backend.

    Use this if your program supports selecting the FFT backend; otherwise
    simply call the fft and ifft functions and rely on default choices.
    Each available backend module is also given the chance to add its own
    options through a function called ``insert_fft_options``, which takes
    the fft_group object as argument.

    Parameters
    ----------
    parser : object
        OptionParser instance
    """
    fft_group = parser . add_argument_group ( "Options for selecting the" " FFT backend and controlling its performance" " in this program." )
    # A *list* of preferred backends (nargs='*'); this becomes the default
    # preference order for calls of fft()/ifft() that don't specify one.
    fft_group . add_argument ( "--fft-backends" , help = "Preference list of the FFT backends. " "Choices are: \n" + str ( get_backend_names ( ) ) , nargs = '*' , default = [ ] )
    for backend_module in get_backend_modules ( ) :
        try :
            backend_module . insert_fft_options ( fft_group )
        except AttributeError :
            # Backend exports no extra options; that's fine.
            pass
|
def lattice_array_to_unit_cell ( lattice_array ) :
    """Return crystallographic parameters (a, b, c, alpha, beta, gamma)
    from a unit-cell lattice matrix whose columns hold the cell vectors.

    Lengths keep the matrix's units; angles are returned in degrees.
    """
    lengths = np . sqrt ( np . sum ( lattice_array ** 2 , axis = 0 ) )
    gamma = np . arccos ( lattice_array [ 0 ] [ 1 ] / lengths [ 1 ] )
    beta = np . arccos ( lattice_array [ 0 ] [ 2 ] / lengths [ 2 ] )
    alpha = np . arccos (
        lattice_array [ 1 ] [ 2 ] * np . sin ( gamma ) / lengths [ 2 ]
        + np . cos ( beta ) * np . cos ( gamma ) )
    angles = [ np . rad2deg ( alpha ) , np . rad2deg ( beta ) , np . rad2deg ( gamma ) ]
    return np . append ( lengths , angles )
|
def create_schema_from_xsd_directory ( directory , version ) :
    """Create and fill a Schema from a directory containing xsd files.

    Calls fill_schema_from_xsd_file for each xsd file found.
    """
    schema = Schema ( version )
    for xsd_path in _get_xsd_from_directory ( directory ) :
        logger . info ( "Loading schema %s" % xsd_path )
        fill_schema_from_xsd_file ( xsd_path , schema )
    return schema
|
def _two_qubit_accumulate_into_scratch ( args : Dict [ str , Any ] ) :
    """Accumulates two qubit phase gates into the scratch shards."""
    index0 , index1 = args [ 'indices' ]
    half_turns = args [ 'half_turns' ]
    scratch = _scratch_shard ( args )
    # Projector onto the |11> subspace of the two target qubits.
    projector = _one_projector ( args , index0 ) * _one_projector ( args , index1 )
    # Exp11 = exp ( - i pi | 11 > < 11 | half _ turns ) , but we accumulate phases as
    # pi / 2 .
    # NOTE: the augmented assignment mutates the shared scratch shard in
    # place; rebinding (scratch = scratch + ...) would discard the update.
    scratch += 2 * half_turns * projector
|
def security_cleanup ( app , appbuilder ) :
    """Cleanup unused permissions from views and roles."""
    builder = import_application ( app , appbuilder )
    builder . security_cleanup ( )
    message = click . style ( "Finished security cleanup" , fg = "green" )
    click . echo ( message )
|
def request_issuance ( self , csr ) :
    """Request a certificate.

    Authorizations should have already been completed for all of the names
    requested in the CSR.

    Note that unlike `acme.client.Client.request_issuance`, the certificate
    resource will have the body data as raw bytes.

    .. seealso:: `txacme.util.csr_for_names`

    .. todo:: Delayed issuance is not currently supported, the server must
        issue the requested certificate immediately.

    :param csr: A certificate request message: normally
        `txacme.messages.CertificateRequest` or
        `acme.messages.CertificateRequest`.

    :rtype: Deferred[`acme.messages.CertificateResource`]
    :return: The issued certificate.
    """
    action = LOG_ACME_REQUEST_CERTIFICATE ( )
    with action . context ( ) :
        # POST the CSR in DER form and expect 201 Created with a DER
        # certificate back; the Deferred chain is tied to the Eliot action.
        # NOTE(review): self.directory[csr] presumably maps the message type
        # to the server's issuance endpoint URL — confirm against the
        # directory implementation.
        return ( DeferredContext ( self . _client . post ( self . directory [ csr ] , csr , content_type = DER_CONTENT_TYPE , headers = Headers ( { b'Accept' : [ DER_CONTENT_TYPE ] } ) ) ) . addCallback ( self . _expect_response , http . CREATED ) . addCallback ( self . _parse_certificate ) . addActionFinish ( ) )
|
def show_instances ( server , cim_class ) :
    """Display the instances of the CIM_Class defined by cim_class.

    CIM_RegisteredProfile is special-cased to the server's profiles;
    every other class is enumerated across all of the server's namespaces.
    """
    if cim_class == 'CIM_RegisteredProfile' :
        for profile_inst in server . profiles :
            print ( profile_inst . tomof ( ) )
        return
    for namespace in server . namespaces :
        try :
            instances = server . conn . EnumerateInstances ( cim_class , namespace = namespace )
            if len ( instances ) :
                print ( 'INSTANCES OF %s ns=%s' % ( cim_class , namespace ) )
                for inst in instances :
                    print ( inst . tomof ( ) )
        except pywbem . Error as er :
            # Classes simply absent from a namespace are silently skipped.
            if er . status_code != pywbem . CIM_ERR_INVALID_CLASS :
                print ( '%s namespace %s Enumerate failed for conn=%s\n' 'exception=%s' % ( cim_class , namespace , server , er ) )
|
def unique ( self , sort = False ) :
    """Return unique set of values in image"""
    values = np . unique ( self . numpy ( ) )
    return np . sort ( values ) if sort else values
|
def get_arg_parse():
    """Parse the command-line arguments using argparse.

    Returns:
        argparse.Namespace with attributes: config (str), strPthPrior (str),
        varNumOpt1 (int), varNumOpt2 (int), varNumOpt3 (str or None),
        lgcRstrCentre (bool), strPathHrf (str or None),
        supsur (list of float or None).
    """
    # Create parser object:
    objParser = argparse.ArgumentParser()
    # Add argument to namespace - config file path:
    objParser.add_argument('-config', required=True, metavar='/path/to/config.csv', help='Absolute file path of config file with \
                           parameters for pRF analysis. Ignored if in \
                           testing mode.')
    # Add argument to namespace - prior results file path:
    objParser.add_argument('-strPthPrior', required=True, metavar='/path/to/my_prior_res', help='Absolute file path of prior pRF results. \
                           Ignored if in testing mode.')
    # Add argument to namespace - varNumOpt1 flag:
    objParser.add_argument('-varNumOpt1', required=True, type=int, metavar='N1', help='Number of radial positions.')
    # Add argument to namespace - varNumOpt2 flag:
    objParser.add_argument('-varNumOpt2', required=True, type=int, metavar='N2', help='Number of angular positions.')
    # Add argument to namespace - varNumOpt3 flag:
    # NOTE(review): unlike varNumOpt1/2, no type= is given, so this arrives
    # as a string when supplied -- confirm downstream conversion.
    objParser.add_argument('-varNumOpt3', default=None, metavar='N3', help='Max displacement in radial direction.')
    # Add argument to namespace - lgcRstrCentre flag (boolean switch):
    objParser.add_argument('-lgcRstrCentre', dest='lgcRstrCentre', action='store_true', default=False, help='Restrict fitted models to stimulated area.')
    # Optional path to custom HRF parameters:
    objParser.add_argument('-strPathHrf', default=None, required=False, metavar='/path/to/custom_hrf_parameter.npy', help='Path to npy file with custom hrf parameters. \
                           Ignored if in testing mode.')
    # Optional suppressive-surround ratios:
    objParser.add_argument('-supsur', nargs='+', help='List of floats that represent the ratio of \
                           size neg surround to size pos center.', type=float, default=None)
    # Namespace object containing arguments and values:
    objNspc = objParser.parse_args()
    return objNspc
|
def run(config, path=None, stop_on_error=True, just_tests=False):
    """Run sciunit tests for the given configuration.

    :param config: configuration passed through to ``prep``.
    :param path: working directory; defaults to the current directory.
    :param stop_on_error: forwarded to the ``_run`` helper.
    :param just_tests: if True run individual tests, otherwise run suites.
    """
    if path is None:
        path = os.getcwd()
    prep(config, path=path)
    # Import the three required project modules (cached after first import).
    modules = {name: __import__(name) for name in ('models', 'tests', 'suites')}
    print('\n')
    # Each module must expose an attribute of the same name.
    for name, module in modules.items():
        assert hasattr(module, name), "'%s' module requires attribute '%s'" % (name, name)
    targets = modules['tests'].tests if just_tests else modules['suites'].suites
    for target in targets:
        _run(target, modules['models'], stop_on_error)
|
def equal(self, a, b, message=None):
    """Check whether two values are equal, logging a failure otherwise.

    :return: True when ``a == b``, False after logging the mismatch.
    """
    if a == b:
        return True
    self.log_error("{} != {}".format(str(a), str(b)), message)
    return False
|
def toroidal(target, mode='max', r_toroid=5e-6, target_Pc=None, num_points=1e2, surface_tension='pore.surface_tension', contact_angle='pore.contact_angle', throat_diameter='throat.diameter', touch_length='throat.touch_length'):
    r"""Calculate meniscus properties in a toroidal (converging-diverging)
    throat for a given capillary pressure.

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated. This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.
    mode : string (Default is 'max')
        Determines what information to send back. Options are:
        'max' : the maximum capillary pressure along the throat axis
        'touch' : the maximum capillary pressure a meniscus can sustain
        before touching a solid feature
        'men' : return the meniscus info for a target pressure
    r_toroid : float or array_like
        The radius of the toroid surrounding the pore.
    target_Pc : float
        The target capillary pressure (required when mode is 'men').
    num_points : float (Default 100)
        The number of divisions to make along the profile length to assess
        the meniscus properties in order to find target pressures, touch
        lengths, minima and maxima.
    surface_tension : dict key (string)
        The dictionary key containing the surface tension values to be
        used. If a pore property is given, it is interpolated to a throat
        list.
    contact_angle : dict key (string)
        The dictionary key containing the contact angle values to be used.
        If a pore property is given, it is interpolated to a throat list.
    throat_diameter : dict key (string)
        The dictionary key containing the throat diameter values to be used.
    touch_length : dict key (string)
        The dictionary key containing the maximum length that a meniscus can
        protrude into the connecting pore before touching a solid feature
        and therefore invading.

    Notes
    -----
    This approach accounts for the converging-diverging nature of many
    throat types. Advancing the meniscus beyond the apex of the toroid
    requires an increase in capillary pressure beyond that for a cylindrical
    tube of the same radius. The details of this equation are described by
    Mason and Morrow [1]_, and explored by Gostick [2]_ in the context of a
    pore network model.

    References
    ----------
    .. [1] G. Mason, N. R. Morrow, Effect of contact angle on capillary
           displacement curvatures in pore throats formed by spheres. J.
           Colloid Interface Sci. 168, 130 (1994).
    .. [2] J. Gostick, Random pore network modeling of fibrous PEMFC gas
           diffusion media using Voronoi and Delaunay tessellations. J.
           Electrochem. Soc. 160, F731 (2013).
    """
    network = target.project.network
    phase = target.project.find_phase(target)
    element, sigma, theta = _get_key_props(phase=phase, diameter=throat_diameter, surface_tension=surface_tension, contact_angle=contact_angle)
    # Symbolic variables: x = axial position, R = toroid (fiber) radius,
    # rt = throat radius, s = surface tension, t = contact angle.
    x, R, rt, s, t = syp.symbols('x, R, rt, s, t')
    # Equation of circle re-arranged for y
    y = R * syp.sqrt(1 - (x / R) ** 2)
    # Throat radius profile
    r = rt + (R - y)
    # Derivative of profile
    rprime = r.diff(x)
    # Filling angle
    alpha = syp.atan(rprime)
    # Radius of curvature of meniscus
    rm = r / syp.cos(alpha + t)
    # distance from center of curvature to meniscus contact point (Pythagoras)
    a = syp.sqrt(rm ** 2 - r ** 2)
    # angle between throat axis, meniscus center and meniscus contact point
    gamma = syp.atan(r / a)
    # Capillary Pressure (Young-Laplace with filling-angle correction)
    f = -2 * s * syp.cos(alpha + t) / r
    # Compile the symbolic expressions into vectorized numpy callables.
    rx = syp.lambdify((x, R, rt), r, 'numpy')
    fill_angle = syp.lambdify((x, R, rt), alpha, 'numpy')
    Pc = syp.lambdify((x, R, rt, s, t), f, 'numpy')
    rad_curve = syp.lambdify((x, R, rt, s, t), rm, 'numpy')
    c2x = syp.lambdify((x, R, rt, s, t), a, 'numpy')
    cap_angle = syp.lambdify((x, R, rt, s, t), gamma, 'numpy')
    # Contact Angle converted to radians for the trig above
    theta = np.deg2rad(theta)
    # Network properties
    throatRad = network[throat_diameter] / 2
    # Sample positions along the throat axis, slightly inside +/- r_toroid
    # to avoid the singularity at the fiber surface.
    pos = np.arange(-r_toroid * 0.999, r_toroid * 0.999, r_toroid / num_points)
    fiberRad = np.ones(len(throatRad)) * r_toroid
    # Now find the positions of the menisci along each throat axis
    Y, X = np.meshgrid(throatRad, pos)
    t_Pc = Pc(X, fiberRad, Y, sigma, theta)
    # Values of minima and maxima
    Pc_min = np.min(t_Pc, axis=0)
    Pc_max = np.max(t_Pc, axis=0)
    # Arguments of minima and maxima
    a_min = np.argmin(t_Pc, axis=0)
    a_max = np.argmax(t_Pc, axis=0)
    if mode == 'max':
        return Pc_max
    elif mode == 'touch':
        all_rad = rad_curve(X, fiberRad, Y, sigma, theta)
        all_c2x = c2x(X, fiberRad, Y, sigma, theta)
        all_cen = X + np.sign(all_rad) * all_c2x
        dist = all_cen + np.abs(all_rad)
        # Only count lengths where meniscus bulges into pore
        dist[all_rad > 0] = 0.0
        touch_len = network[touch_length]
        mask = dist > touch_len
        arg_touch = np.argmax(mask, axis=0)
        # Make sure we only count ones that happen before max pressure
        # And above min pressure (which will be erroneous)
        arg_in_range = (arg_touch < a_max) * (arg_touch > a_min)
        arg_touch[~arg_in_range] = a_max[~arg_in_range]
        x_touch = pos[arg_touch]
        # Return the pressure at which a touch happens
        Pc_touch = Pc(x_touch, fiberRad, throatRad, sigma, theta)
        return Pc_touch
    elif target_Pc is None:
        # NOTE(review): this only logs -- execution falls through and
        # np.abs(target_Pc) below will fail on None; consider raising.
        logger.exception(msg='Please supply a target capillary pressure' + ' when mode is "men"')
    # Clamp tiny target pressures to avoid numerical issues near zero.
    if np.abs(target_Pc) < 1.0:
        target_Pc = 1.0
    inds = np.indices(np.shape(t_Pc))
    # Change values outside the range between minima and maxima to be those
    # Values
    mask = inds[0] < np.ones(len(pos))[:, np.newaxis] * a_min
    t_Pc[mask] = (np.ones(len(pos))[:, np.newaxis] * Pc_min)[mask]
    mask = inds[0] > np.ones(len(pos))[:, np.newaxis] * a_max
    t_Pc[mask] = (np.ones(len(pos))[:, np.newaxis] * Pc_max)[mask]
    # Find the argument at or above the target Pressure
    mask = t_Pc >= target_Pc
    arg_x = np.argmax(mask, axis=0)
    # If outside range change to minima or maxima accordingly
    arg_x[target_Pc < Pc_min] = a_min[target_Pc < Pc_min]
    arg_x[target_Pc > Pc_max] = a_max[target_Pc > Pc_max]
    xpos = pos[arg_x]
    # Output: meniscus geometry evaluated at the found positions
    men_data = {}
    men_data['pos'] = xpos
    men_data['rx'] = rx(xpos, fiberRad, throatRad)
    men_data['alpha'] = fill_angle(xpos, fiberRad, throatRad)
    men_data['alpha_min'] = fill_angle(pos[a_min], fiberRad, throatRad)
    men_data['alpha_max'] = fill_angle(pos[a_max], fiberRad, throatRad)
    men_data['c2x'] = c2x(xpos, fiberRad, throatRad, sigma, theta)
    men_data['gamma'] = cap_angle(xpos, fiberRad, throatRad, sigma, theta)
    men_data['radius'] = rad_curve(xpos, fiberRad, throatRad, sigma, theta)
    # xpos is relative to the throat center
    men_data['center'] = (xpos + np.sign(men_data['radius']) * men_data['c2x'])
    men_data['men_max'] = men_data['center'] - men_data['radius']
    logger.info(mode + ' calculated for Pc: ' + str(target_Pc))
    return men_data
|
def _add_offsets_to_token_nodes ( self ) :
"""Adds primary text string onsets / offsets to all nodes that represent
tokens . In SaltDocuments , this data was stored in TextualRelation
edges only ."""
|
for edge_index in self . _textual_relation_ids :
token_node_index = self . edges [ edge_index ] . source
self . nodes [ token_node_index ] . onset = self . edges [ edge_index ] . onset
self . nodes [ token_node_index ] . offset = self . edges [ edge_index ] . offset
|
def write_autoconf(self, filename, header="/* Generated by Kconfiglib (https://github.com/ulfalizer/Kconfiglib) */\n"):
    r"""Write symbol values as a C header file, matching the format of
    include/generated/autoconf.h in the kernel.

    The #define ordering matches write_config(); the C implementation's
    order depends on its hash table and will not match.

    filename:
      Path of the header file to write.

    header (default: "/* Generated by Kconfiglib (https://github.com/ulfalizer/Kconfiglib) */\n"):
      Text inserted verbatim at the start of the file; usually a C comment
      ending in a newline.
    """
    with self._open(filename, "w") as f:
        f.write(header)
        for sym in self.unique_defined_syms:
            # _write_to_conf is set as a side effect of evaluating
            # str_value (property magic), so read the value first.
            val = sym.str_value
            if not sym._write_to_conf:
                continue
            if sym.orig_type in _BOOL_TRISTATE:
                if val != "n":
                    f.write("#define {}{}{} 1\n".format(self.config_prefix, sym.name, "_MODULE" if val == "m" else ""))
            elif sym.orig_type is STRING:
                f.write('#define {}{} "{}"\n'.format(self.config_prefix, sym.name, escape(val)))
            else:
                # sym.orig_type in _INT_HEX: normalize hex values to a 0x prefix
                if sym.orig_type is HEX and not val.startswith(("0x", "0X")):
                    val = "0x" + val
                f.write("#define {}{} {}\n".format(self.config_prefix, sym.name, val))
|
def change_and_save(self, update_only_changed_fields=False, **changed_fields):
    """Change the given `changed_fields` on this object, save it and return itself.

    :param update_only_changed_fields: if True, only the changed fields are
        updated in the database.
    :param changed_fields: fields to change.
    :return: self (enables call chaining).
    """
    # Delegates to the module-level change_and_save helper of the same name.
    change_and_save(self, update_only_changed_fields=update_only_changed_fields, **changed_fields)
    return self
|
def t_ID(self, t):
    r"""[a-zA-Z_][a-zA-Z_0-9]*"""
    # PLY lexer rule: the docstring above IS the token regex.
    # If the value is a reserved name, give it the appropriate type (not ID)
    if t.value in self.reserved:
        t.type = self.reserved[t.value]
    # If it's a function, give it the FUNC type
    elif t.value in self.functions:
        t.type = 'FUNC'
    return t
|
def authenticate(self):
    """Authenticate into the UAA instance as the admin user.

    Side effects: stores the service uri in the environment, creates the
    UAA client on ``self.uaac``, and sets ``self.is_admin`` to True.
    """
    # Make sure we've stored uri for use
    predix.config.set_env_value(self.use_class, 'uri', self._get_uri())
    self.uaac = predix.security.uaa.UserAccountAuthentication()
    # use_cache=False forces a fresh admin token rather than a stored one.
    self.uaac.authenticate('admin', self._get_admin_secret(), use_cache=False)
    self.is_admin = True
|
def new(self, fname=None, editorstack=None, text=None):
    """Create a new file -- 'untitled'.

    fname=None --> fname will be 'untitledXX.py' but do not create file
    fname=<basestring> --> create file
    """
    # If no text is provided, create default content from the template file.
    empty = False
    try:
        if text is None:
            default_content = True
            text, enc = encoding.read(self.TEMPLATE_PATH)
            # Honor an explicit coding cookie in the template, if present.
            enc_match = re.search(r'-*- coding: ?([a-z0-9A-Z\-]*) -*-', text)
            if enc_match:
                enc = enc_match.group(1)
            # Initialize template variables
            # Windows
            username = encoding.to_unicode_from_fs(os.environ.get('USERNAME', ''))
            # Linux, Mac OS X
            if not username:
                username = encoding.to_unicode_from_fs(os.environ.get('USER', '-'))
            VARS = {'date': time.ctime(), 'username': username, }
            try:
                text = text % VARS
            except Exception:
                # Template may contain stray % characters; use it as-is.
                pass
        else:
            default_content = False
            # Only the template's encoding is needed when text is supplied.
            enc = encoding.read(self.TEMPLATE_PATH)[1]
    except (IOError, OSError):
        # Template missing or unreadable: fall back to an empty file.
        text = ''
        enc = 'utf-8'
        default_content = True
        empty = True
    create_fname = lambda n: to_text_string(_("untitled")) + ("%d.py" % n)
    # Creating editor widget
    if editorstack is None:
        current_es = self.get_current_editorstack()
    else:
        current_es = editorstack
    created_from_here = fname is None
    if created_from_here:
        # Pick the first unused 'untitledN.py' name.
        while True:
            fname = create_fname(self.untitled_num)
            self.untitled_num += 1
            if not osp.isfile(fname):
                break
        # Base directory: active project, else current file's dir, else cwd.
        basedir = getcwd_or_home()
        if self.main.projects.get_active_project() is not None:
            basedir = self.main.projects.get_active_project_path()
        else:
            c_fname = self.get_current_filename()
            if c_fname is not None and c_fname != self.TEMPFILE_PATH:
                basedir = osp.dirname(c_fname)
        fname = osp.abspath(osp.join(basedir, fname))
    else:
        # QString when triggered by a Qt signal
        fname = osp.abspath(to_text_string(fname))
        index = current_es.has_filename(fname)
        # If the file is already open and the user refuses to close it, abort.
        if index is not None and not current_es.close_file(index):
            return
    # Creating the editor widget in the first editorstack (the one that
    # can't be destroyed), then cloning this editor widget in all other
    # editorstacks:
    finfo = self.editorstacks[0].new(fname, enc, text, default_content, empty)
    finfo.path = self.main.get_spyder_pythonpath()
    self._clone_file_everywhere(finfo)
    current_editor = current_es.set_current_filename(finfo.filename)
    self.register_widget_shortcuts(current_editor)
    if not created_from_here:
        self.save(force=True)
|
def database_path(cls, project, instance, database):
    """Return a fully-qualified database resource string of the form
    ``projects/{project}/instances/{instance}/databases/{database}``."""
    template = "projects/{project}/instances/{instance}/databases/{database}"
    return google.api_core.path_template.expand(
        template, project=project, instance=instance, database=database,
    )
|
def in6_getLocalUniquePrefix():
    """Return a pseudo-randomly generated Unique Local prefix.

    Follows the recommendation of Section 3.2.2 of RFC 4193: the Global ID
    is the low-order 40 bits of a SHA-1 digest of an NTP-format 64-bit
    timestamp concatenated with a modified EUI-64 interface identifier.

    :return: an IPv6 address string such as 'fdxx:xxxx:xx00::' usable as a
        /48 ULA prefix.
    """
    # NTP timestamps (RFC 1305) are 64-bit unsigned fixed-point numbers:
    # integer seconds since 1900 in the first 32 bits, fraction in the last
    # 32 bits. The 1900-vs-1970 epoch offset is irrelevant for randomness,
    # so plain Unix time is used here.
    tod = time.time()
    i = int(tod)
    j = int((tod - i) * (2 ** 32))
    tod = struct.pack("!II", i, j)
    # TODO: Add some check regarding system address gathering
    rawmac = get_if_raw_hwaddr(conf.iface6)
    # bytearray() yields ints on both Python 2 and 3; the previous
    # ord()-over-list approach only worked on Python 2 bytes.
    mac = b":".join(b"%.02x" % byte for byte in bytearray(rawmac))
    # construct modified EUI-64 ID
    eui64 = inet_pton(socket.AF_INET6, '::' + in6_mactoifaceid(mac))[8:]
    # hashlib replaces the Python-2-only 'sha' module.
    import hashlib
    globalid = hashlib.sha1(tod + eui64).digest()[:5]
    return inet_ntop(socket.AF_INET6, b'\xfd' + globalid + b'\x00' * 10)
|
def _categorize_successor(self, state):
    """Append *state* into the appropriate successor lists.

    :param state: a SimState instance
    :param target: The target (of the jump/call/ret), read from
        ``state.scratch.target``.
    :return: The state (always returned, whatever list it landed in).
    """
    self.all_successors.append(state)
    target = state.scratch.target
    # categorize the state
    if o.APPROXIMATE_GUARDS in state.options and state.solver.is_false(state.scratch.guard, exact=False):
        # Approximated guard is false -> unsat (optionally validated).
        if o.VALIDATE_APPROXIMATIONS in state.options:
            if state.satisfiable():
                raise Exception('WTF')
        self.unsat_successors.append(state)
    elif o.APPROXIMATE_SATISFIABILITY in state.options and not state.solver.satisfiable(exact=False):
        # Approximated constraints unsatisfiable -> unsat.
        if o.VALIDATE_APPROXIMATIONS in state.options:
            if state.solver.satisfiable():
                raise Exception('WTF')
        self.unsat_successors.append(state)
    elif not state.scratch.guard.symbolic and state.solver.is_false(state.scratch.guard):
        # Concrete false guard -> unsat.
        self.unsat_successors.append(state)
    elif o.LAZY_SOLVES not in state.options and not state.satisfiable():
        # Eager satisfiability check failed -> unsat.
        self.unsat_successors.append(state)
    elif o.NO_SYMBOLIC_JUMP_RESOLUTION in state.options and state.solver.symbolic(target):
        # Symbolic jump resolution disabled -> unconstrained.
        self.unconstrained_successors.append(state)
    elif not state.solver.symbolic(target) and not state.history.jumpkind.startswith("Ijk_Sys"):
        # a successor with a concrete IP, and it's not a syscall
        self.successors.append(state)
        self.flat_successors.append(state)
    elif state.history.jumpkind.startswith("Ijk_Sys"):
        # syscall
        self.successors.append(state)
        # Misuse the ip_at_syscall register to save the return address for this syscall
        # state.ip *might be* changed to be the real address of syscall SimProcedures by syscall handling code in
        # angr
        state.regs.ip_at_syscall = state.ip
        try:
            symbolic_syscall_num, concrete_syscall_nums = self._resolve_syscall(state)
            if concrete_syscall_nums is not None:
                # Fork one flat successor per possible syscall number; the
                # last one reuses the original state instead of a copy.
                for i, n in enumerate(concrete_syscall_nums):
                    split_state = state if i == len(concrete_syscall_nums) - 1 else state.copy()
                    split_state.add_constraints(symbolic_syscall_num == n)
                    if split_state.supports_inspect:
                        split_state.inspect.downsize()
                    self._fix_syscall_ip(split_state)
                    self.flat_successors.append(split_state)
            else:
                # We cannot resolve the syscall number
                # However, we still put it to the flat_successors list, and angr.SimOS.handle_syscall will pick it
                # up, and create a "unknown syscall" stub for it.
                self._fix_syscall_ip(state)
                self.flat_successors.append(state)
        except AngrUnsupportedSyscallError:
            self.unsat_successors.append(state)
    else:
        # a successor with a symbolic IP
        _max_targets = state.options.symbolic_ip_max_targets
        _max_jumptable_targets = state.options.jumptable_symbolic_ip_max_targets
        try:
            if o.NO_IP_CONCRETIZATION in state.options:
                # Don't try to concretize the IP
                cond_and_targets = [(claripy.true, target)]
                max_targets = 0
            elif o.KEEP_IP_SYMBOLIC in state.options:
                # Evaluate against only the IP constraints first; fall back
                # to the full constraint set if too many answers come back.
                s = claripy.Solver()
                addrs = s.eval(target, _max_targets + 1, extra_constraints=tuple(state.ip_constraints))
                if len(addrs) > _max_targets:
                    # It is not a library
                    l.debug("It is not a Library")
                    addrs = state.solver.eval_upto(target, _max_targets + 1)
                    l.debug("addrs :%s", addrs)
                cond_and_targets = [(target == addr, addr) for addr in addrs]
                max_targets = _max_targets
            else:
                cond_and_targets = self._eval_target_jumptable(state, target, _max_jumptable_targets + 1)
                if cond_and_targets is None:
                    # Fallback to the traditional and slow method
                    cond_and_targets = self._eval_target_brutal(state, target, _max_targets + 1)
                    max_targets = _max_targets
                else:
                    max_targets = _max_jumptable_targets
            if len(cond_and_targets) > max_targets:
                # Too many possible targets: treat as unconstrained.
                l.warning("Exit state has over %d possible solutions. Likely unconstrained; skipping. %s", max_targets, target.shallow_repr())
                self.unconstrained_successors.append(state)
            else:
                for cond, a in cond_and_targets:
                    split_state = state.copy()
                    if o.KEEP_IP_SYMBOLIC in split_state.options:
                        split_state.regs.ip = target
                    else:
                        split_state.add_constraints(cond, action=True)
                        split_state.regs.ip = a
                    if split_state.supports_inspect:
                        split_state.inspect.downsize()
                    self.flat_successors.append(split_state)
                self.successors.append(state)
        except SimSolverModeError:
            self.unsat_successors.append(state)
    return state
|
def create_authenticate_message(self, user_name, password, domain_name=None, workstation=None, server_certificate_hash=None):
    """Create an NTLM AUTHENTICATE_MESSAGE from the Ntlm context and the
    messages exchanged so far.

    :param user_name: user name of the account being authenticated
    :param password: password of the account being authenticated
    :param domain_name: domain name of the account, default None
    :param workstation: workstation used to authenticate, default None
    :param server_certificate_hash: SHA256 hash string of the server's DER
        certificate, used for Channel Binding Tokens; omitted when None
    :return: base64-encoded bytes of the AUTHENTICATE_MESSAGE
    """
    self.authenticate_message = auth_msg = AuthenticateMessage(user_name, password, domain_name, workstation, self.challenge_message, self.ntlm_compatibility, server_certificate_hash)
    auth_msg.add_mic(self.negotiate_message, self.challenge_message)
    # Set up the session_security context (signing/sealing) only when
    # either capability was negotiated.
    seal_or_sign = NegotiateFlags.NTLMSSP_NEGOTIATE_SEAL | NegotiateFlags.NTLMSSP_NEGOTIATE_SIGN
    if self.negotiate_flags & seal_or_sign:
        final_flags = struct.unpack("<I", auth_msg.negotiate_flags)[0]
        self.session_security = SessionSecurity(final_flags, auth_msg.exported_session_key)
    return base64.b64encode(auth_msg.get_data())
|
def update_resource_attribute(resource_attr_id, is_var, **kwargs):
    """Update the `is_var` flag of a resource attribute.

    :param resource_attr_id: id of the ResourceAttr row to update.
    :param is_var: new value for the attribute's `is_var` flag.
    :param kwargs: expected to contain 'user_id', used for the
        write-permission check.
    :raises ResourceNotFoundError: if no ResourceAttr with this id exists.
    :return: the string 'OK'.
    """
    user_id = kwargs.get('user_id')
    try:
        ra = db.DBSession.query(ResourceAttr).filter(ResourceAttr.id == resource_attr_id).one()
    except NoResultFound:
        raise ResourceNotFoundError("Resource Attribute %s not found" % (resource_attr_id))
    # Raises if this user is not allowed to modify the attribute.
    ra.check_write_permission(user_id)
    ra.is_var = is_var
    return 'OK'
|
def text2class_txt_iterator(source_txt_path, label_txt_path, class_strs=None):
    """Yield dicts for Text2ClassProblem.generate_samples from lines of files.

    Args:
      source_txt_path: txt file with one record per line.
      label_txt_path: txt file with one label per line, either as an int or a
        str. If str, class_strs must be provided.
      class_strs: list<str> of class label names. Must be in the correct order
        (i.e. ["a", "b", "c"] means "a" gets class ID 0, "b" ID 1, etc.).

    Yields:
      {"inputs": inputs, "label": label}
    """
    if class_strs:
        # Map each class name to its positional ID.
        class_strs = {s: i for i, s in enumerate(class_strs)}
    for inputs, label in zip(txt_line_iterator(source_txt_path), txt_line_iterator(label_txt_path)):
        label = label.strip()
        # Translate a string label through the mapping, otherwise parse as int.
        label = class_strs[label] if class_strs else int(label)
        yield {"inputs": inputs, "label": label}
|
def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of a macOS machine.
    """
    # Application-support availability per macOS version (see the chart at
    # https://en.wikipedia.org/wiki/OS_X#Versions):
    #   10.0 introduces ppc; 10.4 introduces i386 (ppc64/x86_64 CLI-only);
    #   10.5 extends ppc64/x86_64 to GUI apps; 10.6 drops ppc64; 10.7 drops
    #   ppc.  Since we cannot tell whether a CLI or a GUI application is
    #   being installed, be conservative and treat ppc64/x86_64 support as
    #   starting at 10.5.  (We use the "Application support" column, not
    #   "Processor support", because what matters is which instruction sets
    #   an application may use, not which processors the OS runs on.)
    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    def _supports_arch(major, minor, arch):
        # A group arch is supported when any of its members is.
        version = (major, minor)
        if arch == 'ppc':
            return version <= (10, 5)
        if arch == 'ppc64':
            return version == (10, 5)
        if arch == 'i386':
            return version >= (10, 4)
        if arch == 'x86_64':
            return version >= (10, 5)
        if arch in groups:
            return any(_supports_arch(major, minor, member) for member in groups[arch])
        return False

    arches = []
    if _supports_arch(major, minor, machine):
        arches.append(machine)
    # Add every supported group arch that contains this machine arch.
    arches.extend(group for group in groups if machine in groups[group] and _supports_arch(major, minor, group))
    arches.append('universal')
    return arches
|
def get_delete_url(self):
    """Return the URL for deleting this model object."""
    url_kwargs = {
        'app': self.get_app_label(),
        'model': self.get_model_name(),
        'pk': self.object.id,
    }
    return reverse('trionyx:model-delete', kwargs=url_kwargs)
|
def buildDiscoveryURL(self):
    """Return a discovery URL for this realm.

    For a wildcard realm, 'www' takes the place of the star so relying
    party discovery can be run against a concrete host; otherwise the
    realm's original URL is returned unchanged.  Behaviour on invalid
    realms is undefined.

    @rtype: str
    @returns: The URL upon which relying party discovery should be run
        in order to verify the return_to URL
    @since: 2.1.0
    """
    if not self.wildcard:
        return self.unparsed
    # Use "www." in place of the star; self.host keeps its leading dot.
    assert self.host.startswith('.'), self.host
    return '%s://%s%s' % (self.proto, 'www' + self.host, self.path)
|
def _scheme_propagation(self, scheme, definitions):
    """Resolve scheme inheritance declared via ``'inherit': '$definition'``.

    Keys present in the scheme win over inherited keys; parent definitions
    that themselves declare 'inherit' are resolved recursively, and the
    'inherit' key never appears in the result.

    :param scheme: dict defining validation, containing an 'inherit' key.
    :param definitions: dict of named definitions (the scheme's '_' root).
    :rtype: dict, an updated copy of the scheme.
    """
    if not isinstance(scheme, dict):
        raise TypeError('scheme must be a dict to propagate.')
    inherit_from = scheme.get('inherit')
    if not isinstance(inherit_from, six.string_types):
        raise AttributeError('inherit must be defined in your scheme and be a string value. format: $variable.')
    if not inherit_from.startswith('$'):
        raise AttributeError('When inheriting from an object it must start with a $.')
    if inherit_from.count('$') > 1:
        raise AttributeError('When inheriting an object it can only have one $.')
    if not isinstance(definitions, dict):
        raise AttributeError("Must define definitions in the root of the SCHEME. " "It is done so with '_': { objs }.")
    name = inherit_from[1:]
    definition = definitions.copy().get(name)
    if not definition:
        raise LookupError('Was unable to find {0} in definitions. The follow are available: {1}.'.format(name, definitions))
    # Start with the scheme's own keys that the parent does not define.
    merged = {key: value for key, value in six.iteritems(scheme) if key not in definition}
    nested_scheme = None
    for key, value in six.iteritems(definition):
        # Scheme keys override inherited ones.
        merged[key] = scheme[key] if key in scheme else value
        if key == 'inherit':
            # The parent inherits too: resolve it recursively.
            nested_scheme = self._scheme_propagation(definition, definitions)
    # remove inherit key
    merged.pop('inherit', None)
    if nested_scheme is not None:
        merged.update(nested_scheme)
    return merged
|
def clear(self) -> None:
    """Destroy all running containers, delegating error handling to the
    API client when the DELETE does not return 204."""
    response = self.__api.delete('containers')
    if response.status_code != 204:
        self.__api.handle_erroneous_response(response)
|
def exit_with_error(message):
    """Display a formatted error message on stderr and terminate.

    Exits with status code 1 so shells and callers can detect the failure
    (the previous implementation exited with 0, which signals success).

    :param message: text to display before exiting.
    """
    click.secho(message, err=True, bg='red', fg='white')
    sys.exit(1)
|
def add_node(self, transformer, name=None, children=None, parent=None, parameters=None, return_node=False):
    '''Adds a node to the current graph.

    Args:
        transformer (str, Transformer): The pliers Transformer to use at
            the to-be-added node. Either a case-insensitive string giving
            the name of a Transformer class, or an initialized Transformer
            instance.
        name (str): Optional name to give this Node.
        children (list): Optional list of child nodes (i.e., nodes to pass
            the to-be-added node's Transformer output to).
        parent (Node): Optional node from which the to-be-added Node
            receives its input.
        parameters (dict): Optional keyword arguments to pass onto the
            Transformer initialized at this Node if a string is passed to
            the 'transformer' argument. Ignored if an already-initialized
            Transformer is passed.
        return_node (bool): If True, returns the initialized Node instance.

    Returns:
        The initialized Node instance if return_node is True,
        None otherwise.
    '''
    # A mutable default ({}) would be shared across all calls; default to
    # None and substitute an empty dict here instead.
    node = Node(transformer, name, **(parameters or {}))
    self.nodes[node.id] = node
    if parent is None:
        # No parent: this node is a root of the graph.
        self.roots.append(node)
    else:
        # Look the parent up by id so we attach to the graph's own copy.
        parent = self.nodes[parent.id]
        parent.add_child(node)
    if children is not None:
        self.add_nodes(children, parent=node)
    if return_node:
        return node
|
def get_objectives_by_query(self, objective_query=None):
    """Gets a list of Objectives matching the given objective query.

    arg:    objective_query (osid.learning.ObjectiveQuery): the
            objective query
    return: (osid.learning.ObjectiveList) - the returned ObjectiveList
    raise:  NullArgument - objective_query is null
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    raise:  Unsupported - objective_query is not of this service
    compliance: mandatory - This method must be implemented.
    """
    if objective_query is None:
        raise NullArgument()
    # Hierarchy queries are served by dedicated endpoints.  The stored
    # query term looks like 'key=<id>', so split('=')[1] extracts the id.
    if 'ancestorObjectiveId' in objective_query._query_terms:
        url_path = construct_url('objectives',
                                 bank_id=self._objective_bank_id,
                                 obj_id=objective_query._query_terms['ancestorObjectiveId'].split('=')[1])
        url_path += '/children'
    elif 'descendantObjectiveId' in objective_query._query_terms:
        url_path = construct_url('objectives',
                                 bank_id=self._objective_bank_id,
                                 obj_id=objective_query._query_terms['descendantObjectiveId'].split('=')[1])
        url_path += '/parents'
    else:
        # Generic search: append every remaining term as an '&<term>'
        # fragment, then promote the first '&' to '?' to form a valid
        # query string.
        url_path = construct_url('objectives', obj_id=None)
        for term in objective_query._query_terms:
            if term not in ['ancestorObjectiveId', 'descendantObjectiveId']:
                url_path += '&{0}'.format(objective_query._query_terms[term])
        url_path = url_path.replace('&', '?', 1)
    return objects.ObjectiveList(self._get_request(url_path))
|
def validate_lv_districts(session, nw):
    '''Validate if total load of a grid in a pkl file is what expected from LV districts

    Sums the sector peak loads (residential, retail, industrial,
    agricultural) from the database input tables and compares them with
    the loads actually present in the ding0 network -- once aggregated
    per LV grid district and once per LV load object.

    Parameters
    ----------
    session : sqlalchemy.orm.session.Session
        Database session
    nw :
        The network

    Returns
    -------
    DataFrame
        compare_by_district
    DataFrame
        compare_by_loads
    '''
    # config network intern variables
    nw._config = nw.import_config()
    nw._pf_config = nw.import_pf_config()
    nw._static_data = nw.import_static_data()
    nw._orm = nw.import_orm()
    # rescue peak load from input table
    # NOTE(review): 'lv_ditricts' typo kept as-is; it is a local name only.
    lv_ditricts = [dist.id_db
                   for mv in nw.mv_grid_districts()
                   for la in mv.lv_load_areas()
                   for dist in la.lv_grid_districts()]
    load_input = nw.list_lv_grid_districts(session, lv_ditricts)
    # total per sector column, rounded to 3 decimals so equality checks
    # against the network-side totals are stable
    load_input = load_input.sum(axis=0).apply(lambda x: np.round(x, 3))
    load_input.sort_index(inplace=True)
    load_input.index.names = ['id_db']
    # combined retail+industrial sector, used for mixed loads below
    load_input['peak_load_retind'] = load_input['peak_load_retail'] + load_input['peak_load_industrial']
    # search for lv_district in the grid
    lv_dist_idx = 0
    lv_dist_dict = {}
    lv_load_idx = 0
    lv_load_dict = {}
    for mv_district in nw.mv_grid_districts():
        for LA in mv_district.lv_load_areas():
            for lv_district in LA.lv_grid_districts():
                # record the district's own sector peak loads
                lv_dist_idx += 1
                lv_dist_dict[lv_dist_idx] = {
                    'id_db': lv_district.id_db,
                    'peak_load_residential': lv_district.peak_load_residential,
                    'peak_load_retail': lv_district.peak_load_retail,
                    'peak_load_industrial': lv_district.peak_load_industrial,
                    'peak_load_agricultural': lv_district.peak_load_agricultural,
                    'peak_load_retind': lv_district.peak_load_industrial + lv_district.peak_load_retail,
                }
                # classify every LV load node in this district's grid by
                # the sectors present in its consumption dict
                for node in lv_district.lv_grid.graph_nodes_sorted():
                    if isinstance(node, LVLoadDing0):
                        lv_load_idx += 1
                        peak_load_agricultural = 0
                        peak_load_residential = 0
                        peak_load_retail = 0
                        peak_load_industrial = 0
                        peak_load_retind = 0
                        if 'agricultural' in node.consumption:
                            tipo = 'agricultural'
                            peak_load_agricultural = node.peak_load
                        elif 'industrial' in node.consumption:
                            # an 'industrial' entry may actually be pure
                            # retail, pure industrial, or a mix, depending
                            # on which consumption component is zero
                            if node.consumption['retail'] == 0:
                                tipo = 'industrial'
                                peak_load_industrial = node.peak_load
                            elif node.consumption['industrial'] == 0:
                                tipo = 'retail'
                                peak_load_retail = node.peak_load
                            else:
                                tipo = 'ret_ind'
                                peak_load_retind = node.peak_load
                        elif 'residential' in node.consumption:
                            tipo = 'residential'
                            peak_load_residential = node.peak_load
                        else:
                            tipo = 'none'
                            print(node.consumption)
                        lv_load_dict[lv_load_idx] = {
                            'id_db': node.id_db,
                            'peak_load_residential': peak_load_residential,
                            'peak_load_retail': peak_load_retail,
                            'peak_load_industrial': peak_load_industrial,
                            'peak_load_agricultural': peak_load_agricultural,
                            'peak_load_retind': peak_load_retind,
                        }
        # aggregated load-area centres live in the MV grid, not in the
        # individual LV grids; count them as loads too
        for node in mv_district.mv_grid.graph_nodes_sorted():
            if isinstance(node, LVLoadAreaCentreDing0):
                lv_load_idx += 1
                lv_load_dict[lv_load_idx] = {
                    'id_db': node.id_db,
                    'peak_load_residential': node.lv_load_area.peak_load_residential,
                    'peak_load_retail': node.lv_load_area.peak_load_retail,
                    'peak_load_industrial': node.lv_load_area.peak_load_industrial,
                    'peak_load_agricultural': node.lv_load_area.peak_load_agricultural,
                    'peak_load_retind': 0,
                }
    # compare by LV district
    load_effective_lv_distr = pd.DataFrame.from_dict(lv_dist_dict, orient='index').set_index('id_db').sum(axis=0).apply(lambda x: np.round(x, 3))
    load_effective_lv_distr.sort_index(inplace=True)
    compare_by_district = pd.concat([load_input, load_effective_lv_distr, load_input == load_effective_lv_distr], axis=1)
    compare_by_district.columns = ['table', 'ding0', 'equal?']
    compare_by_district.index.names = ['sector']
    # compare by LV Loads
    load_effective_lv_load = pd.DataFrame.from_dict(lv_load_dict, orient='index').set_index('id_db')
    load_effective_lv_load = load_effective_lv_load.sum(axis=0).apply(lambda x: np.round(x, 3))
    load_effective_lv_load.sort_index(inplace=True)
    # fold pure retail/industrial totals into the mixed 'retind' column so
    # it is comparable to the input table's combined sector
    load_effective_lv_load['peak_load_retind'] = load_effective_lv_load['peak_load_retail'] + load_effective_lv_load['peak_load_industrial'] + load_effective_lv_load['peak_load_retind']
    compare_by_load = pd.concat([load_input, load_effective_lv_load, load_input == load_effective_lv_load], axis=1)
    compare_by_load.columns = ['table', 'ding0', 'equal?']
    compare_by_load.index.names = ['sector']
    return compare_by_district, compare_by_load
|
def multi_dict(pairs):
    """Build a mapping from key/value pairs without losing duplicates.

    Unlike the plain dict(pairs) constructor, a key that occurs several
    times keeps every value: each key maps to the list of all values
    seen for it, in input order.

    Parameters
    ----------
    pairs : (n, 2) array of key, value pairs

    Returns
    -------
    result : collections.defaultdict(list) mapping key -> [values...]
    """
    grouped = collections.defaultdict(list)
    for key, value in pairs:
        grouped[key].append(value)
    return grouped
|
def add_operation(self, operation_type, operation, mode=None):
    """Add an operation to the version.

    :param mode: Name of the mode in which the operation is executed
    :type mode: str
    :param operation_type: one of 'pre', 'post'
    :type operation_type: str
    :param operation: the operation to add
    :type operation: :class:`marabunta.model.Operation`
    """
    version_mode = self._get_version_mode(mode=mode)
    # Guard clause: reject anything that is not a known phase.
    if operation_type not in ('pre', 'post'):
        raise ConfigurationError(u"Type of operation must be 'pre' or 'post', got %s" % (operation_type,))
    if operation_type == 'pre':
        version_mode.add_pre(operation)
    else:
        version_mode.add_post(operation)
|
def _on_changed(self):
    """Update the tree items.

    Rebuilds the outline tree from the editor's parsed definitions and
    falls back to a 'No data' placeholder when there is nothing to show.
    The _updating flag suppresses re-entrant updates triggered by the
    tree mutations performed below.
    """
    self._updating = True
    to_collapse = []
    self.clear()
    if self._editor and self._outline_mode and self._folding_panel:
        items, to_collapse = self.to_tree_widget_items(
            self._outline_mode.definitions, to_collapse=to_collapse)
        if len(items):
            self.addTopLevelItems(items)
            self.expandAll()
            # collapse deepest items first so parent state is preserved
            for item in reversed(to_collapse):
                self.collapseItem(item)
            self._updating = False
            return
    # no data
    root = QtWidgets.QTreeWidgetItem()
    root.setText(0, _('No data'))
    root.setIcon(0, icons.icon(
        'dialog-information', ':/pyqode-icons/rc/dialog-info.png',
        'fa.info-circle'))
    self.addTopLevelItem(root)
    self._updating = False
    self.sync()
|
async def delete(self, db):
    '''Delete document.

    Retries on ConnectionFailure; once check_reconnect_tries_and_wait
    reports the retry budget is exhausted, the last failure is re-raised.
    '''
    for attempt in self.connection_retries():
        try:
            collection = db[self.get_collection_name()]
            return await collection.delete_one({self.primary_key: self.pk})
        except ConnectionFailure as ex:
            exceeded = await self.check_reconnect_tries_and_wait(attempt, 'delete')
            if exceeded:
                raise ex
|
def visitInlineShapeOrRef(self, ctx: ShExDocParser.InlineShapeOrRefContext):
    """inlineShapeOrRef : inlineShapeDefinition | shapeRef"""
    if not ctx.inlineShapeDefinition():
        # Plain shape reference: convert it straight to an IRI reference.
        self.expr = self.context.shapeRef_to_iriref(ctx.shapeRef())
        return
    # Inline definition: delegate parsing to the shape-definition sub-parser.
    from pyshexc.parser_impl.shex_shape_definition_parser import ShexShapeDefinitionParser
    definition_parser = ShexShapeDefinitionParser(self.context, self.label)
    definition_parser.visitChildren(ctx)
    self.expr = definition_parser.shape
|
def _get_indices(self, names):
    """Safe get multiple indices, translate keys for
    datelike to underlying repr.

    Looks up each name in self.indices, converting datetime-like keys to
    the representation actually stored there (Timestamp or np.datetime64);
    names with no entry map to an empty list.
    """
    def get_converter(s):
        # possibly convert to the actual key types in the indices,
        # could be a Timestamp or a np.datetime64
        if isinstance(s, (Timestamp, datetime.datetime)):
            return lambda key: Timestamp(key)
        elif isinstance(s, np.datetime64):
            return lambda key: Timestamp(key).asm8
        else:
            return lambda key: key

    if len(names) == 0:
        return []

    # Sample one existing index key to learn the stored key type.
    if len(self.indices) > 0:
        index_sample = next(iter(self.indices))
    else:
        index_sample = None
    # Dummy sample
    name_sample = names[0]
    if isinstance(index_sample, tuple):
        if not isinstance(name_sample, tuple):
            msg = ("must supply a tuple to get_group with multiple"
                   " grouping keys")
            raise ValueError(msg)
        if not len(name_sample) == len(index_sample):
            try:
                # If the original grouper was a tuple
                return [self.indices[name] for name in names]
            except KeyError:
                # turns out it wasn't a tuple
                msg = ("must supply a same-length tuple to get_group"
                       " with multiple grouping keys")
                raise ValueError(msg)
        # element-wise conversion for tuple keys
        converters = [get_converter(s) for s in index_sample]
        names = (tuple(f(n) for f, n in zip(converters, name))
                 for name in names)
    else:
        converter = get_converter(index_sample)
        names = (converter(name) for name in names)

    return [self.indices.get(name, []) for name in names]
|
def reverseCommit(self):
    """Reinsert the killed word.

    Undo counterpart of the kill command: restores the previously
    deleted text together with its styling bytes and places the cursor
    at the start of the restored region.
    """
    if self.selectionPosOld is None:
        # Nothing was killed by the forward commit; nothing to restore.
        return
    # Shorthand.
    wid = self.qteWidget
    # Select, backup, and delete the selection.
    wid.setSelection(*self.selectionPosNew)
    self.baseClass.replaceSelectedText(self.oldText)
    # Add the styling information.
    line, col = self.selectionPosNew[0:2]
    wid.SCISetStylingEx(line, col, self.oldStyle)
    wid.setCursorPosition(line, col)
|
def _is_bounded_iterator_based(self):
    """Iterator based check.

    With respect to a certain variable/value A,
    - there must be at least one exit condition being A//Iterator//HasNext == 0
    - there must be at least one local that ticks the iterator next: A//Iterator//Next

    :return: True when both conditions hold, None otherwise.  Note the
        asymmetric return values: callers should treat None as
        "not iterator-bounded", not as an error.
    """
    # Condition 0: some loop-exit condition is HasNext(iterator) == 0.
    check_0 = lambda cond: (isinstance(cond, Condition)
                            and cond.op == Condition.Equal
                            and cond.val1 == 0
                            and isinstance(cond.val0, AnnotatedVariable)
                            and cond.val0.type == VariableTypes.HasNext)
    check_0_results = [(check_0(stmt[0]), stmt[0]) for stmt in self.loop_exit_stmts]
    check_0_conds = [cond for r, cond in check_0_results if r]
    # remove all False ones
    if not check_0_conds:
        return None
    # Take the first matching condition's iterator as "the" iterator.
    the_iterator = check_0_conds[0].val0.variable
    # Condition 1: some local advances that same iterator via Next.
    check_1 = lambda local: (isinstance(local, AnnotatedVariable)
                             and local.type == VariableTypes.Next
                             and local.variable == the_iterator)
    if not any([check_1(local) for local in self.locals.values()]):
        return None
    return True
|
def getScoringVector(self, profile):
    """Returns the scoring vector [m-1, m-2, m-3, ..., 0] where m is the
    number of candidates in the election profile. This function is called
    by getCandScoresMap() which is implemented in the parent class.

    :ivar Profile profile: A Profile object that represents an election profile.
    """
    # Descending Borda weights: m-1 down to 0, one entry per candidate.
    return list(range(profile.numCands - 1, -1, -1))
|
def routes(name, **kwargs):
    '''Manage network interface static routes.

    name
        Interface name to apply the route to.

    kwargs
        Named routes
    '''
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': 'Interface {0} routes are up to date.'.format(name), }
    apply_routes = False
    # Honour the global test flag unless the caller passed one explicitly.
    if 'test' not in kwargs:
        kwargs['test'] = __opts__.get('test', False)
    # Build interface routes
    try:
        old = __salt__['ip.get_routes'](name)
        new = __salt__['ip.build_routes'](name, **kwargs)
        if kwargs['test']:
            # Dry run: report what would change without applying anything.
            # result=None is the salt convention for "would change".
            if old == new:
                return ret
            if not old and new:
                ret['result'] = None
                ret['comment'] = 'Interface {0} routes are set to be added.'.format(name)
                return ret
            elif old != new:
                diff = difflib.unified_diff(old, new, lineterm='')
                ret['result'] = None
                ret['comment'] = 'Interface {0} routes are set to be ' 'updated:\n{1}'.format(name, '\n'.join(diff))
                return ret
        # Real run: record the pending change, apply below.
        if not old and new:
            apply_routes = True
            ret['comment'] = 'Interface {0} routes added.'.format(name)
            ret['changes']['network_routes'] = 'Added interface {0} routes.'.format(name)
        elif old != new:
            diff = difflib.unified_diff(old, new, lineterm='')
            apply_routes = True
            ret['comment'] = 'Interface {0} routes updated.'.format(name)
            ret['changes']['network_routes'] = '\n'.join(diff)
    except AttributeError as error:
        # ip.* module not available for this platform/interface.
        ret['result'] = False
        ret['comment'] = six.text_type(error)
        return ret
    # Apply interface routes
    if apply_routes:
        try:
            __salt__['ip.apply_network_settings'](**kwargs)
        except AttributeError as error:
            ret['result'] = False
            ret['comment'] = six.text_type(error)
            return ret
    return ret
|
def collect_conflicts_between_fragments(
    context: ValidationContext,
    conflicts: List[Conflict],
    cached_fields_and_fragment_names: Dict,
    compared_fragment_pairs: "PairSet",
    are_mutually_exclusive: bool,
    fragment_name1: str,
    fragment_name2: str,
) -> None:
    """Collect conflicts between fragments.

    Collect all conflicts found between two fragments, including via spreading in any
    nested fragments. Found conflicts are appended to ``conflicts``.
    """
    # No need to compare a fragment to itself.
    if fragment_name1 == fragment_name2:
        return
    # Memoize so two fragments are not compared for conflicts more than once.
    if compared_fragment_pairs.has(fragment_name1, fragment_name2, are_mutually_exclusive):
        return
    compared_fragment_pairs.add(fragment_name1, fragment_name2, are_mutually_exclusive)
    fragment1 = context.get_fragment(fragment_name1)
    fragment2 = context.get_fragment(fragment_name2)
    # Unknown fragment names are reported by other validation rules.
    if not fragment1 or not fragment2:
        return None
    field_map1, fragment_names1 = get_referenced_fields_and_fragment_names(context, cached_fields_and_fragment_names, fragment1)
    field_map2, fragment_names2 = get_referenced_fields_and_fragment_names(context, cached_fields_and_fragment_names, fragment2)
    # (F) First, collect all conflicts between these two collections of fields
    # (not including any nested fragments)
    collect_conflicts_between(context, conflicts, cached_fields_and_fragment_names, compared_fragment_pairs, are_mutually_exclusive, field_map1, field_map2, )
    # (G) Then collect conflicts between the first fragment and any nested fragments
    # spread in the second fragment.
    for nested_fragment_name2 in fragment_names2:
        collect_conflicts_between_fragments(context, conflicts, cached_fields_and_fragment_names, compared_fragment_pairs, are_mutually_exclusive, fragment_name1, nested_fragment_name2, )
    # (G) Then collect conflicts between the second fragment and any nested fragments
    # spread in the first fragment.
    for nested_fragment_name1 in fragment_names1:
        collect_conflicts_between_fragments(context, conflicts, cached_fields_and_fragment_names, compared_fragment_pairs, are_mutually_exclusive, nested_fragment_name1, fragment_name2, )
|
def attr_chain(obj, attr):
    """Follow an attribute chain.

    If you have a chain of objects where a.foo -> b, b.foo -> c, etc,
    use this to iterate over all objects in the chain.  Iteration stops
    at the first link whose attribute value is falsy (None, 0, '', ...),
    matching the original truthiness test.

    Args:
        obj: the starting object (not itself yielded)
        attr: the name of the chaining attribute

    Yields:
        Each successive object in the chain.
    """
    # Renamed from 'next' -- the original shadowed the builtin next().
    current = getattr(obj, attr)
    while current:
        yield current
        current = getattr(current, attr)
|
def http_resource(self, method, url, params=None, data=None):
    """Makes an HTTP request.

    Resolves *url* against the client base URL, guarantees a trailing
    slash, and tunnels unsupported verbs through POST using the
    X-HTTP-Method-Override header.  Raises on HTTP error status.
    """
    full_url = urllib_parse.urljoin(self.url, url)
    if not full_url.endswith("/"):
        full_url += "/"
    override_headers = None
    if method.lower() in self.unsupported_methods:
        override_headers = {"X-HTTP-Method-Override": method.upper()}
        method = "POST"
    response = self.session.request(method, full_url, params=params, data=data, headers=override_headers)
    response.raise_for_status()
    return response
|
async def _async_register(self):  # pragma: no cover
    """Register the agent in the XMPP server from a coroutine.

    Performs in-band registration (ibr): connects an XML stream with the
    configured security layer and submits the JID localpart and password.
    """
    # TLS verification is skipped when verify_security is False.
    metadata = aioxmpp.make_security_layer(None, no_verify=not self.verify_security)
    query = ibr.Query(self.jid.localpart, self.password)
    # Only the stream is needed for registration; the transport and
    # features results of connect_xmlstream are unused.
    _, stream, features = await aioxmpp.node.connect_xmlstream(self.jid, metadata, loop=self.loop)
    await ibr.register(stream, query)
|
def broadcast_transaction(self, hex_tx):
    """Dispatch a raw transaction to the network.

    Returns a dict with the resulting transaction hash on success, or
    an error reply when bitcoind answers with an empty response.
    """
    tx_hash = self.obj.sendrawtransaction(hex_tx)
    if len(tx_hash) == 0:
        return error_reply('Invalid response from bitcoind.')
    return {'transaction_hash': tx_hash, 'success': True}
|
def _is_address_executable ( self , address ) :
"""Check if the specific address is in one of the executable ranges .
: param int address : The address
: return : True if it ' s in an executable range , False otherwise"""
|
for r in self . _executable_address_ranges :
if r [ 0 ] <= address < r [ 1 ] :
return True
return False
|
async def create_with_msgid(source_id: str, connection: Connection, msg_id: str):
    """Create a credential based off of a known message id for a given connection.

    :param source_id: user defined id of object.
    :param connection: connection handle of connection to receive offer from
    :param msg_id: message id
    :return: A created credential

    Example:
        credential = await Credential.create_with_msgid(source_id, connection, msg_id)
        assert await credential.get_state() == State.RequestReceived
    """
    credential = Credential(source_id, )
    # Marshal Python values into C types for the libvcx FFI call.
    c_source_id = c_char_p(source_id.encode('utf-8'))
    # msg_id is JSON-encoded before marshalling -- presumably the native
    # layer expects a JSON string here; TODO confirm against libvcx docs.
    c_msg_id = c_char_p(json.dumps(msg_id).encode('utf-8'))
    c_connection_handle = c_uint32(connection.handle)
    # Register the native callback only once, cached on the function object.
    if not hasattr(Credential.create_with_msgid, "cb"):
        Credential.create_with_msgid.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32, c_uint32, c_char_p))
    credential.handle, cred_offer = await do_call('vcx_credential_create_with_msgid', c_source_id, c_connection_handle, c_msg_id, Credential.create_with_msgid.cb)
    # The native call returns the credential offer as JSON bytes.
    credential.cred_offer = json.loads(cred_offer.decode())
    return credential
|
def smooth_l1_distance(labels, preds, delta=1.0):
    """Compute the smooth l1_distance (Huber distance).

    :param labels: A float tensor of shape [batch_size, ..., X] representing the labels.
    :param preds: A float tensor of shape [batch_size, ..., X] representing the predictions.
    :param delta: `float`, the point where the huber loss function changes from a quadratic to linear.
    :return: A float tensor of shape [batch_size, ...] -- per-element Huber
        losses summed over the last axis.
    """
    with tf.variable_scope("smooth_l1"):
        elementwise = tf.losses.huber_loss(
            labels=labels,
            predictions=preds,
            delta=delta,
            loss_collection=None,
            reduction=tf.losses.Reduction.NONE,
        )
        return tf.reduce_sum(elementwise, axis=-1)
|
def get_command(domain_name, command_name):
    """Returns a closure function that dispatches message to the WebSocket."""
    # The qualified message name is fixed at factory time.
    qualified_name = '{0}.{1}'.format(domain_name, command_name)

    def send_command(self, **kwargs):
        # Keyword arguments become the message payload.
        return self.ws.send_message(qualified_name, kwargs)

    return send_command
|
def reverse_lazy_with_query(named_url, **kwargs):
    """Reverse named URL with GET query (lazy version).

    Keyword arguments become URL-encoded query parameters appended to
    the lazily reversed URL.
    """
    query = QueryDict('', mutable=True)
    query.update(kwargs)
    return '{}?{}'.format(reverse_lazy(named_url), query.urlencode())
|
def _abort_batches(self):
    """Go through incomplete batches and abort them.

    Fails every in-flight batch with IllegalStateError so pending
    futures are resolved, then returns the batch buffers to the pool.
    """
    error = Errors.IllegalStateError("Producer is closed forcefully.")
    for batch in self._incomplete.all():
        tp = batch.topic_partition
        # Close the batch before aborting
        with self._tp_locks[tp]:
            batch.records.close()
        batch.done(exception=error)
        self.deallocate(batch)
|
def midpoint_refine_triangulation_by_vertices(self, vertices):
    """Return points defining a refined triangulation obtained by bisection
    of all edges in the triangulation connected to any of the vertices in
    the list provided.

    The midpoints of the affected edges are appended after the existing
    lon/lat coordinate arrays.
    """
    mid_lons, mid_lats = self.segment_midpoints_by_vertices(vertices=vertices)
    refined_lons = np.concatenate((self.lons, mid_lons), axis=0)
    refined_lats = np.concatenate((self.lats, mid_lats), axis=0)
    return refined_lons, refined_lats
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.