signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def getWSDL(self):
    """Return the WSDL object that contains this information item.

    Walks up the ``parent()`` chain (skipping any intermediate collection
    objects) until a ``WSDL`` instance is found.

    :return: the enclosing WSDL object, or None if no ancestor provides one.
    """
    parent = self
    while True:  # skip any collections
        if isinstance(parent, WSDL):
            return parent
        try:
            parent = parent.parent()
        except AttributeError:
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit and hid real bugs; only a missing
            # parent() accessor means we reached the top of the chain.
            break
    return None
def wait_not_displayed(element, timeout=None, fail_on_timeout=None):
    """Wait until *element* becomes invisible or the wait times out.

    Returns True if the element became invisible, otherwise False.
    If timeout is not specified or 0, the element's own wait timeout is used.

    :param element: element to poll for invisibility
    :param timeout: seconds to wait; falls back to element.wait_timeout
    :param fail_on_timeout: whether to raise when the wait expires
    :return: True if the element became invisible before the timeout
    """
    effective_timeout = timeout or element.wait_timeout
    invisible = lambda: not element.is_displayed()
    return wait(invisible, effective_timeout, fail_on_timeout)
def stream_stdin_lines():
    """Generator yielding lines from STDIN with minimal buffering delay.

    Yields each line as soon as it is available and stops on EOF.
    """
    # BUG FIX: os.fdopen(fd, 'r', 0) requests an unbuffered *text* stream,
    # which raises ValueError on Python 3. Line buffering (buffering=1) gives
    # the same per-line delivery the original intended.
    stdin = os.fdopen(sys.stdin.fileno(), 'r', 1)
    while True:
        line = stdin.readline()
        if not line:  # empty string means EOF
            break
        yield line
def _xorterm(lexer):
    """Return an xor term expression.

    Parses a product term, then any trailing xor tail; when no tail is
    present the product term stands alone.
    """
    left = _prodterm(lexer)
    tail = _xorterm_prime(lexer)
    return left if tail is None else ('xor', left, tail)
def neighbors(self, subid, params=None):
    '''v1/server/neighbors
    GET - account
    Determine what other subscriptions are hosted on the same physical
    host as a given subscription.
    Link: https://www.vultr.com/api/#server_neighbors'''
    # Merge the caller-supplied params with the mandatory SUBID filter.
    merged = update_params(params, {'SUBID': subid})
    return self.request('/v1/server/neighbors', merged, 'GET')
def sync(self):
    """Run the full synchronization sequence (or at least attempt to).

    Steps:
    0. Call "before_start" hook (which might be implemented by child classes)
    1. Retrieve data from the external source
    2. Parse the data
    3. Save the data locally
    4. Call "after_complete" hook (which might be implemented by child classes)

    :return: list of messages (a list because more than one message might
        be produced).
    """
    self.before_start()
    self.retrieve_data()
    self.parse()
    # TRICK: temporarily disable new_nodes_allowed_for_layer validation.
    # BUG FIX: the Python-2 `print` statements below were syntax errors on
    # Python 3; converted to print() calls.
    try:
        Node._additional_validation.remove('new_nodes_allowed_for_layer')
    except ValueError as e:
        print("WARNING! got exception: %s" % e)
    # avoid sending zillions of notifications
    pause_disconnectable_signals()
    self.save()
    # Re-enable new_nodes_allowed_for_layer validation
    try:
        Node._additional_validation.insert(0, 'new_nodes_allowed_for_layer')
    except ValueError as e:
        print("WARNING! got exception: %s" % e)
    # reconnect signals
    resume_disconnectable_signals()
    self.after_complete()
    # return message as a list because more than one message might be returned
    return [self.message]
def process_config(config, config_data):
    """Populate *config* with data from the configuration data dict.

    Handles the components, data, log, management and session sections of
    the configuration data.

    :param config: the config reference of the object that will hold the
        configuration data from config_data.
    :param config_data: the configuration data loaded from a configuration
        file.
    """
    # Dispatch each known section to its dedicated handler, in a fixed order.
    section_handlers = (
        ('components', process_components_config_section),
        ('data', process_data_config_section),
        ('log', process_log_config_section),
        ('management', process_management_config_section),
        ('session', process_session_config_section),
    )
    for section, handler in section_handlers:
        if section in config_data:
            handler(config, config_data[section])
def urlize_tweet(tweet):
    """Turn #hashtag and @username in a text into Twitter hyperlinks,
    similar to the ``urlize()`` function in Django.

    Mutates and returns *tweet*, storing the linked text under 'html'
    (starting from the existing 'html' value when present, else 'text').
    """
    text = tweet.get('html', tweet['text'])
    # BUG FIX (idiom): the loop variable was named `hash`, shadowing the
    # builtin of the same name.
    for hashtag in tweet['entities']['hashtags']:
        text = text.replace(
            '#%s' % hashtag['text'],
            TWITTER_HASHTAG_URL % (quote(hashtag['text'].encode("utf-8")), hashtag['text']))
    for mention in tweet['entities']['user_mentions']:
        text = text.replace(
            '@%s' % mention['screen_name'],
            TWITTER_USERNAME_URL % (quote(mention['screen_name']), mention['screen_name']))
    tweet['html'] = text
    return tweet
def get_context_data(self, **kwargs):
    """Checks if there is a SocialFriend model record for the user;
    if not, attempts to create one. If all fail, redirects to the next page."""
    context = super(FriendListView, self).get_context_data(**kwargs)
    # Flatten the existing social friends across every friend list.
    context['friends'] = [
        friend
        for friend_list in self.social_friend_lists
        for friend in friend_list.existing_social_friends()
    ]
    # Collect the provider name of each connected social auth.
    context['connected_providers'] = [sa.provider for sa in self.social_auths]
    return context
def fishers_exact_plot(data, condition1, condition2, ax=None, condition1_value=None, alternative="two-sided", **kwargs):
    """Perform a Fisher's exact test to compare two binary columns.

    Parameters
    ----------
    data : Pandas dataframe
        Dataframe to retrieve information from
    condition1 : str
        First binary column to compare (and used for test sidedness)
    condition2 : str
        Second binary column to compare
    ax : Axes, default None
        Axes to plot on
    condition1_value :
        If `condition1` is not a binary column, split on ==/!= condition1_value
    alternative :
        Specify the sidedness of the test: only "two-sided" is currently
        supported; "less"/"greater" raise ValueError.
    """
    # BUG FIX: validate `alternative` up front, before any plotting or
    # statistics work is done (the original raised only after mutating the
    # axes and running the test).
    if alternative != "two-sided":
        raise ValueError("We need to better understand the one-sided Fisher's Exact test")
    sided_str = "two-sided"
    plot = sb.barplot(x=condition1, y=condition2, ax=ax, data=data, **kwargs)
    plot.set_ylabel("Percent %s" % condition2)
    condition1_mask = get_condition_mask(data, condition1, condition1_value)
    count_table = pd.crosstab(data[condition1], data[condition2])
    print(count_table)
    oddsratio, p_value = fisher_exact(count_table, alternative=alternative)
    add_significance_indicator(plot=plot, significant=p_value <= 0.05)
    only_percentage_ticks(plot)
    print("Fisher's Exact Test: OR: {}, p-value={} ({})".format(oddsratio, p_value, sided_str))
    return FishersExactResults(
        oddsratio=oddsratio,
        p_value=p_value,
        sided_str=sided_str,
        with_condition1_series=data[condition1_mask][condition2],
        without_condition1_series=data[~condition1_mask][condition2],
        plot=plot)
def _populate_trie_reducer_regex(self, trie_accumulator=None, value="") -> CharTrie:
    """Add *value* to the trie accumulator and return the accumulator.

    The key is built by tokenizing *value* (alphanumeric runs, punctuation,
    underscores) and joining the tokens with self._joiner, lower-casing them
    unless self._case_sensitive is set.

    :param trie_accumulator: trie to populate; a fresh CharTrie is created
        when None.
    :param value: string to insert; stored as the value under its tokenized key.
    :return: the (possibly newly created) trie accumulator.
    """
    # BUG FIX: `trie_accumulator=CharTrie()` was a mutable default argument,
    # silently shared by every call that relied on the default.
    if trie_accumulator is None:
        trie_accumulator = CharTrie()
    token_pattern = re.compile(r"[A-Za-z0-9]+|[^\w\s]|_")
    tokens = token_pattern.findall(value)
    if not self._case_sensitive:
        tokens = [token.lower() for token in tokens]
    trie_accumulator[self._joiner.join(tokens)] = value
    return trie_accumulator
def factory_chat(js_obj, driver=None):
    """Factory for the appropriate chat object given a selenium JS object.

    Dispatches on the object's "kind"/"isGroup" fields to GroupChat,
    BroadcastChat or UserChat.
    """
    kind = js_obj["kind"]
    if kind not in ("chat", "group", "broadcast"):
        raise AssertionError("Expected chat, group or broadcast object, got {0}".format(kind))
    if js_obj["isGroup"]:
        return GroupChat(js_obj, driver)
    if kind == "broadcast":
        return BroadcastChat(js_obj, driver)
    return UserChat(js_obj, driver)
def reload(self, client=None):
    """API call: sync local metric configuration via a GET request.

    See
    https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get

    :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType``
    :param client: the client to use. If not passed, falls back to the
        ``client`` stored on the current metric.
    """
    api_client = self._require_client(client)
    # Fetch the server-side definition and mirror it onto this object.
    payload = api_client.metrics_api.metric_get(self.project, self.name)
    self.description = payload.get("description", "")
    self.filter_ = payload["filter"]
def allocateFile(self, sharename, peer):
    """Return a 2-tuple of (incompletePath, fullPath) for *sharename* from *peer*."""
    target_dir = self.basepath.child(str(peer))
    # Make sure the per-peer directory exists before handing out paths.
    if not target_dir.isdir():
        target_dir.makedirs()
    incomplete = target_dir.child(sharename + '.incomplete')
    complete = target_dir.child(sharename)
    return (incomplete, complete)
def decompile_scriptPubKey(asm):
    """Assemble a P2PKH scriptPubKey from its asm representation.

    >>> decompile_scriptPubKey('OP_DUP OP_HASH160 cef3550ff9e637ddd120717d43fc21f8a563caf8 OP_EQUALVERIFY OP_CHECKSIG')
    '76a914cef3550ff9e637ddd120717d43fc21f8a563caf888ac'
    """
    ops = asm.split(" ")
    script = ""
    if ops[0] == 'OP_DUP':
        script += "76"
    if ops[1] == 'OP_HASH160':
        script += 'a9'
    if len(ops[2]) == 40:
        # BUG FIX: the 0x14 push opcode (push 20 bytes) preceding the hash160
        # was missing, so output never matched the doctest above.
        script += '14' + ops[2]
    if ops[3] == 'OP_EQUALVERIFY':
        script += '88'
    if ops[4] == 'OP_CHECKSIG':
        script += 'ac'
    return script
def fo_pct(self):
    """Get the per-team overall face-off win %.

    :returns: dict, ``{'home': %, 'away': %}`` (0.0 when a team took none)
    """
    totals = self.team_totals
    percentages = {}
    for side in ('home', 'away'):
        taken = totals[side]['total']
        percentages[side] = totals[side]['won'] / (1.0 * taken) if taken else 0.0
    return percentages
def cleanup_event_loop(self):
    """Cancel outstanding tasks, close the loop, and reset loop state forever."""
    # BUG FIX: asyncio.Task.all_tasks() was removed in Python 3.9; the
    # module-level asyncio.all_tasks() is the supported replacement.
    for task in asyncio.all_tasks(self.loop):
        if self.debug:
            warnings.warn('Cancelling task: %s' % task)
        # Suppress "Task was destroyed but it is pending!" for tasks we are
        # deliberately abandoning.
        task._log_destroy_pending = False
        task.cancel()
    self.loop.close()
    # Restore the exception handler that was saved when the loop was set up.
    self.loop.set_exception_handler(self.loop_exception_handler_save)
    self.loop_exception_handler_save = None
    self.loop_policy = None
    self.loop = None
def findOptimalResults(expName, suite, outFile):
    """Go through every experiment in the specified folder. For each experiment,
    find the iteration with the best validation score, and write the metrics
    associated with that iteration to *outFile* as CSV.
    """
    writer = csv.writer(outFile)
    headers = ["testAccuracy", "bgAccuracy", "maxTotalAccuracy", "experiment path"]
    writer.writerow(headers)
    info = []
    print("\n================", expName, "=====================")
    try:
        # Retrieve the last totalCorrect from each experiment,
        # printed sorted from best to worst.
        values, params = suite.get_values_fix_params(expName, 0, "testerror", "last")
        for p in params:
            expPath = p["name"]
            if "results" not in expPath:  # idiom fix: was `not ... in ...`
                expPath = os.path.join("results", expPath)
            maxTestAccuracy, maxValidationAccuracy, maxBGAccuracy, maxIter, maxTotalAccuracy = bestScore(expPath, suite)
            row = [maxTestAccuracy, maxBGAccuracy, maxTotalAccuracy, expPath]
            info.append(row)
            writer.writerow(row)
        print(tabulate(info, headers=headers, tablefmt="grid"))
    except Exception as e:
        # BUG FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt) and now reports the cause instead of hiding it.
        print("Couldn't analyze experiment", expName, "-", e)
def prepare_app_pipeline():
    """Entry point for application setup and initial pipeline in Spinnaker."""
    runner = ForemastRunner()
    # Write configuration files first so the later steps can read them.
    runner.write_configs()
    runner.create_app()
    runner.create_pipeline()
    # Remove any temporary artifacts produced during the run.
    runner.cleanup()
def _table_exists ( self ) :
"""Database - specific method to see if the table exists""" | self . cursor . execute ( "SHOW TABLES" )
for table in self . cursor . fetchall ( ) :
if table [ 0 ] . lower ( ) == self . name . lower ( ) :
return True
return False |
def _add_item_manager(self, key, item_class, **paths):
    '''Add an item manager to this object.

    Each path template in **paths is expanded against this object's
    attributes before the manager is constructed and stored under *key*.
    '''
    updated_paths = {}
    # BUG FIX: dict.iteritems() is Python 2 only; .items() works everywhere.
    for path_type, path_value in paths.items():
        updated_paths[path_type] = path_value.format(**self.__dict__)
    manager = Redmine_Items_Manager(self._redmine, item_class, **updated_paths)
    self.__dict__[key] = manager
def convert_boolean(value, parameter, default=False):
    '''Converts to boolean (only the first char of the value is used):
    '', '-', None convert to parameter default
    'f', 'F', '0', False always convert to False
    Anything else converts to True.'''
    value = _check_default(value, parameter, ('', '-', None))
    if isinstance(value, bool):
        return value
    # Only the first character of a non-empty string is significant.
    if isinstance(value, str) and value:
        value = value[0]
    falsy_markers = ('f', 'F', '0', False, None)
    return value not in falsy_markers
def handle_m2m_user(self, sender, instance, **kwargs):
    """Handle many-to-many relationships for the user field.

    Delegates to ``handle_save`` with the changed instance's related user
    and its class (NOTE(review): presumably to re-process/index that user —
    confirm against ``handle_save``).
    """
    self.handle_save(instance.user.__class__, instance.user)
def decode_msg(msg, enc='utf-8'):
    """Decode a message fragment to text.

    Args: msg - A Message object representing the fragment
          enc - The encoding to use for decoding the message
    """
    # We avoid the get_payload decoding machinery for raw
    # content-transfer-encodings potentially containing non-ascii characters,
    # such as 8bit or binary, as these are encoded using raw-unicode-escape
    # which seems to prevent subsequent utf-8 decoding.
    raw_encodings = ("8bit", "7bit", "binary")
    cte = str(msg.get('content-transfer-encoding', '')).lower()
    payload = msg.get_payload(decode=cte not in raw_encodings)
    return decode_bytes(payload, enc)
def add(self, scene):
    """Add a scene, replacing any existing scene with the same scene_id."""
    if not isinstance(scene, Scene):
        raise TypeError()
    # Replace in place when a scene with this id is already registered.
    for index, existing in enumerate(self.__scenes):
        if existing.scene_id == scene.scene_id:
            self.__scenes[index] = scene
            return
    self.__scenes.append(scene)
def request_get_variable(self, py_db, seq, thread_id, frame_id, scope, attrs):
    '''Queue an internal command to fetch a variable's value for the debugger.

    :param scope: 'FRAME' or 'GLOBAL'
    '''
    int_cmd = InternalGetVariable(seq, thread_id, frame_id, scope, attrs)
    # The command is processed asynchronously by the targeted thread.
    py_db.post_internal_command(int_cmd, thread_id)
def call_openers(self, client, clients_list):
    """Invoke every registered opener callback with the client info."""
    for callback in self.openers:
        callback(client, clients_list)
def delete_role(name, region=None, key=None, keyid=None, profile=None):
    '''Delete an IAM role.
    CLI Example:
    .. code-block:: bash
        salt myminion boto_iam.delete_role myirole'''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    # A role that is already gone counts as a successful delete.
    if not role_exists(name, region, key, keyid, profile):
        return True
    try:
        conn.delete_role(name)
    except boto.exception.BotoServerError as e:
        log.debug(e)
        log.error('Failed to delete %s IAM role.', name)
        return False
    log.info('Deleted %s IAM role.', name)
    return True
def yank_nth_arg(event):
    """Insert the first argument of the previous command.  With an argument,
    insert the nth word from the previous command (start counting at 0)."""
    if event.arg_present:
        arg = event.arg
    else:
        arg = None
    event.current_buffer.yank_nth_arg(arg)
def _set(self, node_hash, keypath, value, if_delete_subtrie=False):
    """Set ``value`` at ``keypath`` beneath node ``node_hash`` and return the
    new hash of that subtree.

    If ``if_delete_subtrie`` is set to True, traverse to the end of keypath
    and delete the whole subtrie rooted at that node instead of setting a
    value.
    Note: keypath should be in binary array format, i.e., encoded by
    encode_to_bin().
    """
    # Empty trie: create a fresh leaf wrapped in a KV node carrying the
    # keypath; deleting from an empty trie is a no-op.
    if node_hash == BLANK_HASH:
        if value:
            return self._hash_and_save(encode_kv_node(keypath, self._hash_and_save(encode_leaf_node(value))))
        else:
            return BLANK_HASH
    nodetype, left_child, right_child = parse_node(self.db[node_hash])
    # Node is a leaf node
    if nodetype == LEAF_TYPE:
        # Keypath must be fully consumed here — leftover path would mean the
        # new key extends an existing key, which this trie forbids.
        if keypath:
            raise NodeOverrideError("Fail to set the value because the prefix of it's key" " is the same as existing key")
        if if_delete_subtrie:
            return BLANK_HASH
        # An empty/falsy value deletes the leaf.
        return self._hash_and_save(encode_leaf_node(value)) if value else BLANK_HASH
    # node is a key-value node
    elif nodetype == KV_TYPE:
        # Keypath too short: our key is a strict prefix of an existing key.
        if not keypath:
            if if_delete_subtrie:
                return BLANK_HASH
            else:
                raise NodeOverrideError("Fail to set the value because it's key" " is the prefix of other existing key")
        return self._set_kv_node(keypath, node_hash, nodetype, left_child, right_child, value, if_delete_subtrie)
    # node is a branch node
    elif nodetype == BRANCH_TYPE:
        # Keypath too short: same prefix situation as the KV case above.
        if not keypath:
            if if_delete_subtrie:
                return BLANK_HASH
            else:
                raise NodeOverrideError("Fail to set the value because it's key" " is the prefix of other existing key")
        return self._set_branch_node(keypath, nodetype, left_child, right_child, value, if_delete_subtrie)
    raise Exception("Invariant: This shouldn't ever happen")
def setup_log(name):
    '''Returns a logging instance for the provided name. The returned
    object is an instance of logging.Logger. Logged messages will be
    printed to stderr when running in the CLI, or forwarded to XBMC's
    log when running in XBMC mode.'''
    logger = logging.getLogger(name)
    logger.setLevel(GLOBAL_LOG_LEVEL)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s - %(levelname)s - [%(name)s] %(message)s'))
    logger.addHandler(stream_handler)
    # The XBMC filter tags records and forwards them in XBMC mode.
    logger.addFilter(XBMCFilter('[%s] ' % name))
    return logger
def duplicate(self):
    '''Returns a copy of the current Line, including its taxes and discounts.
    @returns: Line.'''
    clone = self.__class__(
        name=self.name, description=self.description, unit=self.unit,
        quantity=self.quantity, date=self.date, unit_price=self.unit_price,
        gin=self.gin, gtin=self.gtin, sscc=self.sscc)
    # Taxes and discounts are copied via their own duplicate() hooks so the
    # clone never shares mutable children with the original.
    clone.taxes.extend(tax.duplicate() for tax in self.taxes)
    clone.discounts.extend(discount.duplicate() for discount in self.discounts)
    return clone
def _get(url):
    """Fetch *url* and return the response body.

    Returns a ``str`` decoded as UTF-8 on Python 3, and the raw byte string
    on Python 2, matching each version's urllib API. ``HEADER`` supplies the
    request headers in both branches.
    """
    if PYTHON_3:
        req = request.Request(url, headers=HEADER)
        response = request.urlopen(req)
        return response.read().decode('utf-8')
    else:
        req = urllib2.Request(url, headers=HEADER)
        response = urllib2.urlopen(req)
        return response.read()
def generate_random_string(length=6):
    '''Returns a random string of a specified length.
    >>> len(generate_random_string(length=25))
    25
    Test randomness. Try N times and observe no duplication
    >>> N = 100
    >>> len(set(generate_random_string(10) for i in range(N))) == N
    True'''
    # One extra byte guarantees enough hex characters for odd lengths.
    num_bytes = length // 2 + 1
    hex_chars = binascii.hexlify(os.urandom(num_bytes))
    return hex_chars[:length].decode('utf-8')
def update(self, buffer, length):
    """Add *length* bytes from *buffer* into the digest calculation.

    Thin wrapper over the native ``zdigest_update`` binding; returns
    whatever the native call returns.
    """
    return lib.zdigest_update(self._as_parameter_, buffer, length)
def _simple_dispatch ( self , name , params ) :
"""Dispatch method""" | try : # Internal method
return self . funcs [ name ] ( * params )
except KeyError : # Other method
pass
# Call the other method outside the except block , to avoid messy logs
# in case of error
return self . _dispatch_method ( name , params ) |
def _set_suppress_nd(self, v, load=False):
    """Setter method for suppress_nd, mapped from YANG variable
    /bridge_domain/suppress_nd (container).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_suppress_nd is considered as a private method. Backends looking
    to populate this variable should do so via calling
    thisObj._set_suppress_nd() directly.
    """
    # Unwrap values that carry their own underlying-type converter.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Generated pyangbind wrapper: rejects values incompatible with the
        # YANG container definition.
        t = YANGDynClass(v, base=suppress_nd.suppress_nd, is_container='container', presence=False, yang_name="suppress-nd", rest_name="suppress-nd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure ND suppression'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """suppress_nd must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=suppress_nd.suppress_nd, is_container='container', presence=False, yang_name="suppress-nd", rest_name="suppress-nd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure ND suppression'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='container', is_config=True)""", })
    self.__suppress_nd = t
    # Notify the containing object of the change when it supports it.
    if hasattr(self, '_set'):
        self._set()
def get_train_err(htfa, data, F):
    """Calculate training error.

    Parameters
    ----------
    htfa : HTFA
        An instance of HTFA, the factor analysis class in BrainIAK.
    data : 2D array
        Input data to HTFA.
    F : 2D array
        HTFA factor matrix.

    Returns
    -------
    float
        Root mean squared error on training.
    """
    weights = htfa.get_weights(data, F)
    return recon_err(data, F, weights)
def copy(self):
    """Return a copy of the version."""
    clone = Version(None)
    # Copy the token/separator lists so later mutation doesn't leak between
    # the original and the copy.
    clone.tokens = list(self.tokens)
    clone.seps = list(self.seps)
    return clone
def get_authentic_node_name(self, node_name: str) -> Optional[str]:
    """Returns the exact, authentic node name for the given node name if a
    node corresponding to the given name exists in the graph (maybe not
    locally yet) or `None` otherwise.

    By default, this method checks whether a node with the given name exists
    locally in the graph and returns `node_name` if it does or `None`
    otherwise. In `Graph` extensions used by applications where the user can
    enter potentially incorrect node names, this method should be overridden
    to improve usability.

    Arguments:
        node_name (str): The node name to return the authentic node name for.
    Returns:
        The authentic name of the node corresponding to the given node name
        or `None` if no such node exists.
    """
    vertex: IGraphVertex = None
    # First try the name as-is against the wrapped igraph graph.
    try:
        vertex = self._wrapped_graph.vs.find(node_name)
    except ValueError:
        pass
    # Failing that, interpret node_name as a numeric vertex index.
    if vertex is None:
        try:
            vertex = self._wrapped_graph.vs[int(node_name)]
        except (ValueError, IndexError):
            return None
    # Prefer the vertex's "name" attribute; fall back to its index.
    try:
        return vertex["name"]
    except KeyError:
        return str(vertex.index)
def do_status(self, subcmd, opts, *args):
    """Print the status of working copy files and directories.

    usage:
        status [PATH...]

    With no args, print only locally modified items (no network access).
    With -u, add working revision and server out-of-date information.
    With -v, print full revision information on every item.

    The first five columns in the output are each one character wide:
    First column: Says if item was added, deleted, or otherwise changed
        ' ' no modifications
        'A' Added
        'C' Conflicted
        'D' Deleted
        'G' Merged
        'I' Ignored
        'M' Modified
        'R' Replaced
        'X' item is unversioned, but is used by an externals definition
        '?' item is not under version control
        '!' item is missing (removed by non-svn command) or incomplete
        '~' versioned item obstructed by some item of a different kind
    Second column: Modifications of a file's or directory's properties
        ' ' no modifications
        'C' Conflicted
        'M' Modified
    Third column: Whether the working copy directory is locked
        ' ' not locked
        'L' locked
    Fourth column: Scheduled commit will contain addition-with-history
        ' ' no history scheduled with commit
        '+' history scheduled with commit
    Fifth column: Whether the item is switched relative to its parent
        ' ' normal
        'S' switched
    The out-of-date information appears in the eighth column (with -u):
        '*' a newer revision exists on the server
        ' ' the working copy is up to date

    Remaining fields are variable width and delimited by spaces:
    The working revision (with -u or -v)
    The last committed revision and last committed author (with -v)
    The working copy path is always the final field, so it can
    include spaces.

    Example output:
        svn status wc
         M     wc/bar.c
        A  +   wc/qax.c
        svn status -u wc
         M           965    wc/bar.c
               *     965    wc/foo.c
        A  +         965    wc/qax.c
        Head revision:   981
        svn status --show-updates --verbose wc
         M           965       938 kfogel       wc/bar.c
               *     965       922 sussman      wc/foo.c
        A  +         965       687 joe          wc/qax.c
                     965       687 joe          wc/zig.c
        Head revision:   981

    ${cmd_option_list}
    """
    # BUG FIX: the Python-2 `print` statements were syntax errors on
    # Python 3; converted to print() calls with identical output.
    print("'svn %s' opts: %s" % (subcmd, opts))
    print("'svn %s' args: %s" % (subcmd, args))
def get_vault_nodes(self, vault_id, ancestor_levels, descendant_levels, include_siblings):
    """Gets a portion of the hierarchy for the given vault.

    arg:  vault_id (osid.id.Id): the ``Id`` to query
    arg:  ancestor_levels (cardinal): the maximum number of ancestor
        levels to include. A value of 0 returns no parents in the node.
    arg:  descendant_levels (cardinal): the maximum number of descendant
        levels to include. A value of 0 returns no children in the node.
    arg:  include_siblings (boolean): ``true`` to include the siblings
        of the given node, ``false`` to omit the siblings
    return: (osid.authorization.VaultNode) - a vault node
    raise: NotFound - ``vault_id`` is not found
    raise: NullArgument - ``vault_id`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinHierarchySession.get_bin_nodes
    # Fetch the node-id structure first, then wrap its raw map in a VaultNode.
    return objects.VaultNode(self.get_vault_node_ids(vault_id=vault_id, ancestor_levels=ancestor_levels, descendant_levels=descendant_levels, include_siblings=include_siblings)._my_map, runtime=self._runtime, proxy=self._proxy)
def create_comment(self, text):
    """Mimic the issue API, so we can use it everywhere.

    :param text: body of the comment.
    :return: the dashboard comment (created, or the existing one matched by
        this issue/PR and header).
    """
    return DashboardComment.get_or_create(self._issue_or_pr, self._header, text)
def check_key(self, key: str) -> bool:
    """Checks if key exists in datastore. True if yes, False if no.

    :param key: SHA512 hash key
    :return: whether or not the key exists in the datastore
    """
    return key in self.get_keys()
def _inter_manager_operations(self, other, how_to_join, func):
    """Inter-data operations (e.g. add, sub).

    Args:
        other: The other Manager for the operation.
        how_to_join: The type of join to make (e.g. right, outer).
        func: binary function applied to each pair of aligned partitions.

    Returns:
        New DataManager with new data and index.
    """
    # Align both managers along axis 0 so partitions line up row-wise.
    reindexed_self, reindexed_other_list, joined_index = self.copartition(0, other, how_to_join, False)
    # unwrap list returned by `copartition`.
    reindexed_other = reindexed_other_list[0]
    new_columns = self._join_index_objects(0, other.columns, how_to_join, sort=False)
    # There is an interesting serialization anomaly that happens if we do
    # not use the columns in `inter_data_op_builder` from here (e.g. if we
    # pass them in). Passing them in can cause problems, so we will just
    # use them from here.
    self_cols = self.columns
    other_cols = other.columns

    def inter_data_op_builder(left, right, func):
        # Runs per-partition: restore real column labels before applying func.
        left.columns = self_cols
        right.columns = other_cols
        # We reset here to make sure that the internal indexes match. We aligned
        # them in the previous step, so this step is to prevent mismatches.
        left.index = pandas.RangeIndex(len(left.index))
        right.index = pandas.RangeIndex(len(right.index))
        result = func(left, right)
        result.columns = pandas.RangeIndex(len(result.columns))
        return result

    new_data = reindexed_self.inter_data_operation(1, lambda l, r: inter_data_op_builder(l, r, func), reindexed_other)
    return self.__constructor__(new_data, joined_index, new_columns)
def _connected_pids(self, from_parent=True):
    """Follow a relationship to find connected PIDs.

    :param from_parent: search children of the current pid if True, else
        search for its parents.
    :type from_parent: bool
    :return: a PIDQuery yielding the connected PersistentIdentifier rows.
    """
    to_pid = aliased(PersistentIdentifier, name='to_pid')
    # Pick which side of the relation we traverse toward.
    if from_parent:
        to_relation = PIDRelation.child_id
        from_relation = PIDRelation.parent_id
    else:
        to_relation = PIDRelation.parent_id
        from_relation = PIDRelation.child_id
    query = PIDQuery([to_pid], db.session(), _filtered_pid_class=to_pid).join(PIDRelation, to_pid.id == to_relation)
    # accept both PersistentIdentifier models and fake PIDs with just
    # pid_value, pid_type as they are fetched with the PID fetcher.
    if isinstance(self.pid, PersistentIdentifier):
        query = query.filter(from_relation == self.pid.id)
    else:
        # No DB id available: join on (pid_value, pid_type) instead.
        from_pid = aliased(PersistentIdentifier, name='from_pid')
        query = query.join(from_pid, from_pid.id == from_relation).filter(from_pid.pid_value == self.pid.pid_value, from_pid.pid_type == self.pid.pid_type, )
    return query
def readACTIONRECORD(self):
    """Read a single SWFActionRecord; returns None when the end marker (code 0)
    is reached."""
    code = self.readUI8()
    if code == 0:
        return None
    # Long-form records (code >= 0x80) carry an explicit 16-bit length.
    length = self.readUI16() if code >= 0x80 else 0
    record = SWFActionFactory.create(code, length)
    record.parse(self)
    return record
def import_module(self, name):
    """Import a module into the bridge (idempotent: cached by name)."""
    if name in self._objects:
        return self._objects[name]
    module = _import_module(name)
    self._objects[name] = module
    # Keep a reverse mapping from object identity back to its name.
    self._object_references[id(module)] = name
    return module
def to_directed(self):
    """Return a directed representation of the graph.

    Returns
    -------
    G : DynDiGraph
        A dynamic directed graph with the same name, same nodes, and with
        each edge (u, v, data) replaced by two directed edges
        (u, v, data) and (v, u, data).

    Notes
    -----
    This returns a "deepcopy" of the edge, node, and graph attributes which
    attempts to completely copy all of the data and references.
    This is in contrast to the similar D = DynDiGraph(G) which returns a
    shallow copy of the data.
    See the Python copy module for more information on shallow
    and deep copies, http://docs.python.org/library/copy.html.
    Warning: If you have subclassed Graph to use dict-like objects in the
    data structure, those changes do not transfer to the DynDiGraph
    created by this method.

    Examples
    --------
    >>> G = dn.DynGraph()   # or MultiGraph, etc
    >>> G.add_path([0,1])
    >>> H = G.to_directed()
    >>> H.edges()
    [(0, 1), (1, 0)]

    If already directed, return a (deep) copy

    >>> G = dn.DynDiGraph()   # or MultiDiGraph, etc
    >>> G.add_path([0,1])
    >>> H = G.to_directed()
    >>> H.edges()
    [(0, 1)]
    """
    # Imported here to avoid a circular import at module load time.
    from .dyndigraph import DynDiGraph
    G = DynDiGraph()
    G.name = self.name
    G.add_nodes_from(self)
    # Replay every interaction in both directions, per time snapshot.
    for it in self.interactions_iter():
        for t in it[2]['t']:
            G.add_interaction(it[0], it[1], t=t[0], e=t[1])
    # Deep-copy graph- and node-level attributes so the copies are independent.
    G.graph = deepcopy(self.graph)
    G._node = deepcopy(self._node)
    return G
def _get(self, tx_id):
    """Read an object from BigchainDB using the tx_id.

    :param tx_id: id of the transaction to be read.
    :return: dict with the data, transaction id and transaction, or False
        when the latest transaction carries no data.
    """
    # tx_id = self._find_tx_id(resource_id)
    # Take the latest ([-1]) transaction for the asset derived from tx_id.
    value = [{'data': transaction['metadata'], 'id': transaction['id']} for transaction in self.driver.instance.transactions.get(asset_id=self.get_asset_id(tx_id))][-1]
    if value['data']['data']:
        self.logger.debug('bdb::read::{}'.format(value['data']))
        return value
    else:
        return False
def metaphone_processor(words):
    '''Double metaphone word processor: yields the non-empty, stripped
    metaphone variants of each input word.'''
    for word in words:
        for variant in double_metaphone(word):
            if not variant:
                continue
            stripped = variant.strip()
            if stripped:
                yield stripped
def save(self, **kwargs):
    """Save the changes made to the object to the server.

    The object is updated to match what the server returns.

    Args:
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raise:
        GitlabAuthenticationError: If authentication is not correct
        GitlabUpdateError: If the server cannot perform the request
    """
    changes = self._get_updated_data()
    # Nothing to update. Server fails if sent an empty dict.
    if not changes:
        return

    # Delegate the update to the manager, then sync our attributes with
    # whatever the server echoes back.
    server_data = self.manager.update(self.get_id(), changes, **kwargs)
    if server_data is not None:
        self._update_attrs(server_data)
def color_to_tuple(color, opacity=1):
    """Convert any color spec to a standard RGBA tuple.

    "red"     -> looked up in ``color_dict``
    "#ffffff" -> (255, 255, 255, alpha)

    :param color: hex string, named-color string, or an (R, G, B[, A]) tuple
    :param opacity: opacity in [0, 1] used to fill any missing alpha channel
    :return: 4-element (R, G, B, A) tuple
    """
    if (type(color) == str and color[0] == "#"):
        color = hex_color_to_tuple(color)
    elif type(color) == str:
        # BUG FIX: membership was previously tested with the original casing
        # while the lookup used color.lower(), so e.g. "Red" fell through to
        # the fallback even though "red" is a known color. Test and look up
        # with the same lowercased key.
        key = color.lower()
        if key in color_dict:
            color = color_dict[key]
        else:
            print("无法解析颜色:" + color)
            color = (255, 125, 0, int(255 * opacity))
    # Pad out to RGBA with the opacity-derived alpha.
    while len(color) < 4:
        color += (int(255 * opacity),)
    return color
def transform_conf_module(cls):
    """Transform usages of the conf module by updating locals.

    Astroid/pylint transform: when the inspected node is the
    ``openhtf.conf`` module, copy the attributes of its ``Configuration``
    class onto the module node itself (so attribute lookups on ``conf``
    resolve), and cache the node in the module-level ``CONF_NODE`` and
    ``CONF_LOCALS`` for later transforms.
    """
    global CONF_NODE
    if cls.name == 'openhtf.conf':
        # Put all the attributes in Configuration into the openhtf.conf node.
        cls._locals.update(cls.locals['Configuration'][0].locals)
        # Store reference to this node for future use.
        CONF_NODE = cls
        CONF_LOCALS.update(cls.locals)
def by_label(self, label):
    """Like `.get()`, but by label; returns None when no row matches."""
    # Deliberately use .one() instead of .first() so that
    # MultipleResultsFound still propagates to the caller.
    query = self.filter_by(label=label)
    try:
        return query.one()
    except sa.orm.exc.NoResultFound:
        return None
def get_all(self, api_method, collection_name, **kwargs):
    """Return all objects in an api_method, handle pagination, and pass
    kwargs on to the method being called.

    For example, "users.list" returns an object like:
        "members": [{<member_obj>}, {<member_obj_2>}],
        "response_metadata": {
            "next_cursor": "cursor_id"
        }
    so if you call `get_all("users.list", "members")`, this function
    will return all member objects to you while handling pagination.

    :param api_method: Slack API method name, e.g. "users.list"
    :param collection_name: key of the paginated collection in each page
    :param kwargs: extra arguments forwarded to every API call
    :return: list of all objects gathered across every page
    """
    objs = []
    limit = 250
    # if you don't provide a limit, the slack API won't return a cursor to you
    page = json.loads(self.api_call(api_method, limit=limit, **kwargs))
    while 1:
        try:
            for obj in page[collection_name]:
                objs.append(obj)
        except KeyError:
            # Unexpected page shape: log and return what we have so far.
            LOG.error("Unable to find key %s in page object: \n" "%s", collection_name, page)
            return objs
        cursor = dig(page, "response_metadata", "next_cursor")
        if cursor:
            # In general we allow applications that integrate with Slack to send
            # no more than one message per second
            # https://api.slack.com/docs/rate-limits
            time.sleep(1)
            page = json.loads(self.api_call(api_method, cursor=cursor, limit=limit, **kwargs))
        else:
            break
    return objs
def _read_ssm_locs ( in_file ) :
"""Map SSMs to chromosomal locations .""" | out = { }
with open ( in_file ) as in_handle :
in_handle . readline ( )
# header
for line in in_handle :
sid , loc = line . split ( ) [ : 2 ]
chrom , pos = loc . split ( "_" )
out [ sid ] = ( chrom , int ( pos ) )
return out |
def undo(self):
    """General Undo: restore the root state to the version recorded in
    ``self.before_storage`` (the snapshot taken before the change).

    :return:
    """
    # logger.verbose("#H# STATE_MACHINE_UNDO STARTED")
    state = self.state_machine.root_state
    self.set_root_state_to_version(state, self.before_storage)
def line_type(line: str) -> str:
    """Give type of input line, as defined in LINE_TYPES.

    >>> line_type('IN\\ta\\tb')
    'IN'
    >>> line_type('')
    'EMPTY'
    """
    # First fully-matching pattern wins.
    for pattern, kind in LINE_TYPES.items():
        if re.fullmatch(pattern, line):
            return kind
    raise ValueError("Input line \"{}\" is not bubble formatted".format(line))
def fix_trim_curves(obj):
    """Fixes direction, connectivity and similar issues of the trim curves.

    This function works for surface trim curves consisting of a single curve.

    :param obj: input surface
    :type obj: abstract.Surface
    """
    # Validate input
    if obj.pdimension != 2:
        raise GeomdlException("Input geometry must be a surface")
    # Get trims of the surface
    for o in obj:
        trims = o.trims
        if not trims:
            continue
        # Get parameter space bounding box
        parbox = get_par_box(o.domain, True)
        # Check and update trim curves with respect to the underlying surface
        updated_trims = []
        for trim in trims:
            flag, trm = check_trim_curve(trim, parbox)
            if flag:
                if trm:
                    # Wrap the repaired curve in a fresh container.
                    cont = shortcuts.generate_container_curve()
                    cont.add(trm)
                    updated_trims.append(cont)
            else:
                updated_trims.append(trim)
        # Set updated trims
        # NOTE(review): this assigns to ``obj`` while the per-element trims
        # were collected from ``o``; it looks like ``o.trims`` was intended
        # here -- confirm against the geomdl upstream source.
        obj.trims = updated_trims
def hacking_python3x_metaclass(logical_line, noqa):
    r"""Check for metaclass to be Python 3.x compatible.

    Okay: @six.add_metaclass(Meta)\nclass Foo(object):\n    pass
    Okay: @six.with_metaclass(Meta)\nclass Foo(object):\n    pass
    Okay: class Foo(object):\n    '''docstring\n\n    __metaclass__ = Meta\n'''
    H236: class Foo(object):\n    __metaclass__ = Meta
    H236: class Foo(object):\n    foo = bar\n    __metaclass__ = Meta
    H236: class Foo(object):\n    '''docstr.'''\n    __metaclass__ = Meta
    H236: class Foo(object):\n    __metaclass__ = \\\n        Meta
    Okay: class Foo(object):\n    __metaclass__ = Meta  # noqa
    """
    if noqa:
        return
    tokens = logical_line.split()
    # A real assignment needs at least "__metaclass__ = <something>".
    if len(tokens) > 2 and tokens[0] == '__metaclass__' and tokens[1] == '=':
        offset = logical_line.find('__metaclass__')
        yield (offset, "H236: Python 3.x incompatible __metaclass__, "
                       "use six.add_metaclass()")
def split_package_name(p):
    """Splits the given package name and returns a tuple (name, ver).

    The version is None when the spec has no "==" pin.
    """
    parts = p.split(six.u('=='))
    if len(parts) == 1:
        return (to_text(parts[0]), None)
    return (to_text(parts[0]), to_text(parts[1]))
def p_expression_srl(self, p):
    'expression : expression RSHIFT expression'
    # NOTE: the string above is the PLY grammar rule for this production --
    # it is functional, not documentation; do not edit it.
    # Build a shift-right-logical AST node from the two operand expressions
    # and propagate the source line number.
    p[0] = Srl(p[1], p[3], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
def constrain_positive(self, warning=True, trigger_parent=True):
    """Constrain this parameter to the default positive constraint (Logexp).

    :param warning: print a warning if re-constraining parameters.
    :param trigger_parent: whether the parent should be notified of the change.
    """
    self.constrain(Logexp(), warning=warning, trigger_parent=trigger_parent)
def get(self, key, range_end=None, count_only=None, keys_only=None, limit=None, max_create_revision=None, min_create_revision=None, min_mod_revision=None, revision=None, serializable=None, sort_order=None, sort_target=None, timeout=None):
    """Range-get keys from the key-value store.

    :param key: first key for the range; with no ``range_end`` only ``key``
        itself is looked up.
    :type key: bytes
    :param range_end: upper bound of the range [key, range_end); see the
        etcd Range API for the ``\\0`` / prefix conventions.
    :param count_only: return only the key count.
    :param keys_only: return only the keys, not the values.
    :param limit: cap on the number of keys returned.
    :param max_create_revision / min_create_revision / min_mod_revision /
        revision: revision filters as defined by the etcd Range API.
    :param serializable: use serializable (member-local, possibly stale)
        reads instead of linearizable ones.
    :param sort_order / sort_target: sorting of the returned KVs.
    :param timeout: request timeout in seconds.

    NOTE(review): this PostgreSQL-backed implementation currently honours
    only ``key`` -- every other parameter is accepted for API compatibility
    but ignored, and the row limit is hard-coded to 10.

    :return: Deferred firing with the stringified result row.
    """
    def interaction(txn):
        # Delegate the lookup to the pgetcd.get() stored procedure.
        txn.execute("SELECT pgetcd.get(%s,%s)", (Binary(key), 10))
        rows = txn.fetchall()
        return "{0}".format(rows[0][0])
    return self._pool.runInteraction(interaction)
def UpdateFeatures(self, features):
    """This operation updates features to the associated feature layer or
    table (POST only). The update features operation is performed on a
    feature service layer resource. The result of this operation is an
    array of edit results. Each edit result identifies a single feature
    and indicates if the edit were successful or not. If not, it also
    includes an error code and an error description.

    :param features: iterable of feature objects exposing
        ``_json_struct_for_featureset``
    :return: the parsed POST result from the ``updateFeatures`` endpoint
    """
    # NOTE(review): the joined JSON objects are not wrapped in "[...]";
    # verify the service accepts a bare comma-separated list for the
    # ``features`` form field.
    fd = {'features': ",".join(json.dumps(feature._json_struct_for_featureset) for feature in features)}
    return self._get_subfolder("./updateFeatures", JsonPostResult, fd)
def adict(*classes):
    '''Install one or more classes to be handled as dict.

    Returns True when every argument was a dict-like class (and is now
    registered), False otherwise.
    '''
    all_registered = True
    for candidate in classes:
        if isclass(candidate) and _infer_dict(candidate):
            # Dict-like class: record its name under its module,
            # extending the existing tuple only when it is new.
            known = _dict_classes.get(candidate.__module__, ())
            if candidate.__name__ not in known:
                _dict_classes[candidate.__module__] = known + (candidate.__name__,)
        else:
            # Not a dict-like class.
            all_registered = False
    return all_registered
def prune(A, threshold):
    """Remove coefficients that are not larger than a given threshold.

    Args:
        A (Poly):
            Input data.
        threshold (float):
            Threshold for which values to cut.

    Returns:
        (Poly):
            Same type as A.

    Examples:
        >>> P = chaospy.sum(chaospy.prange(3)*2**-numpy.arange(0, 6, 2, float))
        >>> print(P)
        0.0625q0^2+0.25q0+1.0
        >>> print(chaospy.prune(P, 0.1))
        0.25q0+1.0
        >>> print(chaospy.prune(P, 0.5))
        1.0
        >>> print(chaospy.prune(P, 1.5))
        0.0
    """
    if isinstance(A, Poly):
        # Zero out small entries key by key, then rebuild the polynomial.
        coeffs = A.A.copy()
        for key in A.keys:
            arr = coeffs[key].copy()
            arr[numpy.abs(arr) < threshold] = 0.
            coeffs[key] = arr
        return Poly(coeffs, A.dim, A.shape, A.dtype)
    # Plain array input: prune in a copy and return it.
    pruned = A.copy()
    pruned[numpy.abs(pruned) < threshold] = 0.
    return pruned
def setup_tree(ctx, verbose=None, root=None, tree_dir=None, modules_dir=None):
    '''Sets up the SDSS tree environment by invoking bin/setup_tree.py.'''
    print('Setting up the tree')
    command = 'python bin/setup_tree.py -t {0} -r {1} -m {2}'.format(tree_dir, root, modules_dir)
    ctx.run(command)
def download_file_maybe_extract(url, directory, filename=None, extension=None, check_files=[]):
    """Download the file at ``url`` to ``directory``. Extract to ``directory`` if tar or zip.

    Args:
        url (str): Url of file.
        directory (str): Directory to download to.
        filename (str, optional): Name of the file to download; Otherwise, a filename is extracted
            from the url.
        extension (str, optional): Extension of the file; Otherwise, attempts to extract extension
            from the filename.
        check_files (list of str): Check if these files exist, ensuring the download succeeded.
            If these files exist before the download, the download is skipped.
            (Mutable default is safe here: the parameter is rebound, never mutated.)

    Returns:
        (str): Filename of download file.

    Raises:
        ValueError: Error if one of the ``check_files`` are not found following the download.
    """
    if filename is None:
        filename = _get_filename_from_url(url)
    filepath = os.path.join(directory, filename)
    check_files = [os.path.join(directory, f) for f in check_files]
    # Skip the download entirely when all sentinel files already exist.
    if len(check_files) > 0 and _check_download(*check_files):
        return filepath
    if not os.path.isdir(directory):
        os.makedirs(directory)
    logger.info('Downloading {}'.format(filename))
    # Download
    if 'drive.google.com' in url:
        _download_file_from_drive(filepath, url)
    else:
        # Stream with a tqdm progress bar hooked into urlretrieve.
        with tqdm(unit='B', unit_scale=True, miniters=1, desc=filename) as t:
            urllib.request.urlretrieve(url, filename=filepath, reporthook=_reporthook(t))
    _maybe_extract(compressed_filename=filepath, directory=directory, extension=extension)
    if not _check_download(*check_files):
        raise ValueError('[DOWNLOAD FAILED] `*check_files` not found')
    return filepath
def get_files_in_commit(git_folder, commit_id="HEAD"):
    """Return the list of file paths touched by the given commit."""
    repo = Repo(str(git_folder))
    # Diff the commit against its first parent, names only.
    diff_output = repo.git.diff("--name-only", commit_id + "^", commit_id)
    return diff_output.splitlines()
def match_type(expected_type, actual_type):
    """Matches expected type to an actual type.

    The types can be specified as types, type names or [[TypeCode]].

    :param expected_type: an expected type to match; None matches anything.
    :param actual_type: an actual type to match; must not be None.
    :return: true if types are matching and false if they don't.
    :raises Exception: if actual_type is None while expected_type is not.
    """
    # Idiom fix: compare to None with identity, not equality (which can be
    # hijacked by a custom __eq__).
    if expected_type is None:
        return True
    if actual_type is None:
        raise Exception("Actual type cannot be null")
    if isinstance(expected_type, type):
        return issubclass(actual_type, expected_type)
    if isinstance(expected_type, str):
        # Fall back to name-based matching for type-name strings.
        return TypeMatcher.match_type_by_name(expected_type, actual_type)
    return False
def open_in_browser(self, session, output_filename=None):
    """Open the rendered HTML in a webbrowser.

    If output_filename=None (the default), a tempfile is used.
    The filename of the HTML file is returned.

    :param session: profiling session passed through to ``self.render``
    :param output_filename: optional path to write the HTML to
    :return: path of the written HTML file
    """
    if output_filename is None:
        # delete=False so the browser can still read the file after we close it.
        output_file = tempfile.NamedTemporaryFile(suffix='.html', delete=False)
        output_filename = output_file.name
        with codecs.getwriter('utf-8')(output_file) as f:
            f.write(self.render(session))
    else:
        with codecs.open(output_filename, 'w', 'utf-8') as f:
            f.write(self.render(session))
    # Build a file:// URL so webbrowser handles the local path portably.
    from pyinstrument.vendor.six.moves import urllib
    url = urllib.parse.urlunparse(('file', '', output_filename, '', '', ''))
    webbrowser.open(url)
    return output_filename
def _is_every_steps ( self , phase_step , batch , every ) :
"""Determine whether a periodic event should happen at this step .
Args :
phase _ step : The incrementing step .
batch : The number of steps progressed at once .
every : The interval of the period .
Returns :
Boolean of whether the event should happen .""" | if not every :
return False
covered_steps = range ( phase_step , phase_step + batch )
return any ( ( step + 1 ) % every == 0 for step in covered_steps ) |
def _set_item_class(self):
    """Create and attach the item class for this custom generator.

    Uses ``self.__tohu_items_name__`` as the class name and
    ``self.field_names`` as its fields; stores the result on
    ``self.item_cls``.
    """
    clsname = self.__tohu_items_name__
    self.item_cls = make_item_class(clsname, self.field_names)
def traceback_string():
    """Helper function that formats the most recent traceback.

    Useful when a program has an overall try/except and it wants to output
    the program trace to the log.

    :return: formatted traceback string (or None if no traceback available)
    """
    exc_type, exc_value, exc_tb = traceback.sys.exc_info()
    if exc_type is None:
        return None
    # First line is the exception message, followed by the formatted frames.
    parts = [str(exc_value)]
    parts.extend(traceback.format_tb(exc_tb))
    return "\n".join(parts)
def remove_examples_all():
    """remove arduino/examples/all directory.

    :rtype: None
    """
    target = examples_all_dir()
    if not target.exists():
        log.debug('nothing to remove: %s', target)
        return
    log.debug('remove %s', target)
    target.rmtree()
def log_and_reraise_smt_request_failed(action=None):
    """Context-manager generator: catch SDKSMTRequestFailed, log it, then
    re-raise with a message describing the attempted action.

    :param action: human-readable description of the action being attempted,
        used to prefix the logged/raised error message.
    """
    try:
        yield
    except exception.SDKSMTRequestFailed as err:
        msg = ''
        if action is not None:
            msg = "Failed to %s. " % action
        msg += "SMT error: %s" % err.format_message()
        LOG.error(msg)
        # Re-raise a fresh exception carrying the original results plus the
        # composed message.
        raise exception.SDKSMTRequestFailed(err.results, msg)
def extend(self, iterable):
    """Return new deque with all elements of iterable appended to the right.

    >>> pdeque([1, 2]).extend([3, 4])
    pdeque([1, 2, 3, 4])
    """
    # _extend returns the new internal lists plus how many items were added.
    right, left, added = self._extend(self._right_list, self._left_list, iterable)
    return PDeque(left, right, self._length + added, self._maxlen)
def comments(self, tag, limit=0):
    """Get comments only.

    :param tag: element whose comment nodes are collected
    :param limit: maximum number of comments to return (0 = no limit)
    :return: list of comment nodes
    """
    # list() replaces the redundant identity comprehension (flake8 C416).
    return list(CommentsMatch(tag).get_comments(limit))
def load_balancers_list(resource_group, **kwargs):
    '''..versionadded:: 2019.2.0

    List all load balancers within a resource group.

    :param resource_group: The resource group name to list load balancers
        within.

    :return: dict of load balancer name -> load balancer data, or a dict
        with an ``error`` key on failure.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_network.load_balancers_list testgroup
    '''
    result = {}
    netconn = __utils__['azurearm.get_client']('network', **kwargs)
    try:
        # Flatten the Azure paged iterator into a plain list of dicts.
        load_balancers = __utils__['azurearm.paged_object_to_list'](netconn.load_balancers.list(resource_group_name=resource_group))
        for load_balancer in load_balancers:
            result[load_balancer['name']] = load_balancer
    except CloudError as exc:
        # Salt convention: log the cloud error and return it under 'error'.
        __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
        result = {'error': str(exc)}
    return result
def get_locale(self):
    """Get locale.

    Will extract locale from application, trying to get one from babel
    first, then, if not available, will get one from app config.

    :return: lowercased locale string (cached on ``self.locale``)
    """
    if not self.locale:
        try:
            import flask_babel as babel
            self.locale = str(babel.get_locale()).lower()
        except ImportError:
            from flask import current_app
            # BUG FIX: ``.lower`` was referenced without calling it, which
            # cached the bound method object instead of the lowercased
            # locale string.
            self.locale = current_app.config['DEFAULT_LOCALE'].lower()
    return self.locale
def list_groups(self, **kwargs):
    """List all groups.

    :param kwargs: arbitrary search filters applied attribute-by-attribute

    :returns: list of groups
    :rtype: list[:class:`marathon.models.group.MarathonGroup`]
    """
    response = self._do_request('GET', '/v2/groups')
    groups = self._parse_response(response, MarathonGroup, is_list=True, resource_name='groups')
    # Narrow the result set by each requested attribute filter in turn.
    for attr, expected in kwargs.items():
        groups = [group for group in groups if getattr(group, attr) == expected]
    return groups
def circmean(dts, axis=2):
    """Circular mean phase.

    :param dts: array of phase angles in radians
    :param axis: axis along which to average (default 2)
    :return: circular mean angle(s) in radians, in (-pi, pi]
    """
    # BUG FIX: neither numpy scalars nor ndarrays have an ``.angle()``
    # method, so the original ``...mean(axis=axis).angle()`` raised
    # AttributeError; the free function numpy.angle must be used.
    return np.angle(np.exp(1.0j * dts).mean(axis=axis))
def copy_files(source_files, target_directory, source_directory=None):
    """Copies a list of files to the specified directory.

    If source_directory is provided, it will be prepended to each source file.

    :param source_files: iterable of file names (relative or absolute)
    :param target_directory: directory to copy into (created if missing)
    :param source_directory: optional directory prepended to each source file
    """
    try:
        os.makedirs(target_directory)
    except OSError:
        # Directory already exists (or could not be created, in which case
        # the subsequent copy fails loudly anyway). Narrowed from the
        # original bare ``except`` per its own TODO.
        pass
    for f in source_files:
        source = os.path.join(source_directory, f) if source_directory else f
        target = os.path.join(target_directory, f)
        shutil.copy2(source, target)
def from_chords(self, chords, duration=1):
    """Add chords to the Track.

    The given chords should be a list of shorthand strings or list of
    list of shorthand strings, etc.

    Each sublist divides the value by 2.

    If a tuning is set, chords will be expanded so they have a proper
    fingering.

    Example:
    >>> t = Track().from_chords(['C', ['Am', 'Dm'], 'G7', 'C#'], 1)

    :param chords: (nested) list of chord shorthands; ``None`` inserts a rest
    :param duration: note value of a top-level chord (doubling the duration
        number halves the note length, per mingus conventions)
    :return: self, for chaining
    """
    tun = self.get_tuning()

    def add_chord(chord, duration):
        # Nested lists recurse with the value divided by 2
        # (duration numbers are denominators).
        if type(chord) == list:
            for c in chord:
                add_chord(c, duration * 2)
        else:
            chord = NoteContainer().from_chord(chord)
            if tun:
                # Expand to a playable fingering for the current tuning.
                chord = tun.find_chord_fingering(chord, return_best_as_NoteContainer=True)
            if not self.add_notes(chord, duration):
                # This should be the standard behaviour of add_notes:
                # fill what is left of the current bar, then continue the
                # chord in the next bar with the remaining value.
                dur = self.bars[-1].value_left()
                self.add_notes(chord, dur)
                # warning should hold note
                self.add_notes(chord, value.subtract(duration, dur))

    for c in chords:
        if c is not None:
            add_chord(c, duration)
        else:
            # None means a rest of the given duration.
            self.add_notes(None, duration)
    return self
def accept_quantity(input_type=float, allow_none=False):
    """A class-method decorator which allow a given method (typically the set_value method) to receive both a
    astropy.Quantity or a simple float, but to be coded like it's always receiving a pure float in the right units.
    This is to give a way to avoid the huge bottleneck that are astropy.units.

    :param input_type: the expected type for the input (float, int)
    :param allow_none: whether to allow or not the passage of None as argument (default: False)
    :return: a decorator for the particular type
    """
    def accept_quantity_wrapper(method):
        def handle_quantity(instance, value, *args, **kwargs):
            # For speed reasons, first run the case where the input is not a quantity, and fall back to the handling
            # of quantities if that fails. The part that fails if the input is a Quantity is the conversion
            # input_type(value). This could have been handled more elegantly with a "finally" clause, but that would
            # have a 40 percent speed impact...
            try:
                new_value = input_type(value)
                return method(instance, new_value, *args, **kwargs)
            except TypeError:
                # Slow for slow, check that we actually have a quantity or None (if allowed)
                if isinstance(value, u.Quantity):
                    # Convert to the instance's unit and unwrap to a plain float.
                    new_value = value.to(instance.unit).value
                    return method(instance, new_value, *args, **kwargs)
                elif value is None:
                    if allow_none:
                        return method(instance, None, *args, **kwargs)
                    else:  # pragma: no cover
                        raise TypeError("You cannot pass None as argument for " "method %s of %s" % (method.__name__, instance.name))
                else:  # pragma: no cover
                    raise TypeError("You need to pass either a %s or a astropy.Quantity " "to method %s of %s" % (input_type.__name__, method.__name__, instance.name))
        return handle_quantity
    return accept_quantity_wrapper
def tree(self, offset=0, prefix_inherited="", nofeatsplease=['Phoneme']):
    """Print a tree-structure of this object's phonological representation.

    :param offset: current indentation depth (incremented on recursion)
    :param prefix_inherited: dotted path prefix accumulated from ancestors
    :param nofeatsplease: class names whose features are not printed
    :return: the accumulated tree string
    """
    # NOTE(review): indentation of this block was reconstructed from a
    # flattened dump -- verify nesting against the original source.
    tree = ""
    numchild = 0
    for child in self.children:
        if type(child) == type([]):
            # Lists wrap a single child; unwrap it.
            child = child[0]
        numchild += 1
        classname = child.classname()
        # Blank-line separation grows with the structural level.
        if classname == "Word":
            tree += "\n\n"
        elif classname == "Line":
            tree += "\n\n\n"
        elif classname == "Stanza":
            tree += "\n\n\n\n"
        if offset != 0:
            tree += "\n"
            for i in range(0, offset):
                tree += " "
            # if not len(child.feats):
            #    tree += " "
            tree += "|"
        tree += "\n"
        newline = ""
        for i in range(0, offset):
            newline += " "
        newline += "|"
        # Abbreviate the class name to its uppercase letters for the prefix.
        cname = ""
        for letter in classname:
            if letter == letter.upper():
                cname += letter
        prefix = prefix_inherited + cname + str(numchild) + "."
        newline += "-----| (" + prefix[:-1] + ") <" + classname + ">"
        if child.isBroken():
            newline += "<<broken>>"
        else:
            string = self.u2s(child)
            if (not "<" in string):
                # Plain string payload: pad and bracket it.
                newline = makeminlength(newline, 99)
                newline += "[" + string + "]"
            elif string[0] != "<":
                newline += "\t" + string
        if len(child.feats):
            if (not child.classname() in nofeatsplease):
                # Print each non-None feature on its own indented line.
                for k, v in sorted(child.feats.items()):
                    if v == None:
                        continue
                    newline += "\n"
                    for i in range(0, offset + 1):
                        newline += " "
                    newline += "| "
                    newline += self.showFeat(k, v)
        tree += newline
        # Recurse into the child one level deeper.
        tree += child.tree(offset + 1, prefix)
    return tree
def _FetchLinuxFlags(self):
    """Fetches Linux extended file flags via the FS_IOC_GETFLAGS ioctl.

    :return: the flags bitmask, or 0 when unavailable (non-Linux platform,
        symlink, unopenable file, or unsupported filesystem).
    """
    if platform.system() != "Linux":
        return 0
    # Since we open a file in the next step we do not want to open a symlink.
    # `lsattr` returns an error when trying to check flags of a symlink, so we
    # assume that symlinks cannot have them.
    if self.IsSymlink():
        return 0
    # Some files (e.g. sockets) cannot be opened. For these we do not really
    # care about extended flags (they should have none). `lsattr` does not seem
    # to support such cases anyway. It is also possible that a file has been
    # deleted (because this method is used lazily).
    try:
        fd = os.open(self._path, os.O_RDONLY)
    except (IOError, OSError):
        return 0
    try:
        # This import is Linux-specific.
        import fcntl  # pylint: disable=g-import-not-at-top
        # TODO: On Python 2.7.6 `array.array` accepts only byte
        # strings as an argument. On Python 2.7.12 and 2.7.13 unicodes are
        # supported as well. On Python 3, only unicode strings are supported. This
        # is why, as a temporary hack, we wrap the literal with `str` call that
        # will convert it to whatever is the default on given Python version. This
        # should be changed to raw literal once support for Python 2 is dropped.
        buf = array.array(compatibility.NativeStr("l"), [0])
        # TODO(user): pytype: incorrect type spec for fcntl.ioctl
        # pytype: disable=wrong-arg-types
        fcntl.ioctl(fd, self.FS_IOC_GETFLAGS, buf)
        # pytype: enable=wrong-arg-types
        return buf[0]
    except (IOError, OSError):
        # File system does not support extended attributes.
        return 0
    finally:
        # Always release the descriptor, whatever the ioctl outcome.
        os.close(fd)
def add(self, port, pkt):
    '''Add new input port + packet to buffer.

    :param port: input port the packet arrived on
    :param pkt: packet to store (deep-copied so later mutation of the
        caller's object does not affect the buffered copy)
    :return: the integer slot id assigned to this entry
    :raises FullBuffer: when the buffer already holds ``_buffsize`` entries
    '''
    # Idiom fix: the local was named ``id``, shadowing the builtin.
    entry_id = len(self._buffer) + 1
    if entry_id > self._buffsize:
        raise FullBuffer()
    self._buffer[entry_id] = (port, deepcopy(pkt))
    return entry_id
def make_post_unpack_alert(streamer):
    """Post-processor to "alert" message, to add an "alert_info" dictionary of parsed
    alert information.

    :param streamer: the message streamer used to build the field parser
    :return: callable(d, f) that parses ``d["payload"]`` and stores the
        result under ``d["alert_info"]``, returning ``d``
    """
    # Field layout of the alert sub-message (name:struct-code pairs).
    the_struct = ("version:L relayUntil:Q expiration:Q id:L cancel:L setCancel:[L] minVer:L "
                  "maxVer:L setSubVer:[S] priority:L comment:S statusBar:S reserved:S")
    alert_submessage_parser = _make_parser(streamer, the_struct)

    def post_unpack_alert(d, f):
        # Parse the raw payload bytes into the structured alert dict.
        d1 = alert_submessage_parser(io.BytesIO(d["payload"]))
        d["alert_info"] = d1
        return d
    return post_unpack_alert
def basic(request, response, verify_user, realm='simple', context=None, **kwargs):
    """Basic HTTP Authentication.

    :param request: incoming request carrying the ``Authorization`` header
    :param response: response used to set ``WWW-Authenticate`` challenges
    :param verify_user: callable(user_id, key[, context]) returning the user
        on success or a falsy value on failure
    :param realm: realm name advertised in the challenge
    :param context: optional extra context forwarded to ``verify_user``
    :return: the verified user, ``None`` when no credentials were supplied,
        or ``False`` when authentication failed
    :raises HTTPUnauthorized: when the header or its encoding is malformed
    """
    http_auth = request.auth
    response.set_header('WWW-Authenticate', 'Basic')
    if http_auth is None:
        # No credentials supplied at all -- challenge only, no error.
        return
    if isinstance(http_auth, bytes):
        http_auth = http_auth.decode('utf8')
    try:
        auth_type, user_and_key = http_auth.split(' ', 1)
    except ValueError:
        raise HTTPUnauthorized('Authentication Error', 'Authentication header is improperly formed', challenges=('Basic realm="{}"'.format(realm),))
    if auth_type.lower() == 'basic':
        try:
            # "user:pass" is base64-encoded; only the first ':' separates them.
            user_id, key = base64.decodebytes(bytes(user_and_key.strip(), 'utf8')).decode('utf8').split(':', 1)
            try:
                user = verify_user(user_id, key)
            except TypeError:
                # verify_user variant that also takes the context argument.
                user = verify_user(user_id, key, context)
            if user:
                # Clear the challenge on success.
                response.set_header('WWW-Authenticate', '')
                return user
        except (binascii.Error, ValueError):
            raise HTTPUnauthorized('Authentication Error', 'Unable to determine user and password with provided encoding', challenges=('Basic realm="{}"'.format(realm),))
    return False
def run(*tasks: Awaitable, loop: asyncio.AbstractEventLoop = None):
    """Helper to run tasks in the event loop.

    :param tasks: Tasks to run in the event loop.
    :param loop: The event loop; defaults to the current event loop,
        resolved at call time.
    :return: list of task results, in argument order.
    """
    # BUG FIX: the previous default ``loop=asyncio.get_event_loop()`` was
    # evaluated once at import time, so a loop created or replaced later
    # was silently ignored. Resolve the default at call time instead.
    if loop is None:
        loop = asyncio.get_event_loop()

    async def _gather():
        # Gathering inside the running loop avoids the ``loop`` keyword of
        # ensure_future/gather, which was removed in Python 3.10.
        return await asyncio.gather(*tasks)

    return loop.run_until_complete(_gather())
def _polevl ( x , coefs , N ) :
"""Port of cephes ` ` polevl . c ` ` : evaluate polynomial
See https : / / github . com / jeremybarnes / cephes / blob / master / cprob / polevl . c""" | ans = 0
power = len ( coefs ) - 1
for coef in coefs :
try :
ans += coef * x ** power
except OverflowError :
pass
power -= 1
return ans |
def print_upper_triangular_matrix(matrix):
    """Prints a CVRP data dict matrix.

    Arguments
        matrix: dict of dicts, row-key -> (col-key -> value)

    Notes
        It is assummed that the first row of matrix contains all needed headers.
        Each successive row is indented one more column, producing the
        upper-triangular layout.
    """
    # Print column header
    # Assumes first row contains all needed headers
    first = sorted(matrix.keys())[0]
    print('\t', end=' ')
    for i in matrix[first]:
        print('{}\t'.format(i), end=' ')
    print()
    indent_count = 0
    for i in matrix:
        # Print line header
        print('{}\t'.format(i), end=' ')
        # Shift this row right by one column per preceding row.
        if indent_count:
            print('\t' * indent_count, end=' ')
        for j in sorted(matrix[i]):
            # required because dict doesn't guarantee insertion order
            print('{}\t'.format(matrix[i][j]), end=' ')
        print()
        indent_count = indent_count + 1
def _fixpath ( self , p ) :
"""Apply tilde expansion and absolutization to a path .""" | return os . path . abspath ( os . path . expanduser ( p ) ) |
def load_suffixes(self, filename):
    """Build the suffix dictionary.

    Each line of ``filename`` is "long,abbreviation". The keys will be
    possible long versions, and the values will be the accepted
    abbreviations. Everything should be stored using the value version,
    and you can search all by building a set of self.suffixes.keys() and
    self.suffixes.values().

    :param filename: path to the comma-separated suffix file
    """
    with open(filename, 'r') as f:
        for line in f:
            # Strip the newline and split once (the original split each
            # line three separate times).
            parts = line.strip().split(',')
            # Make sure we have key and value.
            if len(parts) != 2:
                continue
            long_form, abbreviation = parts
            self.suffixes[long_form] = abbreviation
def has_next_assessment_section(self, assessment_section_id):
    """Tests if there is a next assessment section in the assessment following the given assessment section ``Id``.

    arg:    assessment_section_id (osid.id.Id): ``Id`` of the
            ``AssessmentSection``
    return: (boolean) - ``true`` if there is a next section,
            ``false`` otherwise
    raise:  IllegalState - ``has_assessment_begun()`` is ``false``
    raise:  NotFound - ``assessment_taken_id`` is not found
    raise:  NullArgument - ``assessment_taken_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure occurred
    *compliance: mandatory -- This method must be implemented.*
    """
    # Probe for the next section; IllegalState from the getter means
    # there is none. Any other exception propagates unchanged.
    try:
        self.get_next_assessment_section(assessment_section_id)
    except errors.IllegalState:
        return False
    return True
def _install_houdini(use_threaded_wrapper):
    """Helper function to SideFX Houdini support.

    Registers a wrapper that marshals calls onto Houdini's main thread via
    ``hdefereval``, then runs the shared setup.
    """
    import hdefereval

    def threaded_wrapper(func, *args, **kwargs):
        # Execute on Houdini's main thread and return the call's result.
        return hdefereval.executeInMainThreadWithResult(func, *args, **kwargs)

    _common_setup("Houdini", threaded_wrapper, use_threaded_wrapper)
def get_site_decorator(site_param='site', obj_param='obj', context_param='context'):
    '''It is a function that returns decorator factory useful for PluggableSite
    views. The returned factory produces decorators that take care of common
    boilerplate: they pass the PluggableSite instance to the decorated view,
    look up the object the site is attached to, build the common request
    context, and forward any extra keyword arguments declared on the factory.

    For example usage please check photo_albums.views.

    Btw, this decorator seems frightening for me. It feels that
    "views as PluggableSite methods" approach can easily make this decorator
    obsolete. But for now it just works.
    '''
    def site_method(**extra_params):
        def decorator(fn):
            @wraps(fn)
            def wrapper(request, **kwargs):
                # The site instance must always be injected by the URLconf.
                try:
                    site = kwargs.pop(site_param)
                except KeyError:
                    raise ValueError("'%s' parameter must be passed to "
                                     "decorated view (%s)" % (site_param, fn))
                # Pull out the factory-declared parameters destined for the
                # view itself, falling back to their declared defaults.
                view_kwargs = {name: kwargs.pop(name, default)
                               for name, default in extra_params.items()}
                # Whatever remains in kwargs are object_getter lookup
                # parameters; resolve the base object with them.
                try:
                    obj = site.object_getter(**kwargs)
                except models.ObjectDoesNotExist:
                    raise Http404("Base object does not exist.")
                context_instance = RequestContext(
                    request,
                    site.get_common_context(obj),
                    processors=site.context_processors)
                # Hand the site, the object and the prepared context over
                # to the decorated view under the configured names.
                view_kwargs[site_param] = site
                view_kwargs[obj_param] = obj
                view_kwargs[context_param] = context_instance
                return fn(request, **view_kwargs)
            return wrapper
        return decorator
    return site_method
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.