signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def current_line_num(self):
    '''Get the current line number as an integer (1-based).

    Translated from PyFrame_GetLineNumber and PyCode_Addr2Line;
    see Objects/lnotab_notes.txt in the CPython source tree.

    Returns None when the frame has been optimized out.
    '''
    if self.is_optimized_out():
        return None
    f_trace = self.field('f_trace')
    # NOTE(review): ``long`` is a Python 2 builtin -- this helper is
    # presumably meant to run inside a Python 2 gdb; confirm.
    if long(f_trace) != 0:
        # We have a non-NULL f_trace: the interpreter keeps f_lineno
        # up to date while tracing, so use it directly.
        return self.f_lineno
    else:
        # No trace function: derive the line from the last executed
        # bytecode offset via the code object's line table.
        return self.co.addr2line(self.f_lasti)
|
def for_category(self, category, live_only=False):
    """Return a queryset of EntryTag instances for the given category.

    :param category: the Category instance whose tag is matched.
    :param live_only: when True, restrict the result to "live" entries.
    :rtype: django.db.models.query.QuerySet
    """
    lookup = {'tag': category.tag}
    if live_only:
        lookup['entry__live'] = True
    return self.filter(**lookup)
|
def fix_e712(self, result):
    """Fix (trivial case of) comparison with a boolean (pycodestyle E712).

    Either negates a whole simple ``if x == False:`` / ``if x != True:``
    statement, or drops the redundant ``True``/``False`` literal to the
    right of the comparison operator at the reported offset.
    """
    (line_index, offset, target) = get_index_offset_contents(result, self.source)
    # Handle very easy "not" special cases: a whole-line comparison
    # against a boolean becomes ``if not x:``.
    if re.match(r'^\s*if [\w."\'\[\]]+ == False:$', target):
        self.source[line_index] = re.sub(r'if ([\w."\'\[\]]+) == False:', r'if not \1:', target, count=1)
    elif re.match(r'^\s*if [\w."\'\[\]]+ != True:$', target):
        self.source[line_index] = re.sub(r'if ([\w."\'\[\]]+) != True:', r'if not \1:', target, count=1)
    else:
        # General case: ``offset`` points at the two-character operator.
        right_offset = offset + 2
        if right_offset >= len(target):
            # Nothing to the right of the operator; give up.
            return []
        left = target[:offset].rstrip()
        center = target[offset:right_offset]
        right = target[right_offset:].lstrip()
        # Handle simple cases only: strip a literal boolean on the right.
        new_right = None
        if center.strip() == '==':
            if re.match(r'\bTrue\b', right):
                new_right = re.sub(r'\bTrue\b *', '', right, count=1)
        elif center.strip() == '!=':
            if re.match(r'\bFalse\b', right):
                new_right = re.sub(r'\bFalse\b *', '', right, count=1)
        if new_right is None:
            return []
        # Keep a separating space so tokens don't fuse together.
        # NOTE(review): assumes new_right is non-empty -- presumably the
        # source line always ends in a newline; confirm.
        if new_right[0].isalnum():
            new_right = ' ' + new_right
        self.source[line_index] = left + new_right
|
def makeRequests(callable_, args_list, callback=None, exc_callback=_handle_thread_exception):
    """Create several work requests for the same callable with different arguments.

    Convenience function: each invocation of ``callable_`` receives different
    argument values. Each item in ``args_list`` is either a 2-item tuple of
    (positional-argument list, keyword-argument dict) or a single non-tuple
    argument. See the ``WorkRequest`` docstring for ``callback`` and
    ``exc_callback``.
    """
    work_requests = []
    for params in args_list:
        if isinstance(params, tuple):
            args, kwds = params[0], params[1]
        else:
            # A single bare argument: wrap it as the sole positional arg.
            args, kwds = [params], None
        work_requests.append(
            WorkRequest(callable_, args, kwds,
                        callback=callback, exc_callback=exc_callback))
    return work_requests
|
def split_path(path):
    '''Split the argument into its constituent directory components.

    Returns the components as a list, with the drive (if any) first.
    Aborts the process if the path nests more than 100 levels deep.
    '''
    def _descend(remaining, parts):
        # Guard against pathological nesting: bail out once more than
        # 100 components have accumulated.
        if len(parts) > 100:
            fullpath = os.path.join(*([remaining, ] + parts))
            print("Directory '{}' contains too many levels".format(fullpath))
            exit(1)
        head, tail = os.path.split(remaining)
        if len(tail) > 0:
            parts.insert(0, tail)
            _descend(head, parts)
        elif len(head) > 1:
            # The split produced an empty tail but a non-root head
            # (e.g. a trailing separator) -- keep reducing the head.
            _descend(head, parts)

    components = []
    path = os.path.realpath(os.path.normpath(path))
    drive, path = os.path.splitdrive(path)
    if drive:
        components.append(drive)
    _descend(path, components)
    return components
|
def zipf_random_sample(distr_map, sample_len):
    """Helper: generate a random Zipf sample of the given length.

    Args:
      distr_map: list of float, Zipf's distribution over nbr_symbols.
      sample_len: integer, length of sequence to generate.

    Returns:
      list of integer, Zipf's random sample over nbr_symbols.
    """
    # Uniform draws in [0.0, 1.0); an exact 0.0 is possible but almost
    # improbable, and searchsorted handles it correctly regardless.
    draws = np.random.random(sample_len)
    return list(np.searchsorted(distr_map, draws))
|
def features(self):
    """Return the list of property features scraped from the ad page.

    :return: list of feature strings, or None when the page markup
        cannot be queried.
    """
    try:
        list_items = self._ad_page_content.select("#features li")
    except Exception as e:
        if self._debug:
            logging.error("Error getting features. Error message: " + e.args[0])
        return
    return [item.text for item in list_items]
|
def move(self, source, dest):
    """Move *source* to *dest* with unix ``mv`` semantics.

    Unfortunately, shutil.move works differently!
    Consider (all paths point to directories)::

        mv /a/b /a/c

    expected outcome:

    case 1: 'c' does not exist:
        b is moved over to /a such that /a/c is what /a/b was before
    case 2: 'c' does exist:
        b is moved into '/a/c/' such that we now have '/a/c/b'

    But shutil.move will use os.rename whenever possible, which means
    '/a/b' is renamed to '/a/c': the content of b ends up in c. To get
    case 2, the source basename is appended to *dest* before delegating
    to shutil when both are existing directories.
    """
    if dest.scheme == 'file':
        if source.isdir() and dest.isdir():
            # Reproduce mv's "move into existing directory" behavior.
            dest /= source.basename()
        return shutil.move(source.path, dest.path)
    else:
        # Non-local destinations: fall back to the generic implementation.
        return super(LocalFileSystem, self).move(source, dest)
|
def make_copy(cls, generator):
    """Create a copy of the generator via its command-line representation.

    :param generator: the generator to copy
    :type generator: DataGenerator
    :return: the copy of the generator
    :rtype: DataGenerator
    """
    # NOTE(review): the classname comes from a fresh DataGenerator()
    # rather than from *generator* itself -- confirm that subclass
    # instances round-trip correctly through from_commandline.
    return from_commandline(to_commandline(generator), classname=classes.get_classname(DataGenerator()))
|
def block(broker):
    """Path: /sys/block entries, excluding names starting with . or ram or dm- or loop"""
    excluded = (".", "ram", "dm-", "loop")
    return ["/dev/%s" % name
            for name in os.listdir("/sys/block")
            if not name.startswith(excluded)]
|
def getTypes(cls):
    """Yield the names of class attributes whose values are classes.

    Iterates through the attributes of *cls* and keeps only the
    user-defined enumerations (attributes bound to types), filtering out
    the non-type attributes implicit to Python classes.
    """
    for name in dir(cls):
        if isinstance(getattr(cls, name), type):
            yield name
|
def load_entry_point_group_mappings(self, entry_point_group_mappings):
    """Register mapping actions from every entry point in the given group."""
    for entry_point in iter_entry_points(group=entry_point_group_mappings):
        self.register_mappings(entry_point.name, entry_point.module_name)
|
def get_short_url(self, obj):
    """Get the short URL of a blog post, e.g. ``/blog/<slug>/``.

    Prefers ``get_absolute_url`` when it resolves; falls back to building
    the URL from the slug, removing the dependency on Mezzanine's
    reverse URLs when it is deployed only as an API backend.
    """
    try:
        return obj.get_absolute_url()
    except NoReverseMatch:
        return '/blog/' + obj.slug
|
def get_or_create_group(groupname, gid_preset, system=False, id_dependent=True):
    """Return the id for the given group, creating it first if it does not exist.

    :param groupname: Group name.
    :type groupname: unicode
    :param gid_preset: Group id to set if a new group is created.
    :type gid_preset: int or unicode
    :param system: Create a system group.
    :type system: bool
    :param id_dependent: If the group exists but its id does not match
        `gid_preset`, an error is thrown.
    :type id_dependent: bool
    :return: Group id of the existing or new group.
    :rtype: int
    """
    existing_gid = get_group_id(groupname)
    if existing_gid is None:
        # No such group yet: create it with the requested id.
        create_group(groupname, gid_preset, system)
        return gid_preset
    if id_dependent and existing_gid != gid_preset:
        error("Present group id '{0}' does not match the required id of the environment '{1}'.".format(existing_gid, gid_preset))
    return existing_gid
|
def setup_ui(self, ):
    """Create all necessary ui elements for the tool

    :returns: None
    :rtype: None
    :raises: None
    """
    log.debug("Setting up the ui")
    # Set up every page in declaration order.
    for section in ('prjs', 'prj', 'seq', 'shot', 'atype',
                    'asset', 'dep', 'task', 'users', 'user'):
        getattr(self, 'setup_%s_page' % section)()
|
def netconf_capability_change_changed_by_server_or_user_server_server(self, **kwargs):
    """Auto Generated Code.

    Builds the XML payload for the ietf-netconf-notifications
    ``netconf-capability-change`` notification, selecting the
    changed-by/server-or-user/server case, and passes it to the callback
    (``kwargs['callback']`` or ``self._callback``).
    """
    config = ET.Element("config")
    netconf_capability_change = ET.SubElement(config, "netconf-capability-change", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-notifications")
    changed_by = ET.SubElement(netconf_capability_change, "changed-by")
    server_or_user = ET.SubElement(changed_by, "server-or-user")
    server = ET.SubElement(server_or_user, "server")
    # NOTE(review): this emits a nested <server><server/></server>; it
    # matches the generated binding but looks odd -- confirm against the
    # ietf-netconf-notifications YANG schema.
    server = ET.SubElement(server, "server")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def respect_language(language):
    """Context manager that changes the current translation language for
    all code inside the following block.

    Can e.g. be used inside tasks like this::

        from celery import task
        from djcelery.common import respect_language

        @task
        def my_task(language=None):
            with respect_language(language):
                pass
    """
    if not language:
        # No language requested: leave the active translation alone.
        yield
        return
    previous = translation.get_language()
    translation.activate(language)
    try:
        yield
    finally:
        # Always restore the language that was active on entry.
        translation.activate(previous)
|
def executor(arg: Union[Executor, str, Callable] = None):
    """Decorate a function so that it runs in an :class:`~concurrent.futures.Executor`.

    If a resource name is given, the first argument must be a :class:`~.Context`.

    Usage::

        @executor
        def should_run_in_executor():

    With a resource name::

        @executor('resourcename')
        def should_run_in_executor(ctx):

    :param arg: a callable to decorate, an :class:`~concurrent.futures.Executor`
        instance, the resource name of one, or ``None`` to use the event
        loop's default executor
    :return: the wrapped function
    """
    def outer_wrapper(func: Callable):
        @wraps(func)
        def inner_wrapper(*args, **kwargs):
            # Find the Context among the first two positional arguments.
            # (The generator-local ``arg`` below shadows the outer ``arg``.)
            try:
                ctx = next(arg for arg in args[:2] if isinstance(arg, Context))
            except StopIteration:
                raise RuntimeError('the first positional argument to {}() has to be a Context ' 'instance'.format(callable_name(func))) from None
            # ``resource_name`` is the closure variable bound below; this
            # branch is only reachable when ``arg`` was a string.
            executor = ctx.require_resource(Executor, resource_name)
            return asyncio_extras.call_in_executor(func, *args, executor=executor, **kwargs)
        return inner_wrapper
    if isinstance(arg, str):
        resource_name = arg
        return outer_wrapper
    # ``arg`` is a callable, an Executor instance, or None: delegate.
    return asyncio_extras.threadpool(arg)
|
def check_color(c):
    """Check and parse color specs.

    Accepts either a single ``[r, g, b]`` triplet or a list of triplets
    ``[[r, g, b], [r, g, b], ...]`` and returns a 2-D array with one
    ``[r, g, b]`` row per point.

    :param c: color spec convertible to a numpy array.
    :return: 2-D array of shape (n_points, 3).
    :raises ValueError: if values are not grouped in threes.
        (Previously a bare ``Exception`` was raised; ``ValueError`` is
        more precise and remains catchable by existing handlers.)
    """
    c = asarray(c)
    if c.ndim == 1:
        # Promote a single triplet to a 1-row 2-D array.
        c = c.flatten()
        c = c[newaxis, :]
        if c.shape[1] != 3:
            raise ValueError("Color must have three values per point")
    elif c.ndim == 2:
        if c.shape[1] != 3:
            raise ValueError("Color array must have three values per point")
    # NOTE(review): arrays with ndim not in (1, 2) pass through
    # unvalidated -- preserved historical behavior.
    return c
|
def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None):
    """Parse a querystring and return it as :class:`MultiDict`.

    Key decoding differs between Python versions: on Python 3 keys are
    always fully decoded; on Python 2 keys stay bytestrings if they fit
    into ASCII unless `decode_keys` is True. If `charset` is ``None`` no
    unicode decoding happens and raw bytes are returned.

    A missing value for a key defaults to an empty value unless
    `include_empty` is False. Encoding errors follow `errors`
    (``'replace'`` or ``'strict'``; strict mode raises
    `HTTPUnicodeError`).

    .. versionchanged:: 0.5
        Only "&" is supported as a separator by default (";" previously
        also worked); pass a different `separator` to change it. The
        `cls` parameter was added.

    :param s: the query string to decode.
    :param charset: charset of the query string; ``None`` disables
        unicode decoding.
    :param decode_keys: Python 2 only -- force keys to unicode.
    :param include_empty: set False to drop empty values from the dict.
    :param errors: the decoding error behavior.
    :param separator: the pair separator, defaults to ``&``.
    :param cls: optional dict class; defaults to :class:`MultiDict`.
    """
    if cls is None:
        cls = MultiDict
    # Coerce the separator to the same type (text vs. bytes) as the
    # input so that ``s.split(separator)`` works either way.
    if isinstance(s, text_type) and not isinstance(separator, text_type):
        separator = separator.decode(charset or 'ascii')
    elif isinstance(s, bytes) and not isinstance(separator, bytes):
        separator = separator.encode(charset or 'ascii')
    pairs = s.split(separator)
    return cls(_url_decode_impl(pairs, charset, decode_keys, include_empty, errors))
|
def debug(evals, feed_dict=None, breakpoints=None, break_immediately=False, session=None):
    """Spawn a new debug session and run the given evals in it.

    The session is stored in the module-global ``_dbsession`` so that it
    outlives this call and can be inspected afterwards.
    """
    global _dbsession
    _dbsession = debug_session.DebugSession(session)
    return _dbsession.run(evals, feed_dict, breakpoints, break_immediately)
|
def parent_dir(path):
    '''Return the absolute path of the grandparent of *path* with
    ``_build`` appended, i.e. ``<path>/../../_build``.

    NOTE(review): the previous docstring claimed this returns "the
    parent of a directory", which does not match the implementation --
    confirm which behavior callers expect.
    '''
    return os.path.abspath(os.path.join(path, os.pardir, os.pardir, '_build'))
|
def runAndWait(self):
    '''Called by the engine to start an event loop, process all commands in
    the queue at the start of the loop, and then exit the loop.
    '''
    # Queue the end-of-loop request before starting the loop, so the
    # loop drains whatever is already queued and then terminates.
    self._push(self._engine.endLoop, tuple())
    self._driver.startLoop()
|
def dimensions(filenames):
    """Given a filename or list of filenames, return the corresponding
    (x, y, filename) tuple or sequence of tuples.

    :param filenames: a single path string or a list of path strings.
    :return: one (x, y, filename) tuple when a single string was given,
        otherwise a sequence of such tuples.
    """
    # isinstance (not ``type(...) is str``) so str subclasses are
    # treated as single filenames too.
    single = isinstance(filenames, str)
    if single:
        filenames = [filenames]
    dims = get_dimensions(filenames)
    if single:
        dims = dims[0]
    return dims
|
def copy_logstore(self, from_project, from_logstore, to_logstore, to_project=None, to_client=None):
    """copy logstore, index, logtail config to target logstore; machine group is not included yet.

    The target logstore will be created if it does not exist.

    :type from_project: string
    :param from_project: project name
    :type from_logstore: string
    :param from_logstore: logstore name
    :type to_logstore: string
    :param to_logstore: target logstore name
    :type to_project: string
    :param to_project: target project name; copies to the same project if
        not specified, and will try to create it when missing
    :type to_client: LogClient
    :param to_client: logclient instance; used to operate on `to_project`
        when specified, for cross-region copies
    :return:
    """
    # Delegates to the module-level copy_logstore helper (same name),
    # passing this client as the source client.
    return copy_logstore(self, from_project, from_logstore, to_logstore, to_project=to_project, to_client=to_client)
|
def adjusted_current_time(self):
    """Return the calculated current seek time of the media, in seconds."""
    if self.player_state != MEDIA_PLAYER_STATE_PLAYING:
        # Not playing: the last reported seek time is still accurate.
        return self.current_time
    # Playing: extrapolate by the wall-clock time since the last update.
    elapsed = (datetime.utcnow() - self.last_updated).total_seconds()
    return self.current_time + elapsed
|
def decorate(svg, node, metadata):
    """Attach metadata (link, tooltip, color, style, label) next to *node*."""
    if not metadata:
        return node
    xlink = metadata.get('xlink')
    if xlink:
        # A bare href is normalized into a dict opening in a new tab.
        if not isinstance(xlink, dict):
            xlink = {'href': xlink, 'target': '_blank'}
        node = svg.node(node, 'a', **xlink)
        svg.node(node, 'desc', class_='xlink').text = to_unicode(xlink.get('href'))
    if 'tooltip' in metadata:
        svg.node(node, 'title').text = to_unicode(metadata['tooltip'])
    if 'color' in metadata:
        fill = metadata.pop('color')
        node.attrib['style'] = 'fill: %s; stroke: %s' % (fill, fill)
    if 'style' in metadata:
        # An explicit style overrides whatever 'color' produced above.
        node.attrib['style'] = metadata.pop('style')
    if metadata.get('label'):
        svg.node(node, 'desc', class_='label').text = to_unicode(metadata['label'])
    return node
|
def delete_priority_rule(db, rule_id: int) -> None:
    """Delete the file priority rule with the given id.

    The deletion is committed via the connection's context manager.
    """
    with db:
        cursor = db.cursor()
        cursor.execute('DELETE FROM file_priority WHERE id=?', (rule_id,))
|
def mat_to_numpy_arr(self):
    '''Convert ``self.dat['mat']`` from a list to a numpy array.

    (The data is stored as a plain list because numpy arrays cannot be
    saved as JSON; this restores the array form after loading.)
    '''
    import numpy
    self.dat['mat'] = numpy.asarray(self.dat['mat'])
|
def to_pytime(self):
    """Convert this sql time object into Python's time object.

    Nanoseconds are truncated to microseconds.

    @return: naive time
    """
    # Work in microseconds from the start; floor divisions compose, so
    # this matches peeling hours/minutes/seconds off the nanosecond count.
    microseconds = self._nsec // 1000
    total_seconds, microseconds = divmod(microseconds, 1000000)
    total_minutes, seconds = divmod(total_seconds, 60)
    hours, minutes = divmod(total_minutes, 60)
    return datetime.time(hours, minutes, seconds, microseconds)
|
def hilbert_chip_order(machine):
    """Generate the chips of *machine* along a Hilbert curve path.

    For use as a chip ordering for the sequential placer.
    """
    side = max(machine.width, machine.height)
    # Number of curve subdivision levels needed to cover the longer side.
    levels = int(ceil(log(side, 2.0))) if side >= 1 else 0
    return hilbert(levels)
|
def learn(self, initial_state_key, limit=1000, game_n=1):
    '''Multi-Agent Learning.

    Override.

    Args:
        initial_state_key: Initial state.
        limit: Limit of the number of learning steps per game.
        game_n: The number of games.
    '''
    end_flag_list = [False] * len(self.q_learning_list)
    for game in range(game_n):
        state_key = copy.copy(initial_state_key)
        self.t = 1
        while self.t <= limit:
            for i in range(len(self.q_learning_list)):
                if game + 1 == game_n:
                    # Record visited states of the final game only.
                    self.state_key_list.append((i, copy.copy(state_key)))
                self.q_learning_list[i].t = self.t
                next_action_list = self.q_learning_list[i].extract_possible_actions(state_key)
                if len(next_action_list):
                    action_key = self.q_learning_list[i].select_action(state_key=state_key, next_action_list=next_action_list)
                    reward_value = self.q_learning_list[i].observe_reward_value(state_key, action_key)
                    # Check whether this agent reached a terminal state.
                    if self.q_learning_list[i].check_the_end_flag(state_key) is True:
                        end_flag_list[i] = True
                    # Max-Q-Value in next action time.
                    next_state_key = self.q_learning_list[i].update_state(state_key=state_key, action_key=action_key)
                    next_next_action_list = self.q_learning_list[i].extract_possible_actions(next_state_key)
                    if len(next_next_action_list):
                        next_action_key = self.q_learning_list[i].predict_next_action(next_state_key, next_next_action_list)
                        next_max_q = self.q_learning_list[i].extract_q_df(next_state_key, next_action_key)
                        # Update Q-Value.
                        self.q_learning_list[i].update_q(state_key=state_key, action_key=action_key, reward_value=reward_value, next_max_q=next_max_q)
                    # Update State.
                    state_key = next_state_key
                # Episode step counter (shared across agents).
                self.t += 1
                self.q_learning_list[i].t = self.t
            # Stop this game once every agent has reached its end flag.
            if False not in end_flag_list:
                break
|
def empirical_SVD(stream_list, linear=True):
    """Deprecated. Use :func:`empirical_svd` instead.

    Kept as a thin alias for backward compatibility; emits a
    deprecation warning and delegates.
    """
    # Fixed the misspelling "Depreciated" in the user-facing warning.
    warnings.warn('Deprecated, use empirical_svd instead.')
    return empirical_svd(stream_list=stream_list, linear=linear)
|
def add_role(self, role, term, start_date=None, end_date=None, **kwargs):
    """Append a role record to this legislator's ``roles`` list.

    Examples:
        leg.add_role('member', term='2009', chamber='upper',
                     party='Republican', district='10th')
    """
    entry = dict(role=role, term=term, start_date=start_date, end_date=end_date)
    entry.update(kwargs)
    self['roles'].append(entry)
|
def commit_deposit(self, deposit_id, **params):
    """Commit a deposit on this account via the API client.

    API reference:
    https://developers.coinbase.com/api/v2#commit-a-deposit
    """
    return self.api_client.commit_deposit(self.id, deposit_id, **params)
|
def out_endpoint(self):
    """Open a reference to the USB device's only OUT endpoint.

    This method assumes that the USB device configuration has already
    been set. The endpoint is looked up lazily and cached on the
    instance as ``_out_endpoint``.

    :raises ControllerError: if no OUT endpoint can be found.
    """
    if getattr(self, '_out_endpoint', None) is None:
        config = self.device.get_active_configuration()
        interface_number = config[(0, 0)].bInterfaceNumber
        interface = usb.util.find_descriptor(config, bInterfaceNumber=interface_number)
        # Match the first endpoint whose address direction bit is OUT.
        self._out_endpoint = usb.util.find_descriptor(interface, custom_match=lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_OUT)
        if not self._out_endpoint:
            raise ControllerError("Couldn't find OUT endpoint on the USB device")
    return self._out_endpoint
|
async def put(self, public_key):
    """Reject an offer and unfreeze the buyer's balance.

    Accepts:
        - cid
        - buyer public key
        - buyer address
    """
    if settings.SIGNATURE_VERIFICATION:
        super().verify()
    # Check if message contains required data
    try:
        body = json.loads(self.request.body)
    except:
        # NOTE(review): bare except maps any failure (not just bad JSON)
        # to a 400 -- consider narrowing to ValueError.
        self.set_status(400)
        self.write({"error": 400, "reason": "Unexpected data format. JSON required"})
        raise tornado.web.Finish
    if isinstance(body["message"], str):
        message = json.loads(body["message"])
    elif isinstance(body["message"], dict):
        message = body["message"]
    # NOTE(review): if body["message"] is neither str nor dict, ``message``
    # is unbound and the next line raises NameError -- confirm intended.
    cid = int(message["offer_id"].get("cid", 0))
    buyer_address = message["offer_id"].get("buyer_address")
    coinid = message.get("coinid")
    if not all([cid, buyer_address, coinid]):
        self.set_status(400)
        self.write({"error": 400, "reason": "Missed required fields."})
        raise tornado.web.Finish
    if coinid in settings.bridges.keys():
        self.account.blockchain.setendpoint(settings.bridges[coinid])
    else:
        self.set_status(400)
        self.write({"error": 400, "reason": "Invalid coin ID"})
        raise tornado.web.Finish
    check = self.account.validator[coinid](public_key)
    account = await self.account.getaccountdata(public_key=public_key)
    if "error" in account.keys():
        error_code = account["error"]
        self.set_status(error_code)
        self.write(account)
        raise tornado.web.Finish
    # Check if one of sellers or buyers rejects offer
    owneraddr = await self.account.blockchain.ownerbycid(cid=cid)
    hex_ = check
    if buyer_address != hex_ and owneraddr != hex_:
        # Avoid rejecting an offer that does not belong to the caller.
        self.set_status(403)
        self.write({"error": 403, "reason": "Forbidden. Offer does not belong to user."})
        # NOTE(review): no ``raise tornado.web.Finish`` here, so execution
        # falls through and still rejects the offer -- looks like a bug;
        # confirm before changing behavior.
    # Reject offer
    response = await self.account.blockchain.rejectoffer(coinid=coinid, cid=cid, buyer_address=buyer_address)
    if "error" in response.keys():
        self.set_status(response["error"])
        self.write(response)
        raise tornado.web.Finish
    # Get buyer for email sending
    buyer = await self.account.getaccountbywallet(wallet=buyer_address)
    if "error" in buyer.keys():
        self.set_status(buyer["error"])
        self.write(buyer)
        raise tornado.web.Finish
    if buyer.get("email"):
        emaildata = {"to": buyer.get("email"), "subject": "Robin8 support", "optional": "Your offer with cid %s was rejected." % cid}
        await self.account.mailer.sendmail(**emaildata)
    # Undeposit balance
    price = await self.account.blockchain.getwriteprice(cid=cid)
    # NOTE(review): coinid is overwritten with the literal "PUT" before
    # unfreezing -- presumably the internal balance token; confirm.
    coinid = "PUT"
    await self.account.balance.unfreeze(uid=buyer["id"], coinid=coinid, amount=price)
    del response["result"]
    self.write(response)
|
def ParseFileObject(self, parser_mediator, file_object):
    """Parses a Firefox cache file-like object.

    Args:
        parser_mediator (ParserMediator): mediates interactions between
            parsers and other components, such as storage and dfvfs.
        file_object (dfvfs.FileIO): a file-like object.

    Raises:
        UnableToParseFile: when the file cannot be parsed.
    """
    filename = parser_mediator.GetFilename()
    if not self._CACHE_FILENAME_RE.match(filename):
        raise errors.UnableToParseFile('Not a Firefox cache2 file.')
    # The file needs to be at least 36 bytes in size for it to contain
    # a cache2 file metadata header and a 4-byte offset that points to its
    # location in the file.
    file_size = file_object.get_size()
    if file_size < 36:
        raise errors.UnableToParseFile('File size too small for Firefox cache2 file.')
    file_offset = self._GetCacheFileMetadataHeaderOffset(file_object)
    file_metadata_header_map = self._GetDataTypeMap('firefox_cache2_file_metadata_header')
    try:
        file_metadata_header, _ = self._ReadStructureFromFileObject(file_object, file_offset, file_metadata_header_map)
    except (ValueError, errors.ParseError) as exception:
        raise errors.UnableToParseFile(('Unable to parse Firefox cache2 file metadata header with error: ' '{0!s}').format(exception))
    if not self._ValidateCacheFileMetadataHeader(file_metadata_header):
        raise errors.UnableToParseFile('Not a valid Firefox cache2 record.')
    # The URL immediately follows the metadata header; the HTTP headers
    # make up the remainder of the file (minus a 4-byte trailer).
    url = file_object.read(file_metadata_header.key_size)
    header_data = file_object.read()
    display_name = parser_mediator.GetDisplayName()
    request_method, response_code = self._ParseHTTPHeaders(header_data[:-4], file_offset, display_name)
    event_data = FirefoxCacheEventData()
    event_data.fetch_count = file_metadata_header.fetch_count
    event_data.frequency = file_metadata_header.frequency
    event_data.request_method = request_method
    event_data.request_size = file_metadata_header.key_size
    event_data.response_code = response_code
    event_data.version = self._CACHE_VERSION
    event_data.url = url.decode('ascii', errors='replace')
    # Always produce a last-visited event; written/expiration events are
    # only produced when their timestamps are non-zero.
    date_time = dfdatetime_posix_time.PosixTime(timestamp=file_metadata_header.last_fetched_time)
    event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
    if file_metadata_header.last_modified_time:
        date_time = dfdatetime_posix_time.PosixTime(timestamp=file_metadata_header.last_modified_time)
        event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
    if file_metadata_header.expiration_time:
        date_time = dfdatetime_posix_time.PosixTime(timestamp=file_metadata_header.expiration_time)
        event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)
|
def _set_all_partition(self, v, load=False):
    """Setter method for all_partition, mapped from YANG variable
    /cpu_state/all_partition (container).

    If this variable is read-only (config: false) in the source YANG
    file, then _set_all_partition is considered a private method.
    Backends looking to populate this variable should do so by calling
    thisObj._set_all_partition() directly.

    YANG Description: CPU utilization summary of all the MMs and LCs
    """
    # Unwrap values carrying a union-type marker before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=all_partition.all_partition, is_container='container', presence=False, yang_name="all-partition", rest_name="all-partition", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'RAS-process-cpu-all-partition', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """all_partition must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=all_partition.all_partition, is_container='container', presence=False, yang_name="all-partition", rest_name="all-partition", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'RAS-process-cpu-all-partition', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='container', is_config=False)""", })
    self.__all_partition = t
    # Notify the parent object of the change when a hook is present.
    if hasattr(self, '_set'):
        self._set()
|
def resource_list(cls):
    """Get the possible list of resources (hostname, id)."""
    items = cls.list({'items_per_page': 500})
    # Hostnames first, then the stringified ids, matching lookup order.
    names = [vm['hostname'] for vm in items]
    names += [str(vm['id']) for vm in items]
    return names
|
def apply(self, event=None):
    """Sync config changes from the GUI back to self.config.

    Called by self.onOk before it closes the window. Returns True when
    every value converts and validates cleanly; returns False (telling
    onOk to keep the window open) after displaying the first error.
    """
    # Run through the sections to check all the option values:
    for section in self.config.sections():
        # Check the actual values against the validators and complain if necessary:
        for option, o in self.config.config[section].items():
            if not o['include']:
                # This value is hidden, so there's no control for it.
                continue
            # Get the actual control for GetValue.
            control = self._controls[section][option]
            try:
                # Try to convert the value to the option's declared type.
                value = type(o['value'])(control.GetValue())
            except ValueError as msg:
                # Whoops, something went wrong converting.
                self.displayError(section, option, str(msg))
                # Tells self.onOk not to close the window.
                return False
            # Set up the problem variable.
            problem = None
            try:
                # See if it passes the validator.
                problem = o['validate'](value)
            except Exception as e:
                # The validator lambda raised an exception.
                problem = str(e)
            if problem:
                # It didn't pass.
                self.displayError(section, option, problem)
                # Tells self.onOk not to close the window.
                return False
            self.config.set(section, option, value)
    # All clear.
    return True
|
def convert_celeba_aligned_cropped(directory, output_directory, output_filename=OUTPUT_FILENAME):
    """Converts the aligned and cropped CelebA dataset to HDF5.

    Converts the CelebA dataset to an HDF5 dataset compatible with
    :class:`fuel.datasets.CelebA`. The converted dataset is saved as
    'celeba_aligned_cropped.hdf5'.

    It assumes the existence of the following files:

    * `img_align_celeba.zip`
    * `list_attr_celeba.txt`

    Parameters
    ----------
    directory : str
        Directory in which input files reside.
    output_directory : str
        Directory in which to save the converted dataset.
    output_filename : str, optional
        Name of the saved dataset. Defaults to
        'celeba_aligned_cropped.hdf5'.

    Returns
    -------
    output_paths : tuple of str
        Single-element tuple containing the path to the converted
        dataset.
    """
    output_path = os.path.join(output_directory, output_filename)
    # The aligned/cropped images are all 218x178 pixels.
    h5file = _initialize_conversion(directory, output_path, (218, 178))
    features_dataset = h5file['features']
    image_file_path = os.path.join(directory, IMAGE_FILE)
    with zipfile.ZipFile(image_file_path, 'r') as image_file:
        with progress_bar('images', NUM_EXAMPLES) as bar:
            for i in range(NUM_EXAMPLES):
                # Archive entries are 1-based and zero-padded to 6 digits.
                image_name = 'img_align_celeba/{:06d}.jpg'.format(i + 1)
                # Transpose HWC -> CHW for the features dataset layout.
                features_dataset[i] = numpy.asarray(Image.open(image_file.open(image_name, 'r'))).transpose(2, 0, 1)
                bar.update(i + 1)
    h5file.flush()
    h5file.close()
    return (output_path,)
|
def _write_source_code(tlobject, kind, builder, type_constructors):
    """Write the source code corresponding to the given TLObject
    by making use of the ``builder`` `SourceBuilder`.

    Additional information such as file path depth and the
    ``Type: [Constructors]`` mapping must be given for proper
    importing and documentation strings.

    :param tlobject: the TLObject whose code is generated
    :param kind: the kind of object being written
    :param builder: SourceBuilder that receives the generated code
    :param type_constructors: mapping of Type to its Constructors
    """
    # NOTE: the calls below emit the members of the generated class in
    # this fixed order; do not reorder them.
    _write_class_init(tlobject, kind, type_constructors, builder)
    _write_resolve(tlobject, builder)
    _write_to_dict(tlobject, builder)
    _write_to_bytes(tlobject, builder)
    _write_from_reader(tlobject, builder)
    _write_read_result(tlobject, builder)
|
def chexdump(x, dump=False):
    """Build a per-byte hexadecimal representation.

    Example:
        >>> chexdump(IP())
        0x45, 0x00, 0x00, 0x14, 0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0x7c, 0xe7, 0x7f, 0x00, 0x00, 0x01, 0x7f, 0x00, 0x00, 0x01  # noqa: E501

    :param x: a Packet
    :param dump: print the view if False
    :returns: a String only if dump=True
    """
    raw = bytes_encode(x)
    rendered = ", ".join("%#04x" % orb(byte) for byte in raw)
    if not dump:
        print(rendered)
    else:
        return rendered
|
def get_attribute(self, selector, attribute, by=By.CSS_SELECTOR,
                  timeout=settings.SMALL_TIMEOUT):
    """This method uses JavaScript to get the value of an attribute.

    :param selector: element selector; XPath and link-text selectors are
        auto-detected and ``by`` is adjusted accordingly
    :param attribute: name of the attribute to read
    :param by: selector strategy (default: CSS selector)
    :param timeout: seconds to wait for the element to be present
    :raises Exception: when the element has no such attribute
    """
    # Scale the default timeout when a multiplier is configured.
    if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT:
        timeout = self.__get_new_timeout(timeout)
    if page_utils.is_xpath_selector(selector):
        by = By.XPATH
    if page_utils.is_link_text_selector(selector):
        selector = page_utils.get_link_text_from_selector(selector)
        by = By.LINK_TEXT
    self.wait_for_ready_state_complete()
    time.sleep(0.01)
    element = page_actions.wait_for_element_present(
        self.driver, selector, by, timeout)
    try:
        attribute_value = element.get_attribute(attribute)
    except (StaleElementReferenceException, ENI_Exception):
        # Element went stale between lookup and read: wait briefly and
        # retry the lookup once.
        self.wait_for_ready_state_complete()
        time.sleep(0.06)
        element = page_actions.wait_for_element_present(
            self.driver, selector, by, timeout)
        attribute_value = element.get_attribute(attribute)
    if attribute_value is not None:
        return attribute_value
    else:
        raise Exception(
            "Element {%s} has no attribute {%s}!" % (selector, attribute))
|
def get_auth_header(self):
    """Build the Authorization header for the current authentication state.

    :return [dict]: Bearer-token header when the API session is
        authenticated, otherwise a Client-ID header.
    """
    if not self.api.is_authenticated:
        return {"Authorization": "Client-ID %s" % self.api.client_id}
    return {"Authorization": "Bearer %s" % self.api.access_token}
|
def inj_spin_pdf ( key , high_spin , spinz ) :
'''Estimate the probability density of the
injections for the spin distribution .
Parameters
key : string
Injections strategy
high _ spin : float
Maximum spin used in the strategy
spinz : array
Spin of the injections ( for one component )'''
|
# If the data comes from disable _ spin simulation
if spinz [ 0 ] == 0 :
return np . ones_like ( spinz )
spinz = np . array ( spinz )
bound = np . sign ( np . absolute ( high_spin ) - np . absolute ( spinz ) )
bound += np . sign ( 1 - np . absolute ( spinz ) )
if key == 'precessing' : # Returns the PDF of spins when total spin is
# isotropically distributed . Both the component
# masses have the same distribution for this case .
pdf = ( np . log ( high_spin - np . log ( abs ( spinz ) ) ) / high_spin / 2 )
idx = np . where ( bound != 2 )
pdf [ idx ] = 0
return pdf
if key == 'aligned' : # Returns the PDF of mass when spins are aligned and uniformly
# distributed . Component spins are independent for this case .
pdf = ( np . ones_like ( spinz ) / 2 / high_spin )
idx = np . where ( bound != 2 )
pdf [ idx ] = 0
return pdf
if key == 'disable_spin' : # Returns unit array
pdf = np . ones_like ( spinz )
return pdf
|
def insert_record_by_fieldspecs_with_values(
        self, table: str, fieldspeclist: FIELDSPECLIST_TYPE) -> int:
    """Insert a record built from fieldspecs that carry their value
    under the 'value' key.

    :param table: destination table name
    :param fieldspeclist: list of fieldspec dicts with 'name' and 'value'
    :return: result of insert_record (e.g. the new primary key)
    """
    names = [spec["name"] for spec in fieldspeclist]
    record_values = [spec["value"] for spec in fieldspeclist]
    return self.insert_record(table, names, record_values)
|
def create_operations(ctx=None, **kwargs):
    """Create an alembic operations object.

    :param ctx: migration context; built via create_migration_ctx(**kwargs)
        when omitted
    :return: Operations instance with has_table attached
    """
    migration_ctx = ctx if ctx is not None else create_migration_ctx(**kwargs)
    ops = Operations(migration_ctx)
    ops.has_table = has_table
    return ops
|
def get_property(node_uri, property_name, ossos_base=True):
    """Retrieve the value associated with a property on a VOSpace node.

    @param node_uri: URI of the node to inspect
    @param property_name: property to read; expanded to an OSSOS tag URI
        when ossos_base is True
    @param ossos_base: whether property_name is relative to the OSSOS base
    @return: the property value, or None when the property is absent
    """
    # force=True: avoid a cached copy of the node fetched before the
    # property of interest was set/updated.
    node = client.get_node(node_uri, force=True)
    if ossos_base:
        property_uri = tag_uri(property_name)
    else:
        property_uri = property_name
    if property_uri not in node.props:
        return None
    return node.props[property_uri]
|
def field_pklist_from_json(self, data):
    """Load a PkOnlyQueryset from a JSON dict.

    Uses the same format as cached_queryset_from_json
    (keys: 'app', 'model', 'pks').
    """
    target_model = get_model(data['app'], data['model'])
    return PkOnlyQueryset(self, target_model, data['pks'])
|
def get_subdomain_ops_at_txid(txid, proxy=None, hostport=None):
    """Get the list of subdomain operations added by a txid.

    Returns the list of operations ([{...}]) on success.
    Returns {'error': ..., 'http_status': ...} on failure.

    :param txid: transaction ID to query
    :param proxy: existing RPC proxy; created from hostport when None
    :param hostport: "host:port" of the Blockstack node
    """
    assert proxy or hostport, 'Need proxy or hostport'
    if proxy is None:
        proxy = connect_hostport(hostport)
    # Expected payload: a 'subdomain_ops' array of operation records that
    # follow OP_HISTORY_SCHEMA with the subdomain-specific required keys.
    subdomain_ops_schema = {
        'type': 'object',
        'properties': {
            'subdomain_ops': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': OP_HISTORY_SCHEMA['properties'],
                    'required': SUBDOMAIN_HISTORY_REQUIRED,
                },
            },
        },
        'required': ['subdomain_ops'],
    }
    schema = json_response_schema(subdomain_ops_schema)
    resp = {}
    try:
        resp = proxy.get_subdomain_ops_at_txid(txid)
        resp = json_validate(schema, resp)
        if json_is_error(resp):
            return resp
        # names must be valid
        for op in resp['subdomain_ops']:
            assert is_subdomain(str(op['fully_qualified_subdomain'])), (
                'Invalid subdomain "{}"'.format(op['fully_qualified_subdomain']))
    except ValidationError as ve:
        # Server payload did not match the schema (likely version skew).
        if BLOCKSTACK_DEBUG:
            log.exception(ve)
        resp = {'error': 'Server response did not match expected schema. You are likely communicating with an out-of-date Blockstack node.', 'http_status': 502}
        return resp
    except AssertionError as e:
        # One of the returned subdomain names failed validation.
        if BLOCKSTACK_DEBUG:
            log.exception(e)
        resp = {'error': 'Server response included an invalid subdomain', 'http_status': 500}
        return resp
    except socket.timeout:
        log.error("Connection timed out")
        resp = {'error': 'Connection to remote host timed out.', 'http_status': 503}
        return resp
    except socket.error as se:
        log.error("Connection error {}".format(se.errno))
        resp = {'error': 'Connection to remote host failed.', 'http_status': 502}
        return resp
    except Exception as ee:
        # Catch-all boundary: log and convert to an error response.
        if BLOCKSTACK_DEBUG:
            log.exception(ee)
        log.error("Caught exception while connecting to Blockstack node: {}".format(ee))
        resp = {'error': 'Failed to contact Blockstack node. Try again with `--debug`.', 'http_status': 500}
        return resp
    return resp['subdomain_ops']
|
def cmd_led(self, args):
    '''send LED pattern as override'''
    if len(args) < 3:
        print("Usage: led RED GREEN BLUE <RATE>")
        return
    # 24-byte pattern buffer; first three bytes are the RGB values.
    pattern = [0] * 24
    pattern[0:3] = [int(value) for value in args[:3]]
    if len(args) == 4:
        # Optional fourth argument is the blink rate.
        pattern[3] = int(args[3])
        pattern_length = 4
    else:
        pattern_length = 3
    self.master.mav.led_control_send(
        self.settings.target_system, self.settings.target_component,
        0, 0, pattern_length, pattern)
|
def killall(self, exc):
    """Connection/channel was closed: fail all subsequent and ongoing
    requests with the given exception.

    Stores exc for future requests, sets it on every not-yet-done
    pending future, then clears the pending-futures registry.
    """
    self.connection_exc = exc
    for pending in self._futures.values():
        for fut in pending:
            if not fut.done():
                fut.set_exception(exc)
    self._futures.clear()
|
def todegdec(origin):
    """Convert from [+/-]DDD°MMM'SSS.SSSS" or [+/-]DDD°MMM.MMMM' to
    [+/-]DDD.DDDDD decimal degrees.

    Falls through to an implicit None when origin matches none of the
    known formats.
    """
    # if the input is already a float (or can be converted to float)
    try:
        return float(origin)
    except ValueError:
        pass
    # DMS format
    # NOTE(review): for negative coordinates this adds positive
    # minutes/seconds to a negative degrees value, which flips the sign of
    # the fractional part if dms_re lets a leading '-' through — confirm
    # against the regex definitions (defined elsewhere in this module).
    m = dms_re.search(origin)
    if m:
        degrees = int(m.group('degrees'))
        minutes = float(m.group('minutes'))
        seconds = float(m.group('seconds'))
        return degrees + minutes / 60 + seconds / 3600
    # Degrees + decimal minutes format
    m = mindec_re.search(origin)
    if m:
        degrees = int(m.group('degrees'))
        minutes = float(m.group('minutes'))
        return degrees + minutes / 60
|
def _return_rows ( self , table , cols , values , return_type ) :
"""Return fetched rows in the desired type ."""
|
if return_type is dict : # Pack each row into a dictionary
cols = self . get_columns ( table ) if cols is '*' else cols
if len ( values ) > 0 and isinstance ( values [ 0 ] , ( set , list , tuple ) ) :
return [ dict ( zip ( cols , row ) ) for row in values ]
else :
return dict ( zip ( cols , values ) )
elif return_type is tuple :
return [ tuple ( row ) for row in values ]
else :
return values
|
def nodes_geometry(self):
    """The nodes in the scene graph with geometry attached.

    Returns
    -------
    nodes_geometry : (m,) array, of node names
    """
    with_geometry = [name for name in self.transforms.nodes()
                     if 'geometry' in self.transforms.node[name]]
    return np.array(with_geometry)
|
def fill_delegate_proxy_activation_requirements(requirements_data, cred_file,
                                                lifetime_hours=12):
    """Fill in the proxy chain of a Delegate Proxy activation document.

    Given the activation requirements for an endpoint and a filename for
    X.509 credentials, extracts the public key from the activation
    requirements, uses the key and the credentials to make a proxy
    credential, and returns the requirements data with the proxy chain
    filled in.

    :param requirements_data: activation requirements dict with a "DATA"
        list of requirement entries
    :param cred_file: path to the file holding the issuer's credentials
    :param lifetime_hours: requested proxy lifetime (default 12)
    :raises ValueError: when the endpoint lacks a public_key or
        proxy_chain entry (i.e. no Delegate Proxy support)
    """
    # get the public key from the activation requirements
    for data in requirements_data["DATA"]:
        if data["type"] == "delegate_proxy" and data["name"] == "public_key":
            public_key = data["value"]
            break
    else:
        # for/else: no entry matched
        raise ValueError(
            ("No public_key found in activation requirements, this endpoint "
             "does not support Delegate Proxy activation."))
    # get user credentials from user credential file
    with open(cred_file) as f:
        issuer_cred = f.read()
    # create the proxy credentials
    proxy = create_proxy_credentials(issuer_cred, public_key, lifetime_hours)
    # return the activation requirements document with the proxy_chain filled
    for data in requirements_data["DATA"]:
        if data["type"] == "delegate_proxy" and data["name"] == "proxy_chain":
            data["value"] = proxy
            return requirements_data
    else:
        raise ValueError(
            ("No proxy_chain found in activation requirements, this endpoint "
             "does not support Delegate Proxy activation."))
|
def _run ( self , command : List [ str ] , notebook : Optional [ str ] = None ) -> int :
"""Run command from terminal and notebook and view output from subprocess ."""
|
if notebook is None :
return Popen ( command , cwd = self . _build_dir ) . wait ( )
cmd = Popen ( command , cwd = self . _build_dir , stdout = PIPE , stderr = STDOUT )
while True :
line = cmd . stdout . readline ( )
if line == b'' and cmd . poll ( ) is not None :
return cmd . poll ( )
print ( line . decode ( 'utf-8' ) , end = '' )
raise Exception ( )
|
def _write_pair_information(gsd_file, structure):
    """Write the special pairs in the system.

    Pairs are derived from dihedral partners (atoms separated by three
    bonds); each unique sorted type pair gets its own pair type entry.

    Parameters
    ----------
    gsd_file :
        The file object of the GSD file being written
    structure : parmed.Structure
        Parmed structure object holding system information
    """
    pair_types = []
    pair_typeid = []
    pairs = []
    for ai in structure.atoms:
        for aj in ai.dihedral_partners:
            # make sure we don't double add: only record the pair once,
            # from the higher-index atom's side.
            if ai.idx > aj.idx:
                # Canonical pair-type name: the two atom types sorted
                # (natural order) and joined with '-'.
                ps = '-'.join(sorted([ai.type, aj.type], key=natural_sort))
                if ps not in pair_types:
                    pair_types.append(ps)
                pair_typeid.append(pair_types.index(ps))
                pairs.append((ai.idx, aj.idx))
    gsd_file.pairs.types = pair_types
    gsd_file.pairs.typeid = pair_typeid
    gsd_file.pairs.group = pairs
    gsd_file.pairs.N = len(pairs)
|
def foreach_model(self, fn):
    """Apply the given function to each model replica in each worker.

    Returns:
        Flattened list of results from applying the function.
    """
    per_worker = ray.get(
        [worker.foreach_model.remote(fn) for worker in self.workers])
    flattened = []
    for worker_results in per_worker:
        flattened.extend(worker_results)
    return flattened
|
def short_codes(self):
    """Access the short_codes list resource (constructed lazily and
    cached on first access).

    :returns: twilio.rest.api.v2010.account.short_code.ShortCodeList
    :rtype: twilio.rest.api.v2010.account.short_code.ShortCodeList
    """
    if self._short_codes is None:
        self._short_codes = ShortCodeList(
            self._version, account_sid=self._solution['sid'])
    return self._short_codes
|
def from_wif_hex(cls: Type[SigningKeyType], wif_hex: str) -> SigningKeyType:
    """Return SigningKey instance from Duniter WIF in hexadecimal format.

    Decoded layout: 1-byte format flag (0x01) + 32-byte seed +
    2-byte checksum (first two bytes of a double SHA-256 of flag+seed).

    :param wif_hex: WIF string in hexadecimal format
    :raises Exception: on invalid length, bad format flag, or bad checksum
    """
    wif_bytes = Base58Encoder.decode(wif_hex)
    if len(wif_bytes) != 35:
        raise Exception("Error: the size of WIF is invalid")
    # extract data
    checksum_from_wif = wif_bytes[-2:]
    fi = wif_bytes[0:1]
    seed = wif_bytes[1:-2]
    seed_fi = wif_bytes[0:-2]
    # check WIF format flag
    if fi != b"\x01":
        raise Exception("Error: bad format version, not WIF")
    # checksum control: double SHA-256 over flag+seed, first two bytes
    checksum = libnacl.crypto_hash_sha256(
        libnacl.crypto_hash_sha256(seed_fi))[0:2]
    if checksum_from_wif != checksum:
        raise Exception("Error: bad checksum of the WIF")
    return cls(seed)
|
def set_decdel_rts(self):
    """Figure out the decimal separator and rows to skip and set the
    corresponding attributes (decdel, datrx, rts).

    :raises PatternError: when the count of equal-pattern data rows
        (self.cnt) does not match EQUAL_CNT_REQ.
    """
    # Line number of the first data row: one past the larger skip count.
    lnr = max(self.rows2skip(','), self.rows2skip('.')) + 1
    # If EQUAL_CNT_REQ was not met, raise error. Implement!
    # NOTE(review): raising when cnt *exceeds* the requirement looks
    # inverted relative to the comment above — confirm the intended
    # semantics of self.cnt vs EQUAL_CNT_REQ.
    if self.cnt > EQUAL_CNT_REQ:
        raise PatternError('Did not find ' + str(EQUAL_CNT_REQ) +
                           ' data rows with equal data pattern in file: ' + self.fn)
    elif self.cnt < EQUAL_CNT_REQ:  # Too few rows
        raise PatternError('Less than', str(EQUAL_CNT_REQ) + 'data rows in',
                           self.fn + '?', '\nTry lower the EQUAL_CNT_REQ')
    if self.matches_p[lnr] <= self.matches_c[lnr]:
        self.decdel = '.'
        # If equal, assume decimal point is used.
        self.datrx = DATPRX
    else:
        self.decdel = ','
        # Assume the lesser count is correct.
        self.datrx = DATCRX
    self.rts = self.rows2skip(self.decdel)
|
def next(self):
    """for iteration over the header entries"""
    # Exhausted: signal end of iteration.
    if self._current >= len(self._record_list):
        raise StopIteration
    record = self._record_list[self._current]
    self._current += 1
    return record['name']
|
def exhaustive_curie_check(self, ontology: pd.DataFrame, curie_predicate: str,
                           curie_prefix: str, diff: bool = True,
                           ) -> Tuple[list]:
    '''All entities with conflicting curies gets a full diff to see if they belong

    Args:
        ontology: pandas DataFrame created from an ontology where the
            colnames are predicates and if classes exist it is also
            thrown into the colnames.
        curie_predicate: usually in qname form and is the colname of the
            DataFrame
        curie_prefix: not all cells in the DataFrame will have complete
            curies so we extract the fragment from the cell and use the
            prefix to complete it.
        diff: complete exhaustive diff between curie matches... will take
            FOREVER if there are a lot -> n^2

    Returns:
        inside: entities that are inside of InterLex
        outside: entities NOT in InterLex
        diff (optional): List[List[dict]] diff between matches only
    '''
    inside, outside = [], []
    # just in case I forget a colon isnt in a prefix
    curie_prefix = curie_prefix.replace(':', '')
    header = ['Index'] + list(ontology.columns)
    for row in ontology.itertuples():
        row = {header[i]: val for i, val in enumerate(row)}
        entity_curie = row[curie_predicate]
        if isinstance(entity_curie, list):
            # BUG FIX: the original exited on any *non-empty* list and
            # then indexed the empty one (IndexError). Exactly one IRI
            # per cell is required.
            if len(entity_curie) != 1:
                exit('Need to have only 1 iri in the cell from the onotology.')
            entity_curie = entity_curie[0]
        # Complete the curie from the cell's fragment.
        entity_curie = curie_prefix + ':' + self.extract_fragment(entity_curie)
        ilx_row = self.curie2row.get(entity_curie)
        if ilx_row:
            inside.append({
                'external_ontology_row': row,
                'ilx_rows': [ilx_row],
            })
        else:
            outside.append(row)
    if diff:
        diff = self.__exhaustive_diff(inside)
        return inside, outside, diff
    return inside, outside
|
def to_datetime(dt, tzinfo=None, format=None):
    """Convert a date, time, or string to a datetime with tzinfo.

    :param dt: datetime/date-like object or string; falsy values are
        returned unchanged
    :param tzinfo: timezone applied when dt is naive (falls back to the
        module default __timezone__)
    :param format: a single strptime format string, or a list/tuple of
        candidate formats; DEFAULT_DATETIME_INPUT_FORMATS is tried when
        omitted
    :return: timezone-converted datetime, or None when a string fails to
        parse with every candidate format
    """
    if not dt:
        return dt
    tz = pick_timezone(tzinfo, __timezone__)
    if isinstance(dt, (str, unicode)):
        if not format:
            formats = DEFAULT_DATETIME_INPUT_FORMATS
        elif isinstance(format, (list, tuple)):
            formats = list(format)
        else:
            # BUG FIX: the original used list(format), which splits a
            # single format *string* into individual characters; wrap it
            # in a list instead.
            formats = [format]
        d = None
        for fmt in formats:
            try:
                d = datetime.strptime(dt, fmt)
            except ValueError:
                continue
            else:
                # BUG FIX: stop at the first format that parses instead
                # of letting a later candidate overwrite the result.
                break
        if not d:
            return None
        d = d.replace(tzinfo=tz)
    else:
        # Fill missing components with defaults so date and time objects
        # both map onto a full datetime.
        d = datetime(getattr(dt, 'year', 1970), getattr(dt, 'month', 1),
                     getattr(dt, 'day', 1), getattr(dt, 'hour', 0),
                     getattr(dt, 'minute', 0), getattr(dt, 'second', 0),
                     getattr(dt, 'microsecond', 0))
        if not getattr(dt, 'tzinfo', None):
            d = d.replace(tzinfo=tz)
        else:
            d = d.replace(tzinfo=dt.tzinfo)
    return to_timezone(d, tzinfo)
|
def create_hosting_device_resources(self, context, complementary_id,
                                    tenant_id, mgmt_context, max_hosted):
    """Create resources for a hosting device in a plugin specific way.

    Creates the management port when a management network id and tenant
    are given; on failure the partially created resources are cleaned up
    and mgmt_port is returned as None.

    :param context: request context
    :param complementary_id: identifier stored in device_owner so these
        ports can be queried before Nova sets device_id
    :param tenant_id: owner tenant of the management port
    :param mgmt_context: dict with management network information
        ('mgmt_nw_id' etc.)
    :param max_hosted: maximum entities hosted (unused in this
        implementation)
    :return: dict with 'mgmt_port' and an (always empty) 'ports' list
    """
    mgmt_port = None
    if mgmt_context and mgmt_context.get('mgmt_nw_id') and tenant_id:
        # Create port for mgmt interface
        p_spec = {'port': {
            'tenant_id': tenant_id,
            'admin_state_up': True,
            'name': 'mgmt',
            'network_id': mgmt_context['mgmt_nw_id'],
            'mac_address': bc.constants.ATTR_NOT_SPECIFIED,
            'fixed_ips': self._mgmt_subnet_spec(context, mgmt_context),
            'device_id': "",
            # Use device_owner attribute to ensure we can query for these
            # ports even before Nova has set device_id attribute.
            'device_owner': complementary_id}}
        try:
            mgmt_port = self._core_plugin.create_port(context, p_spec)
        except n_exc.NeutronException as e:
            LOG.error('Error %s when creating management port. '
                      'Cleaning up.', e)
            self.delete_hosting_device_resources(context, tenant_id,
                                                 mgmt_port)
            mgmt_port = None
    # We are setting the 'ports' to an empty list as it is expected by
    # the callee: device_handling_db._create_svc_vm_hosting_devices()
    return {'mgmt_port': mgmt_port, 'ports': []}
|
def _set_optimal_area ( self , data ) :
"""Reduce the zone to reduce the size of fetched data on refresh"""
|
lats = [ station [ "latitude" ] for station in data . values ( ) ]
longs = [ station [ "longitude" ] for station in data . values ( ) ]
self . gps . update ( { "gpsTopLatitude" : max ( lats ) , "gpsTopLongitude" : max ( longs ) , "gpsBotLatitude" : min ( lats ) , "gpsBotLongitude" : min ( longs ) , } )
|
def spawn(self, spawn_mapping=None):
    """Return an exact copy of this generator which behaves the same way
    (i.e., produces the same elements in the same order) but is otherwise
    independent, i.e. there is no link between the two generators
    (as opposed to a cloned generator, which is automatically reset
    whenever the original generator is reset).

    :param spawn_mapping: optional SpawnMapping relating already-spawned
        generators to their spawns; a fresh one is created when omitted.
    :raises TohuCloneError: when this generator is a clone whose parent
        is not present in spawn_mapping.
    """
    spawn_mapping = spawn_mapping or SpawnMapping()
    if self.parent is not None:
        if self.parent in spawn_mapping:
            # This is a clone whose parent was already spawned:
            # return a new clone of the mapped parent.
            return spawn_mapping[self.parent].clone()
        else:
            raise TohuCloneError("Cannot spawn a cloned generator without being able to map its parent.")
    else:
        # Not a clone: delegate to the mapping to create a fresh spawn.
        new_obj = spawn_mapping.spawn_generator(self)
        return new_obj
|
def similarity(self, track):
    """Compares two tracks based on their topology.

    This method compares the given track against this instance. It only
    verifies if the given track is close to this one, not the other way
    around.

    Args:
        track (:obj:`Track`)
    Returns:
        Two-tuple with global similarity between tracks
        and an array with the similarity between segments
    """
    # Spatial index over this track's segments, keyed by bounding box.
    idx = index.Index()
    i = 0
    for i, segment in enumerate(self.segments):
        idx.insert(i, segment.bounds(), obj=segment)
    final_siml = []
    final_diff = []
    for i, segment in enumerate(track.segments):
        # Candidate segments of self whose bounds intersect this one.
        query = idx.intersection(segment.bounds(), objects=True)
        res_siml = []
        res_diff = []
        for result in query:
            siml, diff = segment_similarity(segment, result.object)
            res_siml.append(siml)
            res_diff.append((result.id, i, diff))
        if len(res_siml) > 0:
            # Keep the best-matching candidate and its diff.
            final_siml.append(max(res_siml))
            final_diff.append(res_diff[np.argmax(res_siml)])
        else:
            # No overlapping segment in self: similarity 0.
            final_siml.append(0)
            final_diff.append([])
    return np.mean(final_siml), final_diff
|
def close(self, *args, **kwargs):
    """write close tag of MRV file and close opened file

    :param force: force closing of externally opened file or buffer
    """
    # Emit the closing tag at most once, then delegate to the base class.
    if not self.__finalized:
        self._file.write('</cml>')
        self.__finalized = True
    super().close(*args, **kwargs)
|
def convert(self, vroot, entry_variables):
    """Convert the graph: BatchNormalization functions running in
    inference mode (batch_stat=False) are replaced by their linear
    equivalent; all other functions are passed through unchanged.

    Args:
        vroot (:obj:`Variable`): NNabla Variable
        entry_variables (:obj:`Variable`): Entry variable from which the
            conversion starts.
    """
    self.graph_info = GraphInfo(vroot)
    self.entry_variables = entry_variables
    cnt = 0
    with nn.parameter_scope(self.name):
        # Function loop in the forward order
        for t, func in enumerate(self.graph_info.funcs):
            if func.name == "BatchNormalization":
                bn_func = func
                # TODO: should deal with both?
                if bn_func.info.args["batch_stat"] == False:
                    o = self._bn_linear_conversion(bn_func, cnt)
                    cnt += 1
                    continue
            # Identity conversion
            o = self._identity_conversion(func)
    self.end_variable = o
    return self.end_variable
|
def auction_history(self, symbol='btcusd', since=0,
                    limit_auction_results=50, include_indicative=1):
    """Send a request for auction history info, return the response.

    Arguments:
    symbol -- currency symbol (default 'btcusd')
    since -- only return auction events after this timestamp (default 0)
    limit_auction_results -- maximum number of auction events to return
        (default 50).
    include_indicative -- whether to include publication of indicative
        prices and quantities. (default True)
    """
    endpoint = self.base_url + '/v1/auction/' + symbol + '/history'
    query = {
        'since': since,
        'limit_auction_results': limit_auction_results,
        'include_indicative': include_indicative,
    }
    return requests.get(endpoint, query)
|
def fqdns():
    '''Return all known FQDNs for the system by enumerating all interfaces
    and then trying to reverse resolve them (excluding 'lo' interface).
    '''
    # Provides:
    # fqdns
    grains = {}
    fqdns = set()
    # Collect all non-loopback IPv4 and IPv6 addresses.
    addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=_get_interfaces())
    addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=_get_interfaces()))
    err_message = 'Exception during resolving address: %s'
    for ip in addresses:
        try:
            name, aliaslist, addresslist = socket.gethostbyaddr(ip)
            # Keep the canonical FQDN plus any aliases that look like FQDNs.
            fqdns.update([socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)])
        except socket.herror as err:
            if err.errno == 0:
                # No FQDN for this IP address, so we don't need to know this all the time.
                log.debug("Unable to resolve address %s: %s", ip, err)
            else:
                log.error(err_message, err)
        except (socket.error, socket.gaierror, socket.timeout) as err:
            log.error(err_message, err)
    return {"fqdns": sorted(list(fqdns))}
|
def get_vcs_directory(context, directory):
    """Get the pathname of the directory containing the version control
    metadata files: the nested '.git' directory when it exists, otherwise
    the directory itself."""
    nested = os.path.join(directory, '.git')
    if context.is_directory(nested):
        return nested
    return directory
|
def add_cell_footer(self):
    """Add footer cell (markdown with execution metadata).

    Skips adding when a footer cell is already present — this happens
    when exporting to ipynb and then importing from ipynb again.
    """
    logging.info('Adding footer cell')
    # Check if there's already a cell footer; if so, do not add a second
    # one (detected via the pynb_footer_tag marker in the markdown).
    for cell in self.nb['cells']:
        if cell.cell_type == 'markdown':
            if 'pynb_footer_tag' in cell.source:
                logging.debug('Footer cell already present')
                return
    m = """
---
* **Notebook class name**: {class_name}
* **Notebook cells name**: {cells_name}
* **Execution time**: {exec_begin}
* **Execution duration**: {exec_time:.2f}s
* **Command line**: {argv}
[//]: # (pynb_footer_tag)
"""
    self.add_cell_markdown(m.format(exec_time=self.exec_time, exec_begin=self.exec_begin_dt, class_name=self.__class__.__name__, argv=str(sys.argv), cells_name=self.cells_name))
|
def _process_collection(self, collection_id, label, page):
    """This function will process the data supplied internally
    about the repository from Coriell.

    Triples:
        Repository a ERO:collection
        rdf:label Literal(label)
        foaf:page Literal(page)

    :param collection_id: Coriell collection identifier
    :param label: human-readable repository label
    :param page: repository web page
    :return: None
    """
    # ##### BUILD THE CELL LINE REPOSITORY #####
    for graph in [self.graph, self.testgraph]:
        # TODO: How to devise a label for each repository?
        model = Model(graph)
        reference = Reference(graph)
        repo_id = 'CoriellCollection:' + collection_id
        repo_label = label
        repo_page = page
        model.addIndividualToGraph(repo_id, repo_label, self.globaltt['collection'])
        reference.addPage(repo_id, repo_page)
    return
|
def on_compiled(self, name=None, key_schema=None, value_schema=None,
                as_mapping_key=None):
    """When CompiledSchema compiles this marker, it sets informational
    values onto it.

    Arguments may arrive in incomplete sets across multiple calls
    (e.g. (name, key_schema, None) and later (None, None, value_schema)),
    so each attribute is only filled in when still unset. A marker may
    legitimately end up with no value_schema at all: e.g. in
    {Extra: Reject}, `Reject` has no value schema, while `Extra` has
    compiled `Reject` as its value.

    :param key_schema: Compiled key schema
    :type key_schema: CompiledSchema | None
    :param value_schema: Compiled value schema
    :type value_schema: CompiledSchema | None
    :param name: Human-friendly marker name
    :type name: unicode | None
    :param as_mapping_key: Whether it's used as a mapping key?
    :type as_mapping_key: bool | None
    :rtype: Marker
    """
    self.name = name if self.name is None else self.name
    self.key_schema = (key_schema if self.key_schema is None
                       else self.key_schema)
    self.value_schema = (value_schema if self.value_schema is None
                         else self.value_schema)
    if as_mapping_key:
        self.as_mapping_key = True
    return self
|
def get_stories(self, userids: Optional[List[int]] = None) -> Iterator[Story]:
    """Get available stories from followees or all stories of users whose
    ID are given. Does not mark stories as seen.

    To use this, one needs to be logged in.

    :param userids: List of user IDs to be processed in terms of
        downloading their stories, or None (= all followees).
    :raises BadResponseException: when the stories reel JSON is malformed.
    """
    if not userids:
        # No explicit users: fetch the logged-in user's stories tray.
        data = self.context.graphql_query("d15efd8c0c5b23f0ef71f18bf363c704", {"only_stories": True})["data"]["user"]
        if data is None:
            raise BadResponseException('Bad stories reel JSON.')
        userids = list(edge["node"]["id"] for edge in data["feed_reels_tray"]["edge_reels_tray_to_reel"]["edges"])

    def _userid_chunks():
        # The GraphQL endpoint limits how many reel ids fit in one query.
        userids_per_query = 100
        for i in range(0, len(userids), userids_per_query):
            yield userids[i:i + userids_per_query]

    for userid_chunk in _userid_chunks():
        stories = self.context.graphql_query("bf41e22b1c4ba4c9f31b844ebb7d9056", {"reel_ids": userid_chunk, "precomposed_overlay": False})["data"]
        yield from (Story(self.context, media) for media in stories['reels_media'])
|
def extract_cosponsors(bill):
    """Return a list of [thomas_id, bill_id, district, state] rows
    relating each cosponsor to the legislation."""
    logger.debug("Extracting Cosponsors")
    bill_id = bill.get('bill_id', None)
    cosponsor_map = [
        [co.get('thomas_id'), bill_id, co.get('district'), co.get('state')]
        for co in bill.get('cosponsors', [])
    ]
    logger.debug("End Extractioning Cosponsors")
    return cosponsor_map
|
def set_user_tag(self, usertag, pass_to_command_line=True):
    """Set the user tag that is passed to the analysis code.

    @param usertag: the user tag to identify the job
    @param pass_to_command_line: add user-tag as a variable option.
    """
    self.__user_tag = usertag
    if not pass_to_command_line:
        return
    self.add_var_opt('user-tag', usertag)
|
def _ensure_create_ha_compliant(self, router, router_type):
    """To be called in create_router() BEFORE router is created in DB.

    Normalizes the router's HA attributes: fills unspecified values with
    configured defaults and rejects disabled HA configurations.

    :param router: router dict from the API request (HA keys are popped)
    :param router_type: router type record providing ha_enabled_by_default
    :return: dict with normalized ha.ENABLED and ha.DETAILS
    :raises ha.HADisabled: when HA is requested but globally disabled
    :raises ha.HADisabledHAType: when the requested HA mechanism is
        disabled by configuration
    """
    details = router.pop(ha.DETAILS, {})
    if details == ATTR_NOT_SPECIFIED:
        details = {}
    res = {ha.ENABLED: router.pop(ha.ENABLED, ATTR_NOT_SPECIFIED), ha.DETAILS: details}
    # Default the enabled flag from the router type when unspecified.
    if not is_attr_set(res[ha.ENABLED]):
        res[ha.ENABLED] = router_type['ha_enabled_by_default']
    if res[ha.ENABLED] and not cfg.CONF.ha.ha_support_enabled:
        raise ha.HADisabled()
    if not res[ha.ENABLED]:
        # HA disabled: no detail normalization required.
        return res
    # Fill each unspecified detail with its configured default.
    if not is_attr_set(details.get(ha.TYPE, ATTR_NOT_SPECIFIED)):
        details[ha.TYPE] = cfg.CONF.ha.default_ha_mechanism
    if details[ha.TYPE] in cfg.CONF.ha.disabled_ha_mechanisms:
        raise ha.HADisabledHAType(ha_type=details[ha.TYPE])
    if not is_attr_set(details.get(ha.REDUNDANCY_LEVEL, ATTR_NOT_SPECIFIED)):
        details[ha.REDUNDANCY_LEVEL] = (cfg.CONF.ha.default_ha_redundancy_level)
    if not is_attr_set(details.get(ha.PROBE_CONNECTIVITY, ATTR_NOT_SPECIFIED)):
        details[ha.PROBE_CONNECTIVITY] = (cfg.CONF.ha.connectivity_probing_enabled_by_default)
    if not is_attr_set(details.get(ha.PROBE_TARGET, ATTR_NOT_SPECIFIED)):
        details[ha.PROBE_TARGET] = cfg.CONF.ha.default_probe_target
    if not is_attr_set(details.get(ha.PROBE_INTERVAL, ATTR_NOT_SPECIFIED)):
        details[ha.PROBE_INTERVAL] = cfg.CONF.ha.default_ping_interval
    return res
|
def run(version, quiet, no_fetch, push, **kwargs):  # pragma: no cover
    """A nicer `git pull`.

    :param version: print version information and exit
    :param quiet: suppress stdout output
    :param no_fetch: skip the fetch step
    :param push: when not None, override the 'push.auto' setting
    """
    if version:
        if NO_DISTRIBUTE:
            print(colored('Please install \'git-up\' via pip in order to '
                          'get version information.', 'yellow'))
        else:
            GitUp(sparse=True).version_info()
        return
    if quiet:
        # Swallow all stdout output for the rest of the run.
        sys.stdout = StringIO()
    try:
        gitup = GitUp()
        if push is not None:
            gitup.settings['push.auto'] = push
        # if arguments['--no-fetch'] or arguments['--no-f']:
        if no_fetch:
            gitup.should_fetch = False
    except GitError:
        # Error in constructor
        sys.exit(1)
    else:
        gitup.run()
|
def _send_status_0x01_request(self):
    """Send a 0x19/0x01 light status request to the device, registering
    the matching response handler."""
    self._send_method(
        StandardSend(self._address, COMMAND_LIGHT_STATUS_REQUEST_0X19_0X01),
        self._status_message_0x01_received)
|
def _parse_txtinfo ( self , data ) :
"""Converts the python list returned by self . _ txtinfo _ to _ python ( )
to a NetworkX Graph object , which is then returned ."""
|
graph = self . _init_graph ( )
for link in data :
graph . add_edge ( link [ 'source' ] , link [ 'target' ] , weight = link [ 'cost' ] )
return graph
|
def binary(self, name):
    """Return the path to the command of the given name for this distribution.

    For example::

        >>> d = Distribution()
        >>> jar = d.binary('jar')
        >>> jar
        '/usr/bin/jar'

    If this distribution has no valid command of the given name raises
    Distribution.Error.  If this distribution is a JDK, both ``bin`` and
    ``jre/bin`` are checked for the binary.
    """
    if isinstance(name, str):
        self.validate()
        return self._validated_executable(name)
    # Reject non-string lookups up front with a descriptive error.
    raise ValueError('name must be a binary name, given {} of type {}'.format(name, type(name)))
|
def send_stats(self, start, environ, response_interception, exception=None):
    """Send the actual timing stats to statsd.

    :param start: start time in seconds since the epoch as a floating point number
    :type start: float
    :param environ: wsgi environment
    :type environ: dict
    :param response_interception: dictionary in form
        ``{'status': '<response status>', 'response_headers': [<response headers>], 'exc_info': <exc_info>}``.
        This is the interception of what was passed to the start_response handler.
    :type response_interception: dict
    :param exception: optional exception that happened during the iteration of the response
    :type exception: Exception
    """
    # start_response may never have been called (or may have failed),
    # leaving an empty interception -- nothing to report in that case.
    if not response_interception:
        return
    metric_key = self.get_key_name(environ, response_interception, exception=exception)
    stats_timer = self.statsd_client.timer(metric_key)
    # Backdate the timer to the request start so stop() records the full span.
    stats_timer._start_time = start
    stats_timer.stop()
|
def get_qword_from_offset(self, offset):
    """Return the quad-word value at the given file offset. (little endian)

    Returns None when fewer than 8 bytes are available at ``offset``.
    """
    end = offset + 8
    if end <= len(self.__data__):
        return self.get_qword_from_data(self.__data__[offset:end], 0)
    # Not enough bytes left in the buffer for a full quad-word.
    return None
|
def _parse_migrations(self):
    """Build a :class:`Migration` instance.

    Reads the ``migration`` section of ``self.parsed``; options are parsed
    first because version parsing depends on them.
    """
    migration = self.parsed['migration']
    options = self._parse_options(migration)
    versions = self._parse_versions(migration, options)
    return Migration(versions, options)
|
def render_markdown(post):
    """Render the post as Markdown using the template specified in
    :attr:`markdown_template_path`.

    The YAML metadata is emitted key-by-key so the output order is
    predictable.  The order, assuming all metadata should be written, is:
    title, status, timestamp, link, via, via-link, slug, tags, updated,
    template, content-template, url.
    """
    from engineer.conf import settings
    # Base list of (key, value) pairs in their target order; optional keys
    # (title/slug/timestamp/url) are spliced in below.  None values are
    # suppressed when the YAML is emitted.
    d = [('status', post.status.name),
         ('link', post.link),
         ('via', post.via),
         ('via-link', post.via_link),
         ('tags', post.tags),
         ('updated', post.updated_local.strftime(settings.TIME_FORMAT) if post.updated is not None else None),
         ('template', post.template if post.template != 'theme/post_detail.html' else None),
         ('content-template', post.content_template if post.content_template != 'theme/_content_default.html' else None), ]
    # The complete set of metadata that should be written is the union of the
    # FINALIZE_METADATA config setting and the set of metadata that was in
    # the file originally.
    finalization_config = FinalizationPlugin.get_settings()['config']
    metadata_to_finalize = set([m for m, s in finalization_config.iteritems() if post.status in s])
    metadata_to_finalize.update(post.metadata_original)
    if 'title' in metadata_to_finalize:  # insert at the top of the list
        d.insert(0, ('title', post.title))
    if 'slug' in metadata_to_finalize:  # insert right before tags
        d.insert(d.index(('tags', post.tags)), ('slug', post.slug))
    if 'timestamp' in metadata_to_finalize:
        # Insert right *after* status: list.insert places the new item before
        # the given index, so the documented order requires index + 1.
        d.insert(d.index(('status', post.status.name)) + 1,
                 ('timestamp', post.timestamp_local.strftime(settings.TIME_FORMAT)))
    if 'url' in metadata_to_finalize:  # insert at end of list
        d.append(('url', post.url))
    metadata = ''
    for k, v in d:
        # Skip absent (None) and empty values entirely.
        if v is not None and len(v) > 0:
            metadata += yaml.safe_dump(dict([(k, v)]), default_flow_style=False)
    # Handle custom (user-defined) metadata properties.
    if len(post.custom_properties):
        metadata += '\n'
        metadata += yaml.safe_dump(dict(post.custom_properties), default_flow_style=False)
    return settings.JINJA_ENV.get_template(post.markdown_template_path).render(metadata=metadata,
                                                                               content=post.content_finalized,
                                                                               post=post)
|
def min_edit_distance_align(source, target, ins_cost=lambda _x: 1, del_cost=lambda _x: 1, sub_cost=lambda x, y: 0 if x == y else 1):
    """Finds a minimum cost alignment between two strings.

    Uses the Levenshtein weighting as a default, but offers keyword arguments
    to supply functions to measure the costs for editing with different
    characters.  Note that the alignment may not be unique.

    Args:
        source: The source sequence of characters.
        target: The target sequence of characters.
        ins_cost: A function describing the cost of inserting a given char.
        del_cost: A function describing the cost of deleting a given char.
        sub_cost: A function describing the cost of substituting one char for
            another (zero for equal chars by default).

    Returns:
        A list of (source_char, target_char) tuples giving a character-level
        alignment between source and target; a deletion pairs a source char
        with "" and an insertion pairs "" with a target char.
    """
    # dist[i][j] holds the edit distance between source[:i] and target[:j];
    # bptrs[i][j] holds the predecessor cell on one cheapest path.  Index 0
    # denotes the empty prefix.
    n = len(target)
    m = len(source)
    dist = [[0] * (n + 1) for _ in range(m + 1)]
    bptrs = [[[]] * (n + 1) for _ in range(m + 1)]  # type: List[List[List]]
    # First column: delete every source char; first row: insert every target
    # char.  Accumulate the *custom* costs rather than assuming unit cost.
    for i in range(1, m + 1):
        dist[i][0] = dist[i - 1][0] + del_cost(source[i - 1])
        bptrs[i][0] = (i - 1, 0)
    for j in range(1, n + 1):
        dist[0][j] = dist[0][j - 1] + ins_cost(target[j - 1])
        bptrs[0][j] = (0, j - 1)
    # Dynamic programming fill.  A move on i consumes a source char, i.e. a
    # deletion priced by del_cost(source char); a move on j consumes a target
    # char, i.e. an insertion priced by ins_cost(target char); a diagonal
    # move is a substitution (free when the chars match, by default).
    for j in range(1, n + 1):
        for i in range(1, m + 1):
            options = [(dist[i - 1][j] + del_cost(source[i - 1]), (i - 1, j)),
                       (dist[i - 1][j - 1] + sub_cost(source[i - 1], target[j - 1]), (i - 1, j - 1)),
                       (dist[i][j - 1] + ins_cost(target[j - 1]), (i, j - 1))]
            # sorted() also breaks cost ties deterministically by cell index.
            (minimum, pointer) = sorted(options)[0]
            dist[i][j] = minimum
            bptrs[i][j] = pointer
    # Put the backtrace in a list, and reverse it to get a forward trace.
    bt = [(m, n)]
    cell = bptrs[m][n]
    while True:
        if not cell:
            # Empty list/tuple marks the start of the trace.
            break
        bt.append(cell)
        if bptrs[cell[0]][cell[1]]:
            cell = bptrs[cell[0]][cell[1]]
        else:
            break
    trace = list(reversed(bt))
    # Construct an alignment between source and target using the trace.
    alignment = []
    for i in range(1, len(trace)):
        current = trace[i]
        prev = trace[i - 1]
        if current[0] - prev[0] == 1 and current[1] - prev[1] == 1:
            # Diagonal: substitution, or no change when the chars match.
            alignment.append((source[current[0] - 1], target[current[1] - 1]))
        elif current[0] - prev[0] == 1:
            # Moves only on the source side: deletion.
            alignment.append((source[current[0] - 1], ""))
        elif current[1] - prev[1] == 1:
            # Moves only on the target side: insertion.
            alignment.append(("", target[current[1] - 1]))
    return alignment
|
def write_data(self, buf):
    """Send data to the device.

    :param buf: the data to send.
    :type buf: list(int)
    :return: success status (always True; failure raises instead).
    :rtype: bool
    :raises IOError: if the HID write does not accept all bytes.
    """
    # Pack the byte values into a string for the C API.
    # NOTE(review): relies on Python 2 str semantics (chr -> byte); under
    # Python 3, ctypes.c_char_p would need a bytes object -- confirm target.
    data = ''.join(map(chr, buf))
    size = len(data)
    # A short write is treated as a hard failure.
    if hidapi.hid_write(self.device, ctypes.c_char_p(data), size) != size:
        raise IOError('pywws.device_ctypes_hidapi.USBDevice.write_data failed')
    return True
|
def make_id(self):
    """Create a new URL id that is unique to the parent container."""
    if self.url_id is None:  # Set id only if empty
        # Assign a SELECT expression rather than a Python value: the id is
        # computed in the database as max(url_id) + 1 among rows with the
        # same parent (coalesced to 1 when there are none).
        # NOTE(review): presumably this avoids a read-then-write race by
        # resolving the subselect at INSERT time -- confirm against callers.
        self.url_id = select([func.coalesce(func.max(self.__class__.url_id + 1), 1)], self.__class__.parent == self.parent)
|
def hide_arp_holder_arp_entry_interfacetype_Port_channel_Port_channel(self, **kwargs):
    """Auto Generated Code.

    Builds the config subtree for an ARP entry bound to a Port-channel
    interface and hands it to the callback.
    """
    config = ET.Element("config")
    arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
    entry = ET.SubElement(arp_holder, "arp-entry")
    # The ARP IP address is the key of the entry.
    ET.SubElement(entry, "arp-ip-address").text = kwargs.pop('arp_ip_address')
    iface_type = ET.SubElement(entry, "interfacetype")
    # The schema nests two identically named Port-channel elements.
    outer_pc = ET.SubElement(iface_type, "Port-channel")
    inner_pc = ET.SubElement(outer_pc, "Port-channel")
    inner_pc.text = kwargs.pop('Port_channel')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def generate_airflow_spec(name, pipeline_spec):
    """Gets the airflow python spec for the Pipeline object.

    :param name: the pipeline (DAG) name.
    :param pipeline_spec: dict with a 'tasks' mapping plus optional
        'parameters', 'schedule' and 'emails' entries.
    :return: the complete generated Python source as a string.
    """
    operator_defs = ''
    upstream_defs = ''
    parameters = pipeline_spec.get('parameters')
    # Sort by task id so the generated file is deterministic.
    for task_id, task_details in sorted(pipeline_spec['tasks'].items()):
        operator_defs += PipelineGenerator._get_operator_definition(task_id, task_details, parameters)
        upstream_defs += PipelineGenerator._get_dependency_definition(task_id, task_details.get('up_stream', []))
    schedule_config = pipeline_spec.get('schedule', {})
    default_args = PipelineGenerator._get_default_args(schedule_config, pipeline_spec.get('emails', {}))
    dag_definition = PipelineGenerator._get_dag_definition(name,
                                                           schedule_config.get('interval', '@once'),
                                                           schedule_config.get('catchup', False))
    return PipelineGenerator._imports + default_args + dag_definition + operator_defs + upstream_defs
|
def progressbar(iterable=None, length=None, label=None, show_eta=True, show_percent=None, show_pos=False, item_show_func=None, fill_char='#', empty_char='-', bar_template='%(label)s [%(bar)s] %(info)s', info_sep=' ', width=36, file=None, color=None):
    """This function creates an iterable context manager that can be used
    to iterate over something while showing a progress bar.  It will
    either iterate over the `iterable` or `length` items (that are counted
    up).  While iteration happens, this function will print a rendered
    progress bar to the given `file` (defaults to stdout) and will attempt
    to calculate remaining time and more.  By default, this progress bar
    will not be rendered if the file is not a terminal.

    The context manager creates the progress bar.  When the context
    manager is entered the progress bar is already displayed.  With every
    iteration over the progress bar, the iterable passed to the bar is
    advanced and the bar is updated.  When the context manager exits,
    a newline is printed and the progress bar is finalized on screen.
    No printing must happen or the progress bar will be unintentionally
    destroyed.

    Example usage::

        with progressbar(items) as bar:
            for item in bar:
                do_something_with(item)

    Alternatively, if no iterable is specified, one can manually update the
    progress bar through the `update()` method instead of directly
    iterating over the progress bar.  The update method accepts the number
    of steps to increment the bar with::

        with progressbar(length=chunks.total_bytes) as bar:
            for chunk in chunks:
                process_chunk(chunk)
                bar.update(chunks.bytes)

    .. versionadded:: 2.0

    .. versionadded:: 4.0
       Added the `color` parameter.  Added an `update` method to the
       progressbar object.

    :param iterable: an iterable to iterate over.  If not provided the length
                     is required.
    :param length: the number of items to iterate over.  By default the
                   progressbar will attempt to ask the iterator about its
                   length, which might or might not work.  If an iterable is
                   also provided this parameter can be used to override the
                   length.  If an iterable is not provided the progress bar
                   will iterate over a range of that length.
    :param label: the label to show next to the progress bar.
    :param show_eta: enables or disables the estimated time display.  This is
                     automatically disabled if the length cannot be
                     determined.
    :param show_percent: enables or disables the percentage display.  The
                         default is `True` if the iterable has a length or
                         `False` if not.
    :param show_pos: enables or disables the absolute position display.  The
                     default is `False`.
    :param item_show_func: a function called with the current item which
                           can return a string to show the current item
                           next to the progress bar.  Note that the current
                           item can be `None`!
    :param fill_char: the character to use to show the filled part of the
                      progress bar.
    :param empty_char: the character to use to show the non-filled part of
                       the progress bar.
    :param bar_template: the format string to use as template for the bar.
                         The parameters in it are ``label`` for the label,
                         ``bar`` for the progress bar and ``info`` for the
                         info section.
    :param info_sep: the separator between multiple info items (eta etc.)
    :param width: the width of the progress bar in characters, 0 means full
                  terminal width
    :param file: the file to write to.  If this is not a terminal then
                 only the label is printed.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.  This is only needed if ANSI
                  codes are included anywhere in the progress bar output
                  which is not the case by default.
    """
    # Imported here rather than at module level -- presumably to avoid an
    # import cycle with the implementation module; confirm before moving.
    from ._termui_impl import ProgressBar
    # Resolve `color` (None means autodetect) against the current context.
    color = resolve_color_default(color)
    return ProgressBar(iterable=iterable, length=length, show_eta=show_eta, show_percent=show_percent, show_pos=show_pos, item_show_func=item_show_func, fill_char=fill_char, empty_char=empty_char, bar_template=bar_template, info_sep=info_sep, file=file, label=label, width=width, color=color)
|
def cleanup_unattached_disks(kwargs=None, conn=None, call=None):
    '''
    .. versionadded:: 2015.8.0

    Cleans up all disks associated with the account, which are not attached.

    ***CAUTION*** This is a destructive function with no undo button, and no
    "Are you sure?" confirmation!

    CLI Examples:

    .. code-block:: bash

        salt-cloud -f cleanup_unattached_disks my-azure name=my_disk
        salt-cloud -f cleanup_unattached_disks my-azure name=my_disk delete_vhd=True
    '''
    if call != 'function':
        # Error message names this function (the original referred to delete_disk).
        raise SaltCloudSystemExit('The cleanup_unattached_disks function must be called with -f or --function.')
    if kwargs is None:
        kwargs = {}
    # list_disks returns a dict of disk info keyed by disk name.
    disks = list_disks(kwargs=kwargs, conn=conn, call='function')
    for disk in disks:
        if disks[disk]['attached_to'] is None:
            del_kwargs = {'name': disks[disk]['name'], 'delete_vhd': kwargs.get('delete_vhd', False)}
            log.info('Deleting disk %s, deleting VHD: %s', del_kwargs['name'], del_kwargs['delete_vhd'])
            # Return value is intentionally ignored; failures surface via logs.
            delete_disk(kwargs=del_kwargs, call='function')
    return True
|
def request(self, method, url, name=None, **kwargs):
    """Constructs and sends a :py:class:`requests.Request`.
    Returns :py:class:`requests.Response` object.

    :param method:
        method for the new :class:`Request` object.
    :param url:
        URL for the new :class:`Request` object.
    :param name: (optional)
        Placeholder, make compatible with Locust's HttpSession
    :param params: (optional)
        Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional)
        Dictionary or bytes to send in the body of the :class:`Request`.
    :param headers: (optional)
        Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional)
        Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional)
        Dictionary of ``'filename': file-like-objects`` for multipart encoding upload.
    :param auth: (optional)
        Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional)
        How long to wait for the server to send data before giving up, as a float,
        or a (connect timeout, read timeout) tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional)
        Set to True by default.
    :type allow_redirects: bool
    :param proxies: (optional)
        Dictionary mapping protocol to the URL of the proxy.
    :param stream: (optional)
        whether to immediately download the response content.  Defaults to ``False``.
    :param verify: (optional)
        if ``True``, the SSL cert will be verified.  A CA_BUNDLE path can also be provided.
    :param cert: (optional)
        if String, path to ssl client cert file (.pem).  If Tuple, ('cert', 'key') pair.
    """
    # Reset per-request bookkeeping before recording anything.
    self.init_meta_data()
    # record test name
    self.meta_data["name"] = name
    # record original request info
    self.meta_data["data"][0]["request"]["method"] = method
    self.meta_data["data"][0]["request"]["url"] = url
    # Default timeout of 120s unless the caller supplied one.
    kwargs.setdefault("timeout", 120)
    self.meta_data["data"][0]["request"].update(kwargs)
    # Wall-clock the full request (including redirects).
    start_timestamp = time.time()
    response = self._send_request_safe_mode(method, url, **kwargs)
    response_time_ms = round((time.time() - start_timestamp) * 1000, 2)
    # get the length of the content, but if the argument stream is set to True, we take
    # the size from the content-length header, in order to not trigger fetching of the body
    if kwargs.get("stream", False):
        content_size = int(dict(response.headers).get("content-length") or 0)
    else:
        content_size = len(response.content or "")
    # record the consumed time
    self.meta_data["stat"] = {"response_time_ms": response_time_ms, "elapsed_ms": response.elapsed.microseconds / 1000.0, "content_size": content_size}
    # record request and response histories, include 30X redirection
    response_list = response.history + [response]
    self.meta_data["data"] = [self.get_req_resp_record(resp_obj) for resp_obj in response_list]
    # Log the outcome, but always return the response even on HTTP errors.
    try:
        response.raise_for_status()
    except RequestException as e:
        logger.log_error(u"{exception}".format(exception=str(e)))
    else:
        logger.log_info("""status_code: {}, response_time(ms): {} ms, response_length: {} bytes\n""".format(response.status_code, response_time_ms, content_size))
    return response
|
def get_by_scheme(path, lookup, default):
    """Helper function used by get*ForPath().

    Resolves the URL scheme of *path* to an implementation via *lookup*,
    falling back to *default* for unknown schemes.

    :raises NotImplementedError: when the resolved entry is None.
    """
    scheme = urlparse(path).scheme
    implementation = lookup.get(scheme, default)
    if implementation is None:
        raise NotImplementedError("No implementation for scheme " + scheme)
    return implementation
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.