signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
async def get_constraints(self):
    """Return the machine constraints for this model.

    :returns: A ``dict`` mapping constraint names to their set values.
    """
    constraints = {}
    client_facade = client.ClientFacade.from_connection(self.connection())
    result = await client_facade.GetModelConstraints()
    # GetModelConstraints returns GetConstraintsResults which has a
    # 'constraints' attribute. If no constraints have been set,
    # GetConstraintsResults.constraints is None. Otherwise
    # GetConstraintsResults.constraints has an attribute for each possible
    # constraint, each of these in turn will be None if they have not been
    # set.
    if result.constraints:
        constraint_types = [a for a in dir(result.constraints)
                            if a in Value._toSchema.keys()]
        for constraint in constraint_types:
            value = getattr(result.constraints, constraint)
            if value is not None:
                # Reuse the already-fetched value instead of a second getattr().
                constraints[constraint] = value
    return constraints
def dictfetchone(cursor: Cursor) -> Optional[Dict[str, Any]]:
    """Return the next row from ``cursor`` as an :class:`OrderedDict`, or ``None``."""
    fieldnames = get_fieldnames_from_cursor(cursor)
    row = cursor.fetchone()
    # An exhausted cursor yields a falsy row; map column names otherwise.
    return OrderedDict(zip(fieldnames, row)) if row else None
def get_default_bios_settings(self, only_allowed_settings=True):
    """Get default BIOS settings.

    :param only_allowed_settings: True when only allowed BIOS settings
        are to be returned. If False, all the BIOS settings supported
        by iLO are returned.
    :return: a dictionary of default BIOS settings (factory settings).
        Depending on 'only_allowed_settings', either only the allowed
        settings are returned or all the supported settings are returned.
    :raises: IloError, on an error from iLO.
    :raises: IloCommandNotSupportedError, if the command is not supported
        on the server.
    """
    headers_bios, bios_uri, bios_settings = self._check_bios_resource()
    # Get the BaseConfig resource.
    try:
        base_config_uri = bios_settings['links']['BaseConfigs']['href']
    except KeyError:
        msg = ("BaseConfigs resource not found. Couldn't apply the BIOS "
               "Settings.")
        raise exception.IloCommandNotSupportedError(msg)
    status, headers, config = self._rest_get(base_config_uri)
    if status != 200:
        msg = self._get_extended_error(config)
        raise exception.IloError(msg)
    # Scan the BaseConfigs entries for the one carrying the factory
    # defaults; the for/else raises when no entry has a truthy 'default'.
    for cfg in config['BaseConfigs']:
        default_settings = cfg.get('default')
        if default_settings:
            break
    else:
        msg = ("Default BIOS Settings not found in 'BaseConfigs' "
               "resource.")
        raise exception.IloCommandNotSupportedError(msg)
    if only_allowed_settings:
        # Strip out anything not in the supported-properties whitelist.
        return utils.apply_bios_properties_filter(
            default_settings, constants.SUPPORTED_BIOS_PROPERTIES)
    return default_settings
def convert_raw_data_to_universes(raw_data) -> tuple:
    """Convert raw data to a readable universes tuple.

    The raw data is scanned from index 0 and must consist of 16-bit numbers
    with the high byte first.

    :param raw_data: the raw data to convert
    :return: tuple full of 16-bit numbers
    """
    if len(raw_data) % 2 != 0:
        raise TypeError('The given data has not a length that is a multiple of 2!')
    # Consume the byte pairs via a generator expression instead of an
    # explicit accumulator list.
    return tuple(two_bytes_to_int(raw_data[index], raw_data[index + 1])
                 for index in range(0, len(raw_data), 2))
def createAppendElement(self, namespaceURI, localName, prefix=None):
    '''Create a new element (namespaceURI, name), append it
    to current node, and return the newly created node.

    Keyword arguments:
    namespaceURI -- namespace of element to create
    localName -- local name of new element
    prefix -- if namespaceURI is not defined, declare prefix. Defaults
        to 'ns1' if left unspecified.
    '''
    declare = False
    qualifiedName = localName
    if namespaceURI:
        try:
            prefix = self.getPrefix(namespaceURI)
        except Exception:
            # No prefix is bound for this namespace yet: declare one on the
            # new element. (Bare "except:" narrowed to Exception so that
            # KeyboardInterrupt/SystemExit are not swallowed.)
            declare = True
            prefix = prefix or self._getUniquePrefix()
        if prefix:
            qualifiedName = '%s:%s' % (prefix, localName)
    node = self.createElementNS(namespaceURI, qualifiedName)
    if declare:
        # Declare the namespace binding directly on the new element.
        node._setAttributeNS(XMLNS.BASE, 'xmlns:%s' % prefix, namespaceURI)
    self._appendChild(node=node._getNode())
    return node
def get_dev_details(ip_address):
    """Issue a RESTful call to HP IMC for the device with the given address.

    :param ip_address: string in dotted-decimal notation of an IPv4 address
    :return: dictionary of device details, or the string 'Device not found'
        when IMC has no matching device. NOTE(review): non-200 responses fall
        through and return None — confirm callers handle that.
    """
    # Check whether the IMC credentials are already available.
    if auth is None or url is None:
        set_imc_creds()
    global r
    get_dev_details_url = (
        "/imcrs/plat/res/device?resPrivilegeFilter=false&ip="
        + str(ip_address)
        + "&start=0&size=1000&orderBy=id&desc=false&total=false")
    f_url = url + get_dev_details_url
    # Issue the GET against the IMC REST endpoint (module-level auth/headers).
    r = requests.get(f_url, auth=auth, headers=headers)
    if r.status_code == 200:
        dev_details = json.loads(r.text)
        if len(dev_details) == 0:
            print("Device not found")
            return "Device not found"
        elif isinstance(dev_details['device'], list):
            # Multiple matches returned: select the entry whose ip matches
            # exactly. isinstance() instead of the fragile type(x) == list.
            for device in dev_details['device']:
                if device['ip'] == ip_address:
                    dev_details = device
            return dev_details
        elif isinstance(dev_details['device'], dict):
            return dev_details['device']
        else:
            print("dev_details: An Error has occured")
def _get_free_vpcids_on_switches(switch_ip_list):
    '''Get intersect list of free vpcids in list of switches.'''
    # Build a chained self-join, one NexusVPCAlloc alias per switch, so the
    # query yields only vpc_ids that are inactive (free) on every switch.
    session = bc.get_reader_session()
    prev_view = aliased(nexus_models_v2.NexusVPCAlloc)
    query = session.query(prev_view.vpc_id)
    prev_swip = switch_ip_list[0]
    for ip in switch_ip_list[1:]:
        cur_view = aliased(nexus_models_v2.NexusVPCAlloc)
        cur_swip = ip
        # Each join step requires the row to be inactive on both the
        # previous and the current switch with the same vpc_id.
        query = query.join(cur_view, sa.and_(
            prev_view.switch_ip == prev_swip,
            prev_view.active == False,  # noqa
            cur_view.switch_ip == cur_swip,
            cur_view.active == False,  # noqa
            prev_view.vpc_id == cur_view.vpc_id))
        prev_view = cur_view
        prev_swip = cur_swip
    unique_vpcids = query.all()
    # Randomize the result so concurrent callers are unlikely to contend
    # for the same vpc id.
    shuffle(unique_vpcids)
    return unique_vpcids
def create(self):
    """Create the required directory structure and admin metadata."""
    # Lay down the on-disk layout first, then persist the administrative
    # metadata into it via the storage broker.
    self._storage_broker.create_structure()
    self._storage_broker.put_admin_metadata(self._admin_metadata)
def run():
    """Use preconfigured settings to optimize files."""
    # Setup Multiprocessing: worker count comes from the module-level
    # Settings; the pool is stored back onto Settings for workers to share.
    # manager = multiprocessing.Manager()
    Settings.pool = multiprocessing.Pool(Settings.jobs)
    # Optimize Files: walk returns aggregate byte counts and error info.
    record_dirs, bytes_in, bytes_out, nag_about_gifs, errors = _walk_all_files()
    # Shut down multiprocessing and wait for outstanding work to finish.
    Settings.pool.close()
    Settings.pool.join()
    # Write timestamps for each processed directory.
    for filename in record_dirs:
        timestamp.record_timestamp(filename)
    # Finish by reporting totals.
    stats.report_totals(bytes_in, bytes_out, nag_about_gifs, errors)
def lists(self, column, key=None):
    """Get a list with the values of a given column.

    :param column: The column to get the values for
    :type column: str
    :param key: The key
    :type key: str
    :return: The list of values
    :rtype: list or dict
    """
    results = self._query.lists(column, key)
    if self._model.has_get_mutator(column):
        # Run each raw value through the model's get-mutator by hydrating a
        # throwaway model instance and reading the mutated attribute back.
        # BUGFIX: the original read the literal attribute ".column" instead
        # of the attribute named by the `column` argument; use getattr().
        # The loop variable is also renamed so it no longer shadows `key`.
        if isinstance(results, dict):
            for result_key, value in results.items():
                fill = {column: value}
                results[result_key] = getattr(
                    self._model.new_from_builder(fill), column)
        else:
            for i, value in enumerate(results):
                fill = {column: value}
                results[i] = getattr(self._model.new_from_builder(fill), column)
    return results
def complete_scopes(cls, scope_infos):
    """Expand a set of scopes to include all enclosing scopes.

    E.g., if the set contains `foo.bar.baz`, ensure that it also contains
    `foo.bar` and `foo`. Also adds any deprecated scopes.
    """
    # The global scope is always part of the result.
    ret = {GlobalOptionsRegistrar.get_scope_info()}
    original_scopes = dict()
    for si in scope_infos:
        ret.add(si)
        # Each scope name may be claimed by at most one ScopeInfo.
        if si.scope in original_scopes:
            raise cls.DuplicateScopeError('Scope `{}` claimed by {}, was also claimed by {}.'.format(si.scope, si, original_scopes[si.scope]))
        original_scopes[si.scope] = si
        if si.deprecated_scope:
            # Deprecated scopes are registered alongside their replacements.
            ret.add(ScopeInfo(si.deprecated_scope, si.category, si.optionable_cls))
            original_scopes[si.deprecated_scope] = si
    # TODO: Once scope name validation is enforced (so there can be no dots in scope name
    # components) we can replace this line with `for si in scope_infos:`, because it will
    # not be possible for a deprecated_scope to introduce any new intermediate scopes.
    for si in copy.copy(ret):
        for scope in all_enclosing_scopes(si.scope, allow_global=False):
            if scope not in original_scopes:
                # Synthesize intermediate scopes (e.g. `foo.bar` for `foo.bar.baz`).
                ret.add(ScopeInfo(scope, ScopeInfo.INTERMEDIATE))
    return ret
def get_limits(self, coord='data'):
    """Get the bounding box of the viewer extents.

    Returns
    -------
    limits : tuple
        Bounding box in coordinates of type `coord` in the form of
        ``(ll_pt, ur_pt)``.
    """
    limits = self.t_['limits']
    if limits is None:
        # No user defined limits. If there is an image loaded
        # use its dimensions as the limits.
        image = self.get_image()
        if image is not None:
            wd, ht = image.get_size()
            # data_off shifts between pixel-center and pixel-edge addressing.
            limits = ((self.data_off, self.data_off),
                      (float(wd - 1 + self.data_off),
                       float(ht - 1 + self.data_off)))
        else:
            # Calculate limits based on plotted points, if any.
            canvas = self.get_canvas()
            pts = canvas.get_points()
            if len(pts) > 0:
                limits = trcalc.get_bounds(pts)
            else:
                # No limits found, go to default.
                limits = ((0.0, 0.0), (0.0, 0.0))
    # Convert to desired coordinates.
    crdmap = self.get_coordmap(coord)
    limits = crdmap.data_to(limits)
    return limits
def makeicons(source):
    """Create all the necessary icons from the source image.

    :param source: path (or file object) of the master image to scale.
    """
    im = Image.open(source)
    # .items() instead of the Python-2-only .iteritems().
    for name, (_, w, h, func) in icon_sizes.items():
        print('Making icon %s...' % name)
        tn = func(im, (w, h))
        bg = Image.new('RGBA', (w, h), (255, 255, 255))
        # Centre the thumbnail on the background. Integer (floor) division
        # preserves the Python 2 semantics and keeps the paste() box ints.
        x = (w // 2) - (tn.size[0] // 2)
        y = (h // 2) - (tn.size[1] // 2)
        bg.paste(tn, (x, y))
        bg.save(path.join(env.dir, name))
def get_stp_brief_info_output_has_more(self, **kwargs):
    """Auto Generated Code.

    Build the <get_stp_brief_info>/<output>/<has-more> RPC element tree and
    dispatch it through the supplied ``callback`` (default: ``self._callback``).

    :param has_more: text value for the has-more leaf (required, popped
        from kwargs).
    :param callback: optional callable receiving the built element.
    :return: whatever the callback returns.
    """
    # The original created a throwaway ET.Element("config") and immediately
    # rebound the name; that dead allocation is removed here.
    rpc_root = ET.Element("get_stp_brief_info")
    output = ET.SubElement(rpc_root, "output")
    has_more = ET.SubElement(output, "has-more")
    has_more.text = kwargs.pop('has_more')
    callback = kwargs.pop('callback', self._callback)
    return callback(rpc_root)
def add_or_update(self, section, key, value):
    """Update the key or, if no previous value existed, add it.

    Returns:
        int: Number of updated lines.
    """
    # Attempt an in-place update first; append the key when nothing matched.
    updated_count = self.update(section, key, value)
    if updated_count == 0:
        self.add(section, key, value)
    return updated_count
def main(params=None):
    """Main function to launch phonefy.

    NOTE(review): this docstring said 'phonefy', but the logger name and
    banner below reference 'entify' — confirm which applet this belongs to.

    The function is created in this way so as to let other applications make
    use of the full configuration capabilities of the application. The
    parameters received are used as parsed by this module's `getParser()`.

    Args:
        params: A list with the parameters as grabbed by the terminal. It is
            None when this is called by an entry_point. If it is called by osrf
            the data is already parsed.

    Returns:
        A list with i3visio entities (only when ``params`` was provided).
    """
    # Identity comparison with `is None` instead of `== None`.
    if params is None:
        parser = getParser()
        args = parser.parse_args(params)
    else:
        args = params
    results = []
    # Recovering the logger. Calling the logger when being imported.
    logSet.setupLogger(loggerName="osrframework.entify", verbosity=args.verbose, logFolder=args.logfolder)
    # From now on, the logger can be recovered like this:
    logger = logging.getLogger("osrframework.entify")
    logger.info("Selecting the regular expressions to be analysed...")
    if not args.quiet:
        print(general.title(banner.text))
    sayingHello = """
Entify | Copyright (C) Yaiza Rubio & Félix Brezo (i3visio) 2014-2018
This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you
are welcome to redistribute it under certain conditions. For additional info,
visit <{}>.
""".format(general.LICENSE_URL)
    print(general.info(sayingHello))
    if args.license:
        general.showLicense()
    else:
        listRegexp = []
        if args.regexp:
            listRegexp = regexp_selection.getRegexpsByName(args.regexp)
        elif args.new_regexp:
            for i, r in enumerate(args.new_regexp):
                # NOTE(review): reg_exp receives the whole args.new_regexp
                # list rather than the single expression `r` — confirm intent.
                listRegexp.append(RegexpObject(name="NewRegexp" + str(i), reg_exp=args.new_regexp))
        if not args.web:
            results = scanFolderForRegexp(folder=args.input_folder, listRegexp=listRegexp, recursive=args.recursive, verbosity=args.verbose, logFolder=args.logfolder, quiet=args.quiet)
        else:
            results = scanResource(uri=args.web, listRegexp=listRegexp, verbosity=args.verbose, logFolder=args.logfolder)
        logger.info("Logging the results:\n" + json.dumps(results, indent=2, sort_keys=True))
        # Trying to store the information recovered.
        if args.output_folder is not None:
            # Verifying an output folder was selected.
            logger.debug("Preparing the output folder...")
            if not os.path.exists(args.output_folder):
                logger.warning("The output folder \'" + args.output_folder + "\' does not exist. The system will try to create it.")
                os.makedirs(args.output_folder)
            # Grabbing the results.
            fileHeader = os.path.join(args.output_folder, args.file_header)
            for ext in args.extension:
                # Generating output files.
                general.exportUsufy(results, ext, fileHeader)
        # Showing the information gathered if requested.
        if not args.quiet:
            now = dt.datetime.now()
            print("\n{}\tResults obtained:\n".format(str(now)))
            print(general.success(general.usufyToTextExport(results)))
            now = dt.datetime.now()
            print(str(now) + "\tYou can find all the information collected in the following files:")
            for ext in args.extension:
                # Showing the output files.
                print("\t-" + general.emphasis(fileHeader + "." + ext))
    # Urging users to place an issue on Github...
    print(banner.footer)
    if params:
        return results
def persistent_object_context_changed(self):
    """Override from PersistentObject.

    Wires a property binding between the source and target objects once both
    have been registered with the persistent object context, and tears it
    down when either is unregistered.
    """
    super().persistent_object_context_changed()

    def register():
        # Connect the binding only once both endpoints are known.
        if self.__source is not None and self.__target is not None:
            assert not self.__binding
            self.__binding = Binding.PropertyBinding(self.__source, self.source_property)
            self.__binding.target_setter = self.__set_target_from_source
            # while reading, the data item in the display data channel will not be connected;
            # we still set its value here. when the data item becomes valid, it will update.
            self.__binding.update_target_direct(self.__binding.get_target_value())

    def source_registered(source):
        self.__source = source
        register()

    def target_registered(target):
        self.__target = target

        def property_changed(target, property_name):
            # Propagate changes of the watched target property back to source.
            if property_name == self.target_property:
                self.__set_source_from_target(getattr(target, property_name))

        assert self.__target_property_changed_listener is None
        self.__target_property_changed_listener = target.property_changed_event.listen(functools.partial(property_changed, target))
        register()

    def unregistered(item=None):
        # item=None tears everything down; otherwise only the matching side.
        if not item or item == self.__source:
            self.__source = None
        if not item or item == self.__target:
            self.__target = None
        if self.__binding:
            self.__binding.close()
            self.__binding = None
        if self.__target_property_changed_listener:
            self.__target_property_changed_listener.close()
            self.__target_property_changed_listener = None

    def change_registration(registered_object, unregistered_object):
        if registered_object and registered_object.uuid == self.source_uuid:
            source_registered(registered_object)
        if registered_object and registered_object.uuid == self.target_uuid:
            target_registered(registered_object)
        # NOTE(review): this reads self._source/self._target (single
        # underscore) while the rest of the method uses the name-mangled
        # self.__source/self.__target — confirm the class exposes _source/
        # _target properties, otherwise this is a latent AttributeError.
        if unregistered_object and unregistered_object in (self._source, self._target):
            unregistered(unregistered_object)

    if self.persistent_object_context:
        # Listen for future (un)registrations and pick up any objects that
        # are already registered.
        self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration)
        source = self.persistent_object_context.get_registered_object(self.source_uuid)
        target = self.persistent_object_context.get_registered_object(self.target_uuid)
        if source:
            source_registered(source)
        if target:
            target_registered(target)
    else:
        # Context went away: disconnect everything.
        unregistered()
def _get_site_dummy_variables ( self , vs30 ) :
"""Returns the Eurocode 8 site class dummy variable""" | s_b = np . zeros_like ( vs30 )
s_c = np . zeros_like ( vs30 )
s_d = np . zeros_like ( vs30 )
s_b [ np . logical_and ( vs30 >= 360. , vs30 < 800. ) ] = 1.0
s_c [ np . logical_and ( vs30 >= 180. , vs30 < 360. ) ] = 1.0
s_d [ vs30 < 180 ] = 1.0
return s_b , s_c , s_d |
def validator_for(schema, default=_LATEST_VERSION):
    """Retrieve the validator class appropriate for validating the given schema.

    Uses the :validator:`$schema` property that should be present in the given
    schema to look up the appropriate validator class.

    Arguments:
        schema (collections.Mapping or bool):
            the schema to look at
        default:
            the default to return if the appropriate validator class cannot be
            determined. If unprovided, the default is to return the latest
            supported draft.
    """
    # Boolean schemas carry no $schema property; neither do schemas that
    # simply omit it — fall back to the caller-supplied default.
    if schema is True or schema is False:
        return default
    if u"$schema" not in schema:
        return default
    schema_uri = schema[u"$schema"]
    if schema_uri not in meta_schemas:
        warn(
            (
                "The metaschema specified by $schema was not found. "
                "Using the latest draft to validate, but this will raise "
                "an error in the future."
            ),
            DeprecationWarning,
            stacklevel=2,
        )
    return meta_schemas.get(schema_uri, _LATEST_VERSION)
def time_to_str(timestamp: int) -> str:
    """Convert seconds past Epoch to a human readable string.

    Uses local time (same as ``datetime.fromtimestamp``); the output format
    is ``YYYY-MM-DD-HH-MM-SS``.
    """
    return datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d-%H-%M-%S')
def delete(self, path, data=None, headers=None, params=None):
    """Deletes resources at given paths.

    :rtype: dict
    :return: Empty dictionary to have consistent interface.
        Some of Atlassian REST resources don't return any content.
    """
    self.request('DELETE', path=path, data=data, headers=headers, params=params)
    # BUGFIX: the docstring promises an empty dict for a consistent
    # interface, but the original implicitly returned None.
    return {}
def is_real_iterable(raise_ex: bool = False, summary: bool = True, **items: Any) -> ValidationReturn:
    """Tests if the given items are iterables (collections) but no strings.

    Per default this function yields either ``True`` or ``False`` depending on
    whether all items withstand the validation or not. Per default the
    validation/evaluation is short-circuit and will return as soon as an item
    evaluates to ``False``.
    When ``raise_ex`` is set to ``True`` the function will raise a meaningful
    error message after the first item evaluates to ``False`` (short-circuit).
    When ``summary`` is set to ``False`` a dictionary is returned containing
    the individual evaluation result of each item (non short-circuit).

    Examples:
        >>> l = ['i', 'am', 'an', 'iterable']
        >>> Validator.is_real_iterable(l=l)
        True

        >>> d = {'i': 'am', 'a': 'dict'}
        >>> Validator.is_real_iterable(d=d)
        True

        >>> s = "i am a string"
        >>> Validator.is_real_iterable(s=s)
        False

        >>> Validator.is_real_iterable(raise_ex=True, s=s)
        Traceback (most recent call last):
        ...
        ValueError: 's' is not an iterable

    Args:
        raise_ex (bool, optional): If set to ``True`` an exception is raised
            if at least one item is validated to ``False`` (short-circuit).
        summary (bool, optional): If set to ``False`` instead of returning a
            single ``bool`` the validation returns a dictionary containing
            the individual evaluation result of each item.

    Returns:
        (bool or dict): ``True`` when all values were successfully validated;
        ``False`` otherwise. If ``summary`` is ``False`` a dictionary of the
        per-item evaluation results is returned. If ``raise_ex`` is ``True``
        a meaningful error is raised instead of returning ``False``.
    """
    def _is_non_string_iterable(_, val):
        # Strings are iterable too, but are explicitly excluded here.
        return hasattr(val, '__iter__') and not isinstance(val, str)

    def _format_error(name, _):
        return "'{varname}' is not an iterable".format(varname=name)

    return Validator.__test_all(
        condition=_is_non_string_iterable,
        formatter=_format_error,
        raise_ex=raise_ex,
        summary=summary,
        **items
    )
def compute_steadystate(self, nnc=2):
    """Computes the non-stochastic steady-state of the economy.

    Parameters
    ----------
    nnc : array_like(float)
        nnc is the location of the constant in the state vector x_t
    """
    # The steady state x* satisfies x* = A0 x*, i.e. (I - A0) x* = 0,
    # so it spans the nullspace of (I - A0).
    zx = np.eye(self.A0.shape[0]) - self.A0
    self.zz = nullspace(zx)
    # Normalize so the constant entry of the state vector equals one.
    self.zz /= self.zz[nnc]
    # Project the normalized steady-state vector through each selector
    # matrix to obtain the steady-state value of each model quantity.
    self.css = self.Sc.dot(self.zz)
    self.sss = self.Ss.dot(self.zz)
    self.iss = self.Si.dot(self.zz)
    self.dss = self.Sd.dot(self.zz)
    self.bss = self.Sb.dot(self.zz)
    self.kss = self.Sk.dot(self.zz)
    self.hss = self.Sh.dot(self.zz)
def _set_ra_dns_server(self, v, load=False):
    """Setter method for ra_dns_server, mapped from YANG variable /routing_system/interface/ve/ipv6/ipv6_nd_ra/ipv6_intf_cmds/nd/ra_dns_server (list)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_ra_dns_server is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ra_dns_server() directly.
    """
    # Unwrap values that carry their own YANG type converter.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the supplied value into the generated YANG list type;
        # TypeError/ValueError means the value is not list-compatible.
        t = YANGDynClass(v, base=YANGListType("dns_server_prefix", ra_dns_server.ra_dns_server, yang_name="ra-dns-server", rest_name="ra-dns-server", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dns-server-prefix', extensions={u'tailf-common': {u'info': u'Set DNS server option', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'IpV6NdRaDnsServerVlanIntf'}}), is_container='list', yang_name="ra-dns-server", rest_name="ra-dns-server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set DNS server option', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'IpV6NdRaDnsServerVlanIntf'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """ra_dns_server must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("dns_server_prefix",ra_dns_server.ra_dns_server, yang_name="ra-dns-server", rest_name="ra-dns-server", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dns-server-prefix', extensions={u'tailf-common': {u'info': u'Set DNS server option', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'IpV6NdRaDnsServerVlanIntf'}}), is_container='list', yang_name="ra-dns-server", rest_name="ra-dns-server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set DNS server option', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'IpV6NdRaDnsServerVlanIntf'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='list', is_config=True)""", })
    # Store the coerced value and notify the framework, if supported.
    self.__ra_dns_server = t
    if hasattr(self, '_set'):
        self._set()
def stop(self, container, **kwargs):
    """Identical to :meth:`dockermap.client.base.DockerClientWrapper.stop` with additional logging."""
    # Emit a log line before delegating to the wrapped client implementation.
    self.push_log("Stopping container '{0}'.".format(container))
    super(DockerFabricClient, self).stop(container, **kwargs)
def defer(callable):
    '''Defers execution of the callable to a thread.

    For example:

    >>> def foo():
    ...     print('bar')
    >>> join = defer(foo)
    >>> join()
    '''
    # Start the work on a fresh thread and hand back its join() so the
    # caller decides when to wait for completion.
    worker = threading.Thread(target=callable)
    worker.start()
    return worker.join
def ClientFromConfig(engine, config, database, logger=None, verbose=True):
    """Return new database client from valid [couchdb] or [couchbase]
    config section.

    engine <str> defines which engine to use; currently supports "couchdb",
        "couchbase" and "dummydb"
    config <dict> [couchdb] or [couchbase] section from config
    database <str> token to use with bucket_%s or db_%s config property

    :raises InvalidEngineException: for an unrecognized engine name.
    """
    # Accept a list of (key, value) pairs as well as a dict.
    # isinstance() instead of the fragile type(config) == list.
    if isinstance(config, list):
        config = dict(config)
    if engine == "couchbase":
        return Client(engine=engine, host=config.get("host"),
                      auth=config.get("bucket_%s" % database),
                      logger=logger, verbose=verbose)
    elif engine == "couchdb":
        # Prefer admin credentials, fall back to the regular user pair,
        # and connect anonymously when neither is configured.
        if config.get("admin_user") and config.get("admin_password"):
            auth = "%s:%s" % (config.get("admin_user"), config.get("admin_password"))
        elif config.get("user") and config.get("password"):
            auth = "%s:%s" % (config.get("user"), config.get("password"))
        else:
            auth = None
        return Client(engine=engine, host=config.get("host"), auth=auth,
                      database=config.get("db_%s" % database),
                      logger=logger, verbose=verbose)
    elif engine == "dummydb":
        return Client(engine=engine)
    else:
        raise InvalidEngineException(engine)
def update(self, reconfigure=False):
    """update the environment"""
    try:
        self.phase = PHASE.UPDATE
        self.logger.info("Updating environment %s..." % self.namespace)
        self.install_sandboxes()
        self.instantiate_features()
        # We don't grab inputs, only on install
        # updates inputs are grabbed on demand
        # self.grab_inputs(reconfigure=reconfigure)
        if reconfigure:
            self.grab_inputs(reconfigure=True)
        else:
            self._copy_source_to_target()
        self._specialize(reconfigure=reconfigure)
        # Run the 'sync' action for every feature in dependency order.
        for feature in self.features.run_order:
            self.run_action(feature, 'sync')
        self.inject_environment_config()
        self._finalize()
    except Exception:
        # Log the full traceback at debug level, then re-raise with the
        # original traceback preserved (via six-style reraise).
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
def auto_disable(enabled: Optional[bool] = True, fds: Optional[Sequence[IO]] = (sys.stdout, sys.stderr)) -> None:
    """Automatically decide whether to disable color codes if stdout or
    stderr are not ttys.

    Arguments:
        enabled: Whether to automatically disable color codes.
            When set to True, the fds will be checked for ttys.
            When set to False, enable() is called.
        fds: Open file descriptors to check for ttys.
            If any non-ttys are found, colors will be disabled.
            Objects must have an isatty() method.
    """
    # Early exit: auto-detection turned off means colors are forced on.
    if not enabled:
        enable()
        return
    # Objects without isatty() count as non-ttys.
    if not all(getattr(f, 'isatty', lambda: False)() for f in fds):
        disable()
def qteBindKeyApplet(self, keysequence, macroName: str, appletObj: QtmacsApplet):
    """Bind ``macroName`` to all widgets in ``appletObj``.

    This method does not affect the key bindings of other applets,
    or other instances of the same applet.

    The ``keysequence`` can be specified either as a string (eg
    '<ctrl>+x <ctrl>+f'), or a list of tuples containing the
    constants from the ``QtCore.Qt`` name space
    (eg. [(ControlModifier, Key_X), (ControlModifier, Key_F)]), or
    as a ``QtmacsKeysequence`` object.

    |Args|

    * ``keysequence`` (**str**, **list** of **tuples**,
      **QtmacsKeysequence**):
      key sequence to activate ``macroName`` for specified
      ``widgetSignature``.
    * ``macroName`` (**str**): the macro to associate with
      ``keysequence``.
    * ``appletObj`` (**QtmacsApplet**): only widgets in this
      applet are affected.

    |Returns|

    * **bool**: whether or not at least one widget was
      successfully bound.

    |Raises|

    * **QtmacsArgumentError** if at least one argument has an invalid type.
    * **QtmacsKeysequenceError** if the provided ``keysequence``
      could not be parsed.
    """
    # Convert the key sequence into a QtmacsKeysequence object, or
    # raise a QtmacsKeysequenceError if the conversion is impossible.
    keysequence = QtmacsKeysequence(keysequence)
    # Verify that Qtmacs knows a macro named 'macroName'.
    if not self.qteIsMacroRegistered(macroName):
        # BUGFIX: the adjacent string literals previously concatenated to
        # "...doesnot exist." — a space was missing.
        msg = ('Cannot bind key because the macro <b>{}</b> does '
               'not exist.'.format(macroName))
        self.qteLogger.error(msg, stack_info=True)
        return False
    # Bind the key also to the applet itself because it can
    # receive keyboard events (eg. when it is empty).
    appletObj._qteAdmin.keyMap.qteInsertKey(keysequence, macroName)
    # Update the key map of every widget inside the applet.
    for wid in appletObj._qteAdmin.widgetList:
        self.qteBindKeyWidget(keysequence, macroName, wid)
    return True
def set_default_identity_by_index(self, index: int):
    """Set the default identity to the one at the given index.

    :param index: an int value indicating the position of an identity
        object in the identity list.
    :raises SDKException: if ``index`` is out of range (including negative
        values, which previously cleared every default silently).
    """
    # Validate the full range; the original only rejected index >= len,
    # so a negative index silently cleared all defaults.
    if not 0 <= index < len(self.identities):
        raise SDKException(ErrorCode.param_error)
    # Exactly one identity may be the default: clear the flag everywhere
    # and set it only on the requested entry.
    for i, identity in enumerate(self.identities):
        identity.is_default = (i == index)
def association_pivot(self, association_resource):
    """Pivot this resource on an association.

    Produces a copy of this resource whose request URI is nested under the
    provided resource's URI, so subsequent requests return only items
    (groups, indicators, tasks, victims, etc.) associated with it.

    Example endpoint shapes:
        GET/POST /v2/{pivot resourceType}/{pivot uniqueId}/{resourceType}[/{uniqueId}]

    Args:
        association_resource: resource to pivot on; its ``request_uri``
            becomes the prefix of the returned resource's URI.

    Returns:
        A pivoted copy of this resource.
    """
    pivoted = self.copy()
    pivoted._request_uri = '{}/{}'.format(
        association_resource.request_uri, pivoted._request_uri)
    return pivoted
def recursive_asdict(d):
    """Recursively convert a Suds object into plain serializable dicts/lists.

    Suds objects are detected via their ``__keylist__`` marker attribute and
    converted depth-first; lists are rebuilt with their Suds members
    converted and plain members kept as-is.

    BUG FIX: the original used ``dict.iteritems()``, which only exists on
    Python 2; ``items()`` behaves identically on both Python 2 and 3.

    :param d: a Suds object (anything accepted by ``asdict``).
    :return: a plain ``dict`` mirror of ``d``.
    """
    out = {}
    for key, val in asdict(d).items():
        if hasattr(val, '__keylist__'):
            out[key] = recursive_asdict(val)
        elif isinstance(val, list):
            out[key] = [recursive_asdict(item) if hasattr(item, '__keylist__')
                        else item
                        for item in val]
        else:
            out[key] = val
    return out
def build_dependencies ( self ) :
    '''Compile all YANG files with pyang's ``pyimport`` plugin and record
    module dependency information.

    Runs pyang over every ``*.yang`` file in ``self.dir_yang`` and parses
    the plugin's XML output into ``self.dependencies`` (an lxml element).

    Returns
    -------
    None
        Nothing returns; the result is stored on ``self.dependencies``.
    '''
    # Assemble the pyang invocation: custom plugin dir, module search path,
    # and the 'pyimport' output format provided by the plugin.
    cmd_list = [ 'pyang' , '--plugindir' , self . pyang_plugins ]
    cmd_list += [ '-p' , self . dir_yang ]
    cmd_list += [ '-f' , 'pyimport' ]
    # NOTE(review): the '*.yang' glob relies on shell expansion, which is
    # presumably why the command is joined into one string and run with
    # shell=True below — confirm before changing to a list invocation.
    cmd_list += [ self . dir_yang + '/*.yang' ]
    logger . info ( 'Building dependencies: {}' . format ( ' ' . join ( cmd_list ) ) )
    p = Popen ( ' ' . join ( cmd_list ) , shell = True , stdout = PIPE , stderr = PIPE )
    stdout , stderr = p . communicate ( )
    logger . info ( 'pyang return code is {}' . format ( p . returncode ) )
    logger . debug ( stderr . decode ( ) )
    # Parse the plugin's XML (stdout) into the dependency tree.
    parser = etree . XMLParser ( remove_blank_text = True )
    self . dependencies = etree . XML ( stdout . decode ( ) , parser )
def option_group_exists ( name , tags = None , region = None , key = None , keyid = None , profile = None ) :
    '''Check to see if an RDS option group exists.

    CLI example::

        salt myminion boto_rds.option_group_exists myoptiongr region=us-east-1
    '''
    conn = _get_conn ( region = region , key = key , keyid = keyid , profile = profile )
    try :
        response = conn . describe_option_groups ( OptionGroupName = name )
    except ClientError as err :
        # Surface boto errors in the standard salt error format.
        return { 'error' : __utils__ [ 'boto3.get_error' ] ( err ) }
    return { 'exists' : bool ( response ) }
def set_number(self, key, value):
    """Store an integer value under ``key`` in the shared storage.

    Non-integer values are rejected with an error log and ``None`` is
    returned instead of storing anything.

    :param key: storage key.
    :param value: integer value to store.
    :return: the stored value, or ``None`` when ``value`` is not an int.
    """
    storage = self.storage
    if not isinstance(value, int):
        logger.error("set_number: Value must be an integer")
        return None
    # Use the lock as a context manager: the old try/finally called
    # lock.release() even when lock.acquire() itself raised, which would
    # release a lock this thread never held.
    with lock:
        try:
            storage[key] = value
        finally:
            # Flag the container as modified so it persists (appears to be
            # the ZODB `_p_changed` convention — set even on failure, as
            # the original did).
            self.storage._p_changed = True
    return storage[key]
def projects ( self ) :
    """*All child projects of this taskpaper object*

    **Usage:**

    Given a taskpaper document object (`doc`), to get a list of the project
    objects found within the document use:

    .. code-block:: python

        docProjects = doc.projects

    The same is true of project objects which may contain sub-projects:

    .. code-block:: python

        aProject = docProjects[0]
        subProjects = aProject.projects
    """
    # A project is a non-task line ending in ':' (excluding the special
    # '[Searches]' section and '- ' task lines), optionally followed by
    # @tags; its content is every following line that is blank or more
    # deeply indented. Delegates matching/construction to _get_object.
    return self . _get_object ( regex = re . compile ( r'((?<=\n)|(?<=^))(?P<title>(?!\[Searches\]|- )\S.*?:(?!\S)) *(?P<tagString>( *?@[^(\s]+(\([^)]*\))?)+)?(?P<content>(\n(( |\t)+\S.*)|\n( |\t)*|\n)+)' , re . UNICODE ) , objectType = "project" , content = None )
def load ( self , fname ) :
    """Load an ASCII text-file grid into ``self``.

    Each line of the file is one grid row and each character one tile
    (passed to ``set_tile``); reading stops at the first blank line.

    NOTE(review): despite the comment below, the grid dimensions are
    hard-coded to 4x4 rather than derived from the file — rows/columns
    beyond that are still forwarded to ``set_tile``; confirm intended size.

    :param fname: path of the grid file to read.
    """
    # get height and width of grid from file
    self . grid_width = 4
    self . grid_height = 4
    # Reset the grid to zeros before loading the file contents.
    self . grid = [ [ 0 for dummy_l in range ( self . grid_width ) ] for dummy_l in range ( self . grid_height ) ]
    with open ( fname , 'r' ) as f :
        for row_num , row in enumerate ( f ) :
            # A blank line marks the end of the grid data.
            if row . strip ( '\n' ) == '' :
                break
            for col_num , col in enumerate ( row . strip ( '\n' ) ) :
                self . set_tile ( row_num , col_num , col )
def _fromdata(self, code, dtype, count, value, name=None):
    """Populate this tag's fields from raw TIFF tag arguments.

    ``name`` falls back to the stringified code when empty/None, and
    ``dtype`` is translated through the TIFF_DATA_TYPES lookup table.
    """
    self.code = int(code)
    # Keep the original truthiness fallback: empty names also fall back.
    self.name = name or str(code)
    self.dtype = TIFF_DATA_TYPES[dtype]
    self.count = int(count)
    self.value = value
def hex_color_to_rgba(hex_color, normalize_to=255):
    '''Convert a hex color string (``"#RGB[A]"`` or ``"#RRGGBB[AA]"``) into an
    RGBA tuple (``(<r>, <g>, <b>, <a>)``).

    Args:
        hex_color (str): hex-formatted color (e.g., ``"#2fc"``, ``"#3c2f8611"``).
        normalize_to (int, float): factor each channel is normalized to.

    Returns:
        (tuple): RGBA tuple where each present channel lies in
        ``[0, normalize_to]``; a missing alpha channel yields ``None``.

    Raises:
        ValueError: if ``hex_color`` matches neither format.
    '''
    one_digit_pattern = (r'#(?P<R>[\da-fA-F])(?P<G>[\da-fA-F])'
                         r'(?P<B>[\da-fA-F])(?P<A>[\da-fA-F])?')
    two_digit_pattern = (r'#(?P<R>[\da-fA-F]{2})(?P<G>[\da-fA-F]{2})'
                         r'(?P<B>[\da-fA-F]{2})(?P<A>[\da-fA-F]{2})?')
    # Prefer the long '#rrggbb[aa]' form; fall back to '#rgb[a]'.
    match = re.match(two_digit_pattern, hex_color)
    channel_scale = 255
    if match is None:
        match = re.match(one_digit_pattern, hex_color)
        channel_scale = 15
    if match is None:
        raise ValueError('Color string must be in format #RGB[A] or '
                         '#RRGGBB[AA] (i.e., alpha channel is optional)')
    channels = match.groupdict()
    scale = normalize_to / channel_scale
    # Result channels take the type of `normalize_to` (int vs float).
    out_type = type(normalize_to)
    return tuple(out_type(int(digits, 16) * scale) if digits is not None
                 else None
                 for digits in (channels[k] for k in 'RGBA'))
def random_split(self, fraction, seed=None):
    """Randomly partition the rows of this SArray into two SArrays.

    The first output holds roughly ``fraction`` of the rows, sampled
    uniformly without replacement; the second holds the remainder.

    Parameters
    ----------
    fraction : float
        Approximate fraction of the rows to place in the first returned
        SArray. Must be between 0 and 1.
    seed : int, optional
        Seed for the random number generator used to split.

    Returns
    -------
    out : tuple [SArray]
        Two new SArrays.

    Examples
    --------
    >>> sa = turicreate.SArray(range(1024))
    >>> sa_train, sa_test = sa.random_split(.9, seed=5)
    >>> print(len(sa_train), len(sa_test))
    922 102
    """
    # Delegate to SFrame.random_split by wrapping this SArray in a
    # single-column frame, then unwrap that column from each half.
    from .sframe import SFrame
    wrapper = SFrame()
    wrapper['X1'] = self
    train_half, test_half = wrapper.random_split(fraction, seed)
    return (train_half['X1'], test_half['X1'])
def configure_rmq_ssl_on ( self , sentry_units , deployment , port = None , max_wait = 60 ) :
    """Turn the rabbitmq-server ssl charm config option on and confirm it.

    Optionally sets a non-default ssl port, then polls every unit until
    SSL is reported enabled or roughly ``max_wait`` seconds elapse.

    :param sentry_units: list of sentry units
    :param deployment: amulet deployment object pointer
    :param port: amqp port, use defaults if None
    :param max_wait: maximum time to wait in seconds to confirm
    :returns: None if successful. Raise on error.
    """
    self . log . debug ( 'Setting ssl charm config option: on' )
    # Enable RMQ SSL via charm config; include the port only when given.
    config = { 'ssl' : 'on' }
    if port :
        config [ 'ssl_port' ] = port
    deployment . d . configure ( 'rabbitmq-server' , config )
    # Wait for the cluster to settle before validating.
    self . rmq_wait_for_cluster ( deployment )
    # Poll every 4 seconds (max_wait / 4 attempts) until validation passes;
    # a truthy `ret` is a failure description, falsy means success.
    tries = 0
    ret = self . validate_rmq_ssl_enabled_units ( sentry_units , port = port )
    while ret and tries < ( max_wait / 4 ) :
        time . sleep ( 4 )
        self . log . debug ( 'Attempt {}: {}' . format ( tries , ret ) )
        ret = self . validate_rmq_ssl_enabled_units ( sentry_units , port = port )
        tries += 1
    if ret :
        amulet . raise_status ( amulet . FAIL , ret )
def update_pypsa_timeseries ( network , loads_to_update = None , generators_to_update = None , storages_to_update = None , timesteps = None ) :
    """Updates load, generator, storage and bus time series in pypsa network.

    See functions :func:`update_pypsa_load_timeseries`,
    :func:`update_pypsa_generator_timeseries`,
    :func:`update_pypsa_storage_timeseries`, and
    :func:`update_pypsa_bus_timeseries` for more information.

    Parameters
    ----------
    network : Network
        The eDisGo grid topology model overall container
    loads_to_update : :obj:`list`, optional
        List with all loads (of type :class:`~.grid.components.Load`) that
        need to be updated. If None all loads are updated depending on mode.
        See :meth:`~.tools.pypsa_io.to_pypsa` for more information.
    generators_to_update : :obj:`list`, optional
        List with all generators (of type
        :class:`~.grid.components.Generator`) that need to be updated. If
        None all generators are updated depending on mode. See
        :meth:`~.tools.pypsa_io.to_pypsa` for more information.
    storages_to_update : :obj:`list`, optional
        List with all storages (of type :class:`~.grid.components.Storage`)
        that need to be updated. If None all storages are updated depending
        on mode. See :meth:`~.tools.pypsa_io.to_pypsa` for more information.
    timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`
        Time steps of the load time series to export to the pypsa
        representation and use in power flow analysis. If None all time
        steps currently existing in the pypsa representation are updated.
        If not None current time steps are overwritten by the given ones.
        Default: None.
    """
    # Update each component class; all four share the same timesteps.
    update_pypsa_load_timeseries ( network , loads_to_update = loads_to_update , timesteps = timesteps )
    update_pypsa_generator_timeseries ( network , generators_to_update = generators_to_update , timesteps = timesteps )
    update_pypsa_storage_timeseries ( network , storages_to_update = storages_to_update , timesteps = timesteps )
    update_pypsa_bus_timeseries ( network , timesteps = timesteps )
    # Update pypsa snapshots; fall back to the time index already present
    # in the pypsa representation when no explicit timesteps were given.
    if timesteps is None :
        timesteps = network . pypsa . buses_t . v_mag_pu_set . index
    network . pypsa . set_snapshots ( timesteps )
def postprocess_alignment ( data ) :
    """Perform post-processing steps required on full BAM files.

    Prepares list of callable genome regions allowing subsequent
    parallelization, indexes the BAM, computes coverage/depth information
    and runs base quality recalibration.

    :param data: bcbio sample dictionary (possibly CWL-wrapped).
    :return: ``[[data]]`` with ``regions``/``depth`` keys populated.
    """
    # Normalize CWL-style inputs back into a single plain sample dict.
    data = cwlutils . normalize_missing ( utils . to_single_data ( data ) )
    data = cwlutils . unpack_tarballs ( data , data )
    bam_file = data . get ( "align_bam" ) or data . get ( "work_bam" )
    ref_file = dd . get_ref_file ( data )
    if vmulti . bam_needs_processing ( data ) and bam_file and bam_file . endswith ( ".bam" ) :
        # Work on a symlinked copy in the sample's align directory so the
        # original alignment file is left untouched.
        out_dir = utils . safe_makedir ( os . path . join ( dd . get_work_dir ( data ) , "align" , dd . get_sample_name ( data ) ) )
        bam_file_ready = os . path . join ( out_dir , os . path . basename ( bam_file ) )
        if not utils . file_exists ( bam_file_ready ) :
            utils . symlink_plus ( bam_file , bam_file_ready )
        bam . index ( bam_file_ready , data [ "config" ] )
        # Derive callable / no-call block regions from actual coverage.
        covinfo = callable . sample_callable_bed ( bam_file_ready , ref_file , data )
        callable_region_bed , nblock_bed = callable . block_regions ( covinfo . raw_callable , bam_file_ready , ref_file , data )
        data [ "regions" ] = { "nblock" : nblock_bed , "callable" : covinfo . raw_callable , "sample_callable" : covinfo . callable , "mapped_stats" : readstats . get_cache_file ( data ) }
        data [ "depth" ] = covinfo . depth_files
        data = coverage . assign_interval ( data )
        data = samtools . run_and_save ( data )
        data = recalibrate . prep_recal ( data )
        data = recalibrate . apply_recal ( data )
    elif dd . get_variant_regions ( data ) :
        # No BAM processing needed: fall back to the configured variant
        # regions for callable-region bookkeeping.
        callable_region_bed , nblock_bed = callable . block_regions ( dd . get_variant_regions ( data ) , bam_file , ref_file , data )
        data [ "regions" ] = { "nblock" : nblock_bed , "callable" : dd . get_variant_regions ( data ) , "sample_callable" : dd . get_variant_regions ( data ) }
    return [ [ data ] ]
def add_cmd_output(self, cmds, suggest_filename=None, root_symlink=None,
                   timeout=300, stderr=True, chroot=True, runat=None,
                   env=None, binary=False, sizelimit=None, pred=None):
    """Run a program or a list of programs and collect the output.

    ``cmds`` may be a single command string or a list of them; every other
    argument is forwarded unchanged to ``_add_cmd_output`` per command.
    """
    if isinstance(cmds, six.string_types):
        cmds = [cmds]
    # One filename/symlink cannot disambiguate several commands.
    if len(cmds) > 1 and (suggest_filename or root_symlink):
        self._log_warn("ambiguous filename or symlink for command list")
    # Fall back to the plugin-wide log size limit when none is given.
    if sizelimit is None:
        sizelimit = self.get_option("log_size")
    for command in cmds:
        self._add_cmd_output(command, suggest_filename=suggest_filename,
                             root_symlink=root_symlink, timeout=timeout,
                             stderr=stderr, chroot=chroot, runat=runat,
                             env=env, binary=binary, sizelimit=sizelimit,
                             pred=pred)
def generate_rsa_key_pair():
    """Create public and private ssh-keys.

    Returns:
        tuple: ``(public_key, pem)`` — the OpenSSH-formatted public key and
        the unencrypted PEM-encoded 2048-bit RSA private key, both as str.
    """
    private_key = rsa.generate_private_key(
        backend=default_backend(), public_exponent=65537, key_size=2048)
    public_key = private_key.public_key().public_bytes(
        serialization.Encoding.OpenSSH,
        serialization.PublicFormat.OpenSSH,
    ).decode("utf-8")
    pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode("utf-8")
    return public_key, pem
def modify ( self , service_name , json , ** kwargs ) :
    """Modify an existing AppNexus object via an HTTP PUT.

    :param service_name: name of the AppNexus service to target.
    :param json: JSON-serializable payload describing the modification.
    :return: whatever ``_send`` returns for the PUT request.
    """
    return self . _send ( requests . put , service_name , json , ** kwargs )
def raise_204(instance):
    """Abort the current request with a 204 (No Content) response code.

    Clears out the body of the response, since 204 responses must not
    carry content.

    :param instance: Resource instance (used to access the response)
    :type instance: :class:`webob.resource.Resource`
    :raises: :class:`webob.exceptions.ResponseException` of status 204
    """
    response = instance.response
    response.status = 204
    # Wipe both the rendered and the raw body.
    response.body = ''
    response.body_raw = None
    raise ResponseException(response)
def _get_app_auth_headers(self):
    """Build the auth headers for authenticating against GitHub as an App.

    Creates a short-lived (5 minute) JWT signed with the app's private key
    and returns it as a Bearer token alongside the preview Accept header.

    :return: dict of HTTP headers (``Accept`` and ``Authorization``).
    """
    now = datetime.now(timezone.utc)
    expiry = now + timedelta(minutes=5)
    data = {"iat": now, "exp": expiry, "iss": self.app_id}
    app_token = jwt.encode(data, self.app_key, algorithm="RS256")
    # PyJWT < 2 returns bytes while PyJWT >= 2 returns str; normalize so
    # this works with either version instead of crashing on .decode().
    if isinstance(app_token, bytes):
        app_token = app_token.decode("utf-8")
    headers = {
        "Accept": PREVIEW_JSON_ACCEPT,
        "Authorization": "Bearer {}".format(app_token),
    }
    return headers
def fix_facets(self):
    """Convert epoch-millisecond values in date_histogram facets to datetime.

    Walks ``self.facets`` and, for every facet typed ``date_histogram``,
    replaces non-numeric-summary entry values (anything except counts and
    aggregates) with ``datetime`` objects, unless already converted.
    """
    numeric_keys = ("count", "max", "min", "total_count", "mean", "total")
    facets = self.facets
    for facet_name in list(facets.keys()):
        if facets[facet_name].get("_type", "unknown") != "date_histogram":
            continue
        for entry in facets[facet_name].get("entries", []):
            for field, raw_value in list(entry.items()):
                if field in numeric_keys:
                    continue
                # Values arrive as epoch milliseconds; convert once.
                if not isinstance(entry[field], datetime):
                    entry[field] = datetime.utcfromtimestamp(raw_value / 1e3)
def finddefaultreference(self):
    """Find the default reference for text offsets.

    Walks up the parse tree and returns the second ancestor that is a
    Structure Element or Subtoken Annotation Element (i.e. the parent of
    the current textcontent's parent, counting only those element kinds).

    Note: this returns not a TextContent element but its parent; whether
    the textcontent actually exists is checked later/elsewhere.

    :return: the ancestor element, or ``False`` when no such ancestor exists.
    """
    depth = 0
    node = self
    while node.parent:
        node = node.parent
        if isinstance(node, (AbstractStructureElement, AbstractSubtokenAnnotation)):
            depth += 1
            if depth == 2:
                return node
    # Ran out of parents before reaching depth 2.
    return False
def save_figure ( self , event = None , transparent = False , dpi = 600 ) :
    """Save the current figure image to a file chosen via a wx dialog.

    :param event: wx event (unused; present for event-handler signature).
    :param transparent: passed through to matplotlib's savefig.
    :param dpi: output resolution in dots per inch.
    """
    file_choices = "PNG (*.png)|*.png|SVG (*.svg)|*.svg|PDF (*.pdf)|*.pdf"
    # Derive a default file name from the plot title, if any.
    # NOTE(review): bare except silently swallows any error here.
    try :
        ofile = self . conf . title . strip ( )
    except :
        ofile = 'Image'
    # Clamp length and substitute characters invalid in file names.
    if len ( ofile ) > 64 :
        ofile = ofile [ : 63 ] . strip ( )
    if len ( ofile ) < 1 :
        ofile = 'plot'
    for c in ' :";|/\\' :
        ofile = ofile . replace ( c , '_' )
    ofile = ofile + '.png'
    # The dialog may change the working directory; remember it to restore.
    orig_dir = os . path . abspath ( os . curdir )
    dlg = wx . FileDialog ( self , message = 'Save Plot Figure as...' , defaultDir = os . getcwd ( ) , defaultFile = ofile , wildcard = file_choices , style = wx . FD_SAVE | wx . FD_CHANGE_DIR )
    if dlg . ShowModal ( ) == wx . ID_OK :
        path = dlg . GetPath ( )
        # Prefer the figure object when present; fall back to the canvas.
        if hasattr ( self , 'fig' ) :
            self . fig . savefig ( path , transparent = transparent , dpi = dpi )
        else :
            self . canvas . print_figure ( path , transparent = transparent , dpi = dpi )
        # Show a path relative to the launch directory when applicable.
        if ( path . find ( self . launch_dir ) == 0 ) :
            path = path [ len ( self . launch_dir ) + 1 : ]
        self . write_message ( 'Saved plot to %s' % path )
    os . chdir ( orig_dir )
def _process_kill ( self ) :
"""Kill the fake executable process if it ' s still running .""" | if self . _process . poll ( ) is None : # pragma : no cover
self . _process . kill ( )
self . _process . wait ( timeout = 5 ) |
def sanitize_label(self, label):
    """Convert a label taken from user input into a well-formed label.

    @type label: str
    @param label: Label taken from user input.
    @rtype: str
    @return: Sanitized label.
    """
    # Fuzzy-split the input into its components, then rebuild canonically.
    module, function, offset = self.split_label_fuzzy(label)
    return self.parse_label(module, function, offset)
def get_service_by_name(self, service_name):
    """Look up and return a single service by its name."""
    payload = self._fetch("/service/search?name=%s" % service_name)
    return FastlyService(self, payload)
def FindUnspentCoinsByAsset(self, asset_id, from_addr=None, use_standard=False, watch_only_val=0):
    """Find unspent coin objects in the wallet limited to one asset type.

    Args:
        asset_id (UInt256): a bytearray (len 32) representing an asset on
            the blockchain.
        from_addr (UInt160): a bytearray (len 20) representing an address.
        use_standard (bool): whether or not to only include standard
            contracts (i.e. not a smart contract addr).
        watch_only_val (int): a flag (0 or 64) indicating whether or not to
            find coins that are in 'watch only' addresses.

    Returns:
        list: the ``neo.Wallet.Coin`` objects in the wallet that are not
        spent and whose output matches ``asset_id``.
    """
    unspent = self.FindUnspentCoins(from_addr=from_addr,
                                    use_standard=use_standard,
                                    watch_only_val=watch_only_val)
    matching = []
    for coin in unspent:
        if coin.Output.AssetId == asset_id:
            matching.append(coin)
    return matching
def init_huang ( X , n_clusters , dissim , random_state ) :
"""Initialize centroids according to method by Huang [ 1997 ] .""" | n_attrs = X . shape [ 1 ]
centroids = np . empty ( ( n_clusters , n_attrs ) , dtype = 'object' )
# determine frequencies of attributes
for iattr in range ( n_attrs ) :
freq = defaultdict ( int )
for curattr in X [ : , iattr ] :
freq [ curattr ] += 1
# Sample centroids using the probabilities of attributes .
# ( I assume that ' s what ' s meant in the Huang [ 1998 ] paper ; it works ,
# at least )
# Note : sampling using population in static list with as many choices
# as frequency counts . Since the counts are small integers ,
# memory consumption is low .
choices = [ chc for chc , wght in freq . items ( ) for _ in range ( wght ) ]
# So that we are consistent between Python versions ,
# each with different dict ordering .
choices = sorted ( choices )
centroids [ : , iattr ] = random_state . choice ( choices , n_clusters )
# The previously chosen centroids could result in empty clusters ,
# so set centroid to closest point in X .
for ik in range ( n_clusters ) :
ndx = np . argsort ( dissim ( X , centroids [ ik ] ) )
# We want the centroid to be unique , if possible .
while np . all ( X [ ndx [ 0 ] ] == centroids , axis = 1 ) . any ( ) and ndx . shape [ 0 ] > 1 :
ndx = np . delete ( ndx , 0 )
centroids [ ik ] = X [ ndx [ 0 ] ]
return centroids |
def validate(self, raw_data, **kwargs):
    """Validate that ``raw_data`` converts to a float.

    The converted float is passed to the parent validator, but the
    original raw value is returned on success.

    :raises ValidationException: if the conversion fails.
    """
    try:
        as_float = float(raw_data)
        super(FloatField, self).validate(as_float, **kwargs)
        return raw_data
    except ValueError:
        raise ValidationException(self.messages['invalid'], repr(raw_data))
def GetAnnotatedMethods(cls):
    """Return a dictionary of annotated router methods.

    Walks the MRO from base to derived so that annotations on classes with
    the highest call-order are processed last and take precedence.
    """
    result = {}
    for klass in reversed(inspect.getmro(cls)):
        for attr_name in compatibility.ListAttrs(klass):
            member = getattr(klass, attr_name)
            # Only callables carrying the HTTP-method annotation count.
            if not callable(member):
                continue
            if not hasattr(member, "__http_methods__"):
                continue
            result[attr_name] = RouterMethodMetadata(
                name=attr_name,
                doc=member.__doc__,
                args_type=getattr(member, "__args_type__", None),
                result_type=getattr(member, "__result_type__", None),
                category=getattr(member, "__category__", None),
                http_methods=getattr(member, "__http_methods__", set()),
                no_audit_log_required=getattr(
                    member, "__no_audit_log_required__", False))
    return result
def get_canonical_host(self):
    """Return the canonical host as for the Host HTTP header specification.

    The hostname is lower-cased; when a port is set it is appended as
    ``host:port``.
    """
    canonical = self.host.lower()
    if self.port is None:
        return canonical
    return "%s:%s" % (canonical, self.port)
def automodsumm_to_autosummary_lines(fn, app):
    """Generates lines from a file with an "automodsumm" entry suitable for
    feeding into "autosummary".

    Searches the provided file for `automodsumm` directives and returns
    a list of lines specifying the `autosummary` commands for the modules
    requested. This does *not* return the whole file contents - just an
    autosummary section in place of any :automodsumm: entries. Note that
    any options given for `automodsumm` are also included in the
    generated `autosummary` section.

    Parameters
    ----------
    fn : str
        The name of the file to search for `automodsumm` entries.
    app : sphinx.application.Application
        The sphinx Application object

    Returns
    -------
    lines : list of str
        Lines for all `automodsumm` entries with the entries replaced by
        `autosummary` and the module's members added.
    """
    fullfn = os.path.join(app.builder.env.srcdir, fn)
    with open(fullfn) as fr:
        if 'astropy_helpers.sphinx.ext.automodapi' in app._extensions:
            from astropy_helpers.sphinx.ext.automodapi import automodapi_replace
            # Must do the automodapi on the source to get the automodsumm
            # that might be in there
            docname = os.path.splitext(fn)[0]
            filestr = automodapi_replace(fr.read(), app, True, docname, False)
        else:
            filestr = fr.read()
    spl = _automodsummrex.split(filestr)
    # 0th entry is the stuff before the first automodsumm line
    indent1s = spl[1::5]
    mods = spl[2::5]
    opssecs = spl[3::5]
    indent2s = spl[4::5]
    remainders = spl[5::5]
    # only grab automodsumm sections and convert them to autosummary with the
    # entries for all the public objects
    newlines = []
    # loop over all automodsumms in this document
    for i, (i1, i2, modnm, ops, rem) in enumerate(zip(indent1s, indent2s,
                                                      mods, opssecs,
                                                      remainders)):
        allindent = i1 + ('' if i2 is None else i2)
        # filter out functions-only and classes-only options if present
        oplines = ops.split('\n')
        toskip = []
        allowedpkgnms = []
        funcsonly = clssonly = False
        # BUG FIX: this inner loop previously reused the variable `i`,
        # clobbering the outer directive index that the warning's
        # line-number computation below depends on.
        for opidx, ln in reversed(list(enumerate(oplines))):
            if ':functions-only:' in ln:
                funcsonly = True
                del oplines[opidx]
            if ':classes-only:' in ln:
                clssonly = True
                del oplines[opidx]
            if ':skip:' in ln:
                toskip.extend(_str_list_converter(ln.replace(':skip:', '')))
                del oplines[opidx]
            if ':allowed-package-names:' in ln:
                allowedpkgnms.extend(
                    _str_list_converter(ln.replace(':allowed-package-names:', '')))
                del oplines[opidx]
        if funcsonly and clssonly:
            msg = ('Defined both functions-only and classes-only options. '
                   'Skipping this directive.')
            # Recover the directive's line number from the split segments.
            lnnum = sum([spl[j].count('\n') for j in range(i * 5 + 1)])
            app.warn('[automodsumm]' + msg, (fn, lnnum))
            continue
        # Use the currentmodule directive so we can just put the local names
        # in the autosummary table. Note that this doesn't always seem to
        # actually "take" in Sphinx's eyes, so in `Automodsumm.run`, we have
        # to force it internally, as well.
        newlines.extend([i1 + '.. currentmodule:: ' + modnm,
                         '',
                         '.. autosummary::'])
        newlines.extend(oplines)
        ols = True if len(allowedpkgnms) == 0 else allowedpkgnms
        for nm, fqn, obj in zip(*find_mod_objs(modnm, onlylocals=ols)):
            if nm in toskip:
                continue
            if funcsonly and not inspect.isroutine(obj):
                continue
            if clssonly and not inspect.isclass(obj):
                continue
            newlines.append(allindent + nm)
        # add one newline at the end of the autosummary block
        newlines.append('')
    return newlines
def etag(self):
    """Get the ETag option of the message.

    :rtype: list
    :return: the ETag values or [] if not specified by the request
    """
    etag_number = defines.OptionRegistry.ETAG.number
    return [opt.value for opt in self.options if opt.number == etag_number]
def proximal_step(self, grad=None):
    """Take one proximal update step (gradient descent + regularization).

    When ``grad`` is not supplied it is computed via ``eval_grad``; the
    (possibly computed) gradient is returned so callers can reuse it.
    """
    if grad is None:
        grad = self.eval_grad()
    # Gradient-descent step from Y with step size 1/L, then apply the
    # proximal operator to obtain the new iterate X.
    descended = self.Y - (1. / self.L) * grad
    self.X = self.eval_proxop(descended)
    return grad
def register_setting ( name = None , label = None , editable = False , description = None , default = None , choices = None , append = False , translatable = False ) :
    """Register a setting that can be edited via the admin.

    This mostly equates to storing the given args as a dict in the
    ``registry`` dict by name.

    :param name: unique settings key (required).
    :param label: human-readable label; derived from ``name`` if omitted.
    :param editable: whether the value may be edited in the admin.
    :param description: help text shown in the admin.
    :param default: default value; required when ``editable`` is True.
    :param choices: optional iterable of allowed values.
    :param append: append ``default`` to an already-registered default
        instead of re-registering.
    :param translatable: whether the value is translatable.
    """
    if name is None :
        raise TypeError ( "yacms.conf.register_setting requires the " "'name' keyword argument." )
    if editable and default is None :
        raise TypeError ( "yacms.conf.register_setting requires the " "'default' keyword argument when 'editable' is True." )
    # append is True when called from an app ( typically external )
    # after the setting has already been registered , with the
    # intention of appending to its default value .
    if append and name in registry :
        registry [ name ] [ "default" ] += default
        return
    # If an editable setting has a value defined in the
    # project ' s settings . py module , it can ' t be editable , since
    # these lead to a lot of confusion once its value gets
    # defined in the db .
    if hasattr ( django_settings , name ) :
        editable = False
    if label is None :
        label = name . replace ( "_" , " " ) . title ( )
    # Python 2/3 compatibility . isinstance ( ) is overridden by future
    # on Python 2 to behave as Python 3 in conjunction with either
    # Python 2 ' s native types or the future . builtins types .
    # Order matters below: bool is a subclass of int, so it must be
    # checked first; Promise covers lazy translation strings.
    if isinstance ( default , bool ) : # Prevent bools treated as ints
        setting_type = bool
    elif isinstance ( default , int ) : # An int or long or subclass on Py2
        setting_type = int
    elif isinstance ( default , ( str , Promise ) ) : # A unicode or subclass on Py2
        setting_type = str
    elif isinstance ( default , bytes ) : # A byte - string or subclass on Py2
        setting_type = bytes
    else :
        setting_type = type ( default )
    registry [ name ] = { "name" : name , "label" : label , "editable" : editable , "description" : description , "default" : default , "choices" : choices , "type" : setting_type , "translatable" : translatable }
def _set_view ( self , v , load = False ) :
    """Setter method for view, mapped from YANG variable /snmp_server/view (list).

    Auto-generated pybind code for the brocade-snmp YANG model. If this
    variable is read-only (config: false) in the source YANG file, then
    _set_view is considered as a private method. Backends looking to
    populate this variable should do so via calling thisObj._set_view()
    directly.

    :param v: new value; unwrapped via ``v._utype`` when present.
    :param load: standard pybind setter flag (unused in this body).
    :raises ValueError: if ``v`` is not compatible with the YANG list type.
    """
    # Some pybind wrappers carry their underlying type in `_utype`; unwrap
    # before attempting validation/coercion below.
    if hasattr ( v , "_utype" ) :
        v = v . _utype ( v )
    try :
        t = YANGDynClass ( v , base = YANGListType ( "viewname mibtree" , view . view , yang_name = "view" , rest_name = "view" , parent = self , is_container = 'list' , user_ordered = False , path_helper = self . _path_helper , yang_keys = 'viewname mibtree' , extensions = { u'tailf-common' : { u'info' : u'view Define an SNMPv2 MIB view' , u'cli-suppress-key-sort' : None , u'cli-suppress-mode' : None , u'sort-priority' : u'26' , u'cli-suppress-list-no' : None , u'cli-suppress-key-abbreviation' : None , u'cli-incomplete-command' : None , u'callpoint' : u'snmpview' } } ) , is_container = 'list' , yang_name = "view" , rest_name = "view" , parent = self , path_helper = self . _path_helper , extmethods = self . _extmethods , register_paths = True , extensions = { u'tailf-common' : { u'info' : u'view Define an SNMPv2 MIB view' , u'cli-suppress-key-sort' : None , u'cli-suppress-mode' : None , u'sort-priority' : u'26' , u'cli-suppress-list-no' : None , u'cli-suppress-key-abbreviation' : None , u'cli-incomplete-command' : None , u'callpoint' : u'snmpview' } } , namespace = 'urn:brocade.com:mgmt:brocade-snmp' , defining_module = 'brocade-snmp' , yang_type = 'list' , is_config = True )
    except ( TypeError , ValueError ) :
        raise ValueError ( { 'error-string' : """view must be of a type compatible with list""" , 'defined-type' : "list" , 'generated-type' : """YANGDynClass(base=YANGListType("viewname mibtree",view.view, yang_name="view", rest_name="view", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='viewname mibtree', extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}), is_container='list', yang_name="view", rest_name="view", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'view Define an SNMPv2 MIB view', u'cli-suppress-key-sort': None, u'cli-suppress-mode': None, u'sort-priority': u'26', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'snmpview'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""" , } )
    self . __view = t
    # Notify the parent container that this leaf/list changed, when hooked.
    if hasattr ( self , '_set' ) :
        self . _set ( )
def add_star(self, component=None, **kwargs):
    """Shortcut to :meth:`add_component` with kind='star'."""
    # Explicit keyword wins over the positional `component` argument.
    if 'component' not in kwargs:
        kwargs['component'] = component
    return self.add_component('star', **kwargs)
def fail(self, err='MockupDB query failure', *args, **kwargs):
    """Reply to a query with the QueryFailure flag and an '$err' key.

    Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
    """
    # Merge the QueryFailure flag into any caller-supplied flags.
    flags = kwargs.setdefault('flags', 0)
    kwargs['flags'] = flags | REPLY_FLAGS['QueryFailure']
    kwargs['$err'] = err
    self.replies(*args, **kwargs)
    return True
def send_mail(to_addr, subj_msg, body_msg, attach_path, serv_addr, serv_port, from_addr, passwd):
    """Send an e-mail message using smtplib and email standard python libraries.

    IMPORTANT! Password is handled as plain text! Do NOT use with your
    personal account!

    Args:
        to_addr (str): Recipient address.
        subj_msg (str): Message subject.
        body_msg (str): Message body.
        attach_path (str or None): Optional path of a file to attach.
        serv_addr (str): Server's address. Default: <smtp.gmail.com>.
        serv_port (int): Server's port. Default: <587>.
        from_addr (str): Account address. Default: <headnode.notifiy@gmail.com>.
        passwd (str): Account password.
    """
    msg = MIMEMultipart()
    if attach_path is not None:
        # Attach the file as a base64-encoded octet-stream part.
        with open(attach_path, "rb") as fin:
            part = MIMEBase("application", "octet-stream")
            part.set_payload(fin.read())
        encoders.encode_base64(part)
        part.add_header("Content-Disposition",
                        "attachment; filename={0}".format(attach_path))
        msg.attach(part)
    msg["From"] = from_addr
    msg["To"] = to_addr
    msg["Subject"] = subj_msg
    msg.attach(MIMEText(body_msg, "plain"))
    server = smtplib.SMTP(serv_addr, serv_port)
    try:
        server.starttls()
        server.login(from_addr, passwd)
        server.sendmail(from_addr, to_addr, msg.as_string())
        # BUG FIX: 'server.quit' was previously referenced without being
        # called, so the SMTP session was never closed cleanly.
        server.quit()
    finally:
        # Ensure the socket is released even if login/send fails
        # (close() after a successful quit() is a harmless no-op).
        server.close()
def mask(x, indices):
    """Same as ``x[indices]``, but return an empty array when ``indices`` is
    empty (instead of returning all elements of ``x``), and handle sparse
    "vectors"."""
    if isinstance(indices, list):
        n_selected = len(indices)
        one_dimensional = True
    else:
        n_selected = indices.shape[0]
        one_dimensional = len(indices.shape) == 1
    if not n_selected:
        return np.array([])
    if is_sparse_vector(x) and one_dimensional:
        # Sparse "vector": select from the single row and densify.
        return x[0, indices].toarray()[0]
    return x[indices]
def strace_set_buffer_size(self, size):
    """Sets the STRACE buffer size.

    Args:
        self (JLink): the ``JLink`` instance.
        size (int): number of bytes to allocate for the STRACE buffer.

    Returns:
        ``None``

    Raises:
        JLinkException: on error.
    """
    buffer_size = ctypes.c_uint32(size)
    result = self._dll.JLINK_STRACE_Control(
        enums.JLinkStraceCommand.SET_BUFFER_SIZE, buffer_size)
    if result < 0:
        raise errors.JLinkException('Failed to set the STRACE buffer size.')
    return None
def _filter_ignored ( self , entries , selector = None ) :
"""Given an opaque entry list , filter any ignored entries .
: param entries : A list or generator that produces entries to filter .
: param selector : A function that computes a path for an entry relative to the root of the
ProjectTree , or None to use identity .""" | selector = selector or ( lambda x : x )
prefixed_entries = [ ( self . _append_slash_if_dir_path ( selector ( entry ) ) , entry ) for entry in entries ]
ignored_paths = set ( self . ignore . match_files ( path for path , _ in prefixed_entries ) )
return [ entry for path , entry in prefixed_entries if path not in ignored_paths ] |
def in1d_events(ar1, ar2):
    """Does the same as np.in1d but exploits the fact that ar1 and ar2 are
    sorted and uses the c++ library; it is therefore much, much faster."""
    # The c++ library requires C-contiguous memory layout.
    first = np.ascontiguousarray(ar1)
    second = np.ascontiguousarray(ar2)
    # Result buffer filled by the c++ library; bool is not supported with
    # cython/numpy, hence uint8.
    result = np.empty_like(first, dtype=np.uint8)
    return analysis_functions.get_in1d_sorted(first, second, result)
def next(self) -> mx.io.DataBatch:
    """Returns the next batch from the data iterator.

    Raises:
        StopIteration: once all batches for the current pass have been
            returned (as signalled by ``iter_next()``).
    """
    if not self.iter_next():
        raise StopIteration
    # (i, j): bucket index and start offset of this batch inside that bucket.
    i, j = self.batch_indices[self.curr_batch_index]
    self.curr_batch_index += 1
    batch_size = self.bucket_batch_sizes[i].batch_size
    source = self.data.source[i][j:j + batch_size]
    target = self.data.target[i][j:j + batch_size]
    data = [source, target]
    label = [self.data.label[i][j:j + batch_size]]
    # Shape descriptors must match the order of data_names / label_names.
    provide_data = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for n, x in zip(self.data_names, data)]
    provide_label = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for n, x in zip(self.label_names, label)]
    # TODO: num pad examples is not set here if fillup policy would be padding
    return mx.io.DataBatch(data, label, pad=0, index=None, bucket_key=self.buckets[i], provide_data=provide_data, provide_label=provide_label)
def parse(cls, op):
    """Gets the enum for the op code.

    Args:
        op: value of the op code (will be cast to int; a non-numeric value
            raises ValueError, as before)

    Returns:
        The enum member whose value matches the op code, or None when no
        member has that value.
    """
    # Cast first so that a non-numeric ``op`` still raises ValueError,
    # exactly like the original ``int(op)`` inside the loop did.
    value = int(op)
    try:
        # Enum value lookup replaces the original linear scan over members.
        return cls(value)
    except ValueError:
        return None
def genome_info(genome, info):
    """Return genome info for choosing a representative.

    If a ggKbase table is provided, choose the representative based on SCGs
    and genome length: priority for most SCGs minus extra (duplicate) SCGs,
    then largest genome. Otherwise, choose based on largest genome alone.

    :param genome: genome identifier
    :param info: dict of genome stats; the SCG columns may be absent
    :return: sort-key list (either ``[scg - dups, length, genome]`` or the
        fallback ``[False, False, length, genome]``)
    """
    try:
        scg = info['#SCGs']
        dups = info['#SCG duplicates']
        length = info['genome size (bp)']
        return [scg - dups, length, genome]
    except (KeyError, TypeError):
        # BUG FIX: was a bare ``except`` that swallowed every error.  Only the
        # absence of the SCG columns (KeyError) or non-numeric values
        # (TypeError) should trigger the size-only fallback.
        return [False, False, info['genome size (bp)'], genome]
def set_environment(self, name, value):
    """Set environment ``$tmux set-environment <name> <value>``.

    Parameters
    ----------
    name : str
        the environment variable name. such as 'PATH'.
    value : str
        environment value.

    Raises
    ------
    ValueError
        when tmux reports anything on stderr.
    """
    cmd_args = ['set-environment']
    if self._add_option:
        cmd_args.append(self._add_option)
    cmd_args.extend([name, value])
    proc = self.cmd(*cmd_args)
    stderr = proc.stderr
    if stderr:
        # Collapse a single-line stderr list to the bare string.
        if isinstance(stderr, list) and len(stderr) == 1:
            proc.stderr = stderr[0]
        raise ValueError('tmux set-environment stderr: %s' % proc.stderr)
def make_declarative_base(self, model, metadata=None):
    """Creates the declarative base that all models will inherit from.

    :param model: base model class (or a tuple of base classes) to pass
        to :func:`~sqlalchemy.ext.declarative.declarative_base`. Or a class
        returned from ``declarative_base``, in which case a new base class
        is not created.
    :param metadata: :class:`~sqlalchemy.MetaData` instance to use, or
        None to use SQLAlchemy's default.

    .. versionchanged:: 2.3.0
        ``model`` can be an existing declarative base in order to support
        complex customization such as changing the metaclass.
    """
    if not isinstance(model, DeclarativeMeta):
        # ``model`` is a plain class (or tuple): wrap it in a fresh
        # declarative base using this extension's default metaclass.
        model = declarative_base(cls=model, name='Model', metadata=metadata, metaclass=DefaultMeta)
    # if user passed in a declarative base and a metaclass for some reason,
    # make sure the base uses the metaclass
    if metadata is not None and model.metadata is not metadata:
        model.metadata = metadata
    # Attach the query machinery only if the base doesn't already carry one.
    if not getattr(model, 'query_class', None):
        model.query_class = self.Query
    model.query = _QueryProperty(self)
    return model
def run(self, redirects=()):
    """Runs the pipelines with the specified redirects and returns
    a RunningPipeline instance.

    :param redirects: a :class:`redir.Redirects` instance, or an iterable of
        redirect specifications (empty by default).
    :return: a :class:`RunningPipeline` wrapping the started processes.
    """
    # BUG FIX (best practice): the default used to be a shared mutable list
    # (``redirects=[]``); an immutable empty tuple is behaviorally identical
    # here and cannot leak state between calls.
    if not isinstance(redirects, redir.Redirects):
        redirects = redir.Redirects(self._env._redirects, *redirects)
    with copy.copy_session() as sess:
        # Deep-copy ``self`` so the running pipeline owns an isolated copy.
        self = copy.deepcopy(self)
        processes = self._run(redirects, sess)
        pipeline = RunningPipeline(processes, self)
        self._env.last_pipeline = pipeline
        return pipeline
def unregisterChecker(self, checker):
    """Remove a checker from the list of registered checkers.

    @param checker: the checker to remove
    """
    self.linter._checkers[checker.name].remove(checker)
    reports = self.linter._reports
    if checker in reports:
        del reports[checker]
    providers = self.linter.options_providers
    if checker in providers:
        providers.remove(checker)
def lexeme(p):
    """From a parser (or string), make a parser that consumes
    whitespace on either side."""
    parser = string(p) if isinstance(p, str) else p
    return regex(r'\s*') >> parser << regex(r'\s*')
def _print_result ( case , summary ) :
"""Show some statistics from the run""" | for case , case_data in summary . items ( ) :
for dof , data in case_data . items ( ) :
print ( " " + case + " " + dof )
print ( " -------------------" )
for header , val in data . items ( ) :
print ( " " + header + " : " + str ( val ) )
print ( "" ) |
def ostree_init(self, release):
    """Initialize the OSTree for a release.

    Creates the parent directory of the release's output dir when missing,
    then runs the configured ostree_init command in the mock chroot if the
    output dir itself does not exist yet.

    :param release: release config dict providing 'output_dir' and
        'ostree_init'.
    """
    out = release['output_dir'].rstrip('/')
    base = os.path.dirname(out)
    if not os.path.isdir(base):
        self.log.info('Creating %s', base)
        # BUG FIX: ``0755`` is Python-2-only octal syntax (a SyntaxError on
        # Python 3); ``0o755`` is valid on Python 2.6+ and Python 3.
        os.makedirs(base, mode=0o755)
    if not os.path.isdir(out):
        self.mock_chroot(release, release['ostree_init'])
def fit(self):
    """Fit MCMC AgeDepthModel.

    Runs the configured MCMC setup, applies burn-in, and caches the depth
    grid plus the per-depth age ensemble on the instance.
    """
    self._mcmcfit = self.mcmcsetup.run()
    self._mcmcfit.burnin(self.burnin)
    # Depth range covered by the fitted segments.
    dmin = min(self._mcmcfit.depth_segments)
    dmax = max(self._mcmcfit.depth_segments)
    # NOTE(review): ``self.mcmcfit`` / ``self.depth`` are presumably
    # properties over the private attributes assigned here -- confirm.
    self._thick = (dmax - dmin) / len(self.mcmcfit.depth_segments)
    self._depth = np.arange(dmin, dmax + 0.001)
    # Evaluate the age model at every depth in the grid.
    self._age_ensemble = np.array([self.agedepth(d=dx) for dx in self.depth])
def hoveredItem(self):
    """Returns the currently hovered item.

    :return <QtGui.QTreeWidgetItem> || None
    """
    hovered_ref = self._hoveredItem
    if hovered_ref is None:
        return None
    item = hovered_ref()
    if item is None:
        # The weak reference died; drop it so we stop resolving it.
        self._hoveredItem = None
    return item
def build_policy(self, name, statements, roles, is_managed_policy=False):
    """Generate policy for IAM cloudformation template.

    :param name: Name of the policy
    :param statements: The "rules" the policy should have
    :param roles: The roles associated with this policy
    :param is_managed_policy: True if managed policy
    :return: Ref to new policy
    """
    stripped_name = self.name_strip(name, True)
    document = {"Version": self.VERSION_IAM, "Statement": statements}
    if is_managed_policy:
        policy = ManagedPolicy(
            stripped_name,
            PolicyDocument=document,
            Roles=roles,
            Path=self.__role_path,
        )
    else:
        policy = PolicyType(
            stripped_name,
            PolicyName=stripped_name,
            PolicyDocument=document,
            Roles=roles,
        )
    self.__template.add_resource(policy)
    return policy
def del_data(name=None):
    """This function will delete tplot variables that are already stored in
    memory.

    Parameters:
        name : str
            Name of the tplot variable to be deleted. If no name is provided,
            then all tplot variables will be deleted. Names containing '?' or
            '*' are treated as glob patterns and matched against the stored
            variable names.

    Returns:
        None

    Examples:
        >>> # Delete Variable 1
        >>> import pytplot
        >>> pytplot.del_data("Variable1")
    """
    if name is None:
        # Delete everything; iterate over a snapshot since we mutate the dict.
        for key in list(data_quants.keys()):
            del data_quants[key]
        return
    if not isinstance(name, list):
        name = [name]
    for pattern in name:
        if ('?' in pattern) or ('*' in pattern):
            # Collect the matching variable names first, then delete, so we
            # never mutate the dict while iterating it.
            matches = [data_quants[key].name for key in data_quants
                       if fnmatch.fnmatch(data_quants[key].name, pattern)]
            for match in matches:
                if match in data_quants:
                    del data_quants[match]
        elif pattern not in data_quants:
            # BUG FIX: the original ``return``ed here, silently skipping
            # every remaining name in the list; report and keep going.
            print(str(pattern) + " is currently not in pytplot.")
        else:
            del data_quants[data_quants[pattern].name]
def parse(self, output):
    """Find stems for a given text."""
    lines_with_stems = self._get_lines_with_stems(output)
    unique_words = self._make_unique(lines_with_stems)
    return self._parse_for_simple_stems(unique_words)
def save_json(data, path, fatal=True, logger=None, sort_keys=True, indent=2, **kwargs):
    """Serialize ``data`` as json and save it to ``path``.

    Args:
        data (object | None): Data to serialize and save
        path (str | None): Path to file where to save
        fatal (bool | None): Abort execution on failure if True
        logger (callable | None): Logger to use
        sort_keys (bool): Save json with sorted keys
        indent (int): Indentation to use
        **kwargs: Passed through to `json.dump()`

    Returns:
        (int): 1 if saved, -1 if failed (when `fatal` is False)
    """
    if data is None or not path:
        return abort("No file %s", short(path), fatal=fatal)
    try:
        path = resolved_path(path)
        ensure_folder(path, fatal=fatal, logger=None)
        if is_dryrun():
            LOG.info("Would save %s", short(path))
            return 1
        payload = data.to_dict() if hasattr(data, "to_dict") else data
        if indent:
            kwargs.setdefault("separators", (",", ': '))
        with open(path, "wt") as fh:
            json.dump(payload, fh, sort_keys=sort_keys, indent=indent, **kwargs)
            fh.write("\n")
        if logger:
            logger("Saved %s", short(path))
        return 1
    except Exception as exc:
        return abort("Couldn't save %s: %s", short(path), exc, fatal=(fatal, -1))
def coerce_repository(value, context=None):
    """Convert a string (taken to be a repository name or location) to a :class:`Repository` object.

    :param value: The name or location of a repository (a string) or a
        :class:`Repository` object.
    :param context: An execution context created by :mod:`executor.contexts`
        (defaults to :class:`executor.contexts.LocalContext`).
    :returns: A :class:`Repository` object.
    :raises: :exc:`~exceptions.ValueError` when the given value is not a string
        or a :class:`Repository` object or if the value is a string but
        doesn't match the name of any configured repository and also can't
        be parsed as the location of a repository.

    The :func:`coerce_repository()` function creates :class:`Repository` objects:

    1. If the value is already a :class:`Repository` object it is returned to
       the caller untouched.
    2. If the value is accepted by :func:`find_configured_repository()` then
       the resulting :class:`Repository` object is returned.
    3. If the value is a string that starts with a known VCS type prefix (e.g.
       ``hg+https://bitbucket.org/ianb/virtualenv``) the prefix is removed
       from the string and a :class:`Repository` object is returned:

       - If the resulting string points to an existing local directory it
         will be used to set :attr:`~Repository.local`.
       - Otherwise the resulting string is used to set
         :attr:`~Repository.remote`.

    4. If the value is a string pointing to an existing local directory, the
       VCS type is inferred from the directory's contents and a
       :class:`Repository` object is returned whose :attr:`~Repository.local`
       property is set to the local directory.
    5. If the value is a string that ends with ``.git`` (a common idiom for
       git repositories) a :class:`Repository` object is returned:

       - If the value points to an existing local directory it will be used
         to set :attr:`~Repository.local`.
       - Otherwise the value is used to set :attr:`~Repository.remote`.
    """
    # Coerce the context argument.
    context = context or LocalContext()
    # (1) Repository objects pass through untouched.
    if isinstance(value, Repository):
        return value
    # We expect a string with a name or URL.
    if not isinstance(value, string_types):
        msg = "Expected string or Repository object as argument, got %s instead!"
        raise ValueError(msg % type(value))
    # (2) If the string matches the name of a configured repository we'll return that.
    try:
        return find_configured_repository(value)
    except NoSuchRepositoryError:
        pass
    # (3) Parse and try to resolve the VCS type prefix.
    vcs_type, _, location = value.partition('+')
    if vcs_type and location:
        # The key is 'local' when the location exists on disk, 'remote' otherwise.
        kw = {'context': context, 'local' if context.exists(location) else 'remote': location, }
        try:
            return repository_factory(vcs_type, **kw)
        except UnknownRepositoryTypeError:
            pass
    # (4) Try to infer the type of an existing local repository.
    for cls in load_backends():
        if cls.contains_repository(context, value):
            return repository_factory(cls, context=context, local=value)
    # (5) Check for locations that end with `.git' (a common idiom for remote
    # git repositories) even if the location isn't prefixed with `git+'.
    if value.endswith('.git'):
        # NOTE(review): imported here, presumably to avoid a circular
        # import at module load time -- confirm.
        from vcs_repo_mgr.backends.git import GitRepo
        return repository_factory(GitRepo, **{'context': context, 'local' if context.exists(value) else 'remote': value, })
    # If all else fails, at least give a clear explanation of the problem.
    msg = ("The string %r doesn't match the name of any configured repository" " and it also can't be parsed as the location of a remote" " repository! (maybe you forgot to prefix the type?)")
    raise ValueError(msg % value)
def _prerun(self):
    """To execute before running message."""
    self.check_required_params()
    self._set_status("RUNNING")
    debug_text = "{}.PreRun: {}[{}]: running...".format(
        self.__class__.__name__, self.__class__.path, self.uuid)
    kafka_message = Message(
        self.uuid, entrypoint=self.__class__.path, params=self.params)
    logger.debug(debug_text, extra=dict(kmsg=kafka_message.dump()))
    return self.prerun()
def _send_chunk ( self , chunk , chunk_num ) :
"""Send a single chunk to the remote service .
: param chunk : bytes data we are uploading
: param chunk _ num : int number associated with this chunk""" | url_info = self . upload_operations . create_file_chunk_url ( self . upload_id , chunk_num , chunk )
self . upload_operations . send_file_external ( url_info , chunk ) |
def to_file(self, f):
    """Saves the history as a json file. In order to use this feature,
    the history must only contain JSON encodable Python data structures.
    Numpy and PyTorch types should not be in the history.

    Parameters
    ----------
    f : file-like object or str
    """
    with open_file_like(f, 'w') as out_stream:
        json.dump(self.to_list(), out_stream)
def _ParseContainerLogJSON(self, parser_mediator, file_object):
    """Extract events from a Docker container log files.

    The format is one JSON formatted log message per line.

    The path of each container log file (which logs the container stdout and
    stderr) is:
    DOCKER_DIR/containers/<container_id>/<container_id>-json.log

    Args:
        parser_mediator (ParserMediator): mediates interactions between parsers
            and other components, such as storage and dfvfs.
        file_object (dfvfs.FileIO): a file-like object.
    """
    # The container identifier is encoded in the log file's path.
    container_id = self._GetIdentifierFromPath(parser_mediator)
    text_file_object = text_file.TextFile(file_object)
    for log_line in text_file_object:
        json_log_line = json.loads(log_line)
        time = json_log_line.get('time', None)
        if not time:
            # A line without a timestamp cannot produce an event.
            continue
        event_data = DockerJSONContainerLogEventData()
        event_data.container_id = container_id
        event_data.log_line = json_log_line.get('log', None)
        event_data.log_source = json_log_line.get('stream', None)
        # TODO: pass line number to offset or remove.
        event_data.offset = 0
        timestamp = timelib.Timestamp.FromTimeString(time)
        event = time_events.TimestampEvent(timestamp, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
def add_member(self, urls):
    """Add a member into the cluster.

    :param urls: peer URLs for the new member
    :returns: new member
    :rtype: :class:`.Member`
    """
    request = etcdrpc.MemberAddRequest(peerURLs=urls)
    response = self.clusterstub.MemberAdd(
        request,
        self.timeout,
        credentials=self.call_credentials,
        metadata=self.metadata)
    added = response.member
    return etcd3.members.Member(
        added.ID,
        added.name,
        added.peerURLs,
        added.clientURLs,
        etcd_client=self)
def is_multi_timeseries_orthogonal(nc, variable):
    '''Returns true if the variable is a orthogonal multidimensional array
    representation of time series. For more information on what this means see
    CF 1.6 §H.2.1

    http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_orthogonal_multidimensional_array_representation_of_time_series

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    # Expected layout: x(i), y(i), z(i), t(o) with X(i, o).
    dims = nc.variables[variable].dimensions
    cmatrix = coordinate_dimension_matrix(nc)
    if any(axis not in cmatrix for axis in ('x', 'y', 't')):
        return False
    x_dims = cmatrix['x']
    if len(x_dims) != 1 or x_dims != cmatrix['y']:
        return False
    if 'z' in cmatrix and x_dims != cmatrix['z']:
        return False
    timevar = get_time_variable(nc)
    if cmatrix['t'] != (timevar,):
        return False
    instance_dim = x_dims[0]
    obs_dim = cmatrix['t'][0]
    return dims == (instance_dim, obs_dim)
def resource(ref: str, delimiter: str = None) -> str:
    """Given a (URI) reference, return up to its delimiter (exclusively), or all of it if there is none.

    :param ref: reference
    :param delimiter: delimiter character (default None maps to '#', or ';' introduces identifiers)
    """
    sep = delimiter or '#'
    head, _, _ = ref.partition(sep)
    return head
def tupletree(table, start='start', stop='stop', value=None):
    """Construct an interval tree for the given table, where each node in the
    tree is a row of the table."""
    import intervaltree
    tree = intervaltree.IntervalTree()
    rows = iter(table)
    hdr = next(rows)
    fields = [text_type(f) for f in hdr]
    assert start in fields, 'start field not recognised'
    assert stop in fields, 'stop field not recognised'
    getstart = itemgetter(fields.index(start))
    getstop = itemgetter(fields.index(stop))
    if value is None:
        # Store the whole row (as a tuple) when no value fields were given.
        getvalue = tuple
    else:
        value_indices = asindices(hdr, value)
        assert len(value_indices) > 0, 'invalid value field specification'
        getvalue = itemgetter(*value_indices)
    for row in rows:
        tree.addi(getstart(row), getstop(row), getvalue(row))
    return tree
def generate_cmd_string(self, cmd, *args, **kwargs):
    """Build a full terraform command line as a list of strings.

    For any generate_cmd_string not written as a public method of terraform.

    Examples:
        1. import command (https://www.terraform.io/docs/commands/import.html):
           terraform import -input=true aws_instance.foo i-abcd1234
           --> tf.generate_cmd_string('import', 'aws_instance.foo', 'i-abcd1234', input=True)
        2. apply command:
           terraform apply -var='a=b' -var='c=d' -no-color the_folder
           --> tf.generate_cmd_string('apply', the_folder, no_color=IsFlagged, var={'a': 'b', 'c': 'd'})

    :param cmd: command and sub-command of terraform, separated with space
        (refer to https://www.terraform.io/docs/commands/index.html)
    :param args: arguments of a command
    :param kwargs: same as kwargs in method 'cmd'
    :return: list of strings forming a valid terraform command
    """
    cmds = [self.terraform_bin_path] + cmd.split()
    for option, value in kwargs.items():
        if '_' in option:
            option = option.replace('_', '-')
        # NOTE: exact type checks (not isinstance) are kept deliberately to
        # preserve the original dispatch behavior.
        if type(value) is list:
            cmds.extend('-{k}={v}'.format(k=option, v=item) for item in value)
        elif type(value) is dict:
            if 'backend-config' in option:
                cmds.extend('-backend-config={k}={v}'.format(k=bk, v=bv)
                            for bk, bv in value.items())
            else:
                # A map sent inline as a string won't work; write the
                # variables to a temp var file (cleaned up later).
                filename = self.temp_var_files.create(value)
                cmds.append('-var-file={0}'.format(filename))
        elif value is IsFlagged:
            # Simple flag, no value.
            cmds.append('-{k}'.format(k=option))
        elif value is None or value is IsNotFlagged:
            pass
        else:
            if type(value) is bool:
                value = 'true' if value else 'false'
            cmds.append('-{k}={v}'.format(k=option, v=value))
    cmds += list(args)
    return cmds
def from_dict(self, dirent):
    """Create a new FilePermissions object from the given dictionary. This
    works with the FileListing parser class, which has already done the
    hard work of pulling many of these fields out. We create an object
    with all the dictionary keys available as properties, and also split
    the ``perms`` string up into owner, group.
    """
    # Check that we have at least as much data as the __init__ requires.
    required = ('perms', 'owner', 'group', 'name', 'dir')
    missing = [key for key in required if key not in dirent]
    if missing:
        raise ValueError("Need required key '{k}'".format(k=missing[0]))
    # Copy all values across.
    for key, val in dirent.items():
        setattr(self, key, val)
    # Create perms parts.
    perms = self.perms
    self.perms_owner = perms[0:3]
    self.perms_group = perms[3:6]
    self.perms_other = perms[6:9]
    return self
def unused(self):
    """Return unused user-created elements.

    :rtype: list(Element)
    """
    search_href = self._resource.get('search_unused')
    self._params.update(href=search_href)
    return self
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.