signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def custom(srcpaths, event_cb=None, poll_interval=1, recurse=True,
           restart_cb=None, restart_func=None, close_fds=True):
    """Set up lazarus in custom mode.

    Watches all modules rooted at any of the given source paths and
    restarts the process when they change.  See :py:func:`default` for a
    simpler mode of use.

    Keyword arguments:
    event_cb -- Callback invoked when a file rooted at a source path
        changes.  Returning a truthy value triggers a restart; without an
        event callback, any change triggers a restart.
    poll_interval -- Rate at which changes are detected.  The default of
        ``1`` means it may take up to one second to detect changes;
        decreasing this value may lead to unnecessary overhead.
    recurse -- Whether to watch all subdirectories of every source path
        or only the paths provided.
    restart_cb -- Callback invoked prior to restarting; returning
        anything other than *None* cancels the restart.
    restart_func -- Function invoked to restart the process, supplanting
        the default behavior of using *sys.executable* and *sys.argv*.
    close_fds -- Whether all file descriptors other than *stdin*,
        *stdout* and *stderr* should be closed.

    Raises:
    RuntimeWarning -- if lazarus is already active.
    TypeError -- if restart_cb or restart_func is not callable.
    RuntimeError -- if the watchdog package is unavailable.
    """
    if _active:
        msg = 'lazarus is already active'
        raise RuntimeWarning(msg)
    if restart_cb and not callable(restart_cb):
        msg = 'restart_cb keyword argument is not callable'
        raise TypeError(msg)
    if restart_func and not callable(restart_func):
        msg = 'restart_func keyword argument is not callable'
        raise TypeError(msg)
    global _close_fds
    _close_fds = close_fds
    try:
        from watchdog.observers import Observer
        from watchdog.events import FileSystemEventHandler
    except ImportError as ie:
        msg = 'no watchdog support (%s)' % str(ie)
        raise RuntimeError(msg)

    class _Handler(FileSystemEventHandler):

        def __init__(self):
            self.active = True

        def dispatch(self, event):
            # Stop dispatching once a restart has been scheduled.
            if not self.active:
                return
            super(_Handler, self).dispatch(event)

        def all_events(self, event):
            # If the caller wants event_cb control, defer the restart
            # decision to them; otherwise fall back to the default
            # is_restart_event() logic.
            if event_cb:
                if event_cb(event):
                    cancelled = _restart()
                    if not cancelled:
                        self.active = False
            elif is_restart_event(event):
                cancelled = _restart()
                if not cancelled:
                    self.active = False
            # BUG FIX: the original unconditionally set self.active = False
            # here, which deactivated the handler after the first event of
            # any kind -- even events that did not trigger a restart, or
            # restarts that a restart_cb cancelled.

        def on_created(self, event):
            self.all_events(event)

        def on_deleted(self, event):
            self.all_events(event)

        def on_modified(self, event):
            self.all_events(event)

        def on_moved(self, event):
            self.all_events(event)

    global _observer
    kwargs = {'timeout': poll_interval}
    _observer = Observer(**kwargs)
    global _restart_cb
    _restart_cb = restart_cb
    handler = _Handler()
    srcpaths = _as_list(srcpaths)
    kwargs = {}
    if recurse:
        kwargs['recursive'] = True
    for srcpath in srcpaths:
        _observer.schedule(handler, srcpath, **kwargs)
    _activate()
    _observer.start()
|
def sentences(ctx, input, output):
    """Read the input document and write one sentence per line to output."""
    log.info('chemdataextractor.read.elements')
    log.info('Reading %s' % input.name)
    document = Document.from_file(input)
    # Only Text elements carry raw sentences.
    text_elements = (e for e in document.elements if isinstance(e, Text))
    for element in text_elements:
        for raw_sentence in element.raw_sentences:
            output.write(raw_sentence.strip())
            output.write(u'\n')
|
def set_password_prompt(self, regex=None):
    """Define a pattern used to monitor the connected host's response for
    a password prompt.

    :type regex: RegEx
    :param regex: The pattern that, when matched, causes an error.
        Passing None clears the manual password pattern.
    """
    self.manual_password_re = None if regex is None else to_regexs(regex)
|
def display(self):
    """Display the symbol table content as an aligned text table."""
    # Each column is as wide as its widest cell, but never narrower than
    # its own header text.
    sym_name = "Symbol name"
    sym_len = max(max(len(entry.name) for entry in self.table), len(sym_name))
    kind_name = "Kind"
    kind_len = max(max(len(SharedData.KINDS[entry.kind]) for entry in self.table), len(kind_name))
    type_name = "Type"
    type_len = max(max(len(SharedData.TYPES[entry.type]) for entry in self.table), len(type_name))
    attr_name = "Attribute"
    attr_len = max(max(len(entry.attribute_str()) for entry in self.table), len(attr_name))
    # Header row and separator line.
    print("{0:3s} | {1:^{2}s} | {3:^{4}s} | {5:^{6}s} | {7:^{8}} | {9:s}".format(" No", sym_name, sym_len, kind_name, kind_len, type_name, type_len, attr_name, attr_len, "Parameters"))
    print("-----------------------------" + "-" * (sym_len + kind_len + type_len + attr_len))
    # One row per symbol; parameter types are rendered comma-separated.
    for index, entry in enumerate(self.table):
        parameters = ", ".join(SharedData.TYPES[p] for p in entry.param_types)
        print("{0:3d} | {1:^{2}s} | {3:^{4}s} | {5:^{6}s} | {7:^{8}} | ({9})".format(index, entry.name, sym_len, SharedData.KINDS[entry.kind], kind_len, SharedData.TYPES[entry.type], type_len, entry.attribute_str(), attr_len, parameters))
|
def _get_for_address(address, key):
    """Retrieve an attribute of -- or the name of -- the physical
    interface that the given IP address could be bound to.

    :param address (str): An individual IPv4 or IPv6 address without a
        net mask or subnet prefix, e.g. '192.168.1.1'.
    :param key: 'iface' for the physical interface name, or an attribute
        of the configured interface, e.g. 'netmask'.
    :returns str: Requested attribute, or None if the address is not
        bindable on any interface.
    """
    address = netaddr.IPAddress(address)
    for iface in netifaces.interfaces():
        addresses = netifaces.ifaddresses(iface)
        # IPv4: only the first configured address on the interface is
        # considered.
        if address.version == 4 and netifaces.AF_INET in addresses:
            addr = addresses[netifaces.AF_INET][0]['addr']
            netmask = addresses[netifaces.AF_INET][0]['netmask']
            network = netaddr.IPNetwork("%s/%s" % (addr, netmask))
            cidr = network.cidr
            if address in cidr:
                if key == 'iface':
                    return iface
                else:
                    return addresses[netifaces.AF_INET][0][key]
        # IPv6: every configured address on the interface is considered.
        if address.version == 6 and netifaces.AF_INET6 in addresses:
            for addr in addresses[netifaces.AF_INET6]:
                network = _get_ipv6_network_from_address(addr)
                if not network:
                    continue
                cidr = network.cidr
                if address in cidr:
                    if key == 'iface':
                        return iface
                    elif key == 'netmask' and cidr:
                        # Return only the prefix-length part of the CIDR.
                        return str(cidr).split('/')[1]
                    else:
                        # NOTE(review): here `addr` is the per-address
                        # dict from netifaces, so `key` is looked up in
                        # that dict -- confirm all expected keys exist.
                        return addr[key]
    return None
|
def get_handlers(self, kind=None):
    """Retrieve the handlers of the given kind; all handlers if ``kind``
    is None.

    :param kind: The kind of the handlers to return
    :return: A list of handlers, or an empty list; a copy of the whole
        registry when kind is None
    """
    with self._lock:
        if kind is None:
            # Shallow copy of the full registry.
            return self.__all_handlers.copy()
        if kind in self._handlers:
            # Return a slice copy so callers can't mutate our registry.
            return self._handlers[kind][:]
        return []
|
def get_args_index(target) -> int:
    """Return the index of the "*args" parameter if such a parameter
    exists in the function arguments, or -1 otherwise.

    :param target:
        The target function for which the args index should be determined
    :return:
        The arguments index if it exists or -1 if not
    """
    code = target.__code__
    has_varargs = bool(code.co_flags & inspect.CO_VARARGS)
    # *args is stored after the positional and keyword-only arguments.
    return code.co_argcount + code.co_kwonlyargcount if has_varargs else -1
|
def _read_response(self):
    """Read a complete response packet from the server.

    Raises NoResponseError when nothing is received and
    InvalidResponseError when the status line is not "ok".
    """
    status = self.buf.read_line().decode("utf-8")
    if not status:
        raise NoResponseError("No response received from server.")
    msg = self._read_message()
    if status == "ok":
        return msg
    raise InvalidResponseError(msg)
|
def multiplicity(self):
    """Return the multiplicity of a defect site within the structure
    (needed for concentration analysis).

    When not already set, the multiplicity is derived from space-group
    symmetry operations applied to the defect coordinates.
    """
    if self._multiplicity is None:
        # Generate multiplicity based on space group symmetry operations
        # performed on defect coordinates.
        try:
            d_structure = create_saturated_interstitial_structure(self)
        except ValueError:
            logger.debug('WARNING! Multiplicity was not able to be calculated adequately ' 'for interstitials...setting this to 1 and skipping for now...')
            # Fall back to a multiplicity of 1 when saturation fails.
            return 1
        sga = SpacegroupAnalyzer(d_structure)
        periodic_struc = sga.get_symmetrized_structure()
        # Locate the site closest to the defect coordinates within a
        # radius-2 sphere (results sorted by distance).
        poss_deflist = sorted(periodic_struc.get_sites_in_sphere(self.site.coords, 2, include_index=True), key=lambda x: x[1])
        defindex = poss_deflist[0][2]
        equivalent_sites = periodic_struc.find_equivalent_sites(periodic_struc[defindex])
        # NOTE(review): the computed value is returned but never cached in
        # self._multiplicity -- confirm whether caching was intended.
        return len(equivalent_sites)
    else:
        return self._multiplicity
|
def deploy_webconf():
    """Deploy nginx and other wsgi server site configurations to the host.

    Returns the list of deployed configuration targets.

    NOTE(review): uses Python 2 ``print`` statement syntax; this module
    is Python 2-only.
    """
    deployed = []
    log_dir = '/'.join([deployment_root(), 'log'])
    # TODO - incorrect - check for actual package to confirm installation
    if webserver_list():
        if env.verbosity:
            print env.host, "DEPLOYING webconf:"
        # Expose /var/log through a symlink in the deployment root.
        if not exists(log_dir):
            run('ln -s /var/log log')
        # deploys confs for each domain based on sites app
        if 'apache2' in get_packages():
            deployed += _deploy_webconf('/etc/apache2/sites-available', 'django-apache-template.txt')
            deployed += _deploy_webconf('/etc/nginx/sites-available', 'nginx-template.txt')
        elif 'gunicorn' in get_packages():
            deployed += _deploy_webconf('/etc/nginx/sites-available', 'nginx-gunicorn-template.txt')
        # Install the default nginx maintenance page (world-readable).
        if not exists('/var/www/nginx-default'):
            sudo('mkdir /var/www/nginx-default')
        upload_template('woven/maintenance.html', '/var/www/nginx-default/maintenance.html', use_sudo=True)
        sudo('chmod ugo+r /var/www/nginx-default/maintenance.html')
    else:
        print env.host, """WARNING: Apache or Nginx not installed"""
    return deployed
|
def _rebuild_blknos_and_blklocs ( self ) :
"""Update mgr . _ blknos / mgr . _ blklocs ."""
|
new_blknos = np . empty ( self . shape [ 0 ] , dtype = np . int64 )
new_blklocs = np . empty ( self . shape [ 0 ] , dtype = np . int64 )
new_blknos . fill ( - 1 )
new_blklocs . fill ( - 1 )
for blkno , blk in enumerate ( self . blocks ) :
rl = blk . mgr_locs
new_blknos [ rl . indexer ] = blkno
new_blklocs [ rl . indexer ] = np . arange ( len ( rl ) )
if ( new_blknos == - 1 ) . any ( ) :
raise AssertionError ( "Gaps in blk ref_locs" )
self . _blknos = new_blknos
self . _blklocs = new_blklocs
|
def valuewrapper(f, arguments=None):
    """Return a likelihood accepting `value` instead of x as a keyword
    argument.

    This is specifically intended for the instantiator above.  The
    wrapper's __dict__ is populated from `arguments` when given,
    otherwise copied from `f`.
    """
    def wrapper(**kwds):
        return f(kwds.pop('value'), **kwds)
    wrapper.__dict__.update(f.__dict__ if arguments is None else arguments)
    return wrapper
|
def _render_content(self, content, **settings):
    """Render the widget into a list of bar lines without printing."""
    bars = settings[self.SETTING_BARS]
    label_width = self.chart_measure(bars)
    # Auto-size bars to the terminal unless an explicit width was given.
    if not settings[self.SETTING_BAR_WIDTH]:
        settings[self.SETTING_BAR_WIDTH] = TERMINAL_WIDTH - label_width - 3
    max_value = max(content)
    return [
        self._render_bar(bars[idx], value, max_value, label_width, **settings)
        for idx, value in enumerate(content)
    ]
|
def bbox_to_mip(self, bbox, mip, to_mip):
    """Convert a bbox (or slices) expressed at one mip level into the
    equivalent bbox at another mip level."""
    if type(bbox) is not Bbox:
        # Normalize raw slices into a Bbox within the source mip bounds.
        bbox = lib.generate_slices(bbox, self.mip_bounds(mip).minpt, self.mip_bounds(mip).maxpt, bounded=False)
        bbox = Bbox.from_slices(bbox)

    def scale_one_level(box, src_mip, dst_mip):
        original_dtype = box.dtype
        # Setting the type is required for Python2.
        ratio = self.mip_resolution(src_mip).astype(np.float32) / self.mip_resolution(dst_mip).astype(np.float32)
        box = box.astype(np.float64)
        box *= ratio
        # Round outward so the converted box fully covers the original.
        box.minpt = np.floor(box.minpt)
        box.maxpt = np.ceil(box.maxpt)
        return box.astype(original_dtype)

    step = 1 if to_mip >= mip else -1
    # Walk one mip level at a time until the target level is reached.
    while mip != to_mip:
        bbox = scale_one_level(bbox, mip, mip + step)
        mip += step
    return bbox
|
def read_folder(folder):
    """Read all *.inkml files in a folder.

    Parameters
    ----------
    folder : str

    Returns
    -------
    list of HandwrittenData objects
    """
    hwr_objects = []
    # natsorted gives a stable, human-friendly file ordering.
    for filepath in natsort.natsorted(glob.glob("%s/*.inkml" % folder)):
        parsed = inkml.read(filepath)
        hwr_objects.extend(parsed.to_single_symbol_list())
    logging.info("Done reading formulas")
    save_raw_pickle(hwr_objects)
    return hwr_objects
|
def list_pkgs(versions_as_list=False, include_components=True, include_updates=True, **kwargs):
    '''List the packages currently installed.

    .. note::
        To view installed software as displayed in Add/Remove Programs,
        set ``include_components`` and ``include_updates`` to False.

    Args:
        versions_as_list (bool):
            Returns the versions as a list
        include_components (bool):
            Include sub components of installed software. Default is ``True``
        include_updates (bool):
            Include software updates and Windows updates. Default is ``True``

    Kwargs:
        saltenv (str):
            The salt environment to use. Default ``base``
        refresh (bool):
            Refresh package metadata. Default ``False``

    Returns:
        dict: A dictionary of installed software with versions installed

    .. code-block:: cfg

        {'<package_name>': '<version>'}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_pkgs
        salt '*' pkg.list_pkgs versions_as_list=True
    '''
    versions_as_list = salt.utils.data.is_true(versions_as_list)
    # 'removed'/'purge_desired' are not yet implemented or not applicable.
    if any([salt.utils.data.is_true(kwargs.get(x)) for x in ('removed', 'purge_desired')]):
        return {}
    saltenv = kwargs.get('saltenv', 'base')
    refresh = salt.utils.data.is_true(kwargs.get('refresh', False))
    _refresh_db_conditional(saltenv, force=refresh)
    ret = {}
    name_map = _get_name_map(saltenv)
    for pkg_name, val_list in six.iteritems(_get_reg_software(include_components=include_components, include_updates=include_updates)):
        if pkg_name in name_map:
            # Known software: report under its mapped (short) name.
            key = name_map[pkg_name]
            for val in val_list:
                if val == 'Not Found':
                    # Look up version from winrepo
                    pkg_info = _get_package_info(key, saltenv=saltenv)
                    if not pkg_info:
                        continue
                    for pkg_ver in pkg_info.keys():
                        if pkg_info[pkg_ver]['full_name'] == pkg_name:
                            val = pkg_ver
                __salt__['pkg_resource.add_pkg'](ret, key, val)
        else:
            # Unmapped software is keyed by its registry display name.
            key = pkg_name
            for val in val_list:
                __salt__['pkg_resource.add_pkg'](ret, key, val)
    __salt__['pkg_resource.sort_pkglist'](ret)
    if not versions_as_list:
        __salt__['pkg_resource.stringify'](ret)
    return ret
|
def iter_genotypes(self):
    """Iterate over available markers, yielding Genotypes instances."""
    for record in self.get_vcf():
        alleles = {record.REF} | set(record.ALT)
        # Carry the quality metric only when a quality field is set.
        if self.quality_field:
            quality = getattr(record, self.quality_field)
            variant = ImputedVariant(record.ID, record.CHROM, record.POS, alleles, quality)
        else:
            variant = Variant(record.ID, record.CHROM, record.POS, alleles)
        multiallelic = len(record.ALT) > 1
        for coded_allele, genotype in self._make_genotypes(record.ALT, record.genotypes):
            yield Genotypes(variant, genotype, record.REF, coded_allele, multiallelic=multiallelic)
|
def connect(self):
    """Connect to the device.

    Delegates to the event loop; ``lambda: self`` makes this object act
    as its own protocol factory.
    """
    protocol_factory = lambda: self
    return self.loop.create_connection(protocol_factory, self.host, self.port)
|
def remove(self, interval):
    """Remove an interval from the tree, if present; raise ValueError
    otherwise.

    Completes in O(log n) time.
    """
    # Membership is checked first so an absent interval never mutates
    # the tree.
    if interval not in self:
        raise ValueError
    self.top_node = self.top_node.remove(interval)
    self.all_intervals.remove(interval)
    self._remove_boundaries(interval)
|
def capture_guest(userid):
    """Capture a virtual machine image.

    Input parameters:
    :userid: USERID of the guest, last 8 if length > 8

    Output parameters:
    :image_name: name of the captured image
    """
    # The guest must be powered on before it can be captured.
    power_status = sdk_client.send_request('guest_get_power_state', userid)['output']
    if power_status == 'off':
        sdk_client.send_request('guest_start', userid)
        # TODO: how much time?
        time.sleep(1)
    # Derive a (mostly unique) image name from the current time.
    image_name = 'image_captured_%03d' % (time.time() % 1000)
    sdk_client.send_request('guest_capture', userid, image_name,
                            capture_type='rootonly', compress_level=6)
    return image_name
|
def is_ipv4(value, **kwargs):
    """Indicate whether ``value`` is a valid IP version 4 address.

    :param value: The value to evaluate.
    :returns: ``True`` if ``value`` is valid, ``False`` if it is not.
    :rtype: :class:`bool <python:bool>`
    :raises SyntaxError: if ``kwargs`` contains duplicate keyword
        parameters or duplicates keyword parameters passed to the
        underlying validator
    """
    try:
        validators.ipv4(value, **kwargs)
    except SyntaxError:
        # Duplicate keyword arguments are a caller error; propagate.
        raise
    except Exception:
        return False
    return True
|
def get_methods_class(self, class_name):
    """Return all methods of a specific class.

    :param class_name: the class name
    :type class_name: string
    :rtype: a list with :class:`EncodedMethod` objects
    """
    return [
        method
        for class_def in self.classes.class_def
        for method in class_def.get_methods()
        if method.get_class_name() == class_name
    ]
|
def get_self_url(request_data):
    """Return the URL of the current host + current view + query.

    :param request_data: The request as a dict
    :type: dict
    :return: The url of current host + current view + query
    :rtype: string
    """
    host_url = OneLogin_Saml2_Utils.get_self_url_host(request_data)
    request_uri = ''
    if 'request_uri' in request_data:
        request_uri = request_data['request_uri']
        if not request_uri.startswith('/'):
            # Strip the scheme and authority from an absolute URI.
            absolute = re.search('^https?://[^/]*(/.*)', request_uri)
            if absolute:
                request_uri = absolute.groups()[0]
    return host_url + request_uri
|
def time_from_frequencyseries(htilde, sample_frequencies=None, discont_threshold=0.99 * numpy.pi):
    """Compute time as a function of frequency from the given
    frequency-domain waveform, assuming the stationary phase
    approximation.

    Any frequencies lower than the first non-zero value in htilde are
    assigned the time at the first non-zero value. Times for any
    frequencies above the next-to-last non-zero value in htilde will be
    assigned the time of the next-to-last non-zero value.

    .. note::
        Some waveform models (e.g., `SEOBNRv2_ROM_DoubleSpin`) can have
        discontinuities in the phase towards the end of the waveform due
        to numerical error. We therefore exclude any points that occur
        after a discontinuity in the phase, as the time estimate becomes
        untrustworthy beyond that point. What counts as a discontinuity
        is set by `discont_threshold`. To turn this feature off, set
        `discont_threshold` to a value larger than pi (due to the
        unwrapping of the phase, no two points can differ by more
        than pi).

    Parameters
    ----------
    htilde : FrequencySeries
        The waveform to get the time evolution of; must be complex.
    sample_frequencies : {None, array}
        The frequencies at which the waveform is sampled. If None, will
        retrieve from ``htilde.sample_frequencies``.
    discont_threshold : {0.99*pi, float}
        If the phase difference changes by more than this threshold, it
        is considered a discontinuity. Default is 0.99*pi.

    Returns
    -------
    FrequencySeries
        The time evolution of the waveform as a function of frequency.
    """
    if sample_frequencies is None:
        sample_frequencies = htilde.sample_frequencies.numpy()
    phase = phase_from_frequencyseries(htilde).data
    dphi = numpy.diff(phase)
    # Stationary phase approximation: t(f) = -(1/2pi) * dphi/df.
    time = -dphi / (2. * numpy.pi * numpy.diff(sample_frequencies))
    nzidx = numpy.nonzero(abs(htilde.data))[0]
    # kmin: first non-zero sample; kmax: next-to-last non-zero sample.
    kmin, kmax = nzidx[0], nzidx[-2]
    # Exclude everything after a discontinuity.
    discont_idx = numpy.where(abs(dphi[kmin:]) >= discont_threshold)[0]
    if discont_idx.size != 0:
        kmax = min(kmax, kmin + discont_idx[0] - 1)
    # Clamp the times outside [kmin, kmax] to the boundary values.
    time[:kmin] = time[kmin]
    time[kmax:] = time[kmax]
    return FrequencySeries(time.astype(real_same_precision_as(htilde)), delta_f=htilde.delta_f, epoch=htilde.epoch, copy=False)
|
def complete_io(self, iocb, msg):
    """Called by a handler to return data to the client.

    Completes the currently active iocb, then either idles the
    controller (triggering the next queued request) or, when a wait time
    is configured, enters the waiting state first.

    Raises RuntimeError if `iocb` is not the active request.
    """
    if _debug:
        IOQController._debug("complete_io %r %r", iocb, msg)
    # check to see if it is completing the active one
    if iocb is not self.active_iocb:
        raise RuntimeError("not the current iocb")
    # normal completion
    IOController.complete_io(self, iocb, msg)
    # no longer an active iocb
    self.active_iocb = None
    # check to see if we should wait a bit
    if self.wait_time:
        # change our state
        self.state = CTRL_WAITING
        _statelog.debug("%s %s %s" % (_strftime(), self.name, "waiting"))
        # schedule a call in the future to leave the waiting state
        task = FunctionTask(IOQController._wait_trigger, self)
        task.install_task(delta=self.wait_time)
    else:
        # change our state
        self.state = CTRL_IDLE
        _statelog.debug("%s %s %s" % (_strftime(), self.name, "idle"))
        # look for more to do
        deferred(IOQController._trigger, self)
|
def _connect(self):
    """Establish a connection to the MySQL server.

    Any existing connection is closed first; the new connection handle
    is stored on ``self.conn``.
    """
    self._close()
    self.conn = MySQLdb.Connect(
        host=self.hostname,
        port=self.port,
        user=self.username,
        passwd=self.password,
        db=self.database,
    )
|
def nfa(self):
    """Convert the expression into an NFA.

    The automaton is built back-to-front: starting from the accepting
    state, each token expression's fragment is chained in front of what
    has been built so far.
    """
    successor = State(final=True)
    for token_expression in reversed(self):
        fragment = token_expression.nfa(successor)
        successor = fragment
    return NFA(fragment)
|
def extend_is_dir(value, minimum=None, maximum=None):
    u"""Extended is_dir() that accepts either a list or a string.

    A list is validated element-wise (after list-length validation via
    validate.is_list); anything else is passed to is_dir() directly.
    """
    if not isinstance(value, list):
        return is_dir(value)
    members = validate.is_list(value, minimum, maximum)
    return [is_dir(member) for member in members]
|
def safe_filter(error_output=''):
    """A safe filter decorator that only raises errors when
    ``THUMBNAIL_DEBUG`` is ``True``, otherwise returning
    ``error_output`` after logging the failure.
    """
    def decorator(func):
        @wraps(func)
        def guarded(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as err:
                # In debug mode, surface the original exception.
                if sorl_settings.THUMBNAIL_DEBUG:
                    raise
                logger.error('Thumbnail filter failed: %s' % str(err), exc_info=sys.exc_info())
                return error_output
        return guarded
    return decorator
|
def reset_ids_in_meta_df(meta_df):
    """Move meta_df's index into an "old_id" column. meta_df is modified
    in place.

    The frame gets a fresh integer index that keeps the original index's
    name.
    """
    # Remember the index name so the new index can inherit it, and rename
    # the index so the column produced by reset_index() is "old_id".
    saved_name = meta_df.index.name
    meta_df.index.name = "old_id"
    meta_df.reset_index(inplace=True)
    meta_df.index.name = saved_name
|
def is_parans_exp(istr):
    """Determine whether `istr` looks like a valid function "call": an
    alphanumeric name followed by a balanced parenthesized argument
    list, e.g. ``foo(1, bar(2))``.
    """
    name = istr.split('(')[0]
    if (not name.isalnum() and name != '(') or istr[-1] != ')':
        return False
    # Scan the interior (everything after the first '(' up to, but not
    # including, the trailing ')') and ensure the opening paren is never
    # closed before the end.
    interior = '('.join(istr[:-1].split('(')[1:])
    depth = 1
    for ch in interior:
        if ch == '(':
            depth += 1
        elif ch == ')':
            depth -= 1
        if depth == 0:
            return False
    return True
|
def Burr(c, k, tag=None):
    """A Burr random variate.

    Parameters
    ----------
    c : scalar
        The first shape parameter (must be > 0)
    k : scalar
        The second shape parameter (must be > 0)

    Raises
    ------
    ValueError
        If either shape parameter is not strictly positive.
    """
    # Validate explicitly instead of with `assert`, which is stripped
    # when Python runs with -O and would silently skip the check.
    if not (c > 0 and k > 0):
        raise ValueError('Burr "c" and "k" parameters must be greater than zero')
    return uv(ss.burr(c, k), tag=tag)
|
def parse_hicpro_stats(self, f, rsection):
    """Parse a HiC-Pro stat file into self.hicpro_data."""
    s_name = self.clean_s_name(os.path.basename(f['root']), os.path.dirname(f['root']))
    # Make sure a per-sample dict exists before filling it.
    if s_name not in self.hicpro_data:
        self.hicpro_data[s_name] = {}
    self.add_data_source(f, s_name, section=rsection)
    sample = self.hicpro_data[s_name]
    for line in f['f'].splitlines():
        # Lines starting with '#' are comments.
        if line.startswith('#'):
            continue
        fields = line.split("\t")
        if fields[0] in sample:
            log.debug("Duplicated keys found! Overwriting: {}".format(fields[0]))
        sample[fields[0]] = int(fields[1])
|
def get_relationships(self):
    """Get all ``Relationships``.

    return: (osid.relationship.RelationshipList) - a list of
        ``Relationships``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Build the handcar endpoint for this catalog's relationships.
    path = ('/handcar/services/relationship/families/' +
            self._catalog_idstr + '/relationships')
    response = self._get_request(path)
    return objects.RelationshipList(response)
|
def allocate_stream(self, stream_type, stream_id=None, previous=None, attach=False):
    """Allocate a new stream of the given type.

    The stream is allocated with an incremental ID starting at
    StreamAllocator.StartingID. The returned data stream can always be
    used to attach a NodeInput to this stream, however the
    attach_stream() function should always be called first since this
    stream's output may need to be split and a logically equivalent
    stream used instead to satisfy a device specific constraint on the
    maximum number of outputs attached to a given stream.

    You can call allocate_stream on the same stream multiple times
    without issue. Subsequent calls to allocate_stream are noops.

    Args:
        stream_type (int): A stream type specified in the DataStream
            class like DataStream.ConstantType
        stream_id (int): The ID we would like to use for this stream; if
            this is not specified, an ID is automatically allocated.
        previous (DataStream): If this stream was automatically derived
            from another stream, this parameter should be a link to the
            old stream.
        attach (bool): Call attach_stream immediately before returning.
            Convenience routine for streams that should immediately be
            attached to something.

    Returns:
        DataStream: The allocated data stream.

    Raises:
        ArgumentError: If the stream type is unknown, or if an explicit
            stream_id falls inside the internally managed ID range.
    """
    if stream_type not in DataStream.TypeToString:
        raise ArgumentError("Unknown stream type in allocate_stream", stream_type=stream_type)
    if stream_id is not None and stream_id >= StreamAllocator.StartingID:
        raise ArgumentError("Attempted to explicitly allocate a stream id in the internally managed id range", stream_id=stream_id, started_id=StreamAllocator.StartingID)
    # If the stream id is not explicitly given, we need to manage and
    # track it from our autoallocate range (one counter per stream type).
    if stream_id is None:
        if stream_type not in self._next_id:
            self._next_id[stream_type] = StreamAllocator.StartingID
        stream_id = self._next_id[stream_type]
        self._next_id[stream_type] += 1
    # Keep track of how many downstream nodes are attached to this stream
    # so that we know when we need to split it into two.
    stream = DataStream(stream_type, stream_id)
    if stream not in self._allocated_streams:
        self._allocated_streams[stream] = (stream, 0, previous)
    if attach:
        stream = self.attach_stream(stream)
    return stream
|
def _filter_metadata_for_connection(target, connection, **kw):
    """Listener to control what indexes get created.

    Useful for skipping postgres-specific indexes on a sqlite for
    example.  It's looking for info entry `engines` on an index
    (`Index(info=dict(engines=['postgresql']))`), an iterable of engine
    names.
    """
    engine = connection.engine.name
    # When no 'engines' entry is present, the index is kept everywhere.
    default_engines = (engine,)
    if isinstance(target, sa.Table):
        tables = target
    else:
        tables = kw.get("tables", [])
    for table in tables:
        # Iterate over a snapshot since table.indexes is mutated below.
        for index in list(table.indexes):
            if engine not in index.info.get("engines", default_engines):
                table.indexes.remove(index)
|
def comparedist(df, *args, **kwargs):
    """Compare the distributions of DataFrame selections, giving
    visualisations of:
    - individual and combined distributions
    - distribution of non-common values
    - distribution of non-common values vs. each side

    Plots each distribution as an area (fill_between).

    :param df: `pandas.DataFrame`
    :param arg0: the base dataframe *selector* to perform calculation
        against
    :param *: a number of `pandas.DataFrame` selectors to compare to arg0
    :param bins: `int` number of bins for histogram (default 50)
    :param xlabel: label for X axis
    :param ylabel: label for Y axis
    :param base_fmt: Text format to use for base selector legend
        (Python .format() syntax)
    :param arg_fmt: Text format to use for the other selectors' legends
        (Python .format() syntax)
    :return: Figure
    """
    bins = kwargs.get('bins', 50)
    xlabel = kwargs.get('xlabel', 'Value')
    ylabel = kwargs.get('ylabel', 'Count')
    base_fmt = kwargs.get('base_fmt')
    arg_fmt = kwargs.get('arg_fmt')
    # The base for comparisons is the first passed selector.
    base_selector, selectors = args[0], args[1:]
    df1 = df[base_selector]
    # One subplot per comparison selector, stacked vertically.
    fig, axes = plt.subplots(len(selectors), 1, figsize=(10, len(selectors) * 5))
    if not isinstance(axes, np.ndarray):
        # mpl returns a single object when only one subplot exists.
        axes = [axes]
    for n, (ax1, selector) in enumerate(zip(axes, selectors)):
        dfn = df[selector]
        # Common x-range covering both distributions (NaN-safe).
        xr = np.nanmin([np.nanmin(df1), np.nanmin(dfn)]), np.nanmax([np.nanmax(df1), np.nanmax(dfn)])
        ax1.set_title('Distributions of %s and %s' % (base_selector, selector))
        # NOTE(review): the legend labels look swapped relative to the
        # plotted data (dfn is labelled with base_selector and df1 with
        # selector) -- confirm against the intended behavior.
        _areadist(ax1, dfn.values, xr, c='r', bins=bins, label=format_label(base_selector, base_fmt))
        _areadist(ax1, df1.values, xr, c='k', bins=bins, alpha=0.3, label=format_label(selector, arg_fmt))
        ax1.set_xlabel(xlabel)
        ax1.set_ylabel(ylabel)
        _, ymax = ax1.get_ylim()
        ax1.set_ylim(0, ymax)
        ax1.legend(loc='upper right')
    fig.tight_layout()
    return fig
|
def _raw_print_image(self, line, size, output=None):
    """Print formatted image.

    Builds and returns the raster byte stream for the given bit-string
    line data.

    NOTE(review): this is Python 2-only code -- it relies on
    str.decode('hex') and on '/' performing integer division of ints.
    """
    i = 0
    cont = 0
    buffer = ""
    raw = ""

    def __raw(string):
        # NOTE(review): this helper is defined but never called below,
        # so `output`/`self._raw` are never used -- confirm intent.
        if output:
            output(string)
        else:
            self._raw(string)

    # Raster command header.
    raw += S_RASTER_N
    # NOTE(review): the first header byte packs ((width/height)/8),
    # which looks suspicious (width/8 would be typical) -- verify.
    buffer = "%02X%02X%02X%02X" % (((size[0] / size[1]) / 8), 0, size[1], 0)
    raw += buffer.decode('hex')
    buffer = ""
    # Convert the bit string to bytes, flushing every 4 bytes.
    while i < len(line):
        hex_string = int(line[i:i + 8], 2)
        buffer += "%02X" % hex_string
        i += 8
        cont += 1
        if cont % 4 == 0:
            raw += buffer.decode("hex")
            buffer = ""
            cont = 0
    return raw
|
def remove_empty_dir(path):
    """Recursively delete empty folders beneath (and including) `path`.

    A non-directory path is ignored.  NOTE: all OSErrors raised during
    removal are swallowed, matching the best-effort contract.
    """
    try:
        if not os.path.isdir(path):
            return
        entries = os.listdir(path)
        if not entries:
            # Folder is empty: delete it.
            os.rmdir(path)
        else:
            # Descend into subdirectories, removing any empty ones.
            for name in entries:
                child = os.path.join(path, name)
                if os.path.isdir(child):
                    remove_empty_dir(child)
    except OSError as e:
        # A directory that became non-empty concurrently is expected.
        if e.errno == errno.ENOTEMPTY:
            pass
|
def add_snippet_client(self, name, package):
    """Adds a snippet client to the management.

    Args:
        name: string, the attribute name to which to attach the snippet
            client, e.g. `name='maps'` attaches it as `ad.maps`.
        package: string, the package name of the snippet apk to connect to.

    Raises:
        Error: if a duplicated name or package is passed in.
    """
    # Should not load snippet with the same name more than once.
    if name in self._snippet_clients:
        raise Error(self, 'Name "%s" is already registered with package "%s", it cannot ' 'be used again.' % (name, self._snippet_clients[name].client.package))
    # Should not load the same snippet package more than once.
    for registered_name, registered_client in self._snippet_clients.items():
        if package == registered_client.package:
            raise Error(self, 'Snippet package "%s" has already been loaded under name' ' "%s".' % (package, registered_name))
    new_client = snippet_client.SnippetClient(package=package, ad=self._device)
    new_client.start_app_and_connect()
    self._snippet_clients[name] = new_client
|
def env_absent(name, user='root'):
    '''Verifies that the specified environment variable is absent from the crontab
    for the specified user

    name
        The name of the environment variable to remove from the user crontab

    user
        The name of the user whose crontab needs to be modified, defaults to
        the root user
    '''
    name = name.strip()
    ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''}
    # Test mode: report what would happen without touching the crontab.
    if __opts__['test']:
        status = _check_cron_env(user, name)
        ret['result'] = None
        if status == 'absent':
            ret['result'] = True
            ret['comment'] = 'Cron env {0} is absent'.format(name)
        elif status in ('present', 'update'):
            ret['comment'] = 'Cron env {0} is set to be removed'.format(name)
        return ret
    outcome = __salt__['cron.rm_env'](user, name)
    if outcome == 'absent':
        ret['comment'] = "Cron env {0} already absent".format(name)
    elif outcome == 'removed':
        ret['comment'] = ("Cron env {0} removed from {1}'s crontab".format(name, user))
        ret['changes'] = {user: name}
    else:
        # Anything else is the error string returned by cron.rm_env.
        ret['comment'] = ("Cron env {0} for user {1} failed to commit with error {2}".format(name, user, outcome))
        ret['result'] = False
    return ret
|
def register_name(self, register_index):
    """Retrives and returns the name of an ARM CPU register.

    Args:
        self (JLink): the ``JLink`` instance
        register_index (int): index of the register whose name to retrieve

    Returns:
        Name of the register.
    """
    # The DLL hands back a pointer to a C string; cast and decode it.
    raw_name = self._dll.JLINKARM_GetRegisterName(register_index)
    return ctypes.cast(raw_name, ctypes.c_char_p).value.decode()
|
def gauss_fltr_astropy(dem, size=None, sigma=None, origmask=False, fill_interior=False):
    """Astropy gaussian filter properly handles convolution with NaN

    http://stackoverflow.com/questions/23832852/by-which-measures-should-i-set-the-size-of-my-gaussian-filter-in-matlab
    width1 = 3; sigma1 = (width1-1) / 6;
    Specify width for smallest feature of interest and determine sigma appropriately.
    sigma is width of 1 std in pixels (not multiplier).
    If sigma is specified, filter size is derived from it; alternatively
    specify filter size and sigma is computed as (size - 1) / (2 * truncate).
    The sigma/size calculations below should work for non-integer sigma.

    :param dem: masked array (or anything malib.checkma accepts)
    :param size: odd kernel size in pixels (forced odd, minimum 3)
    :param sigma: gaussian std dev in pixels
    :param origmask: re-apply the input's mask after filtering
    :param fill_interior: with origmask, keep interior holes filled
    :return: filtered masked array with dem's dtype and fill_value
    """
    # import astropy.nddata
    import astropy.convolution
    dem = malib.checkma(dem)
    # Generate 2D gaussian kernel for input sigma and size
    # Default size is 8 * sigma in x and y directions
    # kernel = astropy.nddata.make_kernel([size, size], sigma, 'gaussian')
    # Size must be odd
    if size is not None:
        size = int(np.floor(size / 2) * 2 + 1)
        size = max(size, 3)
    # Truncate the filter at this many standard deviations.
    # NOTE(review): scipy's default is 4.0 but 3.0 is used here — presumably
    # intentional (tighter kernel); confirm.
    truncate = 3.0
    if size is not None and sigma is None:
        sigma = (size - 1) / (2 * truncate)
    elif size is None and sigma is not None:
        # Round up to nearest odd int
        size = int(np.ceil((sigma * (2 * truncate) + 1) / 2) * 2 - 1)
    elif size is None and sigma is None:
        # Use default parameters
        sigma = 1
        size = int(np.ceil((sigma * (2 * truncate) + 1) / 2) * 2 - 1)
    size = max(size, 3)
    kernel = astropy.convolution.Gaussian2DKernel(sigma, x_size=size, y_size=size, mode='oversample')
    print("Applying gaussian smoothing filter with size %i and sigma %0.3f (sum %0.3f)" % (size, sigma, kernel.array.sum()))
    # This will fill holes
    # np.nan is float
    # dem_filt_gauss = astropy.nddata.convolve(dem.astype(float).filled(np.nan), kernel, boundary='fill', fill_value=np.nan)
    # dem_filt_gauss = astropy.convolution.convolve(dem.astype(float).filled(np.nan), kernel, boundary='fill', fill_value=np.nan)
    # Added normalization to ensure filtered values are not brightened/darkened if kernelsum != 1
    dem_filt_gauss = astropy.convolution.convolve(dem.astype(float).filled(np.nan), kernel, boundary='fill', fill_value=np.nan, normalize_kernel=True)
    # This will preserve original ndv pixels, applying original mask after filtering
    if origmask:
        print("Applying original mask")
        # Allow filling of interior holes, but use original outer edge
        if fill_interior:
            mask = malib.maskfill(dem)
        else:
            mask = dem.mask
        dem_filt_gauss = np.ma.array(dem_filt_gauss, mask=mask, fill_value=dem.fill_value)
    # Re-mask any NaN/inf produced by the convolution near edges/holes
    out = np.ma.fix_invalid(dem_filt_gauss, copy=False, fill_value=dem.fill_value)
    out.set_fill_value(dem.fill_value.astype(dem.dtype))
    return out.astype(dem.dtype)
|
def calculate_hash(options):
    """returns an option_collection_hash given a list of options"""
    digest = sha1()
    # Sort for a stable hash, then feed the joined string in one update
    # (equivalent to updating once per option).
    digest.update(''.join(sorted(list(options))).encode('utf-8'))
    return digest.hexdigest()
|
def get_call_name(self, node):
    """Return call name for the given node.

    For attribute calls (``obj.method()``) the attribute name is returned;
    for plain-name calls (``func()``) the identifier. Any other callee
    (lambda, subscript, nested call) yields None implicitly.
    """
    callee = node.func
    if isinstance(callee, ast.Attribute):
        return callee.attr
    if isinstance(callee, ast.Name):
        return callee.id
|
def _read_eeprom ( self , address , size ) :
'''Read EEPROM'''
|
self . _intf . write ( self . _base_addr + self . CAL_EEPROM_ADD , array ( 'B' , pack ( '>H' , address & 0x3FFF ) ) )
# 14 - bit address , 16384 bytes
n_pages , n_bytes = divmod ( size , self . CAL_EEPROM_PAGE_SIZE )
data = array ( 'B' )
for _ in range ( n_pages ) :
data . extend ( self . _intf . read ( self . _base_addr + self . CAL_EEPROM_ADD | 1 , size = self . CAL_EEPROM_PAGE_SIZE ) )
if n_bytes > 0 :
data . extend ( self . _intf . read ( self . _base_addr + self . CAL_EEPROM_ADD | 1 , size = n_bytes ) )
return data
|
def pick(self, form, target_units, parcel_size, ave_unit_size, current_units, max_parcel_size=200000, min_unit_size=400, drop_after_build=True, residential=True, bldg_sqft_per_job=400.0, profit_to_prob_func=None):
    """Choose the buildings from the list that are feasible to build in
    order to match the specified demand.

    Parameters
    ----------
    form : string or list
        One or more of the building forms from the pro forma specification -
        e.g. "residential" or "mixedresidential". If more than one form is
        passed the forms compete with each other (based on profitability)
        for which one gets built in order to meet demand.
    target_units : int
        The number of units to build. For non-residential buildings this
        should be passed as the number of job spaces that need to be created.
    parcel_size : series
        The size of the parcels, indexed by parcel_id.
    ave_unit_size : series
        The average residential unit size around each parcel, indexed by
        parcel.  NOTE: values below min_unit_size are clipped *in place*.
    current_units : series
        The current number of units on the parcel; used to compute the net
        number of units produced by the developer model.
    max_parcel_size : float
        Parcels larger than this size will not be considered for development.
    min_unit_size : float
        Values less than this number in ave_unit_size will be set to this
        number.
    drop_after_build : bool
        Whether to drop parcels from consideration after they have been
        chosen for development (avoids developing the same parcel twice).
    residential : bool
        If False, fill in job_spaces rather than residential_units.
    bldg_sqft_per_job : float (default 400.0)
        The average square feet per job for this building form.
    profit_to_prob_func : function
        Optional function taking the feasibility dataframe and returning a
        series of selection probabilities; defaults to profit per land area.

    Returns
    -------
    None if there are no feasible buildings, otherwise a DataFrame of
    buildings to add (rows from the feasibility DataFrame, reset index,
    index name 'parcel_id').
    """
    if len(self.feasibility) == 0:
        # no feasible buildings, might as well bail
        return
    if form is None:
        df = self.feasibility
    elif isinstance(form, list):
        # multiple forms compete on max profit
        df = self.keep_form_with_max_profit(form)
    else:
        df = self.feasibility[form]
    # feasible buildings only for this building type
    df = df[df.max_profit_far > 0]
    # NOTE(review): this clips the *caller's* series in place — side effect
    ave_unit_size[ave_unit_size < min_unit_size] = min_unit_size
    df["ave_unit_size"] = ave_unit_size
    df["parcel_size"] = parcel_size
    df['current_units'] = current_units
    df = df[df.parcel_size < max_parcel_size]
    df['residential_units'] = (df.residential_sqft / df.ave_unit_size).round()
    df['job_spaces'] = (df.non_residential_sqft / bldg_sqft_per_job).round()
    # net units = new capacity minus whatever is already on the parcel
    if residential:
        df['net_units'] = df.residential_units - df.current_units
    else:
        df['net_units'] = df.job_spaces - df.current_units
    df = df[df.net_units > 0]
    if len(df) == 0:
        print("WARNING THERE ARE NO FEASIBLE BUILDING TO CHOOSE FROM")
        return
    # print "Describe of net units\n", df.net_units.describe()
    print("Sum of net units that are profitable: {:,}".format(int(df.net_units.sum())))
    if profit_to_prob_func:
        p = profit_to_prob_func(df)
    else:
        # default: selection probability proportional to profit per land area
        df['max_profit_per_size'] = df.max_profit / df.parcel_size
        p = df.max_profit_per_size.values / df.max_profit_per_size.sum()
    if df.net_units.sum() < target_units:
        print("WARNING THERE WERE NOT ENOUGH PROFITABLE UNITS TO", "MATCH DEMAND")
        build_idx = df.index.values
    elif target_units <= 0:
        build_idx = []
    else:
        # we don't know how many developments we will need, as they differ in net_units.
        # If all developments have net_units of 1 than we need target_units of them.
        # So we choose the smaller of available developments and target_units.
        choices = np.random.choice(df.index.values, size=min(len(df.index), target_units), replace=False, p=p)
        tot_units = df.net_units.loc[choices].values.cumsum()
        # +1 so the development that crosses the target is included
        ind = int(np.searchsorted(tot_units, target_units, side="left")) + 1
        build_idx = choices[:ind]
    if drop_after_build:
        # remove chosen parcels so they cannot be developed twice
        self.feasibility = self.feasibility.drop(build_idx)
    new_df = df.loc[build_idx]
    new_df.index.name = "parcel_id"
    return new_df.reset_index()
|
def add_method(self, f=None, name=None):
    """Add a method to the dispatcher.

    Parameters
    ----------
    f : callable
        Callable to be added.
    name : str, optional
        Name to register (the default is function **f** name).

    Notes
    -----
    When used as a decorator keeps callable object unmodified.  May be used
    plainly (``d.add_method(fn, name="sum")``), as ``@d.add_method``, or as
    ``@d.add_method(name="my.method")``.
    """
    # Called as @d.add_method(name=...): defer until the callable arrives.
    if name and not f:
        return functools.partial(self.add_method, name=name)
    key = name if name else f.__name__
    self.method_map[key] = f
    return f
|
def getEPrintURL(self, CorpNum, MgtKey, UserID=None):
    """Return the recipient-facing print URL for a cash receipt.

    Args:
        CorpNum: Popbill member's business registration number.
        MgtKey: document management key of the cash receipt.
        UserID: Popbill member user id (optional).

    Returns:
        Popbill URL as str.

    Raises:
        PopbillException: if MgtKey is missing or empty.
    """
    # `is None` instead of `== None`: identity comparison is the correct idiom
    if MgtKey is None or MgtKey == "":
        raise PopbillException(-99999999, "관리번호가 입력되지 않았습니다.")
    result = self._httpget('/Cashbill/' + MgtKey + '?TG=EPRINT', CorpNum, UserID)
    return result.url
|
def set_metadata(self, metadata, clear=False, prefix=None):
    """Accepts a dictionary of metadata key/value pairs and updates the
    specified container metadata with them.

    If 'clear' is True, any existing metadata is deleted and only the
    passed metadata is retained; otherwise the values update the
    container's existing metadata.

    By default the standard container metadata prefix
    ('X-Container-Meta-') is prepended to header names lacking it; for
    non-standard headers pass a non-None prefix such as an empty string.
    """
    # Pure delegation: the manager performs the actual API request.
    return self.manager.set_metadata(self, metadata, clear=clear, prefix=prefix)
|
def get_data(self):
    """Gets data from the given url

    Fetches ``self.build_url()`` and stores the response object on
    ``self.incidents_data``. Raises ``requests.HTTPError`` for error
    statuses.
    """
    url = self.build_url()
    self.incidents_data = requests.get(url)
    if self.incidents_data.status_code != 200:
        # Bug fix: raise_for_status() raises HTTPError itself; the old
        # `raise self.incidents_data.raise_for_status()` re-raised its
        # *return value*, which is None for non-error statuses (TypeError).
        self.incidents_data.raise_for_status()
|
def _maybe_fill(arr, fill_value=np.nan):
    """if we have a compatible fill_value and arr dtype, then fill"""
    # Fill only when the value can be represented in arr's dtype;
    # otherwise hand the array back untouched.
    if not _isna_compat(arr, fill_value):
        return arr
    arr.fill(fill_value)
    return arr
|
def upload_documentation(self, metadata, doc_dir):
    """Upload documentation to the index.

    :param metadata: A :class:`Metadata` instance defining at least a name
                     and version number for the documentation to be
                     uploaded.
    :param doc_dir: The pathname of the directory which contains the
                    documentation (the directory holding ``index.html``).
    :return: The HTTP response received from PyPI upon submission of the
             request.
    """
    self.check_credentials()
    # The docs must live in a real directory with an index page at its root.
    if not os.path.isdir(doc_dir):
        raise DistlibException('not a directory: %r' % doc_dir)
    fn = os.path.join(doc_dir, 'index.html')
    if not os.path.exists(fn):
        raise DistlibException('not found: %r' % fn)
    metadata.validate()
    name, version = metadata.name, metadata.version
    # Zip the whole directory and submit it as a doc_upload action.
    zip_data = zip_dir(doc_dir).getvalue()
    fields = [(':action', 'doc_upload'), ('name', name), ('version', version)]
    files = [('content', name, zip_data)]
    return self.send_request(self.encode_request(fields, files))
|
def list_media(self, series, sort=META.SORT_DESC, limit=META.MAX_MEDIA, offset=0):
    """List media for a given series or collection

    @param crunchyroll.models.Series series  the series to search for
    @param str sort      ordering of the results; only META.SORT_DESC is
                         known to work
    @param int limit     limit size of results
    @param int offset    start results from this index, for pagination
    @return list<crunchyroll.models.Media>
    """
    query = dict(sort=sort, offset=offset, limit=limit)
    # Series-specific filter keys take precedence on any collision.
    query.update(self._get_series_query_dict(series))
    return self._android_api.list_media(**query)
|
def new(cls, chart_type, chart_data, package):
    """Return a new |ChartPart| instance added to *package* containing
    a chart of *chart_type* and depicting *chart_data*."""
    # Serialize the chart XML, then allocate the next available partname.
    blob = chart_data.xml_bytes(chart_type)
    partname = package.next_partname(cls.partname_template)
    part = cls.load(partname, CT.DML_CHART, blob, package)
    # Attach the backing Excel workbook holding the chart's data.
    part.chart_workbook.update_from_xlsx_blob(chart_data.xlsx_blob)
    return part
|
def factorize_groupby_cols(self, groupby_cols):
    """factorizes all columns that are used in the groupby

    Uses cached factorization carrays on disk when available; when
    auto_cache is set it creates the cache carrays first.

    :param groupby_cols: list of column names used in the groupby
    :return: (factor_list, values_list) — per column, a carray of factor
        codes and a carray of the corresponding distinct values
    """
    # first check if the factorized arrays already exist
    # unless we need to refresh the cache
    factor_list = []
    values_list = []
    # factorize the groupby columns
    for col in groupby_cols:
        if self.auto_cache or self.cache_valid(col):
            # create factorization cache if needed
            if not self.cache_valid(col):
                self.cache_factor([col])
            # cached factor/values carrays live alongside the column rootdir
            col_rootdir = self[col].rootdir
            col_factor_rootdir = col_rootdir + '.factor'
            col_values_rootdir = col_rootdir + '.values'
            col_carray_factor = bcolz.carray(rootdir=col_factor_rootdir, mode='r')
            col_carray_values = bcolz.carray(rootdir=col_values_rootdir, mode='r')
        else:
            # no cache: factorize in memory
            col_carray_factor, values = ctable_ext.factorize(self[col])
            col_carray_values = bcolz.carray(np.fromiter(values.values(), dtype=self[col].dtype))
        factor_list.append(col_carray_factor)
        values_list.append(col_carray_values)
    return factor_list, values_list
|
def no_type_check(arg):
    """Decorator to indicate that annotations are not type hints.

    The argument must be a class or function; if it is a class, it
    applies recursively to all methods and classes defined in that class
    (but not to methods defined in its superclasses or subclasses).

    This mutates the function(s) or class(es) in place.
    """
    if isinstance(arg, type):
        own_attrs = dict(arg.__dict__)
        # Drop anything that is the class itself or one of its bases to
        # avoid infinite recursion through self-references.
        for attr_name, attr_value in arg.__dict__.items():
            if attr_value in arg.__bases__ + (arg,):
                own_attrs.pop(attr_name)
        for member in own_attrs.values():
            if isinstance(member, types.FunctionType):
                member.__no_type_check__ = True
            if isinstance(member, type):
                no_type_check(member)
    try:
        arg.__no_type_check__ = True
    except TypeError:
        # built-in classes reject attribute assignment
        pass
    return arg
|
def vis_keypoints(img, kps, kp_thresh=2, alpha=0.7):
    """Visualizes keypoints (adapted from vis_one_image).

    kps has shape (4, #keypoints) where 4 rows are (x, y, logit, prob).

    :param img: image array in OpenCV BGR layout
    :param kps: 4 x K keypoint array as described above
    :param kp_thresh: minimum score (row 2, the logit) to draw a point/limb
    :param alpha: blend weight of the keypoint overlay
    :return: image with the skeleton alpha-blended on top
    """
    dataset_keypoints = PersonKeypoints.NAMES
    kp_lines = PersonKeypoints.CONNECTIONS
    # Convert from plt 0-1 RGBA colors to 0-255 BGR colors for opencv.
    cmap = plt.get_cmap('rainbow')
    colors = [cmap(i) for i in np.linspace(0, 1, len(kp_lines) + 2)]
    colors = [(c[2] * 255, c[1] * 255, c[0] * 255) for c in colors]
    # Perform the drawing on a copy of the image, to allow for blending.
    kp_mask = np.copy(img)
    # Draw mid shoulder / mid hip first for better visualization.
    mid_shoulder = (kps[:2, dataset_keypoints.index('right_shoulder')] + kps[:2, dataset_keypoints.index('left_shoulder')]) / 2.0
    sc_mid_shoulder = np.minimum(kps[2, dataset_keypoints.index('right_shoulder')], kps[2, dataset_keypoints.index('left_shoulder')])
    mid_hip = (kps[:2, dataset_keypoints.index('right_hip')] + kps[:2, dataset_keypoints.index('left_hip')]) / 2.0
    sc_mid_hip = np.minimum(kps[2, dataset_keypoints.index('right_hip')], kps[2, dataset_keypoints.index('left_hip')])
    nose_idx = dataset_keypoints.index('nose')
    # Mid-shoulder -> nose uses the first extra color (index len(kp_lines)).
    # NOTE(review): tuple(mid_shoulder) passes float coordinates; some cv2
    # versions require integer points — confirm against the cv2 in use.
    if sc_mid_shoulder > kp_thresh and kps[2, nose_idx] > kp_thresh:
        cv2.line(kp_mask, tuple(mid_shoulder), tuple(kps[:2, nose_idx]), color=colors[len(kp_lines)], thickness=2, lineType=cv2.LINE_AA)
    # Mid-shoulder -> mid-hip uses the second extra color.
    if sc_mid_shoulder > kp_thresh and sc_mid_hip > kp_thresh:
        cv2.line(kp_mask, tuple(mid_shoulder), tuple(mid_hip), color=colors[len(kp_lines) + 1], thickness=2, lineType=cv2.LINE_AA)
    # Draw the keypoints.
    for l in range(len(kp_lines)):
        i1 = kp_lines[l][0]
        i2 = kp_lines[l][1]
        p1 = kps[0, i1], kps[1, i1]
        p2 = kps[0, i2], kps[1, i2]
        # Connect the limb only when both endpoints are confident enough.
        if kps[2, i1] > kp_thresh and kps[2, i2] > kp_thresh:
            cv2.line(kp_mask, p1, p2, color=colors[l], thickness=2, lineType=cv2.LINE_AA)
        if kps[2, i1] > kp_thresh:
            cv2.circle(kp_mask, p1, radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA)
        if kps[2, i2] > kp_thresh:
            cv2.circle(kp_mask, p2, radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA)
    # Blend the keypoints.
    return cv2.addWeighted(img, 1.0 - alpha, kp_mask, alpha, 0)
|
def start_program(self, turn_on_load=True):
    """Starts running programmed test sequence

    :param turn_on_load: when True, mark the load as on after starting
        (only the local ``load_on`` flag is updated here; presumably the
        instrument enables the load itself — TODO confirm)
    :return: None
    """
    # Build and transmit the "start program" command frame.
    self.__set_buffer_start(self.CMD_START_PROG)
    self.__set_checksum()
    self.__send_buffer()
    # Turn on Load if not on
    if turn_on_load and not self.load_on:
        self.load_on = True
|
def autocomplete_query(self, **kwargs):
    """Query the Yelp Autocomplete API.

    documentation: https://www.yelp.com/developers/documentation/v3/autocomplete

    required parameters:
        * text - search text
    """
    # The 'text' parameter is mandatory and must be non-empty.
    search_text = kwargs.get('text')
    if not search_text:
        raise ValueError('Valid text (parameter "text") must be provided.')
    return self._query(AUTOCOMPLETE_API_URL, **kwargs)
|
def server_socket(self, config):
    """:meth:`.WNetworkNativeTransportProto.server_socket` method implementation"""
    # Lazily create and bind the listening socket on first access; later
    # calls reuse the same socket object.
    if self.__server_socket is None:
        self.__server_socket = self.create_server_socket(config)
        endpoint = self.bind_socket(config).pair()
        self.__server_socket.bind(endpoint)
    return self.__server_socket
|
def get_user(self, user_id):
    """Returns the current user from the session data.

    If authenticated, this return the user object based on the user ID
    and session data.

    .. note::

        This required monkey-patching the ``contrib.auth`` middleware
        to make the ``request`` object available to the auth backend class.
    """
    # Guard clauses: no request on the backend, or a stale/mismatched id.
    if not hasattr(self, 'request'):
        return None
    session = self.request.session
    if user_id != session["user_id"]:
        return None
    token = session['token']
    endpoint = session['region_endpoint']
    services_region = session['services_region']
    return auth_user.create_user_from_token(self.request, token, endpoint, services_region)
|
def execute_ls(host_list, remote_user, remote_pass):
    '''Execute any adhoc command on the hosts.

    Runs ``ls -1`` via the ansible "command" module on every host in
    *host_list* and prints the aggregated result.
    '''
    runner = spam.ansirunner.AnsibleRunner()
    result, failed_hosts = runner.ansible_perform_operation(
        host_list=host_list, remote_user=remote_user, remote_pass=remote_pass,
        module="command", module_args="ls -1")
    # print() call form: the Python 2 print statement is a SyntaxError on Python 3
    print("Result: ", result)
|
def Sign(message, private_key):
    """Sign the message with the given private key.

    Args:
        message (str): message to be signed, as a hex string
        private_key (str): 32 byte key as a double digit hex string (e.g. having a length of 64)

    Returns:
        bytearray: the signature of the message (r || s, 32 bytes each,
        big-endian).
    """
    # SHA-256 the decoded message bytes; the hex digest is what gets signed.
    digest = hashlib.sha256(binascii.unhexlify(message)).hexdigest()
    # ECDSA raw signature; the recovery id v is discarded.
    v, r, s = bitcoin.ecdsa_raw_sign(digest, private_key)
    signature = bytearray(r.to_bytes(32, 'big')) + bytearray(s.to_bytes(32, 'big'))
    return signature
|
def Poll(generator=None, condition=None, interval=None, timeout=None):
    """Periodically calls generator function until a condition is satisfied.

    :param generator: zero-arg callable producing the value to test
    :param condition: one-arg predicate; polling stops when it is truthy
    :param interval: seconds between polls (defaults to DEFAULT_POLL_INTERVAL)
    :param timeout: give up after this many seconds (DEFAULT_POLL_TIMEOUT)
    :return: the first generated value satisfying the condition
    :raises ValueError: when generator or condition is missing
    :raises errors.PollTimeoutError: when the timeout elapses first
    """
    if not generator:
        raise ValueError("generator has to be a lambda")
    if not condition:
        raise ValueError("condition has to be a lambda")
    interval = DEFAULT_POLL_INTERVAL if interval is None else interval
    timeout = DEFAULT_POLL_TIMEOUT if timeout is None else timeout
    started = time.time()
    while True:
        value = generator()
        if condition(value):
            return value
        # A falsy timeout (0/None) disables the deadline entirely.
        if timeout and (time.time() - started) > timeout:
            raise errors.PollTimeoutError("Polling on %s timed out after %ds." % (value, timeout))
        time.sleep(interval)
|
def register(self, uri, prefix):
    '''Registers the given URI and associates it with the given prefix.

    If the URI has already been registered, this is a no-op.

    :param uri: string
    :param prefix: string
    :raises KeyError: if *uri* is not a valid schema URI
    :raises ValueError: if *prefix* is not a valid prefix
    '''
    if not is_valid_schema_uri(uri):
        raise KeyError('cannot register invalid URI {} (prefix {})'.format(uri, prefix))
    if not is_valid_prefix(prefix):
        # Bug fix: the message used printf-style %q tokens with str.format,
        # so prefix and uri were never interpolated into the error text.
        raise ValueError('cannot register invalid prefix {} for URI {}'.format(prefix, uri))
    # First registration wins; re-registering the same URI is a no-op.
    if self._uri_to_prefix.get(uri) is None:
        self._uri_to_prefix[uri] = prefix
|
def update_radii(self, radii):
    '''Update the radii inplace'''
    self.radii = np.array(radii, dtype='float32')
    # Regenerate per-primitive radii and push them into the vertex buffer,
    # then ask the widget to redraw.
    self._radii_vbo.set_data(self._gen_radii(self.radii))
    self.widget.update()
|
def zone_compare(timezone):
    '''Compares the given timezone name with the system timezone name.

    Checks the hash sum between the given timezone, and the one set in
    /etc/localtime. Returns True if names and hash sums match, and False if not.
    Mostly useful for running state checks.

    .. versionchanged:: 2016.3.0

    .. note::

        On Solaris-link operating systems only a string comparison is done.

    .. versionchanged:: 2016.11.4

    .. note::

        On AIX operating systems only a string comparison is done.

    CLI Example:

    .. code-block:: bash

        salt '*' timezone.zone_compare 'America/Denver'
    '''
    os_family = __grains__['os_family']
    # Solaris-like and AIX systems: no usable localtime file, compare names.
    if 'Solaris' in os_family or 'AIX' in os_family:
        return timezone == get_zone()
    # FreeBSD may lack /etc/localtime entirely; fall back to name comparison.
    if 'FreeBSD' in os_family and not os.path.isfile(_get_localtime_path()):
        return timezone == get_zone()
    tzfile = _get_localtime_path()
    zonepath = _get_zone_file(timezone)
    try:
        # Byte-for-byte comparison of the installed and candidate zone files.
        return filecmp.cmp(tzfile, zonepath, shallow=False)
    except OSError as exc:
        if exc.filename == zonepath:
            raise SaltInvocationError('Can\'t find a local timezone "{0}"'.format(timezone))
        if exc.filename == tzfile:
            raise CommandExecutionError('Failed to read {0} to determine current timezone: {1}'.format(tzfile, exc.strerror))
        raise
|
def _save_cfg_packages(self, data):
    '''Save configuration packages. (NG)

    :param data: mapping of package name -> iterable of config file paths
    :return: None
    '''
    # Package ids come from enumeration order; config ids increase globally
    # across all packages.
    cfg_id = 0
    for package_id, (package_name, config_paths) in enumerate(data.items()):
        pkg = Package()
        pkg.id = package_id
        pkg.name = package_name
        self.db.store(pkg)
        for config_path in config_paths:
            cfg = PackageCfgFile()
            cfg.id = cfg_id
            cfg.pkgid = package_id
            cfg.path = config_path
            self.db.store(cfg)
            cfg_id += 1
|
def sample_rollout_single_env(self, rollout_length):
    """Return indexes of next sample"""
    if self.current_size < self.buffer_capacity:
        # Buffer not yet full: sample uniformly, leaving room for the rollout.
        if rollout_length + 1 > self.current_size:
            raise VelException("Not enough elements in the buffer to sample the rollout")
        # -1 because we cannot take the last one
        return np.random.choice(self.current_size - rollout_length) + rollout_length - 1
    # Full circular buffer: rollout plus frame history must fit.
    if rollout_length + self.frame_history > self.current_size:
        raise VelException("Not enough elements in the buffer to sample the rollout")
    # Indices whose history may be partially overwritten by the write cursor.
    blocked = (np.arange(self.current_idx, self.current_idx + self.frame_history + rollout_length - 1) % self.buffer_capacity)
    candidate = np.random.choice(self.buffer_capacity)
    # Rejection-sample until we land outside the overwritten window.
    while candidate in blocked:
        candidate = np.random.choice(self.buffer_capacity)
    return candidate
|
def get_privilege_set(self, hiveObject, user_name, group_names):
    """Fetch the privilege set for a hive object.

    Parameters:
     - hiveObject
     - user_name
     - group_names
    """
    # Thrift client stub: send the request, then block on the reply.
    self.send_get_privilege_set(hiveObject, user_name, group_names)
    return self.recv_get_privilege_set()
|
def parse_star_genecount_report(self, f):
    """Parse a STAR gene counts output file"""
    # Three numeric columns: unstranded, stranded/first-strand, stranded/second-strand
    summary_keys = ['N_unmapped', 'N_multimapping', 'N_noFeature', 'N_ambiguous']
    columns = ('unstranded', 'first_strand', 'second_strand')
    counts = {name: {'N_genes': 0} for name in columns}
    num_errors = 0
    num_genes = 0
    for line in f['f']:
        fields = line.split("\t")
        try:
            values = [float(fields[i]) for i in (1, 2, 3)]
            if fields[0] in summary_keys:
                # Summary rows are stored verbatim under their key.
                for col_name, value in zip(columns, values):
                    counts[col_name][fields[0]] = value
            else:
                # Gene rows accumulate into the per-column totals.
                for col_name, value in zip(columns, values):
                    counts[col_name]['N_genes'] += value
                num_genes += 1
        except IndexError:
            # Tolerate a few errors in case there is something random added at the top of the file
            num_errors += 1
            if num_errors > 10 and num_genes == 0:
                log.warning("Error parsing {}".format(f['fn']))
                return None
    if num_genes > 0:
        return {'unstranded': counts['unstranded'], 'first_strand': counts['first_strand'], 'second_strand': counts['second_strand']}
    return None
|
def _get_question_map(self, question_id):
    """get question map from questions matching question_id

    This can make sense of both Section assigned Ids or normal Question/Item Ids
    """
    # Section-assigned ids match on the Mongo _id; others on questionId.
    if question_id.get_authority() == ASSESSMENT_AUTHORITY:
        key, wanted = '_id', ObjectId(question_id.get_identifier())
    else:
        key, wanted = 'questionId', str(question_id)
    for question_map in self._my_map['questions']:
        if question_map[key] == wanted:
            return question_map
    raise errors.NotFound()
|
def _finalize ( self , dry_run = False ) :
"""Remove / compress files as requested"""
|
for rmfile in self . files . temp_files :
if dry_run :
print ( "remove %s" % rmfile )
else :
os . remove ( rmfile )
for gzfile in self . files . gzip_files :
if dry_run : # print ( " gzip % s " % gzfile )
pass
else :
os . system ( 'gzip -9 %s' % gzfile )
|
def is_subscriber(self):
    """Returns whether the user is a subscriber or not. True or False."""
    # The web-service reports subscriber status as the string "0"/"1".
    response = self._request(self.ws_prefix + ".getInfo", True)
    return _extract(response, "subscriber") == "1"
|
def share_project(project_id, usernames, read_only, share, **kwargs):
    """Share an entire project with a list of users, identifed by
    their usernames.

    The read_only flag ('Y' or 'N') must be set to 'N' to allow write
    access or sharing.  The share flag ('Y' or 'N') must be set to 'Y'
    to allow the project to be shared with other users.

    :param project_id: id of the project to share
    :param usernames: list of usernames to grant access to
    :param read_only: 'Y' grants read-only access (and forces share='N');
        anything else grants write access
    :param share: 'Y' lets grantees re-share; only the project creator may
        grant this
    :raises HydraError: if the caller is not an owner, or grants sharing
        rights without being the project creator
    """
    user_id = kwargs.get('user_id')
    proj_i = _get_project(project_id)
    # Is the sharing user allowed to share this project?
    proj_i.check_share_permission(int(user_id))
    user_id = int(user_id)
    # for/else: the else branch runs only when no owner matched the caller.
    for owner in proj_i.owners:
        if user_id == owner.user_id:
            break
    else:
        raise HydraError("Permission Denied. Cannot share project.")
    # Read-only access also strips the ability to re-share.
    if read_only == 'Y':
        write = 'N'
        share = 'N'
    else:
        write = 'Y'
    # Only the project creator may hand out the 'share' capability.
    if proj_i.created_by != user_id and share == 'Y':
        raise HydraError("Cannot share the 'sharing' ability as user %s is not" " the owner of project %s" % (user_id, project_id))
    for username in usernames:
        user_i = _get_user(username)
        proj_i.set_owner(user_i.id, write=write, share=share)
        # Sharing a project implies sharing every network inside it.
        for net_i in proj_i.networks:
            net_i.set_owner(user_i.id, write=write, share=share)
    db.DBSession.flush()
|
def close(self):
    """Release the underlying ExtAudioFile handle; safe to call twice."""
    if self.closed:
        return
    check(_coreaudio.ExtAudioFileDispose(self._obj))
    self.closed = True
|
def outputWord(self):
    """Render the report as a Word document saved to ``<outfile>.docx``.

    Builds a python-docx Document containing the title, an optional
    timestamp heading, the introduction, the numbered sections and the
    conclusion, then writes it to disk.
    """
    import docx
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    doc = docx.Document()
    # Justify all body text by default.
    doc.styles['Normal'].paragraph_format.alignment = WD_ALIGN_PARAGRAPH.JUSTIFY
    doc.add_heading(self.title, level=0)
    if self.addTime:
        from time import localtime, strftime
        doc.add_heading(strftime("%Y-%m-%d %H:%M:%S", localtime()), level=1)
    # Append introduction
    if self.p:
        doc.add_heading('Introduction', level=1)
        for p in renewliner(self.p).split('\n'):
            doc.add_paragraph(p)
    # Sections: c numbers the top-level sections starting from 1.
    c = count(1)
    # Prepare figure and table numbers before the sections that cite them
    # are rendered.
    self.listFigures(tuple())
    self.listTables(tuple())
    for section in self.sections:
        section.sectionsWord((next(c),), doc=doc)
    # Append conclusion
    if self.conclusion:
        doc.add_heading('Conclusion', level=1)
        for p in renewliner(self.conclusion).split('\n'):
            doc.add_paragraph(p)
    # Generate Word document
    doc.save(self.outfile + '.docx')
|
def post_event(event, channel=None, username=None, api_url=None, hook=None):
    '''Send an event to a Mattermost channel.

    :param channel: The channel name, either will work.
    :param username: The username of the poster.
    :param event: The event to send to the Mattermost channel.
    :param api_url: The Mattermost api url, if not specified in the configuration.
    :param hook: The Mattermost hook, if not specified in the configuration.
    :return: Boolean if message was sent successfully.
    '''
    if not api_url:
        api_url = _get_api_url()
    if not hook:
        hook = _get_hook()
    if not username:
        username = _get_username()
    if not channel:
        channel = _get_channel()
    if not event:
        # Bug fix: previously execution fell through after logging and
        # crashed below on event['tag']; bail out instead.
        log.error('message is a required option.')
        return False
    log.debug('Event: %s', event)
    log.debug('Event data: %s', event['data'])
    # Build a "key: value" line per entry in the event payload.
    message = 'tag: {0}\r\n'.format(event['tag'])
    for key, value in six.iteritems(event['data']):
        message += '{0}: {1}\r\n'.format(key, value)
    result = post_message(channel, username, message, api_url, hook)
    return bool(result)
|
def all_tables(self) -> List[str]:
    """Return the sorted names of all known tables.

    :return: table names, excluding private attributes and helper funcs
    """
    hidden = _I2B2Tables._funcs
    names = (k for k in self.__dict__ if not k.startswith("_") and k not in hidden)
    return sorted(names)
|
def setMinimum(self, minimum):
    """Set ``_minimum``.

    Args:
        minimum (int): new ``_minimum`` value.

    Raises:
        TypeError: if ``minimum`` is not an integer.
    """
    if isinstance(minimum, int):
        self._minimum = minimum
    else:
        raise TypeError("Argument is not of type int or long")
|
def vrrp_vip(self, **kwargs):
    """Set a VRRP virtual IP (VIP) on an interface.

    Args:
        int_type (str): Interface type (gigabitethernet,
            tengigabitethernet, fortygigabitethernet,
            hundredgigabitethernet, port_channel, ve).
        name (str): Interface name ('1/0/5', '1/0/10', etc).
        vrid (str): VRRPv3 group ID.
        vip (str): IPv4/IPv6 virtual IP address (CIDR form accepted; only
            the address part is sent to the device).
        rbridge_id (str): rbridge-id for the device. Only used when
            ``int_type`` is 've'. Defaults to '1'.
        callback (function): Executed upon completion; receives the
            generated ``ElementTree`` config.

    Returns:
        Return value of ``callback``.

    Raises:
        KeyError: if ``int_type``, ``name``, ``vrid``, or ``vip`` is not
            passed.
        ValueError: if ``int_type``, ``name``, ``vrid``, or ``vip`` is
            invalid.
        InvalidVlanId: if ``int_type`` is 've' and ``name`` is not a valid
            VLAN id (1-8191).

    Examples:
        >>> dev.interface.vrrp_vip(int_type='tengigabitethernet',
        ...     name='225/0/18', vrid='1', vip='10.1.1.1/24')
        >>> dev.interface.vrrp_vip(int_type='ve', name='89',
        ...     vrid='1', vip='172.16.1.2/24', rbridge_id='225')
    """
    int_type = kwargs.pop('int_type').lower()
    name = kwargs.pop('name')
    vrid = kwargs.pop('vrid')
    vip = kwargs.pop('vip')
    rbridge_id = kwargs.pop('rbridge_id', '1')
    callback = kwargs.pop('callback', self._callback)
    valid_int_types = ['gigabitethernet', 'tengigabitethernet', 'fortygigabitethernet', 'hundredgigabitethernet', 'port_channel', 've']
    # NOTE(review): `unicode` is Python-2-only; ip_interface requires a
    # text (not bytes) address.
    ipaddress = ip_interface(unicode(vip))
    vrrp_vip = None
    vrrp_args = dict(name=name, vrid=vrid, virtual_ipaddr=str(ipaddress.ip))
    method_class = self._interface
    if int_type not in valid_int_types:
        raise ValueError('`int_type` must be one of: %s' % repr(valid_int_types))
    # Select the generated-binding method name by address family.
    if ipaddress.version == 4:
        vrrp_args['version'] = '3'
        method_name = 'interface_%s_vrrp_virtual_ip_virtual_' 'ipaddr' % int_type
    elif ipaddress.version == 6:
        method_name = 'interface_%s_ipv6_vrrpv3_group_virtual_ip_' 'virtual_ipaddr' % int_type
    if int_type == 've':
        # VEs are configured under the rbridge subtree, with a slightly
        # different IPv6 binding name (no 'group_' component).
        method_name = 'rbridge_id_%s' % method_name
        if ipaddress.version == 6:
            method_name = method_name.replace('group_', '')
        method_class = self._rbridge
        vrrp_args['rbridge_id'] = rbridge_id
        if not pynos.utilities.valid_vlan_id(name):
            raise InvalidVlanId("`name` must be between `1` and `8191`")
    elif not pynos.utilities.valid_interface(int_type, name):
        raise ValueError('`name` must be in the format of x/y/z for ' 'physical interfaces or x for port channel.')
    # Resolve the binding method and build the config payload.
    vrrp_vip = getattr(method_class, method_name)
    config = vrrp_vip(**vrrp_args)
    return callback(config)
|
def to_dict(self, depth=-1, ordered=True, **kwargs):
    """Return a dict representation of this node.

    ``depth`` limits recursion into children: 0 omits them entirely and a
    negative value means unlimited. With ``ordered`` set (the default) an
    ``OrderedDict`` is produced instead of a plain ``dict``.
    """
    make = OrderedDict if ordered else dict
    out = make()
    out['name'] = self._name
    out['id'] = self._id
    if depth != 0:
        pairs = ((key, node.to_dict(depth=depth - 1)) for key, node in self._children.items())
        out['children'] = make(pairs)
    return out
|
def rn_boundary(af, b_hi):
    """R(n) ratio boundary for selecting between [b_hi - 1, b_hi].

    alpha = b + 2

    The boundary is the geometric mean of the theoretical R(n) values at
    ``b_hi`` and ``b_hi - 1``.
    """
    # Bug fix: the body previously referenced an undefined name `b`
    # (NameError at runtime); the parameter is `b_hi`.
    return np.sqrt(rn_theory(af, b_hi) * rn_theory(af, b_hi - 1))
|
def create_product(AcceptLanguage=None, Name=None, Owner=None, Description=None, Distributor=None, SupportDescription=None, SupportEmail=None, SupportUrl=None, ProductType=None, Tags=None, ProvisioningArtifactParameters=None, IdempotencyToken=None):
    """Creates a new product.

    (Auto-generated documentation stub: the body is intentionally empty.)

    See also: AWS Service Catalog API documentation.

    :example: response = client.create_product(
        AcceptLanguage='string',
        Name='string',
        Owner='string',
        Description='string',
        Distributor='string',
        SupportDescription='string',
        SupportEmail='string',
        SupportUrl='string',
        ProductType='CLOUD_FORMATION_TEMPLATE',
        Tags=[{'Key': 'string', 'Value': 'string'}],
        ProvisioningArtifactParameters={
            'Name': 'string',
            'Description': 'string',
            'Info': {'string': 'string'},
            'Type': 'CLOUD_FORMATION_TEMPLATE'
        },
        IdempotencyToken='string')

    :type AcceptLanguage: string
    :param AcceptLanguage: The language code to use for this operation.
        Supported language codes are 'en' (English), 'jp' (Japanese) and
        'zh' (Chinese). If no code is specified, 'en' is used as the
        default.
    :type Name: string
    :param Name: [REQUIRED] The name of the product.
    :type Owner: string
    :param Owner: [REQUIRED] The owner of the product.
    :type Description: string
    :param Description: The text description of the product.
    :type Distributor: string
    :param Distributor: The distributor of the product.
    :type SupportDescription: string
    :param SupportDescription: Support information about the product.
    :type SupportEmail: string
    :param SupportEmail: Contact email for product support.
    :type SupportUrl: string
    :param SupportUrl: Contact URL for product support.
    :type ProductType: string
    :param ProductType: [REQUIRED] The type of the product to create.
    :type Tags: list
    :param Tags: Tags to associate with the new product.
        (dict) -- Key/value pairs to associate with this provisioning.
        These tags are entirely discretionary and are propagated to the
        resources created in the provisioning.
        Key (string) -- [REQUIRED] The ProvisioningArtifactParameter.TagKey
        parameter from DescribeProvisioningParameters.
        Value (string) -- [REQUIRED] The desired value for this key.
    :type ProvisioningArtifactParameters: dict
    :param ProvisioningArtifactParameters: [REQUIRED] Parameters for the
        provisioning artifact.
        Name (string) -- The name assigned to the provisioning artifact
        properties.
        Description (string) -- The text description of the provisioning
        artifact properties.
        Info (dict) -- [REQUIRED] Additional information about the
        provisioning artifact properties.
        Type (string) -- The type of the provisioning artifact properties.
    :type IdempotencyToken: string
    :param IdempotencyToken: [REQUIRED] A token to disambiguate duplicate
        requests. You can create multiple resources using the same input
        in multiple requests, provided that you also specify a different
        idempotency token for each request. This field is autopopulated
        if not provided.
    :rtype: dict
    :return: {
        'ProductViewDetail': {
            'ProductViewSummary': {
                'Id': 'string',
                'ProductId': 'string',
                'Name': 'string',
                'Owner': 'string',
                'ShortDescription': 'string',
                'Type': 'CLOUD_FORMATION_TEMPLATE',
                'Distributor': 'string',
                'HasDefaultPath': True|False,
                'SupportEmail': 'string',
                'SupportDescription': 'string',
                'SupportUrl': 'string'
            },
            'Status': 'AVAILABLE'|'CREATING'|'FAILED',
            'ProductARN': 'string',
            'CreatedTime': datetime(2015, 1, 1)
        },
        'ProvisioningArtifactDetail': {
            'Id': 'string',
            'Name': 'string',
            'Description': 'string',
            'Type': 'CLOUD_FORMATION_TEMPLATE',
            'CreatedTime': datetime(2015, 1, 1)
        },
        'Tags': [{'Key': 'string', 'Value': 'string'}]
    }
    """
    # Documentation-only stub; no implementation.
    pass
|
def parent_organisations(self):
    '''The organisations this RTC belongs to.

    Queries the remote SDO organisations lazily (only on first call) and
    caches the result in ``self._parent_orgs`` under the object's mutex.

    Returns:
        list of records carrying ``sdo_id`` and ``org_id`` attributes.
    '''
    class ParentOrg:
        # Simple record pairing an owner SDO ID with an organisation ID.
        def __init__(self, sdo_id, org_id):
            self.sdo_id = sdo_id
            self.org_id = org_id
    with self._mutex:
        if not self._parent_orgs:
            for sdo in self._obj.get_organizations():
                if not sdo:
                    continue
                owner = sdo.get_owner()
                if owner:
                    # Narrow the CORBA reference to an SDO before asking
                    # for its ID.
                    sdo_id = owner._narrow(SDOPackage.SDO).get_sdo_id()
                else:
                    sdo_id = ''
                org_id = sdo.get_organization_id()
                self._parent_orgs.append(ParentOrg(sdo_id, org_id))
    return self._parent_orgs
|
def get_closest(self, lon, lat, depth=0):
    """Return the closest stored object to the given coordinates and its
    distance.

    :param lon: longitude in degrees
    :param lat: latitude in degrees
    :param depth: depth in km (default 0)
    :returns: (object, distance)
    """
    cartesian = spherical_to_cartesian(lon, lat, depth)
    distance, index = self.kdtree.query(cartesian)
    return self.objects[index], distance
|
def auto_code_block(self, node):
    """Try to automatically generate nodes for codeblock syntax.

    Parameters
    ----------
    node : nodes.literal_block
        Original codeblock node.

    Returns
    -------
    tocnode : docutils node
        The converted node(s); None if conversion is not possible.
    """
    assert isinstance(node, nodes.literal_block)
    original_node = node
    if 'language' not in node:
        return None
    self.state_machine.reset(self.document, node.parent, self.current_level)
    content = node.rawsource.split('\n')
    language = node['language']
    if language == 'math':
        # ```math fences become a math directive when enabled.
        if self.config['enable_math']:
            return self.state_machine.run_directive('math', content=content)
    elif language == 'eval_rst':
        if self.config['enable_eval_rst']:  # allow embed non section level rst
            # Parse the block content as reST inside a throwaway section
            # and splice its children back in.
            node = nodes.section()
            self.state_machine.state.nested_parse(StringList(content, source=original_node.source), 0, node=node, match_titles=True)
            return node.children[:]
    else:
        # A "language" shaped like "name:: args" is treated as an
        # arbitrary directive invocation on the block content.
        match = re.search('[ ]?[\w_-]+::.*', language)
        if match:
            parser = Parser()
            new_doc = new_document(None, self.document.settings)
            newsource = u'.. ' + match.group(0) + '\n' + node.rawsource
            parser.parse(newsource, new_doc)
            return new_doc.children[:]
        else:
            # Any other language is a plain code-block.
            return self.state_machine.run_directive('code-block', arguments=[language], content=content)
    return None
|
def stable_reverse_topological_sort(graph):
    """Yield nodes of ``graph`` in a stable reverse topological order.

    This topological sort is a **unique** permutation of the nodes such
    that an edge from u to v implies that u appears before v in the
    (forward) topological sort order; nodes are yielded in the reverse of
    that order.

    Parameters
    ----------
    graph : NetworkX digraph
        A directed graph.

    Raises
    ------
    NetworkXError
        Topological sort is defined for directed graphs only; raised if
        the graph is undirected.
    NetworkXUnfeasible
        If the graph is not a directed acyclic graph (DAG) no topological
        sort exists.

    Notes
    -----
    - Based on a description and proof in The Algorithm Design Manual
      (Skiena, Springer-Verlag, 1998).
    - Modified from the networkx 1.11 implementation to achieve
      stability, support only reverse order (allows yielding instead of
      returning a list), and drop the unused ``nbunch`` argument.
    """
    if not graph.is_directed():
        raise networkx.NetworkXError('Topological sort not defined on undirected graphs.')
    # nonrecursive version
    seen = set()
    explored = set()
    # Iterating sorted() node/successor lists is what makes this stable.
    for v in sorted(graph.nodes()):
        if v in explored:
            continue
        fringe = [v]
        # nodes yet to look at
        while fringe:
            w = fringe[-1]
            # depth first search
            if w in explored:  # already looked down this branch
                fringe.pop()
                continue
            seen.add(w)
            # mark as seen
            # Check successors for cycles and for new nodes
            new_nodes = []
            for n in sorted(graph[w]):
                if n not in explored:
                    if n in seen:  # CYCLE!! OH NOOOO!!
                        raise networkx.NetworkXUnfeasible('Graph contains a cycle.')
                    new_nodes.append(n)
            if new_nodes:  # Add new_nodes to fringe
                fringe.extend(new_nodes)
            else:  # No new nodes so w is fully explored
                explored.add(w)
                yield w
                fringe.pop()
|
def dot(*, last: bool = False, fileobj: Any = None) -> None:
    """Print a progress dot, appending a newline only on the final one.

    Useful when you want to display a progress with very little
    knowledge.

    :param last: whether this is the last dot (will insert a newline)
    """
    info(".", end="\n" if last else "", fileobj=fileobj)
|
def getInterpretation(self):
    """Return the 'interpretation' value of the previously POSTed Tropo action.

    ``self._actions`` may be either a single action dict or a list of
    action dicts; for a list, the first action's interpretation is used.

    Raises:
        KeyError: if the action has no 'interpretation' entry.
    """
    actions = self._actions
    # isinstance (vs `type(...) is list`) also accepts list subclasses,
    # and the local no longer shadows the builtin `dict`.
    if isinstance(actions, list):
        action = actions[0]
    else:
        action = actions
    return action['interpretation']
|
def _configure_manager(self):
    """Create the Manager instances used for CDN monitoring."""
    self._flavor_manager = CloudCDNFlavorManager(
        self, uri_base="flavors", resource_class=CloudCDNFlavor,
        response_key=None, plural_response_key="flavors")
    self._services_manager = CloudCDNServiceManager(
        self, uri_base="services", resource_class=CloudCDNService,
        response_key=None, plural_response_key="services")
|
def thickness(mesh, points, exterior=False, normals=None, method='max_sphere'):
    """Find the thickness of the mesh at the given points.

    Parameters
    ----------
    points : (n, 3) float, list of points in space
    exterior : bool, whether to compute the exterior thickness
        (a.k.a. reach)
    normals : (n, 3) float, normals of the mesh at the given points;
        None, compute this automatically.
    method : string, one of 'max_sphere' or 'ray'

    Returns
    -------
    thickness : (n,) float, thickness
    """
    points = np.asanyarray(points, dtype=np.float64)
    if not util.is_shape(points, (-1, 3)):
        raise ValueError('points must be (n,3)!')
    if normals is None:
        # Use the normal of the closest face for each query point.
        normals = mesh.face_normals[closest_point(mesh, points)[2]]
    else:
        normals = np.asanyarray(normals, dtype=np.float64)
        if not util.is_shape(normals, (-1, 3)):
            raise ValueError('normals must be (n,3)!')
        if len(points) != len(normals):
            raise ValueError('number of points must equal number of normals!')
    if method == 'max_sphere':
        centers, radius = max_tangent_sphere(mesh=mesh, points=points, inwards=not exterior, normals=normals)
        return radius * 2
    if method == 'ray':
        directions = normals if exterior else -normals
        return longest_ray(mesh, points, directions)
    raise ValueError('Invalid method, use "max_sphere" or "ray"')
|
def route(self, origin, message):
    '''Using the routing dictionary, dispatch a message to all subscribers.

    :param origin: name of the origin node
    :type origin: :py:class:`str`
    :param message: message to dispatch
    :type message: :py:class:`emit.message.Message` or subclass
    '''
    # Side-effect: all routes must be known before dispatching, but they
    # cannot be resolved while the object is initializing, so resolve
    # them just in time.
    self.resolve_node_modules()
    if not self.routing_enabled:
        return
    for destination in self.routes.get(origin, set()):
        self.logger.debug('routing "%s" -> "%s"', origin, destination)
        self.dispatch(origin, destination, message)
|
def append(self, key, data):
    """Append the given data to the data already stored for ``key``.

    Only data with equal dimensions (except the first) are allowed,
    since they are concatenated/stacked along the first dimension.

    Args:
        key (str): Key to store data for.
        data (numpy.ndarray): Array-like data; has to match the existing
            data's shape after the first dimension.

    Raises:
        ValueError: if the trailing dimensions do not match.

    Note:
        The container has to be opened in advance. For appending to
        existing data the HDF5 dataset has to be chunked, so it is not
        allowed to first add data via ``set``.
    """
    existing = self.get(key, mem_map=True)
    if existing is not None:
        num_existing = existing.shape[0]
        if existing.shape[1:] != data.shape[1:]:
            # Bug fix: the concatenated literals previously rendered
            # "...needs tohave..."; add the missing space.
            error_msg = ('The data to append needs to '
                         'have the same dimensions ({}).')
            raise ValueError(error_msg.format(existing.shape[1:]))
        # Grow the chunked dataset along axis 0, then write the new rows.
        existing.resize(num_existing + data.shape[0], 0)
        existing[num_existing:] = data
    else:
        # First write: create a chunked dataset with an unlimited first
        # dimension so later appends can resize it.
        max_shape = list(data.shape)
        max_shape[0] = None
        self._file.create_dataset(key, data=data, chunks=True, maxshape=max_shape)
|
def generate_trajs(P, M, N, start=None, stop=None, dt=1):
    """Generate multiple realizations of the Markov chain with transition
    matrix P.

    Parameters
    ----------
    P : (n, n) ndarray
        transition matrix
    M : int
        number of trajectories
    N : int
        trajectory length
    start : int, optional, default=None
        starting state. If not given, will sample from the stationary
        distribution of P
    stop : int or int-array-like, optional, default=None
        stopping set. If given, the trajectory will be stopped before N
        steps once a state of the stop set is reached
    dt : int
        trajectory will be saved every dt time steps. Internally, the
        dt'th power of P is taken to ensure a more efficient simulation.

    Returns
    -------
    traj_sliced : (N/dt,) ndarray
        A discrete trajectory with length N/dt
    """
    return MarkovChainSampler(P, dt=dt).trajectories(M, N, start=start, stop=stop)
|
async def hscan(self, name, cursor=0, match=None, count=None):
    """Incrementally scan key/value slices of hash ``name``.

    Also returns a cursor indicating the scan position.
    ``match`` allows for filtering the keys by pattern;
    ``count`` allows for hinting the minimum number of returns.
    """
    args = [name, cursor]
    if match is not None:
        args += [b('MATCH'), match]
    if count is not None:
        args += [b('COUNT'), count]
    return await self.execute_command('HSCAN', *args)
|
def add_event(self, event):
    """Add an event to the event file.

    Args:
        event: An `Event` protocol buffer.

    Raises:
        TypeError: if ``event`` is not an ``event_pb2.Event``.
    """
    if not isinstance(event, event_pb2.Event):
        # Bug fix: the concatenated literals previously rendered a double
        # space ("proto,  but got"); use a single space.
        raise TypeError("Expected an event_pb2.Event proto, "
                        "but got %s" % type(event))
    self._async_writer.write(event.SerializeToString())
|
def points(self):
    '''Return an unordered array with all the points in this neuron
    (soma points followed by every neurite's points), cached after the
    first computation.'''
    if self._points is None:
        collected = self.soma.points.tolist()
        for neurite in self.neurites:
            collected.extend(neurite.points.tolist())
        self._points = np.array(collected)
    return self._points
|
def decompile(input_, file_, output, format_, jar, limit, decompiler):
    """Decompile an APK and create Control Flow Graphs.

    Example:

        $ androguard resources.arsc
    """
    from androguard import session
    # Exactly one of the two file sources must be provided.
    if file_ and input_:
        print("Can not give --input and positional argument! "
              "Please use only one of them!", file=sys.stderr)
        sys.exit(1)
    if not input_ and not file_:
        print("Give one file to decode!", file=sys.stderr)
        sys.exit(1)
    fname = input_ if input_ else file_
    s = session.Session()
    with open(fname, "rb") as fd:
        s.add(fname, fd.read())
    export_apps_to_format(fname, s, output, limit, jar, decompiler, format_)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.