signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def exclude(self, col: str, val):
    """Drop every row whose value in *col* equals *val*.

    :param col: column name
    :type col: str
    :param val: value to delete
    :type val: any
    :example: ``ds.exclude("Col 1", "value")``
    """
    try:
        keep = self.df[col] != val
        self.df = self.df[keep]
    except Exception as e:
        self.err(e, "Can not exclude rows based on value " + str(val))
def read_chunks(filepath, chunk_size):
    """Generator that yields chunks from file."""
    try:
        with open(filepath, 'rb') as f:
            while True:
                block = f.read(chunk_size)
                if not block:  # EOF reached
                    break
                yield block
    except OSError as e:
        raise error.ReadError(e.errno, filepath)
def Tube(points, r=1, c="r", alpha=1, res=12):
    """Build a tube along the line defined by a set of points.

    :param r: constant radius or list of radii.
    :type r: float, list
    :param c: constant color or list of colors for each point.
    :type c: float, list

    .. hint:: |ribbon| |ribbon.py|_
        |tube| |tube.py|_
    """
    # Generate the polyline connecting all input points in order.
    ppoints = vtk.vtkPoints()
    ppoints.SetData(numpy_to_vtk(points, deep=True))
    lines = vtk.vtkCellArray()
    lines.InsertNextCell(len(points))
    for i in range(len(points)):
        lines.InsertCellPoint(i)
    polyln = vtk.vtkPolyData()
    polyln.SetPoints(ppoints)
    polyln.SetLines(lines)
    # Sweep a tube of `res` sides along the polyline.
    tuf = vtk.vtkTubeFilter()
    tuf.CappingOn()
    tuf.SetNumberOfSides(res)
    tuf.SetInputData(polyln)
    if utils.isSequence(r):
        # Per-point radii: install them as the active scalar array so the
        # filter can vary the radius by absolute scalar value.
        arr = numpy_to_vtk(np.ascontiguousarray(r), deep=True)
        arr.SetName("TubeRadius")
        polyln.GetPointData().AddArray(arr)
        polyln.GetPointData().SetActiveScalars("TubeRadius")
        tuf.SetVaryRadiusToVaryRadiusByAbsoluteScalar()
    else:
        tuf.SetRadius(r)
    usingColScals = False
    if utils.isSequence(c) and len(c) != 3:
        # Per-point colors (a plain length-3 sequence is a single RGB color).
        usingColScals = True
        cc = vtk.vtkUnsignedCharArray()
        cc.SetName("TubeColors")
        cc.SetNumberOfComponents(3)
        cc.SetNumberOfTuples(len(c))
        for i, ic in enumerate(c):
            # NOTE(review): this rebinds the parameter `r` (the radius);
            # harmless because the radius was consumed above, but the
            # local should really be renamed.
            r, g, b = colors.getColor(ic)
            cc.InsertTuple3(i, int(255 * r), int(255 * g), int(255 * b))
        polyln.GetPointData().AddArray(cc)
        c = None  # colors now come from the scalar array, not a flat color
    tuf.Update()
    polytu = tuf.GetOutput()
    actor = Actor(polytu, c=c, alpha=alpha, computeNormals=0)
    actor.phong()
    if usingColScals:
        # Color the mapper from the "TubeColors" point-data array.
        actor.mapper.SetScalarModeToUsePointFieldData()
        actor.mapper.ScalarVisibilityOn()
        actor.mapper.SelectColorArray("TubeColors")
        actor.mapper.Modified()
    # Remember the tube's end points for downstream orientation helpers.
    actor.base = np.array(points[0])
    actor.top = np.array(points[-1])
    settings.collectable_actors.append(actor)
    return actor
def interactive(self, bConfirmQuit=True, bShowBanner=True):
    """Start an interactive debugging session.

    @type bConfirmQuit: bool
    @param bConfirmQuit: Set to C{True} to ask the user for confirmation
        before closing the session, C{False} otherwise.
    @type bShowBanner: bool
    @param bShowBanner: Set to C{True} to show a banner before entering
        the session and after leaving it, C{False} otherwise.
    @warn: This will temporarily disable the user-defined event handler!

    This method returns when the user closes the session.
    """
    # NOTE(review): bShowBanner is accepted but not consulted here —
    # the banner is always printed; confirm against callers.
    bar = "-" * 79
    print('')
    print(bar)
    print("Interactive debugging session started.")
    print("Use the \"help\" command to list all available commands.")
    print("Use the \"quit\" command to close this session.")
    print(bar)
    if self.lastEvent is None:
        print('')
    console = ConsoleDebugger()
    console.confirm_quit = bConfirmQuit
    console.load_history()
    try:
        console.start_using_debugger(self)
        console.loop()
    finally:
        console.stop_using_debugger()
        console.save_history()
        print('')
        print(bar)
        print("Interactive debugging session closed.")
        print(bar)
        print('')
def set_credentials(self, username, password=None, region=None, tenant_id=None, authenticate=False):
    """Sets the username and password directly.

    Because Rackspace auth uses the api_key, make sure that any old
    values are cleared before delegating to the base implementation.
    """
    self.api_key = None
    super(RaxIdentity, self).set_credentials(
        username,
        password=password,
        region=region,
        tenant_id=tenant_id,
        authenticate=authenticate,
    )
def show_rules(cls, *names, attr=None):
    """Print algebraic rules used by :class:`create`.

    Print a summary of the algebraic rules with the given names, or all
    rules if no names are given.

    Args:
        names (str): Names of rules to show
        attr (None or str): Name of the class attribute from which to get
            the rules. Cf. :meth:`add_rule`.

    Raises:
        AttributeError: If invalid `attr`
    """
    from qnet.printing import srepr
    try:
        if attr is None:
            attr = cls._rules_attr()
        rules = getattr(cls, attr)
    except TypeError:
        # No rules attribute at all -> nothing to show.
        rules = {}
    for rule_name, rule in rules.items():
        if names and rule_name not in names:
            continue
        pattern, replacement = rule
        print(rule_name)
        print(" PATTERN:")
        print(textwrap.indent(
            textwrap.dedent(srepr(pattern, indented=True)), prefix=" " * 8))
        print(" REPLACEMENT:")
        print(textwrap.indent(
            textwrap.dedent(inspect.getsource(replacement).rstrip()),
            prefix=" " * 8))
def set_reconnect_parameters(self, interval, attempts, restore_state=True):
    """Sets the behaviour of the automatic reconnect feature.

    When a connected SK8 is disconnected unexpectedly (i.e. not by a
    user-triggered action), an automatic attempt to reconnect to the device
    can be made. If successful this will typically resume the connection
    with an interruption of only a few seconds.

    Args:
        interval (float): time in seconds between successive reconnect
            attempts (also the delay before the first attempt).
        attempts (int): number of attempts to recreate the connection.
            Zero disables the reconnection feature.
        restore_state (bool): if True, the streaming state of the device
            (e.g. IMU configuration) is re-applied after a successful
            reconnection.

    Returns:
        None
    """
    # Negative inputs are clamped to zero rather than rejected.
    self._reconnect_attempts = attempts if attempts > 0 else 0
    self._reconnect_interval = interval if interval > 0 else 0
    self._reconnect_restore_state = restore_state
def fetch_raw(self):
    """Execute the query and return by batches.

    Optional keyword arguments are passed to Query.execute(). Whether
    this is real-time or stored logs is dependent on the value of
    ``fetch_type``.

    :return: generator of dict results
    """
    for batch in super(LogQuery, self).execute():
        # Only yield batches that actually carry records.
        if 'records' in batch and batch['records']:
            yield batch['records']
def preprocess_section(self, section):
    """Preprocess the contents of *section*."""
    in_codeblock = False
    current = None
    processed = []
    for line in self._preprocess_refs(section).split('\n'):
        if line.startswith("```"):
            # Fenced code block delimiter toggles preprocessing off/on.
            in_codeblock = not in_codeblock
        if not line:
            current = None  # blank line ends the current sub-section
        elif not in_codeblock:
            line, current = self._preprocess_line(line, current)
        processed.append(line)
    section.content = '\n'.join(processed)
def _make_dataset ( cls , coords ) :
"""Construct a new dataset given the coordinates .""" | class Slice ( cls . _SliceType ) :
extra_coords = coords
Slice . __name__ = '%s.slice(%s)' % ( cls . __name__ , ', ' . join ( '%s=%r' % item for item in coords . items ( ) ) , )
return Slice |
def _focus_instance(self):
    """In instances, the ``focus()`` classmethod is replaced with this instance method."""
    if self._title:
        Debug.log(3, "Focusing app with title like ({})".format(self._title))
        window = PlatformManager.getWindowByTitle(re.escape(self._title))
        PlatformManager.focusWindow(window)
        # Launch the app first if it is not running yet.
        if self.getPID() == -1:
            self.open()
    elif self._pid and self._pid != -1:
        Debug.log(3, "Focusing app with pid ({})".format(self._pid))
        PlatformManager.focusWindow(PlatformManager.getWindowByPID(self._pid))
    return self
def get_stp_mst_detail_output_cist_hello_time(self, **kwargs):
    """Auto Generated Code"""
    # Build the <get_stp_mst_detail>/<output>/<cist>/<hello-time> tree.
    get_stp_mst_detail = ET.Element("get_stp_mst_detail")
    config = get_stp_mst_detail
    output = ET.SubElement(get_stp_mst_detail, "output")
    cist = ET.SubElement(output, "cist")
    hello_time = ET.SubElement(cist, "hello-time")
    hello_time.text = kwargs.pop('hello_time')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def _appendContour(self, contour, offset=None, **kwargs):
    """contour will be an object with a drawPoints method.
    offset will be a valid offset (x, y).
    This must return the new contour.
    Subclasses may override this method.
    """
    copy = contour.copy()
    # BUG FIX: the default offset=None previously reached moveBy(None)
    # because `None != (0, 0)` is True; guard against it explicitly.
    if offset is not None and offset != (0, 0):
        copy.moveBy(offset)
    pointPen = self.getPointPen()
    # BUG FIX: draw the (possibly shifted) copy — the original drew the
    # untouched source contour, silently discarding the moveBy above.
    copy.drawPoints(pointPen)
    return self[-1]
def ranks(self, key, value):
    """Populate the ``ranks`` key."""
    ranks_list = force_list(value.get('a'))
    return [normalize_rank(rank) for rank in ranks_list]
def force_encoding(self, encoding):
    """Set a fixed encoding; the change is emitted right away.

    From now on this buffer will not switch the code page anymore,
    although it will still keep track of the current code page.
    A falsy *encoding* re-enables automatic switching instead.
    """
    if encoding:
        self.write_with_encoding(encoding, None)
        self.disabled = True
    else:
        self.disabled = False
def nvrtcVersion(self):
    """Returns the loaded NVRTC library version as a (major, minor) tuple."""
    major, minor = c_int(), c_int()
    # The C API writes the version into the two out-parameters.
    status = self._lib.nvrtcVersion(byref(major), byref(minor))
    self._throw_on_error(status)
    return (major.value, minor.value)
def result(task_id, wait=0, cached=Conf.CACHED):
    """Return the result of the named task.

    :type task_id: str or uuid
    :param task_id: the task name or uuid
    :type wait: int
    :param wait: number of milliseconds to wait for a result
    :param bool cached: run this against the cache backend
    :return: the result object of this task
    :rtype: object
    """
    if cached:
        return result_cached(task_id, wait)
    started = time()
    # Poll until a result appears or the wait budget (ms) is exhausted.
    while True:
        res = Task.get_result(task_id)
        if res:
            return res
        if (time() - started) * 1000 >= wait >= 0:
            break
        sleep(0.01)
def info(name):
    '''Returns info dict about the specified container.

    Raises ContainerNotExists if *name* is not a known container.
    '''
    if not exists(name):
        raise ContainerNotExists("The container (%s) does not exist!" % name)
    cmd = ['lxc-info', '-n', name]
    out = subprocess.check_output(cmd).splitlines()
    info = {}
    for line in out:
        # BUG FIX: split only on the first whitespace run so values that
        # themselves contain spaces (e.g. multiple IPs) no longer raise
        # ValueError from over-unpacking.
        k, v = line.split(None, 1)
        info[k] = v
    return info
def remove_model(self, model, **kwargs):
    """Remove a 'model' from the bundle.

    :parameter str twig: twig to filter for the model
    :parameter **kwargs: any other tags to do the filter
        (except twig or context)
    """
    # Force the filter onto this model in the 'model' context.
    kwargs.update(model=model, context='model')
    self.remove_parameters_all(**kwargs)
def get_ext_param(self, ext_key, param_key):
    '''Get specific param in set of provided extension parameters.

    Returns None when the extension is missing or has no parameters;
    a KeyError still propagates for a missing *param_key* inside a
    populated extension.
    '''
    # BUG FIX: the original indexed self.extensions[ext_key] directly,
    # raising KeyError for an unknown extension despite the falsy-guard.
    ext = self.extensions.get(ext_key)
    return ext[param_key] if ext else None
def _check_children(self):
    '''Check all of the child processes are still running.'''
    # Poll once a second for as long as the engine is up.
    while self.up:
        time.sleep(1)
        for proc in self._processes:
            if not proc.is_alive():
                log.debug('%s is dead. Stopping the napalm-logs engine.', proc.description)
                self.stop_engine()
def getCodename(locale=None, providers=None):
    """codename = locale [ -Provider ]*"""
    from django.conf import settings
    # Locale: explicit arg, then FAKER_LOCALE, then LANGUAGE_CODE setting.
    locale = locale or getattr(settings, 'FAKER_LOCALE',
                               getattr(settings, 'LANGUAGE_CODE', None))
    # Providers: explicit arg, then FAKER_PROVIDERS setting.
    providers = providers or getattr(settings, 'FAKER_PROVIDERS', None)
    name = locale or 'default'
    if providers:
        name = "-".join([name] + sorted(providers))
    return name
def record_evaluation(eval_result):
    """Create a callback that records the evaluation history into ``eval_result``.

    Parameters
    ----------
    eval_result : dict
        A dictionary to store the evaluation results.

    Returns
    -------
    callback : function
        The callback that records the evaluation history into the
        passed dictionary.
    """
    if not isinstance(eval_result, dict):
        raise TypeError('Eval_result should be a dictionary')
    eval_result.clear()

    def _init(env):
        # One metric-name -> values sub-dict per dataset.
        for entry in env.evaluation_result_list:
            eval_result.setdefault(entry[0], collections.defaultdict(list))

    def _callback(env):
        if not eval_result:
            _init(env)
        for data_name, eval_name, value, _ in env.evaluation_result_list:
            eval_result[data_name][eval_name].append(value)

    _callback.order = 20
    return _callback
def write(self):
    """Writes generated presentation code into the destination file."""
    html = self.render()
    if self.file_type == 'pdf':
        self.write_pdf(html)
        return
    with codecs.open(self.destination_file, 'w', encoding='utf_8') as outfile:
        outfile.write(html)
def _arrayize_vectorized_indexer(indexer, shape):
    """Return an identical vindex but slices are replaced by arrays."""
    slices = [v for v in indexer.tuple if isinstance(v, slice)]
    if len(slices) == 0:
        # Already fully array-based; nothing to convert.
        return indexer
    arrays = [v for v in indexer.tuple if isinstance(v, np.ndarray)]
    # Number of broadcast dimensions shared by the existing index arrays.
    n_dim = arrays[0].ndim if len(arrays) > 0 else 0
    i_dim = 0
    new_key = []
    for v, size in zip(indexer.tuple, shape):
        if isinstance(v, np.ndarray):
            # Append one trailing singleton axis per slice so the array
            # broadcasts against the arange-converted slices below.
            new_key.append(np.reshape(v, v.shape + (1,) * len(slices)))
        else:  # slice
            # Give this slice its own axis after the n_dim array axes,
            # singleton everywhere else.
            # NOTE(review): this rebinds the parameter `shape`; safe only
            # because zip() captured it before the loop started.
            shape = ((1,) * (n_dim + i_dim) + (-1,)
                     + (1,) * (len(slices) - i_dim - 1))
            new_key.append(np.arange(*v.indices(size)).reshape(shape))
            i_dim += 1
    return VectorizedIndexer(tuple(new_key))
def write_int8(self, value, little_endian=True):
    """Pack the value as a signed byte and write 1 byte to the stream.

    Args:
        value: the signed byte value to write.
        little_endian (bool): specify the endianness. (Default) Little endian.

    Returns:
        int: the number of bytes written.
    """
    endian = "<" if little_endian else ">"
    return self.pack('%sb' % endian, value)
def change_vlan_id(self, vlan_id):
    """Change a VLAN id for an inline interface.

    :param str vlan_id: New VLAN id. Can be in format '1-2' or a single
        numerical value. If in '1-2' format, this specifies the vlan ID
        for the first inline interface and the rightmost for the second.
    :return: None
    """
    first, second = self.nicid.split('-')
    left_intf = first.split('.')[0]
    right_intf = second.split('.')[0]
    # A single value yields vlans[0] == vlans[-1], applying it to both.
    vlans = str(vlan_id).split('-')
    self.update(nicid='{}.{}-{}.{}'.format(
        left_intf, vlans[0], right_intf, vlans[-1]))
def trace(msg):
    """Print a trace message to stderr if environment variable is set."""
    if os.environ.get('JARN_TRACE') != '1':
        return
    print('TRACE:', msg, file=sys.stderr)
def main(*argv):
    """Main driver of the program.

    argv: (username, password, baseURL, folderId, filePath).
    Uploads *filePath* into the given AGOL folder and reports the result
    (or any error) through arcpy messages.
    """
    try:
        username = str(argv[0])
        password = str(argv[1])
        baseURL = str(argv[2])
        folderId = str(argv[3])
        filePath = str(argv[4])
        # BUG FIX: the original re-initialized folderId back to None in a
        # "local variables" section after parsing it from argv, so the
        # requested folder was always ignored. Only blank input maps to
        # None (root folder) now.
        if folderId == "":
            folderId = None
        if baseURL is None or baseURL == "":
            baseURL = "https://www.arcgis.com/sharing/rest"
        sh = arcrest.AGOLTokenSecurityHandler(username=username, password=password)
        agol = arcrest.manageorg.Administration(url=baseURL, securityHandler=sh)
        usercontent = agol.content.usercontent(username)
        if isinstance(usercontent, arcrest.manageorg.administration._content.UserContent):
            pass
        res = usercontent.addItem(itemParameters=None, filePath=filePath,
                                  overwrite=True, folder=folderId)
        arcpy.SetParameterAsText(5, str(res))
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    # BUG FIX: "except FunctionError, f_e" is Python-2-only syntax.
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    # Was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit escape.
    except Exception:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
def save_links(self, file_path):
    """Saves a text file of the search result links.

    Each link is written on its own line, for example::

        http://www.google.ca
        http://www.gmail.com

    Args:
        file_path (str): Path to the text file to save links to.
    """
    with open(file_path, 'w') as out_file:
        out_file.write('\n'.join(self.links))
def read_md5(self, hex=False):
    """Calculate the md5 hash for this file.

    hex - Return the digest as hex string.
    This reads through the entire file.
    """
    digest = hashlib.md5()
    f = self.open('rb')
    try:
        # Stream in 8 KiB chunks until EOF (read() returns b'').
        for block in iter(lambda: f.read(8192), b''):
            digest.update(block)
    finally:
        f.close()
    return digest.hexdigest() if hex else digest.digest()
def init_autoreload(mode: int) -> None:
    """Load and initialize the IPython autoreload extension."""
    from IPython.extensions import autoreload
    shell = get_ipython()  # type: ignore # noqa: F821
    autoreload.load_ipython_extension(shell)
    # Invoke the %autoreload line magic with the requested mode.
    shell.magics_manager.magics["line"]["autoreload"](str(mode))
def _dump_spec(spec):
    """Dump bel specification dictionary using YAML.

    Formats this with an extra indentation for lists to make it easier
    to use code folding on the YAML version of the spec dictionary.
    """
    with open("spec.yaml", "w") as spec_file:
        yaml.dump(spec, spec_file, Dumper=MyDumper, default_flow_style=False)
def plot_histogram(hist, width=0.9, title='', xlabel=None, datetime_format="%b %Y", labels=None, color=None, alpha=None, normalize=True, percent=False, padding=0.03, num_labels=24, formatter=None, ylabel_precision=2, resolution=3, figsize=None, line_color='#C0C0C0', bg_color='white', bg_alpha=1, tight_layout=True, ylabel=None, grid='on', rotation=-60, ha='left', save_path='plot_histogram', dpi=200):
    """Plot a bar chart from np.histogram data.

    >>> plot_histogram(np.histogram([1] * 5 + [3] * 2 + list(range(20)) + [19.1]), alpha=1)  # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS
    ((array([7, 4, 2, 2, 2, 2, 2, 2, 2, 3]),
      array([ 0.  ,  1.91,  3.82,  5.73,  7.64,  9.55, 11.46, 13.37,
             15.28, 17.19, 19.1 ])),
     <matplotlib.figure.Figure at ...)
    """
    # np.histogram returns (counts, edges); normalize so his0 holds the
    # (truncated) bin edges and his1 the counts, whichever order came in.
    his0, his1 = hist[0], hist[1]
    if len(his1) == len(his0) + 1:
        his0, his1 = his1[:-1], his0
    elif len(his0) == len(his1) + 1:
        his0 = his0[:-1]
    resolution = resolution or 3
    if labels in (None, 0, 'date', 'datetime'):
        # Try to render bin edges as datetimes; fall back to plain numbers.
        try:
            labels = prettify_datetimes(['-'.join(str(val) for val in datetime_from_ordinal_float(val).timetuple()[:resolution]) for val in his0], format=datetime_format, max_nonempty_strings=num_labels)
        except ValueError:
            labels = [('{0:.' + str(resolution) + 'g}').format(val) for val in his0]
    elif labels == False:  # noqa: E712 -- False means "suppress labels", None means "auto"
        labels = [''] * len(his0)
    # NOTE(review): `basestring` makes this block Python-2-only.
    if len(labels) != len(his0) or not all(isinstance(val, basestring) for val in labels):
        labels = list(str(s) for s in labels)
        labels += [''] * (len(his0) - len(labels))
    labels = thin_string_list(labels, 50)
    fig = plt.gcf()
    if figsize and len(figsize) == 2:
        fig.set_size_inches(figsize[0], figsize[1], forward=True)
    if bg_color or bg_alpha:
        fig.set_facecolor(bg_color)
        fig.set_alpha(bg_alpha)
    # Reuse the current axes if the figure already has one.
    if not fig.axes:
        ax = fig.add_subplot(111)
    else:
        ax = fig.gca()
    color = color or 'b'
    alpha = alpha or .8
    xlabel = xlabel or ''
    # Bar width as a fraction of the narrowest bin.
    xwidth = (width or 0.9) * pd.np.min(pd.np.diff(his0))
    if not isinstance(ylabel, basestring):
        ylabel = 'Count (Number of Occurrences)'
    # NOTE(review): duplicate of the xwidth assignment above — redundant.
    xwidth = (width or 0.9) * pd.np.min(pd.np.diff(his0))
    ax.bar(his0, his1, width=xwidth, color=color, alpha=alpha)
    print(his0)
    plt.xticks([dy + padding * xwidth for dy in his0], labels, rotation=rotation, ha=ha)
    if xlabel:
        plt.xlabel(xlabel)
    if ylabel:
        plt.ylabel(ylabel)
    if title:
        plt.title(title)
    if formatter and callable(formatter):
        ax.yaxis.set_major_formatter(plt.matplotlib.ticker.FuncFormatter(formatter))
    ax.grid(grid, color=(line_color or 'gray'))
    # set all the colors and transparency values
    fig.patch.set_facecolor(bg_color)
    fig.patch.set_alpha(bg_alpha)
    ax.patch.set_alpha(bg_alpha)
    ax.patch.set_facecolor(bg_color)
    if line_color:
        for spine in ax.spines.values():
            spine.set_color(line_color)
        ax.tick_params(axis='x', colors=line_color)
        ax.tick_params(axis='y', colors=line_color)
        ax.xaxis.label.set_color(line_color)
        ax.yaxis.label.set_color(line_color)
        ax.title.set_color(line_color)
    if tight_layout:
        plt.tight_layout()
    try:  # ipython notebook overrides plt.show and doesn't have a block kwarg
        plt.show(block=False)
    except TypeError:
        plt.show()
    if save_path:
        # Never clobber an existing PNG: append --2, --3, ... as needed.
        if os.path.isfile(save_path + '.png'):
            i = 2
            save_path2 = save_path + '--{0}'.format(i)
            while os.path.isfile(save_path2 + '.png'):
                i += 1
                save_path2 = save_path + '--{0}'.format(i)
            save_path = save_path2
        plt.savefig(save_path, facecolor=fig.get_facecolor(), edgecolor='none', dpi=dpi)
    # return in standard numpy histogram format, values before bins, and bins include all fenceposts (edges)
    his0, his1 = pd.np.array(his0), pd.np.array(his1)
    his0 = np.append(his0, 2 * his0[-1] - his0[-2])
    return (his1, his0), fig
def get_callback_task(self, *args, **kwargs):
    """Returns a task for calling back this Pipeline.

    Args:
        params: Keyword argument containing a dictionary of key/value pairs
            that will be passed to the callback when it is executed.
        args, kwargs: Passed to the taskqueue.Task constructor. Use these
            arguments to set the task name (for idempotence), etc.

    Returns:
        A taskqueue.Task instance that must be enqueued by the caller.
    """
    # BUG FIX: `async` is a reserved keyword from Python 3.7, so the
    # original `self.async` no longer parses; fetch it dynamically.
    if not getattr(self, 'async'):
        raise UnexpectedPipelineError(
            'May only call get_callback_task() method for asynchronous pipelines.')
    params = kwargs.get('params', {})
    kwargs['params'] = params
    params['pipeline_id'] = self._pipeline_key.name()
    kwargs['url'] = self.base_path + '/callback'
    kwargs['method'] = 'POST'
    return taskqueue.Task(*args, **kwargs)
def mkfs(device, label=None, ssize=None, noforce=None, bso=None, gmo=None, ino=None, lso=None, rso=None, nmo=None, dso=None):
    '''
    Create a file system on the specified device. By default wipes out with force.

    General options:

    * **label**: Specify volume label.
    * **ssize**: Specify the fundamental sector size of the filesystem.
    * **noforce**: Do not force create filesystem, if disk is already formatted.

    Filesystem geometry options:

    * **bso**: Block size options.
    * **gmo**: Global metadata options.
    * **dso**: Data section options. These options specify the location, size,
      and other parameters of the data section of the filesystem.
    * **ino**: Inode options to specify the inode size of the filesystem, and other inode allocation parameters.
    * **lso**: Log section options.
    * **nmo**: Naming options.
    * **rso**: Realtime section options.

    See the ``mkfs.xfs(8)`` manpage for a more complete description of corresponding options description.

    CLI Example:

    .. code-block:: bash

        salt '*' xfs.mkfs /dev/sda1
        salt '*' xfs.mkfs /dev/sda1 dso='su=32k,sw=6' noforce=True
        salt '*' xfs.mkfs /dev/sda1 dso='su=32k,sw=6' lso='logdev=/dev/sda2,size=10000b'
    '''
    # Parse a "k=v,k=v" option string into a dict; empty/None input yields {}
    # and a malformed string raises, caught and re-raised below.
    getopts = lambda args: dict(((args and ("=" in args) and args or None)) and [kw.split("=") for kw in args.split(",")] or [])
    cmd = ["mkfs.xfs"]
    if label:
        cmd.append("-L")
        cmd.append("'{0}'".format(label))
    if ssize:
        cmd.append("-s")
        cmd.append(ssize)
    # Each geometry option group maps to its mkfs.xfs switch.
    for switch, opts in [("-b", bso), ("-m", gmo), ("-n", nmo), ("-i", ino), ("-d", dso), ("-l", lso), ("-r", rso)]:
        try:
            if getopts(opts):
                cmd.append(switch)
                cmd.append(opts)
        except Exception:
            raise CommandExecutionError("Wrong parameters \"{0}\" for option \"{1}\"".format(opts, switch))
    if not noforce:
        cmd.append("-f")  # wipe any existing filesystem by default
    cmd.append(device)
    cmd = ' '.join(cmd)
    out = __salt__['cmd.run_all'](cmd)
    _verify_run(out, cmd=cmd)
    return _parse_xfs_info(out['stdout'])
def process_sub(ref, alt_str):
    """Process substitution."""
    ref_len, alt_len = len(ref), len(alt_str)
    if ref_len == alt_len:
        # Same length: single-base -> SNV, multi-base -> MNV.
        kind = record.SNV if ref_len == 1 else record.MNV
        return record.Substitution(kind, alt_str)
    if ref_len > alt_len:
        return process_sub_grow(ref, alt_str)
    return process_sub_shrink(ref, alt_str)
def placeholdit(width, height, background_color="cccccc", text_color="969696", text=None, random_background_color=False):
    """Creates a placeholder image using placehold.it

    Usage format:

        {% placeholdit [width] [height] [background_color] [text_color] [text] %}

    Example usage:

        Default image at 250 square
            {% placeholdit 250 %}
        100 wide and 200 high
            {% placeholdit 100 200 %}
        Custom background and text colors
            {% placeholdit 100 200 background_color='fff' text_color='000' %}
        Custom text
            {% placeholdit 100 200 text='Hello LA' %}
    """
    url = get_placeholdit_url(
        width, height,
        background_color=background_color,
        text_color=text_color,
        text=text,
    )
    return format_html('<img src="{}"/>', url)
def semcor_to_offset(sensekey):
    """Converts SemCor sensekey IDs to synset offset.

    >>> print semcor_to_offset('live%2:42:06::')
    02614387-v
    """
    synset = wn.lemma_from_key(sensekey).synset
    # Zero-pad the numeric offset to 8 digits and append the POS tag.
    return '%08d-%s' % (synset.offset, synset.pos)
def lock(self, session):
    """Lock the connection, ensuring that it is not busy and storing
    a weakref for the session.

    :param queries.Session session: The session to lock the connection with
    :raises: ConnectionBusyError
    """
    if self.busy:
        raise ConnectionBusyError(self)
    # Hold only a weak reference so the session can be garbage collected.
    with self._lock:
        self.used_by = weakref.ref(session)
    LOGGER.debug('Connection %s locked', self.id)
def _make_model_class(message_type, indexed_fields, **props):
    """Construct a Model subclass corresponding to a Message subclass.

    Args:
        message_type: A Message subclass.
        indexed_fields: A list of dotted and undotted field names.
        **props: Additional properties with which to seed the class.

    Returns:
        A Model subclass whose properties correspond to those fields of
        message_type whose field name is listed in indexed_fields, plus
        the properties specified by the **props arguments. For dotted
        field names, a StructuredProperty is generated using a Model
        subclass created by a recursive call.

    Raises:
        Whatever _analyze_indexed_fields() raises.
        ValueError if a field name conflicts with a name in **props.
        ValueError if a field name is not valid field of message_type.
        ValueError if an undotted field name designates a MessageField.
    """
    # Maps top-level field name -> None (plain) or list of sub-field names
    # (dotted).  NOTE(review): .iteritems() makes this Python-2-only.
    analyzed = _analyze_indexed_fields(indexed_fields)
    for field_name, sub_fields in analyzed.iteritems():
        if field_name in props:
            raise ValueError('field name %s is reserved' % field_name)
        try:
            field = message_type.field_by_name(field_name)
        except KeyError:
            raise ValueError('Message type %s has no field named %s' % (message_type.__name__, field_name))
        if isinstance(field, messages.MessageField):
            # Dotted name: recurse to build a nested model class and wrap
            # it in a StructuredProperty.
            if not sub_fields:
                raise ValueError('MessageField %s cannot be indexed, only sub-fields' % field_name)
            sub_model_class = _make_model_class(field.type, sub_fields)
            prop = model.StructuredProperty(sub_model_class, field_name, repeated=field.repeated)
        else:
            if sub_fields is not None:
                raise ValueError('Unstructured field %s cannot have indexed sub-fields' % field_name)
            if isinstance(field, messages.EnumField):
                prop = EnumProperty(field.type, field_name, repeated=field.repeated)
            elif isinstance(field, messages.BytesField):
                prop = model.BlobProperty(field_name, repeated=field.repeated, indexed=True)
            else:  # IntegerField, FloatField, BooleanField, StringField.
                prop = model.GenericProperty(field_name, repeated=field.repeated)
        props[field_name] = prop
    # Build the class through the metaclass so NDB fixes up the properties.
    return model.MetaModel('_%s__Model' % message_type.__name__, (model.Model,), props)
def show_status(self):
    """Dumps the status of the agent."""
    txt = 'Agent Status:\n'
    print(txt)
    # Report positions in start / target / current order.
    for attr in ('start_x', 'start_y', 'target_x', 'target_y',
                 'current_x', 'current_y'):
        txt += attr + " = " + str(getattr(self, attr)) + "\n"
    print(self.grd)
    return txt
def write_command(self, request_id, msg):
    """Send "insert" etc. command, returning response as a dict.

    Can raise ConnectionFailure or OperationFailure.

    :Parameters:
      - `request_id`: an int.
      - `msg`: bytes, the command message.
    """
    self.send_message(msg, 0)
    reply = self.receive_message(request_id)
    response = reply.command_response()
    # Raises NotMasterError or OperationFailure on a failed response.
    helpers._check_command_response(response)
    return response
def retry(self):
    """Retry payment on this invoice if it isn't paid, closed, or forgiven."""
    if self.paid or self.forgiven or self.closed:
        return False
    stripe_invoice = self.api_retrieve()
    # pay() throws an exception if the charge is not successful.
    updated_stripe_invoice = stripe_invoice.pay()
    type(self).sync_from_stripe_data(updated_stripe_invoice)
    return True
def _register_callback ( self , cb ) :
"""Register callbacks to the trainer .
It can only be called before : meth : ` Trainer . train ( ) ` .
Args :
cb ( Callback or [ Callback ] ) : a callback or a list of callbacks
Returns :
succeed or not""" | if isinstance ( cb , ( list , tuple ) ) :
for x in cb :
self . _register_callback ( x )
return
assert isinstance ( cb , Callback ) , cb
assert not isinstance ( self . _callbacks , Callbacks ) , "Cannot register more callbacks after trainer was setup!"
if not self . is_chief and cb . chief_only :
logger . warn ( "Callback {} is chief-only, skipped." . format ( str ( cb ) ) )
return False
else :
self . _callbacks . append ( cb )
return True |
def get_mvgd_lvla_lvgd_obj_from_id(self):
    """Build dict with mapping from LVLoadAreaDing0 id to LVLoadAreaDing0 object,
    MVGridDistrictDing0 id to MVGridDistrictDing0 object,
    LVGridDistrictDing0 id to LVGridDistrictDing0 object and
    LVStationDing0 id to LVStationDing0 object

    Returns
    -------
    :obj:`dict`
        mv_grid_districts_dict: {mv_grid_district_id: mv_grid_district_obj, ...}
    :obj:`dict`
        lv_load_areas_dict: {lv_load_area_id: lv_load_area_obj, ...}
    :obj:`dict`
        lv_grid_districts_dict: {lv_grid_district_id: lv_grid_district_obj, ...}
    :obj:`dict`
        lv_stations_dict: {lv_station_id: lv_station_obj, ...}
    """
    mv_grid_districts_dict = {}
    lv_load_areas_dict = {}
    lv_grid_districts_dict = {}
    lv_stations_dict = {}
    # Walk the MV district -> load area -> LV district hierarchy once,
    # indexing every object by its database id.
    for mvgd in self.mv_grid_districts():
        mv_grid_districts_dict[mvgd.id_db] = mvgd
        for load_area in mvgd.lv_load_areas():
            lv_load_areas_dict[load_area.id_db] = load_area
            for grid_district in load_area.lv_grid_districts():
                lv_grid_districts_dict[grid_district.id_db] = grid_district
                station = grid_district.lv_grid.station()
                lv_stations_dict[station.id_db] = station
    return (mv_grid_districts_dict, lv_load_areas_dict,
            lv_grid_districts_dict, lv_stations_dict)
def normalize_in_place(sysmeta_pyxb, reset_timestamps=False):
    """Normalize SystemMetadata PyXB object in-place.

    Args:
      sysmeta_pyxb:
        SystemMetadata PyXB object to normalize.

      reset_timestamps: bool
        ``True``: Timestamps in the SystemMetadata are set to a standard value so that
        objects that are compared after normalization register as equivalent if only
        their timestamps differ.

    Notes:
      The SystemMetadata is normalized by removing any redundant information and
      ordering all sections where there are no semantics associated with the order. The
      normalized SystemMetadata is intended to be semantically equivalent to the
      un-normalized one.
    """
    # Access rules carry no ordering semantics; rewrite them in canonical form.
    if sysmeta_pyxb.accessPolicy is not None:
        sysmeta_pyxb.accessPolicy = d1_common.wrap.access_policy.get_normalized_pyxb(sysmeta_pyxb.accessPolicy)
    # getattr() with a False default guards against documents where the
    # optional element is absent entirely.
    if getattr(sysmeta_pyxb, 'mediaType', False):
        d1_common.xml.sort_value_list_pyxb(sysmeta_pyxb.mediaType.property_)
    if getattr(sysmeta_pyxb, 'replicationPolicy', False):
        d1_common.xml.sort_value_list_pyxb(sysmeta_pyxb.replicationPolicy.preferredMemberNode)
        d1_common.xml.sort_value_list_pyxb(sysmeta_pyxb.replicationPolicy.blockedMemberNode)
    # Replica order is not significant; sort on a stable set of child values.
    d1_common.xml.sort_elements_by_child_values(sysmeta_pyxb.replica, ['replicaVerified', 'replicaMemberNode', 'replicationStatus'], )
    # Coerce to a plain bool so truthy/falsy variants compare equal.
    sysmeta_pyxb.archived = bool(sysmeta_pyxb.archived)
    if reset_timestamps:
        # Use the Unix epoch as the standard "don't care" timestamp.
        epoch_dt = datetime.datetime(1970, 1, 1, tzinfo=d1_common.date_time.UTC())
        sysmeta_pyxb.dateUploaded = epoch_dt
        sysmeta_pyxb.dateSysMetadataModified = epoch_dt
        for replica_pyxb in getattr(sysmeta_pyxb, 'replica', []):
            replica_pyxb.replicaVerified = epoch_dt
    else:
        # Round timestamps so sub-resolution differences don't break equality.
        sysmeta_pyxb.dateUploaded = d1_common.date_time.round_to_nearest(sysmeta_pyxb.dateUploaded)
        sysmeta_pyxb.dateSysMetadataModified = d1_common.date_time.round_to_nearest(sysmeta_pyxb.dateSysMetadataModified)
        for replica_pyxb in getattr(sysmeta_pyxb, 'replica', []):
            replica_pyxb.replicaVerified = d1_common.date_time.round_to_nearest(replica_pyxb.replicaVerified)
def create_resource(output_model, rtype, unique, links, existing_ids=None, id_helper=None):
    '''General-purpose routine to create a new resource in the output model, based on data provided

    output_model - Versa connection to model to be updated
    rtype - Type IRI for the new resource, set with Versa type
    unique - list of key/value pairs for determining a unique hash for the new resource
    links - list of key/value pairs for setting properties on the new resource
    id_helper - If a string, a base URL for the generated ID. If callable, a function
        used to return the entity. If None, set a default good enough for testing.
    existing_ids - set of existing IDs to not recreate, or None, in which case a new
        resource will always be created
    '''
    # Pick the ID generator according to the type of id_helper.
    if id_helper is None:
        idg = default_idgen(None)
    elif isinstance(id_helper, str):
        idg = idgen(id_helper)
    elif isinstance(id_helper, GeneratorType):
        idg = id_helper
    else:
        # FIXME: G11N
        raise ValueError('id_helper must be string (URL), callable or None')
    ctx = context(None, None, output_model, base=None, idgen=idg,
                  existing_ids=existing_ids, extras=None)
    rid = I(materialize_entity(ctx, rtype, unique=unique))
    if existing_ids is not None:
        if rid in existing_ids:
            # Already materialized earlier; signal "not created".
            return False, rid
        existing_ids.add(rid)
    output_model.add(rid, VTYPE_REL, rtype)
    for rel, target in links:
        output_model.add(rid, rel, target)
    return True, rid
def moving_average(interval, windowsize, borders=None):
    """This is essentially a convolving operation. Several options exist for dealing with the border cases.

    * None: Here the returned signal will be smaller than the inputted interval.
    * zero_padding: Here the returned signal will be larger than the inputted interval and we will add zeros to the original interval before operating the convolution.
    * zero_padding_and_cut: Same as above only the result is truncated to be the same size as the original input.
    * copy_padding: Here the returned signal will be larger than the inputted interval and we will use the right and leftmost values for padding before operating the convolution.
    * copy_padding_and_cut: Same as above only the result is truncated to be the same size as the original input.
    * zero_stretching: Here we will compute the convolution only in the valid domain, then add zeros to the result so that the output is the same size as the input.
    * copy_stretching: Here we will compute the convolution only in the valid domain, then copy the right and leftmost values so that the output is the same size as the input.

    :raises ValueError: if *borders* is not one of the options above.
    """
    # Half the window size, rounded down (used for border padding/trimming).
    half = int(math.floor(windowsize / 2.0))
    # The normalized rectangular (boxcar) window.
    window = numpy.ones(int(windowsize)) / float(windowsize)
    # How do we deal with borders (fixed: identity comparison with None,
    # previously `borders == None`).
    if borders is None:
        return numpy.convolve(interval, window, 'valid')
    if borders == 'zero_padding':
        return numpy.convolve(interval, window, 'full')
    if borders == 'zero_padding_and_cut':
        return numpy.convolve(interval, window, 'same')
    if borders == 'copy_padding':
        # NOTE(review): list concatenation -- *interval* is expected to be a
        # plain list for the copy_padding variants; confirm callers.
        new_interval = [interval[0]] * (windowsize - 1) + interval + [interval[-1]] * (windowsize - 1)
        return numpy.convolve(new_interval, window, 'valid')
    if borders == 'copy_padding_and_cut':
        new_interval = [interval[0]] * (windowsize - 1) + interval + [interval[-1]] * (windowsize - 1)
        return numpy.convolve(new_interval, window, 'valid')[half:-half]
    if borders == 'zero_stretching':
        result = numpy.convolve(interval, window, 'valid')
        pad = numpy.zeros(half)
        return numpy.concatenate((pad, result, pad))
    if borders == 'copy_stretching':
        result = numpy.convolve(interval, window, 'valid')
        left = numpy.ones(half) * result[0]
        right = numpy.ones(half) * result[-1]
        return numpy.concatenate((left, result, right))
    # Previously an unrecognized option fell through and silently returned
    # None; fail loudly instead.
    raise ValueError("Unknown borders option: %r" % (borders,))
def attenuator_connection(self, connect=True):
    """Checks the connection to the attenuator, and attempts to connect if not connected.
    Will also set an appropriate output minimum for stimuli, if connection successful

    :returns: bool - whether there is a connection
    """
    # All modules share the connection state: either all or none are connected.
    acquisition_modules = [self.explorer, self.protocoler, self.bs_calibrator,
                           self.tone_calibrator, self.charter]
    if not connect:
        # Explicit disconnect request.
        for module in acquisition_modules:
            module.player.connect_attenuator(False)
        StimulusModel.setMinVoltage(0.0)
        return False
    if acquisition_modules[0].player.attenuator_connected():
        # Already connected; attenuation allows a small output floor.
        StimulusModel.setMinVoltage(0.005)
        return True
    # Attempt to (re-)connect every module.
    for module in acquisition_modules:
        success = module.player.connect_attenuator()
    # NOTE(review): success reflects only the last module's connect result --
    # confirm that is the intended semantics.
    if success is None:
        StimulusModel.setMinVoltage(0.0)
        return False
    StimulusModel.setMinVoltage(0.005)
    return True
def _biased_spectrum ( self , spectrum , k , convention = 'power' , unit = 'per_l' , ** kwargs ) :
"""Calculate the multitaper ( cross - ) spectrum expectation of a function
localized by arbitary windows .""" | # The equation is not modified if the in - and out - spectra are power
# or energy . However , the convention can not be l2norm , which depends
# upon the normalization of the coefficients .
if ( convention != 'power' and convention != 'energy' ) :
raise ValueError ( "convention must be 'power' or 'energy'." + "Input value was {:s}" . format ( repr ( convention ) ) )
if ( unit == 'per_l' ) :
outspectrum = _shtools . SHBiasKMask ( self . tapers , spectrum , k = k , ** kwargs )
elif ( unit == 'per_lm' ) :
degree_l = _np . arange ( len ( spectrum ) )
temp = spectrum * ( 2.0 * degree_l + 1.0 )
outspectrum = _shtools . SHBiasKMask ( self . tapers , temp , k = k , ** kwargs )
outspectrum /= ( 2.0 * degree_l + 1.0 )
else :
raise ValueError ( "unit must be 'per_l' or 'per_lm'." + "Input value was {:s}" . format ( repr ( unit ) ) )
return outspectrum |
def is_same_file(path1, path2):
    """Return True if path1 is the same file as path2.

    The reason for this dance is that samefile throws if either file doesn't
    exist.

    Args:
        path1: str or path-like.
        path2: str or path-like.

    Returns:
        bool. True if the same file, False if not.
    """
    # bool() guarantees the documented bool return even when a falsy,
    # non-bool argument (e.g. '' or None) short-circuits the chain.
    return bool(path1 and path2 and os.path.isfile(path1)
                and os.path.isfile(path2) and os.path.samefile(path1, path2))
def stub(base_class=None, **attributes):
    """creates a python class on-the-fly with the given keyword-arguments
    as class-attributes accessible with .attrname.

    The new class inherits from *base_class* (``object`` by default) and an
    *instance* of it is returned.

    Use this to mock rather than stub.
    """
    if base_class is None:
        base_class = object
    members = {
        # no-op constructor so instantiation never requires arguments
        "__init__": lambda self: None,
        # bypass any custom __new__/metaclass behavior on the base class;
        # fixed: the original forwarded keyword arguments positionally
        # (object.__new__(*args, *kw)), which would crash if any were given
        "__new__": lambda *args, **kw: object.__new__(args[0]),
        "__metaclass__": None,
    }
    members.update(attributes)
    # let's create a python class on-the-fly :)
    return type(f"{base_class.__name__}Stub", (base_class,), members)()
def cell(self, w, h=0, txt='', border=0, ln=0, align='', fill=0, link=''):
    """Output a cell.

    :param w: cell width; 0 means "stretch to the right margin"
    :param h: cell height
    :param txt: text printed inside the cell
    :param border: 0/1 for no/full border, or a string containing any of
        'L', 'T', 'R', 'B' for individual edges
    :param ln: cursor position afterwards: 0 = to the right, 1 = beginning
        of next line, 2 = below
    :param align: '' (left), 'C' (center) or 'R' (right)
    :param fill: 1 to paint the cell background
    :param link: URL or internal link identifier covering the cell text
    """
    txt = self.normalize_text(txt)
    k = self.k  # scale factor (points per user unit)
    # Trigger an automatic page break if the cell would overflow the page.
    if (self.y + h > self.page_break_trigger and not self.in_footer and self.accept_page_break()):
        # Automatic page break
        x = self.x
        ws = self.ws  # preserve word spacing across the break
        if (ws > 0):
            self.ws = 0
            self._out('0 Tw')
        self.add_page(self.cur_orientation)
        self.x = x
        if (ws > 0):
            self.ws = ws
            self._out(sprintf('%.3f Tw', ws * k))
    if (w == 0):
        # Zero width means "extend to the right margin".
        w = self.w - self.r_margin - self.x
    s = ''
    # Background rectangle and/or full border ('B' fill+stroke, 'f' fill,
    # 'S' stroke).
    if (fill == 1 or border == 1):
        if (fill == 1):
            if border == 1:
                op = 'B'
            else:
                op = 'f'
        else:
            op = 'S'
        s = sprintf('%.2f %.2f %.2f %.2f re %s ', self.x * k, (self.h - self.y) * k, w * k, -h * k, op)
    # Individual border edges requested as a string of 'L', 'T', 'R', 'B'.
    if (isinstance(border, basestring)):
        x = self.x
        y = self.y
        if ('L' in border):
            s += sprintf('%.2f %.2f m %.2f %.2f l S ', x * k, (self.h - y) * k, x * k, (self.h - (y + h)) * k)
        if ('T' in border):
            s += sprintf('%.2f %.2f m %.2f %.2f l S ', x * k, (self.h - y) * k, (x + w) * k, (self.h - y) * k)
        if ('R' in border):
            s += sprintf('%.2f %.2f m %.2f %.2f l S ', (x + w) * k, (self.h - y) * k, (x + w) * k, (self.h - (y + h)) * k)
        if ('B' in border):
            s += sprintf('%.2f %.2f m %.2f %.2f l S ', x * k, (self.h - (y + h)) * k, (x + w) * k, (self.h - (y + h)) * k)
    if (txt != ''):
        # Horizontal offset of the text inside the cell.
        if (align == 'R'):
            dx = w - self.c_margin - self.get_string_width(txt)
        elif (align == 'C'):
            dx = (w - self.get_string_width(txt)) / 2.0
        else:
            dx = self.c_margin
        if (self.color_flag):
            # Save graphics state and switch to the text color.
            s += 'q ' + self.text_color + ' '
        # If multibyte, Tw has no effect - do word spacing using an adjustment before each space
        if (self.ws and self.unifontsubset):
            for uni in UTF8StringToArray(txt):
                self.current_font['subset'].append(uni)
            space = self._escape(UTF8ToUTF16BE(' ', False))
            s += sprintf('BT 0 Tw %.2F %.2F Td [', (self.x + dx) * k, (self.h - (self.y + 0.5 * h + 0.3 * self.font_size)) * k)
            t = txt.split(' ')
            numt = len(t)
            # Emit each word, inserting a kerning adjustment before spaces
            # to emulate word spacing.
            for i in range(numt):
                tx = t[i]
                tx = '(' + self._escape(UTF8ToUTF16BE(tx, False)) + ')'
                s += sprintf('%s ', tx);
                if ((i + 1) < numt):
                    adj = -(self.ws * self.k) * 1000 / self.font_size_pt
                    s += sprintf('%d(%s) ', adj, space)
            s += '] TJ'
            s += ' ET'
        else:
            if (self.unifontsubset):
                txt2 = self._escape(UTF8ToUTF16BE(txt, False))
                for uni in UTF8StringToArray(txt):
                    self.current_font['subset'].append(uni)
            else:
                txt2 = self._escape(txt)
            s += sprintf('BT %.2f %.2f Td (%s) Tj ET', (self.x + dx) * k, (self.h - (self.y + .5 * h + .3 * self.font_size)) * k, txt2)
        if (self.underline):
            s += ' ' + self._dounderline(self.x + dx, self.y + .5 * h + .3 * self.font_size, txt)
        if (self.color_flag):
            # Restore the graphics state saved above.
            s += ' Q'
        if (link):
            self.link(self.x + dx, self.y + .5 * h - .5 * self.font_size, self.get_string_width(txt), self.font_size, link)
    if (s):
        self._out(s)
    self.lasth = h
    if (ln > 0):
        # Go to next line
        self.y += h
        if (ln == 1):
            self.x = self.l_margin
    else:
        self.x += w
def read_namespaced_pod_status(self, name, namespace, **kwargs):  # noqa: E501
    """read_namespaced_pod_status  # noqa: E501

    read status of the specified Pod  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.read_namespaced_pod_status(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Pod (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Pod
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper always want just the deserialized body,
    # not the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread.
        return self.read_namespaced_pod_status_with_http_info(name, namespace, **kwargs)  # noqa: E501
    data = self.read_namespaced_pod_status_with_http_info(name, namespace, **kwargs)  # noqa: E501
    return data
def remove_irrelevant_nodes(frame, options, total_time=None):
    '''Remove nodes that represent less than e.g. 1% of the output

    frame: root frame to prune (its children are modified in place);
        None is passed through unchanged.
    options: dict; ``filter_threshold`` is the minimum fraction of
        *total_time* a child must account for to be kept (default 0.01).
    total_time: denominator for the proportion; defaults to ``frame.time()``.
    Returns *frame*.
    '''
    if frame is None:
        return None
    if total_time is None:
        total_time = frame.time()
    filter_threshold = options.get('filter_threshold', 0.01)
    # Iterate over a copy: remove_from_parent() mutates frame.children, and
    # removing while iterating the live list used to skip every other child.
    for child in list(frame.children):
        proportion_of_total = child.time() / total_time
        if proportion_of_total < filter_threshold:
            # Fold the removed child's time into the parent's self time so
            # totals stay consistent.
            frame.self_time += child.time()
            child.remove_from_parent()
    for child in frame.children:
        remove_irrelevant_nodes(child, options=options, total_time=total_time)
    return frame
def _reset_model(self, response):
    """Update the fields value with the received information."""
    # pylint: disable=no-member
    # Unlock the model while we rebuild it from the response.
    self._provision_done = False
    # Drop any pending local changes.
    self._changes.clear()
    # Process the raw data from the update response.
    fields = self.process_raw_data(response)
    # Update the current model representation.
    self._set_fields(fields)
    # Lock the current model again.
    self._provision_done = True
def start_accepting_passive_svc_checks(self):
    """Enable passive service check submission (globally)

    Format of the line that triggers function call::

    START_ACCEPTING_PASSIVE_SVC_CHECKS

    :return: None
    """
    # todo: #783 create a dedicated brok for global parameters
    conf = self.my_conf
    if conf.accept_passive_service_checks:
        # Already enabled; nothing to do.
        return
    conf.modified_attributes |= DICT_MODATTR["MODATTR_PASSIVE_CHECKS_ENABLED"].value
    conf.accept_passive_service_checks = True
    conf.explode_global_conf()
    self.daemon.update_program_status()
def prj_show_path(self, ):
    """Show the dir of the project in a filebrowser

    :returns: None
    :rtype: None
    :raises: None
    """
    # Read the path from the line edit and hand it to the OS file browser.
    path = self.prj_path_le.text()
    ostool.get_interface().open_path(path)
def DICOMfile_read(self, *args, **kwargs):
    """Read a DICOM file and perform some initial parsing of tags.

    Accepts either ``file=<path>`` / ``l_tagsToUse=<list>`` keyword args, or a
    single positional arg whose first element is the file path.

    NB!
    For thread safety, class member variables
    should not be assigned since other threads
    might override/change these variables in mid-
    flight!

    :return: dict with 'status', 'inputPath', 'inputFilename',
        'outputFileStem', 'd_DICOM' and 'l_tagsToUse'.
    """
    b_status = False
    l_tags = []
    l_tagsToUse = []
    d_tagsInString = {}
    str_file = ""
    d_DICOM = {'dcm': None, 'd_dcm': {}, 'strRaw': '', 'l_tagRaw': [], 'd_json': {}, 'd_dicom': {}, 'd_dicomSimple': {}}
    for k, v in kwargs.items():
        if k == 'file':
            str_file = v
        if k == 'l_tagsToUse':
            l_tags = v
    if len(args):
        l_file = args[0]
        str_file = l_file[0]
    str_localFile = os.path.basename(str_file)
    str_path = os.path.dirname(str_file)
    try:
        d_DICOM['dcm'] = dicom.read_file(str_file)
        b_status = True
    except Exception:
        self.dp.qprint('In directory: %s' % os.getcwd(), comms='error')
        self.dp.qprint('Failed to read %s' % str_file, comms='error')
        # Fixed: previously execution fell through with dcm == None and
        # crashed on dict(None); bail out with a failure record instead.
        return {
            'status': False,
            'inputPath': str_path,
            'inputFilename': str_localFile,
            'outputFileStem': '',
            'd_DICOM': d_DICOM,
            'l_tagsToUse': l_tagsToUse,
        }
    d_DICOM['d_dcm'] = dict(d_DICOM['dcm'])
    d_DICOM['strRaw'] = str(d_DICOM['dcm'])
    d_DICOM['l_tagRaw'] = d_DICOM['dcm'].dir()
    # Use the caller's tag list when given, else every tag in the file.
    if len(l_tags):
        l_tagsToUse = l_tags
    else:
        l_tagsToUse = d_DICOM['l_tagRaw']
    # Never pull the (potentially huge) pixel payload into the tag dicts.
    if 'PixelData' in l_tagsToUse:
        l_tagsToUse.remove('PixelData')
    for key in l_tagsToUse:
        d_DICOM['d_dicom'][key] = d_DICOM['dcm'].data_element(key)
        try:
            d_DICOM['d_dicomSimple'][key] = getattr(d_DICOM['dcm'], key)
        except Exception:
            d_DICOM['d_dicomSimple'][key] = "no attribute"
        d_DICOM['d_json'][key] = str(d_DICOM['d_dicomSimple'][key])
    d_tagsInString = self.tagsInString_process(d_DICOM, self.str_outputFileStem)
    str_outputFile = d_tagsInString['str_result']
    return {'status': b_status, 'inputPath': str_path, 'inputFilename': str_localFile, 'outputFileStem': str_outputFile, 'd_DICOM': d_DICOM, 'l_tagsToUse': l_tagsToUse}
def set_setting(self, setting, value, area='1', validate_value=True):
    """Set an abode system setting to a given value."""
    setting = setting.lower()
    if setting not in CONST.ALL_SETTINGS:
        raise AbodeException(ERROR.INVALID_SETTING, CONST.ALL_SETTINGS)
    # Route the setting to the endpoint of its category and build the
    # category-specific request payload.
    if setting in CONST.PANEL_SETTINGS:
        url, data = CONST.SETTINGS_URL, self._panel_settings(setting, value, validate_value)
    elif setting in CONST.AREA_SETTINGS:
        url, data = CONST.AREAS_URL, self._area_settings(area, setting, value, validate_value)
    elif setting in CONST.SOUND_SETTINGS:
        url, data = CONST.SOUNDS_URL, self._sound_settings(area, setting, value, validate_value)
    elif setting in CONST.SIREN_SETTINGS:
        url, data = CONST.SIREN_URL, self._siren_settings(setting, value, validate_value)
    return self.send_request(method="put", url=url, data=data)
def add_info_from_hv(self):
    """Add the information we need from the old hypervisor section"""
    hv = self.hypervisor
    props = self.node['properties']
    # Router Image: only the basename is stored.
    if 'image' in hv:
        props['image'] = os.path.basename(hv['image'])
    # IDLE-PC
    if 'idlepc' in hv:
        props['idlepc'] = hv['idlepc']
    # Router RAM
    if 'ram' in hv:
        props['ram'] = hv['ram']
    # 7200 NPE
    if 'npe' in hv:
        self.device_info['npe'] = hv['npe']
    # Device Chassis
    if 'chassis' in hv:
        self.device_info['chassis'] = hv['chassis']
        if self.device_info['model'] == 'c3600':
            props['chassis'] = self.device_info['chassis']
def n_yearly_publications(self, refresh=True):
    """Number of journal publications in a given year."""
    # coverDate is an ISO-style date string; the year is the first component.
    pub_years = []
    for ab in self.get_journal_abstracts(refresh=refresh):
        pub_years.append(int(ab.coverDate.split('-')[0]))
    return Counter(pub_years)
def _set_xff(self, v, load=False):
    """Setter method for xff, mapped from YANG variable /brocade_interface_ext_rpc/get_media_detail/output/interface/xff (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_xff is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_xff() directly.
    """
    # Unwrap a typed value into its underlying YANG type, if it carries one.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value in the generated container class so YANG
        # constraints are enforced.
        t = YANGDynClass(v, base=xff.xff, is_container='container', presence=False, yang_name="xff", rest_name="xff", parent=self, choice=(u'interface-identifier', u'xff'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError({'error-string': """xff must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=xff.xff, is_container='container', presence=False, yang_name="xff", rest_name="xff", parent=self, choice=(u'interface-identifier', u'xff'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='container', is_config=True)""", })
    # Store the wrapped value and notify any registered change hook.
    self.__xff = t
    if hasattr(self, '_set'):
        self._set()
def data_from_url(self, url, apple_fix=False):
    """Download iCal data from URL.

    :param url: URL to download
    :param apple_fix: fix Apple bugs (protocol type and tzdata in iCal)
    :return: decoded (and fixed) iCal data
    """
    # Rewrite Apple-style URLs before fetching, when requested.
    target = apple_url_fix(url) if apple_fix else url
    _, content = self.http.request(target)
    if not content:
        raise ConnectionError('Could not get data from %s!' % target)
    return self.decode(content, apple_fix=apple_fix)
def save_metadata(self, phase, data_name):
    """Save metadata associated with the phase, such as the name of the pipeline, the name of the phase and the name
    of the data being fit"""
    # Hidden .metadata file lives in the phase's output directory.
    meta_path = "{}/.metadata".format(make_path(phase))
    with open(meta_path, "w+") as f:
        f.write("pipeline={}\nphase={}\ndata={}".format(
            self.pipeline_name, phase.phase_name, data_name))
def augknt(knots, order):
    """Augment knot sequence such that some boundary conditions are met.

    The first and last knot are each repeated *order* additional times,
    the standard way to clamp a B-spline at its end points.

    :param knots: non-empty sequence of knot positions
    :param order: number of times to repeat each boundary knot
    :return: 1-D numpy array of length ``len(knots) + 2 * order``
    """
    # Replaces the old side-effect list comprehensions
    # ([a.append(...) for t in range(...)]) with plain list arithmetic.
    knots = list(knots)
    return np.array([knots[0]] * order + knots + [knots[-1]] * order)
def lat_id(self, line):
    '''Return the corresponding latitude

    Args:
        line (int): Line number

    Returns:
        Corresponding latitude in degree
    '''
    if self.grid == 'WAC':
        # Projection offset -> radians, then radians -> degrees.
        lat_rad = (1 + self.LINE_PROJECTION_OFFSET - line) * self.MAP_SCALE * 1e-3 / self.A_AXIS_RADIUS
        return lat_rad * 180 / np.pi
    # Non-WAC grids: linear mapping around the center latitude.
    offset = (line - float(self.LINE_PROJECTION_OFFSET) - 1) / float(self.MAP_RESOLUTION)
    return float(self.CENTER_LATITUDE) - offset
def includeme(config):
    """Set up event subscribers."""
    from .models import (
        AuthUserMixin,
        random_uuid,
        lower_strip,
        encrypt_password,
    )
    add_proc = config.add_field_processors
    # Usernames are generated when missing and normalized to lowercase;
    # emails are normalized; passwords are stored encrypted.
    add_proc([random_uuid, lower_strip], model=AuthUserMixin, field='username')
    add_proc([lower_strip], model=AuthUserMixin, field='email')
    add_proc([encrypt_password], model=AuthUserMixin, field='password')
def sscan(self, name, cursor='0', match=None, count=10):
    """Emulate sscan."""
    def sorted_members():
        # Sorted copy gives a deterministic, consistent scan order.
        return sorted(self.smembers(name))
    return self._common_scan(sorted_members, cursor=cursor, match=match, count=count)
def syllabifyTextgrid(isleDict, tg, wordTierName, phoneTierName, skipLabelList=None, startT=None, stopT=None):
    '''Given a textgrid, syllabifies the phones in the textgrid

    skipLabelList allows you to skip labels without generating warnings
    (e.g. '', 'sp', etc.)

    The textgrid must have a word tier and a phone tier.

    Returns a textgrid with only two tiers containing syllable information
    (syllabification of the phone tier and a tier marking word-stress).
    '''
    minT = tg.minTimestamp
    maxT = tg.maxTimestamp
    wordTier = tg.tierDict[wordTierName]
    phoneTier = tg.tierDict[phoneTierName]
    if skipLabelList is None:
        skipLabelList = []
    syllableEntryList = []
    tonicSEntryList = []
    tonicPEntryList = []
    # Optionally restrict the analysis to a time window.
    if startT is not None or stopT is not None:
        if startT is None:
            startT = minT
        if stopT is None:
            stopT = maxT
        wordTier = wordTier.crop(startT, stopT, "truncated", False)
    for start, stop, word in wordTier.entryList:
        if word in skipLabelList:
            continue
        # Phones that fall entirely within this word's interval.
        subPhoneTier = phoneTier.crop(start, stop, "strict", False)
        # entry = (start, stop, phone)
        phoneList = [entry[2] for entry in subPhoneTier.entryList if entry[2] != '']
        # findBestSyllabification expects a list of pronunciations.
        phoneList = [phoneList, ]
        try:
            sylTmp = pronunciationtools.findBestSyllabification(isleDict, word, phoneList)
        except isletool.WordNotInISLE:
            print("Word ('%s') not is isle -- skipping syllabification" % word)
            continue
        except (pronunciationtools.NullPronunciationError):
            print("Word ('%s') has no provided pronunciation" % word)
            continue
        except AssertionError:
            print("Unable to syllabify '%s'" % word)
            continue
        for syllabificationResultList in sylTmp:
            # stressI: index of the stressed syllable;
            # stressJ: index of the stressed phone within that syllable.
            stressI = syllabificationResultList[0]
            stressJ = syllabificationResultList[1]
            syllableList = syllabificationResultList[2]
            stressedPhone = None
            if stressI is not None and stressJ is not None:
                stressedPhone = syllableList[stressI][stressJ]
                # Mark primary stress with the IPA stress mark.
                syllableList[stressI][stressJ] += u"ˈ"
            i = 0
            # print(syllableList)
            for k, syllable in enumerate(syllableList):
                # Create the syllable tier entry
                j = len(syllable)
                stubEntryList = subPhoneTier.entryList[i:i + j]
                i += j
                # The whole syllable was deleted
                if len(stubEntryList) == 0:
                    continue
                syllableStart = stubEntryList[0][0]
                syllableEnd = stubEntryList[-1][1]
                label = "-".join([entry[2] for entry in stubEntryList])
                syllableEntryList.append((syllableStart, syllableEnd, label))
                # Create the tonic syllable tier entry
                if k == stressI:
                    tonicSEntryList.append((syllableStart, syllableEnd, 'T'))
                # Create the tonic phone tier entry
                if k == stressI:
                    syllablePhoneTier = phoneTier.crop(syllableStart, syllableEnd, "strict", False)
                    phoneList = [entry for entry in syllablePhoneTier.entryList if entry[2] != '']
                    justPhones = [phone for _, _, phone in phoneList]
                    cvList = pronunciationtools._prepPronunciation(justPhones)
                    try:
                        tmpStressJ = cvList.index('V')
                    except ValueError:
                        # No vowel found; fall back to a syllabic consonant.
                        # NOTE(review): if none of 'r', 'n', 'l' is present
                        # either, tmpStressJ stays unbound and the next line
                        # raises -- confirm whether such input is possible.
                        for char in [u'r', u'n', u'l']:
                            if char in cvList:
                                tmpStressJ = cvList.index(char)
                                break
                    phoneStart, phoneEnd = phoneList[tmpStressJ][:2]
                    tonicPEntryList.append((phoneStart, phoneEnd, 'T'))
    # Create a textgrid with the two syllable-level tiers
    syllableTier = tgio.IntervalTier('syllable', syllableEntryList, minT, maxT)
    tonicSTier = tgio.IntervalTier('tonicSyllable', tonicSEntryList, minT, maxT)
    tonicPTier = tgio.IntervalTier('tonicVowel', tonicPEntryList, minT, maxT)
    syllableTG = tgio.Textgrid()
    syllableTG.addTier(syllableTier)
    syllableTG.addTier(tonicSTier)
    syllableTG.addTier(tonicPTier)
    return syllableTG
def face(self, index, name=None):
    """Generate Face object

    index is number or keyword to identify the face of Hex
        0 = 'w' = 'xm' = '-100' = (0 4 7 3)
        1 = 'e' = 'xp' = '100'  = (1 2 5 6)
        2 = 's' = 'ym' = '0-10' = (0 1 5 4)
        3 = 'n' = 'yp' = '010'  = (2 3 7 6)
        4 = 'b' = 'zm' = '00-1' = (0 3 2 1)
        5 = 't' = 'zp' = '001'  = (4 5 6 7)

    name is given to Face instance. If omitted, name is automatically
    generated like ('f-' + self.name + '-w')
    """
    kw_to_index = {'w': 0, 'xm': 0, '-100': 0,
                   'e': 1, 'xp': 1, '100': 1,
                   's': 2, 'ym': 2, '0-10': 2,
                   'n': 3, 'yp': 3, '010': 3,
                   'b': 4, 'zm': 4, '00-1': 4,
                   't': 5, 'zp': 5, '001': 5}
    # NOTE(review): the docstring lists face 1 as (1 2 5 6) but the table
    # uses (1, 2, 6, 5); winding order preserved as-is -- confirm.
    index_to_vertex = [(0, 4, 7, 3), (1, 2, 6, 5), (0, 1, 5, 4),
                       (2, 3, 7, 6), (0, 3, 2, 1), (4, 5, 6, 7)]
    # Fixed: entry 1 previously read 'f-{}-n' (copy-paste of the north
    # suffix); index 1 is the east face, so the default suffix is '-e'.
    index_to_defaultsuffix = ['f-{}-w', 'f-{}-e', 'f-{}-s',
                              'f-{}-n', 'f-{}-b', 'f-{}-t']
    if isinstance(index, string_types):
        index = kw_to_index[index]
    vnames = tuple([self.vnames[i] for i in index_to_vertex[index]])
    if name is None:
        name = index_to_defaultsuffix[index].format(self.name)
    return Face(vnames, name)
def healpix_to_lonlat(healpix_index, nside, dx=None, dy=None, order='ring'):
    """Convert HEALPix indices (optionally with offsets) to longitudes/latitudes.

    If no offsets (``dx`` and ``dy``) are provided, the coordinates will default
    to those at the center of the HEALPix pixels.

    Parameters
    ----------
    healpix_index : int or `~numpy.ndarray`
        HEALPix indices (as a scalar or array)
    nside : int or `~numpy.ndarray`
        Number of pixels along the side of each of the 12 top-level HEALPix tiles
    dx, dy : float or `~numpy.ndarray`, optional
        Offsets inside the HEALPix pixel, which must be in the range [0:1],
        where 0.5 is the center of the HEALPix pixels (as scalars or arrays)
    order : {'nested' | 'ring'}, optional
        Order of HEALPix pixels

    Returns
    -------
    lon : :class:`~astropy.coordinates.Longitude`
        The longitude values
    lat : :class:`~astropy.coordinates.Latitude`
        The latitude values
    """
    _validate_nside(nside)
    # Pick the C-level converter for the requested pixel ordering.
    convert = (_core.healpix_ring_to_lonlat
               if _validate_order(order) == 'ring'
               else _core.healpix_nested_to_lonlat)
    # Default offsets to the pixel center; otherwise validate range [0:1].
    if dx is not None:
        _validate_offset('x', dx)
    else:
        dx = 0.5
    if dy is not None:
        _validate_offset('y', dy)
    else:
        dy = 0.5
    nside = np.asarray(nside, dtype=np.intc)
    lon, lat = convert(healpix_index, nside, dx, dy)
    return (Longitude(lon, unit=u.rad, copy=False),
            Latitude(lat, unit=u.rad, copy=False))
def data(self):
    """Return the batch data to be sent to the ThreatConnect API.

    **Processing Order:**

    * Process groups in memory up to max batch size.
    * Process groups in shelf to max batch size.
    * Process indicators in memory up to max batch size.
    * Process indicators in shelf up to max batch size.

    This method will remove the group/indicator from memory and/or shelf.

    Returns:
        dict: ``{'group': [...], 'indicator': [...]}`` holding at most
            ``self._batch_max_chunk`` entities in total.
    """
    entity_count = 0
    data = {'group': [], 'indicator': []}
    # Groups: drain the in-memory store first, then the shelved store.
    group_data, entity_count = self.data_groups(self.groups, entity_count)
    data['group'].extend(group_data)
    if entity_count >= self._batch_max_chunk:
        return data
    group_data, entity_count = self.data_groups(self.groups_shelf, entity_count)
    data['group'].extend(group_data)
    if entity_count >= self._batch_max_chunk:
        return data
    # Indicators: same order — in-memory first, then shelved.
    indicator_data, entity_count = self.data_indicators(self.indicators, entity_count)
    data['indicator'].extend(indicator_data)
    if entity_count >= self._batch_max_chunk:
        return data
    indicator_data, entity_count = self.data_indicators(self.indicators_shelf, entity_count)
    data['indicator'].extend(indicator_data)
    # Fix: the original ended with ``if entity_count >= self._batch_max_chunk:
    # return data`` followed by ``return data`` — a dead check, since both
    # branches returned the same value.
    return data
def _configure_port_binding(self, is_provider_vlan, duplicate_type, is_native,
                            switch_ip, vlan_id, intf_type, nexus_port, vni):
    """Conditionally calls vlan and port Nexus drivers."""
    # DUPLICATE_PORT implies VLAN, VNI and port are all already configured,
    # so there is nothing to push to the Nexus.
    if duplicate_type == const.DUPLICATE_PORT:
        return
    auto_create, auto_trunk = self._gather_config_parms(
        is_provider_vlan, vlan_id)
    # A duplicate VLAN only suppresses vlan creation; trunking may still apply.
    auto_create = auto_create and duplicate_type != const.DUPLICATE_VLAN
    if auto_create and auto_trunk:
        LOG.debug("Nexus: create vlan %s and add to interface", vlan_id)
        self.driver.create_and_trunk_vlan(
            switch_ip, vlan_id, intf_type, nexus_port, vni, is_native)
    elif auto_create:
        LOG.debug("Nexus: create vlan %s", vlan_id)
        self.driver.create_vlan(switch_ip, vlan_id, vni)
    elif auto_trunk:
        LOG.debug("Nexus: trunk vlan %s", vlan_id)
        self.driver.send_enable_vlan_on_trunk_int(
            switch_ip, vlan_id, intf_type, nexus_port, is_native)
def stop(self, reason=None):
    """Shutdown the service, optionally recording a reason.

    Args:
        reason: optional human-readable explanation for the shutdown.
            Previously accepted but silently ignored; now included in the
            log message when provided.
    """
    if reason:
        self.logger.info('stopping: %s', reason)
    else:
        self.logger.info('stopping')
    # Break out of all nested pyev event-loop iterations.
    self.loop.stop(pyev.EVBREAK_ALL)
def pop(self):
    """Return a new ImmutableVector with the last item removed.

    Raises:
        IndexError: if the vector is empty.
    """
    if not self._length:
        raise IndexError()
    # Build a fresh vector sharing structure with this one, minus the tail.
    shorter = ImmutableVector()
    shorter.tree = self.tree.remove(self._length - 1)
    shorter._length = self._length - 1
    return shorter
def importRegItem(self, regItem):
    """Imports the regItem.

    Writes progress in the statusLabel while the import is in progress.
    """
    # Show which item is being imported and force a repaint so the message
    # is visible before the (potentially slow) import begins.
    self.statusLabel.setText("Importing {}...".format(regItem.fullName))
    QtWidgets.qApp.processEvents()
    # Perform the import itself; presumably failures are recorded on the
    # regItem rather than raised — TODO confirm (tryImportClass is defined
    # elsewhere).
    regItem.tryImportClass()
    # Notify the table view so the row for this item is refreshed.
    self.tableView.model().emitDataChanged(regItem)
    # Clear the status message and repaint again.
    self.statusLabel.setText("")
    QtWidgets.qApp.processEvents()
def _ReadEncodedData(self, read_size):
    """Reads encoded data from the file-like object.

    Args:
        read_size (int): number of bytes of encoded data to read.

    Returns:
        int: number of bytes of encoded data read.
    """
    encoded_data = self._file_object.read(read_size)
    read_count = len(encoded_data)
    # Append the new bytes to whatever is still buffered, then decode the
    # whole buffer; the decoder returns any trailing bytes it could not
    # consume yet, which become the new encoded buffer.
    self._encoded_data += encoded_data
    self._decoded_data, self._encoded_data = self._decoder.Decode(
        self._encoded_data)
    self._decoded_data_size = len(self._decoded_data)
    return read_count
def addNodeLabelPrefix(self, prefix=None, copy=False):
    '''Rename all nodes in the network from x to prefix_x.  If no prefix
    is given, use the name of the graph as the prefix.

    The purpose of this method is to make node names unique so that
    composing two graphs is well-defined.

    :param prefix: prefix to prepend to each node name (defaults to the
        graph name).
    :param copy: when True, return a relabeled deep copy and leave this
        topology unchanged; otherwise relabel in place and return None.
    '''
    # Fix: the ``copy`` boolean parameter shadows the stdlib ``copy``
    # module, so the original ``copy.deepcopy(self)`` raised
    # AttributeError whenever copy=True.  Import deepcopy under a
    # non-shadowed name instead.
    from copy import deepcopy
    nxgraph = Topology.__relabel_graph(self.__nxgraph, prefix)
    if copy:
        newtopo = deepcopy(self)
        # NOTE(review): this assigns ``nxgraph`` (not the private
        # ``__nxgraph``) — it looks like it relies on a property/setter;
        # confirm against the class definition.
        newtopo.nxgraph = nxgraph
        return newtopo
    else:
        # Relabel in place.
        self.__nxgraph = nxgraph
def longestNumber(self, inp):
    """Extracts the longest valid numerical description from a string.

    Not guaranteed to return a result even if some valid numerical
    description exists (i.e., method is not particularly advanced).

    Args:
        inp (str): An arbitrary string, hopefully containing a number.

    Returns:
        The number with the longest string description in input,
        or None if not found.
    """
    split = inp.split(' ')
    # Assume just a single number: track the word-index span it occupies.
    numStart = None
    numEnd = None
    for i, w in enumerate(split):
        if self.isValid(w):
            if numStart is None:
                numStart = i
            numEnd = i
        else:
            # Check for ordinal (strip a plural 's'), which would signify end.
            w = re.sub(r'(\w+)s(\b)', r'\g<1>\g<2>', w)
            if w in self.__ordinals__:
                if self.isValid(' '.join(split[numStart:i + 1])):
                    numEnd = i
                break
    # Fix: the original crashed with TypeError (``numEnd + 1`` on None)
    # when the input contained no valid number, instead of honouring the
    # documented "None if not found" contract.
    if numEnd is None:
        return None
    description = ' '.join(split[numStart:numEnd + 1])
    return self.parse(description)
def make_hash_id():
    """Compute the `datetime.now` based SHA-1 hash of a string.

    :return: Returns the sha1 hash as a string.
    :rtype: str
    """
    # Hash the current timestamp (formatted with the module-level
    # DATETIME_FORMAT) so each call yields a time-derived identifier.
    now_text = datetime.datetime.now().strftime(DATETIME_FORMAT)
    digest = hashlib.sha1(now_text.encode('utf-8'))
    return digest.hexdigest()
def delete(self, adgroup_id, keyword_ids, nick=None):
    '''xxxxx.xxxxx.keywords.delete

    Delete a batch of keywords.'''
    request = TOPRequest('xxxxx.xxxxx.keywords.delete')
    request['adgroup_id'] = adgroup_id
    request['keyword_ids'] = keyword_ids
    # The nickname is optional on this API.
    if nick is not None:
        request['nick'] = nick
    self.create(self.execute(request), models={'result': Keyword})
    return self.result
async def terminate_vm(self, preset, vm_id):
    """Discard vm in specified preset.

    :arg string preset: preset name
    :arg int vm_id: Virtual Machine id
    :return: 'OK'

    Sample response:
    ``OK``
    """
    shepherd = self.request.app.vmshepherd
    # Resolve the preset name to its managed preset object.
    target_preset = shepherd.preset_manager.get_preset(preset)
    await target_preset.iaas.terminate_vm(vm_id)
    return 'OK'
def get_relaxation_options(self):
    """Returns possible generalizations for the upper values in the taxonomy."""
    domain = self.get_domain()
    # Each upper value generalizes to its predecessors in the domain graph.
    for value in self.upper:
        yield from domain.predecessors(value)
def normalise_rows(matrix):
    """Scale every row of *matrix* to length 1.

    Normalising a zero-length row is impossible, so such rows are left
    unchanged (their norm is substituted with 1 before dividing).
    """
    norms = np.linalg.norm(matrix, axis=1)
    if not (norms > 0).all():
        # Dividing a zero row by 1 leaves it untouched.
        norms[norms == 0] = 1
    return matrix / norms[:, np.newaxis]
def return_dat(self, chan, begsam, endsam):
    """Return the data as 2D numpy.ndarray.

    Parameters
    ----------
    chan : int or list
        index (indices) of the channels to read
    begsam : int
        index of the first sample
    endsam : int
        index of the last sample

    Returns
    -------
    numpy.ndarray
        A 2d matrix, with dimension chan X samples
    """
    # Samples requested before the start of the recording cannot be read;
    # clamp to 0 and remember how many columns to NaN-pad on the left.
    if begsam < 0:
        begpad = -1 * begsam
        begsam = 0
    else:
        begpad = 0
    # Same for samples past the end of the recording (right-side padding).
    if endsam > self.n_smp:
        endpad = endsam - self.n_smp
        endsam = self.n_smp
    else:
        endpad = 0
    # Byte offsets into the file: samples are interleaved across channels,
    # each value occupying DATA_PRECISION bytes.
    first_sam = DATA_PRECISION * self.n_chan * begsam
    toread_sam = DATA_PRECISION * self.n_chan * (endsam - begsam)
    with open(join(self.filename, EEG_FILE), 'rb') as f:
        f.seek(first_sam)
        x = f.read(toread_sam)
    dat = _read_dat(x)
    # Fortran ('F') order: consecutive raw values belong to consecutive
    # channels within one sample frame.
    dat = reshape(dat, (self.n_chan, -1), 'F')
    # Select the requested channel(s) and apply the unit conversion.
    dat = self.convertion(dat[chan, :])
    # NaN-pad the out-of-range regions so the output always spans the
    # requested begsam..endsam window.
    dat = pad(dat, ((0, 0), (begpad, endpad)), mode='constant', constant_values=NaN)
    return dat
def GetDisplayNameForPathSpec(self, path_spec):
    """Retrieves the display name for a path specification.

    Args:
        path_spec (dfvfs.PathSpec): path specification.

    Returns:
        str: human readable version of the path specification.
    """
    # Delegate to the shared helper, passing this knowledge base's
    # mount path and text prepend settings.
    mount_path = self._mount_path
    text_prepend = self._text_prepend
    return path_helper.PathHelper.GetDisplayNameForPathSpec(
        path_spec, mount_path=mount_path, text_prepend=text_prepend)
def excepts(cls):
    """Return tuple of underlying exception classes to trap and wrap.

    :rtype: ``tuple`` of ``type``
    """
    # Lazily build the tuple on first use and memoize it on the class.
    if cls._excepts is None:
        cls._excepts = tuple(cls.translations)
    return cls._excepts
def add_mountains(self):
    """Carve natural-looking blocked areas (mountains) into the grid.

    Unlike the add_blocks function, which produced line-shaped walls for
    blocking path-finding agents, this uses Perlin noise so the blocked
    regions look more organic.
    """
    from noise import pnoise2
    import random
    random.seed()
    # Random frequency scale in [0.5, 1.0) spreads the noise pattern.
    octaves = (random.random() * 0.5) + 0.5
    freq = 17.0 * octaves
    height = self.grd.grid_height - 1
    width = self.grd.grid_width - 1
    for y in range(height):
        for x in range(width):
            if self.grd.get_tile(y, x) != 'X':
                continue
            # Denoise blocks of mountains: where the noise dips low,
            # replace the block tile with '#'.
            level = int(pnoise2(x / freq, y / freq, 1) * 11 + 5)
            if level < 1:
                self.grd.set_tile(y, x, '#')
def disapprovecommittee(ctx, members, account):
    """Disapprove committee member(s)"""
    # Broadcast the disapproval and print the resulting transaction.
    tx = ctx.bitshares.disapprovecommittee(members, account=account)
    print_tx(tx)
def get_redirect_args(self, request, callback):
    "Get request parameters for redirect url."
    # The provider requires an absolute callback URL.
    absolute_callback = request.build_absolute_uri(callback)
    args = {
        'client_id': self.provider.consumer_key,
        'redirect_uri': absolute_callback,
        'response_type': 'code',
    }
    # Persist the CSRF state in the session when one is issued.
    state = self.get_application_state(request, absolute_callback)
    if state is not None:
        args['state'] = state
        request.session[self.session_key] = state
    return args
def _check_worktree_support(failhard=True):
    '''Ensure that we don't try to operate on worktrees in git < 2.5.0.'''
    git_version = version(versioninfo=False)
    # Worktrees first appeared in git 2.5.0.
    if _LooseVersion(git_version) >= _LooseVersion('2.5.0'):
        return True
    if failhard:
        raise CommandExecutionError(
            'Worktrees are only supported in git 2.5.0 and newer '
            '(detected git version: ' + git_version + ')'
        )
    return False
def _on_sigalrm(self, signum, frame):
    """Respond to SIGALRM (job timeout) by updating the job file and
    killing the process."""
    # Record the timeout in the job file, then tear down the broker,
    # which terminates the process.
    self._update({
        "failed": 1,
        "finished": 1,
        "msg": "Job reached maximum time limit of %d seconds." % (self.timeout_secs,),
    })
    self._timed_out = True
    self.econtext.broker.shutdown()
def _set_conf ( self ) :
"""Set configuration parameters from the Conf object into the detector
object .
Time values are converted to samples , and amplitude values are in mV .""" | self . rr_init = 60 * self . fs / self . conf . hr_init
self . rr_max = 60 * self . fs / self . conf . hr_min
self . rr_min = 60 * self . fs / self . conf . hr_max
# Note : if qrs _ width is odd , qrs _ width = = qrs _ radius * 2 + 1
self . qrs_width = int ( self . conf . qrs_width * self . fs )
self . qrs_radius = int ( self . conf . qrs_radius * self . fs )
self . qrs_thr_init = self . conf . qrs_thr_init
self . qrs_thr_min = self . conf . qrs_thr_min
self . ref_period = int ( self . conf . ref_period * self . fs )
self . t_inspect_period = int ( self . conf . t_inspect_period * self . fs ) |
def update_loci(self):
    """Goes through and combines loci until we have one set meeting our
    overlap definition.

    Loci are partitioned by chromosome, merged chromosome-by-chromosome
    (in sorted chromosome order), then recombined into ``self.loci``.
    """
    # Create one sub-Loci container per chromosome.  Fix: the original
    # built a sorted list with one entry *per locus* and re-created the
    # (empty) container once for every duplicate chromosome name; a set
    # of unique names avoids that redundant work with identical results.
    lbc = {}
    for chrom in set(x.range.chr for x in self.loci):
        lbc[chrom] = Loci()
    for x in self.loci:
        lbc[x.range.chr].add_locus(x)
    # Merge each chromosome's loci independently, propagating settings.
    for chrom in sorted(lbc.keys()):
        if self.verbose:
            lbc[chrom].verbose = True
            sys.stderr.write(chrom + "\n")
        lbc[chrom].overhang = self.overhang
        lbc[chrom].use_direction = self.use_direction
        lbc[chrom].merge_down_loci()
    # Re-collect the merged loci in sorted chromosome order.
    self.loci = []
    for chrom in sorted(lbc.keys()):
        self.loci.extend(lbc[chrom].loci)
def predict(self, h=5):
    """Makes forecast with the estimated model.

    Parameters
    ----------
    h : int (default: 5)
        How many steps ahead would you like to forecast?

    Returns
    -------
    pd.DataFrame with predictions
    """
    if self.latent_variables.estimated is False:
        raise Exception("No latent variables estimated!")
    # Retrieve data, dates and (transformed) latent variables
    date_index = self.shift_dates(h)
    level = self.states[0][-1]
    trend = self.states[1][-1]
    # Each step adds the final trend estimate to the previous forecast.
    # Accumulation by repeated addition matches the original arithmetic
    # exactly (no float reordering).
    forecasted_values = np.zeros(h)
    previous = level
    for step in range(h):
        previous = previous + trend
        forecasted_values[step] = previous
    result = pd.DataFrame(self.link(forecasted_values))
    result.rename(columns={0: self.data_name}, inplace=True)
    result.index = date_index[-h:]
    return result
def target(self):
    """:class:`Nation`, :class:`Region`, or None: Target of a
    Liberation, Commendation, or Condemnation.  ``None`` if the
    resolution is not a Liberation, Commendation, or Condemnation.
    """
    # Only non-repeal Security Council resolutions carry a target.
    if self.council != 'Security Council' or self.category == 'Repeal':
        return None
    # The option field encodes the target, e.g. "N:ever-wandering_souls".
    entity_type, entity_name = self.option.split(':')
    entity_types = {'R': aionationstates.Region, 'N': aionationstates.Nation}
    return entity_types[entity_type](entity_name)
def addRequest(self, service, *args):
    """Adds a request to be sent to the remoting gateway."""
    # Each request gets a unique sequential response target name.
    target = '/%d' % self.request_number
    wrapper = RequestWrapper(self, target, service, *args)
    self.request_number += 1
    self.requests.append(wrapper)
    if self.logger:
        self.logger.debug('Adding request %s%r', wrapper.service, args)
    return wrapper
def literalize_string(content, is_unicode=False):
    r"""Literalize a string content.

    Wraps *content* in quote marks so the result reads as a Python string
    literal: single quotes by default, double quotes when the content
    contains single quotes, escaped single quotes when it contains both,
    and triple quotes for multi-line content.  With ``is_unicode`` the
    literal is prefixed with ``u``.

    Examples:
        >>> print(literalize_string('str'))
        'str'
        >>> print(literalize_string('\'str\''))
        "'str'"
        >>> print(literalize_string('\"\'str\'\"'))
        '"\'str\'"'
    """
    if "'" not in content:
        quote = "'"
    elif '"' not in content:
        quote = '"'
    else:
        # Both quote styles occur: fall back to single quotes and escape
        # the embedded single quotes.
        quote = "'"
        content = content.replace("'", "\\'")
    if '\n' in content:
        quote *= 3  # multi-line content needs triple quotes
    prefix = 'u' if is_unicode else ''
    return prefix + quote + content + quote
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.