signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _kill_process_type(self, process_type, allow_graceful=False,
                       check_alive=True, wait=False):
    """Kill all processes of a given type.

    If ``process_type`` is ``PROCESS_TYPE_REDIS_SERVER``, every Redis
    server is killed; any other type is expected to hold exactly one
    process.  A process that was started under valgrind raises if it
    exits with a non-zero code.

    Args:
        process_type: The type of the process to kill.
        allow_graceful (bool): Send a SIGTERM first and give the process
            one second to exit gracefully before escalating to SIGKILL.
            We usually want to do this outside of tests.
        check_alive (bool): If true, raise if the process is already dead.
        wait (bool): If true, do not return until the process has exited.

    Raises:
        Exception: If the process had already died and ``check_alive`` is
            true, or if a valgrind-run process had a non-zero exit code.
    """
    process_infos = self.all_processes[process_type]
    if process_type != ray_constants.PROCESS_TYPE_REDIS_SERVER:
        assert len(process_infos) == 1
    for process_info in process_infos:
        process = process_info.process
        # Handle the case where the process has already exited.
        if process.poll() is not None:
            if check_alive:
                raise Exception(
                    "Attempting to kill a process of type "
                    "'{}', but this process is already dead.".format(
                        process_type))
            else:
                continue
        if process_info.use_valgrind:
            process.terminate()
            process.wait()
            if process.returncode != 0:
                message = ("Valgrind detected some errors in process of "
                           "type {}. Error code {}.".format(
                               process_type, process.returncode))
                if process_info.stdout_file is not None:
                    with open(process_info.stdout_file, "r") as f:
                        message += "\nPROCESS STDOUT:\n" + f.read()
                if process_info.stderr_file is not None:
                    with open(process_info.stderr_file, "r") as f:
                        message += "\nPROCESS STDERR:\n" + f.read()
                raise Exception(message)
            continue
        if process_info.use_valgrind_profiler:
            # Give process signal to write profiler data.
            os.kill(process.pid, signal.SIGINT)
            # Wait for profiling data to be written.
            time.sleep(0.1)
        if allow_graceful:
            # Allow the process one second to exit gracefully.
            process.terminate()
            timer = threading.Timer(1, lambda process: process.kill(),
                                    [process])
            try:
                timer.start()
                process.wait()
            finally:
                timer.cancel()
            if process.poll() is not None:
                continue
        # If the process did not exit within one second, force kill it.
        process.kill()
        # The reason we usually don't call process.wait() here is that
        # there's some chance we'd end up waiting a really long time.
        if wait:
            process.wait()
    del self.all_processes[process_type]
|
def _prepare_conn_args ( self , kwargs ) :
'''Set connection arguments for remote or local connection .'''
|
kwargs [ 'connect_over_uds' ] = True
kwargs [ 'timeout' ] = kwargs . get ( 'timeout' , 60 )
kwargs [ 'cookie' ] = kwargs . get ( 'cookie' , 'admin' )
if self . _use_remote_connection ( kwargs ) :
kwargs [ 'transport' ] = kwargs . get ( 'transport' , 'https' )
if kwargs [ 'transport' ] == 'https' :
kwargs [ 'port' ] = kwargs . get ( 'port' , 443 )
else :
kwargs [ 'port' ] = kwargs . get ( 'port' , 80 )
kwargs [ 'verify' ] = kwargs . get ( 'verify' , True )
if isinstance ( kwargs [ 'verify' ] , bool ) :
kwargs [ 'verify_ssl' ] = kwargs [ 'verify' ]
else :
kwargs [ 'ca_bundle' ] = kwargs [ 'verify' ]
kwargs [ 'connect_over_uds' ] = False
return kwargs
|
def start(self):
    """Start mocking, raising ConnectionError for unmatched requests
    instead of custom requests_mock.exceptions.NoMockAddress.

    Raises:
        RuntimeError: If the mock has already been started.
    """
    if self._http_last_send is not None:
        raise RuntimeError('HttpMock has already been started')
    # 1) save requests.Session.send in self._last_send
    # 2) replace requests.Session.send with the MockerCore send function
    super(HttpMock, self).start()
    # 3) save the MockerCore send function in self._http_last_send
    # 4) replace requests.Session.send with the HttpMock send function
    self._patch_last_send()
|
def reformat_input(self, **kwargs):
    """Reformat the input and target files into every data format required
    by ``self.tasks`` and return a dict keyed by format, each entry holding
    the reformatted 'data' and 'target' files.  Also records the distinct
    formats on ``self.needed_formats``.
    """
    # Deduplicate the formats requested by the registered tasks.
    self.needed_formats = list({task_cls.data_format for task_cls in self.tasks})
    reformatted = {}
    for fmt in self.needed_formats:
        reformatted[fmt] = {
            'data': self.reformat_file(self.input_file, self.input_format, fmt),
            'target': self.reformat_file(self.target_file, self.target_format, fmt),
        }
    return reformatted
|
def doit(self, classes=None, recursive=True, **kwargs):
    r"""Write out the commutator according to its definition,
    $[\Op{A}, \Op{B}] = \Op{A}\Op{B} - \Op{B}\Op{A}$.

    See :meth:`.Expression.doit`.
    """
    return super().doit(classes, recursive, **kwargs)
|
def _make_local_question_images(self, question_dict):
    """Download every markdown image referenced in ``question_dict`` to a
    local file under exerciseimages/ and rewrite the links to point there.

    Returns a modified shallow copy; the caller's dict is not mutated.
    """
    question_dict = question_dict.copy()
    dest_path = 'exerciseimages/'
    if not os.path.exists(dest_path):
        os.mkdir(dest_path)

    def _localize_images(text):
        # Replace each studio-hosted image reference in `text` with a local
        # file path, downloading the image as a side effect.
        image_regex = re.compile(MARKDOWN_IMAGE_REGEX, flags=re.IGNORECASE)
        contentstorage_prefix = '${☣ CONTENTSTORAGE}/'
        studio_storage = 'https://studio.learningequality.org/content/storage/'
        for match in image_regex.findall(text):
            file_result = match[1]
            file_name = file_result.replace(contentstorage_prefix, '')
            # Studio shards stored files into <char0>/<char1>/ directories.
            file_url = studio_storage + file_name[0] + '/' + file_name[1] + '/' + file_name
            file_local_path = os.path.join(dest_path, file_name)
            response = requests.get(file_url)
            if response.status_code != 200:
                print('Failed for image ' + str(response.status_code) + ' >> ' + file_url)
                # NOTE(review): bails out on the first failed download,
                # leaving any remaining matches untouched — confirm intended.
                return text
            with open(file_local_path, 'wb') as local_file:
                local_file.write(response.content)
            print('saved image file', file_local_path)
            text = text.replace(file_result, file_local_path)
        return text

    # Process images in the question text itself.
    question_dict['question'] = _localize_images(question_dict['question'])
    # Process images inside each answer (stored as a JSON string).
    answers = json.loads(question_dict['answers'])
    new_answers = []
    for ans in answers:
        new_ans = ans.copy()
        new_ans['answer'] = _localize_images(new_ans['answer'])
        new_answers.append(new_ans)
    question_dict['answers'] = json.dumps(new_answers)
    # TODO: process hint images
    return question_dict
|
def sim_model(self, tmax, X0, noiseDyn=0, restart=0):
    """Simulate the model for ``tmax`` steps starting from ``X0``.

    Args:
        tmax: Number of time steps (including the initial state).
        X0: Initial state vector of length ``self.dim``.
        noiseDyn: Scale of the additive Gaussian dynamic noise.
        restart: Unused here; kept for interface compatibility.

    Returns:
        Array of shape ``(tmax, self.dim)`` holding the trajectory.
    """
    self.noiseDyn = noiseDyn
    trajectory = np.zeros((tmax, self.dim))
    trajectory[0] = X0 + noiseDyn * np.random.randn(self.dim)
    for t in range(1, tmax):
        previous = trajectory[t - 1]
        # Pick the state-update rule matching the configured model type.
        if self.modelType == 'hill':
            step = self.Xdiff_hill(previous)
        elif self.modelType == 'var':
            step = self.Xdiff_var(previous)
        # Advance one step and perturb with dynamic noise.
        trajectory[t] = previous + step + noiseDyn * np.random.randn(self.dim)
    return trajectory
|
def load_empty(cls, path: PathOrStr, fn: PathOrStr):
    "Load the state in `fn` to create an empty `LabelList` for inference."
    # Use a context manager so the pickle file handle is closed promptly;
    # the original `pickle.load(open(...))` left the handle to the GC.
    with open(Path(path) / fn, 'rb') as state_file:
        state = pickle.load(state_file)
    return cls.load_state(path, state)
|
def externalize(taskclass_or_taskobject):
    """Return an externalized version of a Task.

    Accepts either an instantiated task object or a task class.  Examples::

        class RequiringTask(luigi.Task):
            def requires(self):
                task_object = self.clone(MyTask)
                return externalize(task_object)

        @luigi.util.requires(externalize(MyTask))
        class RequiringTask(luigi.Task):
            pass

        MyTask = externalize(MyTask)
        my_task_2 = externalize(MyTask2(param='foo'))

    If you want a task class to be external from the beginning, you're
    better off inheriting :py:class:`ExternalTask` rather than
    :py:class:`Task`.  This function tries to be side-effect free by
    copying the class or object passed in and modifying the copy, so
    ``externalize(MyTask)`` on its own does nothing (as after luigi 2.4.0).
    """
    # copy.copy can't copy classes/objects with a custom metaclass on
    # Python < 3.3 (http://bugs.python.org/issue11480), hence deepcopy there.
    compatible_copy = copy.copy if six.PY3 else copy.deepcopy
    copied_value = compatible_copy(taskclass_or_taskobject)

    if copied_value is not taskclass_or_taskobject:
        # We were handed an instance; disable its run method on the copy.
        copied_value.run = None
        return copied_value

    # copy returned the very same object: assume we were given a class.
    clazz = taskclass_or_taskobject

    @_task_wraps(clazz)
    class _CopyOfClass(clazz):
        # How to copy a class: http://stackoverflow.com/a/9541120/621449
        _visible_in_registry = False

    _CopyOfClass.run = None
    return _CopyOfClass
|
def register_listener(self, listener, reading=False):
    """Add a callback invoked whenever the sensor value is updated.

    Parameters
    ----------
    listener : function
        If ``reading`` is true the callback signature is
        ``listener(katcp_sensor, reading)`` where ``katcp_sensor`` is this
        KATCPSensor instance and ``reading`` is a
        :class:`KATCPSensorReading`; otherwise it is
        ``listener(received_timestamp, timestamp, status, value)``.
    reading : bool
        Selects which of the two callback signatures is used.
    """
    # Keyed by identity, so re-registering the same callable just
    # overwrites its existing entry.
    self._listeners[hashable_identity(listener)] = (listener, reading)
    logger.debug('Register listener for {}'.format(self.name))
|
def info(cwd, targets=None, user=None, username=None, password=None, fmt='str'):
    '''Display the Subversion information from the checkout.

    cwd
        The path to the Subversion repository

    targets : None
        files, directories, and URLs to pass to the command as arguments;
        svn uses '.' by default

    user : None
        Run svn as a user other than what the minion runs as

    username : None
        Connect to the Subversion server as another user

    password : None
        Connect to the Subversion server with this password

        .. versionadded:: 0.17.0

    fmt : str
        How to format the output from info (str, xml, list, dict)

    CLI Example:

    .. code-block:: bash

        salt '*' svn.info /path/to/svn/repo
    '''
    opts = []
    if fmt == 'xml':
        opts.append('--xml')
    if targets:
        opts += salt.utils.args.shlex_split(targets)
    infos = _run_svn('info', cwd, user, username, password, opts)
    # str and xml are returned verbatim.
    if fmt in ('str', 'xml'):
        return infos
    # Entries are separated by blank lines; parse each into key/value pairs.
    info_list = [_INI_RE.findall(chunk) for chunk in infos.split('\n\n')]
    if fmt == 'list':
        return info_list
    if fmt == 'dict':
        return [dict(entry) for entry in info_list]
|
def process_command_thread(self, request):
    """Worker-thread entry point: unpack and process a single command."""
    command, data = request
    if not multi_thread_enabled():
        # Single-threaded mode: nothing to do here.
        return
    try:
        self.process_command(command, data)
    except Exception as e:
        # Log with traceback, then let the exception propagate.
        _logger.exception(str(e))
        raise
|
def cartesian_to_spherical_azimuthal(x, y):
    """Calculate the spherical-coordinate azimuthal angle from Cartesian
    coordinates.  The azimuthal angle is in [0, 2*pi].

    Parameters
    ----------
    x : {numpy.array, float}
        X-coordinate.
    y : {numpy.array, float}
        Y-coordinate.

    Returns
    -------
    phi : {numpy.array, float}
        The azimuthal angle.
    """
    # Promote a plain int y to float before handing it to arctan2
    # (presumably to avoid integer-dtype surprises — original intent
    # is not documented).
    if isinstance(y, int):
        y = float(y)
    # Map arctan2's (-pi, pi] range onto [0, 2*pi).
    return numpy.arctan2(y, x) % (2 * numpy.pi)
|
def _example_rt_data(quote_ctx):
    """Fetch intraday (RT) data for a couple of stocks and print, per tick:
    time, data status, minutes since open, current price, previous close,
    average price, volume and turnover."""
    stock_code_list = ["US.AAPL", "HK.00700"]
    # Subscribing is required before real-time data can be queried.
    ret_status, ret_data = quote_ctx.subscribe(stock_code_list, ft.SubType.RT_DATA)
    if ret_status != ft.RET_OK:
        print(ret_data)
        exit()
    for stk_code in stock_code_list:
        ret_status, ret_data = quote_ctx.get_rt_data(stk_code)
        if ret_status != ft.RET_OK:
            print(stk_code, ret_data)
            exit()
        print("%s RT_DATA" % stk_code)
        print(ret_data)
        print("\n\n")
|
def Get(self):
    """Fetch this hunt's data and wrap it in a proper Hunt object."""
    args = hunt_pb2.ApiGetHuntArgs(hunt_id=self.hunt_id)
    payload = self._context.SendRequest("GetHunt", args)
    return Hunt(data=payload, context=self._context)
|
def setup(self, host, flow_id, reason, grr_server_url, grr_username,
          grr_password, approvers=None, verify=True):
    """Initialize a GRR flow collector.

    Args:
        host: hostname of machine.
        flow_id: ID of GRR flow to retrieve.
        reason: justification for GRR access.
        grr_server_url: GRR server URL.
        grr_username: GRR username.
        grr_password: GRR password.
        approvers: list of GRR approval recipients.
        verify: boolean, whether to verify the GRR server's x509 certificate.
    """
    super(GRRFlowCollector, self).setup(
        reason, grr_server_url, grr_username, grr_password,
        approvers=approvers, verify=verify)
    self.host = host
    self.flow_id = flow_id
|
def sedfile(fpath, regexpr, repl, force=False, verbose=True, veryverbose=False):
    """Apply a regex substitution to every line of a file (like ``sed``).

    Args:
        fpath (str): file path string
        regexpr (str): pattern to search for
        repl (str): replacement text
        force (bool): actually write the changes; otherwise dry run
            (default=False)
        verbose (bool): print a diff of the changes (default=True)
        veryverbose (bool): also print the call arguments (default=False)

    Returns:
        list: ``(new_line, old_line)`` pairs for every changed line, or
        None when nothing matched.
    """
    # TODO: move to util_edit
    path, name = split(fpath)
    if veryverbose:
        print('[sedfile] fpath=%r' % fpath)
        print('[sedfile] regexpr=%r' % regexpr)
        print('[sedfile] repl=%r' % repl)
        print('[sedfile] force=%r' % force)
    import utool as ut
    file_lines = ut.readfrom(fpath, aslines=True, verbose=False)
    # Apply the substitution to every line and collect those that changed.
    new_file_lines = [re.sub(regexpr, repl, line) for line in file_lines]
    changed_lines = [(new, old)
                     for new, old in zip(new_file_lines, file_lines)
                     if new != old]
    n_changed = len(changed_lines)
    if n_changed == 0:
        return None
    rel_fpath = relpath(fpath, os.getcwd())
    print(' * %s changed %d lines in %r ' %
          (['(dry-run)', '(real-run)'][force], n_changed, rel_fpath))
    print(' * --------------------')
    new_file_lines = ut.lmap(ut.ensure_unicode, new_file_lines)
    new_file = ''.join(new_file_lines)
    if verbose:
        old_file = ut.ensure_unicode(
            ''.join(ut.lmap(ut.ensure_unicode, file_lines)))
        ut.print_difftext(old_file, new_file)
    # Write back to file only when explicitly forced.
    if force:
        print(' ! WRITING CHANGES')
        ut.writeto(fpath, new_file)
    else:
        print(' dry run')
    return changed_lines
|
def size(a, b=0, nargout=1):
    """Return the MATLAB-style size of ``a``.

    >>> size(zeros(3,3)) + 1
    matlabarray([[4, 4]])

    Args:
        a: Array-like whose shape is queried.
        b: 1-based dimension index; 0 (default) returns all dimensions.
        nargout: Number of expected outputs (MATLAB calling convention).

    Returns:
        For scalars, 1 (if ``b`` is given) or a tuple of ones.  Otherwise
        the requested dimension, or the full shape as a ``matlabarray``
        (``nargout <= 1``) or a plain tuple.
    """
    s = np.asarray(a).shape
    # Bug fix: was `s is ()`, which identity-compares against a literal
    # tuple and only "works" because CPython interns the empty tuple.
    # Use equality, which is the guaranteed behavior.
    if s == ():
        return 1 if b else (1,) * nargout
    # a is not a scalar
    try:
        if b:
            # MATLAB dimensions are 1-based.
            return s[b - 1]
        return matlabarray(s) if nargout <= 1 else s
    except IndexError:
        # Dimensions beyond ndim are singleton in MATLAB semantics.
        return 1
|
def save_file(path, data, readable=False):
    """Save data to a file, choosing the serializer from the file extension.

    :param path: File path to save
    :type path: str | unicode
    :param data: Data to save
    :type data: None | int | float | str | unicode | list | dict
    :param readable: Format file to be human readable (default: False)
    :type readable: bool
    :rtype: None
    :raises IOError: If empty path or error writing file
    """
    if not path:
        # Bug fix: the IOError was previously constructed but never raised,
        # so an empty path silently fell through to io.open and failed there.
        raise IOError("No path specified to save")
    try:
        with io.open(path, "w", encoding="utf-8") as f:
            if path.endswith(".json"):
                save_json_file(
                    f, data, pretty=readable, compact=(not readable), sort=True)
            elif path.endswith(".yaml") or path.endswith(".yml"):
                save_yaml_file(f, data)
    except IOError:
        raise
    except Exception as e:
        # Normalize unexpected errors to IOError per the documented contract.
        raise IOError(e)
|
def setKeyboardTransformAbsolute(self, eTrackingOrigin):
    """Set the position of the keyboard in world space.

    Returns the HmdMatrix34_t that was passed by reference to the native
    call.  NOTE(review): presumably the native function fills it in —
    confirm against the OpenVR API.
    """
    transform = HmdMatrix34_t()
    fn = self.function_table.setKeyboardTransformAbsolute
    fn(eTrackingOrigin, byref(transform))
    return transform
|
def compile_model(self, input_model_config, output_model_config, role,
                  job_name, stop_condition, tags):
    """Create an Amazon SageMaker Neo compilation job.

    Args:
        input_model_config (dict): The trained model and the Amazon S3
            location where it is stored.
        output_model_config (dict): Identifies the Amazon S3 location where
            Amazon SageMaker Neo saves the results of the compilation job.
        role (str): An AWS IAM role (either name or full ARN).  The
            compilation job uses this role to access model artifacts; it
            must carry sufficient permissions.
        job_name (str): Name of the compilation job being created.
        stop_condition (dict): Defines when the compilation job shall
            finish, e.g. ``MaxRuntimeInSeconds``.
        tags (list[dict]): Tags for labeling the job, or None.  See
            https://docs.aws.amazon.com/sagemaker/latest/dg/API_Tag.html.

    NOTE(review): the upstream docstring promises the job ARN as a return
    value, but nothing is returned here — confirm against callers.
    """
    request = {
        'InputConfig': input_model_config,
        'OutputConfig': output_model_config,
        'RoleArn': role,
        'StoppingCondition': stop_condition,
        'CompilationJobName': job_name,
    }
    if tags is not None:
        request['Tags'] = tags
    LOGGER.info('Creating compilation-job with name: {}'.format(job_name))
    self.sagemaker_client.create_compilation_job(**request)
|
def _list_collections(self, sock_info, slave_okay, session, read_preference,
                      **kwargs):
    """Internal listCollections helper."""
    coll = self.get_collection("$cmd", read_preference=read_preference)
    if sock_info.max_wire_version > 2:
        # Modern servers support the listCollections command directly.
        cmd = SON([("listCollections", 1), ("cursor", {})])
        cmd.update(kwargs)
        with self.__client._tmp_session(session, close=False) as tmp_session:
            cursor = self._command(
                sock_info, cmd, slave_okay,
                read_preference=read_preference,
                session=tmp_session)["cursor"]
            return CommandCursor(
                coll, cursor, sock_info.address,
                session=tmp_session,
                explicit_session=session is not None)
    # Legacy path: emulate listCollections by aggregating system.namespaces,
    # stripping the "<dbname>." prefix from each namespace.
    match = _INDEX_REGEX
    if "filter" in kwargs:
        match = {"$and": [_INDEX_REGEX, kwargs["filter"]]}
    dblen = len(self.name.encode("utf8") + b".")
    pipeline = [
        {"$project": {"name": {"$substr": ["$name", dblen, -1]},
                      "options": 1}},
        {"$match": match},
    ]
    cmd = SON([("aggregate", "system.namespaces"),
               ("pipeline", pipeline),
               ("cursor", kwargs.get("cursor", {}))])
    cursor = self._command(sock_info, cmd, slave_okay)["cursor"]
    return CommandCursor(coll, cursor, sock_info.address)
|
def add_fig_kwargs(func):
    """Decorator that adds keyword arguments for functions returning
    matplotlib figures.

    The decorated function should return either a matplotlib figure or None
    to signal some sort of error/unexpected event.  See the table appended
    to the wrapper's docstring for the list of supported options.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Pop the kwargs consumed by the decorator.
        title = kwargs.pop("title", None)
        size_kwargs = kwargs.pop("size_kwargs", None)
        show = kwargs.pop("show", True)
        savefig = kwargs.pop("savefig", None)
        tight_layout = kwargs.pop("tight_layout", False)
        ax_grid = kwargs.pop("ax_grid", None)
        ax_annotate = kwargs.pop("ax_annotate", None)

        # Call func and return immediately if None is returned.
        fig = func(*args, **kwargs)
        if fig is None:
            return fig

        # Operate on the matplotlib figure.
        if title is not None:
            fig.suptitle(title)
        if size_kwargs is not None:
            fig.set_size_inches(size_kwargs.pop("w"), size_kwargs.pop("h"),
                                **size_kwargs)
        if ax_grid is not None:
            for ax in fig.axes:
                ax.grid(bool(ax_grid))
        if ax_annotate:
            from string import ascii_letters
            tags = ascii_letters
            if len(fig.axes) > len(tags):
                # Repeat the alphabet so every axis receives a tag.
                tags = (1 + len(ascii_letters) // len(fig.axes)) * ascii_letters
            for ax, tag in zip(fig.axes, tags):
                ax.annotate("(%s)" % tag, xy=(0.05, 0.95),
                            xycoords="axes fraction")
        if tight_layout:
            try:
                fig.tight_layout()
            except Exception as exc:
                # For some unknown reason, this problem shows up only on
                # travis.
                # https://stackoverflow.com/questions/22708888/valueerror-when-using-matplotlib-tight-layout
                print("Ignoring Exception raised by fig.tight_layout\n",
                      str(exc))
        if savefig:
            fig.savefig(savefig)
        if show:
            import matplotlib.pyplot as plt
            plt.show()
        return fig

    # Append the table of supported options to the decorated method's doc.
    s = "\n\n" + """\
Keyword arguments controlling the display of the figure:

================  ====================================================
kwargs            Meaning
================  ====================================================
title             Title of the plot (Default: None).
show              True to show the figure (default: True).
savefig           "abc.png" or "abc.eps" to save the figure to a file.
size_kwargs       Dictionary with options passed to fig.set_size_inches
                  e.g. size_kwargs=dict(w=3, h=4)
tight_layout      True to call fig.tight_layout (default: False)
ax_grid           True (False) to add (remove) grid from all axes in fig.
                  Default: None i.e. fig is left unchanged.
ax_annotate       Add labels to subplots e.g. (a), (b).
                  Default: False
================  ====================================================

"""
    if wrapper.__doc__ is not None:
        # Add s at the end of the existing docstring.
        wrapper.__doc__ += "\n" + s
    else:
        # No docstring on func: use s alone.
        wrapper.__doc__ = s
    return wrapper
|
def validate_request(self, data: Any, *additional: AnyMapping,
                     merged_class: Type[dict] = dict) -> Any:
    r"""Validate request data against the request schema from the module.

    :param data: Request data.
    :param \*additional:
        Additional data dicts to be merged with the base request data.
    :param merged_class:
        When additional data dicts are supplied the method by default
        returns a merged **dict** with all data, but you can customize
        things to use a read-only dict or any other class or callable.
    """
    request_schema = getattr(self.module, 'request', None)
    if request_schema is None:
        logger.error(
            'Request schema should be defined',
            extra={'schema_module': self.module,
                   'schema_module_attrs': dir(self.module)})
        raise self.make_error('Request schema should be defined')
    # Merge base and additional data dicts, but only if additional data
    # dicts have been supplied.
    if isinstance(data, dict) and additional:
        data = merged_class(self._merge_data(data, *additional))
    # Pessimistically mark the request invalid; flip to valid only after
    # _validate returns without raising.
    try:
        self._validate(data, request_schema)
    finally:
        self._valid_request = False
    self._valid_request = True
    processor = getattr(self.module, 'request_processor', None)
    return processor(data) if processor else data
|
def askForFolder(parent, msg=None):
    '''Ask the user for a folder, opening the dialog at the last path that
    was selected when this same function was invoked from the calling
    method.

    :param parent: The parent window
    :param msg: The message to use for the dialog title
    '''
    msg = msg or 'Select folder'
    caller = _callerName().split(".")
    # The last-used folder is remembered per calling method, namespaced
    # by the calling plugin.
    name = "/".join([LAST_PATH, caller[-1]])
    namespace = caller[0]
    last_path = pluginSetting(name, namespace)
    folder = QtWidgets.QFileDialog.getExistingDirectory(parent, msg, last_path)
    if folder:
        setPluginSetting(name, folder, namespace)
    return folder
|
def top_k_accuracy(input: Tensor, targs: Tensor, k: int = 5) -> Rank0Tensor:
    "Computes the Top-k accuracy (target is in the top k predictions)."
    # Indices of the k highest-scoring classes per sample.
    top_idx = input.topk(k=k, dim=-1)[1]
    # Broadcast the targets against those indices and test membership.
    expanded = targs.unsqueeze(dim=-1).expand_as(top_idx)
    hit = (top_idx == expanded).max(dim=-1)[0]
    return hit.float().mean()
|
def read_from_bpch(filename, file_position, shape, dtype, endian,
                   use_mmap=False):
    """Read a chunk of data from a bpch output file.

    Parameters
    ----------
    filename : str
        Path to file on disk containing the data
    file_position : int
        Position (bytes) where the desired data chunk begins
    shape : tuple of ints
        Resultant (n-dimensional) shape of the requested data; the chunk is
        read sequentially from disk and then re-shaped
    dtype : dtype
        Dtype of data; for best results, pass a dtype which includes an
        endian indicator, e.g. ``dtype=np.dtype('>f4')``
    endian : str
        Endianness of data; should be consistent with ``dtype``
    use_mmap : bool
        Memory-map the chunk of data to the file on disk, else read
        immediately

    Returns
    -------
    Array with shape ``shape`` and dtype ``dtype`` containing the requested
    chunk of data from ``filename``.
    """
    # Skip 4 bytes before the data — presumably the Fortran record-length
    # marker preceding each record.
    offset = file_position + 4
    if use_mmap:
        result = np.memmap(filename, dtype=dtype, mode='r', shape=shape,
                           offset=offset, order='F')
    else:
        with FortranFile(filename, 'rb', endian) as ff:
            ff.seek(file_position)
            result = np.array(ff.readline('*f'))
            result = result.reshape(shape, order='F')
    # As a sanity check, *be sure* the resulting block has the correct
    # shape, and fail early if it doesn't.
    if result.shape != shape:
        raise IOError("Data chunk read from {} does not have the right shape,"
                      " (expected {} but got {})".format(
                          filename, shape, result.shape))
    return result
|
def column(table_name, column_name=None, cache=False, cache_scope=_CS_FOREVER):
    """Decorator version of `add_column` for functions that return a Series.

    The Series index must match the named table.  The column name defaults
    to the decorated function's name.  The function's argument names and
    keyword argument values are matched to registered variables when Orca
    evaluates it; the argument name "iter_var" may be used to have the
    current iteration variable injected.
    """
    def decorator(func):
        # Fall back to the function's own name when no column name given.
        registered_name = column_name if column_name else func.__name__
        add_column(table_name, registered_name, func,
                   cache=cache, cache_scope=cache_scope)
        return func
    return decorator
|
def bsp_find_node(node: tcod.bsp.BSP, cx: int, cy: int) -> Optional[tcod.bsp.BSP]:
    """Thin deprecated wrapper around :any:`BSP.find_node`.

    .. deprecated:: 2.0
        Use :any:`BSP.find_node` instead.
    """
    return node.find_node(cx, cy)
|
def setup_config(epab_version: str):
    """Set up the elib_config package for EPAB.

    :param epab_version: installed version of EPAB as a string
    :raises FileNotFoundError: if no pyproject.toml exists in the cwd
    """
    log = logging.getLogger('EPAB')
    log.debug('setting up config')
    elib_config.ELIBConfig.setup(
        app_name='EPAB',
        app_version=epab_version,
        config_file_path='pyproject.toml',
        config_sep_str='__',
        root_path=['tool', 'epab'],
    )
    elib_config.write_example_config('pyproject.toml.example')
    if not pathlib.Path('pyproject.toml').exists():
        raise FileNotFoundError('pyproject.toml')
    elib_config.validate_config()
|
def sampleLOS(self, los, n=1, deg=True, maxd=None, nsigma=None,
              targetSurfmass=True, targetSigma2=True):
    """NAME:
        sampleLOS
    PURPOSE:
        sample along a given LOS
    INPUT:
        los - line of sight (in deg, unless deg=False; can be Quantity)
        n= number of desired samples
        deg= los in degrees? (default=True)
        targetSurfmass, targetSigma2= if True, use target surface mass and
            sigma2 profiles, respectively (there is not much point to doing
            the latter) (default=True)
    OUTPUT:
        returns list of Orbits
    BUGS:
        target=False uses target distribution for derivatives (this is a
        detail)
    HISTORY:
        2011-03-24 - Started - Bovy (NYU)
    """
    # Normalize the line of sight to radians.
    if _APY_LOADED and isinstance(los, units.Quantity):
        l = los.to(units.rad).value
    elif deg:
        l = los * _DEGTORAD
    else:
        l = los
    # Sample distances along the LOS first.
    ds = self.sampledSurfacemassLOS(l, n=n, maxd=maxd,
                                    target=targetSurfmass,
                                    use_physical=False)
    samples = []
    for ii in range(int(n)):
        # Convert distance along the LOS to (R, phi).
        thisR, thisphi = _dlToRphi(ds[ii], l)
        # Then sample a velocity at that position.
        vv = self.sampleVRVT(thisR, n=1, nsigma=nsigma,
                             target=targetSigma2, use_physical=False)[0]
        if self._roSet and self._voSet:
            samples.append(Orbit([thisR, vv[0], vv[1], thisphi],
                                 ro=self._ro, vo=self._vo))
        else:
            samples.append(Orbit([thisR, vv[0], vv[1], thisphi]))
    return samples
|
def _get_translations_multi_paths():
    """Return the gettext translations that should be used for this request.

    Never fails: returns None when used outside of a request context, and
    falls back to a NullTranslations-like object when no catalog is found.
    """
    ctx = _request_ctx_stack.top
    if ctx is None:
        return None
    translations = getattr(ctx, "babel_translations", None)
    if translations is not None:
        # Already resolved and cached on this request context.
        return translations
    babel_ext = ctx.app.extensions["babel"]
    translations = None
    trs = None
    # Reverse order: the application catalog is loaded last, so that
    # translations from libraries can be overridden.
    for dirname, domain in reversed(babel_ext._translations_paths):
        trs = Translations.load(dirname,
                                locales=[flask_babel.get_locale()],
                                domain=domain)
        # babel.support.Translations is a subclass of
        # babel.support.NullTranslations, so the presence of a 'merge'
        # method distinguishes a real catalog.
        if not trs or not hasattr(trs, "merge"):
            # Got None or a NullTranslations instance.
            continue
        if translations is not None and hasattr(translations, "merge"):
            translations.merge(trs)
        else:
            translations = trs
    # Ensure translations is at least a NullTranslations object.
    if translations is None:
        translations = trs
    ctx.babel_translations = translations
    return translations
|
def _get_login_manager(self,
                       app: FlaskUnchained,
                       anonymous_user: AnonymousUser,
                       ) -> LoginManager:
    """Get an initialized instance of Flask Login's
    :class:`~flask_login.LoginManager`.

    :param app: the application instance the manager is registered on.
    :param anonymous_user: class to use for unauthenticated users; falls
        back to :class:`AnonymousUser` when falsy.
    :return: the fully configured, app-initialized ``LoginManager``.
    """
    login_manager = LoginManager()
    login_manager.anonymous_user = anonymous_user or AnonymousUser
    # Route Flask-Login's own user-facing messages through this bundle's
    # translation function.
    login_manager.localize_callback = _
    login_manager.request_loader(self._request_loader)
    # Delegate user lookup to the security utils service.
    login_manager.user_loader(
        lambda *a, **kw: self.security_utils_service.user_loader(*a, **kw))
    login_manager.login_view = 'security_controller.login'
    login_manager.login_message = _('flask_unchained.bundles.security:error.login_required')
    login_manager.login_message_category = 'info'
    login_manager.needs_refresh_message = _('flask_unchained.bundles.security:error.fresh_login_required')
    login_manager.needs_refresh_message_category = 'info'
    login_manager.init_app(app)
    return login_manager
|
def _xml_for_episode_index ( self , ep_ind ) :
"""Helper method to retrieve the corresponding model xml string
for the passed episode index ."""
|
# read the model xml , using the metadata stored in the attribute for this episode
model_file = self . demo_file [ "data/{}" . format ( ep_ind ) ] . attrs [ "model_file" ]
model_path = os . path . join ( self . demo_path , "models" , model_file )
with open ( model_path , "r" ) as model_f :
model_xml = model_f . read ( )
return model_xml
|
def write_gff_file(self, outfile, force_rerun=False):
    """Write a GFF file for the protein features; ``features`` will now load
    directly from this file.

    Args:
        outfile (str): Path to the new GFF file to be written.
        force_rerun (bool): If an existing file should be overwritten.
    """
    # Skip the write when the file already exists, unless forced.
    if ssbio.utils.force_rerun(outfile=outfile, flag=force_rerun):
        with open(outfile, "w") as out_handle:
            GFF.write([self], out_handle)
    # Point the feature loader at the (possibly pre-existing) file.
    self.feature_path = outfile
|
def node_theta(nodelist, node):
    """Map a node to its angular position around the circle.

    Nodes are spread evenly over ``[-pi, pi)`` in nodelist order.

    :param nodelist: Nodelist from the graph.
    :type nodelist: list.
    :param node: The node of interest. Must be in the nodelist.
    :returns: theta -- the angle of the node in radians.
    """
    assert len(nodelist) > 0, "nodelist must be a list of items."
    assert node in nodelist, "node must be inside nodelist."
    position = nodelist.index(node)
    return -np.pi + 2 * np.pi * position / len(nodelist)
|
def handle_request(request, validator_map, **kwargs):
    """Validate the request against the swagger spec and return a dict with
    all parameter values available in the request, casted to the expected
    python type.

    :param request: a :class:`PyramidSwaggerRequest` to validate
    :param validator_map: a :class:`pyramid_swagger.load_schema.ValidatorMap`
        used to validate the request
    :returns: a :class:`dict` of request data for each parameter in the
        swagger spec
    :raises: RequestValidationError when the request is not valid for the
        swagger spec
    """
    request_data = {}
    validation_pairs = []
    # Query/path/form/header parameters all follow the same shape: cast each
    # raw value to the type declared by its schema, remember the
    # (validator, values) pair for the final validation pass, and expose the
    # casted values to the caller.
    for validator, values in [
        (validator_map.query, request.query),
        (validator_map.path, request.path),
        (validator_map.form, request.form),
        (validator_map.headers, request.headers),
    ]:
        values = cast_params(validator.schema, values)
        validation_pairs.append((validator, values))
        request_data.update(values)
    # Body is a special case because the key for the request_data comes
    # from the name in the schema, instead of keys in the values
    if validator_map.body.schema:
        param_name = validator_map.body.schema['name']
        validation_pairs.append((validator_map.body, request.body))
        request_data[param_name] = request.body
    # Raises RequestValidationError on the first failing pair.
    validate_request(validation_pairs)
    return request_data
|
def p_nonblocking_substitution(self, p):
    # NOTE: the docstring below is the PLY grammar production for this rule
    # and is consumed by the parser generator -- do not edit it as prose.
    'nonblocking_substitution : delays lvalue LE delays rvalue SEMICOLON'
    # Build the AST node: lvalue <= rvalue, with optional pre/post delays.
    p[0] = NonblockingSubstitution(p[2], p[5], p[1], p[4], lineno=p.lineno(2))
    # Propagate the lvalue's line number to the whole production.
    p.set_lineno(0, p.lineno(2))
|
def emit(self, event, *event_args):
    """Invoke every listener registered for ``event``.

    Each callback receives the arguments passed to :meth:`emit` first,
    followed by the extra arguments that were supplied to :meth:`on`.
    A callback that returns ``False`` is unsubscribed from the event.
    """
    # Iterate over a snapshot so callbacks may (un)register listeners safely.
    for entry in list(self._listeners[event]):
        call_args = [*event_args, *entry.user_args]
        if entry.callback(*call_args) is False:
            self.off(event, entry.callback)
|
def add_cert(self, cert):
    """Explicitly adds a certificate to the store's set of trusted certs.

    @param cert - X509 object to add
    @raises TypeError - if ``cert`` is not an X509 instance.
    """
    if not isinstance(cert, X509):
        raise TypeError("cert should be X509")
    # Hand the underlying OpenSSL certificate handle to the native store.
    libcrypto.X509_STORE_add_cert(self.store, cert.cert)
|
def make_params(self):
    """Create all Variables to be returned later by get_params.

    By default this is a no-op. Models that need their fprop to be called
    for their params to be created can set ``needs_dummy_fprop=True`` in
    the constructor; a dummy forward pass is then run exactly once.
    """
    if not self.needs_dummy_fprop:
        return
    if hasattr(self, "_dummy_input"):
        # Params were already materialized by an earlier call.
        return
    self._dummy_input = self.make_input_placeholder()
    self.fprop(self._dummy_input)
|
def add_result(self, source, found, runtime):
    """Record one scraping attempt in the statistics 'database'.

    Intended to be called after a website has been scraped: ``source`` is
    the scraper function that ran, ``runtime`` how long it took, and
    ``found`` whether the lyrics were located.
    """
    stats = self.source_stats[source.__name__]
    stats.add_runtime(runtime)
    if found:
        stats.successes += 1
    else:
        stats.fails += 1
|
def print_download_uri(self, version, source):
    """Print the download URL for a package release, if one exists.

    @param version: version number or 'dev' for svn
    @type version: string
    @param source: download source or egg
    @type source: boolean
    @returns: None
    """
    if version == "dev":
        # Development releases only exist in subversion, always as source.
        pkg_type, source = "subversion", True
    else:
        pkg_type = "source" if source else "egg"
    # Use setuptools monkey-patch to grab url.
    url = get_download_uri(self.project_name, version, source,
                           self.options.pypi_index)
    if url:
        print("%s" % url)
    else:
        self.logger.info("No download URL found for %s" % pkg_type)
|
def _load_instance(self, instance_id, force_reload=True):
    """Return the VM instance with the given id.

    For performance reasons, the instance ID is first searched for in the
    collection of VM instances started by ElastiCluster
    (`self._instances`), then in the list of all instances known to the
    cloud provider at the time of the last update
    (`self._cached_instances`), and finally the cloud provider is directly
    queried.

    :param str instance_id: instance identifier
    :param bool force_reload: if ``True``, skip searching the caches and
        immediately reload instance data from the cloud provider
    :return: py:class:`novaclient.v1_1.servers.Server` - instance
    :raises: `InstanceError` is returned if the instance can't
        be found in the local cache or in the cloud.
    """
    if force_reload:
        try:
            # Remove from cache and get from server again
            vm = self.nova_client.servers.get(instance_id)
        except NotFound:
            raise InstanceNotFoundError(
                "Instance `{instance_id}` not found".format(instance_id=instance_id))
        # update caches
        self._instances[instance_id] = vm
        self._cached_instances[instance_id] = vm
    # if instance is known, return it
    if instance_id in self._instances:
        return self._instances[instance_id]
    # else, check (cached) list from provider
    if instance_id not in self._cached_instances:
        # Refresh the cache, just in case
        self._cached_instances = dict(
            (vm.id, vm) for vm in self.nova_client.servers.list())
    if instance_id in self._cached_instances:
        inst = self._cached_instances[instance_id]
        # Promote provider-known instances into the primary cache.
        self._instances[instance_id] = inst
        return inst
    # If we reached this point, the instance was not found neither
    # in the caches nor on the website.
    raise InstanceNotFoundError(
        "Instance `{instance_id}` not found".format(instance_id=instance_id))
|
def get_render_data(self, **kwargs):
    """Return all data that should be passed to the renderer.

    By default adds the following arguments:

    * **bundle** - The bundle that is attached to this view instance.
    * **url_params** - The url keyword arguments, i.e. ``self.kwargs``.
    * **user** - The user attached to this request.
    * **base** - Unless base was already specified this gets set to
      ``self.base_template``.
    * **navigation** - The navigation bar for the page.
    * **object_header_tmpl** - The template to use for the object header.
      Set to ``self.object_header_tmpl``.
    * **view_tags** - String form of the tags for the current object.
    * **back_bundle** - The bundle linked to from the object header as part
      of navigation; computed by ``self.get_back_bundle()`` unless already
      provided.
    """
    obj = getattr(self, 'object', None)
    data = dict(self.extra_render_data)
    # Explicit kwargs may pre-set 'base'/'back_bundle' and win over defaults.
    data.update(kwargs)
    data.update({
        'bundle': self.bundle,
        'navigation': self.get_navigation(),
        'url_params': self.kwargs,
        'user': self.request.user,
        'object_header_tmpl': self.object_header_tmpl,
        'view_tags': tag_handler.tags_to_string(self.get_tags(obj)),
    })
    # PEP 8 idiom: 'x not in d' instead of 'not x in d' (behavior unchanged).
    if 'base' not in data:
        data['base'] = self.base_template
    if 'back_bundle' not in data:
        data['back_bundle'] = self.get_back_bundle()
    return super(CMSView, self).get_render_data(**data)
|
def get_component_related_issues(self, component_id):
    """Return counts of issues related to this component.

    :param component_id: id of the component to query
    :return: the parsed REST response
    """
    endpoint = 'rest/api/2/component/{component_id}/relatedIssueCounts'.format(
        component_id=component_id)
    return self.get(endpoint)
|
def get_chemical(self, chemical_name=None, chemical_id=None, cas_rn=None,
                 drugbank_id=None, parent_id=None, parent_tree_number=None,
                 tree_number=None, synonym=None, limit=None, as_df=False):
    """Get chemicals matching any combination of the given filters.

    :param bool as_df: if set to True result returns as `pandas.DataFrame`
    :param str chemical_name: chemical name (SQL ``LIKE`` pattern)
    :param str chemical_id: chemical identifier
    :param str cas_rn: CAS registry number
    :param str drugbank_id: DrugBank identifier
    :param str parent_id: identifiers of the parent terms
    :param str parent_tree_number: identifiers of the parent nodes
    :param str tree_number: identifiers of the chemical's nodes
    :param str synonym: chemical synonym (SQL ``LIKE`` pattern)
    :param int limit: maximum number of results
    :return: list of :class:`pyctd.manager.models.Chemical` objects

    .. seealso::
        :class:`pyctd.manager.models.Chemical`
    """
    q = self.session.query(models.Chemical)
    # Filters on Chemical's own columns; name uses LIKE to allow patterns.
    if chemical_name:
        q = q.filter(models.Chemical.chemical_name.like(chemical_name))
    if chemical_id:
        q = q.filter(models.Chemical.chemical_id == chemical_id)
    if cas_rn:
        q = q.filter(models.Chemical.cas_rn == cas_rn)
    # Filters on related tables require a join first.
    if drugbank_id:
        q = q.join(models.ChemicalDrugbank).filter(
            models.ChemicalDrugbank.drugbank_id == drugbank_id)
    if parent_id:
        q = q.join(models.ChemicalParentid).filter(
            models.ChemicalParentid.parent_id == parent_id)
    if tree_number:
        q = q.join(models.ChemicalTreenumber).filter(
            models.ChemicalTreenumber.tree_number == tree_number)
    if parent_tree_number:
        q = q.join(models.ChemicalParenttreenumber).filter(
            models.ChemicalParenttreenumber.parent_tree_number == parent_tree_number)
    if synonym:
        q = q.join(models.ChemicalSynonym).filter(
            models.ChemicalSynonym.synonym.like(synonym))
    # Apply the limit and optionally convert the result to a DataFrame.
    return self._limit_and_df(q, limit, as_df)
|
def set_iscsi_boot_info(self, mac, target_name, lun, ip_address, port='3260',
                        auth_method=None, username=None, password=None):
    """Set iscsi details of the system in uefi boot mode.

    Deprecated wrapper: the initiator system is set with the target details
    like IQN, LUN, IP, Port etc. by delegating to ``set_iscsi_info``; the
    ``mac`` argument is accepted but ignored.

    :param mac: The MAC of the NIC to be set with iSCSI information
    :param target_name: Target Name for iscsi.
    :param lun: logical unit number.
    :param ip_address: IP address of the target.
    :param port: port of the target.
    :param auth_method: either None or CHAP.
    :param username: CHAP Username for authentication.
    :param password: CHAP secret.
    :raises: IloError, on an error from iLO.
    :raises: IloCommandNotSupportedInBiosError, if the system is
        in the bios boot mode.
    """
    # BUG FIX: the adjacent string literals were missing a separating space,
    # so the warning rendered as "...'MAC' parameterpassed in is ignored...".
    LOG.warning("'set_iscsi_boot_info' is deprecated. The 'MAC' parameter "
                "passed in is ignored. Use 'set_iscsi_info' instead.")
    return self._call_method('set_iscsi_info', target_name, lun, ip_address,
                             port, auth_method, username, password)
|
def add_node_configuration(self, param_name, node_id, param_value):
    """Set a parameter for a given node.

    :param param_name: parameter identifier (as specified by the chosen model)
    :param node_id: node identifier
    :param param_value: parameter value
    """
    # Create the per-parameter mapping on first use, then record the value.
    self.config['nodes'].setdefault(param_name, {})[node_id] = param_value
|
def forget_masks(self):
    """Forget all loaded coordinates and refresh the display."""
    # Reset the running sequence number used to label newly loaded masks.
    self._seqno = 1
    self._maskobjs = []
    self._treepaths = []
    # Case-insensitive mapping backing the tree view.
    self.tree_dict = Bunch.caselessDict()
    # Rebuild the display from the now-empty state.
    self.redo()
|
def serve(self, conn, addr, auth=False):
    """Handle a single client.

    Runs the command loop until the connection closes: AUTH must succeed
    before any other command is accepted; PING echoes its payload; CALL
    invokes a registered remote method and returns its result (or the
    exception it raised); anything else yields an ERR reply.

    :param conn: The Connection instance.
    :param addr: The address of the client, for logging purposes.
    :param auth: A boolean specifying whether the connection
        should be considered authenticated or not.
        Provided for debugging.
    """
    try:
        # Handle data from the client
        while True:
            # Get the command
            try:
                cmd, payload = conn.recv()
            except ValueError as exc:
                # Tell the client about the error
                conn.send('ERR', "Failed to parse command: %s" % str(exc))
                # If they haven't successfully authenticated yet,
                # disconnect them
                if not auth:
                    return
                continue
            # Pragma: nocover
            # Log the command and payload, for debugging purposes
            LOG.debug("Received command %r from %s port %s; payload: %r" %
                      (cmd, addr[0], addr[1], payload))
            # Handle authentication
            if cmd == 'AUTH':
                if auth:
                    conn.send('ERR', "Already authenticated")
                elif payload[0] != self.authkey:
                    # Don't give them a second chance
                    conn.send('ERR', "Invalid authentication key")
                    return
                else:
                    # Authentication successful
                    conn.send('OK')
                    auth = True
            # Handle unauthenticated connections
            elif not auth:
                # No second chances
                conn.send('ERR', "Not authenticated")
                return
            # Handle aliveness test
            elif cmd == 'PING':
                conn.send('PONG', *payload)
            # Handle a function call command
            elif cmd == 'CALL':
                try:
                    # Get the call parameters
                    try:
                        funcname, args, kwargs = payload
                    except ValueError as exc:
                        conn.send('ERR', "Invalid payload for 'CALL' "
                                         "command: %s" % str(exc))
                        continue
                    # Look up the function
                    func = self._get_remote_method(funcname)
                    # Call the function
                    result = func(*args, **kwargs)
                except Exception as exc:
                    # Report the exception by module-qualified name so the
                    # client can re-raise an equivalent error.
                    exc_name = '%s:%s' % (exc.__class__.__module__,
                                          exc.__class__.__name__)
                    conn.send('EXC', exc_name, str(exc))
                else:
                    # Return the result
                    conn.send('RES', result)
            # Handle all other commands by returning an ERR
            else:
                conn.send('ERR', "Unrecognized command %r" % cmd)
    except ConnectionClosed:
        # Ignore the connection closed error
        pass
    except Exception as exc:
        # Log other exceptions
        LOG.exception("Error serving client at %s port %s: %s" %
                      (addr[0], addr[1], str(exc)))
    finally:
        LOG.info("Closing connection from %s port %s" % (addr[0], addr[1]))
        # Make sure the socket gets closed
        conn.close()
|
def get_setter(self, oid):
    """Retrieve the nearest parent setter function for an OID.

    Resolution order: an exact match in ``self.setter``, then the longest
    registered prefix of ``oid``, and finally ``self.default_setter``.
    """
    # BUG FIX: the original used hasattr(self.setter, oid), which tests for
    # an *attribute* on the dict object and is never true for its keys; the
    # intended exact-key lookup is a membership test.
    if oid in self.setter:
        return self.setter[oid]
    parents = [poid for poid in self.setter if oid.startswith(poid)]
    if parents:
        # Among prefixes of the same OID, the lexicographic max is the
        # longest, i.e. the most specific registered parent.
        return self.setter[max(parents)]
    return self.default_setter
|
def _create_pipe(self):
    """Creates a new pipe and returns the child end of the connection.

    To request an account from the pipe, use::

        pipe = queue._create_pipe()

        # Let the account manager choose an account.
        pipe.send(('acquire-account-for-host', host))
        account = pipe.recv()
        pipe.send(('release-account', account.id()))

        # Or acquire a specific account.
        pipe.send(('acquire-account', account.id()))
        account = pipe.recv()
        pipe.send(('release-account', account.id()))

        pipe.close()
    """
    child = _PipeHandler(self.account_manager)
    # Keep a strong reference so the handler isn't garbage collected while
    # the caller still holds the pipe end.
    self.pipe_handlers[id(child)] = child
    child.start()
    return child.to_parent
|
def gt(self, value):
    """Construct a greater than (``>``) filter.

    :param value: Filter value
    :return: :class:`filters.Field <filters.Field>` object
    :rtype: filters.Field
    """
    # Record the operator and its logical negation, then the filter value.
    self.op, self.negate_op = '>', '<='
    self.value = self._value(value)
    return self
|
def default_signal_map():
    """Create the default signal map for this system.

    Only signals that actually exist on the running platform are included.

    :return: dict mapping signal numbers to handler names (or ``None``).
    """
    name_map = {
        'SIGTSTP': None,
        'SIGTTIN': None,
        'SIGTTOU': None,
        'SIGTERM': 'terminate',
    }
    return {
        getattr(signal, name): target
        for name, target in name_map.items()
        if hasattr(signal, name)
    }
|
def makedirs(name, mode=0o777, exist_ok=False):
    """Mimicks os.makedirs() from Python 3: create ``name`` and any missing
    parents, optionally tolerating an already existing directory."""
    try:
        os.makedirs(name, mode)
    except OSError:
        # Swallow the error only when the caller opted in *and* the path
        # really is a directory now (e.g. we lost a creation race).
        if exist_ok and os.path.isdir(name):
            return
        raise
|
def get_session(self, redirect_url):
    """Create Session to store credentials (implicit grant flow).

    Parameters
        redirect_url (str)
            The full URL that the Uber server redirected to after
            the user authorized your app.

    Returns
        (Session)
            A Session object with OAuth 2.0 credentials.

    Raises
        UberIllegalState (APIError)
            Raised if redirect URL contains an error.
    """
    query_params = self._extract_query(redirect_url)
    error = query_params.get('error')
    if error:
        raise UberIllegalState(error)
    # convert space delimited string to set
    # NOTE(review): assumes a 'scope' parameter is always present in the
    # redirect; a missing scope would raise AttributeError on .split() --
    # confirm against the implicit-grant response format.
    scopes = query_params.get('scope')
    scopes_set = {scope for scope in scopes.split()}
    oauth2credential = OAuth2Credential(
        client_id=self.client_id,
        redirect_url=self.redirect_url,
        access_token=query_params.get('access_token'),
        expires_in_seconds=query_params.get('expires_in'),
        scopes=scopes_set,
        grant_type=auth.IMPLICIT_GRANT,
    )
    return Session(oauth2credential=oauth2credential)
|
def safe_mkdir(directory, clean=False):
    """Ensure ``directory`` exists.

    Missing directories (including parents) are created; an existing one is
    a no-op. With ``clean=True`` the directory is removed first, so it ends
    up empty.
    """
    if clean:
        safe_rmtree(directory)
    try:
        os.makedirs(directory)
    except OSError as e:
        # "Already exists" is expected and benign; anything else is real.
        if e.errno == errno.EEXIST:
            return
        raise
|
def run(self):
    '''Enter into the server loop.

    Consumes the salt event bus forever: management events
    (``salt/reactors/manage/*``, authenticated with the master key) mutate
    or report the reactor configuration; all other events are matched
    against the registered reactors and, when this master is the cluster
    leader, trigger the corresponding reactions.
    '''
    salt.utils.process.appendproctitle(self.__class__.__name__)
    # instantiate some classes inside our new process
    self.event = salt.utils.event.get_event(
        self.opts['__role'],
        self.opts['sock_dir'],
        self.opts['transport'],
        opts=self.opts,
        listen=True)
    self.wrap = ReactWrap(self.opts)
    for data in self.event.iter_events(full=True):
        # skip all events fired by ourselves
        if data['data'].get('user') == self.wrap.event_user:
            continue
        # NOTE: these events must contain the masters key in order to be accepted
        # see salt.runners.reactor for the requesting interface
        if 'salt/reactors/manage' in data['tag']:
            master_key = salt.utils.master.get_master_key('root', self.opts)
            if data['data'].get('key') != master_key:
                log.error('received salt/reactors/manage event without matching master_key. discarding')
                continue
        if data['tag'].endswith('salt/reactors/manage/is_leader'):
            self.event.fire_event({'result': self.is_leader,
                                   'user': self.wrap.event_user},
                                  'salt/reactors/manage/leader/value')
        if data['tag'].endswith('salt/reactors/manage/set_leader'):
            # we only want to register events from the local master
            if data['data'].get('id') == self.opts['id']:
                self.is_leader = data['data']['value']
            self.event.fire_event({'result': self.is_leader,
                                   'user': self.wrap.event_user},
                                  'salt/reactors/manage/leader/value')
        if data['tag'].endswith('salt/reactors/manage/add'):
            _data = data['data']
            res = self.add_reactor(_data['event'], _data['reactors'])
            self.event.fire_event({'reactors': self.list_all(),
                                   'result': res,
                                   'user': self.wrap.event_user},
                                  'salt/reactors/manage/add-complete')
        elif data['tag'].endswith('salt/reactors/manage/delete'):
            _data = data['data']
            res = self.delete_reactor(_data['event'])
            self.event.fire_event({'reactors': self.list_all(),
                                   'result': res,
                                   'user': self.wrap.event_user},
                                  'salt/reactors/manage/delete-complete')
        elif data['tag'].endswith('salt/reactors/manage/list'):
            self.event.fire_event({'reactors': self.list_all(),
                                   'user': self.wrap.event_user},
                                  'salt/reactors/manage/list-results')
        # do not handle any reactions if not leader in cluster
        if not self.is_leader:
            continue
        else:
            reactors = self.list_reactors(data['tag'])
            if not reactors:
                continue
            chunks = self.reactions(data['tag'], data['data'], reactors)
            if chunks:
                # Optionally time the reaction run for master stats.
                if self.opts['master_stats']:
                    _data = data['data']
                    start = time.time()
                try:
                    self.call_reactions(chunks)
                except SystemExit:
                    log.warning('Exit ignored by reactor')
                if self.opts['master_stats']:
                    stats = salt.utils.event.update_stats(self.stats, start, _data)
                    self._post_stats(stats)
|
def _process_credentials ( self , req , resp , origin ) :
"""Adds the Access - Control - Allow - Credentials to the response
if the cors settings indicates it should be set ."""
|
if self . _cors_config [ 'allow_credentials_all_origins' ] :
self . _set_allow_credentials ( resp )
return True
if origin in self . _cors_config [ 'allow_credentials_origins_list' ] :
self . _set_allow_credentials ( resp )
return True
credentials_regex = self . _cors_config [ 'allow_credentials_origins_regex' ]
if credentials_regex :
if credentials_regex . match ( origin ) :
self . _set_allow_credentials ( resp )
return True
return False
|
def directions(self, origin, destination, mode=None, alternatives=None,
               waypoints=None, optimize_waypoints=False, avoid=None,
               language=None, units=None, region=None, departure_time=None,
               arrival_time=None, sensor=None):
    """Get directions between locations.

    :param origin: Origin location - string address; (latitude, longitude)
        two-tuple, dict with ("lat", "lon") keys or object with (lat, lon)
        attributes
    :param destination: Destination location - type same as origin
    :param mode: Travel mode as string, defaults to "driving".
        See `google docs details <https://developers.google.com/maps/documentation/directions/#TravelModes>`_
    :param alternatives: True if provide it has to return more then one
        route alternative
    :param waypoints: Iterable with set of intermediate stops,
        like ("Munich", "Dallas")
        See `google docs details <https://developers.google.com/maps/documentation/javascript/reference#DirectionsRequest>`_
    :param optimize_waypoints: if true will attempt to re-order supplied
        waypoints to minimize overall cost of the route. If waypoints are
        optimized, the route returned will show the optimized order under
        "waypoint_order". See `google docs details <https://developers.google.com/maps/documentation/javascript/reference#DirectionsRequest>`_
    :param avoid: Iterable with set of restrictions,
        like ("tolls", "highways"). For full list refer to
        `google docs details <https://developers.google.com/maps/documentation/directions/#Restrictions>`_
    :param language: The language in which to return results.
        See `list of supported languages <https://developers.google.com/maps/faq#languagesupport>`_
    :param units: Unit system for result. Defaults to unit system of
        origin's country.
        See `google docs details <https://developers.google.com/maps/documentation/directions/#UnitSystems>`_
    :param region: The region code. Affects geocoding of origin and
        destination (see `gmaps.Geocoding.geocode` region parameter)
    :param departure_time: Desired time of departure as
        seconds since midnight, January 1, 1970 UTC
    :param arrival_time: Desired time of arrival for transit directions as
        seconds since midnight, January 1, 1970 UTC.
    """
    if optimize_waypoints:
        # BUG FIX: the original did waypoints.insert(0, "optimize:true"),
        # which raises AttributeError when waypoints is None and also
        # mutates the caller's list.  Build a fresh list instead.
        waypoints = ["optimize:true"] + list(waypoints or [])
    parameters = dict(
        origin=self.assume_latlon_or_address(origin),
        destination=self.assume_latlon_or_address(destination),
        mode=mode,
        alternatives=alternatives,
        waypoints=waypoints or [],
        avoid=avoid,
        language=language,
        units=units,
        region=region,
        departure_time=departure_time,
        arrival_time=arrival_time,
        sensor=sensor,
    )
    return self._make_request(self.DIRECTIONS_URL, parameters, "routes")
|
def is_uniform_join_units(join_units):
    """Check if the join units consist of blocks of uniform type that can
    be concatenated using Block.concat_same_type instead of the generic
    concatenate_join_units (which uses `_concat._concat_compat`).
    """
    # Only use this path when there is something to concatenate.
    if len(join_units) <= 1:
        return False
    first_block_type = type(join_units[0].block)
    # all blocks need to have the same type
    same_type = all(type(ju.block) is first_block_type for ju in join_units)
    # no blocks that would get missing values (can lead to type upcasts)
    # unless we're an extension dtype.
    no_upcast = all(not ju.is_na or ju.block.is_extension for ju in join_units)
    # no blocks with indexers (as then the dimensions do not fit)
    no_indexers = all(not ju.indexers for ju in join_units)
    # disregard Panels
    low_dim = all(ju.block.ndim <= 2 for ju in join_units)
    return same_type and no_upcast and no_indexers and low_dim
|
def restore_backup(path, backup_id):
    '''
    .. versionadded:: 0.17.0

    Restore a previous version of a file that was backed up using Salt's
    :ref:`file state backup <file-state-backups>` system.

    path
        The path on the minion to check for backups
    backup_id
        The numeric id for the backup you wish to restore, as found using
        :mod:`file.list_backups <salt.modules.file.list_backups>`

    CLI Example:

    .. code-block:: bash

        salt '*' file.restore_backup /foo/bar/baz.txt 0
    '''
    path = os.path.expanduser(path)
    # Note: This only supports minion backups, so this function will need to be
    # modified if/when master backups are implemented.
    ret = {'result': False,
           'comment': 'Invalid backup_id \'{0}\''.format(backup_id)}
    try:
        # Accept only ids that round-trip cleanly through int() -- this
        # rejects values like '01' or '3.0' that int() would mangle.
        if len(six.text_type(backup_id)) == len(six.text_type(int(backup_id))):
            backup = list_backups(path)[int(backup_id)]
        else:
            return ret
    except ValueError:
        # backup_id was not an integer at all
        return ret
    except KeyError:
        # No backup with that index exists for this path
        ret['comment'] = ('backup_id \'{0}\' does not exist for '
                          '{1}'.format(backup_id, path))
        return ret
    # Back up the file's current state before overwriting it.
    salt.utils.files.backup_minion(path, _get_bkroot())
    try:
        shutil.copyfile(backup['Location'], path)
    except IOError as exc:
        ret['comment'] = ('Unable to restore {0} to {1}: '
                         '{2}'.format(backup['Location'], path, exc))
        return ret
    else:
        ret['result'] = True
        ret['comment'] = ('Successfully restored {0} to '
                          '{1}'.format(backup['Location'], path))
    # Try to set proper ownership
    if not salt.utils.platform.is_windows():
        try:
            fstat = os.stat(path)
        except (OSError, IOError):
            ret['comment'] += ', but was unable to set ownership'
        else:
            os.chown(path, fstat.st_uid, fstat.st_gid)
    return ret
|
def vbreak(image, mask=None, iterations=1):
    '''Remove horizontal breaks

    1 1 1    1 1 1
    0 1 0 -> 0 0 0  (this case only)
    1 1 1    1 1 1

    Pixels outside ``mask`` are left untouched.

    NOTE(review): ``iterations`` is accepted but never used in this body --
    confirm whether a single table pass is intentional.
    '''
    global vbreak_table
    if mask is None:
        masked_image = image
    else:
        # Work on a boolean copy with masked-out pixels forced to False so
        # they cannot participate in the pattern match.
        masked_image = image.astype(bool).copy()
        masked_image[~mask] = False
    # Apply the 3x3 pattern-table transform.
    result = table_lookup(masked_image, vbreak_table, False)
    if not mask is None:
        # Restore the original values outside the mask.
        result[~mask] = image[~mask]
    return result
|
def add_optional_parameters(detail_json, detail, rating, rating_n, popularity,
                            current_popularity, time_spent):
    """Merge the optional scrape results into ``detail_json``.

    Each value is added only when present; ``rating`` falls back to the
    value embedded in ``detail`` when not supplied directly.

    :param detail_json: result dict being assembled
    :param detail: raw place detail dict (fallback source)
    :param rating: average rating, or None
    :param rating_n: number of ratings, or None
    :param popularity: raw popularity data, or None
    :param current_popularity: live popularity value, or None
    :param time_spent: typical time spent, or None
    :return: ``detail_json`` with the optional keys filled in
    """
    if rating is not None:
        detail_json["rating"] = rating
    elif "rating" in detail:
        detail_json["rating"] = detail["rating"]

    if rating_n is not None:
        detail_json["rating_n"] = rating_n

    if "international_phone_number" in detail:
        detail_json["international_phone_number"] = detail["international_phone_number"]

    if current_popularity is not None:
        detail_json["current_popularity"] = current_popularity

    if popularity is not None:
        day_popularity, wait_times = get_popularity_for_day(popularity)
        detail_json["populartimes"] = day_popularity
        detail_json["time_wait"] = wait_times

    if time_spent is not None:
        detail_json["time_spent"] = time_spent

    return detail_json
|
def get_xmlrpc_server(self):
    """Return PyPI's XML-RPC server instance.

    Honors the user's proxy settings and enables XML-RPC debug output when
    the ``XMLRPC_DEBUG`` environment variable is set.

    :returns: an ``xmlrpclib.Server`` proxy, or ``None`` when the
        connection fails (the error is logged).
    """
    check_proxy_setting()
    # BUG FIX: dict.has_key() was removed in Python 3; the 'in' operator is
    # equivalent and works on both Python 2 and 3.
    debug = 1 if 'XMLRPC_DEBUG' in os.environ else 0
    try:
        return xmlrpclib.Server(XML_RPC_SERVER, transport=ProxyTransport(),
                                verbose=debug)
    except IOError:
        self.logger("ERROR: Can't connect to XML-RPC server: %s" % XML_RPC_SERVER)
|
def reset_kernel(self):
    """Reset the kernel of the currently selected client, if any."""
    client = self.get_current_client()
    if client is None:
        return
    # Bring the plugin to the front before clearing the namespace.
    self.switch_to_plugin()
    client.reset_namespace()
|
def set_location(self, place, latitude, longitude, pipe=None):
    """Set the location of *place* to (*latitude*, *longitude*).

    *place* can be any pickle-able Python object.  When *pipe* is given the
    command is queued on it instead of being run on ``self.redis`` directly.
    """
    target = pipe if pipe is not None else self.redis
    # Redis GEOADD takes longitude before latitude.
    target.geoadd(self.key, longitude, latitude, self._pickle(place))
|
def save(self, filename, format=None):
    """Saves the SArray to file.

    The saved SArray will be in a directory named with the `targetfile`
    parameter.

    Parameters
    ----------
    filename : string
        A local path or a remote URL. If format is 'text', it will be
        saved as a text file. If format is 'binary', a directory will be
        created at the location which will contain the SArray.

    format : {'binary', 'text', 'csv'}, optional
        Format in which to save the SFrame. Binary saved SArrays can be
        loaded much faster and without any format conversion losses.
        'text' and 'csv' are synonymous: Each SArray row will be written
        as a single line in an output text file. If not
        given, will try to infer the format from filename given. If file
        name ends with 'csv', 'txt' or '.csv.gz', then save as 'csv' format,
        otherwise save as 'binary' format.
    """
    from .sframe import SFrame as _SFrame
    if format is None:
        # Infer the format from the file name.
        # NOTE(review): 'txt' has no leading dot, so any name merely
        # *ending* in "txt" (e.g. "report_txt") also selects text format --
        # confirm this is intentional.
        if filename.endswith(('.csv', '.csv.gz', 'txt')):
            format = 'text'
        else:
            format = 'binary'
    if format == 'binary':
        with cython_context():
            self.__proxy__.save(_make_internal_url(filename))
    elif format == 'text' or format == 'csv':
        # Wrap the SArray in a single-column SFrame and reuse its CSV writer.
        sf = _SFrame({'X1': self})
        with cython_context():
            sf.__proxy__.save_as_csv(_make_internal_url(filename), {'header': False})
    else:
        raise ValueError("Unsupported format: {}".format(format))
|
def append_columns(self, colnames, values, **kwargs):
    """Append new columns to the table.

    When appending a single column, ``values`` can be a scalar or an
    array of either length 1 or the same length as this array (the one
    it's appended to).  In case of multiple columns, ``values`` must
    have the shape ``list(arrays)``, and the dimension of each array
    has to match the length of this array.

    See the docs for ``numpy.lib.recfunctions.append_fields`` for an
    explanation of the remaining options.
    """
    n = len(self)
    # A scalar is broadcast into a full-length column.
    if np.isscalar(values):
        values = np.full(n, values)
    values = np.atleast_1d(values)
    # Multiple columns: promote to 2-D (one row per new column) and
    # validate each column's length against this table.
    if not isinstance(colnames, str) and len(colnames) > 1:
        values = np.atleast_2d(values)
        self._check_column_length(values, n)
    if values.ndim == 1:
        if len(values) > n:
            raise ValueError("New Column is longer than existing table!")
        elif len(values) > 1 and len(values) < n:
            raise ValueError("New Column is shorter than existing table, "
                             "but not just one element!")
        elif len(values) == 1:
            # Single element: repeat it for every existing row.
            values = np.full(n, values[0])
    new_arr = rfn.append_fields(self, colnames, values, usemask=False,
                                asrecarray=True, **kwargs)
    # Re-wrap in the same class, carrying over the HDF5 metadata.
    return self.__class__(new_arr, h5loc=self.h5loc, split_h5=self.split_h5,
                          name=self.name, h5singleton=self.h5singleton)
|
def delete_template(server, token, template):
    """Delete a template via the TonicDNS API.

    Argument:
        server: TonicDNS API server
        token: TonicDNS API authentication token
            (sent as x-authentication-token)
        template: identifier of the template to delete
    """
    uri = 'https://' + server + '/template/' + template
    connect.tonicdns_client(uri, 'DELETE', token, data=False)
|
def get(self, request=None, timeout=1.0):
    """Get an NDEF message from the server.

    Temporarily connects to the default SNEP server if the client is
    not yet connected.  Returns ``None`` when no response data arrives
    or the response cannot be parsed (the parse error is logged).

    .. deprecated:: 0.13
       Use :meth:`get_records` or :meth:`get_octets`.
    """
    if request is None:
        # Default request: an empty NDEF message.
        request = nfc.ndef.Message(nfc.ndef.Record())
    if not isinstance(request, nfc.ndef.Message):
        raise TypeError("request type must be nfc.ndef.Message")
    response_data = self._get(request, timeout)
    if response_data is None:
        return None
    try:
        return nfc.ndef.Message(response_data)
    except Exception as error:
        log.error(repr(error))
|
def _qemu_image_create(disk, create_overlay=False, saltenv='base'):
    '''
    Create the image file using the specified disk size and/or disk image.

    disk
        disk definition dict; must contain 'source_file' and 'filename',
        plus at least one of 'size' (in MiB) or 'image' (template image
        to copy from).
    create_overlay
        when True and the source image is qcow2, create a copy-on-write
        overlay instead of copying the whole file.
    saltenv
        fileserver environment used to cache the source image.

    Return path to the created image file
    '''
    disk_size = disk.get('size', None)
    disk_image = disk.get('image', None)
    if not disk_size and not disk_image:
        raise CommandExecutionError('Unable to create new disk {0}, please specify'
                                    ' disk size and/or disk image argument'.format(disk['filename']))
    img_dest = disk['source_file']
    log.debug('Image destination will be %s', img_dest)
    img_dir = os.path.dirname(img_dest)
    log.debug('Image destination directory is %s', img_dir)
    if not os.path.exists(img_dir):
        os.makedirs(img_dir)
    if disk_image:
        log.debug('Create disk from specified image %s', disk_image)
        sfn = __salt__['cp.cache_file'](disk_image, saltenv)
        qcow2 = False
        # Only probe the image format when qemu-img is available.
        if salt.utils.path.which('qemu-img'):
            res = __salt__['cmd.run']('qemu-img info {}'.format(sfn))
            imageinfo = salt.utils.yaml.safe_load(res)
            qcow2 = imageinfo['file format'] == 'qcow2'
        try:
            if create_overlay and qcow2:
                # Copy-on-write overlay backed by the cached image.
                log.info('Cloning qcow2 image %s using copy on write', sfn)
                __salt__['cmd.run']('qemu-img create -f qcow2 -o backing_file={0} {1}'.format(sfn, img_dest).split())
            else:
                log.debug('Copying %s to %s', sfn, img_dest)
                salt.utils.files.copyfile(sfn, img_dest)
            mask = salt.utils.files.get_umask()
            # Resizing is only meaningful for qcow2 images here.
            if disk_size and qcow2:
                log.debug('Resize qcow2 image to %sM', disk_size)
                __salt__['cmd.run']('qemu-img resize {0} {1}M'.format(img_dest, disk_size))
            log.debug('Apply umask and remove exec bit')
            # Honour the umask but never leave the image executable.
            mode = (0o0777 ^ mask) & 0o0666
            os.chmod(img_dest, mode)
        except (IOError, OSError) as err:
            raise CommandExecutionError('Problem while copying image. {0} - {1}'.format(disk_image, err))
    else:  # Create empty disk
        try:
            mask = salt.utils.files.get_umask()
            if disk_size:
                log.debug('Create empty image with size %sM', disk_size)
                __salt__['cmd.run']('qemu-img create -f {0} {1} {2}M'.format(disk.get('format', 'qcow2'), img_dest, disk_size))
            else:
                raise CommandExecutionError('Unable to create new disk {0},'
                                            ' please specify <size> argument'.format(img_dest))
            log.debug('Apply umask and remove exec bit')
            mode = (0o0777 ^ mask) & 0o0666
            os.chmod(img_dest, mode)
        except (IOError, OSError) as err:
            raise CommandExecutionError('Problem while creating volume {0} - {1}'.format(img_dest, err))
    return img_dest
|
def _parse_fail ( name , var_type , value , values ) :
"""Helper function for raising a value error for bad assignment ."""
|
raise ValueError ( 'Could not parse hparam \'%s\' of type \'%s\' with value \'%s\' in %s' % ( name , var_type . __name__ , value , values ) )
|
def has_parent_objective_banks(self, objective_bank_id):
    """Tests if the ``ObjectiveBank`` has any parents.

    arg:    objective_bank_id (osid.id.Id): the ``Id`` of an objective
            bank
    return: (boolean) - ``true`` if the objective bank has parents,
            ``false`` otherwise
    raise:  NotFound - ``objective_bank_id`` is not found
    raise:  NullArgument - ``objective_bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Mirrors osid.resource.BinHierarchySession.has_parent_bins:
    # delegate to the catalog session when one is configured.
    if self._catalog_session is None:
        return self._hierarchy_session.has_parents(id_=objective_bank_id)
    return self._catalog_session.has_parent_catalogs(catalog_id=objective_bank_id)
|
def zone_schedules_backup(self, filename):
    """Back up the schedule of every zone (and DHW) to a JSON file."""
    _LOGGER.info("Backing up schedules from ControlSystem: %s (%s)...",
                 self.systemId, self.location.name)
    schedules = {}
    if self.hotwater:
        _LOGGER.info("Retrieving DHW schedule: %s...", self.hotwater.zoneId)
        schedules[self.hotwater.zoneId] = {
            'name': 'Domestic Hot Water',
            'schedule': self.hotwater.schedule(),
        }
    for zone in self._zones:
        _LOGGER.info("Retrieving Zone schedule: %s - %s", zone.zoneId, zone.name)
        schedules[zone.zoneId] = {'name': zone.name, 'schedule': zone.schedule()}
    _LOGGER.info("Writing to backup file: %s...", filename)
    with open(filename, 'w') as file_output:
        file_output.write(json.dumps(schedules, indent=4))
    _LOGGER.info("Backup completed.")
|
def process_params(self, params):
    '''Populate the launch data from a dictionary.

    Only cares about keys in the LAUNCH_DATA_PARAMETERS list, or keys
    that start with 'custom_' or 'ext_'.  The 'roles' key may be either
    a list or a comma-delimited string.
    '''
    for key, val in params.items():
        if key in LAUNCH_DATA_PARAMETERS and val != 'None':
            if key == 'roles':
                if isinstance(val, list):
                    # Already a list, no need to parse.
                    self.roles = list(val)
                else:
                    # A ','-delimited string, split it.
                    self.roles = val.split(',')
            else:
                setattr(self, key, touni(val))
        # startswith(): the docstring promises prefix matching; the old
        # `'custom_' in key` also matched e.g. 'my_custom_field'.
        elif key.startswith('custom_'):
            self.custom_params[key] = touni(val)
        elif key.startswith('ext_'):
            self.ext_params[key] = touni(val)
|
def remove_handler():
    """Remove the user, group, role and policies created for Blockade.

    Every removal is best-effort: entities that are already gone are
    logged and skipped, so the teardown is idempotent.

    Returns:
        True (failures are logged, never raised).
    """
    logger.debug("[#] Removing user, group and permissions for Blockade")
    client = boto3.client("iam", region_name=PRIMARY_REGION)
    iam = boto3.resource('iam')
    # The account id is the fifth ':'-separated field of the user ARN.
    account_id = iam.CurrentUser().arn.split(':')[4]
    try:
        logger.debug("[#] Removing %s from %s group" % (BLOCKADE_USER, BLOCKADE_GROUP))
        client.remove_user_from_group(GroupName=BLOCKADE_GROUP, UserName=BLOCKADE_USER)
    except client.exceptions.NoSuchEntityException:
        logger.debug("[!] Blockade user already removed from group")
    for label in BLOCKADE_POLICIES + ['PushToCloud', 'APIGatewayAdmin']:
        logger.debug("[#] Removing %s policy" % (label))
        arn = 'arn:aws:iam::{id}:policy/{policy}'.format(id=account_id, policy=label)
        # The two AWS-managed policies have fixed, account-independent ARNs.
        if label == 'PushToCloud':
            arn = "arn:aws:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
        if label == 'APIGatewayAdmin':
            arn = "arn:aws:iam::aws:policy/AmazonAPIGatewayAdministrator"
        # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; detach/delete remain best-effort.
        try:
            client.detach_group_policy(GroupName=BLOCKADE_GROUP, PolicyArn=arn)
        except Exception:
            pass
        try:
            client.detach_role_policy(RoleName=BLOCKADE_ROLE, PolicyArn=arn)
        except Exception:
            pass
        try:
            client.delete_policy(PolicyArn=arn)
        except Exception as e:
            # Was print(e); route through the module logger like the rest.
            logger.debug("[!] Could not delete policy %s: %s" % (arn, e))
    logger.debug("[#] Removed all policies")
    try:
        logger.debug("[#] Deleting %s user" % (BLOCKADE_USER))
        client.delete_user(UserName=BLOCKADE_USER)
    except client.exceptions.NoSuchEntityException:
        logger.debug("[!] %s user already deleted" % (BLOCKADE_USER))
    try:
        logger.debug("[#] Removing %s group" % (BLOCKADE_GROUP))
        client.delete_group(GroupName=BLOCKADE_GROUP)
    except Exception:
        logger.debug("[!] Group already removed")
    try:
        logger.debug("[#] Removing %s role" % (BLOCKADE_ROLE))
        client.delete_role(RoleName=BLOCKADE_ROLE)
    except Exception:
        logger.debug("[!] Role already removed")
    return True
|
def hexblock_byte(cls, data, address=None, bits=None, separator=' ', width=16):
    """Dump a block of hexadecimal BYTEs from binary data.

    @type  data: str
    @param data: Binary data.
    @type  address: str
    @param address: Memory address where the data was read from.
    @type  bits: int
    @param bits: (Optional) Number of bits of the target architecture.
        The default is platform dependent. See: L{HexDump.address_size}
    @type  separator: str
    @param separator: Separator between the hexadecimal representation
        of each BYTE.
    @type  width: int
    @param width: (Optional) Maximum number of BYTEs to convert per
        text line.
    @rtype:  str
    @return: Multiline output text.
    """
    # Delegate to the generic callback-based dumper, formatting each
    # byte with cls.hexadecimal.
    callback_kwargs = {'separator': separator}
    return cls.hexblock_cb(cls.hexadecimal, data, address, bits, width,
                           cb_kwargs=callback_kwargs)
|
def setup(app):
    "Setup function for Sphinx Extension"
    for option_name in ("sphinx_to_github", "sphinx_to_github_verbose"):
        app.add_config_value(option_name, True, '')
    app.connect("build-finished", sphinx_extension)
|
def add_permission(self, resource, operation):
    '''Add a new :class:`Permission` for ``resource`` to perform an
    ``operation``. The resource can be either an object or a model.'''
    if isclass(resource):
        model_type, pk = resource, ''
    else:
        model_type, pk = resource.__class__, resource.pkvalue()
    permission = Permission(model_type=model_type, object_pk=pk, operation=operation)
    session = self.session
    if session.transaction:
        # Already inside a transaction: add directly.
        session.add(permission)
        self.permissions.add(permission)
        return permission
    # Otherwise open a transaction and register the permission on commit.
    with session.begin() as t:
        t.add(permission)
        self.permissions.add(permission)
        return t.add_callback(lambda r: permission)
|
def calculate_x_ticks(self, plot_width):
    """Compute (position, label) pairs for x-axis ticks at *plot_width*."""
    x_calibration = self.x_calibration
    left = self.__uncalibrated_left_channel
    right = self.__uncalibrated_right_channel
    if x_calibration is not None:
        left = x_calibration.convert_to_calibrated_value(left)
        right = x_calibration.convert_to_calibrated_value(right)
    # Normalise so left <= right regardless of calibration direction.
    if left > right:
        left, right = right, left
    graph_left, graph_right, tick_values, division, precision = \
        Geometry.make_pretty_range(left, right)
    drawn_width = self.drawn_right_channel - self.drawn_left_channel
    x_ticks = []
    if drawn_width > 0.0:
        for tick_value in tick_values:
            label = nice_label(tick_value, precision)
            data_tick = (x_calibration.convert_from_calibrated_value(tick_value)
                         if x_calibration else tick_value)
            x_tick = plot_width * (data_tick - self.drawn_left_channel) / drawn_width
            # Keep only ticks that land inside the plot area.
            if 0 <= x_tick <= plot_width:
                x_ticks.append((x_tick, label))
    return x_ticks
|
def load_rsa_private_key_file(rsakeyfile, passphrase):
    # type: (str, str) -> cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey
    """Load an RSA private key PEM file, decrypting with the passphrase
    if one is specified.

    :param str rsakeyfile: RSA private key PEM file to load
    :param str passphrase: optional passphrase
    :rtype: cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey
    :return: RSAPrivateKey
    """
    # Expand ~ and environment variables before opening.
    keypath = os.path.expandvars(os.path.expanduser(rsakeyfile))
    password = passphrase.encode('utf8') if passphrase is not None else None
    with open(keypath, 'rb') as keyfile:
        pem_data = keyfile.read()
    return cryptography.hazmat.primitives.serialization.load_pem_private_key(
        pem_data, password,
        backend=cryptography.hazmat.backends.default_backend())
|
def configure(self, config):
    """See base class method."""
    super().configure(config)
    option_name = 'regex{suffix}'.format(suffix=self._option_suffix)
    # Fall back to the currently compiled pattern when the option is absent.
    pattern = config.get(option_name, fallback=self._regex.pattern)
    self._regex = self._build_matcher(pattern)
|
def update_model_in_repo_based_on_filename(self, model):
    """Adds a model to the repo (not initially visible).

    Args:
        model: the model to be added. If the model has no filename
            (``_tx_filename`` is None), an ``anonymous<N>`` name is
            invented, unless this exact object is already registered.

    Returns: the filename of the model added to the repo
    """
    if model._tx_filename is None:
        # Anonymous model: reuse an existing registration for this
        # exact object, if any.
        for fn in self.all_models.filename_to_model:
            if self.all_models.filename_to_model[fn] == model:
                return fn
        # Otherwise invent the first free anonymous<N> name.
        i = 0
        while self.all_models.has_model("anonymous{}".format(i)):
            i += 1
        myfilename = "anonymous{}".format(i)
        self.all_models.filename_to_model[myfilename] = model
    else:
        myfilename = model._tx_filename
        if (not self.all_models.has_model(myfilename)):
            self.all_models.filename_to_model[myfilename] = model
    return myfilename
|
def full_game_name(short_name):
    """CamelCase game name with mode suffix.

    Args:
        short_name: snake_case name without mode, e.g. "crazy_climber"

    Returns:
        full game name, e.g. "CrazyClimberNoFrameskip-v4"
    """
    return misc_utils.snakecase_to_camelcase(short_name) + ATARI_GAME_MODE
|
def transform_to_length(nndata, length):
    """Pad NNData out to the specified fingerprint length.

    Coordination numbers below *length* that are missing from
    ``cn_weights`` get weight 0 and an empty ``cn_nninfo`` entry.
    A *length* of None leaves the data untouched.

    Args:
        nndata: (NNData)
        length: (int) desired length of NNData
    """
    if length is None:
        return nndata
    if length:
        for coord_num in range(length):
            if coord_num not in nndata.cn_weights:
                nndata.cn_weights[coord_num] = 0
                nndata.cn_nninfo[coord_num] = []
    return nndata
|
def _setdefault ( obj , key , value ) :
"""DO NOT USE _ _ dict _ _ . setdefault ( obj , key , value ) , IT DOES NOT CHECK FOR obj [ key ] = = None"""
|
v = obj . get ( key )
if v == None :
obj [ key ] = value
return value
return v
|
def compile_binary(source):
    """Prepare the chkrootkit binary from a source tarball.

    Equivalent shell steps:
        $ tar xzvf chkrootkit.tar.gz
        $ cd chkrootkit-0.52
        $ make sense
        sudo mv chkrootkit-0.52 /usr/local/chkrootkit
        sudo ln -s /usr/local/chkrootkit/chkrootkit /usr/local/bin/chkrootkit

    Returns True on success, False if the tarball yields no member names.
    """
    cmd = 'make sense'
    slink = '/usr/local/bin/chkrootkit'
    install_dir = '/usr/local/chkrootkit'
    target = '/usr/local/chkrootkit/chkrootkit'
    # Tar Extraction
    t = tarfile.open(source, 'r')
    t.extractall(TMPDIR)
    if isinstance(t.getnames(), list):
        extract_dir = t.getnames()[0].split('/')[0]
        os.chdir(TMPDIR + '/' + extract_dir)
        logger.info('make output: \n%s' % subprocess.getoutput(cmd))
        # BUG FIX: the destination must be absolute. The old relative
        # 'usr/local/chkrootkit' resolved *inside* the build directory
        # (we just chdir'd into it), so the install never reached
        # /usr/local and the symlink target below dangled.
        os.rename(TMPDIR + '/' + extract_dir, install_dir)
        # create symlink to binary in directory
        os.symlink(target, slink)
        return True
    return False
|
def MakeSuiteFromHist(hist, name=None):
    """Makes a normalized suite from a Hist object.

    Args:
        hist: Hist object
        name: string name; defaults to the Hist's own name

    Returns:
        Suite object
    """
    if name is None:
        name = hist.name
    # Copy the dictionary so the Suite does not share state with the Hist.
    return MakeSuiteFromDict(dict(hist.GetDict()), name)
|
def sections(self):
    '''List of section titles from the table of contents on the page.'''
    # Fetch lazily and cache on first access.
    if not getattr(self, '_sections', False):
        params = {'action': 'parse', 'prop': 'sections'}
        params.update(self.__title_query_param)
        result = _wiki_request(params)
        self._sections = [entry['line'] for entry in result['parse']['sections']]
    return self._sections
|
def scaleField(self, scalingFactor):
    """Multiply the magnet's field by ``scalingFactor``.

    The adjustment is multiplicative, so ``scalingFactor=1.0`` leaves
    the field unchanged.
    """
    scaled_value = self.field_strength.val * scalingFactor
    self.field_strength = self.field_strength._replace(val=scaled_value)
|
def contamination_detection(self):
    """Calculate the levels of contamination in the reads."""
    self.qualityobject = quality.Quality(self)
    self.qualityobject.contamination_finder(
        input_path=self.sequencepath,
        report_path=self.reportpath,
    )
|
def find_all(self, find_string, flags):
    """Return list of all grid keys whose code matches *find_string*.

    Only the code is searched; results are not searched here.

    Parameters
    ----------
    find_string: String
    \tString to find in grid
    flags: List of strings
    \tSearch flag out of
    \t["UP" xor "DOWN", "WHOLE_WORD", "MATCH_CASE", "REG_EXP"]
    """
    code_array = self.grid.code_array
    matches = code_array.string_match
    return [key for key in code_array
            if matches(code_array(key), find_string, flags) is not None]
|
def rhypergeometric(n, m, N, size=None):
    """Returns hypergeometric random variates."""
    if n == 0:
        # No "good" items in the population: always zero successes.
        return np.zeros(size, dtype=int)
    if n == N:
        # Every item is "good": each sample contains exactly m successes.
        result = np.empty(size, dtype=int)
        result.fill(m)
        return result
    return np.random.hypergeometric(n, N - n, m, size)
|
def pad_timestamp(string, pad_str=PAD_6_UP):
    """Right-pad a partial timestamp with the tail of *pad_str*.

    >>> pad_timestamp('20')
    '209912'
    >>> pad_timestamp('2014')
    '201412'
    >>> pad_timestamp('20141011')
    '20141011'
    >>> pad_timestamp('201410110010')
    '201410110010'
    """
    missing = len(pad_str) - len(string)
    if missing > 0:
        string += pad_str[-missing:]
    return string
|
def extract_morphological_information(mrph_object, is_feature, is_surface):
    # type: (pyknp.Morpheme, bool, bool) -> TokenizedResult
    """Build a TokenizedResult from a pyknp Morpheme object."""
    assert isinstance(mrph_object, pyknp.Morpheme)
    assert isinstance(is_feature, bool)
    assert isinstance(is_surface, bool)
    # Conjugation and semantic annotations carried along as misc info.
    misc_info = {
        'katuyou1': mrph_object.katuyou1,
        'katuyou2': mrph_object.katuyou2,
        'imis': mrph_object.imis,
        'repname': mrph_object.repname,
    }
    return TokenizedResult(
        node_obj=None,
        tuple_pos=(mrph_object.hinsi, mrph_object.bunrui),
        word_stem=mrph_object.genkei,
        word_surface=mrph_object.midasi,
        is_feature=is_feature,
        is_surface=is_surface,
        misc_info=misc_info,
    )
|
def transform_collection(collection_dir):
    """Generate one HTML document for an unzipped collection.

    Loads and converts each included module, splicing its body in place
    of the corresponding include link.
    """
    collxml_file = open(os.path.join(collection_dir, 'collection.xml'))
    collxml_html = transform_collxml(collxml_file)
    for node in INCLUDE_XPATH(collxml_html):
        # href looks like "<module-id>@<version>"; version is ignored.
        module = node.attrib['href'].split('@')[0]
        module_dir = os.path.join(collection_dir, module)
        # Prefer index_auto_generated.cnxml, fall back to index.cnxml.
        module_path = os.path.join(module_dir, 'index_auto_generated.cnxml')
        if not os.path.exists(module_path):
            module_path = os.path.join(module_dir, 'index.cnxml')
        module_html = transform_cnxml(module_path)
        # Replace the include link with the body of the module.
        module_body = MODULE_BODY_XPATH(module_html)
        node.getparent().replace(node, module_body[0])
    return collxml_html
|
def interventions(self):
    """Dictionary of interventions in /scenario/interventions/vectorPop section."""
    if self.et is None:
        return {}
    return {
        element.attrib['name']: VectorPopIntervention(element)
        for element in self.et.findall("intervention")
    }
|
def _f16_oper(op1, op2=None, useBC=False, reversed=False):
    """Returns pop sequence for 32 bits operands.

    1st operand ends in HL/DE (or BC/DE, see below); the 2nd operand
    remains pushed on the stack.

    Operand encoding: a leading '*' marks an indirect operand and a
    leading '#' an immediate one; a leading '_' is a label (address);
    a numeric literal is a fixed-point constant split via f16().

    It supports operand inversion by calling __SWAP32 at the end.
    If the 1st operand is an integer (immediate) or indirect, the stack
    is rearranged so it contains a 32 bit pushed parameter value for
    the subroutine to be called.

    If useBC is True, BC is used instead of HL for the lower part of
    the 1st operand.  NOTE(review): the docstring's ``preserveHL`` and
    the ``if preserveHL:`` branches below reference a name that is not
    a parameter of this function — those branches are known-broken
    (marked ``noqa TODO``) and would raise NameError if reached.
    """
    output = []
    if op1 is not None:
        op1 = str(op1)
    if op2 is not None:
        op2 = str(op2)
    # Process the 2nd operand first (it is the deeper one on the stack).
    op = op2 if op2 is not None else op1
    float1 = False  # whether op1 (2nd operand) is float
    indirect = (op[0] == '*')
    if indirect:
        op = op[1:]
    immediate = (op[0] == '#')
    if immediate:
        op = op[1:]
    hl = 'hl' if not useBC and not indirect else 'bc'
    if is_float(op):
        float1 = True
        op = float(op)
        if indirect:
            # Constant address: load the 32-bit value through __ILOAD32.
            op = int(op) & 0xFFFF
            if immediate:
                output.append('ld hl, %i' % op)
            else:
                output.append('ld hl, (%i)' % op)
            output.append('call __ILOAD32')
            REQUIRES.add('iload32.asm')
            if preserveHL:  # noqa TODO: it will fail
                output.append('ld b, h')
                output.append('ld c, l')
        else:
            # Plain constant: emit its two 16-bit halves directly.
            DE, HL = f16(op)
            output.append('ld de, %i' % DE)
            output.append('ld %s, %i' % (hl, HL))
    else:
        if op[0] == '_':
            # Labelled address: immediate loads the label itself,
            # otherwise the value stored at the label.
            if immediate:
                output.append('ld %s, %s' % (hl, op))
            else:
                output.append('ld %s, (%s)' % (hl, op))
        else:
            output.append('pop %s' % hl)
        if indirect:
            output.append('call __ILOAD32')
            REQUIRES.add('iload32.asm')
            if preserveHL:  # noqa TODO: it will fail
                output.append('ld b, h')
                output.append('ld c, l')
        else:
            if op[0] == '_':
                output.append('ld de, (%s + 2)' % op)
            else:
                output.append('pop de')
    # Now the 1st operand, which must end up pushed on the stack.
    if op2 is not None:
        op = op1
        indirect = (op[0] == '*')
        if indirect:
            op = op[1:]
        immediate = (op[0] == '#')
        if immediate:
            op = op[1:]
        if is_float(op):
            op = float(op)
            if indirect:
                # Use the alternate register set so HL/DE (holding the
                # 2nd operand) are not clobbered while pushing.
                op = int(op)
                output.append('exx')
                if immediate:
                    output.append('ld hl, %i' % (op & 0xFFFF))
                else:
                    output.append('ld hl, (%i)' % (op & 0xFFFF))
                output.append('call __ILOAD32')
                output.append('push de')
                output.append('push hl')
                output.append('exx')
                REQUIRES.add('iload32.asm')
            else:
                # Constant: push its two halves via BC.
                DE, HL = f16(op)
                output.append('ld bc, %i' % DE)
                output.append('push bc')
                output.append('ld bc, %i' % HL)
                output.append('push bc')
        else:
            if indirect:
                output.append('exx')  # uses alternate set to put it on the stack
                if op[0] == '_':
                    if immediate:
                        output.append('ld hl, %s' % op)
                    else:
                        output.append('ld hl, (%s)' % op)
                else:
                    output.append('pop hl')  # Pointers are only 16 bits ***
                output.append('call __ILOAD32')
                output.append('push de')
                output.append('push hl')
                output.append('exx')
                REQUIRES.add('iload32.asm')
            elif op[0] == '_':  # an address
                if float1 or op1[0] == '_':
                    # If previous op was constant, we can use hl in advance:
                    # prepend the pushes so they run before the 2nd operand's
                    # loads (which only used constants/labels, not the stack).
                    tmp = output
                    output = []
                    output.append('ld hl, (%s + 2)' % op)
                    output.append('push hl')
                    output.append('ld hl, (%s)' % op)
                    output.append('push hl')
                    output.extend(tmp)
                else:
                    output.append('ld bc, (%s + 2)' % op)
                    output.append('push bc')
                    output.append('ld bc, (%s)' % op)
                    output.append('push bc')
            else:
                pass  # 2nd operand remains in the stack
    # Optional operand swap for non-commutative operations.
    if op2 is not None and reversed:
        output.append('call __SWAP32')
        REQUIRES.add('swap32.asm')
    return output
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.