signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def bygroups(*args):
    """Callback that yields multiple actions for each group in the match."""
    def callback(lexer, match, ctx=None):
        # Positional argument k describes regex group k+1; None means skip.
        for idx, action in enumerate(args, start=1):
            if action is None:
                continue
            data = match.group(idx)
            if type(action) is _TokenType:
                # Plain token type: emit (position, token, text) when non-empty.
                if data:
                    yield match.start(idx), action, data
            elif data is not None:
                # Nested callback: delegate with a pseudo-match for the group.
                if ctx:
                    ctx.pos = match.start(idx)
                for token in action(lexer, _PseudoMatch(match.start(idx), data), ctx):
                    if token:
                        yield token
        if ctx:
            ctx.pos = match.end()
    return callback
|
def _clean_kwargs(keep_name=False, **kwargs):
    '''Sanitize the arguments for use with shade.

    If a ``name`` keyword is present and ``keep_name`` is False, it is
    renamed to ``name_or_id`` (the form shade expects), then all kwargs are
    passed through the salt ``args.clean_kwargs`` utility.
    '''
    if 'name' in kwargs and not keep_name:
        kwargs['name_or_id'] = kwargs.pop('name')
    return __utils__['args.clean_kwargs'](**kwargs)
|
def dockerize_package(ctx, os, host):
    """Creates a Docker build file pre-configured with Plume.

    Thin CLI wrapper that delegates to ``canari.commands.dockerize_package``.

    :param ctx: context object carrying the current project
        (only ``ctx.project`` is read here).
    :param os: target OS for the Docker image (note: this parameter
        shadows the stdlib ``os`` module inside this function).
    :param host: Docker host to build against.
    """
    # Imported lazily so the command module is only loaded when used.
    from canari.commands.dockerize_package import dockerize_package
    dockerize_package(ctx.project, os, host)
|
def update_script_from_item(self, item):
    """updates the script based on the information provided in item

    Args:
        script: script to be updated
        item: B26QTreeItem that contains the new settings of the script
    """
    script, path_to_script, script_item = item.get_script()
    # Get the full information from the script item; to_dict() returns a
    # single-entry dict, so take its only value.
    dictator = list(script_item.to_dict().values())[0]
    for instrument in list(script.instruments.keys()):
        # Push the new instrument settings into the script ...
        script.instruments[instrument]['settings'] = dictator[instrument]['settings']
        # ... then remove them from dictator so only script-level settings remain.
        del dictator[instrument]
    for sub_script_name in list(script.scripts.keys()):
        # Recurse into each sub-script and drop its entry as well.
        sub_script_item = script_item.get_subscript(sub_script_name)
        self.update_script_from_item(sub_script_item)
        del dictator[sub_script_name]
    # Whatever is left in dictator are the script's own settings.
    script.update(dictator)
    # Update the data folder path from the GUI settings.
    script.data_path = self.gui_settings['data_folder']
|
def get_events(self):
    """Get event list from satellite

    :return: A copy of the events list
    :rtype: list
    """
    # Snapshot the pending events, then empty the shared list in place so
    # other holders of the same list object see it cleared.
    snapshot = self.events[:]
    del self.events[:]
    return snapshot
|
def load(self, schema_file: Union[str, TextIO], schema_location: Optional[str] = None) -> ShExJ.Schema:
    """Load a ShEx Schema from schema_location.

    :param schema_file: name or file-like object to deserialize
    :param schema_location: URL or file name of schema. Used to create the base_location
    :return: ShEx Schema represented by schema_location
    """
    if isinstance(schema_file, str):
        # A string is treated as a location: apply rewrite rules, then fetch.
        schema_file = self.location_rewrite(schema_file)
        self.schema_text = load_shex_file(schema_file)
    else:
        # Otherwise it is an already-open file-like object.
        self.schema_text = schema_file.read()
    # Resolve the root location used for relative references, in priority
    # order: explicit base_location, directory of schema_location, else None.
    if self.base_location:
        self.root_location = self.base_location
    elif schema_location:
        self.root_location = os.path.dirname(schema_location) + '/'
    else:
        self.root_location = None
    return self.loads(self.schema_text)
|
def as_issue(self):
    """:calls: `GET /repos/:owner/:repo/issues/:number <http://developer.github.com/v3/issues>`_

    :rtype: :class:`github.Issue.Issue`
    """
    # Fetch the issue resource this object links to and wrap it in a
    # fully-populated Issue instance.
    headers, data = self._requester.requestJsonAndCheck("GET", self.issue_url)
    return github.Issue.Issue(self._requester, headers, data, completed=True)
|
def description_record(self):
    """Return dict describing :class:`condoor.Connection` object.

    Raises ConnectionError when no connection chains exist yet.

    Example::

        {'connections': [{'chain': [{'driver_name': 'eXR',
                                     'family': 'ASR9K',
                                     'hostname': 'vkg3',
                                     'is_console': True,
                                     'is_target': True,
                                     'mode': 'global',
                                     'os_type': 'eXR',
                                     'os_version': '6.1.2.06I',
                                     'platform': 'ASR-9904',
                                     'prompt': 'RP/0/RSP0/CPU0:vkg3#',
                                     'udi': {'description': 'ASR-9904 AC Chassis',
                                             'name': 'Rack 0',
                                             'pid': 'ASR-9904-AC',
                                             'sn': 'FOX2024GKDE',
                                             'vid': 'V01'}}]},
                         {'chain': [{'driver_name': 'generic',
                                     'family': None,
                                     'hostname': '172.27.41.52:2045',
                                     'is_console': None,
                                     'is_target': True,
                                     'mode': None,
                                     'os_type': None,
                                     'os_version': None,
                                     'platform': None,
                                     'prompt': None,
                                     'udi': None}]}],
         'last_chain': 0}
    """
    if self.connection_chains:
        # One entry per chain; each entry lists the per-device info dicts.
        return {'connections': [{'chain': [device.device_info for device in chain.devices]} for chain in self.connection_chains], 'last_chain': self._last_chain_index, }
    else:
        raise ConnectionError("Device not connected")
|
def sanity_check_subsections(self):
    """This function goes through the ConfigParser and checks that any options
    given in the [SECTION_NAME] section are not also given in any
    [SECTION_NAME-SUBSECTION] sections.
    """
    all_sections = self.sections()
    for parent in all_sections:
        # [pegasus_profile] is explicitly allowed to be overridden by
        # its sub-sections, so it is exempt from the check.
        if parent == 'pegasus_profile':
            continue
        prefix = parent + '-'
        for candidate in all_sections:
            # A section named "<parent>-<anything>" is a subsection of
            # parent; duplicated options between the two are an error.
            if candidate.startswith(prefix):
                self.check_duplicate_options(parent, candidate, raise_error=True)
|
def plot_total(self, colorbar=True, cb_orientation='vertical', cb_label='$|B|$, nT', ax=None, show=True, fname=None, **kwargs):
    """Plot the total magnetic intensity.

    Usage
    -----
    x.plot_total([tick_interval, xlabel, ylabel, ax, colorbar,
                  cb_orientation, cb_label, show, fname, **kwargs])

    Parameters
    ----------
    tick_interval : list or tuple, optional, default = [30, 30]
        Intervals to use when plotting the x and y ticks. If set to None,
        ticks will not be plotted.
    xlabel : str, optional, default = 'longitude'
        Label for the longitude axis.
    ylabel : str, optional, default = 'latitude'
        Label for the latitude axis.
    ax : matplotlib axes object, optional, default = None
        A single matplotlib axes object where the plot will appear.
    colorbar : bool, optional, default = True
        If True, plot a colorbar.
    cb_orientation : str, optional, default = 'vertical'
        Orientation of the colorbar: either 'vertical' or 'horizontal'.
    cb_label : str, optional, default = '$|B|$, nT'
        Text label for the colorbar.
    show : bool, optional, default = True
        If True, plot the image to the screen.
    fname : str, optional, default = None
        If present, and if axes is not specified, save the image to the
        specified file.
    kwargs : optional
        Keyword arguments that will be sent to the SHGrid.plot()
        and plt.imshow() methods.

    Notes
    -----
    Returns (fig, axes) only when ``ax`` is None; when an axes object is
    supplied the plot is drawn onto it and nothing is returned.
    """
    if ax is None:
        # Create a new figure through the grid's own plot method.
        fig, axes = self.total.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, show=False, **kwargs)
        if show:
            fig.show()
        if fname is not None:
            fig.savefig(fname)
        return fig, axes
    else:
        # Draw onto the caller-provided axes; no figure handling here.
        self.total.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, ax=ax, **kwargs)
|
def import_from_xls(filename_or_fobj, sheet_name=None, sheet_index=0, start_row=None, start_column=None, end_row=None, end_column=None, *args, **kwargs):
    """Return a rows.Table created from imported XLS file.

    :param filename_or_fobj: path or binary file object for the XLS workbook
    :param sheet_name: sheet to read by name (takes precedence over index)
    :param sheet_index: sheet to read by position when no name is given
    :param start_row/start_column/end_row/end_column: optional 0-based bounds;
        clamped to the detected data region of the sheet
    """
    source = Source.from_file(filename_or_fobj, mode="rb", plugin_name="xls")
    # xlrd reopens by URI itself; close the probe handle first.
    source.fobj.close()
    book = xlrd.open_workbook(source.uri, formatting_info=True, logfile=open(os.devnull, mode="w"))
    if sheet_name is not None:
        sheet = book.sheet_by_name(sheet_name)
    else:
        sheet = book.sheet_by_index(sheet_index)
    # TODO: may re-use Excel data types
    # The xlrd library reads rows and columns starting from 0 and ending on
    # sheet.nrows/ncols - 1. rows also uses 0-based indexes, so no
    # transformation is needed.
    min_row, min_column = get_table_start(sheet)
    max_row, max_column = sheet.nrows - 1, sheet.ncols - 1
    # TODO: consider adding a parameter `ignore_padding=True` and when it's
    # True, consider `start_row` starting from `min_row` and `start_column`
    # starting from `min_col`.
    # Clamp caller-supplied bounds to the detected data region.
    start_row = max(start_row if start_row is not None else min_row, min_row)
    end_row = min(end_row if end_row is not None else max_row, max_row)
    start_column = max(start_column if start_column is not None else min_column, min_column)
    end_column = min(end_column if end_column is not None else max_column, max_column)
    table_rows = [[cell_value(sheet, row_index, column_index) for column_index in range(start_column, end_column + 1)] for row_index in range(start_row, end_row + 1)]
    meta = {"imported_from": "xls", "source": source, "name": sheet.name}
    return create_table(table_rows, meta=meta, *args, **kwargs)
|
def generate(cls, country_code, bank_code, account_code):
    """Generate an IBAN from its components.

    If the bank-code and/or account-number have less digits than required by their
    country specific representation, the respective component is padded with zeros.

    Examples:
        To generate an IBAN do the following::

            >>> bank_code = '37040044'
            >>> account_code = '532013000'
            >>> iban = IBAN.generate('DE', bank_code, account_code)
            >>> iban.formatted
            'DE89 3704 0044 0532 0130 00'

    Args:
        country_code (str): The ISO 3166 alpha-2 country code.
        bank_code (str): The country specific bank-code.
        account_code (str): The customer specific account-code.
    """
    spec = _get_iban_spec(country_code)
    # The bank code slot also covers the (optional) branch code.
    bank_len = code_length(spec, 'bank_code')
    branch_len = code_length(spec, 'branch_code')
    combined_len = bank_len + branch_len
    account_len = code_length(spec, 'account_code')
    if len(bank_code) > combined_len:
        raise ValueError("Bank code exceeds maximum size {}".format(combined_len))
    if len(account_code) > account_len:
        raise ValueError("Account code exceeds maximum size {}".format(account_len))
    # Zero-pad each component to its country-mandated width; the '??'
    # checksum placeholder is resolved by the constructor.
    padded_bank = bank_code.rjust(combined_len, '0')
    padded_account = account_code.rjust(account_len, '0')
    return cls(country_code + '??' + padded_bank + padded_account)
|
def _stmt_inside_loop ( self , stmt_idx ) :
"""Test whether a statement is inside the loop body or not .
: param stmt _ idx :
: return :"""
|
# TODO : This is slow . Fix the performance issue
for node in self . loop . body_nodes :
if node . addr . stmt_idx <= stmt_idx < node . addr . stmt_idx + node . size :
return True
return False
|
def close(self, child):
    """Close a child position - alias for rebalance(0, child). This will also
    flatten (close out all) the child's children.

    Args:
        * child (str): Child, specified by name.
    """
    target = self.children[child]
    # Collapse any nested children first so the whole subtree is closed out.
    if target.children is not None and len(target.children) != 0:
        target.flatten()
    # Only allocate away a real, non-zero, non-NaN value.
    if target.value != 0. and not np.isnan(target.value):
        target.allocate(-target.value)
|
def sample(self, label):
    """generate random cropping boxes according to parameters
    if satisfactory crops generated, apply to ground-truth as well

    Parameters:
        label : numpy.array (n x 5 matrix)
            ground-truths
    Returns:
        list of (crop_box, label) tuples, if failed, return empty list []
    """
    samples = []
    count = 0
    for trial in range(self.max_trials):
        if count >= self.max_sample:
            return samples
        # Draw a random scale and aspect ratio within the configured bounds;
        # the ratio range is tightened so width/height stay within [0, 1].
        scale = np.random.uniform(self.min_scale, self.max_scale)
        min_ratio = max(self.min_aspect_ratio, scale * scale)
        max_ratio = min(self.max_aspect_ratio, 1. / scale / scale)
        ratio = math.sqrt(np.random.uniform(min_ratio, max_ratio))
        width = scale * ratio
        height = scale / ratio
        # Random top-left corner so the crop box stays inside the unit square.
        left = np.random.uniform(0., 1 - width)
        top = np.random.uniform(0., 1 - height)
        rand_box = (left, top, left + width, top + height)
        # Only consider valid ground-truth rows (class id > -1).
        valid_mask = np.where(label[:, 0] > -1)[0]
        gt = label[valid_mask, :]
        ious = self._check_satisfy(rand_box, gt)
        if ious is not None:
            # transform gt labels after crop, discard bad ones
            l, t, r, b = rand_box
            new_gt_boxes = []
            new_width = r - l
            new_height = b - t
            for i in range(valid_mask.size):
                if ious[i] > 0:
                    # Re-express each kept box in the crop's coordinate
                    # frame, clipped to [0, 1].
                    xmin = max(0., (gt[i, 1] - l) / new_width)
                    ymin = max(0., (gt[i, 2] - t) / new_height)
                    xmax = min(1., (gt[i, 3] - l) / new_width)
                    ymax = min(1., (gt[i, 4] - t) / new_height)
                    new_gt_boxes.append([gt[i, 0], xmin, ymin, xmax, ymax])
            if not new_gt_boxes:
                continue
            new_gt_boxes = np.array(new_gt_boxes)
            # Pad back to the original row count with -1 so the label
            # array keeps a constant shape.
            label = np.lib.pad(new_gt_boxes, ((0, label.shape[0] - new_gt_boxes.shape[0]), (0, 0)), 'constant', constant_values=(-1, -1))
            samples.append((rand_box, label))
            count += 1
    return samples
|
def install_builtin(translator, do_unicode):
    """Install _() and _n() gettext methods into default namespace."""
    try:
        import __builtin__ as builtins  # Python 2
    except ImportError:
        import builtins  # Python 3
    # Python 3 translators have no ugettext; only honour do_unicode when
    # the translator actually provides the unicode variants.
    if do_unicode and hasattr(translator, 'ugettext'):
        gettext_func = translator.ugettext
        ngettext_func = translator.ungettext
    else:
        gettext_func = translator.gettext
        ngettext_func = translator.ngettext
    builtins.__dict__['_'] = gettext_func
    builtins.__dict__['_n'] = ngettext_func
|
def perform_batch_reply(self, *, callback: Callable[..., str] = None, target_handles: Dict[str, str] = None, lookback_limit: int = 20, per_service_lookback_limit: Dict[str, int] = None, ) -> IterationRecord:
    """Performs batch reply on target accounts.

    Looks up the recent messages of the target user, applies the callback,
    and replies with what the callback generates.

    :param callback: a callback taking a message id, message contents,
        and optional extra keys, and returning a message string.
    :param target_handles: a dictionary of service names to target handles
        (currently only one per service).
    :param lookback_limit: a lookback limit of how many messages to consider (optional).
    :param per_service_lookback_limit: a dictionary of service names to
        per-service lookback limits; takes preference over lookback_limit (optional).
    :returns: new record of iteration
    :raises BotSkeletonException: raises BotSkeletonException if batch reply fails or cannot be
        performed
    """
    # Both the callback and the targets are mandatory keyword arguments.
    if callback is None:
        raise BotSkeletonException(("Callback must be provided."
                                    ""))
    if target_handles is None:
        raise BotSkeletonException(("Targets must be provided."
                                    ""))
    if lookback_limit > self.lookback_limit:
        raise BotSkeletonException(f"Lookback_limit cannot exceed {self.lookback_limit}, " +
                                   f"but it was {lookback_limit}")
    # use per-service lookback dict for convenience in a moment.
    # if necessary, use lookback_limit to fill it out.
    lookback_dict = per_service_lookback_limit
    if (lookback_dict is None):
        lookback_dict = {}
    record = IterationRecord(extra_keys=self.extra_keys)
    for key, output in self.outputs.items():
        # Fall back to the global lookback limit for services without an override.
        if key not in lookback_dict:
            lookback_dict[key] = lookback_limit
        if target_handles.get(key, None) is None:
            self.log.info(f"No target for output {key}, skipping this output.")
        elif not output.get("active", False):
            self.log.info(f"Output {key} is inactive. Not calling batch reply.")
        elif output["active"]:
            self.log.info(f"Output {key} is active, calling batch reply on it.")
            entry: Any = output["obj"]
            output_result = entry.perform_batch_reply(callback=callback, target_handle=target_handles[key], lookback_limit=lookback_dict[key], )
            record.output_records[key] = output_result
    # Persist this iteration in the bot's history.
    self.history.append(record)
    self.update_history()
    return record
|
def postman(filename, pretty, urlvars, swagger):
    '''Dump the API as a Postman collection

    :param filename: destination file for the JSON collection
    :param pretty: whether to pretty-print the output JSON
    :param urlvars: include URL variables in the export
    :param swagger: include the swagger definitions in the export
    '''
    data = api.as_postman(urlvars=urlvars, swagger=swagger)
    json_to_file(data, filename, pretty)
|
def get_collection(self, uri=None, filter='', path=''):
    """Retrieves a collection of resources.

    Use this function when the 'start' and 'count' parameters are not allowed in the GET call.
    Otherwise, use get_all instead.
    Optional filtering criteria may be specified.

    Args:
        uri (str): base URI; defaults to this resource's base URI.
        filter (list or str): General filter/query string.
            (Note: the parameter name shadows the builtin ``filter``; kept
            for backward compatibility.)
        path (str): path to be added with base URI.

    Returns:
        Collection of the requested resource.
    """
    if not uri:
        uri = self._base_uri
    if filter:
        # make_query_filter returns "&key=value..."; swap the leading '&'
        # for '?' to start the query string.
        filter = self.make_query_filter(filter)
        filter = "?" + filter[1:]
    uri = "{uri}{path}{filter}".format(uri=uri, path=path, filter=filter)
    # Lazy %-style args: the string is only formatted if DEBUG is enabled.
    logger.debug('Get resource collection (uri = %s)', uri)
    response = self._connection.get(uri)
    return self.get_members(response)
|
def open(self, options):
    """Open and include the referenced schema.

    @param options: An options dictionary.
    @type options: L{options.Options}
    @return: The referenced schema.
    @rtype: L{Schema}
    """
    # NOTE(review): on a repeated call this returns None rather than the
    # schema — callers appear to treat open() as one-shot; confirm before
    # changing this behaviour.
    if self.opened:
        return
    self.opened = True
    log.debug('%s, including location="%s"', self.id, self.location)
    result = self.download(options)
    log.debug('included:\n%s', result)
    return result
|
def write_metadata_to_filestream(filedir, filestream, max_bytes=MAX_FILE_DEFAULT):
    """Make metadata file for all files in a directory (helper function)

    :param filedir: This field is the filepath of the directory whose csv
        has to be made.
    :param filestream: This field is a stream for writing to the csv.
    :param max_bytes: This field is the maximum file size to consider. Its
        default value is 128m.
    :raises ValueError: if subdirectories exist but are not all named as
        8-digit project member IDs.
    """
    csv_out = csv.writer(filestream)
    # Collect immediate subdirectories; their presence switches the output
    # into per-project-member mode.
    subdirs = [os.path.join(filedir, i) for i in os.listdir(filedir) if os.path.isdir(os.path.join(filedir, i))]
    if subdirs:
        logging.info('Making metadata for subdirs of {}'.format(filedir))
        # Every subdirectory must look like an 8-digit project member ID.
        if not all([re.match('^[0-9]{8}$', os.path.basename(d)) for d in subdirs]):
            raise ValueError("Subdirs not all project member ID format!")
        csv_out.writerow(['project_member_id', 'filename', 'tags', 'description', 'md5', 'creation_date'])
        for subdir in subdirs:
            file_info = characterize_local_files(filedir=subdir, max_bytes=max_bytes)
            proj_member_id = os.path.basename(subdir)
            # Emit a placeholder row for members with no qualifying files.
            if not file_info:
                csv_out.writerow([proj_member_id, 'None', 'NA', 'NA', 'NA', 'NA'])
                continue
            for filename in file_info:
                csv_out.writerow([proj_member_id, filename, ', '.join(file_info[filename]['tags']), file_info[filename]['description'], file_info[filename]['md5'], file_info[filename]['creation_date'], ])
    else:
        # Flat directory: one row per file, no project member column.
        csv_out.writerow(['filename', 'tags', 'description', 'md5', 'creation_date'])
        file_info = characterize_local_files(filedir=filedir, max_bytes=max_bytes)
        for filename in file_info:
            csv_out.writerow([filename, ', '.join(file_info[filename]['tags']), file_info[filename]['description'], file_info[filename]['md5'], file_info[filename]['creation_date'], ])
|
def tarbell_install(command, args):
    """Install a project.

    Clones the repository given as the first positional argument into a
    temp dir to validate it is a Tarbell project (has tarbell_config.py),
    then clones it for real into the configured projects path, initializes
    submodules, installs requirements, and runs the project's "install" hook.
    """
    with ensure_settings(command, args) as settings:
        project_url = args.get(0)
        puts("\n- Getting project information for {0}".format(project_url))
        project_name = project_url.split("/").pop()
        error = None
        # Create a tempdir and clone
        tempdir = tempfile.mkdtemp()
        try:
            # Bare, shallow clone into the tempdir purely to inspect the repo.
            testgit = sh.git.bake(_cwd=tempdir, _tty_in=True, _tty_out=False)
            # _err_to_out=True)
            testclone = testgit.clone(project_url, '.', '--depth=1', '--bare')
            puts(testclone)
            # Validates the repo is a Tarbell project; raises if file missing.
            config = testgit.show("HEAD:tarbell_config.py")
            puts("\n- Found tarbell_config.py")
            path = _get_path(_clean_suffix(project_name, ".git"), settings)
            _mkdir(path)
            # Real clone into the destination path, plus submodules.
            git = sh.git.bake(_cwd=path)
            clone = git.clone(project_url, '.', _tty_in=True, _tty_out=False, _err_to_out=True)
            puts(clone)
            puts(git.submodule.update('--init', '--recursive', _tty_in=True, _tty_out=False, _err_to_out=True))
            _install_requirements(path)
            # Get site, run hook
            with ensure_project(command, args, path) as site:
                site.call_hook("install", site, git)
        except sh.ErrorReturnCode_128 as e:
            # NOTE(review): ``e.message`` is Python-2 era / sh-specific —
            # verify it still exists on the targeted sh/Python versions.
            if e.message.endswith('Device not configured\n'):
                error = 'Git tried to prompt for a username or password.\n\nTarbell doesn\'t support interactive sessions. Please configure ssh key access to your Git repository. (See https://help.github.com/articles/generating-ssh-keys/)'
            else:
                error = 'Not a valid repository or Tarbell project'
        finally:
            # Always remove the inspection tempdir, even on failure.
            _delete_dir(tempdir)
        if error:
            show_error(error)
        else:
            puts("\n- Done installing project in {0}".format(colored.yellow(path)))
|
def read_json_document(title):
    """Reads in a json document and returns a native python
    datastructure.

    :param title: path to the document; '.json' is appended when missing.
    :raises IOError: if the (possibly suffixed) file does not exist.
    """
    if not title.endswith('.json'):
        juicer.utils.Log.log_warn("File name (%s) does not end with '.json', appending it automatically." % title)
        title += '.json'
    if not os.path.exists(title):
        raise IOError("Could not find file: '%s'" % title)
    # Context manager guarantees the handle is closed even if read() fails
    # (the original leaked the handle on exceptions).
    with open(title, 'r') as f:
        doc = f.read()
    return load_json_str(doc)
|
def create_default_units_and_dimensions():
    """Adds the units and the dimensions reading a json file. It adds only dimensions and units that are not inside the db.

    It is possible adding new dimensions and units to the DB just modifying the json file.
    """
    # Bundled JSON file with the default dimensions and their units.
    default_units_file_location = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../', 'static', 'default_units_and_dimensions.json'))
    d = None
    with open(default_units_file_location) as json_data:
        d = json.load(json_data)
        json_data.close()
    for json_dimension in d["dimension"]:
        new_dimension = None
        dimension_name = get_utf8_encoded_string(json_dimension["name"])
        # Only insert the dimension when no row with this name exists yet.
        db_dimensions_by_name = db.DBSession.query(Dimension).filter(Dimension.name == dimension_name).all()
        if len(db_dimensions_by_name) == 0:
            # Adding the dimension
            log.debug("Adding Dimension `{}`".format(dimension_name))
            new_dimension = Dimension()
            if "id" in json_dimension:
                # If ID is specified
                new_dimension.id = json_dimension["id"]
            new_dimension.name = dimension_name
            db.DBSession.add(new_dimension)
            db.DBSession.flush()
        # Re-fetch by name so new_dimension is set whether or not we just
        # inserted it.
        new_dimension = get_dimension_from_db_by_name(dimension_name)
        for json_unit in json_dimension["unit"]:
            # Units are keyed by abbreviation; skip ones already present.
            db_units_by_name = db.DBSession.query(Unit).filter(Unit.abbreviation == get_utf8_encoded_string(json_unit['abbr'])).all()
            if len(db_units_by_name) == 0:
                # Adding the unit
                log.debug("Adding Unit %s in %s", json_unit['abbr'], json_dimension["name"])
                new_unit = Unit()
                if "id" in json_unit:
                    new_unit.id = json_unit["id"]
                new_unit.dimension_id = new_dimension.id
                new_unit.name = get_utf8_encoded_string(json_unit['name'])
                new_unit.abbreviation = get_utf8_encoded_string(json_unit['abbr'])
                # lf/cf: linear/conversion factors for the unit.
                new_unit.lf = get_utf8_encoded_string(json_unit['lf'])
                new_unit.cf = get_utf8_encoded_string(json_unit['cf'])
                if "description" in json_unit:
                    # If Description is specified
                    new_unit.description = get_utf8_encoded_string(json_unit["description"])
                # Save on DB
                db.DBSession.add(new_unit)
                db.DBSession.flush()
            else:
                # Unit already exists; nothing to do.
                pass
    try:
        # Needed for test. On HWI it fails so we need to catch the exception and pass by
        db.DBSession.commit()
    except Exception as e:
        # Needed for HWI
        pass
    return
|
def movies_opening(self, **kwargs):
    """Gets the current opening movies from the API.

    Args:
        limit (optional): limits the number of movies returned, default=10
        country (optional): localized data for selected country, default="us"

    Returns:
        A dict representation of the JSON returned from the API.
    """
    path = self._get_path('movies_opening')
    response = self._GET(path, kwargs)
    # Mirror the response keys onto this object before returning it.
    self._set_attrs_to_values(response)
    return response
|
def start_sctp_client(self, ip=None, port=None, name=None, timeout=None, protocol=None, family='ipv4'):
    """Starts a new SCTP client.

    Client can be optionally given `ip` and `port` to bind to, as well as
    `name`, default `timeout` and a `protocol`. `family` can be either
    ipv4 (default) or ipv6.

    You should use `Connect` keyword to connect client to a host.

    Examples:
    | Start SCTP client |
    | Start SCTP client | name=Client1 | protocol=GTPV2 |
    | Start SCTP client | 10.10.10.2 | 53 | name=Server1 | protocol=GTPV2 |
    | Start SCTP client | timeout=5 |
    """
    self._start_client(SCTPClient, ip, port, name, timeout, protocol, family)
|
def get_owner_and_repo(repourl):
    """Takes a git repository URL from Bitbucket and tries to determine the owner and repository name

    :param repourl: Bitbucket git repo in the form of
        git@bitbucket.com:OWNER/REPONAME.git
        https://bitbucket.com/OWNER/REPONAME.git
        ssh://git@bitbucket.com/OWNER/REPONAME.git
    :return: owner, repo: The owner of the repository and the repository name
    """
    parsed = urlparse(repourl)
    if parsed.scheme:
        # URL form: drop the leading '/' from the path component.
        path = parsed.path[1:]
    else:
        # scp-like form (git@host:owner/repo.git): take what follows ':'.
        path = parsed.path.split(':', 1)[-1]
    if path.endswith('.git'):
        path = path[:-4]
    path = path.rstrip('/')
    parts = path.split('/')
    assert len(parts) == 2, 'OWNER/REPONAME is expected'
    return parts
|
def on_shutdown(self, broker):
    """Shut down the write end of the logging socket."""
    _v and LOG.debug('%r.on_shutdown()', self)
    # #333: WSL generates invalid readiness indication on shutdown()
    if not IS_WSL:
        self._wsock.shutdown(socket.SHUT_WR)
    # Close our handle; the read side is managed via transmit_side.
    self._wsock.close()
    self.transmit_side.close()
|
def _parse_meta_info ( self , line ) :
"""Parse and extract all meta data by looping through the dictionary of meta _ info regexs
updates self . meta _ info
Args :
line ( str ) : line of the msp file"""
|
if self . mslevel :
self . meta_info [ 'ms_level' ] = self . mslevel
if self . polarity :
self . meta_info [ 'polarity' ] = self . polarity
for k , regexes in six . iteritems ( self . meta_regex ) :
for reg in regexes :
m = re . search ( reg , line , re . IGNORECASE )
if m :
self . meta_info [ k ] = m . group ( 1 ) . strip ( )
|
def breadth_first_vertex_order(vertices_resources, nets):
    """A generator which iterates over a set of vertices in a breadth-first
    order in terms of connectivity.

    For use as a vertex ordering for the sequential placer.
    """
    # Special case: nothing to yield for an empty vertex set.
    if not vertices_resources:
        return
    # Map each vertex to every vertex it shares a net with (iterating a
    # Net object produces the set of vertices involved in that net).
    neighbours = defaultdict(set)
    for net in nets:
        neighbours[net.source].update(net)
        for sink in net.sinks:
            neighbours[sink].update(net)
    # Standard BFS over possibly-disconnected components: whenever the
    # queue drains, seed it with an arbitrary unvisited vertex.
    remaining = set(vertices_resources)
    queue = deque()
    while queue or remaining:
        if not queue:
            queue.append(remaining.pop())
        vertex = queue.popleft()
        yield vertex
        queue.extend(v for v in neighbours[vertex] if v in remaining)
        remaining.difference_update(neighbours[vertex])
|
def _timedeltaToSignHrMin ( offset ) :
"""Return a ( sign , hour , minute ) triple for the offset described by timedelta .
sign is a string , either " + " or " - " . In the case of 0 offset , sign is " + " ."""
|
minutes = round ( ( offset . days * 3600000000 * 24 + offset . seconds * 1000000 + offset . microseconds ) / 60000000.0 )
if minutes < 0 :
sign = '-'
minutes = - minutes
else :
sign = '+'
return ( sign , minutes // 60 , minutes % 60 )
|
def mktns(self):
    """Make the schema's target namespace.

    @return: namespace representation of the schema's targetNamespace
        value.
    @rtype: (prefix, URI)
    """
    uri = self.root.get("targetNamespace")
    # Only resolve a prefix when a target namespace is actually declared.
    prefix = self.root.findPrefix(uri) if uri is not None else None
    return prefix, uri
|
def default(self, name, action, seqno):
    """Defaults the routemap on the node

    Note:
        This method will attempt to default the routemap from the nodes
        operational config. Since routemaps do not exist by default,
        the default action is essentially a negation and the result will
        be the removal of the routemap clause.
        If the routemap does not exist then this
        method will not perform any changes but still return True

    Args:
        name (string): The full name of the routemap.
        action (string): The action to take for this routemap clause.
        seqno (integer): The sequence number for the routemap clause.

    Returns:
        True if the routemap could be deleted otherwise False (see Node)
    """
    command = 'default route-map {0} {1} {2}'.format(name, action, seqno)
    return self.configure(command)
|
def _get_bokeh_chart(self, x_field, y_field, chart_type, label, opts, style, options=None, **kwargs):
    """Get a Bokeh chart object.

    :param x_field: column name (or list of names) for the key dimensions
    :param y_field: column name (or list of names) for the value dimensions
    :param chart_type: one of "line", "hline", "point", "area", "bar",
        "hist", "errorBar", "heatmap", "lreg" or "sline"
    :param label: chart label; falls back to ``self.label`` when None
    :param opts: holoviews plot options
    :param style: holoviews style options
    :param options: extra chart options ("sline" requires ``window_size``
        and ``y_label``); defaults to an empty dict
    :return: a holoviews chart with opts/style applied, or None on error
    """
    # The original used a mutable default (options={}), which is shared
    # between calls; use a None sentinel instead.
    if options is None:
        options = {}
    kdims = x_field if isinstance(x_field, list) else [x_field]
    vdims = y_field if isinstance(y_field, list) else [y_field]
    args = kwargs
    args["data"] = self.df
    args["kdims"] = kdims
    args["vdims"] = vdims
    if label is not None:
        args["label"] = label
    elif self.label is not None:
        args["label"] = self.label
    chart = None
    try:
        # One branch per supported chart type (chart_type has a single
        # value, so a plain elif chain is equivalent to the original).
        if chart_type == "line":
            chart = hv.Curve(**args)
        elif chart_type == "hline":
            chart = self._hline_bokeh_(y_field)
        elif chart_type == "point":
            chart = hv.Scatter(**args)
        elif chart_type == "area":
            chart = hv.Area(**args)
        elif chart_type == "bar":
            chart = hv.Bars(**args)
        elif chart_type == "hist":
            chart = hv.Histogram(**args)
        elif chart_type == "errorBar":
            chart = hv.ErrorBars(**args)
        elif chart_type == "heatmap":
            chart = hv.HeatMap(**args)
        elif chart_type == "lreg":
            chart = self._lreg_bokeh(**args)
        elif chart_type == "sline":
            window_size, y_label = options["window_size"], options["y_label"]
            chart = self._sline_bokeh(window_size, y_label)
        if chart is None:
            self.err("Chart type " + chart_type + " unknown", self._get_bokeh_chart)
            return
        return chart(plot=opts, style=style)
    except DataError as e:
        msg = "Column not found in " + x_field + " and " + y_field
        self.err(e, self._get_bokeh_chart, msg)
    except Exception as e:
        self.err(e)
|
def check_plate_compatibility(tool, source_plate, sink_plate):
    """Checks whether the source and sink plate are compatible given the tool

    :param tool: The tool
    :param source_plate: The source plate
    :param sink_plate: The sink plate
    :return: Either an error, or None
    :type tool: Tool
    :type source_plate: Plate
    :type sink_plate: Plate
    :rtype: None | str
    """
    # A direct parent/child relationship is always compatible.
    if sink_plate == source_plate.parent:
        return None
    # Same meta data: the sink may still be a simplification of the source
    # plate (e.g. when using IndexOf tool).
    if sink_plate.meta_data_id == source_plate.meta_data_id:
        if sink_plate.is_sub_plate(source_plate):
            return None
        return "Sink plate {} is not a simplification of source plate {}".format(sink_plate.plate_id, source_plate.plate_id)
    # Otherwise the meta data must differ by exactly one value ...
    meta_data_diff = set(source_plate.ancestor_meta_data_ids) - set(sink_plate.ancestor_meta_data_ids)
    if len(meta_data_diff) != 1:
        return "{} not in source's parent plates".format(sink_plate.plate_id)
    # ... and that value must be the aggregation meta id of the tool.
    if tool.aggregation_meta_data not in meta_data_diff:
        return "Aggregate tool meta data ({}) " "does not match the diff between source and sink plates ({})".format(tool.aggregation_meta_data, list(meta_data_diff)[0])
|
def _update_proxy ( self , change ) :
"""An observer which sends state change to the proxy ."""
|
if change [ 'name' ] in [ 'row' , 'column' ] :
super ( AbstractWidgetItem , self ) . _update_proxy ( change )
else :
self . proxy . data_changed ( change )
|
def _get_audio_sample_bit(self, audio_abs_path):
    """Return the sample precision (bit depth) of an audio file.

    Runs ``sox --i`` on the file and parses the "Precision" line of its
    output.

    Parameters
    ----------
    audio_abs_path : str
        Absolute path of the audio file.

    Returns
    -------
    sample_bit : int
    """
    import shlex  # local import keeps the fix self-contained

    # Quote the path so filenames containing spaces or shell
    # metacharacters cannot break (or inject into) the shell pipeline.
    command = (
        """sox --i {} | grep "{}" | awk -F " : " '{{print $2}}' | """
        """grep -oh "^[^-]*" """
    ).format(shlex.quote(audio_abs_path), "Precision")
    sample_bit = int(
        subprocess.check_output(command, shell=True, universal_newlines=True).rstrip())
    return sample_bit
|
def isOriginalLocation(attr):
    """Attempt to discover if this appearance of a PythonAttribute
    representing a class refers to the module where that class was
    defined."""
    definingModule = inspect.getmodule(attr.load())
    if definingModule is None:
        # No defining module could be determined: treat as not original.
        return False
    # Walk up the attribute chain until the containing module is reached.
    container = attr
    while not isinstance(container, PythonModule):
        container = container.onObject
    return container.name == definingModule.__name__
|
def stringPropertyNames(self):
    r"""Returns a `set` of all keys in the `Properties` object and its
    `defaults` (and its `defaults`\'s `defaults`, etc.)

    :rtype: `set` of text strings
    """
    # Start with this object's own keys, then merge in every key from the
    # (recursively flattened) defaults chain.
    result = set(self.data)
    if self.defaults is not None:
        result |= self.defaults.stringPropertyNames()
    return result
|
def get_sdk_version(self) -> str:
    '''Show Android SDK version.'''
    # Query the device property over adb shell; strip the trailing newline.
    stdout, _err = self._execute('-s', self.device_sn, 'shell', 'getprop', 'ro.build.version.sdk')
    return stdout.strip()
|
def path_from_pythonpath(pythonpath):
    """Create an fs.Path object from a pythonpath string."""
    result = fs.Path()
    # Split on the platform's path separator and register each entry,
    # expanding user/environment references first.
    for entry in pythonpath.split(os.pathsep):
        result.add_path(utils.expand_path(entry), 'os')
    return result
|
def overlay_images(self, canvas, data, whence=0.0):
    """Overlay data from any canvas image objects.

    Parameters
    ----------
    canvas : `~ginga.canvas.types.layer.DrawingCanvas`
        Canvas containing possible images to overlay.
    data : ndarray
        Output array on which to overlay image data.
    whence
        See :meth:`get_rgb_object`.
    """
    # Anything without an object list cannot contain images to draw.
    # (Equivalent to checking canvas.is_compound().)
    if not hasattr(canvas, 'objects'):
        return
    for child in canvas.get_objects():
        if hasattr(child, 'draw_image'):
            # An image object: render it directly onto the output array.
            child.draw_image(self, data, whence=whence)
        elif child.is_compound() and (child != canvas):
            # Recurse into nested compounds, avoiding self-reference.
            self.overlay_images(child, data, whence=whence)
|
def appendInnerHTML(self, html):
    '''appendInnerHTML - Appends nodes from arbitrary HTML as if doing element.innerHTML += 'someHTML' in javascript.

    @param html <str> - Some HTML

    NOTE: If associated with a document (AdvancedHTMLParser), the html will use the encoding associated with
    that document.

    @return - None. A browser would return innerHTML, but that's somewhat expensive on a high-level node.
    So just call .innerHTML explicitly if you need that
    '''
    # Late-binding to prevent circular import
    from .Parser import AdvancedHTMLParser

    # Inherit encoding from the associated document, if any.
    encoding = self.ownerDocument.encoding if self.ownerDocument else None

    # Parse the HTML into blocks (text nodes and AdvancedTag's) and attach
    # them to this node.
    blocks = AdvancedHTMLParser.createBlocksFromHTML(html, encoding)
    self.appendBlocks(blocks)
|
def create_rflink_connection(port=None, host=None, baud=57600, protocol=RflinkProtocol, packet_callback=None, event_callback=None, disconnect_callback=None, ignore=None, loop=None):
    """Create Rflink manager class, returns transport coroutine.

    :param port: serial port (or TCP port when ``host`` is given)
    :param host: when set, connect over TCP instead of serial
    :param baud: serial baud rate (ignored for TCP)
    :param protocol: protocol factory, defaults to RflinkProtocol
    :param ignore: list of device ids to ignore
    :param loop: event loop; defaults to the current event loop
    """
    # Resolve the event loop once so both the protocol factory and the TCP
    # branch use the same loop. (Previously a TCP connection with loop=None
    # crashed calling loop.create_connection on None.)
    loop = loop if loop else asyncio.get_event_loop()

    # use default protocol if not specified
    protocol_factory = partial(
        protocol,
        loop=loop,
        packet_callback=packet_callback,
        event_callback=event_callback,
        disconnect_callback=disconnect_callback,
        ignore=ignore if ignore else [],
    )

    if host:
        # TCP connection to a networked Rflink gateway.
        conn = loop.create_connection(protocol_factory, host, port)
    else:
        # Serial connection to a locally attached Rflink device.
        conn = create_serial_connection(loop, protocol_factory, port, baud)
    return conn
|
def preprocess_input(userinput):
    """<Purpose>
    Preprocess the raw command line input string.

    <Arguments>
    The raw command line input string. We assume it is pre-stripped.

    <Side Effects>
    The string will be processed by each module that has a defined preprocessor.

    <Exceptions>
    None

    <Returns>
    The preprocessed string.
    """
    # Feed the input through every enabled module's preprocessor in turn.
    for module_name in get_enabled_modules():
        entry = module_data[module_name]
        # Not every module has a preprocessor...
        if 'input_preprocessor' in entry:
            userinput = entry['input_preprocessor'](userinput)
    return userinput
|
def parse(cls, msg):
    """Parse message string to response object.

    :param msg: raw response text: status line first, then header lines.
    :return: instance of *cls* populated with version, status code, reason
        phrase and parsed headers.
    """
    lines = msg.splitlines()
    # Split at most twice: the reason phrase may itself contain spaces
    # (e.g. "HTTP/1.1 404 Not Found"), which a bare split() would break on.
    version, status_code, reason = lines[0].split(None, 2)
    headers = cls.parse_headers('\r\n'.join(lines[1:]))
    return cls(version=version, status_code=status_code, reason=reason, headers=headers)
|
def isin(comps, values):
    """
    Compute the isin boolean array.

    Parameters
    ----------
    comps : array-like
    values : array-like

    Returns
    -------
    boolean array same length as comps
    """
    # Validate both arguments before any coercion so error messages name
    # the offending type.
    if not is_list_like(comps):
        raise TypeError("only list-like objects are allowed to be passed"
                        " to isin(), you passed a [{comps_type}]"
                        .format(comps_type=type(comps).__name__))
    if not is_list_like(values):
        raise TypeError("only list-like objects are allowed to be passed"
                        " to isin(), you passed a [{values_type}]"
                        .format(values_type=type(values).__name__))

    if not isinstance(values, (ABCIndex, ABCSeries, np.ndarray)):
        # Coerce arbitrary list-likes to a 1-D object ndarray so the
        # hashtable routines below can consume them.
        values = construct_1d_object_array_from_listlike(list(values))

    if is_categorical_dtype(comps):
        # TODO(extension)
        # handle categoricals
        return comps._values.isin(values)

    comps = com.values_from_object(comps)

    # Normalize both sides to a common dtype for the membership check.
    comps, dtype, _ = _ensure_data(comps)
    values, _, _ = _ensure_data(values, dtype=dtype)

    # faster for larger cases to use np.in1d
    f = lambda x, y: htable.ismember_object(x, values)

    # GH16012
    # Ensure np.in1d doesn't get object types or it *may* throw an exception
    if len(comps) > 1000000 and not is_object_dtype(comps):
        f = lambda x, y: np.in1d(x, y)
    elif is_integer_dtype(comps):
        try:
            values = values.astype('int64', copy=False)
            comps = comps.astype('int64', copy=False)
            f = lambda x, y: htable.ismember_int64(x, y)
        except (TypeError, ValueError, OverflowError):
            # Fall back to object comparison when the data cannot be
            # represented as int64 (mixed types, overflow).
            values = values.astype(object)
            comps = comps.astype(object)
    elif is_float_dtype(comps):
        try:
            values = values.astype('float64', copy=False)
            comps = comps.astype('float64', copy=False)
            f = lambda x, y: htable.ismember_float64(x, y)
        except (TypeError, ValueError):
            values = values.astype(object)
            comps = comps.astype(object)

    return f(comps, values)
|
def IsExecutionWhitelisted(cmd, args):
    """Check if a binary and args is whitelisted.

    Args:
      cmd: Canonical path to the binary.
      args: List of arguments to be passed to the binary.

    Returns:
      Bool, True if it is whitelisted.

    These whitelists could also go in the platform specific client files
    client_utils_<platform>.py. We chose to leave them here instead of putting
    them in global arrays to discourage people coding other modules from adding
    new commands to the whitelist before running them.

    The idea is to have a single place that lists every command we can run
    during normal operation (obviously doesn't catch the special cases where we
    bypass the list). A deployment-specific list is also checked (see
    local/binary_whitelist.py).
    """
    system = platform.system()
    if system == "Windows":
        whitelist = [
            ("arp.exe", ["-a"]),
            ("driverquery.exe", ["/v"]),
            ("ipconfig.exe", ["/all"]),
            ("netsh.exe", ["advfirewall", "firewall", "show", "rule", "name=all"]),
            ("netsh.exe", ["advfirewall", "monitor", "show", "firewall", "rule", "name=all"]),
            ("tasklist.exe", ["/SVC"]),
            ("tasklist.exe", ["/v"]),
        ]
    elif system == "Linux":
        whitelist = [
            ("/bin/df", []),
            ("/bin/echo", ["1"]),
            ("/bin/rpm", ["-qa"]),
            ("/bin/sleep", ["10"]),
            ("/sbin/auditctl", ["-l"]),
            ("/sbin/ifconfig", ["-a"]),
            ("/sbin/iptables", ["-L", "-n", "-v"]),
            ("/sbin/lsmod", []),
            ("/usr/bin/dpkg", ["--list"]),
            ("/usr/bin/last", []),
            ("/usr/bin/yum", ["list", "installed", "-q"]),
            ("/usr/bin/yum", ["repolist", "-v", "-q"]),
            ("/usr/bin/who", []),
            ("/usr/sbin/arp", ["-a"]),
            ("/usr/sbin/dmidecode", ["-q"]),
            ("/usr/sbin/sshd", ["-T"]),
        ]
    elif system == "Darwin":
        whitelist = [
            ("/bin/echo", ["1"]),
            ("/bin/launchctl", ["unload", config.CONFIG["Client.plist_path"]]),
            ("/usr/bin/hdiutil", ["info"]),
            ("/usr/bin/last", []),
            ("/usr/bin/who", []),
            ("/usr/sbin/arp", ["-a"]),
            ("/usr/sbin/kextstat", []),
            ("/usr/sbin/system_profiler", ["-xml", "SPHardwareDataType"]),
            ("/usr/libexec/firmwarecheckers/ethcheck/ethcheck", ["--show-hashes"]),
        ]
    else:
        whitelist = []

    # Exact match required on both the binary path and the argument vector.
    for allowed_cmd, allowed_args in whitelist:
        if cmd == allowed_cmd and args == allowed_args:
            return True

    # Check if this is whitelisted locally (deployment-specific list).
    return bool(binary_whitelist.IsExecutionWhitelisted(cmd, args))
|
def profileFromPNG(inp):
    """Extract profile from PNG file. Return (*profile*, *name*) pair.

    :param inp: a readable file object containing PNG data.
    :return: (profile, name) where *profile* is the decompressed ICC profile
        bytes and *name* is the profile name from the iCCP chunk.
    :raises ValueError: if the iCCP chunk declares an unknown compression
        method.
    """
    reader = png.Reader(file=inp)
    _, chunk = reader.chunk('iCCP')
    # iCCP layout: <name bytes> NUL <compression-method byte> <compressed data>
    sep = chunk.index(b'\x00')
    name = chunk[:sep]
    compression = chunk[sep + 1]
    # Only compression method 0 (zlib/deflate) is defined by the PNG spec.
    # Raise explicitly instead of assert, which is stripped under -O.
    if compression != 0:
        raise ValueError(
            'unsupported iCCP compression method: %r' % (compression,))
    profile = zlib.decompress(chunk[sep + 2:])
    return profile, name
|
def _search_dirs ( self , dirs , basename , extension = "" ) :
"""Search a list of directories for a given filename or directory name .
Iterator over the supplied directories , returning the first file
found with the supplied name and extension .
: param dirs : a list of directories
: param basename : the filename
: param extension : the file extension , for example ' . conf '
: returns : the path to a matching file , or None"""
|
for d in dirs :
path = os . path . join ( d , '%s%s' % ( basename , extension ) )
if os . path . exists ( path ) :
return path
return None
|
def save(self, filepath):
    """Save current configuration to file.

    :param str filepath: Path to file where settings will be saved.
    :raises: ValueError if supplied filepath cannot be written to.
    """
    section = "RetryPolicy"
    self._config.add_section(section)
    # Persist each retry-policy attribute as a string option.
    for option, attr_value in (
            ("retries", self.retry_policy.retries),
            ("backoff_factor", self.retry_policy.backoff_factor),
            ("max_backoff", self.retry_policy.max_backoff)):
        self._config.set(section, option, str(attr_value))
    super(RequestHTTPSenderConfiguration, self).save(filepath)
|
def wait_new_conf(self):
    """Send a HTTP request to the satellite (GET /wait_new_conf)

    :return: True if wait new conf, otherwise False
    :rtype: bool
    """
    logger.debug("Wait new configuration for %s, %s %s", self.name, self.alive, self.reachable)
    # Delegate to the satellite's HTTP connection; the endpoint name is
    # defined by the satellite's HTTP interface.
    return self.con.get('_wait_new_conf')
|
def load_data(self):
    """Loads image and label data from specified directory path.

    Each non-hidden subdirectory of ``self.datapath`` is treated as one
    emotion label; its files become samples for that label.

    :return: Dataset object containing image and label data.
    """
    images = list()
    labels = list()
    emotion_index_map = dict()
    # Skip hidden directories (leading '.').
    label_directories = [dir for dir in os.listdir(self.datapath) if not dir.startswith('.')]
    for label_directory in label_directories:
        if self.target_emotion_map:
            # Only keep directories for the emotions the caller asked for.
            if label_directory not in self.target_emotion_map.keys():
                continue
        self._add_new_label_to_map(label_directory, emotion_index_map)
        label_directory_path = self.datapath + '/' + label_directory
        if self.time_delay:
            # Time-series mode: load ordered image sequences per emotion.
            self._load_series_for_single_emotion_directory(images, label_directory, label_directory_path, labels)
        else:
            # Skip hidden files, then load each image into the arrays.
            image_files = [image_file for image_file in os.listdir(label_directory_path) if not image_file.startswith('.')]
            self._load_images_from_directory_to_array(image_files, images, label_directory, label_directory_path, labels)
    # Encode the collected string labels using the emotion index map.
    vectorized_labels = self._vectorize_labels(emotion_index_map, labels)
    self._check_data_not_empty(images)
    return self._load_dataset(np.array(images), np.array(vectorized_labels), emotion_index_map)
|
def wait(fs, timeout=-1, return_when=ALL_COMPLETED):
    """Wait for the futures in the given sequence to complete.

    Using this function may prevent a worker from executing.

    :param fs: The sequence of Futures to wait upon.
    :param timeout: The maximum number of seconds to wait. If negative or not
        specified, then there is no limit on the wait time.
    :param return_when: Indicates when this function should return. The
        options are:
        FIRST_COMPLETED  Return when any future finishes or is cancelled.
        FIRST_EXCEPTION  Return when any future finishes by raising an
                         exception. If no future raises an exception then
                         it is equivalent to ALL_COMPLETED.
        ALL_COMPLETED    Return when all futures finish or are cancelled.
    :return: A named 2-tuple of sets. The first set, named 'done', contains
        the futures that completed (is finished or cancelled) before the wait
        completed. The second set, named 'not_done', contains uncompleted
        futures.
    """
    DoneAndNotDoneFutures = namedtuple('DoneAndNotDoneFutures', 'done not_done')
    if timeout < 0:
        # Negative timeout means blocking.
        if return_when == FIRST_COMPLETED:
            # Block until the first future yields a result.
            next(_waitAny(*fs))
        elif return_when in [ALL_COMPLETED, FIRST_EXCEPTION]:
            # Drain the iterator: blocks until every future has finished.
            for _ in _waitAll(*fs):
                pass
        done = set(f for f in fs if f.done())
        not_done = set(fs) - done
        return DoneAndNotDoneFutures(done, not_done)
    elif timeout == 0:
        # Zero-value entry means non-blocking: just flush pending work and
        # report the current state.
        control.execQueue.flush()
        control.execQueue.updateQueue()
        done = set(f for f in fs if f._ended())
        not_done = set(fs) - done
        return DoneAndNotDoneFutures(done, not_done)
    else:
        # Any other value means blocking for a given time.
        done = set()
        start_time = time.time()
        while time.time() - start_time < timeout:
            # Flush futures on local queue (to be executed remotely)
            control.execQueue.flush()
            # Block until data arrives (to free CPU time)
            # NOTE(review): the poll interval is elapsed time, not remaining
            # time -- confirm this is intentional.
            control.execQueue.socket._poll(time.time() - start_time)
            # Update queue
            control.execQueue.updateQueue()
            for f in fs:
                if f._ended():
                    done.add(f)
            not_done = set(fs) - done
            if return_when == FIRST_COMPLETED and len(done) > 0:
                break
            if len(not_done) == 0:
                break
        return DoneAndNotDoneFutures(done, not_done)
|
def outer_definition_name(cls):
    """Helper method for creating outer definition name.

    Returns:
      If definition is nested, will return the outer definitions name,
      else the package name.
    """
    outer_definition = cls.message_definition()
    if outer_definition:
        # Nested definition: delegate to the enclosing definition's name.
        return outer_definition.definition_name()
    # Top-level definition: fall back to the package of the module.
    return util.get_package_for_module(cls.__module__)
|
def set_state(self, value, labels=None):
    """Update the dial's value and, optionally, its labels.

    :param value: Any number
    :param labels: A list of two Strings; sending None won't change the
        current values.
    :return:
    """
    payload = {"value": value}
    if labels:
        # The API expects label strings in upper case.
        payload["labels"] = [str(label).upper() for label in labels]
    self._update_state_from_response(self.parent.set_dial(payload, self.index()))
|
def validateFilepath(value, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, excMsg=None, mustExist=False):
    r"""Raises ValidationException if value is not a valid file path.

    File paths can't contain * ? " < > | (unlike filenames, \ / and : are
    allowed). Returns the value argument.

    * value (str): The value being validated as a file path.
    * blank (bool): If True, a blank string will be accepted. Defaults to False.
    * strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
    * allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers.
    * blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
    * excMsg (str): A custom message to use in the raised ValidationException.

    >>> import pysimplevalidate as pysv
    >>> pysv.validateFilepath('foo.txt')
    'foo.txt'
    >>> pysv.validateFilepath('/spam/foo.txt')
    '/spam/foo.txt'
    >>> pysv.validateFilepath(r'c:\spam\foo.txt')
    'c:\\spam\\foo.txt'
    >>> pysv.validateFilepath(r'c:\spam\???.txt')
    Traceback (most recent call last):
    pysimplevalidate.ValidationException: 'c:\\spam\\???.txt' is not a valid file path.
    """
    # NOTE(review): mustExist is accepted for interface compatibility but is
    # not implemented (the original ended in an unreachable
    # NotImplementedError) -- confirm intended semantics before relying on it.
    returnNow, value = _prevalidationCheck(value, blank, strip, allowlistRegexes, blocklistRegexes, excMsg)
    if returnNow:
        return value

    # Same as validateFilename, except we allow \ and / and :
    # Reject surrounding whitespace and characters illegal in any path.
    # (The original raised NotImplementedError for every otherwise-valid
    # path and never enforced the drive-colon rule on clean input.)
    if (value != value.strip()) or (any(c in value for c in '*?"<>|')):
        _raiseValidationException(_('%r is not a valid file path.') % (_errstr(value)), excMsg)

    if ':' in value:
        # For Windows: a colon may only appear as the drive separator, e.g.
        # 'C:\', i.e. at index 1 after a single leading letter.
        if value.find(':', 2) != -1 or not value[0].isalpha():
            _raiseValidationException(_('%r is not a valid file path.') % (_errstr(value)), excMsg)
    return value
|
def add_subrule(self, subrule, weight):
    """Add subrule to the rule.

    :param subrule:
        Subrule to add to this rule, an instance of :class:`Rule` or
        :class:`RuleLeaf`.
    :param float weight: Weight of the subrule
    """
    if not issubclass(subrule.__class__, (Rule, RuleLeaf)):
        raise TypeError("Rule's class must be (subclass of) {} or {}, got "
                        "{}.".format(Rule, RuleLeaf, subrule.__class__))
    # Merge the subrule's domains into this rule's domain set, then record
    # the subrule and its weight in the parallel R/W lists.
    self.__domains = self.__domains.union(subrule.domains)
    self.R.append(subrule)
    self.W.append(weight)
|
def _validate_currency ( self , currency ) :
"""Check if the given order book is valid .
: param currency : Major currency name in lowercase .
: type currency : str | unicode
: raise InvalidCurrencyError : If an invalid major currency is given ."""
|
if currency not in self . major_currencies :
raise InvalidCurrencyError ( 'Invalid major currency \'{}\'. Choose from {}.' . format ( currency , tuple ( self . major_currencies ) ) )
|
def inform_version_connect(self, msg):
    """Process a #version-connect message."""
    args = msg.arguments
    if len(args) < 2:
        # Malformed inform: a name and at least one version field required.
        return
    # Store version information.
    component = args[0]
    self.versions[component] = tuple(args[1:])
    if component == "katcp-protocol":
        # Protocol informs additionally update the negotiated flags.
        protocol_flags = ProtocolFlags.parse_version(args[1])
        self._set_protocol_from_inform(protocol_flags, msg)
|
def symmetry(self):
    """Check whether a mesh has rotational symmetry.

    Returns
    -------
    symmetry :
        None         No rotational symmetry
        'radial'     Symmetric around an axis
        'spherical'  Symmetric around a point
    """
    kind, axis, section = inertia.radial_symmetry(self)
    # Cache the axis/section so later queries can reuse them.
    self._cache['symmetry_axis'] = axis
    self._cache['symmetry_section'] = section
    return kind
|
def write(s=''):
    """Automates the process of typing by converting a string into a set of press() and hold() calls.

    :param s: string to be written
    :return: None
    """
    # Characters reached via shift on the number row: index == the digit key.
    shifted_digits = (')', '!', '@', '#', '$', '%', '^', '&', '*', '(')
    # Shifted punctuation and the unshifted keys that produce them.
    shifted_symbols = ('{', '}', '<', '>', '?', ':', '"', '_', '+', '~')
    plain_symbols = ('[', ']', ',', '.', '/', ';', "'", '-', '=', '`')
    for char in s:
        if char.isupper():
            # Handles uppercase
            hold('shift', char.lower())
        elif char == " ":
            # Handles spaces
            press('spacebar')
        elif char == "\n":
            # Handles newline
            press('enter')
        elif char in shifted_digits:
            # Handles shift & number
            hold('shift', str(shifted_digits.index(char)))
        elif char in shifted_symbols:
            hold('shift', plain_symbols[shifted_symbols.index(char)])
        else:
            press(char)
|
def get_lattice_quanta(self, convert_to_muC_per_cm2=True, all_in_polar=True):
    """Returns the dipole/polarization quanta along a, b, and c for
    all structures.

    convert_to_muC_per_cm2: when True, scale lattice lengths by the
        per-structure units factor computed below.
    all_in_polar: when True, every entry uses the last structure's lattice
        and units factor (presumably the polar structure -- confirm with
        caller conventions).
    """
    lattices = [s.lattice for s in self.structures]
    volumes = np.array([s.lattice.volume for s in self.structures])
    L = len(self.structures)
    # Electron charge in microCoulomb and Angstrom^2 -> cm^2 conversion.
    e_to_muC = -1.6021766e-13
    cm2_to_A2 = 1e16
    # Per-structure conversion factor: charge / volume scaled to muC/cm^2.
    units = 1.0 / np.array(volumes)
    units *= e_to_muC * cm2_to_A2
    # convert polarizations and lattice lengths prior to adjustment
    if convert_to_muC_per_cm2 and not all_in_polar:
        # adjust lattices: each structure scaled by its own units factor
        for i in range(L):
            lattice = lattices[i]
            l, a = lattice.lengths_and_angles
            lattices[i] = Lattice.from_lengths_and_angles(np.array(l) * units.ravel()[i], a)
    elif convert_to_muC_per_cm2 and all_in_polar:
        # Use the last (polar) structure's lattice and units for all entries.
        for i in range(L):
            lattice = lattices[-1]
            l, a = lattice.lengths_and_angles
            lattices[i] = Lattice.from_lengths_and_angles(np.array(l) * units.ravel()[-1], a)
    # The quanta are the (possibly scaled) lattice lengths per structure.
    quanta = np.array([np.array(l.lengths_and_angles[0]) for l in lattices])
    return quanta
|
def events(self) -> Iterable[Tuple[Optional[float], Callable, Sequence[Any], Mapping[str, Any]]]:
    """Iterates over scheduled events.

    Each event is a 4-tuple composed of the moment (on the simulated clock)
    the event should execute, the function corresponding to the event, its
    positional parameters (as a tuple of arbitrary length), and its keyword
    parameters (as a dictionary). Cancelled events are skipped.
    """
    # Filter out cancelled events first, then project each one to its tuple.
    active = (evt for evt in self._events if not evt.is_cancelled)
    return ((evt.timestamp, evt.fn, evt.args, evt.kwargs) for evt in active)
|
def ParseOptions(cls, options, config_object, category=None, names=None):
    """Parses and validates arguments using the appropriate helpers.

    Args:
      options (argparse.Namespace): parser options.
      config_object (object): object to be configured by an argument helper.
      category (Optional[str]): category of helpers to apply to the group,
          such as storage, output, where None will apply the arguments to
          all helpers. The category can be used to add arguments to a
          specific group of registered helpers.
      names (Optional[list[str]]): names of argument helpers to apply,
          where None will apply the arguments to all helpers.
    """
    for helper_name, helper_class in cls._helper_classes.items():
        # Filter by category and/or by an explicit list of helper names.
        if category and helper_class.CATEGORY != category:
            continue
        if names and helper_name not in names:
            continue
        try:
            helper_class.ParseOptions(options, config_object)
        except errors.BadConfigObject:
            # The helper does not apply to this configuration object.
            pass
|
def sem(self, ddof=1):
    """Compute standard error of the mean of groups, excluding missing values.

    For multiple groupings, the result index will be a MultiIndex.

    Parameters
    ----------
    ddof : integer, default 1
        degrees of freedom
    """
    # SEM = group standard deviation / sqrt(group size).
    group_std = self.std(ddof=ddof)
    group_sizes = self.count()
    return group_std / np.sqrt(group_sizes)
|
def uninstall(self, pkgname, *args, **kwargs):
    """A context manager which allows uninstallation of packages from the environment

    :param str pkgname: The name of a package to uninstall

    >>> env = Environment("/path/to/env/root")
    >>> with env.uninstall("pytz", auto_confirm=True, verbose=False) as uninstaller:
            cleaned = uninstaller.paths
    >>> if cleaned:
            print("uninstalled packages: %s" % cleaned)

    NOTE(review): this function yields, so it is presumably decorated with
    @contextmanager at its definition site (outside this view) -- confirm.
    """
    auto_confirm = kwargs.pop("auto_confirm", True)
    verbose = kwargs.pop("verbose", False)
    with self.activated():
        # If recursive-monkey-patch is present in the base working set,
        # activate it before importing pip shims.
        monkey_patch = next(iter(dist for dist in self.base_working_set if dist.project_name == "recursive-monkey-patch"), None)
        if monkey_patch:
            monkey_patch.activate()
        pip_shims = self.safe_import("pip_shims")
        pathset_base = pip_shims.UninstallPathSet
        # Patch the permission check on pip's UninstallPathSet class.
        pathset_base._permitted = PatchedUninstaller._permitted
        # Locate the installed distribution matching pkgname (None if absent).
        dist = next(iter(filter(lambda d: d.project_name == pkgname, self.get_working_set())), None)
        pathset = pathset_base.from_dist(dist)
        if pathset is not None:
            pathset.remove(auto_confirm=auto_confirm, verbose=verbose)
        try:
            yield pathset
        except Exception as e:
            # NOTE(review): the exception is swallowed after rollback (not
            # re-raised) -- confirm this is the intended contract.
            if pathset is not None:
                pathset.rollback()
        else:
            if pathset is not None:
                pathset.commit()
        if pathset is None:
            return
|
def get_shapes_pymunk_space(df_convex_shapes, shape_i_columns):
    '''Return two-ple containing:

    - A `pymunk.Space` instance.
    - A `pandas.Series` mapping each `pymunk.Body` object in the `Space` to a
      shape index.

    The `Body` to shape index mapping makes it possible to, for example, look
    up the index of the convex shape associated with a `Body` returned by a
    `pymunk` point query in the `Space`.
    '''
    # Allow a single column name to be passed as a bare bytes string.
    if isinstance(shape_i_columns, bytes):
        shape_i_columns = [shape_i_columns]

    space = pm.Space()
    bodies = []
    # One static body per convex shape group.
    convex_groups = df_convex_shapes.groupby(shape_i_columns)
    for shape_i, df_i in convex_groups:
        # groupby on a single column yields scalars; normalize to a list.
        if not isinstance(shape_i, (list, tuple)):
            shape_i = [shape_i]
        if hasattr(pm.Body, 'STATIC'):
            # Assume `pymunk>=5.0`, where static bodies must be declared
            # explicitly.
            body = pm.Body(body_type=pm.Body.STATIC)
        else:
            # Assume `pymunk<5.0`, where bodies are static unless otherwise
            # specified.
            body = pm.Body()
        # Using the code below is about 66% faster than:
        # `df_i[['x', 'y']].values`.
        points = [[x, y] for x, y in zip(df_i.x, df_i.y)]
        poly = pm.Poly(body, points)
        space.add(poly)
        bodies.append([body, shape_i[0]])
    bodies = None if not bodies else bodies
    # Index the shape identifiers by body for point-query lookups.
    return space, (pd.DataFrame(bodies, columns=['body', shape_i_columns[0]]).set_index('body')[shape_i_columns[0]])
|
def _uncythonized_model(self, beta):
    """Creates the structure of the model

    Parameters
    ----------
    beta : np.array
        Contains untransformed starting values for latent variables

    Returns
    -------
    theta : np.array
        Contains the predicted values for the time series
    Y : np.array
        Contains the length-adjusted time series (accounting for lags)
    scores : np.array
        Contains the scores for the time series
    """
    # Transform each latent variable from its untransformed (optimizer)
    # space via its prior's transform.
    parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])])
    # One coefficient path per regressor, seeded with the initial values;
    # the extra column holds the one-step-ahead state.
    coefficients = np.zeros((self.X.shape[1], self.model_Y.shape[0] + 1))
    coefficients[:, 0] = self.initial_values
    theta = np.zeros(self.model_Y.shape[0] + 1)
    model_scale, model_shape, model_skewness = self._get_scale_and_shape(parm)

    # Loop over time series
    theta, self.model_scores, coefficients = gas_reg_recursion(
        parm, theta, self.X, coefficients, self.model_scores, self.model_Y,
        self.model_Y.shape[0], self.family.reg_score_function, self.link,
        model_scale, model_shape, model_skewness, self.max_lag)
    # Drop the trailing one-step-ahead prediction from theta.
    return theta[:-1], self.model_Y, self.model_scores, coefficients
|
def getResourceMapPid(self):
    """Returns:
    str: PID of the Resource Map itself.
    """
    # The Resource Map is the subject typed as ore:ResourceMap.
    resource_maps = [s for s in self.subjects(predicate=rdflib.RDF.type, object=ORE.ResourceMap)]
    resource_map = resource_maps[0]
    # Its PID is recorded as a dcterms:identifier on that subject.
    identifiers = [str(o) for o in self.objects(predicate=DCTERMS.identifier, subject=resource_map)]
    return identifiers[0]
|
def _summarize_coefficients(top_coefs, bottom_coefs):
    """Return a tuple of sections and section titles.

    Sections are pretty print of model coefficients

    Parameters
    ----------
    top_coefs : SFrame of top k coefficients
    bottom_coefs : SFrame of bottom k coefficients

    Returns
    -------
    (sections, section_titles) : tuple
        sections : list
            summary sections for top/bottom k coefficients
        section_titles : list
            summary section titles
    """
    def _row_label(row):
        # Indexed features render as name[index]; plain features as name.
        if row['index'] is None:
            return row['name']
        return "%s[%s]" % (row['name'], row['index'])

    def _section(coefs, placeholder):
        # Empty coefficient frames get a single placeholder entry.
        if len(coefs) == 0:
            return [(placeholder, _precomputed_field(''))]
        return [(_row_label(row), _precomputed_field(row['value'])) for row in coefs]

    sections = [_section(top_coefs, 'No Positive Coefficients'),
                _section(bottom_coefs, 'No Negative Coefficients')]
    section_titles = ['Highest Positive Coefficients', 'Lowest Negative Coefficients']
    return (sections, section_titles)
|
def satosa_logging(logger, level, message, state, **kwargs):
    """Adds a session ID to the message.

    :type logger: logging
    :type level: int
    :type message: str
    :type state: satosa.state.State
    :param logger: Logger to use
    :param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
    :param message: Message
    :param state: The current state
    :param kwargs: set exc_info=True to get an exception stack trace in the log
    """
    if state is None:
        session_id = "UNKNOWN"
    else:
        try:
            session_id = state[LOGGER_STATE_KEY]
        except KeyError:
            # First log line for this session: mint an ID and persist it.
            session_id = uuid4().urn
            state[LOGGER_STATE_KEY] = session_id
    logger.log(level, "[{id}] {msg}".format(id=session_id, msg=message), **kwargs)
|
def make_seg_table(workflow, seg_files, seg_names, out_dir, tags=None, title_text=None, description=None):
    """Creates a node in the workflow for writing the segment summary
    table. Returns a File instance for the output file.
    """
    seg_files = list(seg_files)
    seg_names = list(seg_names)
    tags = [] if tags is None else tags
    makedir(out_dir)
    node = PlotExecutable(workflow.cp, 'page_segtable', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_list_opt('--segment-files', seg_files)
    # Quote each name so the downstream executable sees them as single
    # arguments.
    quoted_seg_names = ["'" + name + "'" for name in seg_names]
    node.add_opt('--segment-names', ' '.join(quoted_seg_names))
    if description:
        node.add_opt('--description', "'" + description + "'")
    if title_text:
        node.add_opt('--title-text', "'" + title_text + "'")
    node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file')
    workflow += node
    return node.output_files[0]
|
def set_max_string_length(self, length=None):
    """stub

    Sets the maximum allowed string length, validating it against the
    metadata and any configured minimum length.
    """
    if self.get_max_string_length_metadata().is_read_only():
        raise NoAccess()
    if not self.my_osid_object_form._is_valid_cardinal(
            length, self.get_max_string_length_metadata()):
        raise InvalidArgument()
    # The maximum must strictly exceed any configured minimum length.
    min_length = self.my_osid_object_form.min_string_length
    if min_length is not None and length < min_length + 1:
        raise InvalidArgument()
    self.my_osid_object_form._my_map['maxStringLength'] = length
    self._max_string_length = length
|
def ip_registrant_monitor(self, query, days_back=0, search_type="all", server=None, country=None, org=None, page=1, include_total_count=False, **kwargs):
    """Query based on free text query terms."""
    # Forward every filter verbatim to the generic results endpoint.
    return self._results(
        'ip-registrant-monitor',
        '/v1/ip-registrant-monitor',
        query=query,
        days_back=days_back,
        search_type=search_type,
        server=server,
        country=country,
        org=org,
        page=page,
        include_total_count=include_total_count,
        **kwargs
    )
|
def optimize_delta_bisection(self, data, skipstep=1, verbose=None):
    """Find optimal ridge penalty with bisection search.

    Parameters
    ----------
    data : array, shape (n_trials, n_channels, n_samples)
        Epoched data set. At least two trials are required.
    skipstep : int, optional
        Speed up calculation by skipping samples during cost function
        calculation.
    verbose : bool, optional
        Print progress information; when None, the 'scot' config is used.

    Returns
    -------
    self : :class:`VAR`
        The :class:`VAR` object to facilitate method chaining (see usage
        example).
    """
    data = atleast_3d(data)
    if data.shape[0] < 2:
        raise ValueError("At least two trials are required.")
    if verbose is None:
        verbose = config.getboolean('scot', 'verbose')
    maxsteps = 10       # maximum number of bisection iterations
    maxdelta = 1e50     # stop growing the bracket beyond this delta
    a = -10
    b = 10
    # Search runs in a transformed space: delta = sqrt(exp(x)).
    trform = lambda x: np.sqrt(np.exp(x))
    msge = _get_msge_with_gradient_func(data.shape, self.p)
    # j* = cost value, k* = cost gradient at each end of the bracket.
    ja, ka = msge(data, trform(a), self.xvschema, skipstep, self.p)
    jb, kb = msge(data, trform(b), self.xvschema, skipstep, self.p)
    # before starting the real bisection, assure the interval contains 0
    while np.sign(ka) == np.sign(kb):
        if verbose:
            print('Bisection initial interval (%f,%f) does not contain 0. '
                  'New interval: (%f,%f)' % (a, b, a * 2, b * 2))
        a *= 2
        b *= 2
        ja, ka = msge(data, trform(a), self.xvschema, skipstep, self.p)
        jb, kb = msge(data, trform(b), self.xvschema, skipstep, self.p)
        if trform(b) >= maxdelta:
            if verbose:
                print('Bisection: could not find initial interval.')
                print(' ********* Delta set to zero! ************ ')
            # NOTE(review): returns 0 here rather than self, which breaks
            # the documented method-chaining contract, and self.delta is
            # left unset -- confirm this is intentional.
            return 0
    nsteps = 0
    while nsteps < maxsteps:
        # point where the line between a and b crosses zero
        # this is not very stable!
        # c = a + (b-a) * np.abs(ka) / np.abs(kb-ka)
        c = (a + b) / 2
        j, k = msge(data, trform(c), self.xvschema, skipstep, self.p)
        # Keep the half-interval whose end points still bracket the zero.
        if np.sign(k) == np.sign(ka):
            a, ka = c, k
        else:
            b, kb = c, k
        nsteps += 1
        # Projected zero crossing (linear interpolation), for reporting only.
        tmp = trform([a, b, a + (b - a) * np.abs(ka) / np.abs(kb - ka)])
        if verbose:
            print('%d Bisection Interval: %f - %f, (projected: %f)' %
                  (nsteps, tmp[0], tmp[1], tmp[2]))
    # Final delta: interpolate the zero crossing inside the final bracket.
    self.delta = trform(a + (b - a) * np.abs(ka) / np.abs(kb - ka))
    if verbose:
        print('Final point: %f' % self.delta)
    return self
|
def str_with_sizes(self, max_name, max_remote_id, max_size):
    """Format this report item as a fixed-width line for the report.

    :param max_name: int width of the name column
    :param max_remote_id: int width of the remote_id column
    :param max_size: int width of the size column
    :return: str padded name, remote_id and size followed by the file hash
    """
    columns = [
        self.name.ljust(max_name),
        self.remote_id.ljust(max_remote_id),
        self.size.ljust(max_size),
        self.file_hash,
    ]
    return u' '.join(columns)
|
def field_specific_errors(self):
    """Return a dict of field-specific validation errors for this row.

    Non-field (row-level) errors keyed by NON_FIELD_ERRORS are excluded.
    """
    errors = {}
    for field, messages in self.error_dict.items():
        if field != NON_FIELD_ERRORS:
            errors[field] = messages
    return errors
|
def delete_table(self, table, retry=DEFAULT_RETRY, not_found_ok=False):
    """Delete a table.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/delete

    Args:
        table (Union[Table, TableReference, str]):
            A reference to the table to delete. A string is resolved via
            :func:`google.cloud.bigquery.table.TableReference.from_string`.
        retry (google.api_core.retry.Retry):
            (Optional) How to retry the RPC.
        not_found_ok (bool):
            Defaults to ``False``. If ``True``, ignore "not found" errors
            when deleting the table.
    """
    table = _table_arg_to_table_ref(table, default_project=self.project)
    if not isinstance(table, TableReference):
        raise TypeError("Unable to get TableReference for table '{}'".format(table))
    try:
        self._call_api(retry, method="DELETE", path=table.path)
    except google.api_core.exceptions.NotFound:
        # Only suppress the 404 when the caller opted in.
        if not_found_ok:
            return
        raise
|
def _ConvertRowToUnicode(self, parser_mediator, row):
    """Converts all byte-string values in a DSV row dict to Unicode.

    Args:
        parser_mediator (ParserMediator): mediates interactions between
            parsers and other components, such as storage and dfvfs.
        row (dict[str, bytes]): a row from a DSV file, where the dictionary
            key contains the column name and the value a binary string.

    Returns:
        dict[str, str]: the same row with every value decoded to Unicode;
            undecodable characters are replaced and a warning is produced.
    """
    for column_name, column_value in row.items():
        # Values that are already Unicode need no conversion.
        if isinstance(column_value, py2to3.UNICODE_TYPE):
            continue
        try:
            decoded = column_value.decode(self._encoding)
        except UnicodeDecodeError:
            # Fall back to lossy decoding and warn about the replacement.
            decoded = column_value.decode(self._encoding, errors='replace')
            parser_mediator.ProduceExtractionWarning(
                'error decoding DSV value: {0:s} as {1:s}, characters have been '
                'replaced in {2:s}'.format(column_name, self._encoding, decoded))
        row[column_name] = decoded
    return row
|
def is_active_feature(feature, redirect_to=None, redirect=None):
    """Decorator for Flask views.

    When ``feature`` is off the view either aborts with a 404 or, if a
    redirect target was supplied, issues a 302 redirect instead.
    """
    def decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            if is_active(feature):
                return func(*args, **kwargs)
            # Named endpoint takes precedence over a raw URL.
            url = url_for(redirect) if redirect else redirect_to
            if url:
                log.debug(u'Feature {feature} is off, redirecting to {url}'.format(feature=feature, url=url))
                return _redirect(url, code=302)
            log.debug(u'Feature {feature} is off, aborting request'.format(feature=feature))
            abort(404)
        return inner
    return decorator
|
def from_tibiadata(cls, content):
    """Parse the World Overview section from TibiaData.com into this class.

    Notes
    -----
    Due to TibiaData limitations, :py:attr:`record_count` and
    :py:attr:`record_date` are unavailable. Listed worlds also lack some
    information: :py:attr:`ListedWorld.status` is always ``Online``,
    :py:attr:`ListedWorld.battleye_protected` is always ``False`` and
    :py:attr:`ListedWorld.battleye_date` is always ``None``.

    Parameters
    ----------
    content: :class:`str`
        The JSON response of the worlds section in TibiaData.com

    Returns
    -------
    :class:`WorldOverview`
        An instance of this class containing only the available worlds.

    Raises
    ------
    InvalidContent
        If the provided content is not the worlds JSON from TibiaData.com.
    """
    json_data = parse_json(content)
    overview = cls()
    try:
        for entry in json_data["worlds"]["allworlds"]:
            world = ListedWorld(entry["name"], entry["location"], entry["worldtype"])
            world._parse_additional_info(entry["additional"])
            world.online_count = entry["online"]
            overview.worlds.append(world)
    except KeyError:
        raise InvalidContent("content is not a worlds json response from TibiaData.com.")
    return overview
|
def saveCustomParams(self, data):
    """Persist a custom dictionary in the Polyglot database.

    The stored values are retrieved again on startup.

    :param data: Dictionary of key value pairs to store in Polyglot database.
    """
    LOGGER.info('Sending customParams to Polyglot.')
    self.send({'customparams': data})
|
def _generate_custom_type ( self , resource_type ) :
"""Dynamically allocates a new CustomResource class definition using the
specified Custom : : SomeCustomName resource type . This special resource
type is equivalent to the AWS : : CloudFormation : : CustomResource ."""
|
if not resource_type . startswith ( "Custom::" ) :
raise TypeError ( "Custom types must start with Custom::" )
custom_type = type ( str ( resource_type . replace ( "::" , "" ) ) , ( self . inspect_resources [ 'AWS::CloudFormation::CustomResource' ] , ) , { 'resource_type' : resource_type } )
self . inspect_members . add ( custom_type )
self . inspect_resources [ resource_type ] = custom_type
return custom_type
|
def ep(self, exc: Exception) -> bool:
    """Return False if the exception had not been handled gracefully.

    Only a ConnectionAbortedError carrying exactly two arguments
    (origin, reason) counts as handled; a warning is logged for it.
    """
    handled = isinstance(exc, ConnectionAbortedError) and len(exc.args) == 2
    if not handled:
        return False
    origin, reason = exc.args
    logging.getLogger(__name__).warning('Exited')
    return True
|
def unassign_log_entry_from_log(self, log_entry_id, log_id):
    """Removes a ``LogEntry`` from a ``Log``.

    arg:    log_entry_id (osid.id.Id): the ``Id`` of the ``LogEntry``
    arg:    log_id (osid.id.Id): the ``Id`` of the ``Log``
    raise:  NotFound - ``log_entry_id`` or ``log_id`` not found or
            ``log_entry_id`` not assigned to ``log_id``
    raise:  NullArgument - ``log_entry_id`` or ``log_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Mirrors osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
    provider_manager = self._get_provider_manager('LOGGING', local=True)
    lookup_session = provider_manager.get_log_lookup_session(proxy=self._proxy)
    # Looking up the log raises NotFound when log_id is unknown.
    lookup_session.get_log(log_id)
    self._unassign_object_from_catalog(log_entry_id, log_id)
|
def send_request(self, service_name, actions, switches=None, correlation_id=None,
                 continue_on_error=False, context=None, control_extra=None,
                 message_expiry_in_seconds=None, suppress_response=False):
    """Build and send a JobRequest, and return a request ID.

    The ``context`` and ``control_extra`` arguments may be used to include
    extra values in the context and control headers, respectively.

    :param service_name: The name of the service from which to receive responses
    :param actions: A list of `ActionRequest` objects
    :param switches: A list of switch value integers
    :param correlation_id: The request correlation ID
    :param continue_on_error: Whether to continue executing further actions
                              once one action has returned errors
    :param context: A dictionary of extra values to include in the context header
    :param control_extra: A dictionary of extra values to include in the control header
    :param message_expiry_in_seconds: How soon the message will expire if not
                                      received by a server (defaults to sixty
                                      seconds unless the settings are otherwise)
    :param suppress_response: If `True`, the service will process the request
                              normally but omit sending a response back to the
                              client (send-and-forget patterns)

    :return: The request ID
    :rtype: int

    :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout,
            MessageTooLarge
    """
    # Copy so the caller's dict is never mutated.
    extra_control = control_extra.copy() if control_extra else {}
    if message_expiry_in_seconds and 'timeout' not in extra_control:
        extra_control['timeout'] = message_expiry_in_seconds
    handler = self._get_handler(service_name)
    control = self._make_control_header(
        continue_on_error=continue_on_error,
        control_extra=extra_control,
        suppress_response=suppress_response,
    )
    context_header = self._make_context_header(
        switches=switches,
        correlation_id=correlation_id,
        context_extra=context,
    )
    request = JobRequest(actions=actions, control=control, context=context_header or {})
    return handler.send_request(request, message_expiry_in_seconds)
|
def render_activity(activity, grouped_activity=None, *args, **kwargs):
    """Render the template snippet matching an activity's content object.

    Returns None when no model-specific template exists. The optional
    'grouped_activity' argument matches what utils.group_activity produces.
    """
    template_name = 'activity_monitor/includes/models/{0.app_label}_{0.model}.html'.format(activity.content_type)
    try:
        tmpl = loader.get_template(template_name)
    except template.TemplateDoesNotExist:
        # No snippet for this model: caller falls back to a simple form.
        return None
    render_context = Context({
        'activity': activity,
        'obj': activity.content_object,
        'grouped_activity': grouped_activity,
    })
    return tmpl.render(render_context)
|
async def release(self, *, comment: str = None, erase: bool = None,
                  secure_erase: bool = None, quick_erase: bool = None,
                  wait: bool = False, wait_interval: int = 5):
    """Release the machine.

    :param comment: Reason machine was released.
    :type comment: `str`
    :param erase: Erase the disk when release.
    :type erase: `bool`
    :param secure_erase: Use the drive's secure erase feature if available.
    :type secure_erase: `bool`
    :param quick_erase: Wipe just the beginning and end of the disk.
        This is not secure.
    :param wait: If specified, wait until the release is complete.
    :type wait: `bool`
    :param wait_interval: How often to poll, defaults to 5 seconds.
    :type wait_interval: `int`
    """
    # Drop None-valued options so only explicitly set flags are sent.
    params = remove_None({
        "system_id": self.system_id,
        "comment": comment,
        "erase": erase,
        "secure_erase": secure_erase,
        "quick_erase": quick_erase,
    })
    self._data = await self._handler.release(**params)
    if not wait:
        return self
    else:
        # Wait for machine to be released
        while self.status in [NodeStatus.RELEASING, NodeStatus.DISK_ERASING]:
            await asyncio.sleep(wait_interval)
            try:
                self._data = await self._handler.read(system_id=self.system_id)
            except CallError as error:
                if error.status == HTTPStatus.NOT_FOUND:
                    # Release must have been on a machine in a pod. This
                    # machine no longer exists. Just return the machine
                    # as it has been released.
                    return self
                else:
                    raise
        # Polling ended: surface terminal failure states as exceptions.
        if self.status == NodeStatus.FAILED_RELEASING:
            msg = "{hostname} failed to be released.".format(hostname=self.hostname)
            raise FailedReleasing(msg, self)
        elif self.status == NodeStatus.FAILED_DISK_ERASING:
            msg = "{hostname} failed to erase disk.".format(hostname=self.hostname)
            raise FailedDiskErasing(msg, self)
    return self
|
def add(self, name, graph):
    """Index and add a :ref:`networkx.Graph <networkx:graph>` to the
    :class:`.GraphCollection`.

    Parameters
    ----------
    name : hashable
        Unique name used to identify the `graph`.
    graph : :ref:`networkx.Graph <networkx:graph>`

    Raises
    ------
    ValueError
        If `name` has already been used in this :class:`.GraphCollection`\.
    """
    if name in self:
        raise ValueError("{0} exists in this GraphCollection".format(name))
    elif hasattr(self, unicode(name)):  # NOTE: Python 2 ``unicode`` builtin.
        raise ValueError("Name conflicts with an existing attribute")
    indexed_graph = self.index(name, graph)
    # Add all edges to the `master_graph`, tagging each with its source graph.
    for s, t, attrs in indexed_graph.edges(data=True):
        attrs.update({'graph': name})
        self.master_graph.add_edge(s, t, **attrs)
    # Add all node attributes to the `master_graph`, keyed per source graph.
    for n, attrs in indexed_graph.nodes(data=True):
        for k, v in attrs.iteritems():  # Python 2 dict API (``iteritems``).
            if k not in self.master_graph.node[n]:
                self.master_graph.node[n][k] = {}
            self.master_graph.node[n][k][name] = v
    # Bypass any overridden __setitem__ when registering the graph.
    dict.__setitem__(self, name, indexed_graph)
|
def save(self):
    """Grab the configured snapshot region to an image file, then close."""
    # Hide the snapshot widget(s) so they do not appear in the capture.
    if self.hideWindow():
        self.hideWindow().hide()
    self.hide()
    QApplication.processEvents()
    time.sleep(1)
    # Choose the capture rectangle: the explicit region, or this widget.
    wid = QApplication.desktop().winId()
    if self._region.isNull():
        x, y = self.x(), self.y()
        w, h = self.width(), self.height()
    else:
        x, y = self._region.x(), self._region.y()
        w, h = self._region.width(), self._region.height()
    QPixmap.grabWindow(wid, x, y, w, h).save(self.filepath())
    self.close()
    self.deleteLater()
    # Restore the previously hidden window.
    if self.hideWindow():
        self.hideWindow().show()
|
def _get_total_available_slots(self, context, template_id, capacity):
    """Returns available slots in idle devices based on <template_id>.

    Only slots in tenant unbound hosting devices are counted to ensure
    there is always hosting device slots available regardless of tenant.

    :param context: request context providing the database session
    :param template_id: hosting device template to match
    :param capacity: slots provided by each idle hosting device
    :returns: number of matching idle devices multiplied by ``capacity``
    """
    query = context.session.query(hd_models.HostingDevice.id)
    # Outer join so devices with no slot allocations at all are included.
    query = query.outerjoin(hd_models.SlotAllocation, hd_models.HostingDevice.id == hd_models.SlotAllocation.hosting_device_id)
    # Admin-enabled, tenant-unbound devices of the requested template only.
    query = query.filter(hd_models.HostingDevice.template_id == template_id, hd_models.HostingDevice.admin_state_up == expr.true(), hd_models.HostingDevice.tenant_bound == expr.null())
    query = query.group_by(hd_models.HostingDevice.id)
    # SUM(num_allocated) is NULL only when the device has no allocations,
    # i.e. the hosting device is completely idle.
    query = query.having(func.sum(hd_models.SlotAllocation.num_allocated) == expr.null())
    num_hosting_devices = query.count()
    return num_hosting_devices * capacity
|
def user_login(self, email=None, password=None):
    """Login with email and password, obtaining a session cookie.

    Either credential is prompted for interactively when not supplied.

    :type email: str
    :param email: The email used for authentication
    :type password: str
    :param password: The password used for authentication
    """
    if email is None:
        email = six.moves.input("Email: ")
    if password is None:
        password = getpass.getpass()
    payload = {"method": "user.login", "params": {"email": email, "pass": password}}
    # On success the server response carries a session cookie that the
    # session object reuses to authenticate future requests.
    response = self.session.post(self.base_api_urls["logic"], data=json.dumps(payload))
    if response.json()["result"] not in ["OK"]:
        raise AuthenticationError("Could not authenticate.\n{}".format(response.json()))
|
def return_dat(self, chan, begsam, endsam):
    """Return the requested data as a 2D numpy.ndarray.

    Parameters
    ----------
    chan : int or list
        index (indices) of the channels to read
    begsam : int
        index of the first sample
    endsam : int
        index of the last sample

    Returns
    -------
    numpy.ndarray
        A 2d matrix, with dimension chan X samples, scaled by channel gain.
    """
    raw = _read_memmap(self.eeg_file, self.dshape, begsam, endsam,
                       self.data_type, self.data_order)
    selected = raw[chan, :]
    return selected * self.gain[chan, None]
|
def get_card_transfer(provider: Provider, deck: Deck, txid: str, debug: bool = False) -> Iterator:
    '''Fetch and parse a single card transfer by its transaction id.'''
    raw_transaction = provider.getrawtransaction(txid, 1)
    bundle = card_bundler(provider, deck, raw_transaction)
    return card_bundle_parser(bundle, debug)
|
def _instance_callable(obj):
    """Given an object, return True if the object is callable.

    For classes, return True if instances would be callable.
    """
    if not isinstance(obj, ClassTypes):
        # Already an instance: callable iff it exposes __call__.
        return getattr(obj, '__call__', None) is not None
    # Walk __bases__ (not __mro__) so old-style classes are handled too.
    if obj.__dict__.get('__call__') is not None:
        return True
    return any(_instance_callable(base) for base in obj.__bases__)
|
def sens_atmos_send(self, TempAmbient, Humidity, force_mavlink1=False):
    '''Atmospheric sensors (temperature, humidity, ...)

    TempAmbient : Ambient temperature [degrees Celsius] (float)
    Humidity    : Relative humidity [%] (float)
    '''
    encoded = self.sens_atmos_encode(TempAmbient, Humidity)
    return self.send(encoded, force_mavlink1=force_mavlink1)
|
def variables(self) -> list:
    """Variables referenced by this expression, in parent order.

    Lazily computed on first access and cached in ``self._variables``.

    Returns
    -------
    list
        The subset of ``self.parent.variables`` whose names occur in
        ``self.expression``.
    """
    # Fix: the original used try/assert/except with ``return`` inside
    # ``finally``, which silently swallows any exception raised while
    # computing the cache (flake8 B012). An explicit guard preserves the
    # lazy-cache behavior without that hazard.
    if getattr(self, '_variables', None) is None:
        # Split the expression on any operator token to recover the
        # candidate variable names.
        pattern = "|".join(map(re.escape, operators))
        keys = re.split(pattern, self.expression)
        indices = [self.parent.variable_names.index(key)
                   for key in keys if key in self.parent.variable_names]
        self._variables = [self.parent.variables[i] for i in indices]
    return self._variables
|
def batch_augment(x, func, device='/CPU:0'):
    """Apply dataset augmentation to a batch of examples.

    :param x: Tensor representing a batch of examples.
    :param func: Callable implementing dataset augmentation, operating on
        a single image.
    :param device: String specifying which device to use.
    """
    with tf.device(device):
        augmented = tf.map_fn(func, x)
    return augmented
|
def eval(self, expression, args=None, *, timeout=-1.0, push_subscribe=False) -> _MethodRet:
    """Eval request coroutine.

    Examples:

    .. code-block:: pycon

        >>> await conn.eval('return 42')
        <Response sync=3 rowcount=1 data=[42]>

        >>> await conn.eval('return box.info.version')
        <Response sync=3 rowcount=1 data=['2.1.1-7-gd381a45b6']>

    :param expression: expression to execute
    :param args: arguments to pass to the function that will execute your
        expression (list object)
    :param timeout: Request timeout
    :param push_subscribe: Subscribe to push messages
    :returns: :class:`asynctnt.Response` instance
    """
    database = self._db
    return database.eval(expression, args,
                         timeout=timeout, push_subscribe=push_subscribe)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.