signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def _GetArgsDescription ( self , args_type ) :
"""Get a simplified description of the args _ type for a flow .""" | args = { }
if args_type :
for type_descriptor in args_type . type_infos :
if not type_descriptor . hidden :
args [ type_descriptor . name ] = { "description" : type_descriptor . description , "default" : type_descriptor . default , "type" : "" , }
if type_descriptor . type :
args [ type_descriptor . name ] [ "type" ] = type_descriptor . type . __name__
return args |
def chartspan(cls, start, end):
    """Create a Lnk object for a chart span.

    Args:
        start: the initial chart vertex (coerced to int)
        end: the final chart vertex (coerced to int)

    Returns:
        A new instance carrying the Lnk.CHARTSPAN type and the
        (start, end) vertex pair.
    """
    return cls(Lnk.CHARTSPAN, (int(start), int(end)))
def predict(self, x):
    """Make a prediction recursively.

    Uses both the samples stored in the current node and the statistics
    inherited from the parent.

    :param x: a single sample to classify.
    :return: the label with the highest combined count at the reached leaf.
    """
    if self._is_leaf():
        # Counts inherited from the parent node...
        d1 = self.predict_initialize['count_dict']
        # ...merged with the counts of samples collected in this node.
        d2 = count_dict(self.Y)
        # NOTE(review): dict.iteritems() is Python 2 only - confirm the
        # project targets Python 2; on Python 3 this would raise
        # AttributeError and should be .items().
        for key, value in d1.iteritems():
            if key in d2:
                d2[key] += value
            else:
                d2[key] = value
        return argmax(d2)
    else:
        # Internal node: route the sample into the matching subtree.
        if self.criterion(x):
            return self.right.predict(x)
        else:
            return self.left.predict(x)
def migrate_flow_collection(apps, schema_editor):
    """Migrate the 'flow_collection' field to 'entity_type'.

    Copies each process's ``flow_collection`` value into both
    ``entity_type`` and ``entity_descriptor_schema``, validating that the
    referenced descriptor schema exists.

    :param apps: historical app registry supplied by the migration framework.
    :param schema_editor: database schema editor (unused).
    :raises LookupError: if a referenced descriptor schema does not exist.
    """
    Process = apps.get_model('flow', 'Process')
    DescriptorSchema = apps.get_model('flow', 'DescriptorSchema')
    for process in Process.objects.all():
        process.entity_type = process.flow_collection
        process.entity_descriptor_schema = process.flow_collection
        if (process.entity_descriptor_schema is not None and
                not DescriptorSchema.objects.filter(
                    slug=process.entity_descriptor_schema).exists()):
            # Bug fix: corrected typo in the error message
            # ("Descriptow" -> "Descriptor").
            raise LookupError(
                "Descriptor schema '{}' referenced in 'entity_descriptor_schema' not "
                "found.".format(process.entity_descriptor_schema)
            )
        process.save()
def get_all(self, routes):
    """Load the metadata for all routes (collections) and populate the cache.

    :param list[str] routes: List of collection names
    :return: A dictionary containing each collection's metadata
    :rtype: dict
    """
    for route in routes:
        self.load(route)
    # Each cache entry is a sequence; index 1 holds the metadata payload.
    # NOTE(review): `iteritems` is presumably a py2/3 compat helper (e.g.
    # six.iteritems) imported elsewhere in the file - confirm.
    return {name: value[1] for name, value in iteritems(self.cache)}
def cli(env, volume_id, snapshot_id):
    """Restore block volume using a given snapshot.

    :param env: CLI environment holding the SoftLayer client.
    :param volume_id: ID of the block volume to restore.
    :param snapshot_id: ID of the snapshot to restore from.
    """
    block_manager = SoftLayer.BlockStorageManager(env.client)
    success = block_manager.restore_from_snapshot(volume_id, snapshot_id)
    # Only report on success; an unsuccessful restore prints nothing here.
    if success:
        click.echo('Block volume %s is being restored using snapshot %s'
                   % (volume_id, snapshot_id))
def contains_all(self, items):
    """Determines whether this set contains all of the items in the specified collection or not.

    :param items: (Collection), the specified collection which includes the items to be searched.
    :return: (bool), ``true`` if all of the items in the specified collection exist in this set,
        ``false`` otherwise.
    """
    check_not_none(items, "Value can't be None")
    data_items = []
    for item in items:
        check_not_none(item, "Value can't be None")
        # Serialize each item to its wire/data form before invoking the codec.
        data_items.append(self._to_data(item))
    return self._encode_invoke(set_contains_all_codec, items=data_items)
def get_node(start, tree, pnames):
    """For each parent, find a single branch from ``start`` to a root.

    :param start: node name to start from.
    :param tree: nested dict of nodes keyed by name.
    :param pnames: mapping of node name -> list of parent names.
    :return: tuple of (subtree reached by following the branch, branch list
        including ``start``).
    """
    def get_first_branch(node):
        # A node absent from pnames is one way to hit a root.
        if node not in pnames:
            return []
        if pnames[node]:
            fp = pnames[node][0]
            # Avoid a direct parent cycle by falling back to the second
            # parent; double cycles are not handled.
            if cycle_check(node, fp, pnames):
                fp = pnames[node][1]
            print(fp)
            return [fp] + get_first_branch(fp)
        else:
            return []
    branch = get_first_branch(start)
    # Descend from the root end of the branch down towards the start node.
    for n in branch[::-1]:
        tree = tree[n]
    assert start in tree, "our start wasnt in the tree! OH NO!"
    branch = [start] + branch
    print('branch', branch)
    return tree, branch
def save_widget(cls, editor):
    """Implements SplittableTabWidget.save_widget to actually save the
    code editor widget.

    If the editor.file.path is None or empty or the file does not exist,
    a save as dialog is shown (save as).

    :param editor: editor widget to save.
    :return: False if there was a problem saving the editor (e.g. the save
        as dialog has been canceled by the user, or a permission error, ...)
    """
    if editor.original:
        # Always operate on the original editor, never on a clone.
        editor = editor.original
    if editor.file.path is None or not os.path.exists(editor.file.path):
        # save as
        path, filter = cls._ask_path(editor)
        if not path:
            return False
        if not os.path.splitext(path)[1]:
            # No extension given: guess one from the editor's first mimetype.
            if len(editor.mimetypes):
                path += mimetypes.guess_extension(editor.mimetypes[0])
        try:
            _logger().debug('saving %r as %r', editor.file._old_path, path)
        except AttributeError:
            # _old_path may not exist on this file object; log the
            # current path instead.
            _logger().debug('saving %r as %r', editor.file.path, path)
        editor.file._path = path
    else:
        path = editor.file.path
    try:
        editor.file.save(path)
    except Exception as e:
        QtWidgets.QMessageBox.warning(
            editor, "Failed to save file",
            'Failed to save %r.\n\nError="%s"' % (path, e))
    else:
        # On success, strip the "dirty" marker (*) from the tab title of
        # the editor and all of its clones.
        tw = editor.parent_tab_widget
        text = tw.tabText(tw.indexOf(editor)).replace('*', '')
        tw.setTabText(tw.indexOf(editor), text)
        for clone in [editor] + editor.clones:
            if clone != editor:
                tw = clone.parent_tab_widget
                tw.setTabText(tw.indexOf(clone), text)
    return True
def queryset(self, request, queryset):
    """Return the queryset filtered by the value chosen in the admin UI.

    source: https://docs.djangoproject.com/en/1.10/ref/contrib/admin/#django.contrib.admin.ModelAdmin.list_filter
    """
    lookup = {self._filter_arg_key: None}
    choice = self.value()
    if choice == "yes":
        return queryset.exclude(**lookup)
    if choice == "no":
        return queryset.filter(**lookup)
def from_v_theta(cls, v, theta):
    """Create a quaternion from unit vector v and rotation angle theta.

    Returns
    -------
    q : :class:`gala.coordinates.Quaternion`
        A ``Quaternion`` instance.
    """
    theta = np.asarray(theta)
    v = np.asarray(v)
    half_angle = 0.5 * theta
    # Normalize v so the vector part has magnitude sin(theta/2).
    norm = np.sqrt(np.sum(v * v))
    components = np.concatenate(
        [[np.cos(half_angle)], np.sin(half_angle) * v / norm])
    return cls(components)
def _temporary_keychain():
    """This function creates a temporary Mac keychain that we can use to work with
    credentials. This keychain uses a one-time password and a temporary file to
    store the data. We expect to have one keychain per socket. The returned
    SecKeychainRef must be freed by the caller, including calling
    SecKeychainDelete.

    Returns a tuple of the SecKeychainRef and the path to the temporary
    directory that contains it.
    """
    # Unfortunately, SecKeychainCreate requires a path to a keychain. This
    # means we cannot use mkstemp to use a generic temporary file. Instead,
    # we're going to create a temporary directory and a filename to use there.
    # This filename will be 8 random bytes expanded into hex (base16). We also
    # need some random bytes to password-protect the keychain we're creating,
    # so we ask for 40 random bytes.
    random_bytes = os.urandom(40)
    filename = base64.b16encode(random_bytes[:8]).decode('utf-8')
    # Hex-encoded so it is guaranteed to be valid UTF-8.
    password = base64.b16encode(random_bytes[8:])
    tempdirectory = tempfile.mkdtemp()
    keychain_path = os.path.join(tempdirectory, filename).encode('utf-8')
    # We now want to create the keychain itself.
    keychain = Security.SecKeychainRef()
    status = Security.SecKeychainCreate(keychain_path, len(password), password,
                                        False, None, ctypes.byref(keychain))
    _assert_no_error(status)
    # Having created the keychain, we want to pass it off to the caller.
    return keychain, tempdirectory
def child(self, name, data, source=None):
    """Create or update the child named ``name`` with the given data.

    Dict data is merged into an existing child via ``update``; any other
    data is added via ``grow``. A missing child is created as a new
    ``Tree``. When ``source`` is given it is recorded on the child.
    """
    try:
        target = self.children[name]
        if isinstance(data, dict):
            target.update(data)
        else:
            target.grow(data)
    except KeyError:
        self.children[name] = Tree(data, name, parent=self)
    # Save source file
    if source is not None:
        self.children[name].sources.append(source)
def getSeriesRegistered(self, q_filter=Q(), distinct=True, counter=False, **kwargs):
    '''Return a list that indicates each series the person has registered for
    and how many registrations they have for that series (because of couples).
    This can be filtered by any keyword arguments passed (e.g. year and month).'''
    # NOTE(review): the default `q_filter=Q()` is a mutable default shared
    # between calls; it is only safe as long as callers never mutate it -
    # confirm.
    series_set = Series.objects.filter(
        q_filter, eventregistration__registration__customer=self, **kwargs)
    if not distinct:
        return series_set
    elif distinct and not counter:
        return series_set.distinct()
    elif 'year' in kwargs or 'month' in kwargs:
        # Count duplicate registrations (couples) per series and format as
        # "<count>x: <title>".
        return [str(x[1]) + 'x: ' + x[0].classDescription.title
                for x in Counter(series_set).items()]
    else:
        return [str(x[1]) + 'x: ' + x[0].__str__()
                for x in Counter(series_set).items()]
def concordance_index_censored(event_indicator, event_time, estimate, tied_tol=1e-8):
    """Concordance index for right-censored data.

    The concordance index is defined as the proportion of all comparable pairs
    in which the predictions and outcomes are concordant.

    Samples are comparable if for at least one of them an event occurred.
    If the estimated risk is larger for the sample with a higher time of
    event/censoring, the predictions of that pair are said to be concordant.
    If an event occurred for one sample and the other is known to be
    event-free at least until the time of event of the first, the second
    sample is assumed to *outlive* the first.
    When predicted risks are identical for a pair, 0.5 rather than 1 is added
    to the count of concordant pairs.
    A pair is not comparable if an event occurred for both of them at the same
    time or an event occurred for one of them but the time of censoring is
    smaller than the time of event of the first one.

    Parameters
    ----------
    event_indicator : array-like, shape = (n_samples,)
        Boolean array denotes whether an event occurred
    event_time : array-like, shape = (n_samples,)
        Array containing the time of an event or time of censoring
    estimate : array-like, shape = (n_samples,)
        Estimated risk of experiencing an event
    tied_tol : float, optional, default: 1e-8
        The tolerance value for considering ties.
        If the absolute difference between risk scores is smaller
        or equal than `tied_tol`, risk scores are considered tied.

    Returns
    -------
    cindex : float
        Concordance index
    concordant : int
        Number of concordant pairs
    discordant : int
        Number of discordant pairs
    tied_risk : int
        Number of pairs having tied estimated risks
    tied_time : int
        Number of comparable pairs sharing the same time

    References
    ----------
    .. [1] Harrell, F.E., Califf, R.M., Pryor, D.B., Lee, K.L., Rosati, R.A,
           "Multivariable prognostic models: issues in developing models,
           evaluating assumptions and adequacy, and measuring and reducing errors",
           Statistics in Medicine, 15(4), 361-87, 1996.
    """
    event_indicator, event_time, estimate = _check_inputs(
        event_indicator, event_time, estimate)
    # Uniform sample weights: the weighted estimator reduces to Harrell's c.
    w = numpy.ones_like(estimate)
    return _estimate_concordance_index(
        event_indicator, event_time, estimate, w, tied_tol)
def gevent_stop(self):
    """Helper method to stop the node for gevent-based applications."""
    import gevent
    gevent.kill(self._poller_greenlet)
    self.remove()
    # NOTE(review): presumably restores the original select after a
    # gevent-specific one was installed at start - confirm.
    self._select = select.select
def get_energies(rootdir, reanalyze, verbose, detailed, sort, fmt):
    """Assimilate VASP runs under ``rootdir`` and print a table of energies.

    :param rootdir: root directory to scan for VASP runs.
    :param reanalyze: if True, re-analyze instead of loading saved data
        from SAVE_FILE.
    :param verbose: if True, enable INFO-level logging with timings.
    :param detailed: if True, use the full drone (keeps filename and
        initial-structure data); otherwise the simple drone.
    :param sort: sort key, "energy_per_atom" or "filename".
    :param fmt: table format string passed through to ``tabulate``.
    """
    if verbose:
        logformat = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=logformat)
    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])
    # Parallelize assimilation across all available CPUs.
    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) + \
              " Use -r to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)
    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])
    all_data = []
    for e in entries:
        if not detailed:
            # Simple drone already provides the volume change directly.
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append((e.data["filename"].replace("./", ""),
                         re.sub(r"\s+", "", e.composition.formula),
                         "{:.5f}".format(e.energy),
                         "{:.5f}".format(e.energy_per_atom),
                         delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        print(tabulate(all_data, headers=headers, tablefmt=fmt))
        print("")
        print(msg)
    else:
        print("No valid vasp run found.")
        # Remove the saved (empty) data so the next run re-analyzes.
        os.unlink(SAVE_FILE)
def start(self, ignore_state=False):
    """Starts the service.

    :param ignore_state: if True, toggle to running regardless of the
        current state.
    """
    self.logger.debug("Start service")
    self._toggle_running(True, ignore_state)
def _encode_dict ( self , obj ) :
"""Returns a JSON representation of a Python dict""" | self . _increment_nested_level ( )
buffer = [ ]
for key in obj :
buffer . append ( self . _encode_key ( key ) + ':' + self . _encode ( obj [ key ] ) )
self . _decrement_nested_level ( )
return '{' + ',' . join ( buffer ) + '}' |
def decrypt_gpp(password):
    """Decrypt the password of local users added via Windows 2008 Group Policy
    Preferences.

    This value is the 'cpassword' attribute embedded in the Groups.xml file,
    stored in the domain controller's Sysvol share.

    Example:
    # habu.decrypt.gpp AzVJmXh/J9KrU5n0czX1uBPLSUjzFE8j7dOltPD8tLk
    testpassword

    :param password: the base64 'cpassword' value (padding may be missing).
    """
    # AES-CBC with an all-zero IV.
    iv = b"\x00" * 16
    # add the '=' characters for padding, if needed
    password += "=" * ((4 - len(password) % 4) % 4)
    password = b64decode(password)
    # Hard-coded AES key (hex) used to decrypt GPP cpassword values.
    key = """
    4e 99 06 e8 fc b6 6c c9 fa f4 93 10 62 0f fe e8
    f4 96 e8 06 cc 05 79 90 20 9b 09 a4 33 b6 6c 1b
    """.replace(" ", "").replace("\n", "")
    key = unhexlify(key)
    cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
    decryptor = cipher.decryptor()
    plain = decryptor.update(password) + decryptor.finalize()
    # Print rather than return; decode errors are ignored to skip padding.
    print(plain.decode(errors='ignore'))
def parse_value(type: str, val: str):
    """Parses a given OBD value of a given type (PID) and returns the parsed
    value.

    If the PID is unknown/not implemented an ObdPidParserUnknownError
    will be raised including the type which was unknown.

    :param type: PID name used to look up the parser in PARSER_MAP.
    :param val: raw value to parse.
    :return: the parsed value.
    """
    key = type.upper()
    if key in PARSER_MAP:
        # Bug fix: the membership test used the upper-cased key while the
        # lookup used the raw `type`, so lower-case input passed the check
        # but raised KeyError on lookup. Use the normalized key for both.
        out = PARSER_MAP[key](val)
        log.debug('For {} entered {}, got {} out'.format(type, val, out))
        return out
    else:
        raise ObdPidParserUnknownError(type, val)
def create(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):
    """Inherit docs.

    Opens one cloud storage file handle per output shard for this mapper
    shard and returns a writer wrapping them.
    """
    mapper_spec = mr_spec.mapper
    params = output_writers._get_params(mapper_spec)
    bucket_name = params.get(cls.BUCKET_NAME_PARAM)
    shards = mapper_spec.shard_count
    filehandles = []
    # Common filename prefix; the destination bucket-shard index is
    # appended per handle below.
    filename = (mr_spec.name + "/" + mr_spec.mapreduce_id +
                "/shard-" + str(shard_number) + "-bucket-")
    for i in range(shards):
        full_filename = "/%s/%s%d" % (bucket_name, filename, i)
        filehandles.append(cloudstorage.open(full_filename, mode="w"))
    return cls(filehandles)
def decode_example(self, tfexample_data):
    """See base class for details."""
    # TODO(epot): Support dynamic shape
    if self.shape.count(None) < 2:
        # Restore the shape if possible. TF Example flattened it.
        shape = [-1 if i is None else i for i in self.shape]
        tfexample_data = tf.reshape(tfexample_data, shape)
    # Cast back to the feature's declared dtype when needed.
    if tfexample_data.dtype != self.dtype:
        tfexample_data = tf.dtypes.cast(tfexample_data, self.dtype)
    return tfexample_data
def _generate_segments(im_orig, scale, sigma, min_size):
    """Segment smallest regions by the algorithm of Felzenswalb and
    Huttenlocher.

    :param im_orig: input image array.
    :param scale, sigma, min_size: parameters forwarded to
        skimage's felzenszwalb segmentation.
    :return: the image with the segmentation mask appended as a 4th channel.
    """
    # open the Image
    im_mask = skimage.segmentation.felzenszwalb(
        skimage.util.img_as_float(im_orig), scale=scale, sigma=sigma,
        min_size=min_size)
    # merge mask channel to the image as a 4th channel
    im_orig = numpy.append(
        im_orig, numpy.zeros(im_orig.shape[:2])[:, :, numpy.newaxis], axis=2)
    im_orig[:, :, 3] = im_mask
    return im_orig
def profile(self, username):
    """User profile page."""
    if not username and g.user:
        # Default to the currently logged-in user.
        username = g.user.username
    payload = {
        'user': bootstrap_user_data(username, include_perms=True),
        'common': self.common_bootsrap_payload(),
    }
    return self.render_template(
        'superset/basic.html',
        title=_("%(user)s's profile", user=username),
        entry='profile',
        bootstrap_data=json.dumps(payload, default=utils.json_iso_dttm_ser),
    )
def _make_annulus_path(patch_inner, patch_outer):
    """Defines a matplotlib annulus path from two patches.

    This preserves the cubic Bezier curves (CURVE4) of the aperture
    paths.

    # This is borrowed from photutils aperture.
    """
    import matplotlib.path as mpath
    # Transform both patch paths into a common (display) coordinate frame.
    path_inner = patch_inner.get_path()
    transform_inner = patch_inner.get_transform()
    path_inner = transform_inner.transform_path(path_inner)
    path_outer = patch_outer.get_path()
    transform_outer = patch_outer.get_transform()
    path_outer = transform_outer.transform_path(path_outer)
    # Reverse the inner vertices so the hole winds opposite to the outer
    # ring, then repeat the final vertex to close the subpath.
    verts_inner = path_inner.vertices[:-1][::-1]
    verts_inner = np.concatenate((verts_inner, [verts_inner[-1]]))
    verts = np.vstack((path_outer.vertices, verts_inner))
    codes = np.hstack((path_outer.codes, path_inner.codes))
    return mpath.Path(verts, codes)
def openSafeReplace(filepath, mode='w+b'):
    """Context manager to open a temporary file and replace the original file on
    closing.

    NOTE(review): this generator is presumably decorated with
    ``@contextlib.contextmanager`` at its definition site - confirm.

    :param filepath: path of the file to be replaced.
    :param mode: mode the temporary file is opened with.
    :raises IOError: if ``filepath`` is not writable before or after writing.
    """
    tempfileName = None
    # Check if the filepath can be accessed and is writable before creating the
    # tempfile
    if not _isFileAccessible(filepath):
        raise IOError('File %s is not writtable' % (filepath, ))
    with tempfile.NamedTemporaryFile(delete=False, mode=mode) as tmpf:
        tempfileName = tmpf.name
        yield tmpf
    # Check if the filepath can be accessed and is writable before moving the
    # tempfile
    if not _isFileAccessible(filepath):
        raise IOError('File %s is not writtable' % (filepath, ))
    # Note: here unhandled exceptions may still occur because of race conditions,
    # messing things up.
    shutil.move(tempfileName, filepath)
def parse_stdout(self, filelike):
    """Parse the content written by the script to standard out.

    :param filelike: filelike object of stdout
    :returns: an exit code in case of an error, None otherwise
    """
    from aiida.orm import Dict
    numbers = {}
    content = filelike.read().strip()
    if not content:
        return self.exit_codes.ERROR_EMPTY_OUTPUT_FILE
    try:
        # Expected line format: "<formula> <identifier> <count> <extra>".
        for line in content.split('\n'):
            formula, identifier, count, _ = re.split(r'\s+', line.strip())
            numbers[identifier] = {'count': int(count), 'formula': formula}
    except Exception:  # pylint: disable=broad-except
        self.logger.exception('Failed to parse the numbers from the stdout file\n%s',
                              traceback.format_exc())
        return self.exit_codes.ERROR_PARSING_OUTPUT_DATA
    else:
        self.out('numbers', Dict(dict=numbers))
    # Success: implicit None signals no error exit code.
    return
def read_volume_attachment(self, name, **kwargs):  # noqa: E501
    """read_volume_attachment  # noqa: E501

    read the specified VolumeAttachment  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_volume_attachment(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the VolumeAttachment (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact.  Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported.  Export strips fields that a user can not specify.
    :return: V1VolumeAttachment
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Async callers receive the request thread; sync callers the payload.
    if kwargs.get('async_req'):
        return self.read_volume_attachment_with_http_info(name, **kwargs)  # noqa: E501
    response = self.read_volume_attachment_with_http_info(name, **kwargs)  # noqa: E501
    return response
def _do_close(self):
    """Tear down this object, after we've agreed to close with the server."""
    AMQP_LOGGER.debug('Closed channel #%d', self.channel_id)
    self.is_open = False
    # Detach from the connection, swapping our references to None so the
    # channel cannot be reused accidentally.
    channel_id, self.channel_id = self.channel_id, None
    connection, self.connection = self.connection, None
    if connection:
        # Release the channel id back to the connection for reuse.
        connection.channels.pop(channel_id, None)
        connection._avail_channel_ids.append(channel_id)
    self.callbacks.clear()
    self.cancel_callbacks.clear()
    self.events.clear()
    self.no_ack_consumers.clear()
def resizeToContents(self):
    """Resizes this widget to fit the contents of its text."""
    doc = self.document()
    h = doc.documentLayout().documentSize().height()
    # Small fixed margin so the last line is not clipped.
    self.setFixedHeight(h + 4)
def _parse_metadata ( self , metadata ) :
"""Transforms raw HADS metadata into a dictionary ( station code - > props )""" | retval = { }
# these are the first keys , afterwards follows a var - len list of variables / props
# first key always blank so skip it
field_keys = [ "nesdis_id" , "nwsli" , "location_text" , "latitude" , "longitude" , "hsa" , "state" , "owner" , "manufacturer" , "channel" , "init_transmit" , # HHMM
"trans_interval" , ]
# min
# repeat in blocks of 7 after field _ keys
var_keys = [ "pe_code" , "data_interval" , # min
"coefficient" , "constant" , "time_offset" , # min
"base_elevation" , # ft
"gauge_correction" , ]
# ft
lines = metadata . splitlines ( )
for line in lines :
if len ( line ) == 0 :
continue
raw_fields = line . split ( "|" )
fields = dict ( zip ( field_keys , raw_fields [ 1 : len ( field_keys ) ] ) )
# how many blocks of var _ keys after initial fields
var_offset = len ( field_keys ) + 1
var_blocks = ( len ( raw_fields ) - var_offset ) // len ( var_keys )
# how many variables
vars_only = raw_fields [ var_offset : ]
variables = { }
for offset in range ( var_blocks ) :
var_dict = dict ( zip ( var_keys , vars_only [ offset * len ( var_keys ) : ( offset + 1 ) * len ( var_keys ) ] , ) )
variables [ var_dict [ "pe_code" ] ] = var_dict
var_dict [ "base_elevation" ] = float ( var_dict [ "base_elevation" ] )
var_dict [ "gauge_correction" ] = float ( var_dict [ "gauge_correction" ] )
del var_dict [ "pe_code" ]
# no need to duplicate
line_val = { "variables" : variables }
line_val . update ( fields )
# conversions
def dms_to_dd ( dms ) :
parts = dms . split ( " " )
sec = int ( parts [ 1 ] ) * 60 + int ( parts [ 2 ] )
return float ( parts [ 0 ] ) + ( sec / 3600.0 )
# negative already in first portion
line_val [ "latitude" ] = dms_to_dd ( line_val [ "latitude" ] )
line_val [ "longitude" ] = dms_to_dd ( line_val [ "longitude" ] )
retval [ line_val [ "nesdis_id" ] ] = line_val
return retval |
def set_resource_id(self, resource_id=None):
    """Set the resource id on the underlying form map.

    :param resource_id: the id to store.
    :raises NullArgument: if ``resource_id`` is None.
    :raises NoAccess: if the metadata marks the field read-only.
    :raises InvalidArgument: if the id fails validation.
    """
    if resource_id is None:
        raise NullArgument()
    if self.get_resource_id_metadata().is_read_only():
        raise NoAccess()
    if not self.my_osid_object_form._is_valid_id(resource_id):
        raise InvalidArgument()
    self.my_osid_object_form._my_map['resourceId'] = resource_id
def about_box():
    """A simple about dialog box using the distribution data files."""
    about_info = wx.adv.AboutDialogInfo()
    # Map snake_case metadata keys onto the UpperCamelCase attributes
    # expected by wx.adv.AboutDialogInfo.
    for k, v in metadata.items():
        setattr(about_info, snake2ucamel(k), v)
    wx.adv.AboutBox(about_info)
def auto_display_limits(self):
    """Calculate best display limits and set them."""
    display_data_and_metadata = self.get_calculated_display_values(True).display_data_and_metadata
    data = display_data_and_metadata.data if display_data_and_metadata else None
    if data is not None:
        # The old algorithm was a problem during EELS where the signal data
        # is a small percentage of the overall data and was falling outside
        # the included range. This is the new simplified algorithm. Future
        # feature may allow user to select more complex algorithms.
        mn, mx = numpy.nanmin(data), numpy.nanmax(data)
        self.display_limits = mn, mx
def arch(self):
    """Return an architecture for this task.

    :returns: an arch string (eg "noarch", or "ppc64le"), or None if this
        task has no architecture associated with it.
    """
    # Position of the arch argument within self.params, per task method.
    positional = {
        'buildArch': 2,
        'createdistrepo': 2,
        'livecd': 2,
        'createrepo': 1,
        'runroot': 1,
        'createImage': 3,
    }
    if self.method in positional:
        return self.params[positional[self.method]]
    if self.method == 'indirectionimage':
        return self.params[0]['arch']
def skill_update(self, skill_id, data, **kwargs):
    """Update a chat skill by id.

    https://developer.zendesk.com/rest_api/docs/chat/skills#update-skill-by-id
    """
    endpoint = "/api/v2/skills/{skill_id}".format(skill_id=skill_id)
    return self.call(endpoint, method="PUT", data=data, **kwargs)
def update_gates(self, gate_update_metadata, project, gate_step_id):
    """UpdateGates.

    [Preview API] Updates the gate for a deployment.

    :param :class:`<GateUpdateMetadata> <azure.devops.v5_0.release.models.GateUpdateMetadata>` gate_update_metadata: Metadata to patch the Release Gates.
    :param str project: Project ID or project name
    :param int gate_step_id: Gate step Id.
    :rtype: :class:`<ReleaseGates> <azure.devops.v5_0.release.models.ReleaseGates>`
    """
    # Route values are only added when supplied, matching the REST route.
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if gate_step_id is not None:
        route_values['gateStepId'] = self._serialize.url('gate_step_id', gate_step_id, 'int')
    content = self._serialize.body(gate_update_metadata, 'GateUpdateMetadata')
    response = self._send(http_method='PATCH',
                          location_id='2666a539-2001-4f80-bcc7-0379956749d4',
                          version='5.0-preview.1',
                          route_values=route_values,
                          content=content)
    return self._deserialize('ReleaseGates', response)
def _percent_to_integer ( percent ) :
"""Internal helper for converting a percentage value to an integer
between 0 and 255 inclusive .""" | num = float ( percent . split ( '%' ) [ 0 ] ) / 100.0 * 255
e = num - math . floor ( num )
return e < 0.5 and int ( math . floor ( num ) ) or int ( math . ceil ( num ) ) |
def instruction_COM_register(self, opcode, register):
    """Replaces the contents of accumulator A or B with its logical complement.

    source code forms: COMA; COMB

    :param opcode: the opcode being executed (unused here).
    :param register: accumulator whose value is complemented in place.
    """
    register.set(self.COM(value=register.value))
def build_from_source(version, **kwargs):
    """Builds specified Spark version from source.

    :param version: Spark version string to build.
    :param kwargs: unused extra options.
    :return: (Integer) Status code of build/mvn command.
    """
    mvn = os.path.join(Spark.svm_version_path(version), 'build', 'mvn')
    # Ensure the bundled mvn wrapper is executable before running it.
    Spark.chmod_add_excute(mvn)
    p = subprocess.Popen([mvn, '-DskipTests', 'clean', 'package'],
                         cwd=Spark.svm_version_path(version))
    # Block until the build finishes and report its exit status.
    p.wait()
    return p.returncode
def simplified_rayliegh_vel(self):
    """Simplified Rayliegh velocity of the site.

    This follows the simplifications proposed by Urzua et al. (2017).

    Returns
    -------
    rayleigh_vel : float
        Equivalent shear-wave velocity.
    """
    # FIXME: What if last layer has no thickness?
    thicks = np.array([l.thickness for l in self])
    depths_mid = np.array([l.depth_mid for l in self])
    shear_vels = np.array([l.shear_vel for l in self])
    mode_incr = depths_mid * thicks / shear_vels ** 2
    # Mode shape is computed as the sumation from the base of
    # the profile. Need to append a 0 for the roll performed in the next
    # step
    shape = np.r_[np.cumsum(mode_incr[::-1])[::-1], 0]
    freq_fund = np.sqrt(
        4 * np.sum(thicks * depths_mid ** 2 / shear_vels ** 2) /
        np.sum(
            thicks *
            # Roll is used to offset the mode_shape so that the sum
            # can be calculated for two adjacent layers
            np.sum(np.c_[shape, np.roll(shape, -1)], axis=1)[:-1] ** 2
        )
    )
    period_fun = 2 * np.pi / freq_fund
    # Quarter-wavelength relation: vel = 4 * H / T.
    rayleigh_vel = 4 * thicks.sum() / period_fun
    return rayleigh_vel
def create_small_file(upload_context):
    """Function run by CreateSmallFileCommand to create the file.

    Runs in a background process.

    :param upload_context: UploadContext: contains data service setup and file details.
    :return dict: DukeDS file data
    """
    data_service = upload_context.make_data_service()
    parent_data, path_data, remote_file_id = upload_context.params
    # The small file will fit into one chunk so read into memory and hash it.
    chunk = path_data.read_whole_file()
    hash_data = path_data.get_hash()
    # Talk to data service uploading chunk and creating the file.
    upload_operations = FileUploadOperations(data_service, upload_context)
    upload_id, url_info = upload_operations.create_upload_and_chunk_url(
        upload_context.project_id, path_data, hash_data,
        storage_provider_id=upload_context.config.storage_provider_id)
    upload_operations.send_file_external(url_info, chunk)
    return upload_operations.finish_upload(upload_id, hash_data, parent_data, remote_file_id)
def from_json(json):
    """Creates Point instance from JSON representation.

    Args:
        json (:obj:`dict`): Must have at least the following keys: lat (float),
            lon (float), time (string in iso format). Example,
            "lat": 9.3470298,
            "lon": 3.79274,
            "time": "2016-07-15T15:27:53.574110"
    Returns:
        :obj:`Point`
    """
    return Point(lat=json['lat'], lon=json['lon'], time=isostr_to_datetime(json['time']))
def results(self):
    """Return (word, similarity) tuples ordered by decreasing similarity.

    :raises NoResultException: when the haystack is empty.
    """
    candidates = [entry.strip() for entry in self.haystack]
    if not candidates:
        raise NoResultException('No similar word found.')
    scored = {word: Levenshtein.ratio(self.needle, word) for word in candidates}
    return sorted(scored.items(), key=operator.itemgetter(1), reverse=True)
def task_class(self):
    """Return the Task class type configured for the scenario."""
    # NOTE(review): imported at call time, presumably to avoid an import
    # cycle at module load - confirm.
    from scenario_player.tasks.base import get_task_class_for_type
    root_task_type, _ = self.task
    task_class = get_task_class_for_type(root_task_type)
    return task_class
def sketch(*args, output_sketch='sketch.msh', threads=1, returncmd=False, **kwargs):
    """Wrapper for mash sketch.

    :param args: Files you want to sketch. Any number can be passed in;
        file patterns (i.e. *fasta) can be used.
    :param output_sketch: Output file for your sketch. Default sketch.msh.
    :param threads: Number of threads to run analysis on.
    :param returncmd: If true, will return the command used to call mash as
        well as out and err.
    :param kwargs: Other arguments, in parameter='argument' format. If the
        parameter is just a switch, do parameter=''.
    :return: stdout and stderr from mash sketch
    """
    options = kwargs_to_string(kwargs)
    if not args:
        raise ValueError('At least one file to sketch must be specified. You specified 0 files.')
    # Build "mash sketch <files...> -o <out> -p <threads> <options>"
    file_args = ''.join('{} '.format(name) for name in args)
    cmd = 'mash sketch {}-o {} -p {} {}'.format(file_args, output_sketch, str(threads), options)
    out, err = accessoryfunctions.run_subprocess(cmd)
    if returncmd:
        return out, err, cmd
    return out, err
def neighbors2(self, distance, chain_residue, atom=None, resid_list=None):  # atom = "CA"
    '''Collect the sorted chain+residue ids found within `distance` of
    `chain_residue`. More precise than `neighbors` since it uses the chain
    identifier as well.

    The fixed slices below follow the PDB fixed-column record layout —
    presumably: cols 18-20 residue name, 13-16 atom name, 22-26 chain id +
    residue sequence number, 31-54 x/y/z coordinates (TODO confirm against
    the PDB format spec used by self.atomlines).
    '''
    if atom == None:  # consider all atoms
        lines = [line for line in self.atomlines(resid_list) if line[17:20] in allowed_PDB_residues_types]
    else:  # consider only given atoms
        lines = [line for line in self.atomlines(resid_list) if line[17:20] in allowed_PDB_residues_types and line[12:16] == atom]
    # Index every atom position by its chain+residue id for O(1) radius lookups.
    shash = spatialhash.SpatialHash(distance)
    for line in lines:
        pos = (float(line[30:38]), float(line[38:46]), float(line[46:54]))
        shash.insert(pos, line[21:26])
    neighbor_list = []
    # Query the hash around each atom of the target chain+residue;
    # data is (position, chain+residue id) — we collect the unique ids.
    for line in lines:
        resid = line[21:26]
        if resid == chain_residue:
            pos = (float(line[30:38]), float(line[38:46]), float(line[46:54]))
            for data in shash.nearby(pos, distance):
                if data[1] not in neighbor_list:
                    neighbor_list.append(data[1])
    neighbor_list.sort()
    return neighbor_list
def _lay_out_axes(ax, projection):
    """Hide extraneous axis decorations.

    ``cartopy`` enables a transparent background patch and an "outline"
    patch by default; this hides both. For a pure ``matplotlib`` plot the
    axis is switched off altogether.

    Parameters
    ----------
    ax : matplotlib.Axes instance
        The ``matplotlib.Axes`` instance being manipulated.
    projection : None or geoplot.crs instance
        The projection, if one is used.

    Returns
    -------
    None
    """
    if projection is None:
        plt.gca().axison = False
        return
    try:
        ax.background_patch.set_visible(False)
        ax.outline_patch.set_visible(False)
    except AttributeError:
        # Testing...
        pass
def clear_candidates(self, clear_env=True):
    """Clear the current candidates.

    :param bool clear_env:
        If ``True``, also clears the environment's (or its underlying
        slave environments') candidates.
    """
    async def _clear_remote(addr):
        # Ask a remote slave manager to clear its own candidates.
        remote_manager = await self.env.connect(addr)
        return await remote_manager.clear_candidates()

    self._candidates = []
    if not clear_env:
        return
    if self._single_env:
        self.env.clear_candidates()
    else:
        run(create_tasks(_clear_remote, self.get_managers()))
def set_published(self, published):
    """Sets the published status.

    arg:    published (boolean): the published status
    raise:  NoAccess - ``Metadata.isReadOnly()`` is ``true``
    raise:  InvalidArgument - ``published`` is not a boolean
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.resource.ResourceForm.set_group_template
    metadata = self.get_published_metadata()
    if metadata.is_read_only():
        raise errors.NoAccess()
    if not self._is_valid_boolean(published):
        raise errors.InvalidArgument()
    self._my_map['published'] = published
def set_quiet(mres, parent, global_options):
    """Set the ``_quiet`` property on the MultiResult.

    The global ``quiet`` option wins when present; otherwise the parent's
    ``quiet`` setting is inherited.
    """
    quiet = global_options.get('quiet')
    mres._quiet = parent.quiet if quiet is None else quiet
def plot_clicked(self, mouse_event):
    """Handle a user click on a plot.

    While a SelectPoints script is running, a left click toggles the NV
    point under the cursor and replots. Independently, if the currently
    selected tree item is a point (or a child of one), the clicked
    coordinates are written into the item's x/y children.

    Args:
        mouse_event: matplotlib mouse event carrying xdata/ydata/button.
    """
    # Forward the click to a running SelectPoints script (left button only,
    # and only when the click landed inside the axes, i.e. xdata is set).
    if isinstance(self.current_script, SelectPoints) and self.current_script.is_running:
        if (not (mouse_event.xdata == None)):
            if (mouse_event.button == 1):
                pt = np.array([mouse_event.xdata, mouse_event.ydata])
                self.current_script.toggle_NV(pt)
                self.current_script.plot([self.matplotlibwidget_1.figure])
                self.matplotlibwidget_1.draw()
    item = self.tree_scripts.currentItem()
    if item is not None:
        if item.is_point():
            # Convention in this widget: child(1) holds x, child(0) holds y.
            item_x = item.child(1)
            if mouse_event.xdata is not None:
                self.tree_scripts.setCurrentItem(item_x)
                item_x.value = float(mouse_event.xdata)
                item_x.setText(1, '{:0.3f}'.format(float(mouse_event.xdata)))
            item_y = item.child(0)
            if mouse_event.ydata is not None:
                self.tree_scripts.setCurrentItem(item_y)
                item_y.value = float(mouse_event.ydata)
                item_y.setText(1, '{:0.3f}'.format(float(mouse_event.ydata)))
            # focus back on item
            self.tree_scripts.setCurrentItem(item)
        else:
            # Selected item may itself be the x or y child of a point item;
            # update only the matching coordinate cell.
            if item.parent() is not None:
                if item.parent().is_point():
                    if item == item.parent().child(1):
                        if mouse_event.xdata is not None:
                            item.setData(1, 2, float(mouse_event.xdata))
                    if item == item.parent().child(0):
                        if mouse_event.ydata is not None:
                            item.setData(1, 2, float(mouse_event.ydata))
def get_other_keys(self, key, including_current=False):
    """Return the list of other keys that map to the same value as *key*.

    @param key - key for which other keys should be returned.
    @param including_current if set to True - key will also appear on this list.
    """
    if key not in self:
        return []
    # Keys are grouped by their type name; copy so callers can't mutate us.
    siblings = list(self.__dict__[str(type(key))][key])
    if not including_current:
        siblings.remove(key)
    return siblings
async def ban(self, user, *, reason=None, delete_message_days=1):
    """|coro|

    Bans a user from the guild.

    The user must meet the :class:`abc.Snowflake` abc. You must have the
    :attr:`~Permissions.ban_members` permission to do this.

    Parameters
    ----------
    user: :class:`abc.Snowflake`
        The user to ban from their guild.
    delete_message_days: :class:`int`
        The number of days worth of messages to delete from the user
        in the guild. The minimum is 0 and the maximum is 7.
    reason: Optional[:class:`str`]
        The reason the user got banned.

    Raises
    ------
    Forbidden
        You do not have the proper permissions to ban.
    HTTPException
        Banning failed.
    """
    http = self._state.http
    await http.ban(user.id, self.id, delete_message_days, reason=reason)
def construct_format(f, type_map=CONSTRUCT_CODE):
    """Render field *f* as a Construct parser expression string.

    :param f: field descriptor exposing ``type_id``, ``identifier`` and
        an ``options`` dict (with optional ``size``/``fill`` entries).
    :param type_map: mapping from primitive type ids to Construct names.
    :return: a Construct expression string for the field.
    """
    # Removed dead code: the original initialized `formatted = ""` and
    # ended with an unreachable `return formatted`.
    mapped = type_map.get(f.type_id)
    if mapped:
        # Primitive type known to the map.
        return "'{identifier}' / {type_id}".format(type_id=mapped, identifier=f.identifier)
    size_option = f.options.get('size')
    if f.type_id == 'string' and size_option:
        # Fixed-size string.
        return "'{id}'/ construct.Bytes({size})".format(id=f.identifier, size=size_option.value)
    if f.type_id == 'string':
        # Variable-size string consumes the remaining bytes.
        return "'{id}' / construct.GreedyBytes".format(id=f.identifier)
    if f.type_id == 'array' and size_option:
        # Fixed-size array of the element ('fill') type.
        fill = f.options['fill'].value
        return "'{id}' / construct.Array({size}, {type})".format(
            id=f.identifier,
            size=size_option.value,
            type=type_map.get(fill, 'construct.Byte'))
    if f.type_id == 'array':
        # Unsized array: greedily parse elements of the 'fill' type.
        element = copy.copy(f)
        element.type_id = f.options['fill'].value
        return "construct.GreedyRange(%s)" % construct_format(element)
    # Fall back to a nested struct named after the custom type.
    return "'%s' / construct.Struct(%s._parser)" % (f.identifier, f.type_id)
def list(self, accountID, **kwargs):
    """Get a list of Transactions pages that satisfy a time-based
    Transaction query.

    Args:
        accountID:
            Account Identifier
        fromTime:
            The starting time (inclusive) of the time range for the
            Transactions being queried.
        toTime:
            The ending time (inclusive) of the time range for the
            Transactions being queried.
        pageSize:
            The number of Transactions to include in each page of the
            results.
        type:
            A filter for restricting the types of Transactions to retrieve.

    Returns:
        v20.response.Response containing the results from submitting the
        request
    """
    request = Request('GET', '/v3/accounts/{accountID}/transactions')
    request.set_path_param('accountID', accountID)
    request.set_param('from', kwargs.get('fromTime'))
    request.set_param('to', kwargs.get('toTime'))
    request.set_param('pageSize', kwargs.get('pageSize'))
    request.set_param('type', kwargs.get('type'))
    response = self.ctx.request(request)
    # Non-JSON (or typeless) responses are passed through unparsed.
    if response.content_type is None:
        return response
    if not response.content_type.startswith("application/json"):
        return response
    jbody = json.loads(response.raw_body)
    parsed_body = {}
    status = str(response.status)
    # Parse responses as defined by the API specification. The original
    # repeated the identical errorCode/errorMessage extraction for every
    # error status; collapsed into data-driven loops.
    if status == "200":
        for key in ('from', 'to', 'pageSize', 'type', 'count', 'pages',
                    'lastTransactionID'):
            if jbody.get(key) is not None:
                parsed_body[key] = jbody.get(key)
    elif status in ("400", "401", "403", "404", "405", "416"):
        for key in ('errorCode', 'errorMessage'):
            if jbody.get(key) is not None:
                parsed_body[key] = jbody.get(key)
    else:
        # Unexpected response status: keep the raw JSON body.
        parsed_body = jbody
    response.body = parsed_body
    return response
def do_forget(self, repo):
    '''Drop definition of a repo.

    forget REPO
    '''
    # Fail fast (user-facing abort) when the repo is unknown.
    self.abort_on_nonexisting_repo(repo, 'forget')
    self.network.forget(repo)
def raise_(type_, value=None, traceback=None):  # pylint: disable=W0613
    """Does the same as ordinary ``raise`` with arguments do in Python 2.

    But works in Python 3 (>= 3.3) also!

    Please checkout README on https://github.com/9seconds/pep3134
    to get an idea about possible pitfalls. But short story is: please
    be pretty careful with tracebacks. If it is possible, use sys.exc_info
    instead. But in most cases it will work as you expect.
    """
    prev_exc, prev_tb = sys.exc_info()[1:]
    # Wrap the given exception type in a proxy class that carries the
    # original exception on __original_exception__.
    proxy_class = construct_exc_class(type(type_))
    err = proxy_class(type_)
    err.__original_exception__.__cause__ = None
    err.__original_exception__.__suppress_context__ = False
    if getattr(prev_exc, "__pep3134__", False):
        prev_exc = prev_exc.with_traceback(prev_tb)
    err.__original_exception__.__context__ = prev_exc
    if traceback:
        # BUG FIX: was `raise err.with_traceback(traceback), None, traceback`,
        # which is Python 2 raise syntax and a SyntaxError under Python 3.
        raise err.with_traceback(traceback)
    else:
        raise err
def load_plugin_from_addonxml(cls, mode, url):
    '''Attempts to import a plugin's source code and find an instance of
    :class:`~xbmcswift2.Plugin`. Returns an instance of PluginManager if
    successful.
    '''
    cwd = os.getcwd()
    # Make the addon's module importable from the current directory.
    sys.path.insert(0, cwd)
    module_name = get_addon_module_name(os.path.join(cwd, 'addon.xml'))
    addon = __import__(module_name)
    # Find the first instance of xbmcswift2.Plugin
    try:
        # BUG FIX: generator `.next()` is Python-2-only; use builtin next().
        plugin = next(attr_value for attr_value in vars(addon).values()
                      if isinstance(attr_value, Plugin))
    except StopIteration:
        # Also fixes the "Could't" typo in the error message.
        sys.exit("Couldn't find a Plugin instance in %s.py" % module_name)
    return cls(plugin, mode, url)
def sort(self):
    """Order the commits newest-first by committer date/time.

    The result is cached on ``self.sorted_commits`` and returned.
    """
    ordered = sorted(self.commits,
                     key=lambda commit: commit.committer_time,
                     reverse=True)
    self.sorted_commits = ordered
    return ordered
def checkUpdate(self, *args):
    """Updates values after first checking instrument parameters are OK.

    This is not integrated within update to prevent infinite recursion
    since update gets called from ipars.

    Returns False when either the observing or instrument parameters are
    invalid. NOTE(review): there is no explicit return on the success path,
    so the method implicitly returns None (falsy) when all checks pass —
    confirm callers only test for the explicit False.
    """
    g = get_root(self).globals
    if not self.check():
        g.clog.warn('Current observing parameters are not valid.')
        return False
    if not g.ipars.check():
        g.clog.warn('Current instrument parameters are not valid.')
        return False
def _get_user_info(user=None):
    '''Wrapper for the ``user.info`` Salt function.

    Falls back to the user Salt is running as; the special ``salt`` user
    additionally falls back when it does not exist.
    '''
    if not user:
        # Get user Salt is running as
        user = __salt__['config.option']('user')
    userinfo = __salt__['user.info'](user)
    if userinfo:
        return userinfo
    if user == 'salt':
        # Special case with `salt` user: if it doesn't exist then
        # fall back to the user Salt is running as.
        return _get_user_info()
    raise SaltInvocationError('User {0} does not exist'.format(user))
def collect(path, no_input):
    '''Collect static files into *path*.

    Copies the application's own assets, each blueprint's static folder,
    and every (prefix, source) pair from the STATIC_DIRS config entry.
    An existing target directory is erased first (after confirmation,
    unless *no_input* is set).
    '''
    if exists(path):
        msg = '"%s" directory already exists and will be erased'
        log.warning(msg, path)
        if not no_input:
            click.confirm('Are you sure?', abort=True)
        log.info('Deleting static directory "%s"', path)
        shutil.rmtree(path)
    # NOTE(review): static_folder is an absolute path — using it as a URL
    # prefix fallback looks intentional here but verify against callers.
    prefix = current_app.static_url_path or current_app.static_folder
    if prefix.startswith('/'):
        prefix = prefix[1:]
    destination = join(path, prefix)
    log.info('Copying application assets into "%s"', destination)
    shutil.copytree(current_app.static_folder, destination)
    for blueprint in current_app.blueprints.values():
        if blueprint.has_static_folder:
            # Prefix resolution order: explicit static_prefixes entry,
            # then the blueprint's url_prefix, then ''.
            prefix = current_app.static_prefixes.get(blueprint.name)
            prefix = prefix or blueprint.url_prefix or ''
            prefix += blueprint.static_url_path or ''
            if prefix.startswith('/'):
                prefix = prefix[1:]
            log.info('Copying %s assets to %s', blueprint.name, prefix)
            destination = join(path, prefix)
            copy_recursive(blueprint.static_folder, destination)
    # Extra user-configured static directories.
    for prefix, source in current_app.config['STATIC_DIRS']:
        log.info('Copying %s to %s', source, prefix)
        destination = join(path, prefix)
        copy_recursive(source, destination)
    log.info('Done')
def _bulk_op(self, record_id_iterator, op_type, index=None, doc_type=None):
    """Queue records for asynchronous Elasticsearch indexing.

    :param record_id_iterator: Iterator that yields record UUIDs.
    :param op_type: Indexing operation (one of ``index``, ``create``,
        ``delete`` or ``update``).
    :param index: The Elasticsearch index. (Default: ``None``)
    :param doc_type: The Elasticsearch doc_type. (Default: ``None``)
    """
    with self.create_producer() as producer:
        for record_id in record_id_iterator:
            message = {
                'id': str(record_id),
                'op': op_type,
                'index': index,
                'doc_type': doc_type,
            }
            producer.publish(message)
def _search(self, searchfilter, attrs, basedn):
    """Generic LDAP subtree search below *basedn*.

    Returns a list of (dn, attributes) tuples with all values decoded
    to unicode.
    """
    # Map the requested attribute mode to an ldap attrlist.
    # NOTE(review): DISPLAYED_ATTRS and LISTED_ATTRS currently resolve to
    # the same self.attrlist — confirm whether that is intentional.
    if attrs == NO_ATTR:
        attrlist = []
    elif attrs == DISPLAYED_ATTRS:
        # fix me later (too much attributes)
        attrlist = self.attrlist
    elif attrs == LISTED_ATTRS:
        attrlist = self.attrlist
    elif attrs == ALL_ATTRS:
        attrlist = None
    else:
        attrlist = None
    self._logger(severity=logging.DEBUG, msg="%(backend)s: executing search " "with filter '%(filter)s' in DN '%(dn)s'" % {'backend': self.backend_name, 'dn': basedn, 'filter': self._uni(searchfilter)})
    # bind and search the ldap
    ldap_client = self._bind()
    try:
        r = ldap_client.search_s(basedn, ldap.SCOPE_SUBTREE, searchfilter, attrlist=attrlist)
    except Exception as e:
        ldap_client.unbind_s()
        # NOTE(review): if _exception_handler does not re-raise, control
        # falls through to the unconditional unbind_s below, which would
        # unbind a second time — confirm the handler always raises.
        self._exception_handler(e)
    ldap_client.unbind_s()
    # python-ldap doesn't know utf-8, it treats everything as bytes.
    # So it's necessary to re-encode its output in utf-8.
    ret = []
    for entry in r:
        uni_dn = self._uni(entry[0])
        uni_attrs = {}
        for attr in entry[1]:
            if type(entry[1][attr]) is list:
                tmp = []
                for value in entry[1][attr]:
                    tmp.append(self._uni(value))
            else:
                tmp = self._uni(entry[1][attr])
            uni_attrs[self._uni(attr)] = tmp
        ret.append((uni_dn, uni_attrs))
    return ret
def transaction_abort(self, transaction_id, **kwargs):
    """Abort a transaction, rolling back all of its operations.

    :param transaction_id: ID of transaction to be aborted.
    :param **kwargs: Further parameters for the transport layer.
    """
    known = self.__transactions
    if transaction_id not in known:
        raise workflows.Error("Attempting to abort unknown transaction")
    self.log.debug("Aborting transaction %s", transaction_id)
    known.remove(transaction_id)
    self._transaction_abort(transaction_id, **kwargs)
def appendChild(self, child: 'WdomElement') -> Node:
    """Append child node at the last of child nodes.

    If this instance is connected to the node on browser, the child node
    is also added to it.
    """
    if self.connected:
        # Mirror the DOM mutation to the browser-side node first.
        self._append_child_web(child)
    return self._append_child(child)
def Serialize(self, writer):
    """Serialize this token data to bytes.

    Fields are written in a fixed order: name, symbol, then decimals.

    Args:
        writer (neocore.IO.BinaryWriter): binary writer to write
            serialization data to
    """
    writer.WriteVarString(self.name)    # token name (variable-length string)
    writer.WriteVarString(self.symbol)  # ticker symbol (variable-length string)
    writer.WriteUInt8(self.decimals)    # decimal precision as a single byte
def on_frame(self, frame_in):
    """Handle frame sent to this specific channel.

    Gives the RPC layer first refusal; otherwise dispatches on the frame
    name. Unmatched frames are logged as errors.

    :param pamqp.Frame frame_in: Amqp frame.
    :return:
    """
    # RPC-handled frames are consumed entirely here.
    if self.rpc.on_frame(frame_in):
        return
    if frame_in.name in CONTENT_FRAME:
        # Body/content frames are buffered for the consumer side.
        self._inbound.append(frame_in)
    elif frame_in.name == 'Basic.Cancel':
        self._basic_cancel(frame_in)
    elif frame_in.name == 'Basic.CancelOk':
        self.remove_consumer_tag(frame_in.consumer_tag)
    elif frame_in.name == 'Basic.ConsumeOk':
        # NOTE(review): item access here vs attribute access for CancelOk
        # above — presumably both work on pamqp frames; confirm.
        self.add_consumer_tag(frame_in['consumer_tag'])
    elif frame_in.name == 'Basic.Return':
        self._basic_return(frame_in)
    elif frame_in.name == 'Channel.Close':
        self._close_channel(frame_in)
    elif frame_in.name == 'Channel.Flow':
        # Acknowledge flow control with the same active flag.
        self.write_frame(specification.Channel.FlowOk(frame_in.active))
    else:
        LOGGER.error('[Channel%d] Unhandled Frame: %s -- %s', self.channel_id, frame_in.name, dict(frame_in))
def add_parent_commands(self, cmd_path, help=None):
    """Create parent command objects in the cmd tree, then return
    the last parent command node created.

    Only the final path segment receives the *help* text.

    :rtype: dict
    """
    # Split cmd_path into the part already present in the tree and the
    # part that still needs to be created.
    existed_cmd_end_index = self.index_in_tree(cmd_path)
    new_path, existed_path = self._get_paths(cmd_path, existed_cmd_end_index, )
    parent_node = self.get_cmd_by_path(existed_path)
    last_one_index = 1
    new_path_len = len(new_path)
    _kwargs = {}
    for cmd_name in new_path:
        # Attach the help text only when creating the last segment.
        if last_one_index >= new_path_len:
            _kwargs['help'] = help
        sub_cmd = parent_node['cmd'].add_cmd(cmd_name, **_kwargs)
        parent_node = _mk_cmd_node(cmd_name, sub_cmd)
        # NOTE(review): new_path.index(cmd_name) finds the FIRST occurrence,
        # which would mis-place nodes if a name repeats in the path — confirm
        # duplicate segment names cannot occur.
        self._add_node(parent_node, existed_path + new_path[:new_path.index(cmd_name)])
        last_one_index += 1
    return parent_node
def rnumlistwithoutreplacement(min, max):
    """Returns a randomly ordered list of the integers between min and max.

    Queries www.random.org; raises when the request quota is exhausted.
    """
    if checkquota() < 1:
        raise Exception("Your www.random.org quota has already run out.")
    url = build_request_parameterNR(min, max)
    req = urllib.request.Request(url)
    req.add_header('User-Agent', 'randomwrapy/0.1 very alpha')
    response = urllib.request.build_opener().open(req)
    return response.read().split()
def _load_panel_customization(self):
    """Applies the plugin-based panel configurations.

    This method parses the panel customization from the ``HORIZON_CONFIG``
    and makes changes to the dashboard accordingly.

    It supports adding, removing and setting default panels on the
    dashboard. It also supports registering a panel group.
    """
    panel_customization = self._conf.get("panel_customization", [])
    # Process all the panel groups first so that they exist before panels
    # are added to them and Dashboard._autodiscover() doesn't wipe out any
    # panels previously added when its panel groups are instantiated.
    panel_configs = []
    for config in panel_customization:
        if config.get('PANEL'):
            panel_configs.append(config)
        elif config.get('PANEL_GROUP'):
            self._process_panel_group_configuration(config)
        else:
            # BUG FIX: `config` is a dict and has no `__name__` attribute;
            # logging the dict itself avoids an AttributeError on this path.
            LOG.warning("Skipping %s because it doesn't have PANEL or "
                        "PANEL_GROUP defined.", config)
    # Now process the panels.
    for config in panel_configs:
        self._process_panel_configuration(config)
def arg_int(name, default=None):
    """Fetch a query argument, as an integer.

    Falls back to *default* when the argument is missing or not numeric.
    """
    try:
        return int(request.args.get(name))
    except (ValueError, TypeError):
        return default
def search(self, **kwargs):
    """Search Virtual Interfaces using the extended search facility.

    :param search: Dict containing QuerySets to find Virtual Interfaces.
    :param include: Array containing fields to include on response.
    :param exclude: Array containing fields to exclude on response.
    :param fields: Array containing fields to override default fields.
    :param kind: Determine if result will be detailed ('detail') or basic ('basic').
    :return: Dict containing Virtual Interfaces
    """
    url = self.prepare_url('api/v4/virtual-interface/', kwargs)
    return super(ApiV4VirtualInterface, self).get(url)
def retrieve(pdb_id, cache_dir=None, bio_cache=None):
    '''Creates a PDBML object, using cached copies of the PDB/XML files when
    available and otherwise retrieving them from the RCSB (writing cache
    copies when a cache_dir is given).'''
    pdb_contents = None
    xml_contents = None
    pdb_id = pdb_id.upper()
    # Lookup order: bio_cache object first, then the on-disk cache_dir,
    # finally the RCSB over the network.
    if bio_cache:
        pdb_contents = bio_cache.get_pdb_contents(pdb_id)
        xml_contents = bio_cache.get_pdbml_contents(pdb_id)
    if cache_dir:
        if not pdb_contents:
            # Check to see whether we have a cached copy of the PDB file
            filename = os.path.join(cache_dir, "%s.pdb" % pdb_id)
            if os.path.exists(filename):
                pdb_contents = read_file(filename)
        if not xml_contents:
            # Check to see whether we have a cached copy of the XML file
            filename = os.path.join(cache_dir, "%s.xml" % pdb_id)
            if os.path.exists(filename):
                xml_contents = read_file(filename)
    # Get any missing files from the RCSB and create cached copies if appropriate
    if not pdb_contents:
        pdb_contents = rcsb.retrieve_pdb(pdb_id)
        if cache_dir:
            write_file(os.path.join(cache_dir, "%s.pdb" % pdb_id), pdb_contents)
    if not xml_contents:
        xml_contents = rcsb.retrieve_xml(pdb_id)
        if cache_dir:
            write_file(os.path.join(cache_dir, "%s.xml" % pdb_id), xml_contents)
    # Return the object: PDBML doubles as the SAX handler for its own XML.
    handler = PDBML(xml_contents, pdb_contents, bio_cache=bio_cache, pdb_id=pdb_id)
    xml.sax.parseString(xml_contents, handler)
    return handler
def tag(self, layer):
    """Tag the annotations of the given layer.

    Any built-in layer type is tagged automatically via the configured
    tagger mapping; unknown layers are silently ignored.
    """
    taggers = self.layer_tagger_mapping
    if layer in taggers:
        taggers[layer]()
    return self
def p_state_cons_list(self, p):
    '''state_cons_list : state_cons_list state_cons_def
                       | state_cons_def'''
    # NOTE: the docstring above is a PLY grammar specification, not prose —
    # its rule names must match the other p_* productions in this parser.
    if len(p) == 3:
        # state_cons_list state_cons_def: append the new def to the list.
        p[1].append(p[2])
        p[0] = p[1]
    elif len(p) == 2:
        # Single state_cons_def: start a fresh one-element list.
        p[0] = [p[1]]
def write_json(dictionary, filename):
    """Serialize *dictionary* to *filename* as indented, key-sorted JSON."""
    serialized = json.dumps(dictionary, indent=4, sort_keys=True)
    with open(filename, 'w') as handle:
        handle.write(serialized)
    print('--> Wrote ' + os.path.basename(filename))
def get(self):
    """Get a JSON-ready representation of this Attachment.

    :returns: This Attachment, ready for use in a request body.
    :rtype: dict
    """
    fields = (
        ('content', self.file_content),
        ('type', self.file_type),
        ('filename', self.file_name),
        ('disposition', self.disposition),
        ('content_id', self.content_id),
    )
    # Only unset (None) fields are omitted from the payload.
    return {key: value.get() for key, value in fields if value is not None}
def is_dirty(self, untracked=False) -> bool:
    """Check whether the repository contains uncommitted (and, optionally,
    untracked) changes.

    :param untracked: also count untracked files as dirt.
    :return: True if the repository is dirty (the original docstring said
        "clean", which contradicted the implementation).
    """
    result = False
    if not self.index_is_empty():
        LOGGER.error('index is not empty')
        result = True
    changed_files = self.changed_files()
    if changed_files:
        # Plain string with lazy %-style args (was a pointless f-string
        # carrying %-placeholders).
        LOGGER.error('Repo has %s modified files: %s', len(changed_files), changed_files)
        result = True
    if untracked:
        result = result or bool(self.untracked_files())
    return result
def gammatone_erb_constants(n):
    """Constants for using the real bandwidth in the gammatone filter,
    given its order *n*. Returns the pair :math:`(x, y) = (1/a_n, c_n)`.

    Based on equations from:

      ``Holdsworth, J.; Patterson, R.; Nimmo-Smith, I.; Rice, P.
      Implementing a GammaTone Filter Bank. In: SVOS Final Report,
      Annex C, Part A: The Auditory Filter Bank. 1988.``

    The first value is a bandwidth compensation for direct use in the
    gammatone formula; multiplying it by the second yields the factor for
    the 3 dB bandwidth (e.g. for n = 4: x ~= 1.019, x * y * erb ~= 3 dB
    bandwidth).
    """
    two_n_minus_2 = 2 * n - 2
    bandwidth_factor = factorial(n - 1) ** 2 / (pi * factorial(two_n_minus_2) * 2 ** -two_n_minus_2)
    three_db_factor = 2 * (2 ** (1. / n) - 1) ** .5
    return (bandwidth_factor, three_db_factor)
def clusters_sites_obj(clusters):
    """Get the corresponding python-grid5000 site of each passed cluster.

    Args:
        clusters (list): list of string uid of clusters (e.g. 'paravance')

    Return:
        dict mapping cluster uid to python-grid5000 site object
    """
    all_clusters = get_all_clusters_sites()
    return {
        cluster: get_site_obj(site)
        for cluster, site in all_clusters.items()
        if cluster in clusters
    }
def smallest_signed_angle(source, target):
    """Return the signed angular difference from angle `source` to angle
    `target`, wrapped into the interval [-pi, pi)."""
    delta = target - source
    return (delta + np.pi) % (2.0 * np.pi) - np.pi
def run(self, value, errors, request):
    """Validate *value* and return the matching object, otherwise None.

    Raises HTTPNotFound for unknown identifiers taken from the URL
    matchdict; other sources simply record a validation error.
    """
    validated = self.id_validator(value, errors, request)
    if errors:
        return None
    if self.fetch_by:
        fetched = self.cls.fetch_by(**{self.fetch_by: validated})
    else:
        fetched = self.cls.fetch_by_id(validated)
    if not fetched:
        if self.source == SOURCE_MATCHDICT:
            # If part of the URL we should have a not-found error
            raise HTTPNotFound()
        self.add_error(errors, 'Invalid {0}'.format(self.cls.__name__))
    return fetched
def address(address=None):
    """Convert one of a number of inputs into a valid ip:port string.

    Elements which are not provided are filled in as follows:

    * IP Address: the system is asked for the set of IP addresses associated
      with the machine and the first one is used, preferring those matching
      `address` if it is a wildcard.
    * Port number: a random port is selected from the pool of
      dynamically-available port numbers.

    This means you can pass any of: nothing; a hostname; an IP address;
    an IP address with wildcards; a port number.

    If an IP address is supplied but is invalid, an InvalidAddressError
    exception is raised.

    :param address: (optional) Any of: an IP address, a port number, or both
    :returns: a valid ip:port string for this machine
    """
    address = str(address or "").strip()
    # If the address is an ip:port pair, split into its component parts.
    # Otherwise, try to determine whether we're looking at an IP
    # or at a port and leave the other one blank.
    host_or_ip, port = split_address(address)
    # If the port has been supplied, make sure it's numeric and that it's a
    # valid port number. If it hasn't been supplied, remove a random one from
    # the pool of possible dynamically-allocated ports and use that.
    if port:
        try:
            port = int(port)
        except ValueError:
            raise AddressError("Port %s must be a number" % port)
        if port not in config.VALID_PORTS:
            raise AddressError("Port %d must be in range %d - %d" % (port, min(config.VALID_PORTS), max(config.VALID_PORTS)))
    else:
        # NOTE: mutates the module-level PORT_POOL (pops one entry per call).
        random.shuffle(PORT_POOL)
        port = PORT_POOL.pop()
    # The address part could be an IP address (optionally including
    # wildcards to indicate a preference) or a hostname or nothing.
    # If it's a hostname we attempt to resolve it to an IP address.
    # If it's nothing or a wildcard we query the system for a matching IP.
    if (not host_or_ip) or is_valid_ip_pattern(host_or_ip):
        # If a specific IP address is given, use that.
        # If an IP pattern is given (ie something with a wildcard in it)
        # treat that as no address with a preference for that wildcard.
        prefer = None
        if "*" in host_or_ip:
            host_or_ip, prefer = None, [host_or_ip]
        # If no IP (or only a wildcard) is specified, query the system for
        # valid addresses, preferring those which match the wildcard.
        # NOTE(review): _ip4/_prefer are module globals acting as a cache,
        # but nothing here ever assigns them after a lookup — confirm the
        # cache is populated elsewhere or it never hits.
        if not host_or_ip:
            if _ip4 and _prefer == prefer:
                ip = _ip4
            else:
                ip = _find_ip4(prefer)
        else:
            ip = host_or_ip
    else:
        # Treat the string as a hostname and resolve to an IP4 address
        try:
            ip = socket.gethostbyname(host_or_ip)
        except socket.gaierror as exc:
            _logger.error("gaierror %d for %s", exc.errno, host_or_ip)
            raise InvalidAddressError(host_or_ip, exc.errno)
        else:
            # Bizarrely specific check because BT Internet "helpfully"
            # redirects DNS fails to this address which hosts a sponsored
            # landing page!
            if ip == "92.242.132.15":
                raise InvalidAddressError(host_or_ip, 0)
    return "%s:%s" % (ip, port)
def list(cls, datacenter=None, flavor=None, match='', exact_match=False):
    """List available kernels for a datacenter.

    Args:
        datacenter: datacenter name/id to query; when falsy, the kernel
            lists of all datacenters are merged (duplicates removed).
        flavor: if given, restrict the result to that single flavor.
        match: filter kernel names by this string (see ``exact_match``).
        exact_match: when True ``match`` must equal the kernel name
            exactly; otherwise a substring match is used.

    Returns:
        dict mapping flavor name to a list of kernel names.
    """
    if not datacenter:
        # No datacenter given: merge kernels from every datacenter.
        kmap = {}
        for dc_id in (dc['id'] for dc in Datacenter.filtered_list()):
            vals = cls.safe_call('hosting.disk.list_kernels', dc_id)
            for key in vals:
                # key is iterated from vals, so direct indexing is safe
                # (the original vals.get(key, []) was redundant).
                kmap.setdefault(key, []).extend(vals[key])
        # Remove duplicates. NOTE: set() does not preserve ordering.
        for key in kmap:
            kmap[key] = list(set(kmap[key]))
    else:
        dc_id = Datacenter.usable_id(datacenter)
        kmap = cls.safe_call('hosting.disk.list_kernels', dc_id)
    if match:
        for flav in kmap:
            if exact_match:
                kmap[flav] = [x for x in kmap[flav] if match == x]
            else:
                kmap[flav] = [x for x in kmap[flav] if match in x]
    if flavor:
        if flavor not in kmap:
            # NOTE(review): if cls.error() does not raise, the lookup
            # below still fails with KeyError -- confirm error() raises.
            cls.error('flavor %s not supported here' % flavor)
        return {flavor: kmap[flavor]}
    return kmap
def heirarchical_helper(shovel, prefix, level=0):
    '''Return a list of tuples of (fullname, docstring, level) for all the
    tasks in the provided shovel.'''
    entries = []
    for name, task in sorted(shovel.map.items()):
        fullname = ('%s.%s' % (prefix, name)) if prefix else name
        if isinstance(task, Shovel):
            # Sub-shovel: emit a heading entry (no docstring), then
            # recurse one level deeper.
            entries.append((fullname, None, level))
            entries.extend(heirarchical_helper(task, fullname, level + 1))
        else:
            entries.append((fullname, task.doc or '(No docstring)', level))
    return entries
def getNeighbors(trainingSet, testInstance, k, considerDimensions=None):
    """Collect the k most similar instances in the trainingSet for a given
    test instance.

    :param trainingSet: a list of data instances
    :param testInstance: a single data instance
    :param k: number of neighbors to return
    :param considerDimensions: dimensions passed to euclideanDistance;
        defaults to len(testInstance) - 1 (presumably all attributes
        except a trailing label -- TODO confirm against euclideanDistance)
    :return: list of the k nearest training instances, closest first
    """
    if considerDimensions is None:
        considerDimensions = len(testInstance) - 1
    # Pair each training instance with its distance to the test instance,
    # sort by distance and keep the k closest. (The original also built a
    # parallel `distances` list that was never used; dropped.)
    scored = [
        (instance, euclideanDistance(testInstance, instance, considerDimensions))
        for instance in trainingSet
    ]
    scored.sort(key=operator.itemgetter(1))
    # Slicing (unlike indexing 0..k-1) does not raise when k exceeds the
    # training-set size; it simply returns every instance.
    return [instance for instance, _ in scored[:k]]
def decompose(cls, heights, t=None, t0=None, interval=None, constituents=constituent.noaa, initial=None, n_period=2, callback=None, full_output=False):
    """Return an instance of Tide which has been fitted to a series of tidal observations.

    Arguments:
    It is not necessary to provide t0 or interval if t is provided.
    heights -- ndarray of tidal observation heights
    t -- ndarray of tidal observation times
    t0 -- datetime representing the time at which heights[0] was recorded
    interval -- hourly interval between readings
    constituents -- list of constituents to use in the fit (default: constituent.noaa)
    initial -- optional Tide instance to use as first guess for least squares solver
    n_period -- only include constituents which complete at least this many periods (default: 2)
    callback -- optional function to be called at each iteration of the solver
    full_output -- whether to return the output of scipy's leastsq solver (default: False)
    """
    # Normalise the time inputs to an array of hours elapsed since t0.
    if t is not None:
        if isinstance(t[0], datetime):
            hours = Tide._hours(t[0], t)
            t0 = t[0]
        elif t0 is not None:
            # t already holds hours since t0.
            hours = t
        else:
            raise ValueError("t can be an array of datetimes, or an array " "of hours since t0 in which case t0 must be " "specified.")
    elif None not in [t0, interval]:
        # Evenly spaced readings: synthesise the hour offsets.
        hours = np.arange(len(heights)) * interval
    else:
        raise ValueError("Must provide t(datetimes), or t(hours) and " "t0(datetime), or interval(hours) and t0(datetime) " "so that each height can be identified with an " "instant in time.")
    # Remove duplicate constituents (those which travel at exactly the same
    # speed, irrespective of phase)
    constituents = list(OrderedDict.fromkeys(constituents))
    # No need for least squares to find the mean water level constituent z0,
    # work relative to mean
    constituents = [c for c in constituents if not c == constituent._Z0]
    z0 = np.mean(heights)
    heights = heights - z0
    # Only analyse frequencies which complete at least n_period cycles over
    # the data period.
    constituents = [c for c in constituents if 360.0 * n_period < hours[-1] * c.speed(astro(t0))]
    n = len(constituents)
    # The fit assumes observations in time order.
    sort = np.argsort(hours)
    hours = hours[sort]
    heights = heights[sort]
    # We partition our time/height data into intervals over which we consider
    # the values of u and f to assume a constant value (that is, their true
    # value at the midpoint of the interval). Constituent
    # speeds change much more slowly than the node factors, so we will
    # consider these constant and equal to their speed at t0, regardless of
    # the length of the time series.
    partition = 240.0  # partition width; presumably hours -- TODO confirm
    # NOTE: t is rebound here from "times" to "list of partitioned hour
    # arrays"; the closures below depend on this new meaning.
    t = Tide._partition(hours, partition)
    times = Tide._times(t0, [(i + 0.5) * partition for i in range(len(t))])
    speed, u, f, V0 = Tide._prepare(constituents, t0, times, radians=True)

    # Residual to be minimised by variation of parameters (amplitudes, phases)
    def residual(hp):
        # hp packs the parameters as [H_1..H_n, p_1..p_n].
        H, p = hp[:n, np.newaxis], hp[n:, np.newaxis]
        s = np.concatenate([
            Tide._tidal_series(t_i, H, p, speed, u_i, f_i, V0)
            for t_i, u_i, f_i in izip(t, u, f)
        ])
        res = heights - s
        if callback:
            callback(res)
        return res

    # Analytic Jacobian of the residual - this makes solving significantly
    # faster than just using gradient approximation, especially with many
    # measurements / constituents.
    def D_residual(hp):
        H, p = hp[:n, np.newaxis], hp[n:, np.newaxis]
        ds_dH = np.concatenate([
            f_i * np.cos(speed * t_i + u_i + V0 - p)
            for t_i, u_i, f_i in izip(t, u, f)
        ], axis=1)
        ds_dp = np.concatenate([
            H * f_i * np.sin(speed * t_i + u_i + V0 - p)
            for t_i, u_i, f_i in izip(t, u, f)
        ], axis=1)
        # Negated because the residual is heights - series.
        return np.append(-ds_dH, -ds_dp, axis=0)

    # Initial guess for solver, haven't done any analysis on this since the
    # solver seems to converge well regardless of the initial guess We do
    # however scale the initial amplitude guess with some measure of the
    # variation
    amplitudes = np.ones(n) * (np.sqrt(np.dot(heights, heights)) / len(heights))
    phases = np.ones(n)
    if initial:
        # Seed amplitudes/phases from a previously fitted Tide instance.
        for (c0, amplitude, phase) in initial.model:
            for i, c in enumerate(constituents):
                if c0 == c:
                    amplitudes[i] = amplitude
                    phases[i] = d2r * phase
    # NOTE: initial is rebound from a Tide instance to the packed
    # parameter vector handed to the solver.
    initial = np.append(amplitudes, phases)
    lsq = leastsq(residual, initial, Dfun=D_residual, col_deriv=True, ftol=1e-7)
    # Assemble the fitted model, re-introducing the mean-level constituent
    # z0 as the first record.
    model = np.zeros(1 + n, dtype=cls.dtype)
    model[0] = (constituent._Z0, z0, 0)
    model[1:]['constituent'] = constituents[:]
    model[1:]['amplitude'] = lsq[0][:n]
    model[1:]['phase'] = lsq[0][n:]
    if full_output:
        return cls(model=model, radians=True), lsq
    return cls(model=model, radians=True)
def _get_anchor(module_to_name, fullname):
    """Turn a full member name into an anchor.

    Args:
        module_to_name: Dictionary mapping modules to short names.
        fullname: Fully qualified name of symbol.

    Returns:
        HTML anchor string. The longest module name prefix of fullname is
        removed to make the anchor.

    Raises:
        ValueError: If fullname uses characters invalid in an anchor.
    """
    if not _anchor_re.match(fullname):
        raise ValueError("'%s' is not a valid anchor" % fullname)
    # Collect fullname with every module prefix stripped; the shortest
    # remainder corresponds to the longest matching module prefix.
    candidates = [fullname]
    for module_name in module_to_name.values():
        prefix = module_name + "."
        if fullname.startswith(prefix):
            candidates.append(fullname[len(prefix):])
    return min(candidates, key=len)
def brightness(self, brightness):
    """Set the brightness.

    :param brightness: Value to set (0.0-1.0).
    """
    try:
        # Preferred path: the command set exposes a direct brightness command.
        cmd = self.command_set.brightness(brightness)
        self.send(cmd)
        self._brightness = brightness
    except AttributeError:
        # NOTE(review): an AttributeError raised by send() or the
        # assignment above also lands here -- presumably a deliberate
        # EAFP fallback for command sets without brightness; confirm.
        # Fallback: emulate brightness via the generic stepping setter.
        self._setter('_brightness', brightness, self._dimmest, self._brightest, self._to_brightness)
def linkify_with_checkmodulations(self, checkmodulations):
    """Link checkmodulation object

    :param checkmodulations: checkmodulations object
    :type checkmodulations: alignak.objects.checkmodulation.Checkmodulations
    :return: None
    """
    for item in self:
        if not hasattr(item, 'checkmodulations'):
            continue
        # Resolve each non-empty, deduplicated name to an object uuid.
        resolved = []
        for name in (e for e in strip_and_uniq(item.checkmodulations) if e):
            modulation = checkmodulations.find_by_name(name)
            if modulation is not None and modulation.uuid not in resolved:
                resolved.append(modulation.uuid)
            else:
                # Unknown name (or uuid already present) is reported as
                # a configuration error, matching the original behavior.
                item.add_error("The checkmodulations of the %s '%s' named "
                               "'%s' is unknown!"
                               % (item.__class__.my_type, item.get_name(), name))
        item.checkmodulations = resolved
def team_profiles(self, team):
    """Get team's social media profiles linked on their TBA page.

    :param team: Team to get data on.
    :return: List of Profile objects.
    """
    raw_profiles = self._get('team/%s/social_media' % self.team_key(team))
    return [Profile(raw) for raw in raw_profiles]
def backspace(self, n=1, interval=0, pre_dl=None, post_dl=None):
    """Press the backspace key n times.

    :param n: number of key presses
    :param interval: interval between repeated presses
    :param pre_dl: optional delay before pressing
    :param post_dl: optional delay after pressing
    """
    self.delay(pre_dl)
    keyboard = self.k
    keyboard.tap_key(keyboard.backspace_key, n, interval)
    self.delay(post_dl)
def read_git_branch():
    """Obtain the current branch name from the Git repository. If on Travis CI,
    use the ``TRAVIS_BRANCH`` environment variable."""
    if os.getenv('TRAVIS'):
        return os.getenv('TRAVIS_BRANCH')
    try:
        repo = git.repo.base.Repo(search_parent_directories=True)
        return repo.active_branch.name
    except Exception:
        # Not a git checkout (or e.g. detached HEAD): report no branch.
        return ''
def isConnected(self):
    """Returns whether or not this connection is currently
    active.

    :return <bool> connected
    """
    # Connected as long as any pool still holds at least one item.
    return any(not pool.empty() for pool in self.__pool.values())
def _get_curvature(nodes, tangent_vec, s):
    r"""Compute the signed curvature of a curve at :math:`s`.

    Computed via

    .. math::

        \frac{B'(s) \times B''(s)}{\left\lVert B'(s) \right\rVert_2^3}

    .. doctest:: get-curvature
       :options: +NORMALIZE_WHITESPACE

       >>> nodes = np.asfortranarray([
       ...     [1.0, 0.75, 0.5, 0.25, 0.0],
       ...     [0.0, 2.0, -2.0, 2.0, 0.0],
       ... ])
       >>> s = 0.5
       >>> tangent_vec = evaluate_hodograph(s, nodes)
       >>> tangent_vec
       array([[-1.],
              [ 0.]])
       >>> curvature = get_curvature(nodes, tangent_vec, s)
       >>> curvature
       -12.0

    .. note::

       There is also a Fortran implementation of this function, which
       will be used if it can be built.

    Args:
        nodes (numpy.ndarray): The nodes of a curve.
        tangent_vec (numpy.ndarray): The already computed value of
            :math:`B'(s)`
        s (float): The parameter value along the curve.

    Returns:
        float: The signed curvature.
    """
    _, num_nodes = np.shape(nodes)
    if num_nodes == 2:
        # Lines have no curvature.
        return 0.0
    # NOTE: We somewhat replicate code in ``evaluate_hodograph()`` here.
    # Forward differences of the columns give (unscaled) derivative nodes.
    first_deriv = nodes[:, 1:] - nodes[:, :-1]
    second_deriv = first_deriv[:, 1:] - first_deriv[:, :-1]
    concavity = (
        (num_nodes - 1)
        * (num_nodes - 2)
        * evaluate_multi(second_deriv, np.asfortranarray([s]))
    )
    # 2D cross product B'(s) x B''(s) gives the signed numerator.
    curvature = _helpers.cross_product(
        tangent_vec.ravel(order="F"), concavity.ravel(order="F")
    )
    # NOTE: We convert to 1D to make sure NumPy uses vector norm.
    curvature /= np.linalg.norm(tangent_vec[:, 0], ord=2) ** 3
    return curvature
def get_resource_areas_by_host(self, host_id):
    """GetResourceAreasByHost.
    [Preview API]

    :param str host_id:
    :rtype: [ResourceAreaInfo]
    """
    query_parameters = {}
    if host_id is not None:
        query_parameters['hostId'] = self._serialize.query('host_id', host_id, 'str')
    # Issue the GET and unwrap the collection envelope before deserializing.
    collection = self._unwrap_collection(
        self._send(http_method='GET',
                   location_id='e81700f7-3be2-46de-8624-2eb35882fcaa',
                   version='5.0-preview.1',
                   query_parameters=query_parameters))
    return self._deserialize('[ResourceAreaInfo]', collection)
def validateTime(value, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, formats=('%H:%M:%S', '%H:%M', '%X'), excMsg=None):
    """Raises ValidationException if value is not a time formatted in one
    of the given formats. Returns a datetime.time object of value.

    * value (str): The value being validated as a time.
    * blank (bool): If True, a blank string will be accepted. Defaults to False.
    * strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped.
    * allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation.
    * blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation.
    * formats: A tuple of strings that can be passed to time.strftime, dictating the possible formats for a valid time.
    * excMsg (str): A custom message to use in the raised ValidationException.

    >>> import pysimplevalidate as pysv
    >>> pysv.validateTime('12:00:01')
    datetime.time(12, 0, 1)
    >>> pysv.validateTime('25:00:01')
    Traceback (most recent call last):
    pysimplevalidate.ValidationException: '25:00:01' is not a valid time.
    >>> pysv.validateTime('hour 12 minute 01', formats=['hour %H minute %M'])
    datetime.time(12, 1)
    """
    # Reuse the parsing logic in _validateToDateTimeFormat(); on failure,
    # re-raise with a time-specific message.
    try:
        dt = _validateToDateTimeFormat(value, formats, blank=blank, strip=strip,
                                       allowlistRegexes=allowlistRegexes,
                                       blocklistRegexes=blocklistRegexes)
    except ValidationException:
        _raiseValidationException(_('%r is not a valid time.') % (_errstr(value)), excMsg)
    else:
        return datetime.time(dt.hour, dt.minute, dt.second, dt.microsecond)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.