idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
41,500
def read(file, frames=-1, start=0, stop=None, dtype='float64', always_2d=False,
         fill_value=None, out=None, samplerate=None, channels=None,
         format=None, subtype=None, endian=None, closefd=True):
    """Provide audio data from a sound file as a NumPy array.

    Opens *file* read-only, seeks to *start*, reads up to *frames*
    frames (or until *stop*), and returns ``(data, samplerate)``.
    samplerate/channels/format/subtype/endian are only needed for
    headerless ('RAW') files.
    """
    with SoundFile(file, 'r', samplerate, channels,
                   subtype, endian, format, closefd) as f:
        # Translate start/stop/frames into a single frame count and seek.
        frames = f._prepare_read(start, stop, frames)
        data = f.read(frames, dtype, always_2d, fill_value, out)
        return data, f.samplerate
Provide audio data from a sound file as NumPy array .
41,501
def write(file, data, samplerate, subtype=None, endian=None, format=None, closefd=True):
    """Write audio data to a sound file.

    The channel count is derived from the array's second dimension;
    1-D input is treated as mono.
    """
    import numpy as np
    samples = np.asarray(data)
    channels = 1 if samples.ndim == 1 else samples.shape[1]
    with SoundFile(file, 'w', samplerate, channels,
                   subtype, endian, format, closefd) as sound_file:
        sound_file.write(samples)
Write data to a sound file .
41,502
def available_subtypes(format=None):
    """Return a dictionary of available subtypes.

    If *format* is given, only subtypes compatible with that major
    format are included.
    """
    # Query libsndfile for every subtype it was compiled with.
    subtypes = _available_formats_helper(_snd.SFC_GET_FORMAT_SUBTYPE_COUNT,
                                         _snd.SFC_GET_FORMAT_SUBTYPE)
    return dict((subtype, name) for subtype, name in subtypes
                if format is None or check_format(format, subtype))
Return a dictionary of available subtypes .
41,503
def _error_check(err, prefix=""):
    """Raise RuntimeError with libsndfile's message if *err* is non-zero.

    :param err: libsndfile error code; 0 means success.
    :param prefix: text prepended to the decoded error message.
    """
    if err != 0:
        err_str = _snd.sf_error_number(err)
        # libsndfile returns a C string; decode defensively.
        raise RuntimeError(prefix + _ffi.string(err_str).decode('utf-8', 'replace'))
Pretty-print a numerical error code if there is an error.
41,504
def _format_int(format, subtype, endian):
    """Return the numeric ID for a given format|subtype|endian combo.

    A missing subtype falls back to the major format's default subtype;
    a missing endian falls back to 'FILE'.  The final combination is
    validated with libsndfile's sf_format_check.
    """
    result = _check_format(format)
    if subtype is None:
        subtype = default_subtype(format)
        if subtype is None:
            raise TypeError("No default subtype for major format {0!r}".format(format))
    elif not isinstance(subtype, (_unicode, str)):
        raise TypeError("Invalid subtype: {0!r}".format(subtype))
    try:
        result |= _subtypes[subtype.upper()]
    except KeyError:
        raise ValueError("Unknown subtype: {0!r}".format(subtype))
    if endian is None:
        endian = 'FILE'
    elif not isinstance(endian, (_unicode, str)):
        raise TypeError("Invalid endian-ness: {0!r}".format(endian))
    try:
        result |= _endians[endian.upper()]
    except KeyError:
        raise ValueError("Unknown endian-ness: {0!r}".format(endian))
    # Ask libsndfile whether this exact combination is supported.
    info = _ffi.new("SF_INFO*")
    info.format = result
    info.channels = 1
    if _snd.sf_format_check(info) == _snd.SF_FALSE:
        raise ValueError("Invalid combination of format, subtype and endian")
    return result
Return numeric ID for given format|subtype|endian combo .
41,505
def _check_mode(mode):
    """Check if *mode* is valid and return its integer representation.

    A valid mode has only characters from 'xrwb+', no duplicates, and
    exactly one of 'x', 'r', 'w'.
    """
    if not isinstance(mode, (_unicode, str)):
        raise TypeError("Invalid mode: {0!r}".format(mode))
    mode_set = set(mode)
    # Duplicate characters are detected by comparing lengths.
    if mode_set.difference('xrwb+') or len(mode) > len(mode_set):
        raise ValueError("Invalid mode: {0!r}".format(mode))
    if len(mode_set.intersection('xrw')) != 1:
        raise ValueError("mode must contain exactly one of 'xrw'")
    # '+' always means read/write, regardless of the base mode letter.
    if '+' in mode_set:
        mode_int = _snd.SFM_RDWR
    elif 'r' in mode_set:
        mode_int = _snd.SFM_READ
    else:
        mode_int = _snd.SFM_WRITE
    return mode_int
Check if mode is valid and return its integer representation .
41,506
def _create_info_struct(file, mode, samplerate, channels, format, subtype, endian):
    """Check arguments and create the SF_INFO struct for opening *file*.

    For new files (and 'RAW', which has no header), samplerate and
    channels are mandatory; for existing files they must NOT be given
    because they are read from the file header.
    """
    original_format = format
    if format is None:
        format = _get_format_from_filename(file, mode)
        assert isinstance(format, (_unicode, str))
    else:
        _check_format(format)
    info = _ffi.new("SF_INFO*")
    if 'r' not in mode or format.upper() == 'RAW':
        if samplerate is None:
            raise TypeError("samplerate must be specified")
        info.samplerate = samplerate
        if channels is None:
            raise TypeError("channels must be specified")
        info.channels = channels
        info.format = _format_int(format, subtype, endian)
    else:
        if any(arg is not None for arg in (
                samplerate, channels, original_format, subtype, endian)):
            raise TypeError("Not allowed for existing files (except 'RAW'): "
                            "samplerate, channels, format, subtype, endian")
    return info
Check arguments and create SF_INFO struct .
41,507
def _format_str(format_int):
    """Return the string representation of a given numeric format.

    Searches major formats, subtypes and endian-nesses; returns 'n/a'
    when the number is not found in any of them.
    """
    for dictionary in _formats, _subtypes, _endians:
        for k, v in dictionary.items():
            if v == format_int:
                return k
    else:
        # for/else: only reached when no dictionary contained the value.
        return 'n/a'
Return the string representation of a given numeric format .
41,508
def _format_info(format_int, format_flag=_snd.SFC_GET_FORMAT_INFO):
    """Return the ID and short description of a given format.

    :returns: (format string, human-readable name); the name is empty
        when libsndfile provides none.
    """
    format_info = _ffi.new("SF_FORMAT_INFO*")
    format_info.format = format_int
    _snd.sf_command(_ffi.NULL, format_flag, format_info,
                    _ffi.sizeof("SF_FORMAT_INFO"))
    name = format_info.name
    return (_format_str(format_info.format),
            _ffi.string(name).decode('utf-8', 'replace') if name else "")
Return the ID and short description of a given format .
41,509
def _check_format(format_str):
    """Check if *format_str* is a valid major format and return its ID.

    :raises TypeError: if *format_str* is not a string.
    :raises ValueError: if the (case-insensitive) name is unknown.
    """
    if not isinstance(format_str, (_unicode, str)):
        raise TypeError("Invalid format: {0!r}".format(format_str))
    try:
        format_int = _formats[format_str.upper()]
    except KeyError:
        raise ValueError("Unknown format: {0!r}".format(format_str))
    return format_int
Check if format_str is valid and return format ID .
41,510
def _has_virtual_io_attrs(file, mode_int):
    """Check if *file* exposes everything needed for virtual IO.

    seek/tell are always required; write is optional when the file is
    opened read-only, and read/readinto are optional when write-only.
    """
    readonly = mode_int == _snd.SFM_READ
    writeonly = mode_int == _snd.SFM_WRITE
    if not (hasattr(file, 'seek') and hasattr(file, 'tell')):
        return False
    can_write = hasattr(file, 'write') or readonly
    can_read = (hasattr(file, 'read') or hasattr(file, 'readinto')
                or writeonly)
    return can_write and can_read
Check if file has all the necessary attributes for virtual IO .
41,511
def extra_info(self):
    """Retrieve the log string generated when opening the file.

    Allocates a 16 kB buffer and asks libsndfile to fill it with its
    internal log output.
    """
    info = _ffi.new("char[]", 2 ** 14)
    _snd.sf_command(self._file, _snd.SFC_GET_LOG_INFO,
                    info, _ffi.sizeof(info))
    return _ffi.string(info).decode('utf-8', 'replace')
Retrieve the log string generated when opening the file .
41,512
def read(self, frames=-1, dtype='float64', always_2d=False, fill_value=None, out=None):
    """Read from the file and return data as NumPy array.

    Reads up to *frames* frames (all remaining if negative) into *out*
    or a freshly allocated array; short reads are either truncated or
    padded with *fill_value*.
    """
    if out is None:
        frames = self._check_frames(frames, fill_value)
        out = self._create_empty_array(frames, always_2d, dtype)
    else:
        if frames < 0 or frames > len(out):
            frames = len(out)
    frames = self._array_io('read', out, frames)
    if len(out) > frames:
        if fill_value is None:
            # No fill requested: shrink the result to what was read.
            out = out[:frames]
        else:
            out[frames:] = fill_value
    return out
Read from the file and return data as NumPy array .
41,513
def buffer_read(self, frames=-1, dtype=None):
    """Read from the file and return data as buffer object.

    :param dtype: sample dtype name; validated by _check_dtype.
    """
    frames = self._check_frames(frames, fill_value=None)
    ctype = self._check_dtype(dtype)
    cdata = _ffi.new(ctype + '[]', frames * self.channels)
    read_frames = self._cdata_io('read', cdata, ctype, frames)
    # _check_frames already clamped to the remaining length.
    assert read_frames == frames
    return _ffi.buffer(cdata)
Read from the file and return data as buffer object .
41,514
def buffer_read_into(self, buffer, dtype):
    """Read from the file into a given buffer object.

    :returns: the number of frames actually read.
    """
    ctype = self._check_dtype(dtype)
    cdata, frames = self._check_buffer(buffer, ctype)
    frames = self._cdata_io('read', cdata, ctype, frames)
    return frames
Read from the file into a given buffer object .
41,515
def write(self, data):
    """Write audio data from a NumPy array to the file.

    The array is made C-contiguous first; the cached frame counter is
    updated after a (verified) complete write.
    """
    import numpy as np
    data = np.ascontiguousarray(data)
    written = self._array_io('write', data, len(data))
    assert written == len(data)
    self._update_frames(written)
Write audio data from a NumPy array to the file .
41,516
def truncate(self, frames=None):
    """Truncate the file to a given number of frames.

    Defaults to the current position; updates the cached frame count on
    success.
    """
    if frames is None:
        frames = self.tell()
    err = _snd.sf_command(self._file, _snd.SFC_FILE_TRUNCATE,
                          _ffi.new("sf_count_t*", frames),
                          _ffi.sizeof("sf_count_t"))
    if err:
        raise RuntimeError("Error truncating the file")
    self._info.frames = frames
Truncate the file to a given number of frames .
41,517
def close(self):
    """Close the file. Can be called multiple times.

    The handle is cleared before error checking so a failing close does
    not leave a dangling pointer behind.
    """
    if not self.closed:
        # Flush pending data before handing the handle back to libsndfile.
        self.flush()
        err = _snd.sf_close(self._file)
        self._file = None
        _error_check(err)
Close the file . Can be called multiple times .
41,518
def _check_frames ( self , frames , fill_value ) : if self . seekable ( ) : remaining_frames = self . frames - self . tell ( ) if frames < 0 or ( frames > remaining_frames and fill_value is None ) : frames = remaining_frames elif frames < 0 : raise ValueError ( "frames must be specified for non-seekable files" ) return frames
Reduce frames to no more than are available in the file .
41,519
def _check_buffer(self, data, ctype):
    """Convert a buffer to cdata and check that its size is a whole
    number of frames.

    :returns: (cdata, frame count) tuple.
    """
    assert ctype in _ffi_types.values()
    if not isinstance(data, bytes):
        data = _ffi.from_buffer(data)
    frames, remainder = divmod(len(data), self.channels * _ffi.sizeof(ctype))
    if remainder:
        raise ValueError("Data size must be a multiple of frame size")
    return data, frames
Convert buffer to cdata and check for valid size .
41,520
def _create_empty_array ( self , frames , always_2d , dtype ) : import numpy as np if always_2d or self . channels > 1 : shape = frames , self . channels else : shape = frames , return np . empty ( shape , dtype , order = 'C' )
Create an empty array with appropriate shape .
41,521
def _check_dtype(self, dtype):
    """Check if *dtype* is a valid dtype string and return the
    corresponding ctype string."""
    try:
        return _ffi_types[dtype]
    except KeyError:
        raise ValueError("dtype must be one of {0!r} and not {1!r}".format(
            sorted(_ffi_types.keys()), dtype))
Check if dtype string is valid and return ctype string .
41,522
def _array_io(self, action, array, frames):
    """Check *array* and call the low-level IO function.

    The array must be 1-D (mono only) or 2-D with one column per
    channel, C-contiguous, and of a supported dtype.
    """
    if (array.ndim not in (1, 2) or
            array.ndim == 1 and self.channels != 1 or
            array.ndim == 2 and array.shape[1] != self.channels):
        raise ValueError("Invalid shape: {0!r}".format(array.shape))
    if not array.flags.c_contiguous:
        raise ValueError("Data must be C-contiguous")
    ctype = self._check_dtype(array.dtype.name)
    assert array.dtype.itemsize == _ffi.sizeof(ctype)
    # Hand the raw data pointer to the C layer (zero-copy).
    cdata = _ffi.cast(ctype + '*', array.__array_interface__['data'][0])
    return self._cdata_io(action, cdata, ctype, frames)
Check array and call low - level IO function .
41,523
def _update_frames(self, written):
    """Update the cached frame count after writing.

    Seekable files re-measure the true length via a seek to the end
    (writes may overwrite instead of append); non-seekable files just
    add *written*.
    """
    if self.seekable():
        curr = self.tell()
        self._info.frames = self.seek(0, SEEK_END)
        self.seek(curr, SEEK_SET)
    else:
        self._info.frames += written
Update self . frames after writing .
41,524
def _prepare_read ( self , start , stop , frames ) : if start != 0 and not self . seekable ( ) : raise ValueError ( "start is only allowed for seekable files" ) if frames >= 0 and stop is not None : raise TypeError ( "Only one of {frames, stop} may be used" ) start , stop , _ = slice ( start , stop ) . indices ( self . frames ) if stop < start : stop = start if frames < 0 : frames = stop - start if self . seekable ( ) : self . seek ( start , SEEK_SET ) return frames
Seek to start frame and calculate length .
41,525
def probes_used_generate_vector(probe_files_full, probe_files_model):
    """Generate a boolean vector flagging which probes of the full set
    are also used by the model.

    :param probe_files_full: dict of all probe files, keyed by probe id.
    :param probe_files_model: dict of the probe files used by one model.
    :returns: boolean numpy array aligned with ``sorted(probe_files_full)``.
    """
    import numpy as np
    # Bug fix: dict.has_key() only exists in Python 2; use 'in' instead.
    return np.array([k in probe_files_model
                     for k in sorted(probe_files_full.keys())], dtype='bool')
Generates boolean matrices indicating which are the probes for each model
41,526
def probes_used_extract_scores(full_scores, same_probes):
    """Extract the score columns selected by the boolean row vector
    *same_probes*.

    :param full_scores: (n_models, n_probes) score matrix.
    :param same_probes: boolean vector of length n_probes.
    :returns: float64 matrix with only the selected columns.
    :raises ValueError: on shape mismatch.  (Bug fix: the original
        ``raise "Size mismatch"`` raised a string, which is illegal in
        Python 3.)
    """
    import numpy as np
    if full_scores.shape[1] != same_probes.shape[0]:
        raise ValueError("Size mismatch")
    # Boolean column indexing replaces the explicit double loop.
    return np.asarray(full_scores, dtype='float64')[:, np.asarray(same_probes, dtype=bool)]
Extracts a matrix of scores for a model, given a probes_used row vector of booleans.
41,527
def read(filename):
    """Read an audio (WAV) file.

    :param filename: path to the WAV file.
    :returns: (sampling rate, samples-as-float array) tuple.
    """
    import numpy
    import scipy.io.wavfile
    rate, audio = scipy.io.wavfile.read(filename)
    # Bug fix: numpy.cast[...] is deprecated (removed in NumPy 2.0) and
    # 'numpy' was never imported in the original body; astype() is the
    # supported conversion API.
    data = audio.astype('float')
    return rate, data
Read audio file
41,528
def normalize_std_array(vector):
    """Apply a zero-mean, unit-variance normalization to an arrayset.

    *vector* is a sequence of samples; mean and std are accumulated
    over the samples, then each sample is normalized.

    NOTE(review): ``length = 1`` hard-codes one value per sample and
    relies on broadcasting when samples are longer — confirm against
    callers.  A zero std entry would divide by zero below.
    """
    length = 1
    n_samples = len(vector)
    mean = numpy.ndarray((length,), 'float64')
    std = numpy.ndarray((length,), 'float64')
    mean.fill(0)
    std.fill(0)
    # Accumulate sum and sum of squares over the samples.
    for array in vector:
        x = array.astype('float64')
        mean += x
        std += (x ** 2)
    mean /= n_samples
    std /= n_samples
    # Variance = E[x^2] - E[x]^2, then take the square root.
    std -= (mean ** 2)
    std = std ** 0.5
    arrayset = numpy.ndarray(shape=(n_samples, mean.shape[0]), dtype=numpy.float64)
    for i in range(0, n_samples):
        arrayset[i, :] = (vector[i] - mean) / std
    return arrayset
Applies a unit mean and variance normalization to an arrayset
41,529
def _conversion(self, input_signal, vad_file):
    """Convert an external VAD to follow the Spear convention.

    Energy is computed only to obtain the per-frame indexing, which
    avoids out-of-bound array indexes; the labels themselves come from
    the external VAD file.

    Bug fix: the body referenced an undefined name ``rate_wavsample``;
    the (rate, samples) tuple is the *input_signal* argument.
    """
    e = bob.ap.Energy(input_signal[0], self.win_length_ms, self.win_shift_ms)
    energy_array = e(input_signal[1])
    labels = self.use_existing_vad(energy_array, vad_file)
    return labels
Converts an external VAD to follow the Spear convention. Energy is used in order to avoid out-of-bound array indexes.
41,530
def mod_4hz(self, rate_wavsample):
    """Computes and returns the 4Hz modulation energy features for the
    given input wave data.

    :param rate_wavsample: (sampling rate, samples) tuple.
    :returns: (labels, energy_array, mod_4hz) — smoothed per-frame VAD
        labels, frame energies, and the 4Hz modulation values.
    """
    wl = self.win_length_ms
    ws = self.win_shift_ms
    nf = self.n_filters
    f_min = self.f_min
    f_max = self.f_max
    pre = self.pre_emphasis_coef
    # Per-band (linear, not log) filter-bank energies from the spectrogram.
    c = bob.ap.Spectrogram(rate_wavsample[0], wl, ws, nf, f_min, f_max, pre)
    c.energy_filter = True
    c.log_filter = False
    c.energy_bands = True
    sig = rate_wavsample[1]
    energy_bands = c(sig)
    # Band-pass around 4 Hz, then average across the bands.
    filtering_res = self.pass_band_filtering(energy_bands, rate_wavsample[0])
    mod_4hz = self.modulation_4hz(filtering_res, rate_wavsample)
    mod_4hz = self.averaging(mod_4hz)
    # Frame energies feed the actual VAD decision.
    e = bob.ap.Energy(rate_wavsample[0], wl, ws)
    energy_array = e(rate_wavsample[1])
    labels = self._voice_activity_detection(energy_array, mod_4hz)
    labels = utils.smoothing(labels, self.smoothing_window)
    logger.info("After Mod-4Hz based VAD there are %d frames remaining over %d",
                numpy.sum(labels), len(labels))
    return labels, energy_array, mod_4hz
Computes and returns the 4Hz modulation energy features for the given input wave file
41,531
def read_matlab_files(self, biofile, directory, extension):
    """Read pre-computed CQCC Matlab features.

    The bob.io.matlab import registers the .mat loader that
    bob.io.base.load then uses.
    """
    import bob.io.matlab
    data_path = biofile.make_path(directory, extension)
    return bob.io.base.load(data_path)
Read pre - computed CQCC Matlab features here
41,532
def command_line_arguments(command_line_parameters):
    """Defines and parses the command line parameters that are accepted.

    :param command_line_parameters: list of argument strings, or None
        to parse sys.argv.
    :returns: the parsed argparse namespace.
    """
    parser = argparse.ArgumentParser(
        description='Execute baseline algorithms with default parameters',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-a', '--algorithms', choices=all_algorithms,
                        default=('gmm-voxforge',), nargs='+',
                        help='Select one (or more) algorithms that you want to execute.')
    parser.add_argument('--all', action='store_true',
                        help='Select all algorithms.')
    parser.add_argument('-d', '--database', choices=available_databases,
                        default='voxforge',
                        help='The database on which the baseline algorithm is executed.')
    parser.add_argument('-b', '--baseline-directory', default='baselines',
                        help='The sub-directory, where the baseline results are stored.')
    parser.add_argument('-f', '--directory',
                        help='The directory to write the data of the experiment into. If not specified, the default directories of the verify.py script are used (see verify.py --help).')
    parser.add_argument('-g', '--grid', action='store_true',
                        help='Execute the algorithm in the SGE grid.')
    parser.add_argument('-l', '--parallel', type=int,
                        help='Run the algorithms in parallel on the local machine, using the given number of parallel threads')
    # NOTE(review): store_false means passing -z *disables* the ZT norm,
    # although the help text says "Compute" — confirm intended polarity.
    parser.add_argument('-z', '--zt-norm', action='store_false',
                        help='Compute the ZT norm for the files (might not be availabe for all databases).')
    parser.add_argument('-q', '--dry-run', action='store_true',
                        help='Just print the commands, but do not execute them.')
    parser.add_argument('-e', '--evaluate', nargs='+',
                        choices=('EER', 'HTER', 'ROC', 'DET', 'CMC', 'RR'),
                        help='Evaluate the results of the algorithms (instead of running them) using the given evaluation techniques.')
    parser.add_argument('parameters', nargs=argparse.REMAINDER,
                        help='Parameters directly passed to the verify.py script.')
    # Registers the -v/--verbose option consumed below.
    bob.core.log.add_command_line_option(parser)
    args = parser.parse_args(command_line_parameters)
    if args.all:
        args.algorithms = all_algorithms
    bob.core.log.set_verbosity_level(logger, args.verbose)
    return args
Defines the command line parameters that are accepted .
41,533
def calc_mean(c0, c1=None):
    """Calculate the mean of the data.

    With one class, returns its column-wise mean; with two classes,
    returns the average of the two per-class means (balancing unequal
    class sizes).

    Fix: *c1* now defaults to None instead of the mutable ``[]``
    (mutable defaults are shared between calls); an empty sequence is
    still treated as "no second class", so callers are unaffected.
    """
    if c1 is not None and len(c1) != 0:
        return (numpy.mean(c0, 0) + numpy.mean(c1, 0)) / 2.
    return numpy.mean(c0, 0)
Calculates the mean of the data .
41,534
def calc_std(c0, c1=None):
    """Calculate the column-wise standard deviation of the data.

    With two classes, the smaller class is replicated so that both
    classes weigh roughly equally before computing the pooled std.

    Fix: *c1* now defaults to None instead of the mutable ``[]``; an
    empty sequence is still treated as "no second class".
    """
    if c1 is None or len(c1) == 0:
        return numpy.std(c0, 0)
    # Replication factors: ceil of the inverse size ratio for the
    # smaller class, 1 for the larger one.
    prop = float(len(c0)) / float(len(c1))
    if prop < 1:
        p0 = int(math.ceil(1 / prop))
        p1 = 1
    else:
        p0 = 1
        p1 = int(math.ceil(prop))
    return numpy.std(numpy.vstack(p0 * [c0] + p1 * [c1]), 0)
Calculates the variance of the data .
41,535
def calc_mean_std(c0, c1=[], nonStdZero=False):
    """Calculate both the mean and the std of the data.

    :param nonStdZero: if True, replace zero std entries by 1 so that a
        later normalization never divides by zero.
    :returns: (mean, std) tuple from calc_mean / calc_std.
    """
    mi = calc_mean(c0, c1)
    std = calc_std(c0, c1)
    if (nonStdZero):
        std[std == 0] = 1
    return mi, std
Calculates both the mean of the data .
41,536
def solve_sweep_structure(self, structures, sweep_param_list,
                          filename="structure_n_effs.dat", plot=True,
                          x_label="Structure number", fraction_mode_list=[]):
    """Find the modes of many structures.

    Solves each structure, records effective indices, mode types and
    TE/TM fractions, optionally writes them to data files in the modes
    directory and plots the results.

    NOTE(review): *fraction_mode_list* uses a mutable ``[]`` default;
    it is only passed through here, but confirm callees do not mutate it.
    """
    n_effs = []
    mode_types = []
    fractions_te = []
    fractions_tm = []
    # Solve every structure and collect per-structure results.
    for s in tqdm.tqdm(structures, ncols=70):
        self.solve(s)
        n_effs.append(np.real(self.n_effs))
        mode_types.append(self._get_mode_types())
        fractions_te.append(self.fraction_te)
        fractions_tm.append(self.fraction_tm)
    if filename:
        self._write_n_effs_to_file(n_effs, self._modes_directory + filename,
                                   sweep_param_list)
        with open(self._modes_directory + "mode_types.dat", "w") as fs:
            header = ",".join("Mode%i" % i for i, _ in enumerate(mode_types[0]))
            fs.write("# " + header + "\n")
            for mt in mode_types:
                txt = ",".join("%s %.2f" % pair for pair in mt)
                fs.write(txt + "\n")
        with open(self._modes_directory + "fraction_te.dat", "w") as fs:
            header = "fraction te"
            fs.write("# param sweep," + header + "\n")
            for param, fte in zip(sweep_param_list, fractions_te):
                txt = "%.6f," % param
                txt += ",".join("%.2f" % f for f in fte)
                fs.write(txt + "\n")
        with open(self._modes_directory + "fraction_tm.dat", "w") as fs:
            header = "fraction tm"
            fs.write("# param sweep," + header + "\n")
            for param, ftm in zip(sweep_param_list, fractions_tm):
                txt = "%.6f," % param
                txt += ",".join("%.2f" % f for f in ftm)
                fs.write(txt + "\n")
    if plot:
        # Matplotlib gets TeX-style labels; the fallback plotter plain text.
        if MPL:
            title = "$n_{eff}$ vs %s" % x_label
            y_label = "$n_{eff}$"
        else:
            title = "n_{effs} vs %s" % x_label
            y_label = "n_{eff}"
        self._plot_n_effs(self._modes_directory + filename,
                          self._modes_directory + "fraction_te.dat",
                          x_label, y_label, title)
        title = "TE Fraction vs %s" % x_label
        self._plot_fraction(self._modes_directory + "fraction_te.dat",
                            x_label, "TE Fraction [%]", title,
                            fraction_mode_list)
        title = "TM Fraction vs %s" % x_label
        self._plot_fraction(self._modes_directory + "fraction_tm.dat",
                            x_label, "TM Fraction [%]", title,
                            fraction_mode_list)
    return n_effs
Find the modes of many structures .
41,537
def solve_sweep_wavelength(self, structure, wavelengths,
                           filename="wavelength_n_effs.dat", plot=True):
    """Solve for the effective indices of a fixed structure at
    different wavelengths.

    :param structure: the structure to re-solve at each wavelength.
    :param wavelengths: iterable of wavelengths to sweep.
    :param filename: data file to write in the modes directory
        (falsy disables writing).
    :param plot: whether to also plot the effective indices.
    :returns: list of (real) effective-index arrays, one per wavelength.
    """
    n_effs = []
    for w in tqdm.tqdm(wavelengths, ncols=70):
        structure.change_wavelength(w)
        self.solve(structure)
        n_effs.append(np.real(self.n_effs))
    if filename:
        self._write_n_effs_to_file(n_effs, self._modes_directory + filename,
                                   wavelengths)
    if plot:
        if MPL:
            title = "$n_{eff}$ vs Wavelength"
            y_label = "$n_{eff}$"
        else:
            # Bug fix: the original applied '% x_label' to a string with
            # no format specifier (TypeError) and x_label is undefined
            # here (NameError) — the title is a plain literal.
            title = "n_{effs} vs Wavelength"
            y_label = "n_{eff}"
        self._plot_n_effs(self._modes_directory + filename,
                          self._modes_directory + "fraction_te.dat",
                          "Wavelength", "n_{eff}", title)
    return n_effs
Solve for the effective indices of a fixed structure at different wavelengths .
41,538
def _add_material ( self , x_bot_left , y_bot_left , x_top_right , y_top_right , n_material , angle = 0 ) : x_mask = np . logical_and ( x_bot_left <= self . x , self . x <= x_top_right ) y_mask = np . logical_and ( y_bot_left <= self . y , self . y <= y_top_right ) xy_mask = np . kron ( y_mask , x_mask ) . reshape ( ( y_mask . size , x_mask . size ) ) self . n [ xy_mask ] = n_material if angle : self . _add_triangular_sides ( xy_mask , angle , y_top_right , y_bot_left , x_top_right , x_bot_left , n_material ) return self . n
A low - level function that allows writing a rectangle refractive index profile to a Structure .
41,539
def add_material(self, x_min, x_max, n, angle=0):
    """Add a refractive index between two x-points.

    *n* may be a constant or a callable n(wavelength); the parameters
    are also recorded in _mat_params for later re-evaluation.
    """
    self._mat_params.append([x_min, x_max, n, angle])
    if not callable(n):
        # Wrap constants so every index is evaluated the same way.
        n_mat = lambda wl: n
    else:
        n_mat = n
    Structure._add_material(self, x_min, self.y_min, x_max, self.y_max,
                            n_mat(self._wl), angle)
    return self.n
Add a refractive index between two x - points .
41,540
def trapz2(f, x=None, y=None, dx=1.0, dy=1.0):
    """Double integration via two nested trapezoidal rules.

    Integrates *f* first along its last axis (coordinates *y* or
    spacing *dy*), then along *x* / *dx*.
    """
    inner = numpy.trapz(f, x=y, dx=dy)
    return numpy.trapz(inner, x=x, dx=dx)
Double integrate .
41,541
def solve(self, neigs=4, tol=0, guess=None, mode_profiles=True, initial_mode_guess=None):
    """This function finds the eigenmodes.

    Solves the sparse eigenproblem built by build_matrix() for *neigs*
    modes, optionally using *guess* (an effective index) as a
    shift-invert target and *initial_mode_guess* as the starting vector.

    NOTE(review): ``from scipy.sparse.linalg import eigen`` and
    ``scipy.sqrt`` are long-removed scipy APIs — confirm the pinned
    scipy version still provides them.
    """
    from scipy.sparse.linalg import eigen
    self.nmodes = neigs
    self.tol = tol
    A = self.build_matrix()
    if guess is not None:
        # Shift-invert around beta**2 of the guessed effective index.
        k = 2 * numpy.pi / self.wl
        shift = (guess * k) ** 2
    else:
        shift = None
    [eigvals, eigvecs] = eigen.eigs(A, k=neigs, which='LR', tol=0.001,
                                    ncv=None, v0=initial_mode_guess,
                                    return_eigenvectors=mode_profiles,
                                    sigma=shift)
    # Effective indices from the eigenvalues (which are beta**2).
    neffs = self.wl * scipy.sqrt(eigvals) / (2 * numpy.pi)
    if mode_profiles:
        Hxs = []
        Hys = []
        nx = self.nx
        ny = self.ny
        for ieig in range(neigs):
            # First half of each eigenvector is Hx, second half is Hy.
            Hxs.append(eigvecs[:nx * ny, ieig].reshape(nx, ny))
            Hys.append(eigvecs[nx * ny:, ieig].reshape(nx, ny))
    # Sort modes by decreasing effective index.
    idx = numpy.flipud(numpy.argsort(neffs))
    neffs = neffs[idx]
    self.neff = neffs
    if mode_profiles:
        tmpx = []
        tmpy = []
        for i in idx:
            tmpx.append(Hxs[i])
            tmpy.append(Hys[i])
        Hxs = tmpx
        Hys = tmpy
        [Hzs, Exs, Eys, Ezs] = self.compute_other_fields(neffs, Hxs, Hys)
        self.modes = []
        for (neff, Hx, Hy, Hz, Ex, Ey, Ez) in zip(neffs, Hxs, Hys, Hzs, Exs, Eys, Ezs):
            self.modes.append(
                FDMode(self.wl, self.x, self.y, neff,
                       Ey, Ex, Ez, Hy, Hx, Hz).normalize())
    return self
This function finds the eigenmodes .
41,542
def grating_coupler_period(wavelength, n_eff, n_clad, incidence_angle_deg,
                           diffration_order=1):
    """Calculate the period needed for a grating coupler.

    Phase-matches the guided mode (real part of *n_eff*) to a wave
    incident from the cladding at *incidence_angle_deg* for the given
    diffraction order.
    """
    k0 = 2. * np.pi / wavelength
    propagation = n_eff.real * k0
    incident = k0 * n_clad * np.sin(np.radians(incidence_angle_deg))
    return 2. * np.pi * diffration_order / (propagation - incident)
Calculate the period needed for a grating coupler .
41,543
def oop(aa):
    """For cmd output.

    Formats one stock record (id, name, last date/price, range, moving
    averages, MAO values, volume and dispersion stats) as a UTF-8
    encoded byte string.

    NOTE(review): aa.MAO(3, 6) is evaluated three times — confirm it is
    a cheap/pure call.
    """
    return ('%s %s %s %.2f %+.2f %s %s %s %s %+.2f %s %s %.2f %.4f %.4f' % (
        aa.stock_no, aa.stock_name, aa.data_date[-1], aa.raw_data[-1],
        aa.range_per, aa.MAC(3), aa.MAC(6), aa.MAC(18),
        aa.MAO(3, 6)[1], aa.MAO(3, 6)[0][1][-1], aa.MAO(3, 6)[0][0],
        aa.RABC, aa.stock_vol[-1] / 1000, aa.SD, aa.CV)).encode('utf-8')
For cmd output .
41,544
def reflection(n1, n2):
    """Power reflection at the interface of two refractive-index
    materials (normal incidence): R = |(n1 - n2) / (n1 + n2)|**2."""
    amplitude = (n1 - n2) / (n1 + n2)
    return abs(amplitude) ** 2
Calculate the power reflection at the interface of two refractive index materials .
41,545
def coupling_efficiency(mode_solver, fibre_mfd, fibre_offset_x=0,
                        fibre_offset_y=0, n_eff_fibre=1.441):
    """Finds the coupling efficiency between each solved mode and a
    fibre of given mode-field diameter (MFD).

    Efficiency per mode = |overlap with a Gaussian fibre mode| *
    |transmission between the mode and fibre effective indices|.
    """
    etas = []
    # Gaussian approximation of the fibre mode, optionally offset.
    gaus = _make_gaussian(mode_solver._structure.xc, mode_solver._structure.yc,
                          fibre_mfd, fibre_offset_x, fibre_offset_y)
    for mode, n_eff in zip(mode_solver.modes, mode_solver.n_effs):
        o = abs(_overlap(mode, gaus))
        t = abs(transmission(n_eff, n_eff_fibre))
        eta = o * t
        etas.append(eta)
    return etas
Finds the coupling efficiency between a solved fundamental mode and a fibre of given MFD .
41,546
def getIndex(reference):
    """Create a minimap2 aligner index for the reference.

    Uses *reference* if given, otherwise falls back to the bundled
    lambda-genome fasta located relative to this script; exits when the
    fasta is missing and raises when the index cannot be built.
    """
    if reference:
        reffas = reference
    else:
        # Locate reference/DNA_CS.fasta next to the package directory.
        parent_directory = path.dirname(path.abspath(path.dirname(__file__)))
        reffas = path.join(parent_directory, "reference/DNA_CS.fasta")
    if not path.isfile(reffas):
        logging.error("Could not find reference fasta for lambda genome.")
        sys.exit("Could not find reference fasta for lambda genome.")
    aligner = mp.Aligner(reffas, preset="map-ont")
    if not aligner:
        logging.error("Failed to load/build index")
        raise Exception("ERROR: failed to load/build index")
    return aligner
Find the reference folder using the location of the script file, create the index, and test whether it was built successfully.
41,547
def _exec_ipmitool(driver_info, command):
    """Execute an ipmitool command against the BMC in *driver_info*.

    :param driver_info: dict with 'address', 'username' and 'password'.
    :param command: the ipmitool sub-command, e.g. "fru print 0x1".
    :returns: the tool's stdout as text, or None on any failure
        (deliberately best-effort, as before).

    Security fix: the command is passed as an argument list with
    shell=False, so the address and credentials can no longer be
    interpreted by a shell (command injection).
    """
    args = ['ipmitool',
            '-H', driver_info['address'],
            '-I', 'lanplus',
            '-U', driver_info['username'],
            '-P', driver_info['password']]
    args.extend(command.split())
    try:
        # universal_newlines=True yields str; callers do substring
        # checks like "'port' in out", which fail on bytes in Python 3.
        return subprocess.check_output(args, universal_newlines=True)
    except Exception:
        return None
Execute the ipmitool command .
41,548
def get_nic_capacity(driver_info, ilo_fw):
    """Gets the FRU data to see if it is NIC data and returns the
    parsed NIC capacity (or None).

    Old iLO firmware cannot list all FRUs at once, so each FRU id is
    probed individually; newer firmware is queried with one
    "fru print".
    """
    i = 0x0
    value = None
    ilo_fw_rev = get_ilo_version(ilo_fw) or DEFAULT_FW_REV
    if ilo_fw_rev < MIN_SUGGESTED_FW_REV:
        for i in range(0xff):
            # Skip the reserved FRU id range 0x6e..0xee.
            if (i < 0x6e) or (i > 0xee):
                cmd = "fru print %s" % hex(i)
                out = _exec_ipmitool(driver_info, cmd)
                if out and 'port' in out and 'Adapter' in out:
                    value = _parse_ipmi_nic_capacity(out)
                    if value is not None:
                        break
            else:
                continue
    else:
        cmd = "fru print"
        out = _exec_ipmitool(driver_info, cmd)
        if out:
            for line in out.split('\n'):
                if line and 'port' in line and 'Adapter' in line:
                    value = _parse_ipmi_nic_capacity(line)
                    if value is not None:
                        break
    return value
Gets the FRU data to see if it is NIC data
41,549
def _parse_ipmi_nic_capacity ( nic_out ) : if ( ( "Device not present" in nic_out ) or ( "Unknown FRU header" in nic_out ) or not nic_out ) : return None capacity = None product_name = None data = nic_out . split ( '\n' ) for item in data : fields = item . split ( ':' ) if len ( fields ) > 1 : first_field = fields [ 0 ] . strip ( ) if first_field == "Product Name" : product_name = ':' . join ( fields [ 1 : ] ) break if product_name : product_name_array = product_name . split ( ' ' ) for item in product_name_array : if 'Gb' in item : capacity_int = item . strip ( 'Gb' ) if capacity_int . isdigit ( ) : capacity = item return capacity
Parse the FRU output for NIC capacity
41,550
def _extract_encryption_metadata(entity, require_encryption, key_encryption_key, key_resolver):
    """Extracts the encryption metadata from the given entity, decoding
    the encrypted-property list to utf-8 strings.

    :returns: (content_encryption_IV, encrypted_properties_list,
        content_encryption_key, isJavaV1); all four are None when the
        entity carries no encryption metadata and *require_encryption*
        is False — otherwise ValueError is raised.
    """
    _validate_not_none('entity', entity)
    try:
        # Metadata2 holds the encrypted property list; Metadata1 the
        # JSON-encoded encryption-agent description.
        encrypted_properties_list = _decode_base64_to_bytes(entity['_ClientEncryptionMetadata2'])
        encryption_data = entity['_ClientEncryptionMetadata1']
        encryption_data = _dict_to_encryption_data(loads(encryption_data))
    except Exception as e:
        if require_encryption:
            raise ValueError(_ERROR_ENTITY_NOT_ENCRYPTED)
        else:
            return (None, None, None, None)
    if not (encryption_data.encryption_agent.encryption_algorithm == _EncryptionAlgorithm.AES_CBC_256):
        raise ValueError(_ERROR_UNSUPPORTED_ENCRYPTION_ALGORITHM)
    content_encryption_key = _validate_and_unwrap_cek(encryption_data,
                                                      key_encryption_key,
                                                      key_resolver)
    # Entities encrypted by the Java v1 client need different decoding
    # of the property list further below.
    isJavaV1 = (encryption_data.key_wrapping_metadata is None) or \
        ((encryption_data.encryption_agent.protocol == _ENCRYPTION_PROTOCOL_V1) and
         'EncryptionLibrary' in encryption_data.key_wrapping_metadata and
         'Java' in encryption_data.key_wrapping_metadata['EncryptionLibrary'])
    # Per-property IV derived from the entity keys and property name.
    metadataIV = _generate_property_iv(encryption_data.content_encryption_IV,
                                       entity['PartitionKey'], entity['RowKey'],
                                       '_ClientEncryptionMetadata2', isJavaV1)
    cipher = _generate_AES_CBC_cipher(content_encryption_key, metadataIV)
    decryptor = cipher.decryptor()
    encrypted_properties_list = decryptor.update(encrypted_properties_list) + decryptor.finalize()
    # Remove PKCS7 padding before decoding.
    unpadder = PKCS7(128).unpadder()
    encrypted_properties_list = unpadder.update(encrypted_properties_list) + unpadder.finalize()
    encrypted_properties_list = encrypted_properties_list.decode('utf-8')
    if isJavaV1:
        # Java v1 stored "[a, b, c]"; strip brackets and split manually.
        encrypted_properties_list = encrypted_properties_list[1:-1]
        encrypted_properties_list = encrypted_properties_list.split(', ')
    else:
        encrypted_properties_list = loads(encrypted_properties_list)
    return (encryption_data.content_encryption_IV, encrypted_properties_list,
            content_encryption_key, isJavaV1)
Extracts the encryption metadata from the given entity setting them to be utf - 8 strings . If no encryption metadata is present will return None for all return values unless require_encryption is true in which case the method will throw .
41,551
def logical_drives(self):
    """Gets the HPELogicalDriveCollection resource of this
    ArrayController.

    The collection URI is read from the Links/LogicalDrives attribute.
    """
    return logical_drive.HPELogicalDriveCollection(
        self._conn,
        utils.get_subresource_path_by(self, ['Links', 'LogicalDrives']),
        redfish_version=self.redfish_version)
Gets the resource HPELogicalDriveCollection of ArrayControllers
41,552
def physical_drives(self):
    """Gets the HPEPhysicalDriveCollection resource of this
    ArrayController.

    The collection URI is read from the Links/PhysicalDrives attribute.
    """
    return physical_drive.HPEPhysicalDriveCollection(
        self._conn,
        utils.get_subresource_path_by(self, ['Links', 'PhysicalDrives']),
        redfish_version=self.redfish_version)
Gets the resource HPEPhysicalDriveCollection of ArrayControllers
41,553
def physical_drives_maximum_size_mib(self):
    """Gets the size (MiB) of the biggest physical drive, taken over
    the physical-drive collections of all member array controllers."""
    return utils.max_safe([member.physical_drives.maximum_size_mib
                           for member in self.get_members()])
Gets the biggest disk
41,554
def array_controller_by_location(self, location):
    """Return the member array controller at *location*, or None."""
    matches = (member for member in self.get_members()
               if member.location == location)
    return next(matches, None)
Returns array controller instance by location
41,555
def array_controller_by_model(self, model):
    """Return the first member array controller matching *model*,
    or None when no member matches."""
    for candidate in self.get_members():
        if candidate.model != model:
            continue
        return candidate
    return None
Returns array controller instance by model
41,556
def get_subresource_path_by(resource, subresource_path):
    """Helper to find a subresource path in a resource's JSON body.

    *subresource_path* is a string or sequence of keys; the function
    walks into the JSON and returns the '@odata.id' found there.

    :raises ValueError: when the path is empty.
    :raises exception.MissingAttributeError: when the path or its
        '@odata.id' is absent.
    """
    if isinstance(subresource_path, six.string_types):
        subresource_path = [subresource_path]
    elif not subresource_path:
        raise ValueError('"subresource_path" cannot be empty')
    body = resource.json
    for key in subresource_path:
        body = body.get(key, {})
    if not body:
        raise exception.MissingAttributeError(
            attribute='/'.join(subresource_path), resource=resource.path)
    if '@odata.id' not in body:
        raise exception.MissingAttributeError(
            attribute='/'.join(subresource_path) + '/@odata.id',
            resource=resource.path)
    return body['@odata.id']
Helper function to find the resource path
41,557
def get_supported_boot_mode(supported_boot_mode):
    """Map a supported-boot-mode constant onto 'true'/'false' flags for
    legacy BIOS and UEFI, returned as a SupportedBootModes tuple."""
    bios = supported_boot_mode in (sys_cons.SUPPORTED_LEGACY_BIOS_ONLY,
                                   sys_cons.SUPPORTED_LEGACY_BIOS_AND_UEFI)
    uefi = supported_boot_mode in (sys_cons.SUPPORTED_UEFI_ONLY,
                                   sys_cons.SUPPORTED_LEGACY_BIOS_AND_UEFI)
    return SupportedBootModes(boot_mode_bios='true' if bios else 'false',
                              boot_mode_uefi='true' if uefi else 'false')
Return bios and uefi support .
41,558
def get_allowed_operations(resource, subresouce_path):
    """Helper function to get the HTTP allowed methods for a
    subresource.

    Issues a GET against the subresource URI and returns the raw
    'Allow' response header.
    """
    uri = get_subresource_path_by(resource, subresouce_path)
    response = resource._conn.get(path=uri)
    return response.headers['Allow']
Helper function to get the HTTP allowed methods .
41,559
def _op(self, method, path='', data=None, headers=None):
    """Perform an HTTP request, manually replaying a 308 redirect.

    The first request disables automatic redirects; when the server
    answers 308 (Permanent Redirect) the request is re-issued once
    against the path from the 'Location' header.
    """
    response = super(HPEConnector, self)._op(
        method, path, data, headers, allow_redirects=False)
    if response.status_code == 308:
        redirected_path = urlparse(response.headers['Location']).path
        response = super(HPEConnector, self)._op(
            method, redirected_path, data, headers)
    return response
Overrides the base method to support retrying the operation .
41,560
def generate_file_shared_access_signature(self, share_name,
                                          directory_name=None,
                                          file_name=None,
                                          permission=None,
                                          expiry=None,
                                          start=None,
                                          id=None,
                                          ip=None,
                                          protocol=None,
                                          cache_control=None,
                                          content_disposition=None,
                                          content_encoding=None,
                                          content_language=None,
                                          content_type=None):
    """Generate a shared access signature for a file.

    Use the returned signature with the sas_token parameter of
    FileService.

    :returns: the SAS token string produced by
        SharedAccessSignature.generate_file
    """
    # Fail fast on anything required to sign the request.
    for name, value in (('share_name', share_name),
                        ('file_name', file_name),
                        ('self.account_name', self.account_name),
                        ('self.account_key', self.account_key)):
        _validate_not_none(name, value)

    signer = SharedAccessSignature(self.account_name, self.account_key)
    return signer.generate_file(
        share_name,
        directory_name,
        file_name,
        permission,
        expiry,
        start=start,
        id=id,
        ip=ip,
        protocol=protocol,
        cache_control=cache_control,
        content_disposition=content_disposition,
        content_encoding=content_encoding,
        content_language=content_language,
        content_type=content_type,
    )
Generates a shared access signature for the file . Use the returned signature with the sas_token parameter of FileService .
41,561
def get_share_properties(self, share_name, timeout=None):
    """Return system properties and user metadata for a share.

    The returned data does not include the share's list of files or
    directories.

    :param str share_name: name of the existing share
    :param int timeout: server-side timeout in seconds
    :returns: parsed Share object
    """
    _validate_not_none('share_name', share_name)
    request = HTTPRequest()
    request.method = 'GET'
    request.host = self._get_host()
    request.path = _get_path(share_name)
    request.query = [
        ('restype', 'share'),
        ('timeout', _int_to_str(timeout)),
    ]
    response = self._perform_request(request)
    return _parse_share(share_name, response)
Returns all user - defined metadata and system properties for the specified share . The data returned does not include the share's list of files or directories .
41,562
def get_share_metadata(self, share_name, timeout=None):
    """Return all user-defined metadata for the specified share.

    :param str share_name: name of the existing share
    :param int timeout: server-side timeout in seconds
    :returns: dict of the share's metadata
    """
    _validate_not_none('share_name', share_name)
    request = HTTPRequest()
    request.method = 'GET'
    request.host = self._get_host()
    request.path = _get_path(share_name)
    request.query = [
        ('restype', 'share'),
        ('comp', 'metadata'),
        ('timeout', _int_to_str(timeout)),
    ]
    return _parse_metadata(self._perform_request(request))
Returns all user - defined metadata for the specified share .
41,563
def set_file_properties(self, share_name, directory_name, file_name,
                        content_settings, timeout=None):
    """Set system properties on the file.

    If one property is set in *content_settings*, all properties will
    be overridden on the service side.

    :param str share_name: name of the existing share
    :param str directory_name: directory path, or None for share root
    :param str file_name: name of the existing file
    :param content_settings: ContentSettings whose values become the
        file's properties
    :param int timeout: server-side timeout in seconds
    """
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('content_settings', content_settings)
    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = [
        ('comp', 'properties'),
        ('timeout', _int_to_str(timeout)),
    ]
    # Fix: the original assigned request.headers = None and then
    # immediately overwrote it; the dead assignment is removed.
    request.headers = content_settings._to_headers()
    self._perform_request(request)
Sets system properties on the file . If one property is set for the content_settings all properties will be overridden .
41,564
def copy_file(self, share_name, directory_name, file_name, copy_source,
              metadata=None, timeout=None):
    """Copy a blob or file to a destination file in this account.

    :param copy_source: URL of the source blob or file
    :param metadata: optional name/value metadata for the destination
    :returns: copy properties of the started copy operation
    """
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('copy_source', copy_source)

    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = [('timeout', _int_to_str(timeout))]
    request.headers = [
        ('x-ms-copy-source', _to_str(copy_source)),
        ('x-ms-meta-name-values', metadata),
    ]
    response = self._perform_request(request)
    return _parse_properties(response, FileProperties).copy
Copies a blob or file to a destination file within the storage account .
41,565
def create_file_from_bytes(self, share_name, directory_name, file_name,
                           file, index=0, count=None,
                           content_settings=None, metadata=None,
                           progress_callback=None, max_connections=1,
                           max_retries=5, retry_wait=1.0, timeout=None):
    """Create or update a file from an array of bytes.

    Chunking and progress notifications are handled by
    create_file_from_stream, which this method delegates to.

    :param bytes file: source byte content
    :param int index: offset into *file* to start uploading from
    :param int count: number of bytes to upload; defaults to the rest
        of the array after *index*
    :raises TypeError: if *index* is negative
    """
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('file', file)
    _validate_type_bytes('file', file)
    if index < 0:
        raise TypeError(_ERROR_VALUE_NEGATIVE.format('index'))
    # A missing or negative count means "everything after index".
    if count is None or count < 0:
        count = len(file) - index

    stream = BytesIO(file)
    stream.seek(index)
    self.create_file_from_stream(
        share_name, directory_name, file_name, stream, count,
        content_settings, metadata, progress_callback,
        max_connections, max_retries, retry_wait, timeout)
Creates a new file from an array of bytes or updates the content of an existing file with automatic chunking and progress notifications .
41,566
def list_ranges(self, share_name, directory_name, file_name,
                start_range=None, end_range=None, timeout=None):
    """Retrieve the valid ranges for a file.

    :param start_range: optional start byte; when given, range headers
        are added to restrict the listing
    :returns: parsed list of ranges
    """
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    request = HTTPRequest()
    request.method = 'GET'
    request.host = self._get_host()
    request.path = _get_path(share_name, directory_name, file_name)
    request.query = [
        ('comp', 'rangelist'),
        ('timeout', _int_to_str(timeout)),
    ]
    if start_range is not None:
        _validate_and_format_range_headers(
            request, start_range, end_range,
            start_range_required=False, end_range_required=False)
    return _convert_xml_to_ranges(self._perform_request(request))
Retrieves the valid ranges for a file .
41,567
def _get_sushy_system(self, system_id):
    """Return the sushy System object for *system_id*.

    :raises exception.IloError: if the system cannot be retrieved
    """
    system_url = parse.urljoin(
        self._sushy.get_system_collection_path(), system_id)
    try:
        return self._sushy.get_system(system_url)
    except sushy.exceptions.SushyError as e:
        msg = (self._('The Redfish System "%(system)s" was not found. '
                      'Error %(error)s')
               % {'system': system_id, 'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)
Get the sushy system for system_id
41,568
def _get_sushy_manager(self, manager_id):
    """Return the sushy Manager object for *manager_id*.

    :raises exception.IloError: if the manager cannot be retrieved
    """
    manager_url = parse.urljoin(
        self._sushy.get_manager_collection_path(), manager_id)
    try:
        return self._sushy.get_manager(manager_url)
    except sushy.exceptions.SushyError as e:
        msg = (self._('The Redfish Manager "%(manager)s" was not found. '
                      'Error %(error)s')
               % {'manager': manager_id, 'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)
Get the sushy Manager for manager_id
41,569
def get_host_power_status(self):
    """Return the mapped power state of the server."""
    system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    return GET_POWER_STATE_MAP.get(system.power_state)
Request the power state of the server .
41,570
def reset_server(self):
    """Force-restart the server.

    :raises exception.IloError: if the Redfish reset call fails
    """
    system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    try:
        system.reset_system(sushy.RESET_FORCE_RESTART)
    except sushy.exceptions.SushyError as e:
        msg = (self._('The Redfish controller failed to reset server. '
                      'Error %(error)s') % {'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)
Resets the server .
41,571
def set_host_power(self, target_value):
    """Set the power state of the server.

    :param target_value: desired power state; must be a key of
        POWER_RESET_MAP
    :raises exception.InvalidInputError: for an unknown target_value
    :raises exception.IloError: if the Redfish reset call fails
    """
    if target_value not in POWER_RESET_MAP:
        msg = ('The parameter "%(parameter)s" value "%(target_value)s" is '
               'invalid. Valid values are: %(valid_power_values)s' %
               {'parameter': 'target_value',
                'target_value': target_value,
                'valid_power_values': POWER_RESET_MAP.keys()})
        raise exception.InvalidInputError(msg)

    # Avoid a redundant reset when already in the requested state.
    if self.get_host_power_status() == target_value:
        LOG.debug(self._("Node is already in '%(target_value)s' power "
                         "state."), {'target_value': target_value})
        return

    system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    try:
        system.reset_system(POWER_RESET_MAP[target_value])
    except sushy.exceptions.SushyError as e:
        msg = (self._('The Redfish controller failed to set power state '
                      'of server to %(target_value)s. Error %(error)s')
               % {'target_value': target_value, 'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)
Sets the power state of the system .
41,572
def _validate_virtual_media(self, device):
    """Raise when *device* is not a supported virtual-media device.

    :raises exception.IloInvalidInputError: for an unknown device
    """
    if device not in VIRTUAL_MEDIA_MAP:
        msg = (self._("Invalid device '%s'. Valid devices: FLOPPY or "
                      "CDROM.") % device)
        LOG.debug(msg)
        raise exception.IloInvalidInputError(msg)
Check if the device is valid device .
41,573
def _is_boot_mode_uefi(self):
    """Return True when the current boot mode is UEFI."""
    uefi_mode = BOOT_MODE_MAP.get(sys_cons.BIOS_BOOT_MODE_UEFI)
    return self.get_current_boot_mode() == uefi_mode
Checks if the system is in uefi boot mode .
41,574
def get_supported_boot_mode(self):
    """Return the system's supported boot-mode constant.

    :raises exception.IloError: on a Redfish failure
    """
    system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    try:
        return SUPPORTED_BOOT_MODE_MAP.get(system.supported_boot_mode)
    except sushy.exceptions.SushyError as e:
        msg = (self._('The Redfish controller failed to get the '
                      'supported boot modes. Error: %s') % e)
        LOG.debug(msg)
        raise exception.IloError(msg)
Get the system supported boot modes .
41,575
def get_server_capabilities(self):
    """Build and return the server-capabilities dictionary.

    Collects firmware versions, GPU count, boot modes, BIOS feature
    flags, storage traits and memory (NVDIMM) traits from the Redfish
    System and Manager resources.

    :returns: dict mapping capability names to string values
    :raises exception.IloError: when any Redfish resource read fails
    """
    capabilities = {}
    sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    sushy_manager = self._get_sushy_manager(PROLIANT_MANAGER_ID)
    try:
        count = len(sushy_system.pci_devices.gpu_devices)
        boot_mode = rf_utils.get_supported_boot_mode(
            sushy_system.supported_boot_mode)
        # Unconditional capabilities: always reported.
        capabilities.update(
            {'pci_gpu_devices': count,
             'ilo_firmware_version': sushy_manager.firmware_version,
             'rom_firmware_version': sushy_system.rom_version,
             'server_model': sushy_system.model,
             'nic_capacity': sushy_system.pci_devices.max_nic_capacity,
             'boot_mode_bios': boot_mode.boot_mode_bios,
             'boot_mode_uefi': boot_mode.boot_mode_uefi})
        tpm_state = sushy_system.bios_settings.tpm_state
        # Conditional capabilities: each (key, bool) pair is added as
        # key -> 'true' only when the boolean is truthy.
        all_key_to_value_expression_tuples = [
            ('sriov_enabled',
             sushy_system.bios_settings.sriov == sys_cons.SRIOV_ENABLED),
            ('cpu_vt',
             sushy_system.bios_settings.cpu_vt == (
                 sys_cons.CPUVT_ENABLED)),
            # Trusted boot requires a TPM to be present, whether its
            # current state is enabled or disabled.
            ('trusted_boot',
             (tpm_state == sys_cons.TPM_PRESENT_ENABLED
              or tpm_state == sys_cons.TPM_PRESENT_DISABLED)),
            ('secure_boot', self._has_secure_boot()),
            ('iscsi_boot',
             (sushy_system.bios_settings.iscsi_resource
              .is_iscsi_boot_supported())),
            ('hardware_supports_raid',
             len(sushy_system.smart_storage.array_controllers
                 .members_identities) > 0),
            ('has_ssd', common_storage.has_ssd(sushy_system)),
            ('has_rotational',
             common_storage.has_rotational(sushy_system)),
            ('has_nvme_ssd', common_storage.has_nvme_ssd(sushy_system))
        ]
        # One capability per configured logical RAID level.
        all_key_to_value_expression_tuples += (
            [('logical_raid_level_' + x, True)
             for x in sushy_system.smart_storage.logical_raid_levels])
        # One capability per distinct rotational drive speed.
        all_key_to_value_expression_tuples += (
            [('drive_rotational_' + str(x) + '_rpm', True)
             for x in common_storage.get_drive_rotational_speed_rpm(
                 sushy_system)])
        capabilities.update(
            {key: 'true'
             for (key, value) in all_key_to_value_expression_tuples
             if value})
        # Memory traits are only reported when NVDIMM-N is present.
        memory_data = sushy_system.memory.details()
        if memory_data.has_nvdimm_n:
            capabilities.update(
                {'persistent_memory': (
                    json.dumps(memory_data.has_persistent_memory)),
                 'nvdimm_n': (
                     json.dumps(memory_data.has_nvdimm_n)),
                 'logical_nvdimm_n': (
                     json.dumps(memory_data.has_logical_nvdimm_n))})
    except sushy.exceptions.SushyError as e:
        msg = (self._("The Redfish controller is unable to get "
                      "resource or its members. Error "
                      "%(error)s)") % {'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)
    return capabilities
Returns the server capabilities
41,576
def get_essential_properties(self):
    """Return essential scheduling properties and MAC addresses.

    :returns: dict with 'properties' (memory_mb, cpus, cpu_arch,
        local_gb) and 'macs' keys
    :raises exception.IloError: on a Redfish failure
    """
    system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    try:
        properties = {
            'memory_mb': system.memory_summary.size_gib * 1024,
            'cpus': system.processors.summary.count,
            'cpu_arch': sushy_map.PROCESSOR_ARCH_VALUE_MAP_REV.get(
                system.processors.summary.architecture),
            'local_gb': common_storage.get_local_gb(system),
        }
        return {'properties': properties,
                'macs': system.ethernet_interfaces.summary}
    except sushy.exceptions.SushyError as e:
        msg = (self._('The Redfish controller failed to get the '
                      'resource data. Error %(error)s')
               % {'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)
Constructs the dictionary of essential properties
41,577
def _change_iscsi_target_settings(self, iscsi_info):
    """Apply iSCSI target settings to every bootable NIC.

    :param iscsi_info: dict of iSCSI settings applied per NIC
    :raises exception.IloError: if BIOS mappings cannot be read, no
        bootable NICs are found, or the settings update fails
    """
    sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    try:
        pci_settings_map = (
            sushy_system.bios_settings.bios_mappings
            .pci_settings_mappings)
        nics = []
        for mapping in pci_settings_map:
            for subinstance in mapping['Subinstances']:
                for association in subinstance['Associations']:
                    # Only 'NicBoot*' associations are bootable NICs.
                    if 'NicBoot' in association:
                        nics.append(association)
    except sushy.exceptions.SushyError as e:
        msg = (self._('The Redfish controller failed to get the '
                      'bios mappings. Error %(error)s')
               % {'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)

    if not nics:
        msg = ('No nics were found on the system')
        raise exception.IloError(msg)

    iscsi_infos = []
    # Fix: enumerate gives every attempt a unique, 1-based instance
    # number. The original used nics.index(nic) + 1, which returns the
    # FIRST occurrence and therefore duplicates instance numbers when
    # NIC association names repeat.
    for instance, nic in enumerate(nics, start=1):
        data = iscsi_info.copy()
        data['iSCSIAttemptName'] = nic
        data['iSCSINicSource'] = nic
        data['iSCSIAttemptInstance'] = instance
        iscsi_infos.append(data)

    iscsi_data = {'iSCSISources': iscsi_infos}
    try:
        (sushy_system.bios_settings.iscsi_resource.iscsi_settings
         .update_iscsi_settings(iscsi_data))
    except sushy.exceptions.SushyError as e:
        msg = (self._("The Redfish controller is failed to update iSCSI "
                      "settings. Error %(error)s")
               % {'error': str(e)})
        LOG.debug(msg)
        raise exception.IloError(msg)
Change iSCSI target settings .
41,578
def set_iscsi_info(self, target_name, lun, ip_address, port='3260',
                   auth_method=None, username=None, password=None):
    """Set iSCSI target details of the system (UEFI boot mode only).

    :param port: TCP port as a string, converted to int before use
    :raises exception.IloCommandNotSupportedInBiosError: when the
        system is in legacy BIOS boot mode
    """
    if not self._is_boot_mode_uefi():
        msg = 'iSCSI boot is not supported in the BIOS boot mode'
        raise exception.IloCommandNotSupportedInBiosError(msg)

    iscsi_info = {
        'iSCSITargetName': target_name,
        'iSCSILUN': lun,
        'iSCSITargetIpAddress': ip_address,
        'iSCSITargetTcpPort': int(port),
        'iSCSITargetInfoViaDHCP': False,
        'iSCSIConnection': 'Enabled',
    }
    if auth_method == 'CHAP':
        iscsi_info['iSCSIAuthenticationMethod'] = 'Chap'
        iscsi_info['iSCSIChapUsername'] = username
        iscsi_info['iSCSIChapSecret'] = password
    self._change_iscsi_target_settings(iscsi_info)
Set iSCSI details of the system in UEFI boot mode .
41,579
def get_host_post_state(self):
    """Return the mapped POST state of the server."""
    system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
    return GET_POST_STATE_MAP.get(system.post_state)
Get the current state of system POST .
41,580
def _get_collection(self, collection_uri, request_headers=None):
    """Generator that yields members of a REST collection.

    Yields (status, headers, member, member_uri) tuples for every
    member, following 'NextPage' pagination links until exhausted.

    :param collection_uri: URI of the collection resource
    :param request_headers: unused here; kept for interface parity
    :raises exception.IloError: on a failed GET or a non-collection
        resource type
    """
    status, headers, thecollection = self._rest_get(collection_uri)
    if status != 200:
        msg = self._get_extended_error(thecollection)
        raise exception.IloError(msg)
    while status < 300:
        ctype = self._get_type(thecollection)
        if (ctype not in ['Collection.0', 'Collection.1']):
            raise exception.IloError("collection not found")
        # Inline members: the collection embeds each item directly.
        if 'Items' in thecollection:
            for item in thecollection['Items']:
                memberuri = None
                if 'links' in item and 'self' in item['links']:
                    memberuri = item['links']['self']['href']
                yield 200, None, item, memberuri
        # Linked members: each member must be fetched individually.
        elif ('links' in thecollection
              and 'Member' in thecollection['links']):
            for memberuri in thecollection['links']['Member']:
                status, headers, member = self._rest_get(
                    memberuri['href'])
                yield status, headers, member, memberuri['href']
        # Follow pagination, or stop when there is no next page.
        if ('links' in thecollection
                and 'NextPage' in thecollection['links']):
            next_link_uri = (
                collection_uri + '?page=' + str(
                    thecollection['links']['NextPage']['page']))
            status, headers, thecollection = self._rest_get(
                next_link_uri)
        else:
            break
Generator function that returns collection members .
41,581
def _get_type ( self , obj ) : typever = obj [ 'Type' ] typesplit = typever . split ( '.' ) return typesplit [ 0 ] + '.' + typesplit [ 1 ]
Return the type of an object .
41,582
def _render_extended_error_message_list(self, extended_error):
    """Parse an ExtendedError object and return readable messages.

    Each message ID is looked up in the loaded message registries;
    when found, its '%1', '%2', ... placeholders are substituted with
    the message arguments and an optional Resolution is appended.
    Unknown IDs are returned verbatim.

    :param extended_error: decoded ExtendedError JSON dict
    :returns: list of rendered message strings (empty when the input
        is not an ExtendedError dict)
    """
    messages = []
    if isinstance(extended_error, dict):
        if ('Type' in extended_error
                and extended_error['Type'].startswith(
                    'ExtendedError.')):
            for msg in extended_error['Messages']:
                message_id = msg['MessageID']
                x = message_id.split('.')
                # MessageID looks like '<Registry>.<ver...>.<Key>'.
                registry = x[0]
                msgkey = x[len(x) - 1]
                if (registry in self.message_registries
                        and msgkey in
                        self.message_registries[registry]['Messages']):
                    rmsgs = self.message_registries[registry]['Messages']
                    msg_dict = rmsgs[msgkey]
                    msg_str = message_id + ': ' + msg_dict['Message']
                    # Substitute positional placeholders %1..%N.
                    for argn in range(0, msg_dict['NumberOfArgs']):
                        subst = '%' + str(argn + 1)
                        m = str(msg['MessageArgs'][argn])
                        msg_str = msg_str.replace(subst, m)
                    if ('Resolution' in msg_dict
                            and msg_dict['Resolution'] != 'None'):
                        msg_str += ' ' + msg_dict['Resolution']
                    messages.append(msg_str)
                else:
                    # Registry/key unknown: fall back to the raw ID.
                    messages.append(str(message_id))
    return messages
Parse the ExtendedError object and returns the message .
41,583
def _get_host_details(self):
    """Return the ComputerSystem details of the host.

    :raises exception.IloError: on a failed GET or an unexpected
        resource type
    """
    status, headers, system = self._rest_get('/rest/v1/Systems/1')
    if status >= 300:
        msg = self._get_extended_error(system)
        raise exception.IloError(msg)
    stype = self._get_type(system)
    if stype not in ['ComputerSystem.0', 'ComputerSystem.1']:
        msg = "%s is not a valid system type " % stype
        raise exception.IloError(msg)
    return system
Get the system details .
41,584
def _check_bios_resource(self, properties=None):
    """Check that the BIOS resource exists and supports *properties*.

    :param properties: optional iterable of BIOS property names that
        must all be present in the BIOS settings. Fix: the original
        used a mutable default argument ([]); replaced with None.
    :returns: tuple (headers, bios_uri, bios_settings)
    :raises exception.IloError: on a failed GET of the BIOS resource
    :raises exception.IloCommandNotSupportedError: if the BIOS link or
        a requested property is missing
    """
    if properties is None:
        properties = []
    system = self._get_host_details()
    if ('links' in system['Oem']['Hp']
            and 'BIOS' in system['Oem']['Hp']['links']):
        # Get the BIOS URI and the current settings.
        bios_uri = system['Oem']['Hp']['links']['BIOS']['href']
        status, headers, bios_settings = self._rest_get(bios_uri)
        if status >= 300:
            msg = self._get_extended_error(bios_settings)
            raise exception.IloError(msg)
        # Ensure every requested property is exposed by this system.
        for property in properties:
            if property not in bios_settings:
                msg = ('BIOS Property "' + property + '" is not'
                       ' supported on this system.')
                raise exception.IloCommandNotSupportedError(msg)
        return headers, bios_uri, bios_settings
    else:
        msg = ('"links/BIOS" section in ComputerSystem/Oem/Hp'
               ' does not exist')
        raise exception.IloCommandNotSupportedError(msg)
Check if the bios resource exists .
41,585
def _get_pci_devices(self):
    """Return the PCIDevices resource of the host.

    :raises exception.IloError: on a failed GET
    :raises exception.IloCommandNotSupportedError: if the PCIDevices
        link is absent
    """
    system = self._get_host_details()
    oem_links = system['Oem']['Hp'].get('links', {})
    if 'PCIDevices' not in oem_links:
        msg = ('links/PCIDevices section in ComputerSystem/Oem/Hp'
               ' does not exist')
        raise exception.IloCommandNotSupportedError(msg)
    pci_uri = oem_links['PCIDevices']['href']
    status, headers, pci_device_list = self._rest_get(pci_uri)
    if status >= 300:
        msg = self._get_extended_error(pci_device_list)
        raise exception.IloError(msg)
    return pci_device_list
Gets the PCI devices .
41,586
def _get_gpu_pci_devices(self):
    """Return PCI devices whose class/subclass codes identify a GPU."""
    pci_device_list = self._get_pci_devices()
    return [item for item in pci_device_list['Items']
            if item['ClassCode'] in CLASSCODE_FOR_GPU_DEVICES
            and item['SubclassCode'] in SUBCLASSCODE_FOR_GPU_DEVICES]
Returns the list of gpu devices .
41,587
def _get_storage_resource(self):
    """Return (headers, uri, settings) of the SmartStorage resource.

    :raises exception.IloError: on a failed GET
    :raises exception.IloCommandNotSupportedError: if the SmartStorage
        link is absent
    """
    system = self._get_host_details()
    oem_links = system['Oem']['Hp'].get('links', {})
    if 'SmartStorage' not in oem_links:
        msg = ('"links/SmartStorage" section in ComputerSystem/Oem/Hp'
               ' does not exist')
        raise exception.IloCommandNotSupportedError(msg)
    storage_uri = oem_links['SmartStorage']['href']
    status, headers, storage_settings = self._rest_get(storage_uri)
    if status >= 300:
        msg = self._get_extended_error(storage_settings)
        raise exception.IloError(msg)
    return headers, storage_uri, storage_settings
Gets the SmartStorage resource if exists .
41,588
def _get_array_controller_resource(self):
    """Return (headers, uri, settings) of the ArrayControllers resource.

    :raises exception.IloError: on a failed GET
    :raises exception.IloCommandNotSupportedError: if the
        ArrayControllers link is absent
    """
    headers, storage_uri, storage_settings = self._get_storage_resource()
    links = storage_settings.get('links', {})
    if 'ArrayControllers' not in links:
        msg = ('"links/ArrayControllers" section in SmartStorage'
               ' does not exist')
        raise exception.IloCommandNotSupportedError(msg)
    array_uri = links['ArrayControllers']['href']
    status, headers, array_settings = self._rest_get(array_uri)
    if status >= 300:
        msg = self._get_extended_error(array_settings)
        raise exception.IloError(msg)
    return headers, array_uri, array_settings
Gets the ArrayController resource if exists .
41,589
def _create_list_of_array_controllers ( self ) : headers , array_uri , array_settings = ( self . _get_array_controller_resource ( ) ) array_uri_links = [ ] if ( 'links' in array_settings and 'Member' in array_settings [ 'links' ] ) : array_uri_links = array_settings [ 'links' ] [ 'Member' ] else : msg = ( '"links/Member" section in ArrayControllers' ' does not exist' ) raise exception . IloCommandNotSupportedError ( msg ) return array_uri_links
Creates the list of Array Controller URIs .
41,590
def _get_drive_type_and_speed ( self ) : disk_details = self . _get_physical_drive_resource ( ) drive_hdd = False drive_ssd = False drive_details = { } speed_const_list = [ 4800 , 5400 , 7200 , 10000 , 15000 ] if disk_details : for item in disk_details : value = item [ 'MediaType' ] if value == "HDD" : drive_hdd = True speed = item [ 'RotationalSpeedRpm' ] if speed in speed_const_list : var = 'rotational_drive_' + str ( speed ) + '_rpm' drive_details . update ( { var : 'true' } ) else : drive_ssd = True if drive_hdd : drive_details . update ( { 'has_rotational' : 'true' } ) if drive_ssd : drive_details . update ( { 'has_ssd' : 'true' } ) return drive_details if len ( drive_details . keys ( ) ) > 0 else None
Gets the disk drive type .
41,591
def _get_drive_resource(self, drive_name):
    """Get the disk-drive details of the given type, if any exist.

    :param drive_name: drive link name, e.g. 'PhysicalDrives' or
        'LogicalDrives'
    :returns: list of drive-detail dicts, or None when none found
    :raises exception.IloCommandNotSupportedError: if an expected
        'links' section is missing
    """
    disk_details_list = []
    array_uri_links = self._create_list_of_array_controllers()
    for array_link in array_uri_links:
        _, _, member_settings = (
            self._rest_get(array_link['href']))
        if ('links' in member_settings
                and drive_name in member_settings['links']):
            disk_uri = member_settings['links'][drive_name]['href']
            headers, disk_member_uri, disk_mem = (
                self._rest_get(disk_uri))
            if ('links' in disk_mem
                    and 'Member' in disk_mem['links']):
                for disk_link in disk_mem['links']['Member']:
                    diskdrive_uri = disk_link['href']
                    _, _, disk_details = (
                        self._rest_get(diskdrive_uri))
                    disk_details_list.append(disk_details)
            else:
                # Fix: the original built a (format, arg) tuple instead
                # of interpolating drive_name into the message string.
                msg = ('"links/Member" section in %s'
                       ' does not exist' % drive_name)
                raise exception.IloCommandNotSupportedError(msg)
        else:
            # Fix: same tuple-instead-of-interpolation defect here.
            msg = ('"links/%s" section in '
                   ' ArrayController/links/Member does not exist'
                   % drive_name)
            raise exception.IloCommandNotSupportedError(msg)
    if disk_details_list:
        return disk_details_list
Gets the DiskDrive resource if exists .
41,592
def _get_logical_raid_levels ( self ) : logical_drive_details = self . _get_logical_drive_resource ( ) raid_level = { } if logical_drive_details : for item in logical_drive_details : if 'Raid' in item : raid_level_var = "logical_raid_level_" + item [ 'Raid' ] raid_level . update ( { raid_level_var : 'true' } ) return raid_level if len ( raid_level . keys ( ) ) > 0 else None
Gets the different raid levels configured on a server .
41,593
def _is_raid_supported ( self ) : header , uri , array_resource = self . _get_array_controller_resource ( ) return True if array_resource [ 'Total' ] > 0 else False
Get the RAID support on the server .
41,594
def _get_bios_settings_resource(self, data):
    """Return (headers, uri, settings) of the BIOS Settings resource.

    :param data: BIOS resource body containing a links/Settings href
    :raises exception.IloError: if the link is missing or the GET fails
    """
    try:
        bios_settings_uri = data['links']['Settings']['href']
    except KeyError:
        raise exception.IloError('BIOS Settings resource not found.')
    status, headers, bios_settings = self._rest_get(bios_settings_uri)
    if status != 200:
        raise exception.IloError(
            self._get_extended_error(bios_settings))
    return headers, bios_settings_uri, bios_settings
Get the BIOS settings resource .
41,595
def _validate_if_patch_supported(self, headers, uri):
    """Raise IloError when PATCH is not allowed on the resource *uri*."""
    if not self._operation_allowed(headers, 'PATCH'):
        raise exception.IloError(
            'PATCH Operation not supported on the resource '
            '"%s"' % uri)
Check if the PATCH Operation is allowed on the resource .
41,596
def _get_bios_setting ( self , bios_property ) : headers , bios_uri , bios_settings = self . _check_bios_resource ( [ bios_property ] ) return bios_settings [ bios_property ]
Retrieves bios settings of the server .
41,597
def _get_bios_hash_password ( self , bios_password ) : request_headers = { } if bios_password : bios_password_hash = hashlib . sha256 ( ( bios_password . encode ( ) ) . hexdigest ( ) . upper ( ) ) request_headers [ 'X-HPRESTFULAPI-AuthToken' ] = bios_password_hash return request_headers
Get the hashed BIOS password .
41,598
def _change_bios_setting(self, properties):
    """Change the given BIOS properties to the specified values.

    :param properties: dict of BIOS property name -> new value
    :raises exception.IloError: if the PATCH fails
    :raises exception.IloCommandNotSupportedError: if a property or
        required resource is unsupported on this system
    """
    keys = properties.keys()
    # Verify the BIOS resource exists and exposes every property.
    headers, bios_uri, settings = self._check_bios_resource(keys)
    # If the BIOS resource itself is not PATCHable, fall back to its
    # pending-settings resource and confirm that one accepts PATCH.
    if not self._operation_allowed(headers, 'PATCH'):
        headers, bios_uri, _ = self._get_bios_settings_resource(settings)
        self._validate_if_patch_supported(headers, bios_uri)
    # The BIOS admin password (if set) must be sent as a hashed token.
    request_headers = self._get_bios_hash_password(self.bios_password)
    status, headers, response = self._rest_patch(bios_uri,
                                                 request_headers,
                                                 properties)
    if status >= 300:
        msg = self._get_extended_error(response)
        raise exception.IloError(msg)
Change the bios settings to specified values .
41,599
def _get_iscsi_settings_resource(self, data):
    """Return (headers, uri, settings) of the iSCSI Settings resource.

    :param data: iSCSI resource body containing a links/Settings href
    :raises exception.IloCommandNotSupportedError: if the link is
        missing
    :raises exception.IloError: if the GET fails
    """
    try:
        iscsi_settings_uri = data['links']['Settings']['href']
    except KeyError:
        raise exception.IloCommandNotSupportedError(
            'iscsi settings resource not found.')
    status, headers, iscsi_settings = self._rest_get(iscsi_settings_uri)
    if status != 200:
        raise exception.IloError(
            self._get_extended_error(iscsi_settings))
    return headers, iscsi_settings_uri, iscsi_settings
Get the iSCSI settings resource .