idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
31,600
def _initial_estimate ( self , y , modelmat ) : if isinstance ( self , LinearGAM ) : n , m = modelmat . shape return np . ones ( m ) * np . sqrt ( EPS ) y = deepcopy ( y ) . astype ( 'float64' ) y [ y == 0 ] += .01 y [ y == 1 ] -= .01 y_ = self . link . link ( y , self . distribution ) y_ = make_2d ( y_ , verbose = Fal...
Makes an initial estimate for the model coefficients .
31,601
def _on_loop_start ( self , variables ) : for callback in self . callbacks : if hasattr ( callback , 'on_loop_start' ) : self . logs_ [ str ( callback ) ] . append ( callback . on_loop_start ( ** variables ) )
performs on - loop - start actions like callbacks
31,602
def _on_loop_end ( self , variables ) : for callback in self . callbacks : if hasattr ( callback , 'on_loop_end' ) : self . logs_ [ str ( callback ) ] . append ( callback . on_loop_end ( ** variables ) )
performs on - loop - end actions like callbacks
31,603
def deviance_residuals ( self , X , y , weights = None , scaled = False ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) y = check_y ( y , self . link , self . distribution , verbose = self . verbose ) X = check_X ( X , n_feats = self . statistics_ [ 'm_features' ] , ed...
method to compute the deviance residuals of the model
31,604
def _estimate_model_statistics ( self , y , modelmat , inner = None , BW = None , B = None , weights = None , U1 = None ) : lp = self . _linear_predictor ( modelmat = modelmat ) mu = self . link . mu ( lp , self . distribution ) self . statistics_ [ 'edof_per_coef' ] = np . diagonal ( U1 . dot ( U1 . T ) ) self . stati...
method to compute all of the model statistics
31,605
def _estimate_AIC ( self , y , mu , weights = None ) : estimated_scale = not ( self . distribution . _known_scale ) return - 2 * self . _loglikelihood ( y = y , mu = mu , weights = weights ) + 2 * self . statistics_ [ 'edof' ] + 2 * estimated_scale
estimate the Akaike Information Criterion
31,606
def _estimate_AICc ( self , y , mu , weights = None ) : edof = self . statistics_ [ 'edof' ] if self . statistics_ [ 'AIC' ] is None : self . statistics_ [ 'AIC' ] = self . _estimate_AIC ( y , mu , weights ) return self . statistics_ [ 'AIC' ] + 2 * ( edof + 1 ) * ( edof + 2 ) / ( y . shape [ 0 ] - edof - 2 )
estimate the corrected Akaike Information Criterion
31,607
def _estimate_r2 ( self , X = None , y = None , mu = None , weights = None ) : if mu is None : mu = self . predict_mu ( X = X ) if weights is None : weights = np . ones_like ( y ) . astype ( 'float64' ) null_mu = y . mean ( ) * np . ones_like ( y ) . astype ( 'float64' ) null_d = self . distribution . deviance ( y = y ...
estimate some pseudo R^2 values
31,608
def _estimate_GCV_UBRE ( self , X = None , y = None , modelmat = None , gamma = 1.4 , add_scale = True , weights = None ) : if gamma < 1 : raise ValueError ( 'gamma scaling should be greater than 1, ' 'but found gamma = {}' , format ( gamma ) ) if modelmat is None : modelmat = self . _modelmat ( X ) if weights is None ...
Generalized Cross Validation and Un - Biased Risk Estimator .
31,609
def _estimate_p_values ( self ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) p_values = [ ] for term_i in range ( len ( self . terms ) ) : p_values . append ( self . _compute_p_value ( term_i ) ) return p_values
estimate the p - values for all features
31,610
def _compute_p_value ( self , term_i ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) idxs = self . terms . get_coef_indices ( term_i ) cov = self . statistics_ [ 'cov' ] [ idxs ] [ : , idxs ] coef = self . coef_ [ idxs ] if isinstance ( self . terms [ term_i ] , Spline...
compute the p - value of the desired feature
31,611
def confidence_intervals ( self , X , width = .95 , quantiles = None ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) X = check_X ( X , n_feats = self . statistics_ [ 'm_features' ] , edge_knots = self . edge_knots_ , dtypes = self . dtype , features = self . feature , ...
estimate confidence intervals for the model .
31,612
def _get_quantiles ( self , X , width , quantiles , modelmat = None , lp = None , prediction = False , xform = True , term = - 1 ) : if quantiles is not None : quantiles = np . atleast_1d ( quantiles ) else : alpha = ( 1 - width ) / 2. quantiles = [ alpha , 1 - alpha ] for quantile in quantiles : if ( quantile >= 1 ) o...
estimate prediction intervals for LinearGAM
31,613
def _flatten_mesh ( self , Xs , term ) : n = Xs [ 0 ] . size if self . terms [ term ] . istensor : terms = self . terms [ term ] else : terms = [ self . terms [ term ] ] X = np . zeros ( ( n , self . statistics_ [ 'm_features' ] ) ) for term_ , x in zip ( terms , Xs ) : X [ : , term_ . feature ] = x . ravel ( ) return ...
flatten the mesh and distribute into a feature matrix
31,614
def generate_X_grid ( self , term , n = 100 , meshgrid = False ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) if self . terms [ term ] . isintercept : raise ValueError ( 'cannot create grid for intercept term' ) if self . terms [ term ] . istensor : Xs = [ ] for term_...
create a nice grid of X data
31,615
def partial_dependence ( self , term , X = None , width = None , quantiles = None , meshgrid = False ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) if not isinstance ( term , int ) : raise ValueError ( 'term must be an integer, but found term: {}' . format ( term ) ) ...
Computes the term functions for the GAM and possibly their confidence intervals .
31,616
def sample ( self , X , y , quantity = 'y' , sample_at_X = None , weights = None , n_draws = 100 , n_bootstraps = 5 , objective = 'auto' ) : if quantity not in { 'mu' , 'coef' , 'y' } : raise ValueError ( "`quantity` must be one of 'mu', 'coef', 'y';" " got {}" . format ( quantity ) ) coef_draws = self . _sample_coef (...
Simulate from the posterior of the coefficients and smoothing params .
31,617
def _sample_coef ( self , X , y , weights = None , n_draws = 100 , n_bootstraps = 1 , objective = 'auto' ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) if n_bootstraps < 1 : raise ValueError ( 'n_bootstraps must be >= 1;' ' got {}' . format ( n_bootstraps ) ) if n_dra...
Simulate from the posterior of the coefficients .
31,618
def _bootstrap_samples_of_smoothing ( self , X , y , weights = None , n_bootstraps = 1 , objective = 'auto' ) : mu = self . predict_mu ( X ) coef_bootstraps = [ self . coef_ ] cov_bootstraps = [ load_diagonal ( self . statistics_ [ 'cov' ] ) ] for _ in range ( n_bootstraps - 1 ) : y_bootstrap = self . distribution . sa...
Sample the smoothing parameters using simulated response data .
31,619
def _simulate_coef_from_bootstraps ( self , n_draws , coef_bootstraps , cov_bootstraps ) : random_bootstrap_indices = np . random . choice ( np . arange ( len ( coef_bootstraps ) ) , size = n_draws , replace = True ) bootstrap_index_to_draw_indices = defaultdict ( list ) for draw_index , bootstrap_index in enumerate ( ...
Simulate coefficients using bootstrap samples .
31,620
def accuracy ( self , X = None , y = None , mu = None ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) y = check_y ( y , self . link , self . distribution , verbose = self . verbose ) if X is not None : X = check_X ( X , n_feats = self . statistics_ [ 'm_features' ] , e...
computes the accuracy of the LogisticGAM
31,621
def _exposure_to_weights ( self , y , exposure = None , weights = None ) : y = y . ravel ( ) if exposure is not None : exposure = np . array ( exposure ) . astype ( 'f' ) . ravel ( ) exposure = check_array ( exposure , name = 'sample exposure' , ndim = 1 , verbose = self . verbose ) else : exposure = np . ones_like ( y...
simple tool to create a common API
31,622
def predict ( self , X , exposure = None ) : if not self . _is_fitted : raise AttributeError ( 'GAM has not been fitted. Call fit first.' ) X = check_X ( X , n_feats = self . statistics_ [ 'm_features' ] , edge_knots = self . edge_knots_ , dtypes = self . dtype , features = self . feature , verbose = self . verbose ) i...
predict expected value of target given model and input X often this is done via expected value of GAM given input X
31,623
def gridsearch ( self , X , y , exposure = None , weights = None , return_scores = False , keep_best = True , objective = 'auto' , ** param_grids ) : y , weights = self . _exposure_to_weights ( y , exposure , weights ) return super ( PoissonGAM , self ) . gridsearch ( X , y , weights = weights , return_scores = return_...
performs a grid search over a space of parameters for a given objective
31,624
def _get_quantile_ratio ( self , X , y ) : y_pred = self . predict ( X ) return ( y_pred > y ) . mean ( )
find the empirical quantile of the model
31,625
def fit_quantile ( self , X , y , quantile , max_iter = 20 , tol = 0.01 , weights = None ) : def _within_tol ( a , b , tol ) : return np . abs ( a - b ) <= tol if quantile <= 0 or quantile >= 1 : raise ValueError ( 'quantile must be on (0, 1), but found {}' . format ( quantile ) ) if tol <= 0 : raise ValueError ( 'tol ...
fit ExpectileGAM to a desired quantile via binary search
31,626
def nice_repr ( name , param_kvs , line_width = 30 , line_offset = 5 , decimals = 3 , args = None , flatten_attrs = True ) : if not param_kvs and not args : return '{}()' . format ( name ) ks = list ( param_kvs . keys ( ) ) vs = list ( param_kvs . values ( ) ) idxs = np . argsort ( ks ) param_kvs = [ ( ks [ i ] , vs [ ...
tool to do a nice repr of a class .
31,627
def get_params ( self , deep = False ) : attrs = self . __dict__ for attr in self . _include : attrs [ attr ] = getattr ( self , attr ) if deep is True : return attrs return dict ( [ ( k , v ) for k , v in list ( attrs . items ( ) ) if ( k [ 0 ] != '_' ) and ( k [ - 1 ] != '_' ) and ( k not in self . _exclude ) ] )
returns a dict of all of the object s user - facing parameters
31,628
def set_params ( self , deep = False , force = False , ** parameters ) : param_names = self . get_params ( deep = deep ) . keys ( ) for parameter , value in parameters . items ( ) : if ( parameter in param_names or force or ( hasattr ( self , parameter ) and parameter == parameter . strip ( '_' ) ) ) : setattr ( self ,...
sets an object s parameters
31,629
def build_from_info ( cls , info ) : info = deepcopy ( info ) if 'term_type' in info : cls_ = TERMS [ info . pop ( 'term_type' ) ] if issubclass ( cls_ , MetaTermMixin ) : return cls_ . build_from_info ( info ) else : cls_ = cls return cls_ ( ** info )
build a Term instance from a dict
31,630
def _has_terms ( self ) : loc = self . _super_get ( '_term_location' ) return self . _super_has ( loc ) and isiterable ( self . _super_get ( loc ) ) and len ( self . _super_get ( loc ) ) > 0 and all ( [ isinstance ( term , Term ) for term in self . _super_get ( loc ) ] )
bool whether the instance has any sub - terms
31,631
def build_from_info ( cls , info ) : terms = [ ] for term_info in info [ 'terms' ] : terms . append ( SplineTerm . build_from_info ( term_info ) ) return cls ( * terms )
build a TensorTerm instance from a dict
31,632
def hasconstraint ( self ) : constrained = False for term in self . _terms : constrained = constrained or term . hasconstraint return constrained
bool whether the term has any constraints
31,633
def _build_marginal_constraints ( self , i , coef , constraint_lam , constraint_l2 ) : composite_C = np . zeros ( ( len ( coef ) , len ( coef ) ) ) for slice_ in self . _iterate_marginal_coef_slices ( i ) : coef_slice = coef [ slice_ ] slice_C = self . _terms [ i ] . build_constraints ( coef_slice , constraint_lam , co...
builds a constraint matrix for a marginal term in the tensor term
31,634
def _iterate_marginal_coef_slices ( self , i ) : dims = [ term_ . n_coefs for term_ in self ] idxs = np . arange ( np . prod ( dims ) ) idxs = idxs . reshape ( dims ) idxs = np . moveaxis ( idxs , i , 0 ) . reshape ( idxs . shape [ i ] , int ( idxs . size / idxs . shape [ i ] ) ) for slice_ in idxs . T : yield slice_
iterator of indices into tensor s coef vector for marginal term i s coefs
31,635
def info ( self ) : info = { 'term_type' : 'term_list' , 'verbose' : self . verbose } info . update ( { 'terms' : [ term . info for term in self . _terms ] } ) return info
get information about the terms in the term list
31,636
def build_from_info ( cls , info ) : info = deepcopy ( info ) terms = [ ] for term_info in info [ 'terms' ] : terms . append ( Term . build_from_info ( term_info ) ) return cls ( * terms )
build a TermList instance from a dict
31,637
def pop ( self , i = None ) : if i == None : i = len ( self ) - 1 if i >= len ( self . _terms ) or i < 0 : raise ValueError ( 'requested pop {}th term, but found only {} terms' . format ( i , len ( self . _terms ) ) ) term = self . _terms [ i ] self . _terms = self . _terms [ : i ] + self . _terms [ i + 1 : ] return te...
remove the ith term from the term list
31,638
def get_coef_indices ( self , i = - 1 ) : if i == - 1 : return list ( range ( self . n_coefs ) ) if i >= len ( self . _terms ) : raise ValueError ( 'requested {}th term, but found only {} terms' . format ( i , len ( self . _terms ) ) ) start = 0 for term in self . _terms [ : i ] : start += term . n_coefs stop = start +...
get the indices for the coefficients of a term in the term list
31,639
def load_data ( self ) : print ( 'Extracting training data from csv...' ) images = list ( ) labels = list ( ) emotion_index_map = dict ( ) with open ( self . datapath ) as csv_file : reader = csv . reader ( csv_file , delimiter = ',' , quotechar = '"' ) for row in reader : label_class = row [ self . csv_label_col ] if ...
Loads image and label data from specified csv file path .
31,640
def _load_dataset ( self , images , labels , emotion_index_map ) : train_images , test_images , train_labels , test_labels = train_test_split ( images , labels , test_size = self . validation_split , random_state = 42 , stratify = labels ) dataset = Dataset ( train_images , test_images , train_labels , test_labels , em...
Loads Dataset object with images labels and other data .
31,641
def load_data ( self ) : images = list ( ) labels = list ( ) emotion_index_map = dict ( ) label_directories = [ dir for dir in os . listdir ( self . datapath ) if not dir . startswith ( '.' ) ] for label_directory in label_directories : if self . target_emotion_map : if label_directory not in self . target_emotion_map ...
Loads image and label data from specified directory path .
31,642
def _check_directory_arguments ( self ) : if not os . path . isdir ( self . datapath ) : raise ( NotADirectoryError ( 'Directory does not exist: %s' % self . datapath ) ) if self . time_delay : if self . time_delay < 1 : raise ValueError ( 'Time step argument must be greater than 0, but gave: %i' % self . time_delay ) ...
Validates arguments for loading from directories including static image and time series directories .
31,643
def predict ( self , image_file ) : image = misc . imread ( image_file ) gray_image = image if len ( image . shape ) > 2 : gray_image = cv2 . cvtColor ( image , code = cv2 . COLOR_BGR2GRAY ) resized_image = cv2 . resize ( gray_image , self . target_dimensions , interpolation = cv2 . INTER_LINEAR ) final_image = np . ar...
Predicts discrete emotion for given image .
31,644
def _check_emotion_set_is_supported ( self ) : supported_emotion_subsets = [ set ( [ 'anger' , 'fear' , 'surprise' , 'calm' ] ) , set ( [ 'happiness' , 'disgust' , 'surprise' ] ) , set ( [ 'anger' , 'fear' , 'surprise' ] ) , set ( [ 'anger' , 'fear' , 'calm' ] ) , set ( [ 'anger' , 'happiness' , 'calm' ] ) , set ( [ 'a...
Validates set of user - supplied target emotions .
31,645
def _choose_model_from_target_emotions ( self ) : model_indices = [ self . emotion_index_map [ emotion ] for emotion in self . target_emotions ] sorted_indices = [ str ( idx ) for idx in sorted ( model_indices ) ] model_suffix = '' . join ( sorted_indices ) model_file = 'models/conv_model_%s.hdf5' % model_suffix emotio...
Initializes pre - trained deep learning model for the set of target emotions supplied by user .
31,646
def apply_transform ( sample , transform_matrix , channel_axis = 0 , fill_mode = 'nearest' , cval = 0. ) : if sample . ndim == 4 : channel_axis = channel_axis - 1 transformed_frames = [ transform ( frame , transform_matrix , channel_axis , fill_mode , cval ) for frame in sample ] return np . stack ( transformed_frames ...
Apply the image transformation specified by a matrix .
31,647
def standardize ( self , x ) : if self . preprocessing_function : x = self . preprocessing_function ( x ) if self . rescale : x *= self . rescale if self . samplewise_center : x -= np . mean ( x , keepdims = True ) if self . samplewise_std_normalization : x /= np . std ( x , keepdims = True ) + 1e-7 if self . featurewi...
Apply the normalization configuration to a batch of inputs .
31,648
def fit ( self , x , augment = False , rounds = 1 , seed = None ) : x = np . asarray ( x , dtype = K . floatx ( ) ) if x . shape [ self . channel_axis ] not in { 1 , 3 , 4 } : warnings . warn ( 'Expected input to be images (as Numpy array) ' 'following the data format convention "' + self . data_format + '" ' '(channel...
Fits internal statistics to some sample data .
31,649
def next ( self ) : with self . lock : index_array = next ( self . index_generator ) return self . _get_batches_of_transformed_samples ( index_array )
For python 2 . x .
31,650
def _init_model ( self ) : model = Sequential ( ) model . add ( ConvLSTM2D ( filters = self . filters , kernel_size = self . kernel_size , activation = self . activation , input_shape = [ self . time_delay ] + list ( self . image_size ) + [ self . channels ] , data_format = 'channels_last' , return_sequences = True ) )...
Composes all layers of CNN .
31,651
def _init_model ( self ) : model = Sequential ( ) model . add ( Conv2D ( input_shape = list ( self . image_size ) + [ self . channels ] , filters = self . filters , kernel_size = self . kernel_size , activation = 'relu' , data_format = 'channels_last' ) ) model . add ( Conv2D ( filters = self . filters , kernel_size = ...
Composes all layers of 2D CNN .
31,652
def _init_model ( self ) : model = Sequential ( ) model . add ( Conv3D ( input_shape = [ self . time_delay ] + list ( self . image_size ) + [ self . channels ] , filters = self . filters , kernel_size = self . kernel_size , activation = 'relu' , data_format = 'channels_last' ) ) model . add ( Conv3D ( filters = self . ...
Composes all layers of 3D CNN .
31,653
def build_query_cmd ( self , query , csv = False ) : _cmd = "su postgres -c %s" if not csv : _dbcmd = "psql foreman -c %s" else : _dbcmd = "psql foreman -A -F , -X -c %s" dbq = _dbcmd % quote ( query ) return _cmd % quote ( dbq )
Builds the command needed to invoke the pgsql query as the postgres user . The query requires significant quoting work to satisfy both the shell and postgres parsing requirements . Note that this will generate a large amount of quoting in sos logs referencing the command being run
31,654
def is_enabled ( self , name ) : if self . services and name in self . services : return self . services [ name ] [ 'config' ] == 'enabled' return False
Check if given service name is enabled
31,655
def is_disabled ( self , name ) : if self . services and name in self . services : return self . services [ name ] [ 'config' ] == 'disabled' return False
Check if a given service name is disabled
31,656
def _query_service ( self , name ) : if self . query_cmd : try : return sos_get_command_output ( "%s %s" % ( self . query_cmd , name ) ) except Exception : return None return None
Query an individual service
31,657
def get_service_status ( self , name ) : svc = self . _query_service ( name ) if svc is not None : return { 'name' : name , 'status' : self . parse_query ( svc [ 'output' ] ) , 'output' : svc [ 'output' ] } else : return { 'name' : name , 'status' : 'missing' , 'output' : '' }
Returns the status for the given service name along with the output of the query command
31,658
def all_pkgs_by_name_regex ( self , regex_name , flags = 0 ) : reg = re . compile ( regex_name , flags ) return [ pkg for pkg in self . all_pkgs ( ) . keys ( ) if reg . match ( pkg ) ]
Return a list of packages that match regex_name .
31,659
def pkg_by_name ( self , name ) : pkgmatches = self . all_pkgs_by_name ( name ) if ( len ( pkgmatches ) != 0 ) : return self . all_pkgs_by_name ( name ) [ - 1 ] else : return None
Return a single package that matches name .
31,660
def all_pkgs ( self ) : if not self . packages : self . packages = self . get_pkg_list ( ) return self . packages
Return a list of all packages .
31,661
def all_files ( self ) : if self . files_command and not self . files : cmd = self . files_command files = shell_out ( cmd , timeout = 0 , chroot = self . chroot ) self . files = files . splitlines ( ) return self . files
Returns a list of files known by the package manager
31,662
def write ( self , presets_path ) : if self . builtin : raise TypeError ( "Cannot write built-in preset" ) odict = self . opts . dict ( ) pdict = { self . name : { DESC : self . desc , NOTE : self . note , OPTS : odict } } if not os . path . exists ( presets_path ) : os . makedirs ( presets_path , mode = 0o755 ) with o...
Write this preset to disk in JSON notation .
31,663
def get_archive_name ( self ) : name = self . get_local_name ( ) . split ( '.' ) [ 0 ] case = self . case_id label = self . commons [ 'cmdlineopts' ] . label date = '' rand = '' . join ( random . choice ( string . ascii_lowercase ) for x in range ( 7 ) ) if self . name_pattern == 'legacy' : nstr = "sosreport-{name}{cas...
This function should return the filename of the archive without the extension .
31,664
def validate_plugin ( self , plugin_class , experimental = False ) : valid_subclasses = [ IndependentPlugin ] + self . valid_subclasses if experimental : valid_subclasses += [ ExperimentalPlugin ] return any ( issubclass ( plugin_class , class_ ) for class_ in valid_subclasses )
Verifies that the plugin_class should execute under this policy
31,665
def _print ( self , msg = None , always = False ) : if always or not self . commons [ 'cmdlineopts' ] . quiet : if msg : print_ ( msg ) else : print_ ( )
A wrapper around print that only prints if we are not running in quiet mode
31,666
def get_msg ( self ) : width = 72 _msg = self . msg % { 'distro' : self . distro , 'vendor' : self . vendor , 'vendor_url' : self . vendor_url , 'vendor_text' : self . vendor_text , 'tmpdir' : self . commons [ 'tmpdir' ] } _fmt = "" for line in _msg . splitlines ( ) : _fmt = _fmt + fill ( line , width , replace_whitesp...
This method is used to prepare the preamble text to display to the user in non - batch mode . If your policy sets self . distro that text will be substituted accordingly . You can also override this method to do something more complicated .
31,667
def register_presets ( self , presets , replace = False ) : if replace : self . presets = { } self . presets . update ( presets )
Add new presets to this policy object .
31,668
def find_preset ( self , preset ) : for match in self . presets . keys ( ) : if match == preset : return self . presets [ match ] return None
Find a preset profile matching the specified preset string .
31,669
def load_presets ( self , presets_path = None ) : presets_path = presets_path or self . presets_path if not os . path . exists ( presets_path ) : return for preset_path in os . listdir ( presets_path ) : preset_path = os . path . join ( presets_path , preset_path ) try : preset_data = json . load ( open ( preset_path )...
Load presets from disk .
31,670
def add_preset ( self , name = None , desc = None , note = None , opts = SoSOptions ( ) ) : presets_path = self . presets_path if not name : raise ValueError ( "Preset name cannot be empty" ) if name in self . presets . keys ( ) : raise ValueError ( "A preset with name '%s' already exists" % name ) preset = PresetDefau...
Add a new on - disk preset and write it to the configured presets path .
31,671
def lsmod ( self ) : lines = shell_out ( "lsmod" , timeout = 0 ) . splitlines ( ) return [ line . split ( ) [ 0 ] . strip ( ) for line in lines ]
Return a list of kernel module names as strings .
31,672
def do_lvmdump ( self , metadata = False ) : lvmdump_path = self . get_cmd_output_path ( name = "lvmdump" , make = False ) lvmdump_cmd = "lvmdump %s -d '%s'" lvmdump_opts = "" if metadata : lvmdump_opts = "-a -m" cmd = lvmdump_cmd % ( lvmdump_opts , lvmdump_path ) self . add_cmd_output ( cmd , chroot = self . tmp_in_sy...
Collects an lvmdump in standard format with optional metadata archives for each physical volume present .
31,673
def __str ( self , quote = False , sep = " " , prefix = "" , suffix = "" ) : args = prefix arg_fmt = "=%s" for arg in _arg_names : args += arg + arg_fmt + sep args . strip ( sep ) vals = [ getattr ( self , arg ) for arg in _arg_names ] if not quote : vals = [ "," . join ( v ) if _is_seq ( v ) else v for v in vals ] els...
Format a SoSOptions object as a human or machine readable string .
31,674
def from_args ( cls , args ) : opts = SoSOptions ( ) opts . _merge_opts ( args , True ) return opts
Initialise a new SoSOptions object from a Namespace obtained by parsing command line arguments .
31,675
def merge ( self , src , skip_default = True ) : for arg in _arg_names : if not hasattr ( src , arg ) : continue if getattr ( src , arg ) is not None or not skip_default : self . _merge_opt ( arg , src , False )
Merge another set of SoSOptions into this object .
31,676
def dict ( self ) : odict = { } for arg in _arg_names : value = getattr ( self , arg ) if arg in ( 'add_preset' , 'del_preset' , 'desc' , 'note' ) : value = None odict [ arg ] = value return odict
Return this SoSOptions option values as a dictionary of argument name to value mappings .
31,677
def to_args ( self ) : def has_value ( name , value ) : null_values = ( "False" , "None" , "[]" , '""' , "''" , "0" ) if not value or value in null_values : return False if name in _arg_defaults : if str ( value ) == str ( _arg_defaults [ name ] ) : return False return True def filter_opt ( name , value ) : if name in ...
Return command arguments for this object .
31,678
def checkdebug ( self ) : opt = self . file_grep ( r"^(DEFAULT_LOGGING|DAEMONOPTIONS)=(.*)" , * self . files ) for opt1 in opt : for opt2 in opt1 . split ( " " ) : if opt2 in ( "--debug" , "debug" ) : return True return False
testing if autofs debug has been enabled anywhere
31,679
def getdaemondebug ( self ) : debugout = self . file_grep ( r"^(daemon.*)\s+(\/var\/log\/.*)" , * self . files ) for i in debugout : return i [ 1 ]
capture daemon debug output
31,680
def is_jar ( path ) : if os . path . isfile ( path ) and zipfile . is_zipfile ( path ) : try : with zipfile . ZipFile ( path ) as f : if "META-INF/MANIFEST.MF" in f . namelist ( ) : return True except ( IOError , zipfile . BadZipfile ) : pass return False
Check whether given file is a JAR file .
31,681
def get_maven_id ( jar_path ) : props = { } try : with zipfile . ZipFile ( jar_path ) as f : r = re . compile ( "META-INF/maven/[^/]+/[^/]+/pom.properties$" ) result = [ x for x in f . namelist ( ) if r . match ( x ) ] if len ( result ) != 1 : return None with f . open ( result [ 0 ] ) as props_f : for line in props_f ...
Extract Maven coordinates from a given JAR file if possible .
31,682
def get_jar_id ( jar_path ) : jar_id = "" try : with open ( jar_path , mode = "rb" ) as f : m = hashlib . sha1 ( ) for buf in iter ( partial ( f . read , 4096 ) , b'' ) : m . update ( buf ) jar_id = m . hexdigest ( ) except IOError : pass return jar_id
Compute JAR id .
31,683
def setup ( self ) : options = "" amqps_prefix = "" if self . get_option ( "ssl" ) : amqps_prefix = "amqps://" for option in [ "ssl-certificate" , "ssl-key" ] : if self . get_option ( option ) : amqps_prefix = "amqps://" options = ( options + " --%s=" % ( option ) + self . get_option ( option ) ) if self . get_option (...
performs data collection for qpid broker
31,684
def _exception ( etype , eval_ , etrace ) : if hasattr ( sys , 'ps1' ) or not sys . stderr . isatty ( ) : sys . __excepthook__ ( etype , eval_ , etrace ) else : traceback . print_exception ( etype , eval_ , etrace , limit = 2 , file = sys . stdout ) six . print_ ( ) pdb . pm ( )
Wrap exception in debugger if not in tty
31,685
def add_preset ( self , name , desc = "" , note = "" ) : policy = self . policy if policy . find_preset ( name ) : self . ui_log . error ( "A preset named '%s' already exists" % name ) return False desc = desc or self . opts . desc note = note or self . opts . note try : policy . add_preset ( name = name , desc = desc ...
Add a new command line preset for the current options with the specified name .
31,686
def del_preset ( self , name ) : policy = self . policy if not policy . find_preset ( name ) : self . ui_log . error ( "Preset '%s' not found" % name ) return False try : policy . del_preset ( name = name ) except Exception as e : self . ui_log . error ( str ( e ) + "\n" ) return False self . ui_log . info ( "Deleted p...
Delete a named command line preset .
31,687
def version ( self ) : versions = [ ] versions . append ( "sosreport: %s" % __version__ ) for plugname , plug in self . loaded_plugins : versions . append ( "%s: %s" % ( plugname , plug . version ) ) self . archive . add_string ( content = "\n" . join ( versions ) , dest = 'version.txt' )
Fetch version information from all plugins and store in the report version file
31,688
def get_navicli_SP_info ( self , SP_address ) : self . add_cmd_output ( [ "navicli -h %s getall" % SP_address , "navicli -h %s getsptime -spa" % SP_address , "navicli -h %s getsptime -spb" % SP_address , "navicli -h %s getlog" % SP_address , "navicli -h %s getdisk" % SP_address , "navicli -h %s getcache" % SP_address ,...
EMC Navisphere Host Agent NAVICLI specific information - CLARiiON - commands
31,689
def is_static_etcd ( self ) : return os . path . exists ( os . path . join ( self . static_pod_dir , "etcd.yaml" ) )
Determine if we are on a node running etcd
31,690
def get_ip_netns ( self , ip_netns_file ) : out = [ ] try : ip_netns_out = open ( ip_netns_file ) . read ( ) except IOError : return out for line in ip_netns_out . splitlines ( ) : if line . startswith ( "Object \"netns\" is unknown" ) or line . isspace ( ) or line [ : 1 ] . isspace ( ) : return out out . append ( line...
Returns a list for which items are namespaces in the output of ip netns stored in the ip_netns_file .
31,691
def collect_iptable ( self , tablename ) : modname = "iptable_" + tablename if self . check_ext_prog ( "grep -q %s /proc/modules" % modname ) : cmd = "iptables -t " + tablename + " -nvL" self . add_cmd_output ( cmd )
When running the iptables command it unfortunately auto - loads the modules before trying to get output . Some people explicitly don t want this so check if the modules are loaded before running the command . If they aren t loaded there can t possibly be any relevant rules in that table
31,692
def collect_ip6table ( self , tablename ) : modname = "ip6table_" + tablename if self . check_ext_prog ( "grep -q %s /proc/modules" % modname ) : cmd = "ip6tables -t " + tablename + " -nvL" self . add_cmd_output ( cmd )
Same as function above but for ipv6
31,693
def postproc ( self ) : self . do_file_sub ( "/etc/ovirt-engine/engine-config/engine-config.properties" , r"Password.type=(.*)" , r"Password.type=********" ) self . do_file_sub ( "/etc/rhevm/rhevm-config/rhevm-config.properties" , r"Password.type=(.*)" , r"Password.type=********" ) engine_files = ( 'ovirt-engine.xml' ,...
Obfuscate sensitive keys .
31,694
def get_log_dir ( self , conf_file ) : log_dir = None with open ( conf_file , 'r' ) as conf_f : for line in conf_f : line = line . split ( '#' ) [ 0 ] . strip ( ) try : ( key , value ) = line . split ( '=' , 1 ) if key . strip ( ) == 'log-dir' : log_dir = value . strip ( ) except ValueError : pass return log_dir
Get watchdog log directory .
31,695
def setup ( self ) : conf_file = self . get_option ( 'conf_file' ) log_dir = '/var/log/watchdog' self . add_copy_spec ( [ conf_file , '/etc/sysconfig/watchdog' , ] ) self . add_copy_spec ( [ '/etc/watchdog.d' , '/usr/libexec/watchdog/scripts' , ] ) try : res = self . get_log_dir ( conf_file ) if res : log_dir = res exc...
Collect watchdog information .
31,696
def get_volume_names ( self , volume_file ) : out = [ ] fp = open ( volume_file , 'r' ) for line in fp . readlines ( ) : if not line . startswith ( "Volume Name:" ) : continue volname = line [ 12 : - 1 ] out . append ( volname ) fp . close ( ) return out
Return a dictionary for which keys are volume names according to the output of gluster volume info stored in volume_file .
31,697
def dist_version ( self ) : try : with open ( '/etc/lsb-release' , 'r' ) as fp : lines = fp . readlines ( ) for line in lines : if "DISTRIB_RELEASE" in line : return line . split ( "=" ) [ 1 ] . strip ( ) return False except IOError : return False
Returns the version stated in DISTRIB_RELEASE
31,698
def _make_leading_paths ( self , src , mode = 0o700 ) : self . log_debug ( "Making leading paths for %s" % src ) root = self . _archive_root dest = src def in_archive ( path ) : return path . startswith ( os . path . join ( root , "" ) ) if not src . startswith ( "/" ) : src_dir = src else : src_dir = src if os . path ...
Create leading path components
31,699
def _check_path ( self , src , path_type , dest = None , force = False ) : dest = dest or self . dest_path ( src ) if path_type == P_DIR : dest_dir = dest else : dest_dir = os . path . split ( dest ) [ 0 ] if not dest_dir : return dest if os . path . exists ( dest_dir ) and not os . path . isdir ( dest_dir ) : raise Va...
Check a new destination path in the archive .