idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
32,000
def _expand_paths ( path ) : dir_name = os . path . dirname ( path ) paths = [ ] logger . debug ( "Attempting to expand %s" , path ) if os . path . isdir ( dir_name ) : files = os . listdir ( dir_name ) match = os . path . basename ( path ) for file_path in files : if re . match ( match , file_path ) : expanded_path = ...
Expand wildcarded paths
32,001
def validate_remove_file ( remove_file ) : if not os . path . isfile ( remove_file ) : logger . warn ( "WARN: Remove file does not exist" ) return False mode = stat . S_IMODE ( os . stat ( remove_file ) . st_mode ) if not mode == 0o600 : logger . error ( "ERROR: Invalid remove file permissions" "Expected 0600 got %s" %...
Validate the remove file
32,002
def write_data_to_file ( data , filepath ) : try : os . makedirs ( os . path . dirname ( filepath ) , 0o700 ) except OSError : pass write_to_disk ( filepath , content = data )
Write data to file
32,003
def magic_plan_b ( filename ) : cmd = shlex . split ( 'file --mime-type --mime-encoding ' + filename ) stdout , stderr = Popen ( cmd , stdout = PIPE ) . communicate ( ) stdout = stdout . decode ( "utf-8" ) mime_str = stdout . split ( filename + ': ' ) [ 1 ] . strip ( ) return mime_str
Use this in instances where python - magic is MIA and can t be installed for whatever reason
32,004
def modify_config_file ( updates ) : cmd = '/bin/sed ' for key in updates : cmd = cmd + '-e \'s/^#*{key}.*=.*$/{key}={value}/\' ' . format ( key = key , value = updates [ key ] ) cmd = cmd + constants . default_conf_file status = run_command_get_output ( cmd ) write_to_disk ( constants . default_conf_file , content = s...
Update the config file with certain things
32,005
def users ( self , proc ) : ret = { } if self . first_column in [ 'USER' , 'UID' ] : for row in self . data : if proc == row [ self . command_name ] : if row [ self . first_column ] not in ret : ret [ row [ self . first_column ] ] = [ ] ret [ row [ self . first_column ] ] . append ( row [ "PID" ] ) return ret
Searches for all users running a given command .
32,006
def fuzzy_match ( self , proc ) : return any ( proc in row [ self . command_name ] for row in self . data )
Are there any commands that contain the given text?
32,007
def number_occurences ( self , proc ) : return len ( [ True for row in self . data if proc in row [ self . command_name ] ] )
Returns the number of occurencies of commands that contain given text
32,008
def parse_content ( self , content ) : self . active_lines_unparsed = get_active_lines ( content ) if content is not None else [ ] self . active_settings = split_kv_pairs ( content , use_partition = False ) if content is not None else [ ]
Main parsing class method which stores all interesting data from the content .
32,009
def cluster_info ( cpu , cfg ) : cpus = cpu . cpu_count pods_per_core = cfg . doc . find ( "pods-per-core" ) pods_per_core_int = int ( pods_per_core . value ) if pods_per_core else PODS_PER_CORE cfg_max_pods = cfg . doc . find ( "max-pods" ) cfg_max_pods_int = int ( cfg_max_pods . value ) if cfg_max_pods else MAX_PODS ...
Collects fact for each host
32,010
def master_etcd ( info , meta , max_pod_cluster , label ) : nodes = meta . get ( label , [ ] ) or [ ] info = info [ info [ "machine_id" ] . isin ( nodes ) ] if info . empty : return cpu_factor = max_pod_cluster / 1000.0 nocpu_expected = MASTER_MIN_CORE + ( max_pod_cluster / 1000.0 ) bad = info [ info [ "cpu_count" ] < ...
Function used to create the response for all master node types
32,011
def infra_nodes ( info , meta , max_pod_cluster , label , key ) : nodes = meta . get ( label , [ ] ) or [ ] infos = info [ info [ "machine_id" ] . isin ( nodes ) ] if infos . empty : return return make_response ( key , max_pod_cluster = max_pod_cluster , infos = infos , GREEN = Fore . GREEN , RED = Fore . RED , YELLOW ...
Function used to create the response for all infra node types
32,012
def parse_df_lines ( df_content ) : df_ls = { } df_out = [ ] is_sep = False columns = Record . _fields for line in df_content [ 1 : ] : line_splits = line . rstrip ( ) . split ( None , 5 ) if len ( line_splits ) >= 6 : for i , name in enumerate ( columns ) : df_ls [ name ] = line_splits [ i ] is_sep = False elif len ( ...
Parse contents of each line in df output .
32,013
def get_param ( self , keyword , param = None , default = None ) : if not keyword or keyword not in self . data : return [ default ] if self . data [ keyword ] is None : return [ None ] if not param : return self . data [ keyword ] found = [ ] for line in self . data [ keyword ] : words = line . strip ( ) . split ( ) i...
Get all the parameters for a given keyword or default if keyword or parameter are not present in the configuration .
32,014
def get_last ( self , keyword , param = None , default = None ) : return self . get_param ( keyword , param , default ) [ - 1 ]
Get the parameters for a given keyword or default if keyword or parameter are not present in the configuration .
32,015
def _replace_tabs ( s , ts = 8 ) : result = '' for c in s : if c == '\t' : while True : result += ' ' if len ( result ) % ts == 0 : break else : result += c return result
Replace the tabs in s and keep its original alignment with the tab - stop equals to ts
32,016
def extract ( path , timeout = None , extract_dir = None , content_type = None ) : content_type = content_type or content_type_from_file ( path ) if content_type == "application/zip" : extractor = ZipExtractor ( timeout = timeout ) else : extractor = TarExtractor ( timeout = timeout ) try : ctx = extractor . from_path ...
Extract path into a temporary directory in extract_dir .
32,017
def report ( rel ) : if "Fedora" in rel . product : return make_pass ( "IS_FEDORA" , product = rel . product ) else : return make_fail ( "IS_NOT_FEDORA" , product = rel . product )
Fires if the machine is running Fedora .
32,018
def _make_cron_re ( ) : range_ = r"{val}(?:-{val}(?:/\d+)?)?" template = r"(?P<{name}>" + "(?:\*(?:/\d+)?|{r}(?:,{r})*)" . format ( r = range_ ) + ")\s+" return ( r'^\s*' + template . format ( name = 'minute' , val = r'(?:\d|[012345]\d)' ) + template . format ( name = 'hour' , val = r'(?:\d|[01]\d|2[0123])' ) + templat...
Make the regular expression that matches a crontab cron line .
32,019
def verify_connectivity ( config ) : logger . debug ( "Verifying Connectivity" ) ic = InsightsConnection ( config ) try : branch_info = ic . get_branch_info ( ) except requests . ConnectionError as e : logger . debug ( e ) logger . debug ( "Failed to connect to satellite" ) return False except LookupError as e : logger...
Verify connectivity to satellite server
32,020
def set_auto_configuration ( config , hostname , ca_cert , proxy , is_satellite ) : logger . debug ( "Attempting to auto configure!" ) logger . debug ( "Attempting to auto configure hostname: %s" , hostname ) logger . debug ( "Attempting to auto configure CA cert: %s" , ca_cert ) logger . debug ( "Attempting to auto co...
Set config based on discovered data
32,021
def _try_satellite6_configuration ( config ) : try : rhsm_config = _importInitConfig ( ) logger . debug ( 'Trying to autoconfigure...' ) cert = open ( rhsmCertificate . certpath ( ) , 'r' ) . read ( ) key = open ( rhsmCertificate . keypath ( ) , 'r' ) . read ( ) rhsm = rhsmCertificate ( key , cert ) is_satellite = Fals...
Try to autoconfigure for Satellite 6
32,022
def _try_satellite5_configuration ( config ) : logger . debug ( "Trying Satellite 5 auto_config" ) rhn_config = '/etc/sysconfig/rhn/up2date' systemid = '/etc/sysconfig/rhn/systemid' if os . path . isfile ( rhn_config ) : if os . path . isfile ( systemid ) : config . systemid = _read_systemid_file ( systemid ) else : lo...
Attempt to determine Satellite 5 Configuration
32,023
def get_chain ( self , name , table = "filter" ) : return [ r for r in self . rules if r [ "table" ] == table and r [ "chain" ] == name ]
Get the list of rules for a particular chain . Chain order is kept intact .
32,024
def table_chains ( self , table = "filter" ) : return dict ( ( c [ "name" ] , self . get_chain ( c [ "name" ] , table ) ) for c in self . get_table ( table ) )
Get a dict where the keys are all the chains for the given table and each value is the set of rules defined for the given chain .
32,025
def verify ( self , egg_path , gpg_key = constants . pub_gpg_path ) : if egg_path and not os . path . isfile ( egg_path ) : the_message = "Provided egg path %s does not exist, cannot verify." % ( egg_path ) logger . debug ( the_message ) return { 'gpg' : False , 'stderr' : the_message , 'stdout' : the_message , 'rc' : ...
Verifies the GPG signature of the egg . The signature is assumed to be in the same directory as the egg and named the same as the egg except with an additional . asc extension .
32,026
def get_diagnosis ( self , remediation_id = None ) : if self . config . offline : logger . error ( 'Cannot get diagnosis in offline mode.' ) return None return self . connection . get_diagnosis ( remediation_id )
returns JSON of diagnosis data on success None on failure Optional arg remediation_id to get a particular remediation set .
32,027
def delete_cached_branch_info ( self ) : if os . path . isfile ( constants . cached_branch_info ) : logger . debug ( 'Deleting cached branch_info file...' ) os . remove ( constants . cached_branch_info ) else : logger . debug ( 'Cached branch_info file does not exist.' )
Deletes cached branch_info file
32,028
def clear_local_registration ( self ) : delete_registered_file ( ) delete_unregistered_file ( ) write_to_disk ( constants . machine_id_file , delete = True ) logger . debug ( 'Re-register set, forcing registration.' ) logger . debug ( 'New machine-id: %s' , generate_machine_id ( new = True ) )
Deletes dotfiles and machine - id for fresh registration
32,029
def col ( loc , strg ) : s = strg return 1 if loc < len ( s ) and s [ loc ] == '\n' else loc - s . rfind ( "\n" , 0 , loc )
Returns current column within a string counting newlines as line separators . The first column is number 1 .
32,030
def upcaseTokens ( s , l , t ) : return [ tt . upper ( ) for tt in map ( _ustr , t ) ]
Helper parse action to convert tokens to upper case .
32,031
def downcaseTokens ( s , l , t ) : return [ tt . lower ( ) for tt in map ( _ustr , t ) ]
Helper parse action to convert tokens to lower case .
32,032
def infixNotation ( baseExpr , opList , lpar = Suppress ( '(' ) , rpar = Suppress ( ')' ) ) : ret = Forward ( ) lastExpr = baseExpr | ( lpar + ret + rpar ) for i , operDef in enumerate ( opList ) : opExpr , arity , rightLeftAssoc , pa = ( operDef + ( None , ) ) [ : 4 ] termName = "%s term" % opExpr if arity < 3 else "%...
Helper method for constructing grammars of expressions made up of operators working in a precedence hierarchy . Operators may be unary or binary left - or right - associative . Parse actions can also be attached to operator expressions .
32,033
def report ( shas , meta ) : num_members = meta . num_members uniq = shas . sha . unique ( ) if len ( shas ) != num_members or len ( uniq ) != 1 : return make_fail ( "DISTINCT_NTP_CONFS" , confs = len ( uniq ) , nodes = num_members ) return make_pass ( "MATCHING_NTP_CONFS" , nodes = meta [ 'nodes' ] , servers = meta [ ...
Cluster rule to compare ntp . conf files across a cluster
32,034
def _create_ip_report ( self ) : try : ip_report_name = os . path . join ( self . report_dir , "%s-ip.csv" % self . session ) self . logger . con_out ( 'Creating IP Report - %s' , ip_report_name ) ip_report = open ( ip_report_name , 'wt' ) ip_report . write ( 'Obfuscated IP,Original IP\n' ) for k , v in self . ip_db . ...
this will take the obfuscated ip and hostname databases and output csv files
32,035
def _create_archive ( self ) : try : self . archive_path = os . path . join ( self . report_dir , "%s.tar.gz" % self . session ) self . logger . con_out ( 'Creating SOSCleaner Archive - %s' , self . archive_path ) t = tarfile . open ( self . archive_path , 'w:gz' ) for dirpath , dirnames , filenames in os . walk ( self...
This will create a tar . gz compressed archive of the scrubbed directory
32,036
def _clean_up ( self ) : self . logger . info ( 'Beginning Clean Up Process' ) try : if self . origin_path : self . logger . info ( 'Removing Origin Directory - %s' , self . origin_path ) shutil . rmtree ( self . origin_path ) self . logger . info ( 'Removing Working Directory - %s' , self . dir_path ) shutil . rmtree ...
This will clean up origin directories etc .
32,037
def _hn2db ( self , hn ) : db = self . hn_db hn_found = False for k , v in db . items ( ) : if v == hn : ret_hn = k hn_found = True if hn_found : return ret_hn else : self . hostname_count += 1 o_domain = self . root_domain for od , d in self . dn_db . items ( ) : if d in hn : o_domain = od new_hn = "host%s.%s" % ( sel...
This will add a hostname for a hostname for an included domain or return an existing entry
32,038
def _file_list ( self , folder ) : rtn = [ ] walk = self . _walk_report ( folder ) for key , val in walk . items ( ) : for v in val : x = os . path . join ( key , v ) rtn . append ( x ) self . file_count = len ( rtn ) return rtn
returns a list of file names in an sosreport directory
32,039
def _clean_line ( self , l ) : new_line = self . _sub_ip ( l ) new_line = self . _sub_hostname ( new_line ) new_line = self . _sub_keywords ( new_line ) return new_line
this will return a line with obfuscations for all possible variables hostname ip etc .
32,040
def _clean_file ( self , f ) : if os . path . exists ( f ) and not os . path . islink ( f ) : tmp_file = tempfile . TemporaryFile ( mode = 'w+b' ) try : fh = open ( f , 'r' ) data = fh . readlines ( ) fh . close ( ) if len ( data ) > 0 : for l in data : new_l = self . _clean_line ( l ) tmp_file . write ( new_l . encode...
this will take a given file path scrub it accordingly and save a new copy of the file in the same location
32,041
def _add_extra_files ( self , files ) : try : for f in files : self . logger . con_out ( "adding additional file for analysis: %s" % f ) fname = os . path . basename ( f ) f_new = os . path . join ( self . dir_path , fname ) shutil . copyfile ( f , f_new ) except IOError as e : self . logger . con_out ( "ExtraFileError...
if extra files are to be analyzed with an sosreport this will add them to the origin path to be analyzed
32,042
def _clean_files_only ( self , files ) : try : if not ( os . path . exists ( self . origin_path ) ) : self . logger . info ( "Creating Origin Path - %s" % self . origin_path ) os . makedirs ( self . origin_path ) if not ( os . path . exists ( self . dir_path ) ) : self . logger . info ( "Creating Directory Path - %s" %...
if a user only wants to process one or more specific files instead of a full sosreport
32,043
def clean_report ( self , options , sosreport ) : if options . report_dir : if os . path . isdir ( options . report_dir ) : self . report_dir = options . report_dir self . origin_path , self . dir_path , self . session , self . logfile , self . uuid = self . _prep_environment ( ) self . _start_logging ( self . logfile ...
this is the primary function to put everything together and analyze an sosreport
32,044
def parse_content ( self , content ) : self . rows = [ ] if len ( content ) < 2 : self . no_data = True return headers = [ ] field_re = re . compile ( r'\w+(\s\w+)*' ) for match in field_re . finditer ( content [ 0 ] ) : headers . append ( { 'name' : match . group ( ) , 'start' : match . start ( ) } ) for line in conte...
Parse the lines given into a list of dictionaries for each row . This is stored in the rows attribute .
32,045
def _ip_int_from_prefix ( self , prefixlen = None ) : if prefixlen is None : prefixlen = self . _prefixlen return self . _ALL_ONES ^ ( self . _ALL_ONES >> prefixlen )
Turn the prefix length netmask into a int for comparison .
32,046
def _ip_string_from_prefix ( self , prefixlen = None ) : if not prefixlen : prefixlen = self . _prefixlen return self . _string_from_ip_int ( self . _ip_int_from_prefix ( prefixlen ) )
Turn a prefix length into a dotted decimal string .
32,047
def check_output ( self , cmd , timeout = None , keep_rc = False , env = None ) : return subproc . call ( cmd , timeout = timeout or self . timeout , keep_rc = keep_rc , env = env )
Subclasses can override to provide special environment setup command prefixes etc .
32,048
def create_archive_dir ( self ) : archive_dir = os . path . join ( self . tmp_dir , self . archive_name ) os . makedirs ( archive_dir , 0o700 ) return archive_dir
Create the archive dir
32,049
def create_command_dir ( self ) : cmd_dir = os . path . join ( self . archive_dir , "insights_commands" ) os . makedirs ( cmd_dir , 0o700 ) return cmd_dir
Create the sos_commands dir
32,050
def get_full_archive_path ( self , path ) : return os . path . join ( self . archive_dir , path . lstrip ( '/' ) )
Returns the full archive path
32,051
def _copy_file ( self , path ) : full_path = self . get_full_archive_path ( path ) try : os . makedirs ( os . path . dirname ( full_path ) ) except OSError : pass logger . debug ( "Copying %s to %s" , path , full_path ) shutil . copyfile ( path , full_path ) return path
Copy just a single file
32,052
def copy_file ( self , path ) : if "*" in path : paths = _expand_paths ( path ) if paths : for path in paths : self . _copy_file ( path ) else : if os . path . isfile ( path ) : return self . _copy_file ( path ) else : logger . debug ( "File %s does not exist" , path ) return False
Copy a single file or regex creating the necessary directories
32,053
def copy_dir ( self , path ) : for directory in path : if os . path . isdir ( path ) : full_path = os . path . join ( self . archive_dir , directory . lstrip ( '/' ) ) logger . debug ( "Copying %s to %s" , directory , full_path ) shutil . copytree ( directory , full_path ) else : logger . debug ( "Not a directory: %s" ...
Recursively copy directory
32,054
def create_tar_file ( self , full_archive = False ) : tar_file_name = os . path . join ( self . archive_tmp_dir , self . archive_name ) ext = "" if self . compressor == "none" else ".%s" % self . compressor tar_file_name = tar_file_name + ".tar" + ext logger . debug ( "Tar File: " + tar_file_name ) subprocess . call ( ...
Create tar file to be compressed
32,055
def delete_tmp_dir ( self ) : logger . debug ( "Deleting: " + self . tmp_dir ) shutil . rmtree ( self . tmp_dir , True )
Delete the entire tmp dir
32,056
def delete_archive_dir ( self ) : logger . debug ( "Deleting: " + self . archive_dir ) shutil . rmtree ( self . archive_dir , True )
Delete the entire archive dir
32,057
def delete_archive_file ( self ) : logger . debug ( "Deleting %s" , self . archive_tmp_dir ) shutil . rmtree ( self . archive_tmp_dir , True )
Delete the directory containing the constructed archive
32,058
def add_metadata_to_archive ( self , metadata , meta_path ) : archive_path = self . get_full_archive_path ( meta_path . lstrip ( '/' ) ) write_data_to_file ( metadata , archive_path )
Add metadata to archive
32,059
def get ( self , url , params = { } , headers = { } , auth = ( ) , certificate_path = None ) : certificate_path = certificate_path if certificate_path else False return self . session . get ( url , params = params , headers = headers , verify = certificate_path , auth = auth , timeout = self . timeout )
Returns the response payload from the request to the given URL .
32,060
def update_headers ( self , response ) : if 'expires' in response . headers and 'cache-control' in response . headers : self . msg = self . server_cache_headers return response . headers else : self . msg = self . default_cache_vars date = parsedate ( response . headers [ 'date' ] ) expires = datetime ( * date [ : 6 ] ...
Returns the updated caching headers .
32,061
def hostname ( hn , ft , si ) : if not hn or not hn . fqdn : hn = ft if hn and hn . fqdn : fqdn = hn . fqdn hostname = hn . hostname if hn . hostname else fqdn . split ( "." ) [ 0 ] domain = hn . domain if hn . domain else "." . join ( fqdn . split ( "." ) [ 1 : ] ) return Hostname ( fqdn , hostname , domain ) else : f...
Check hostname facter and systemid to get the fqdn hostname and domain .
32,062
def parse_doc ( f , ctx = None , overwrite = False ) : lg = LineGetter ( f , comment_marker = ( "#" , ";" ) , strip = False ) cfg = ConfigParser ( ctx ) . parse_doc ( lg ) set_defaults ( cfg ) if overwrite : squash ( cfg ) return cfg
Accepts an open file or a list of lines .
32,063
def pad_release ( release_to_pad , num_sections = 4 ) : parts = release_to_pad . split ( '.' ) if len ( parts ) > num_sections : raise ValueError ( "Too many sections encountered ({found} > {num} in release string {rel}" . format ( found = len ( parts ) , num = num_sections , rel = release_to_pad ) ) pad_count = num_se...
Pad out package and kernel release versions so that LooseVersion comparisons will be correct .
32,064
def parse_content ( self , content ) : self . program = None self . status = None self . link = None self . best = None self . paths = [ ] current_path = None for line in content : words = line . split ( None ) if ' - status is' in line : if self . program : raise ParseException ( "Program line for {newprog} found in o...
Parse the output of the alternatives command .
32,065
def validate_lines ( results , bad_lines ) : if results and len ( results ) == 1 : first = results [ 0 ] if any ( l in first . lower ( ) for l in bad_lines ) : return False return True
If results contains a single line and that line is included in the bad_lines list this function returns False . If no bad line is found the function returns True
32,066
def _scan ( cls , result_key , scanner ) : if result_key in cls . scanner_keys : raise ValueError ( "'%s' is already a registered scanner key" % result_key ) cls . scanners . append ( scanner ) cls . scanner_keys . add ( result_key )
Registers a scanner which is a function that will be called once per logical line in a document . A scanners job is to evaluate the content of the line and set a so - called result_key on the class to be retrieved later by a rule .
32,067
def any ( cls , result_key , func ) : def scanner ( self , obj ) : current_value = getattr ( self , result_key , None ) setattr ( self , result_key , current_value or func ( obj ) ) cls . _scan ( result_key , scanner )
Sets the result_key to the output of func if func ever returns truthy
32,068
def parse_content ( self , content ) : self . lines = content for scanner in self . scanners : scanner ( self )
Use all the defined scanners to search the log file setting the properties defined in the scanner .
32,069
def _valid_search ( self , s ) : if isinstance ( s , six . string_types ) : return lambda l : s in l elif ( isinstance ( s , list ) and len ( s ) > 0 and all ( isinstance ( w , six . string_types ) for w in s ) ) : return lambda l : all ( w in l for w in s ) elif s is not None : raise TypeError ( 'Search items must be ...
Check this given s it must be a string or a list of strings . Otherwise a TypeError will be raised .
32,070
def get ( self , s ) : ret = [ ] search_by_expression = self . _valid_search ( s ) for l in self . lines : if search_by_expression ( l ) : ret . append ( self . _parse_line ( l ) ) return ret
Returns all lines that contain s anywhere and wrap them in a list of dictionaries . s can be either a single string or a string list . For list all keywords in the list must be found in each line .
32,071
def scan ( cls , result_key , func ) : if result_key in cls . scanner_keys : raise ValueError ( "'%s' is already a registered scanner key" % result_key ) def scanner ( self ) : result = func ( self ) setattr ( self , result_key , result ) cls . scanners . append ( scanner ) cls . scanner_keys . add ( result_key )
Define computed fields based on a string to grep for . This is preferred to utilizing raw log lines in plugins because computed fields will be serialized whereas raw log lines will not .
32,072
def keep_scan ( cls , result_key , token ) : def _scan ( self ) : return self . get ( token ) cls . scan ( result_key , _scan )
Define a property that is set to the list of lines that contain the given token . Uses the get method of the log file .
32,073
def parse_content ( self , content , allow_no_value = False ) : super ( IniConfigFile , self ) . parse_content ( content ) config = RawConfigParser ( allow_no_value = allow_no_value ) fp = io . StringIO ( u"\n" . join ( content ) ) config . readfp ( fp , filename = self . file_name ) self . data = config
Parses content of the config file .
32,074
def path_entry ( self , path ) : if path [ 0 ] != '/' : return None path_parts = path . split ( '/' ) directory = '/' . join ( path_parts [ : - 1 ] ) name = path_parts [ - 1 ] if directory not in self . listings : return None if name not in self . listings [ directory ] [ 'entries' ] : return None return self . listing...
The parsed data given a path which is separated into its directory and entry name .
32,075
def image_by_name ( img_name , images = None ) : i_reg , i_rep , i_tag = _decompose ( img_name ) if not i_reg : i_reg = '*' if not i_tag : i_tag = '*' if images is None : c = docker . Client ( ** kwargs_from_env ( ) ) images = c . images ( all = False ) valid_images = [ ] for i in images : for t in i [ 'RepoTags' ] : r...
Returns a list of image data for images which match img_name . Will optionally take a list of images from a docker . Client . images query to avoid multiple docker queries .
32,076
def subp ( cmd ) : proc = subprocess . Popen ( cmd , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) out , err = proc . communicate ( ) return ReturnTuple ( proc . returncode , stdout = out , stderr = err )
Run a command as a subprocess . Return a triple of return code standard out standard err .
32,077
def print_scan_summary ( json_data , names = None ) : max_col_width = 50 min_width = 15 def _max_width ( data ) : max_name = 0 for name in data : max_name = len ( data [ name ] ) if len ( data [ name ] ) > max_name else max_name if max_name < min_width : max_name = min_width if max_name > max_col_width : max_name = max...
Print a summary of the data returned from a CVE scan .
32,078
def print_detail_scan_summary ( json_data , names = None ) : clean = True sevs = [ 'Critical' , 'Important' , 'Moderate' , 'Low' ] cve_summary = json_data [ 'host_results' ] image_template = " {0:10}: {1}" cve_template = " {0:10}: {1}" for image in cve_summary . keys ( ) : image_res = cve_summary [ image ] writeOu...
Print a detailed summary of the data returned from a CVE scan .
32,079
def call ( cmd , timeout = None , signum = signal . SIGKILL , keep_rc = False , encoding = "utf-8" , env = os . environ ) : if not isinstance ( cmd , list ) : cmd = [ cmd ] p = Pipeline ( * cmd , timeout = timeout , signum = signum , env = env ) res = p ( keep_rc = keep_rc ) if keep_rc : rc , output = res output = outp...
Execute a cmd or list of commands with an optional timeout in seconds .
32,080
def write ( self , output , mode = "w" , keep_rc = False ) : if isinstance ( output , six . string_types ) : already_exists = os . path . exists ( output ) try : with open ( output , mode ) as f : p = self . _build_pipes ( f ) rc = p . wait ( ) if keep_rc : return rc if rc : raise CalledProcessError ( rc , self . cmds ...
Executes the pipeline and writes the results to the supplied output . If output is a filename and the file didn t already exist before trying to write the file will be removed if an exception is raised .
32,081
def validate_kwargs ( self , kwargs ) : if not self . response_type : msg = "response_type must be set on the Response subclass." raise ValidationException ( msg ) if ( self . key_name and self . key_name in kwargs ) or "type" in kwargs : name = self . __class__ . __name__ msg = "%s is an invalid argument for %s" % ( s...
Validates expected subclass attributes and constructor keyword arguments .
32,082
def validate_key ( self , key ) : if not key : name = self . __class__ . __name__ msg = "%s response missing %s" % ( name , self . key_name ) raise ValidationException ( msg , self ) elif not isinstance ( key , str ) : msg = "Response contains invalid %s type" % self . key_name raise ValidationException ( msg , type ( ...
Called if the key_name class attribute is not None .
32,083
def adjust_for_length ( self , key , r , kwargs ) : length = len ( str ( kwargs ) ) if length > settings . defaults [ "max_detail_length" ] : self . _log_length_error ( key , length ) r [ "max_detail_length_error" ] = length return r return kwargs
Converts the response to a string and compares its length to a max length specified in settings . If the response is too long an error is logged and an abbreviated response is returned instead .
32,084
def _log_length_error ( self , key , length ) : extra = { "max_detail_length" : settings . defaults [ "max_detail_length" ] , "len" : length } if self . key_name : extra [ self . key_name ] = key msg = "Length of data in %s is too long." % self . __class__ . __name__ log . error ( msg , extra = extra )
Helper function for logging a response length error .
32,085
def _parse_oracle ( lines ) : config = { } for line in get_active_lines ( lines ) : if '\00' in line : line = cleanup . sub ( '' , line ) if '=' in line : ( key , value ) = line . split ( '=' , 1 ) key = key . strip ( whitespace + '"\'' ) . lower ( ) if ',' in line : value = [ s . strip ( whitespace + '"\'' ) . lower (...
Performs the actual file parsing returning a dict of the config values in a given Oracle DB config file .
32,086
def apply_upstring ( upstring , component_list ) : assert len ( upstring ) == len ( component_list ) def add_up_key ( comp_dict , up_indicator ) : assert up_indicator == 'U' or up_indicator == "_" comp_dict [ 'up' ] = up_indicator == 'U' for comp_dict , up_indicator in zip ( component_list , upstring ) : add_up_key ( c...
Update the dictionaries resulting from parse_array_start with the up key based on the upstring returned from parse_upstring .
32,087
def remove ( path , chmod = False ) : if not os . path . exists ( path ) : return if chmod : cmd = "chmod -R 755 %s" % path subproc . call ( cmd ) cmd = 'rm -rf "{p}"' . format ( p = path ) subproc . call ( cmd )
Remove a file or directory located on the filesystem at path .
32,088
def ensure_path ( path , mode = 0o777 ) : if path : try : umask = os . umask ( 000 ) os . makedirs ( path , mode ) os . umask ( umask ) except OSError as e : if e . errno != errno . EEXIST : raise
Ensure that path exists in a multiprocessing safe way .
32,089
def uptime ( ut , facter ) : ut = ut if ut and ut . loadavg : return Uptime ( ut . currtime , ut . updays , ut . uphhmm , ut . users , ut . loadavg , ut . uptime ) ft = facter if ft and hasattr ( ft , 'uptime_seconds' ) : import datetime secs = int ( ft . uptime_seconds ) up_dd = secs // ( 3600 * 24 ) up_hh = ( secs % ...
Check uptime and facts to get the uptime information .
32,090
def get_node ( service_name , host_name ) : return common_pb2 . Node ( identifier = common_pb2 . ProcessIdentifier ( host_name = socket . gethostname ( ) if host_name is None else host_name , pid = os . getpid ( ) , start_timestamp = proto_ts_from_datetime ( datetime . datetime . utcnow ( ) ) ) , library_info = common_...
Generates Node message from params and system information .
32,091
def end_span ( self , * args , ** kwargs ) : cur_span = self . current_span ( ) if cur_span is None and self . _spans_list : cur_span = self . _spans_list [ - 1 ] if cur_span is None : logging . warning ( 'No active span, cannot do end_span.' ) return cur_span . finish ( ) self . span_context . span_id = cur_span . par...
End a span . Update the span_id in SpanContext to the current span s parent span id ; Update the current span .
32,092
def add_attribute_to_current_span ( self , attribute_key , attribute_value ) : current_span = self . current_span ( ) current_span . add_attribute ( attribute_key , attribute_value )
Add attribute to current span .
32,093
def get_span_datas ( self , span ) : span_datas = [ span_data_module . SpanData ( name = ss . name , context = self . span_context , span_id = ss . span_id , parent_span_id = ss . parent_span . span_id if ss . parent_span else None , attributes = ss . attributes , start_time = ss . start_time , end_time = ss . end_time...
Extracts a list of SpanData tuples from a span
32,094
def add ( self , metric_producer ) : if metric_producer is None : raise ValueError with self . mp_lock : self . metric_producers . add ( metric_producer )
Add a metric producer .
32,095
def remove ( self , metric_producer ) : if metric_producer is None : raise ValueError try : with self . mp_lock : self . metric_producers . remove ( metric_producer ) except KeyError : pass
Remove a metric producer .
32,096
def add_message_event ( proto_message , span , message_event_type , message_id = 1 ) : span . add_time_event ( time_event = time_event . TimeEvent ( datetime . utcnow ( ) , message_event = time_event . MessageEvent ( message_id , type = message_event_type , uncompressed_size_bytes = proto_message . ByteSize ( ) ) ) )
Adds a MessageEvent to the span based off of the given protobuf message
32,097
def wrap_iter_with_message_events ( request_or_response_iter , span , message_event_type ) : for message_id , message in enumerate ( request_or_response_iter , start = 1 ) : add_message_event ( proto_message = message , span = span , message_event_type = message_event_type , message_id = message_id ) yield message
Wraps a request or response iterator to add message events to the span for each proto message sent or received
32,098
def format_stack_frame_json ( self ) : stack_frame_json = { } stack_frame_json [ 'function_name' ] = get_truncatable_str ( self . func_name ) stack_frame_json [ 'original_function_name' ] = get_truncatable_str ( self . original_func_name ) stack_frame_json [ 'file_name' ] = get_truncatable_str ( self . file_name ) stac...
Convert StackFrame object to json format .
32,099
def from_traceback ( cls , tb ) : stack_trace = cls ( stack_trace_hash_id = generate_hash_id_from_traceback ( tb ) ) for tb_frame_info in traceback . extract_tb ( tb ) : filename , line_num , fn_name , _ = tb_frame_info stack_trace . add_stack_frame ( StackFrame ( func_name = fn_name , original_func_name = fn_name , fi...
Initializes a StackTrace from a python traceback instance